prompt | completion |
---|---|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ['HERTZ_SECRET_KEY']
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ['HERTZ_DEBUG'] != 'False'
ALLOWED_HOSTS = ['*' if DEBUG else os.environ['HERTZ_HOST']]
<|fim▁hole|># Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'widget_tweaks',
'attendance',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'hertz.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'hertz.wsgi.application'
# Database
if 'DATABASE_HOST' in os.environ:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'postgres',
'USER': os.environ['POSTGRES_USER'],
'PASSWORD': os.environ['POSTGRES_PASSWORD'],
'HOST': os.environ['DATABASE_HOST'],
'PORT': 5432,
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Sao_Paulo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
# STATICFILES_DIRS = [
# os.path.join(BASE_DIR, 'static'),
# ]
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/login'<|fim▁end|> | |
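
The settings above are driven entirely by environment variables; note that `DEBUG = os.environ['HERTZ_DEBUG'] != 'False'` treats every value except the literal string `False` as truthy. A minimal sketch of a more defensive boolean parser (the helper name `env_bool` and its accepted spellings are assumptions for illustration, not part of the project):

```python
import os

def env_bool(name, default=False):
    """Parse a boolean-ish environment variable; unset falls back to default."""
    value = os.environ.get(name)
    if value is None:
        return default
    # treat common falsy spellings as False instead of only the literal 'False'
    return value.strip().lower() not in ('false', '0', 'no', '')

# Usage sketch:
#   os.environ['HERTZ_DEBUG'] = 'false'
#   env_bool('HERTZ_DEBUG')               -> False
#   os.environ['HERTZ_DEBUG'] != 'False'  -> True (only the exact string 'False' disables it)
```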
<|file_name|>characterfeatures.py<|end_file_name|><|fim▁begin|>"""Module containing character feature extractors."""<|fim▁hole|>
@register_feat
def characterSpace(text):
"""Return the total number of characters."""
return len(text)
@register_feat
def letterSpace(text):
"""Return the total number of letters (excludes spaces and punctuation)"""
count = 0
alphabet = string.ascii_lowercase + string.ascii_uppercase
for char in text:
if char in alphabet:
count += 1
return count<|fim▁end|> |
import string
from unstyle.features.featregister import register_feat |
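
The extractors above register themselves through the `register_feat` decorator from `unstyle.features.featregister`. That module's real implementation is not shown here; the following is only a hypothetical sketch of what such a registry decorator typically looks like, so the decorated functions can be applied in bulk:

```python
# Hypothetical registry sketch; the actual unstyle.features.featregister may differ.
FEATURE_REGISTRY = []

def register_feat(func):
    """Collect a decorated feature extractor in a module-level registry."""
    FEATURE_REGISTRY.append(func)
    return func

@register_feat
def characterSpace(text):
    """Return the total number of characters."""
    return len(text)

def extract_all(text):
    """Apply every registered extractor to one text sample."""
    return {feat.__name__: feat(text) for feat in FEATURE_REGISTRY}

# extract_all("hello world") -> {'characterSpace': 11}
```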
<|file_name|>utils.ts<|end_file_name|><|fim▁begin|>export function round(value: number, multiplicator?: number): number {
multiplicator = multiplicator || 10;
return Math.round(value * multiplicator) / multiplicator;
}
export function int16(msb: number, lsb: number): number {
const val = uint16(msb, lsb);
return val > 32767 ? val - 65536 : val;
}
<|fim▁hole|> return (msb << 8) | lsb;
}
export function uint20(msb: number, lsb: number, xlsb: number): number {
return ((((msb << 8) | lsb) << 8) | xlsb) >> 4;
}<|fim▁end|> |
export function uint16(msb: number, lsb: number): number {
|
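
The TypeScript helpers above combine an MSB/LSB byte pair into 16-bit values and apply a two's-complement correction for the signed case. The same bit manipulation, sketched in Python for illustration (the function names simply mirror the ones in utils.ts):

```python
def uint16(msb, lsb):
    """Combine two bytes into an unsigned 16-bit value."""
    return (msb << 8) | lsb

def int16(msb, lsb):
    """Signed 16-bit value: values above 0x7FFF wrap to the negative range."""
    val = uint16(msb, lsb)
    return val - 65536 if val > 32767 else val

def uint20(msb, lsb, xlsb):
    """Unsigned 20-bit value packed into the top bits of three bytes."""
    return ((((msb << 8) | lsb) << 8) | xlsb) >> 4

assert uint16(0xFF, 0xFE) == 65534
assert int16(0xFF, 0xFE) == -2
```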
<|file_name|>test_gpg.py<|end_file_name|><|fim▁begin|>#
# Copyright © 2012–2022 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import subprocess
from distutils.version import LooseVersion
from unittest import SkipTest
from django.core.cache import cache
from django.test import TestCase
from django.test.utils import override_settings<|fim▁hole|>from weblate.utils.checks import check_data_writable
from weblate.utils.unittest import tempdir_setting
from weblate.vcs.gpg import (
generate_gpg_key,
get_gpg_key,
get_gpg_public_key,
get_gpg_sign_key,
)
class GPGTest(TestCase):
gpg_error = None
@classmethod
def setUpClass(cls):
"""Check whether we can use gpg."""
super().setUpClass()
try:
result = subprocess.run(
["gpg", "--version"],
check=True,
text=True,
capture_output=True,
)
version = result.stdout.splitlines()[0].strip().rsplit(None, 1)[-1]
if LooseVersion(version) < LooseVersion("2.1"):
cls.gpg_error = "gpg too old"
except (subprocess.CalledProcessError, OSError):
cls.gpg_error = "gpg not found"
def setUp(self):
if self.gpg_error:
raise SkipTest(self.gpg_error)
def check_errors(self):
self.assertEqual(weblate.vcs.gpg.GPG_ERRORS, {})
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <[email protected]>", WEBLATE_GPG_ALGO="rsa512"
)
def test_generate(self):
self.assertEqual(check_data_writable(), [])
self.assertIsNone(get_gpg_key(silent=True))
key = generate_gpg_key()
self.check_errors()
self.assertIsNotNone(key)
self.assertEqual(key, get_gpg_key())
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <[email protected]>", WEBLATE_GPG_ALGO="rsa512"
)
def test_get(self):
self.assertEqual(check_data_writable(), [])
# This will generate new key
key = get_gpg_sign_key()
self.check_errors()
self.assertIsNotNone(key)
# Check cache access
self.assertEqual(key, get_gpg_sign_key())
# Check empty cache
cache.delete("gpg-key-id")
self.assertEqual(key, get_gpg_sign_key())
@tempdir_setting("DATA_DIR")
@override_settings(
WEBLATE_GPG_IDENTITY="Weblate <[email protected]>", WEBLATE_GPG_ALGO="rsa512"
)
def test_public(self):
self.assertEqual(check_data_writable(), [])
# This will generate new key
key = get_gpg_public_key()
self.check_errors()
self.assertIsNotNone(key)
# Check cache access
self.assertEqual(key, get_gpg_public_key())<|fim▁end|> |
import weblate.vcs.gpg |
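
`setUpClass` above extracts the GnuPG version from the first line of `gpg --version` output and skips the tests when it is older than 2.1. A standalone sketch of that parsing step on a captured sample string (the sample output line is an assumption; `distutils.version.LooseVersion` is deprecated but behaves as shown on Python versions where it is still available):

```python
from distutils.version import LooseVersion

# assumed sample of `gpg --version` output, used here instead of running gpg
sample = "gpg (GnuPG) 2.2.19\nlibgcrypt 1.8.5\n"

# same parsing as in setUpClass: first line, last whitespace-separated token
version = sample.splitlines()[0].strip().rsplit(None, 1)[-1]
assert version == "2.2.19"

# the tests are skipped when the installed gpg is older than 2.1
assert not (LooseVersion(version) < LooseVersion("2.1"))
```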
<|file_name|>init-dev-env.js<|end_file_name|><|fim▁begin|>module.exports = function(grunt) {
<|fim▁hole|> * - register git hooks (commit-msg)
*/
grunt.registerTask('init-dev-env', 'Initialize dev environment.', function() {
var fs = require('fs');
var done = this.async();
fs.symlink('../../tasks/lib/validate-commit-msg.js', '.git/hooks/commit-msg', function(e) {
if (!e) {
grunt.log.ok('Hook "validate-commit-msg" installed.');
}
done();
});
});
};<|fim▁end|> | /**
* Initialize development environment for Testacular
* |
<|file_name|>shadow-mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Local module shadows `ep_lib` from extern prelude
<|fim▁hole|>mod ep_lib {
pub struct S;
impl S {
pub fn internal(&self) {}
}
}
fn main() {
let s = ep_lib::S;
s.internal(); // OK
}<|fim▁end|> | |
<|file_name|>dispatcher.py<|end_file_name|><|fim▁begin|>#
# Copyright (C) 2012-2014, Quarkslab.
#
# This file is part of qb-sync.
#
# qb-sync is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import socket
import select
import base64
import binascii
import re
import ConfigParser
import traceback
HOST = 'localhost'
PORT = 9100
try:
import json
except:
print "[-] failed to import json\n%s" % repr(sys.exc_info())
sys.exit(0)
class Client():
def __init__(self, s_client, s_srv, name):
self.client_sock = s_client
self.srv_sock = s_srv
self.name = name
self.enabled = False
self.buffer = ''
def close(self):
self.enabled = False
if self.client_sock:
self.client_sock.close()
if self.srv_sock:
self.srv_sock.close()
def feed(self, data):
batch = []
self.buffer = ''.join([self.buffer, data])
if self.buffer.endswith("\n"):
batch = [req for req in self.buffer.strip().split('\n') if req != '']
self.buffer = ''
return batch
class DispatcherSrv():
def __init__(self):
self.idb_clients = []
self.dbg_client = None
self.srv_socks = []
self.opened_socks = []
self.current_dbg = None
self.current_dialect = 'unknown'
self.current_idb = None
self.current_module = None
self.sync_mode_auto = True
self.pat = re.compile('dbg disconnected')
self.req_handlers = {
'new_client': self.req_new_client,
'new_dbg': self.req_new_dbg,
'dbg_quit': self.req_dbg_quit,
'idb_n': self.req_idb_n,
'idb_list': self.req_idb_list,
'module': self.req_module,
'sync_mode': self.req_sync_mode,
'cmd': self.req_cmd,
'bc': self.req_bc,
'kill': self.req_kill<|fim▁hole|> def bind(self, host, port):
self.dbg_srv_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.dbg_srv_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.dbg_srv_sock.bind((host, port))
self.srv_socks.append(self.dbg_srv_sock)
if not (socket.gethostbyname(host) == '127.0.0.1'):
self.localhost_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.localhost_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.localhost_sock.bind(('localhost', port))
self.srv_socks.append(self.localhost_sock)
def accept(self, s):
new_socket, addr = s.accept()
self.opened_socks.append(new_socket)
def listen(self):
for s in self.srv_socks:
s.listen(5)
def close(self, s):
s.close()
self.opened_socks.remove(s)
def loop(self):
self.listen()
self.announcement("dispatcher listening")
while True:
rlist, wlist, xlist = select.select(self.srv_socks + self.opened_socks, [], [])
if not rlist:
self.announcement("socket error: select")
raise Exception("rabbit eating the cable")
for s in rlist:
if s in self.srv_socks:
self.accept(s)
else:
self.handle(s)
def handle(self, s):
client = self.sock_to_client(s)
for req in self.recvall(client):
self.parse_exec(s, req)
# find client object for its srv socket
def sock_to_client(self, s):
if self.current_dbg and (s == self.current_dbg.srv_sock):
client = self.current_dbg
else:
clist = [client for client in self.idb_clients if (client.srv_sock == s)]
if not clist:
client = Client(None, s, None)
self.idb_clients.append(client)
else:
client = clist[0]
return client
# buffered readline like function
def recvall(self, client):
try:
data = client.srv_sock.recv(4096)
if data == '':
raise
except:
if client == self.current_dbg:
self.broadcast("debugger closed the connection")
self.dbg_quit()
else:
self.client_quit(client.srv_sock)
self.broadcast("a client quit, nb client(s) left: %d" % len(self.idb_clients))
return []
return client.feed(data)
# parse and execute requests from clients (idbs or dbg)
def parse_exec(self, s, req):
if not (req[0:8] == '[notice]'):
# this is a normal [sync] request from debugger, forward it
self.forward(req)
# receive 'dbg disconnected', socket can be closed
if re.search(self.pat, req):
self.close(s)
return
req = self.normalize(req, 8)
try:
hash = json.loads(req)
except:
print "[-] dispatcher failed to parse json\n %s\n" % req
return
type = hash['type']
if not type in self.req_handlers:
print ("[*] dispatcher unknown request: %s" % type)
return
req_handler = self.req_handlers[type]
req_handler(s, hash)
def normalize(self, req, taglen):
req = req[taglen:]
req = req.replace("\\", "\\\\")
req = req.replace("\n", "")
return req
def puts(self, msg, s):
s.sendall(msg)
# dispatcher announcements are forwarded to the idb
def announcement(self, msg, s=None):
if not s:
if not self.current_idb:
return
s = self.current_idb.client_sock
try:
s.sendall("[notice]{\"type\":\"dispatcher\",\"subtype\":\"msg\",\"msg\":\"%s\"}\n" % msg)
except:
return
# send message to all connected idb clients
def broadcast(self, msg):
for idbc in self.idb_clients:
self.announcement(msg, idbc.client_sock)
# send dbg message to currently active idb client
def forward(self, msg, s=None):
if not s:
if not self.current_idb:
return
s = self.current_idb.client_sock
if s:
s.sendall(msg + "\n")
# send dbg message to all idb clients
def forward_all(self, msg, s=None):
for idbc in self.idb_clients:
self.forward(msg, idbc.client_sock)
# disable current idb and enable new idb matched from current module name
def switch_idb(self, new_idb):
msg = "[sync]{\"type\":\"broker\",\"subtype\":\"%s\"}\n"
if (not self.current_idb == new_idb) & (self.current_idb.enabled):
self.current_idb.client_sock.sendall(msg % "disable_idb")
self.current_idb.enabled = False
if new_idb:
new_idb.client_sock.sendall(msg % "enable_idb")
self.current_idb = new_idb
new_idb.enabled = True
# a new idb client connects to the dispatcher via its broker
def req_new_client(self, srv_sock, hash):
port, name = hash['port'], hash['idb']
try:
client_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_sock.connect(('localhost', port))
self.opened_socks.append(client_sock)
except:
self.opened_socks.remove(srv_sock)
srv_sock.close()
return
# check if an idb client is already registered with the same name
conflicting = [client for client in self.idb_clients if (client.name == name)]
# promote to idb client
new_client = self.sock_to_client(srv_sock)
new_client.client_sock = client_sock
new_client.name = name
self.broadcast("add new client (listening on port %d), nb client(s): %d" % (port, len(self.idb_clients)))
if conflicting:
self.broadcast("conflicting name: %s !" % new_client.name)
if not self.current_idb:
self.current_idb = new_client
# if new client match current module name, then enable it
if self.current_module == name:
self.switch_idb(new_client)
# inform new client about debugger's dialect
self.dbg_dialect(new_client)
# clean state when a client is quiting
def client_quit(self, s):
self.opened_socks.remove(s)
# remove exiting client from the list of active clients
for idbc in [idbc for idbc in self.idb_clients if (idbc.srv_sock == s)]:
self.idb_clients.remove(idbc)
self.opened_socks.remove(idbc.client_sock)
idbc.close()
# no more clients, let's kill ourself
if not self.idb_clients:
for s in self.srv_socks:
s.close()
sys.exit()
# a new debugger client connects to the dispatcher
def req_new_dbg(self, s, hash):
msg = hash['msg']
if self.current_dbg:
self.dbg_quit()
# promote to dbg client
self.current_dbg = self.sock_to_client(s)
self.current_dbg.client_sock = s
self.idb_clients.remove(self.current_dbg)
self.broadcast("new debugger client: %s" % msg)
# store dbg's dialect
if 'dialect' in hash:
self.current_dialect = hash['dialect']
self.dbg_dialect()
# inform client about debugger's dialect
def dbg_dialect(self, client=None):
msg = "[sync]{\"type\":\"dialect\",\"dialect\":\"%s\"}\n" % self.current_dialect
if client:
client.client_sock.sendall(msg)
else:
for idbc in self.idb_clients:
idbc.client_sock.sendall(msg)
# debugger client disconnect from the dispatcher
def req_dbg_quit(self, s, hash):
msg = hash['msg']
self.broadcast("debugger quit: %s" % msg)
self.dbg_quit()
# clean state when debugger is quiting
def dbg_quit(self):
self.opened_socks.remove(self.current_dbg.srv_sock)
self.current_dbg.close()
self.current_dbg = None
self.current_module = None
self.switch_idb(None)
self.current_dialect = 'unknown'
# handle kill notice from a client, exit properly if no more clients
def req_kill(self, s, hash):
self.client_quit(s)
self.broadcast("received a kill notice from client, %d client(s) left" % len(self.idb_clients))
# send list of currently connected idb clients
def req_idb_list(self, s, hash):
clist = "> currently connected idb(s):\n"
if not self.idb_clients:
clist += " no idb client yet\n"
else:
for i in range(len(self.idb_clients)):
clist += (" [%d] %s\n" % (i, self.idb_clients[i].name))
s.sendall(clist)
# manually set current active idb to idb n from idb list
def req_idb_n(self, s, hash):
idb = hash['idb']
try:
idbn = int(idb)
except:
s.sendall("> n should be a decimal value")
return
try:
idbc = self.idb_clients[idbn]
except:
s.sendall("> %d is invalid (see idblist)" % idbn)
return
self.switch_idb(idbc)
s.sendall("> current idb set to %d" % idbn)
# dbg notifies that its current module has changed
def req_module(self, s, hash):
modpath = hash['path']
self.current_module = modname = os.path.basename(modpath)
matching = [idbc for idbc in self.idb_clients if (idbc.name.lower() == modname.lower())]
if not self.sync_mode_auto:
self.broadcast("sync_mode_auto off")
return
if len(matching) == 1:
# matched is set as active
self.switch_idb(matching[0])
else:
if not len(matching):
msg = "mod request has no match for %s"
else:
msg = "ambiguous mod request, too many matches for %s"
self.broadcast(msg % modname)
# no match: the current idb (if any) is disabled
if self.current_idb.enabled:
self.switch_idb(None)
# sync mode tells if idb switch is automatic or manual
def req_sync_mode(self, s, hash):
mode = hash['auto']
self.broadcast("sync mode auto set to %s" % mode)
self.sync_mode_auto = (mode == "on")
# bc request should be forwarded to all idbs
def req_bc(self, s, hash):
msg = "[sync]%s" % json.dumps(hash)
self.forward_all(msg)
def req_cmd(self, s, hash):
cmd = hash['cmd']
self.current_dbg.client_sock.sendall("%s\n" % cmd)
def err_log(msg):
fd = open("%s.err" % __file__, 'w')
fd.write(msg)
fd.close()
if __name__ == "__main__":
server = DispatcherSrv()
for loc in ['IDB_PATH', 'USERPROFILE', 'HOME']:
if loc in os.environ:
confpath = os.path.join(os.path.realpath(os.environ[loc]), '.sync')
if os.path.exists(confpath):
config = ConfigParser.SafeConfigParser({'host': HOST, 'port': PORT})
config.read(confpath)
HOST = config.get("INTERFACE", 'host')
PORT = config.getint("INTERFACE", 'port')
server.announcement("configuration file loaded")
break
try:
server.bind(HOST, PORT)
except Exception as e:
err_log("dispatcher failed to bind on %s:%s\n-> %s" % (HOST, PORT, repr(e)))
sys.exit()
try:
server.loop()
except Exception as e:
err_log("dispatcher failed\n-> %s" % repr(e))
server.announcement("dispatcher stop")<|fim▁end|> | }
|
<|file_name|>run_sdae.py<|end_file_name|><|fim▁begin|>import cPickle
import gzip
import os, sys, errno
import time
import math
# numpy & theano imports need to be done in this order (only for some numpy installations, not sure why)
import numpy
#import gnumpy as gnp
# we need to explicitly import this in some cases, not sure why this doesn't get imported with numpy itself
import numpy.distutils.__config__
# and only after that can we import theano
import theano
from utils.providers import ListDataProvider
from frontend.label_normalisation import HTSLabelNormalisation, XMLLabelNormalisation
from frontend.silence_remover import SilenceRemover
from frontend.silence_remover import trim_silence
from frontend.min_max_norm import MinMaxNormalisation
from frontend.acoustic_composition import AcousticComposition
from frontend.parameter_generation import ParameterGeneration
from frontend.mean_variance_norm import MeanVarianceNorm
# the new class for label composition and normalisation
from frontend.label_composer import LabelComposer
from frontend.label_modifier import HTSLabelModification
#from frontend.mlpg_fast import MLParameterGenerationFast
#from frontend.mlpg_fast_layer import MLParameterGenerationFastLayer
import configuration
from models.deep_rnn import DeepRecurrentNetwork
from models.sdae import StackedDenoiseAutoEncoder
from utils.compute_distortion import DistortionComputation, IndividualDistortionComp
from utils.generate import generate_wav
from utils.learn_rates import ExpDecreaseLearningRate
from io_funcs.binary_io import BinaryIOCollection
#import matplotlib.pyplot as plt
# our custom logging class that can also plot
#from logplot.logging_plotting import LoggerPlotter, MultipleTimeSeriesPlot, SingleWeightMatrixPlot
from logplot.logging_plotting import LoggerPlotter, MultipleSeriesPlot, SingleWeightMatrixPlot
import logging # as logging
import logging.config
import StringIO
def extract_file_id_list(file_list):
file_id_list = []
for file_name in file_list:
file_id = os.path.basename(os.path.splitext(file_name)[0])
file_id_list.append(file_id)
return file_id_list
def read_file_list(file_name):
logger = logging.getLogger("read_file_list")
file_lists = []
fid = open(file_name)
for line in fid.readlines():
line = line.strip()
if len(line) < 1:
continue
file_lists.append(line)
fid.close()
logger.debug('Read file list from %s' % file_name)
return file_lists
def make_output_file_list(out_dir, in_file_lists):
out_file_lists = []
for in_file_name in in_file_lists:
file_id = os.path.basename(in_file_name)
out_file_name = out_dir + '/' + file_id
out_file_lists.append(out_file_name)
return out_file_lists
def prepare_file_path_list(file_id_list, file_dir, file_extension, new_dir_switch=True):
if not os.path.exists(file_dir) and new_dir_switch:
os.makedirs(file_dir)
file_name_list = []
for file_id in file_id_list:
file_name = file_dir + '/' + file_id + file_extension
file_name_list.append(file_name)
return file_name_list
def visualize_dnn(dnn):
layer_num = len(dnn.params) ## including input and output
plotlogger = logging.getLogger("plotting")
for i in xrange(layer_num):
fig_name = 'Activation weights W' + str(i) + '_' + dnn.params[i].name
fig_title = 'Activation weights of W' + str(i)
xlabel = 'Neuron index of hidden layer ' + str(i)
ylabel = 'Neuron index of hidden layer ' + str(i+1)
if i == 0:
xlabel = 'Input feature index'
if i == layer_num-1:
ylabel = 'Output feature index'
aa = dnn.params[i].get_value(borrow=True).T
print aa.shape, aa.size
if aa.size > aa.shape[0]:
plotlogger.create_plot(fig_name, SingleWeightMatrixPlot)
plotlogger.add_plot_point(fig_name, fig_name, dnn.params[i].get_value(borrow=True).T)
plotlogger.save_plot(fig_name, title=fig_name, xlabel=xlabel, ylabel=ylabel)
def load_covariance(var_file_dict, out_dimension_dict):
var = {}
io_funcs = BinaryIOCollection()
for feature_name in var_file_dict.keys():
var_values, dimension = io_funcs.load_binary_file_frame(var_file_dict[feature_name], 1)
var_values = numpy.reshape(var_values, (out_dimension_dict[feature_name], 1))
var[feature_name] = var_values
return var
def train_DNN(train_xy_file_list, valid_xy_file_list, \
nnets_file_name, n_ins, n_outs, ms_outs, hyper_params, buffer_size, plot=False, var_dict=None,
cmp_mean_vector = None, cmp_std_vector = None, init_dnn_model_file = None):
# get loggers for this function
# this one writes to both console and file
logger = logging.getLogger("main.train_DNN")
logger.debug('Starting train_DNN')
if plot:
# this one takes care of plotting duties
plotlogger = logging.getLogger("plotting")
# create an (empty) plot of training convergence, ready to receive data points
plotlogger.create_plot('training convergence',MultipleSeriesPlot)
try:
assert numpy.sum(ms_outs) == n_outs
except AssertionError:
logger.critical('the summation of multi-stream outputs does not equal to %d' %(n_outs))
raise
####parameters#####
finetune_lr = float(hyper_params['learning_rate'])
training_epochs = int(hyper_params['training_epochs'])
batch_size = int(hyper_params['batch_size'])
l1_reg = float(hyper_params['l1_reg'])
l2_reg = float(hyper_params['l2_reg'])
warmup_epoch = int(hyper_params['warmup_epoch'])
momentum = float(hyper_params['momentum'])
warmup_momentum = float(hyper_params['warmup_momentum'])
hidden_layer_size = hyper_params['hidden_layer_size']
buffer_utt_size = buffer_size
early_stop_epoch = int(hyper_params['early_stop_epochs'])
hidden_activation = hyper_params['hidden_activation']
output_activation = hyper_params['output_activation']
model_type = hyper_params['model_type']
hidden_layer_type = hyper_params['hidden_layer_type']
## use a switch to turn on pretraining
## pretraining may not help too much, if this case, we turn it off to save time
do_pretraining = hyper_params['do_pretraining']
pretraining_epochs = int(hyper_params['pretraining_epochs'])
pretraining_lr = float(hyper_params['pretraining_lr'])
sequential_training = hyper_params['sequential_training']
dropout_rate = hyper_params['dropout_rate']
# sequential_training = True
buffer_size = int(buffer_size / batch_size) * batch_size
###################
(train_x_file_list, train_y_file_list) = train_xy_file_list
(valid_x_file_list, valid_y_file_list) = valid_xy_file_list
logger.debug('Creating training data provider')
train_data_reader = ListDataProvider(x_file_list = train_x_file_list, y_file_list = train_y_file_list,
n_ins = n_ins, n_outs = n_outs, buffer_size = buffer_size, sequential = sequential_training, shuffle = True)
logger.debug('Creating validation data provider')
valid_data_reader = ListDataProvider(x_file_list = valid_x_file_list, y_file_list = valid_y_file_list,
n_ins = n_ins, n_outs = n_outs, buffer_size = buffer_size, sequential = sequential_training, shuffle = False)
shared_train_set_xy, temp_train_set_x, temp_train_set_y = train_data_reader.load_one_partition()
train_set_x, train_set_y = shared_train_set_xy
shared_valid_set_xy, valid_set_x, valid_set_y = valid_data_reader.load_one_partition() #validation data is still read block by block
valid_set_x, valid_set_y = shared_valid_set_xy
train_data_reader.reset()
valid_data_reader.reset()
##temporarily we use the training set as pretrain_set_x.
##we need to support any data for pretraining
# pretrain_set_x = train_set_x
# numpy random generator
numpy_rng = numpy.random.RandomState(123)
logger.info('building the model')
dnn_model = None
pretrain_fn = None ## not all the model support pretraining right now
train_fn = None
valid_fn = None
valid_model = None ## valid_fn and valid_model are the same; reserved to compute multi-stream distortion
if model_type == 'DNN':
dnn_model = DeepRecurrentNetwork(n_in= n_ins, hidden_layer_size = hidden_layer_size, n_out = n_outs,
L1_reg = l1_reg, L2_reg = l2_reg, hidden_layer_type = hidden_layer_type, dropout_rate = dropout_rate)
train_fn, valid_fn = dnn_model.build_finetune_functions(
(train_set_x, train_set_y), (valid_set_x, valid_set_y)) #, batch_size=batch_size
elif model_type == 'SDAE':
dnn_model = StackedDenoiseAutoEncoder(n_in= n_ins, hidden_layer_size = hidden_layer_size, n_out = n_outs,
L1_reg = l1_reg, L2_reg = l2_reg, hidden_layer_type = hidden_layer_type, dropout_rate = dropout_rate)
if do_pretraining:
#temporarily we use the training set as pretrain_set_x.
#we need to support any data for pretraining
pretrain_set_x = train_set_x
pretraining_fn = dnn_model.pretraining_functions(pretrain_set_x)
train_fn, valid_fn = dnn_model.build_finetune_functions(
(train_set_x, train_set_y), (valid_set_x, valid_set_y)) #, batch_size=batch_size
else:
logger.critical('%s type NN model is not supported!' %(model_type))
raise
## if pretraining is supported more than one model, add the switch here
## be careful to use autoencoder for pretraining here:
if do_pretraining and model_type == 'SDAE':
logger.info('pretraining the %s model' %(model_type))
corruption_level = 0.0
## in SDAE we do layer-wise pretraining using autoencoders
for i in xrange(dnn_model.n_layers):
for epoch in xrange(pretraining_epochs):
sub_start_time = time.clock()
pretrain_loss = []
while (not train_data_reader.is_finish()):
shared_train_set_xy, temp_train_set_x, temp_train_set_y = train_data_reader.load_one_partition()
# if sequential training, the batch size will be the number of frames in an utterance
if sequential_training == True:
batch_size = temp_train_set_x.shape[0]
n_train_batches = temp_train_set_x.shape[0] / batch_size
for index in xrange(n_train_batches):
## send a batch to the shared variable, rather than pass the batch size and batch index to the finetune function
pretrain_set_x.set_value(numpy.asarray(temp_train_set_x[index*batch_size:(index + 1)*batch_size], dtype=theano.config.floatX), borrow=True)
pretrain_loss.append(pretraining_fn[i](corruption=corruption_level,
learning_rate=pretraining_lr))
sub_end_time = time.clock()
logger.info('Pre-training layer %i, epoch %d, cost %s, time spent %.2f' % (i+1, epoch+1, numpy.mean(pretrain_loss), (sub_end_time - sub_start_time)))
train_data_reader.reset()
logger.info('fine-tuning the %s model' %(model_type))
start_time = time.time()
best_dnn_model = dnn_model
best_validation_loss = sys.float_info.max
previous_loss = sys.float_info.max
early_stop = 0
epoch = 0
# finetune_lr = 0.000125
previous_finetune_lr = finetune_lr
print finetune_lr
while (epoch < training_epochs):
epoch = epoch + 1
current_momentum = momentum
current_finetune_lr = finetune_lr
if epoch <= warmup_epoch:
current_finetune_lr = finetune_lr
current_momentum = warmup_momentum
else:
current_finetune_lr = previous_finetune_lr * 0.5
previous_finetune_lr = current_finetune_lr
train_error = []
sub_start_time = time.time()
while (not train_data_reader.is_finish()):
shared_train_set_xy, temp_train_set_x, temp_train_set_y = train_data_reader.load_one_partition()
# train_set_x.set_value(numpy.asarray(temp_train_set_x, dtype=theano.config.floatX), borrow=True)
# train_set_y.set_value(numpy.asarray(temp_train_set_y, dtype=theano.config.floatX), borrow=True)
# if sequential training, the batch size will be the number of frames in an utterance
if sequential_training == True:
batch_size = temp_train_set_x.shape[0]
n_train_batches = temp_train_set_x.shape[0] / batch_size
for index in xrange(n_train_batches):
## send a batch to the shared variable, rather than pass the batch size and batch index to the finetune function
train_set_x.set_value(numpy.asarray(temp_train_set_x[index*batch_size:(index + 1)*batch_size], dtype=theano.config.floatX), borrow=True)
train_set_y.set_value(numpy.asarray(temp_train_set_y[index*batch_size:(index + 1)*batch_size], dtype=theano.config.floatX), borrow=True)
this_train_error = train_fn(current_finetune_lr, current_momentum)
train_error.append(this_train_error)
train_data_reader.reset()
logger.debug('calculating validation loss')
validation_losses = []
while (not valid_data_reader.is_finish()):
shared_valid_set_xy, temp_valid_set_x, temp_valid_set_y = valid_data_reader.load_one_partition()
valid_set_x.set_value(numpy.asarray(temp_valid_set_x, dtype=theano.config.floatX), borrow=True)
valid_set_y.set_value(numpy.asarray(temp_valid_set_y, dtype=theano.config.floatX), borrow=True)
this_valid_loss = valid_fn()
validation_losses.append(this_valid_loss)
valid_data_reader.reset()
this_validation_loss = numpy.mean(validation_losses)
this_train_valid_loss = numpy.mean(numpy.asarray(train_error))
sub_end_time = time.time()
loss_difference = this_validation_loss - previous_loss
logger.info('epoch %i, validation error %f, train error %f time spent %.2f' %(epoch, this_validation_loss, this_train_valid_loss, (sub_end_time - sub_start_time)))
if plot:
plotlogger.add_plot_point('training convergence','validation set',(epoch,this_validation_loss))
plotlogger.add_plot_point('training convergence','training set',(epoch,this_train_valid_loss))
plotlogger.save_plot('training convergence',title='Progress of training and validation error',xlabel='epochs',ylabel='error')
if this_validation_loss < best_validation_loss:
if epoch > 5:
cPickle.dump(best_dnn_model, open(nnets_file_name, 'wb'))
best_dnn_model = dnn_model
best_validation_loss = this_validation_loss
# logger.debug('validation loss decreased, so saving model')
if this_validation_loss >= previous_loss:
logger.debug('validation loss increased')
# dbn = best_dnn_model
early_stop += 1
if epoch > 15 and early_stop > early_stop_epoch:
logger.debug('stopping early')
break
if math.isnan(this_validation_loss):
break
previous_loss = this_validation_loss
end_time = time.time()
# cPickle.dump(best_dnn_model, open(nnets_file_name, 'wb'))
logger.info('overall training time: %.2fm validation error %f' % ((end_time - start_time) / 60., best_validation_loss))
if plot:
plotlogger.save_plot('training convergence',title='Final training and validation error',xlabel='epochs',ylabel='error')
return best_validation_loss
def dnn_generation(valid_file_list, nnets_file_name, n_ins, n_outs, out_file_list):
logger = logging.getLogger("dnn_generation")
logger.debug('Starting dnn_generation')
plotlogger = logging.getLogger("plotting")
dnn_model = cPickle.load(open(nnets_file_name, 'rb'))
file_number = len(valid_file_list)
for i in xrange(file_number): #file_number
logger.info('generating %4d of %4d: %s' % (i+1,file_number,valid_file_list[i]) )
fid_lab = open(valid_file_list[i], 'rb')
features = numpy.fromfile(fid_lab, dtype=numpy.float32)
fid_lab.close()
features = features[:(n_ins * (features.size / n_ins))]
test_set_x = features.reshape((-1, n_ins))
predicted_parameter = dnn_model.parameter_prediction(test_set_x)
### write to cmp file
predicted_parameter = numpy.array(predicted_parameter, 'float32')
temp_parameter = predicted_parameter
fid = open(out_file_list[i], 'wb')
predicted_parameter.tofile(fid)
logger.debug('saved to %s' % out_file_list[i])
fid.close()
def dnn_generation_lstm(valid_file_list, nnets_file_name, n_ins, n_outs, out_file_list):
logger = logging.getLogger("dnn_generation")
logger.debug('Starting dnn_generation')
plotlogger = logging.getLogger("plotting")
dnn_model = cPickle.load(open(nnets_file_name, 'rb'))
visualize_dnn(dnn_model)
file_number = len(valid_file_list)
for i in xrange(file_number): #file_number
logger.info('generating %4d of %4d: %s' % (i+1,file_number,valid_file_list[i]) )
fid_lab = open(valid_file_list[i], 'rb')
features = numpy.fromfile(fid_lab, dtype=numpy.float32)
fid_lab.close()
features = features[:(n_ins * (features.size / n_ins))]
test_set_x = features.reshape((-1, n_ins))
predicted_parameter = dnn_model.parameter_prediction_lstm(test_set_x)
### write to cmp file
predicted_parameter = numpy.array(predicted_parameter, 'float32')
temp_parameter = predicted_parameter
fid = open(out_file_list[i], 'wb')
predicted_parameter.tofile(fid)
logger.debug('saved to %s' % out_file_list[i])
fid.close()
##generate bottleneck layer as features
def dnn_hidden_generation(valid_file_list, nnets_file_name, n_ins, n_outs, out_file_list):
logger = logging.getLogger("dnn_generation")
logger.debug('Starting dnn_generation')
plotlogger = logging.getLogger("plotting")
dnn_model = cPickle.load(open(nnets_file_name, 'rb'))
file_number = len(valid_file_list)
for i in xrange(file_number):
logger.info('generating %4d of %4d: %s' % (i+1,file_number,valid_file_list[i]) )
fid_lab = open(valid_file_list[i], 'rb')
features = numpy.fromfile(fid_lab, dtype=numpy.float32)
fid_lab.close()
features = features[:(n_ins * (features.size / n_ins))]
features = features.reshape((-1, n_ins))
temp_set_x = features.tolist()
test_set_x = theano.shared(numpy.asarray(temp_set_x, dtype=theano.config.floatX))
predicted_parameter = dnn_model.generate_top_hidden_layer(test_set_x=test_set_x)
### write to cmp file
predicted_parameter = numpy.array(predicted_parameter, 'float32')
temp_parameter = predicted_parameter
fid = open(out_file_list[i], 'wb')
predicted_parameter.tofile(fid)
logger.debug('saved to %s' % out_file_list[i])
fid.close()
def main_function(cfg):
# get a logger for this main function
logger = logging.getLogger("main")
# get another logger to handle plotting duties
plotlogger = logging.getLogger("plotting")
# later, we might do this via a handler that is created, attached and configured
# using the standard config mechanism of the logging module
# but for now we need to do it manually
plotlogger.set_plot_path(cfg.plot_dir)
#### parameter setting########
hidden_layer_size = cfg.hyper_params['hidden_layer_size']
####prepare environment
try:
file_id_list = read_file_list(cfg.file_id_scp)
logger.debug('Loaded file id list from %s' % cfg.file_id_scp)
except IOError:
# this means that open(...) threw an error
logger.critical('Could not load file id list from %s' % cfg.file_id_scp)
raise
###total file number including training, development, and testing
total_file_number = len(file_id_list)
data_dir = cfg.data_dir
nn_cmp_dir = os.path.join(data_dir, 'nn' + cfg.combined_feature_name + '_' + str(cfg.cmp_dim))
nn_cmp_norm_dir = os.path.join(data_dir, 'nn_norm' + cfg.combined_feature_name + '_' + str(cfg.cmp_dim))
model_dir = os.path.join(cfg.work_dir, 'nnets_model')
gen_dir = os.path.join(cfg.work_dir, 'gen')
in_file_list_dict = {}
for feature_name in cfg.in_dir_dict.keys():
in_file_list_dict[feature_name] = prepare_file_path_list(file_id_list, cfg.in_dir_dict[feature_name], cfg.file_extension_dict[feature_name], False)
nn_cmp_file_list = prepare_file_path_list(file_id_list, nn_cmp_dir, cfg.cmp_ext)
nn_cmp_norm_file_list = prepare_file_path_list(file_id_list, nn_cmp_norm_dir, cfg.cmp_ext)
###normalisation information
norm_info_file = os.path.join(data_dir, 'norm_info' + cfg.combined_feature_name + '_' + str(cfg.cmp_dim) + '_' + cfg.output_feature_normalisation + '.dat')
### normalise input full context label
# currently supporting two different forms of linguistic features
# later, we should generalise this
if cfg.label_style == 'HTS':
label_normaliser = HTSLabelNormalisation(question_file_name=cfg.question_file_name, add_frame_features=cfg.add_frame_features, subphone_feats=cfg.subphone_feats)
lab_dim = label_normaliser.dimension + cfg.appended_input_dim
logger.info('Input label dimension is %d' % lab_dim)
suffix=str(lab_dim)
# no longer supported - use new "composed" style labels instead
elif cfg.label_style == 'composed':
# label_normaliser = XMLLabelNormalisation(xpath_file_name=cfg.xpath_file_name)
suffix='composed'
if cfg.process_labels_in_work_dir:
label_data_dir = cfg.work_dir
else:
label_data_dir = data_dir
# the number can be removed
binary_label_dir = os.path.join(label_data_dir, 'binary_label_'+suffix)
nn_label_dir = os.path.join(label_data_dir, 'nn_no_silence_lab_'+suffix)
nn_label_norm_dir = os.path.join(label_data_dir, 'nn_no_silence_lab_norm_'+suffix)
in_label_align_file_list = prepare_file_path_list(file_id_list, cfg.in_label_align_dir, cfg.lab_ext, False)
binary_label_file_list = prepare_file_path_list(file_id_list, binary_label_dir, cfg.lab_ext)
nn_label_file_list = prepare_file_path_list(file_id_list, nn_label_dir, cfg.lab_ext)
nn_label_norm_file_list = prepare_file_path_list(file_id_list, nn_label_norm_dir, cfg.lab_ext)
dur_file_list = prepare_file_path_list(file_id_list, cfg.in_dur_dir, cfg.dur_ext)
lf0_file_list = prepare_file_path_list(file_id_list, cfg.in_lf0_dir, cfg.lf0_ext)
# to do - sanity check the label dimension here?
min_max_normaliser = None
label_norm_file = 'label_norm_%s_%d.dat' %(cfg.label_style, lab_dim)
label_norm_file = os.path.join(label_data_dir, label_norm_file)
if cfg.GenTestList:
try:
test_id_list = read_file_list(cfg.test_id_scp)
logger.debug('Loaded file id list from %s' % cfg.test_id_scp)
except IOError:
# this means that open(...) threw an error
logger.critical('Could not load file id list from %s' % cfg.test_id_scp)
raise
in_label_align_file_list = prepare_file_path_list(test_id_list, cfg.in_label_align_dir, cfg.lab_ext, False)
binary_label_file_list = prepare_file_path_list(test_id_list, binary_label_dir, cfg.lab_ext)
nn_label_file_list = prepare_file_path_list(test_id_list, nn_label_dir, cfg.lab_ext)
nn_label_norm_file_list = prepare_file_path_list(test_id_list, nn_label_norm_dir, cfg.lab_ext)
if cfg.NORMLAB and (cfg.label_style == 'HTS'):
# simple HTS labels
logger.info('preparing label data (input) using standard HTS style labels')
label_normaliser.perform_normalisation(in_label_align_file_list, binary_label_file_list)
remover = SilenceRemover(n_cmp = lab_dim, silence_pattern = cfg.silence_pattern, remove_frame_features = cfg.add_frame_features, subphone_feats = cfg.subphone_feats)
remover.remove_silence(binary_label_file_list, in_label_align_file_list, nn_label_file_list)
min_max_normaliser = MinMaxNormalisation(feature_dimension = lab_dim, min_value = 0.01, max_value = 0.99)
###use only training data to find min-max information, then apply on the whole dataset
if cfg.GenTestList:
min_max_normaliser.load_min_max_values(label_norm_file)
else:
min_max_normaliser.find_min_max_values(nn_label_file_list[0:cfg.train_file_number])
min_max_normaliser.normalise_data(nn_label_file_list, nn_label_norm_file_list)
if cfg.NORMLAB and (cfg.label_style == 'composed'):
# new flexible label preprocessor
logger.info('preparing label data (input) using "composed" style labels')
label_composer = LabelComposer()
label_composer.load_label_configuration(cfg.label_config_file)
logger.info('Loaded label configuration')
# logger.info('%s' % label_composer.configuration.labels )
lab_dim=label_composer.compute_label_dimension()
logger.info('label dimension will be %d' % lab_dim)
if cfg.precompile_xpaths:
label_composer.precompile_xpaths()
# there are now a set of parallel input label files (e.g, one set of HTS and another set of Ossian trees)
# create all the lists of these, ready to pass to the label composer
in_label_align_file_list = {}
for label_style, label_style_required in label_composer.label_styles.iteritems():
if label_style_required:
logger.info('labels of style %s are required - constructing file paths for them' % label_style)
if label_style == 'xpath':
in_label_align_file_list['xpath'] = prepare_file_path_list(file_id_list, cfg.xpath_label_align_dir, cfg.utt_ext, False)
elif label_style == 'hts':
in_label_align_file_list['hts'] = prepare_file_path_list(file_id_list, cfg.hts_label_align_dir, cfg.lab_ext, False)
else:
logger.critical('unsupported label style %s specified in label configuration' % label_style)
raise Exception
# now iterate through the files, one at a time, constructing the labels for them
num_files=len(file_id_list)
logger.info('the label styles required are %s' % label_composer.label_styles)
for i in xrange(num_files):
logger.info('making input label features for %4d of %4d' % (i+1,num_files))
# iterate through the required label styles and open each corresponding label file
<|fim▁hole|>
for label_style, label_style_required in label_composer.label_styles.iteritems():
# the files will be a parallel set of files for a single utterance
# e.g., the XML tree and an HTS label file
if label_style_required:
required_labels[label_style] = open(in_label_align_file_list[label_style][i] , 'r')
logger.debug(' opening label file %s' % in_label_align_file_list[label_style][i])
logger.debug('label styles with open files: %s' % required_labels)
label_composer.make_labels(required_labels,out_file_name=binary_label_file_list[i],fill_missing_values=cfg.fill_missing_values,iterate_over_frames=cfg.iterate_over_frames)
# now close all opened files
for fd in required_labels.itervalues():
fd.close()
# silence removal
if cfg.remove_silence_using_binary_labels:
silence_feature = 0 ## use first feature in label -- hardcoded for now
logger.info('Silence removal from label using silence feature: %s'%(label_composer.configuration.labels[silence_feature]))
logger.info('Silence will be removed from CMP files in same way')
## Binary labels have 2 roles: both the thing trimmed and the instructions for trimming:
trim_silence(binary_label_file_list, nn_label_file_list, lab_dim, \
binary_label_file_list, lab_dim, silence_feature)
else:
logger.info('No silence removal done')
# start from the labels we have just produced, not trimmed versions
nn_label_file_list = binary_label_file_list
min_max_normaliser = MinMaxNormalisation(feature_dimension = lab_dim, min_value = 0.01, max_value = 0.99)
###use only training data to find min-max information, then apply on the whole dataset
min_max_normaliser.find_min_max_values(nn_label_file_list[0:cfg.train_file_number])
min_max_normaliser.normalise_data(nn_label_file_list, nn_label_norm_file_list)
if min_max_normaliser != None and not cfg.GenTestList:
### save label normalisation information for unseen testing labels
label_min_vector = min_max_normaliser.min_vector
label_max_vector = min_max_normaliser.max_vector
label_norm_info = numpy.concatenate((label_min_vector, label_max_vector), axis=0)
label_norm_info = numpy.array(label_norm_info, 'float32')
fid = open(label_norm_file, 'wb')
label_norm_info.tofile(fid)
fid.close()
logger.info('saved %s vectors to %s' %(label_min_vector.size, label_norm_file))
### make output duration data
if cfg.MAKEDUR:
logger.info('creating duration (output) features')
feature_type = cfg.dur_feature_type
label_normaliser.prepare_dur_data(in_label_align_file_list, dur_file_list, feature_type)
### make output acoustic data
if cfg.MAKECMP:
logger.info('creating acoustic (output) features')
delta_win = cfg.delta_win #[-0.5, 0.0, 0.5]
acc_win = cfg.acc_win #[1.0, -2.0, 1.0]
acoustic_worker = AcousticComposition(delta_win = delta_win, acc_win = acc_win)
if 'dur' in cfg.in_dir_dict.keys() and cfg.AcousticModel:
acoustic_worker.make_equal_frames(dur_file_list, lf0_file_list, cfg.in_dimension_dict)
acoustic_worker.prepare_nn_data(in_file_list_dict, nn_cmp_file_list, cfg.in_dimension_dict, cfg.out_dimension_dict)
if cfg.remove_silence_using_binary_labels:
## do this to get lab_dim:
label_composer = LabelComposer()
label_composer.load_label_configuration(cfg.label_config_file)
lab_dim=label_composer.compute_label_dimension()
silence_feature = 0 ## use first feature in label -- hardcoded for now
logger.info('Silence removal from CMP using binary label file')
## overwrite the untrimmed audio with the trimmed version:
trim_silence(nn_cmp_file_list, nn_cmp_file_list, cfg.cmp_dim,
binary_label_file_list, lab_dim, silence_feature)
else: ## back off to previous method using HTS labels:
remover = SilenceRemover(n_cmp = cfg.cmp_dim, silence_pattern = cfg.silence_pattern, remove_frame_features = cfg.add_frame_features, subphone_feats = cfg.subphone_feats)
remover.remove_silence(nn_cmp_file_list[0:cfg.train_file_number+cfg.valid_file_number],
in_label_align_file_list[0:cfg.train_file_number+cfg.valid_file_number],
nn_cmp_file_list[0:cfg.train_file_number+cfg.valid_file_number]) # save to itself
### save acoustic normalisation information for normalising the features back
var_dir = os.path.join(data_dir, 'var')
if not os.path.exists(var_dir):
os.makedirs(var_dir)
var_file_dict = {}
for feature_name in cfg.out_dimension_dict.keys():
var_file_dict[feature_name] = os.path.join(var_dir, feature_name + '_' + str(cfg.out_dimension_dict[feature_name]))
### normalise output acoustic data
if cfg.NORMCMP:
logger.info('normalising acoustic (output) features using method %s' % cfg.output_feature_normalisation)
cmp_norm_info = None
if cfg.output_feature_normalisation == 'MVN':
normaliser = MeanVarianceNorm(feature_dimension=cfg.cmp_dim)
###calculate mean and std vectors on the training data, and apply on the whole dataset
global_mean_vector = normaliser.compute_mean(nn_cmp_file_list[0:cfg.train_file_number], 0, cfg.cmp_dim)
global_std_vector = normaliser.compute_std(nn_cmp_file_list[0:cfg.train_file_number], global_mean_vector, 0, cfg.cmp_dim)
normaliser.feature_normalisation(nn_cmp_file_list[0:cfg.train_file_number+cfg.valid_file_number],
nn_cmp_norm_file_list[0:cfg.train_file_number+cfg.valid_file_number])
cmp_norm_info = numpy.concatenate((global_mean_vector, global_std_vector), axis=0)
elif cfg.output_feature_normalisation == 'MINMAX':
min_max_normaliser = MinMaxNormalisation(feature_dimension = cfg.cmp_dim)
global_mean_vector = min_max_normaliser.compute_mean(nn_cmp_file_list[0:cfg.train_file_number])
global_std_vector = min_max_normaliser.compute_std(nn_cmp_file_list[0:cfg.train_file_number], global_mean_vector)
min_max_normaliser = MinMaxNormalisation(feature_dimension = cfg.cmp_dim, min_value = 0.01, max_value = 0.99)
min_max_normaliser.find_min_max_values(nn_cmp_file_list[0:cfg.train_file_number])
min_max_normaliser.normalise_data(nn_cmp_file_list, nn_cmp_norm_file_list)
cmp_min_vector = min_max_normaliser.min_vector
cmp_max_vector = min_max_normaliser.max_vector
cmp_norm_info = numpy.concatenate((cmp_min_vector, cmp_max_vector), axis=0)
else:
logger.critical('Normalisation type %s is not supported!\n' %(cfg.output_feature_normalisation))
raise
cmp_norm_info = numpy.array(cmp_norm_info, 'float32')
fid = open(norm_info_file, 'wb')
cmp_norm_info.tofile(fid)
fid.close()
logger.info('saved %s vectors to %s' %(cfg.output_feature_normalisation, norm_info_file))
feature_index = 0
for feature_name in cfg.out_dimension_dict.keys():
feature_std_vector = numpy.array(global_std_vector[:,feature_index:feature_index+cfg.out_dimension_dict[feature_name]], 'float32')
fid = open(var_file_dict[feature_name], 'w')
feature_std_vector.tofile(fid)
fid.close()
logger.info('saved %s variance vector to %s' %(feature_name, var_file_dict[feature_name]))
feature_index += cfg.out_dimension_dict[feature_name]
train_x_file_list = nn_label_norm_file_list[0:cfg.train_file_number]
train_y_file_list = nn_cmp_norm_file_list[0:cfg.train_file_number]
valid_x_file_list = nn_label_norm_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number]
valid_y_file_list = nn_cmp_norm_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number]
test_x_file_list = nn_label_norm_file_list[cfg.train_file_number+cfg.valid_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
test_y_file_list = nn_cmp_norm_file_list[cfg.train_file_number+cfg.valid_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
# we need to know the label dimension before training the DNN
# computing that requires us to look at the labels
#
# currently, there are two ways to do this
if cfg.label_style == 'HTS':
label_normaliser = HTSLabelNormalisation(question_file_name=cfg.question_file_name, add_frame_features=cfg.add_frame_features, subphone_feats=cfg.subphone_feats)
lab_dim = label_normaliser.dimension + cfg.appended_input_dim
elif cfg.label_style == 'composed':
label_composer = LabelComposer()
label_composer.load_label_configuration(cfg.label_config_file)
lab_dim=label_composer.compute_label_dimension()
logger.info('label dimension is %d' % lab_dim)
combined_model_arch = str(len(hidden_layer_size))
for hid_size in hidden_layer_size:
combined_model_arch += '_' + str(hid_size)
nnets_file_name = '%s/%s_%s_%d_%s_%d.%d.train.%d.%f.rnn.model' \
%(model_dir, cfg.combined_model_name, cfg.combined_feature_name, int(cfg.multistream_switch),
combined_model_arch, lab_dim, cfg.cmp_dim, cfg.train_file_number, cfg.hyper_params['learning_rate'])
### DNN model training
if cfg.TRAINDNN:
var_dict = load_covariance(var_file_dict, cfg.out_dimension_dict)
logger.info('training DNN')
fid = open(norm_info_file, 'rb')
cmp_min_max = numpy.fromfile(fid, dtype=numpy.float32)
fid.close()
cmp_min_max = cmp_min_max.reshape((2, -1))
cmp_mean_vector = cmp_min_max[0, ]
cmp_std_vector = cmp_min_max[1, ]
try:
os.makedirs(model_dir)
except OSError as e:
if e.errno == errno.EEXIST:
# not an error - just means directory already exists
pass
else:
logger.critical('Failed to create model directory %s' % model_dir)
logger.critical(' OS error was: %s' % e.strerror)
raise
try:
train_DNN(train_xy_file_list = (train_x_file_list, train_y_file_list), \
valid_xy_file_list = (valid_x_file_list, valid_y_file_list), \
nnets_file_name = nnets_file_name, \
n_ins = lab_dim, n_outs = cfg.cmp_dim, ms_outs = cfg.multistream_outs, \
hyper_params = cfg.hyper_params, buffer_size = cfg.buffer_size, plot = cfg.plot, var_dict = var_dict,
cmp_mean_vector = cmp_mean_vector, cmp_std_vector = cmp_std_vector)
except KeyboardInterrupt:
logger.critical('train_DNN interrupted via keyboard')
# Could 'raise' the exception further, but that causes a deep traceback to be printed
# which we don't care about for a keyboard interrupt. So, just bail out immediately
sys.exit(1)
except:
logger.critical('train_DNN threw an exception')
raise
if cfg.GENBNFEA:
'''
Please only turn on this step when you want to generate bottleneck features from the DNN
'''
temp_dir_name = '%s_%s_%d_%d_%d_%d_%s_hidden' \
%(cfg.model_type, cfg.combined_feature_name, \
cfg.train_file_number, lab_dim, cfg.cmp_dim, \
len(hidden_layer_size), combined_model_arch)
gen_dir = os.path.join(gen_dir, temp_dir_name)
bottleneck_size = min(hidden_layer_size)
bottleneck_index = 0
for i in xrange(len(hidden_layer_size)):
if hidden_layer_size[i] == bottleneck_size:
bottleneck_index = i
logger.info('generating bottleneck features from DNN')
try:
os.makedirs(gen_dir)
except OSError as e:
if e.errno == errno.EEXIST:
# not an error - just means directory already exists
pass
else:
logger.critical('Failed to create generation directory %s' % gen_dir)
logger.critical(' OS error was: %s' % e.strerror)
raise
gen_file_id_list = file_id_list[0:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
test_x_file_list = nn_label_norm_file_list[0:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
gen_file_list = prepare_file_path_list(gen_file_id_list, gen_dir, cfg.cmp_ext)
dnn_hidden_generation(test_x_file_list, nnets_file_name, lab_dim, cfg.cmp_dim, gen_file_list)
### generate parameters from DNN
temp_dir_name = '%s_%s_%d_%d_%d_%d_%d_%d_%d' \
%(cfg.combined_model_name, cfg.combined_feature_name, int(cfg.do_post_filtering), \
cfg.train_file_number, lab_dim, cfg.cmp_dim, \
len(hidden_layer_size), hidden_layer_size[0], hidden_layer_size[-1])
gen_dir = os.path.join(gen_dir, temp_dir_name)
gen_file_id_list = file_id_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
test_x_file_list = nn_label_norm_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
if cfg.GenTestList:
gen_file_id_list = test_id_list
test_x_file_list = nn_label_norm_file_list
if cfg.DNNGEN:
logger.info('generating from DNN')
try:
os.makedirs(gen_dir)
except OSError as e:
if e.errno == errno.EEXIST:
# not an error - just means directory already exists
pass
else:
logger.critical('Failed to create generation directory %s' % gen_dir)
logger.critical(' OS error was: %s' % e.strerror)
raise
gen_file_list = prepare_file_path_list(gen_file_id_list, gen_dir, cfg.cmp_ext)
dnn_generation(test_x_file_list, nnets_file_name, lab_dim, cfg.cmp_dim, gen_file_list)
logger.debug('denormalising generated output using method %s' % cfg.output_feature_normalisation)
fid = open(norm_info_file, 'rb')
cmp_min_max = numpy.fromfile(fid, dtype=numpy.float32)
fid.close()
cmp_min_max = cmp_min_max.reshape((2, -1))
cmp_min_vector = cmp_min_max[0, ]
cmp_max_vector = cmp_min_max[1, ]
if cfg.output_feature_normalisation == 'MVN':
denormaliser = MeanVarianceNorm(feature_dimension = cfg.cmp_dim)
denormaliser.feature_denormalisation(gen_file_list, gen_file_list, cmp_min_vector, cmp_max_vector)
elif cfg.output_feature_normalisation == 'MINMAX':
denormaliser = MinMaxNormalisation(cfg.cmp_dim, min_value = 0.01, max_value = 0.99, min_vector = cmp_min_vector, max_vector = cmp_max_vector)
denormaliser.denormalise_data(gen_file_list, gen_file_list)
else:
logger.critical('denormalising method %s is not supported!\n' %(cfg.output_feature_normalisation))
raise
if cfg.AcousticModel:
##perform MLPG to smooth parameter trajectory
## lf0 is included, the output features must have vuv.
generator = ParameterGeneration(gen_wav_features = cfg.gen_wav_features)
generator.acoustic_decomposition(gen_file_list, cfg.cmp_dim, cfg.out_dimension_dict, cfg.file_extension_dict, var_file_dict, do_MLPG=cfg.do_MLPG)
if cfg.DurationModel:
### Perform duration normalization(min. state dur set to 1) ###
gen_dur_list = prepare_file_path_list(gen_file_id_list, gen_dir, cfg.dur_ext)
gen_label_list = prepare_file_path_list(gen_file_id_list, gen_dir, cfg.lab_ext)
in_gen_label_align_file_list = prepare_file_path_list(gen_file_id_list, cfg.in_label_align_dir, cfg.lab_ext, False)
generator = ParameterGeneration(gen_wav_features = cfg.gen_wav_features)
generator.duration_decomposition(gen_file_list, cfg.cmp_dim, cfg.out_dimension_dict, cfg.file_extension_dict)
label_modifier = HTSLabelModification(silence_pattern = cfg.silence_pattern)
label_modifier.modify_duration_labels(in_gen_label_align_file_list, gen_dur_list, gen_label_list)
### generate wav
if cfg.GENWAV:
logger.info('reconstructing waveform(s)')
print len(gen_file_id_list)
generate_wav(gen_dir, gen_file_id_list, cfg) # generated speech
# generate_wav(nn_cmp_dir, gen_file_id_list, cfg) # reference copy synthesis speech
### setting back to original conditions before calculating objective scores ###
if cfg.GenTestList:
in_label_align_file_list = prepare_file_path_list(file_id_list, cfg.in_label_align_dir, cfg.lab_ext, False)
binary_label_file_list = prepare_file_path_list(file_id_list, binary_label_dir, cfg.lab_ext)
gen_file_id_list = file_id_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
### evaluation: RMSE and CORR for duration
if cfg.CALMCD and cfg.DurationModel:
logger.info('calculating MCD')
ref_data_dir = os.path.join(data_dir, 'ref_data')
ref_dur_list = prepare_file_path_list(gen_file_id_list, ref_data_dir, cfg.dur_ext)
in_gen_label_align_file_list = in_label_align_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
calculator = IndividualDistortionComp()
valid_file_id_list = file_id_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number]
test_file_id_list = file_id_list[cfg.train_file_number+cfg.valid_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
if cfg.remove_silence_using_binary_labels:
untrimmed_reference_data = in_file_list_dict['dur'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
trim_silence(untrimmed_reference_data, ref_dur_list, cfg.dur_dim, \
untrimmed_test_labels, lab_dim, silence_feature)
else:
remover = SilenceRemover(n_cmp = cfg.dur_dim, silence_pattern = cfg.silence_pattern, remove_frame_features = cfg.add_frame_features)
remover.remove_silence(in_file_list_dict['dur'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number], in_gen_label_align_file_list, ref_dur_list)
valid_dur_rmse, valid_dur_corr = calculator.compute_distortion(valid_file_id_list, ref_data_dir, gen_dir, cfg.dur_ext, cfg.dur_dim)
test_dur_rmse, test_dur_corr = calculator.compute_distortion(test_file_id_list , ref_data_dir, gen_dir, cfg.dur_ext, cfg.dur_dim)
logger.info('Develop: DNN -- RMSE: %.3f frames/phoneme; CORR: %.3f; ' \
%(valid_dur_rmse, valid_dur_corr))
logger.info('Test: DNN -- RMSE: %.3f frames/phoneme; CORR: %.3f; ' \
%(test_dur_rmse, test_dur_corr))
### evaluation: calculate distortion
if cfg.CALMCD and cfg.AcousticModel:
logger.info('calculating MCD')
ref_data_dir = os.path.join(data_dir, 'ref_data')
ref_mgc_list = prepare_file_path_list(gen_file_id_list, ref_data_dir, cfg.mgc_ext)
ref_bap_list = prepare_file_path_list(gen_file_id_list, ref_data_dir, cfg.bap_ext)
ref_lf0_list = prepare_file_path_list(gen_file_id_list, ref_data_dir, cfg.lf0_ext)
in_gen_label_align_file_list = in_label_align_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
calculator = IndividualDistortionComp()
spectral_distortion = 0.0
bap_mse = 0.0
f0_mse = 0.0
vuv_error = 0.0
valid_file_id_list = file_id_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number]
test_file_id_list = file_id_list[cfg.train_file_number+cfg.valid_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
if cfg.remove_silence_using_binary_labels:
## get lab_dim:
label_composer = LabelComposer()
label_composer.load_label_configuration(cfg.label_config_file)
lab_dim=label_composer.compute_label_dimension()
## use first feature in label -- hardcoded for now
silence_feature = 0
## Use these to trim silence:
untrimmed_test_labels = binary_label_file_list[cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
if cfg.in_dimension_dict.has_key('mgc'):
if cfg.remove_silence_using_binary_labels:
untrimmed_reference_data = in_file_list_dict['mgc'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
trim_silence(untrimmed_reference_data, ref_mgc_list, cfg.mgc_dim, \
untrimmed_test_labels, lab_dim, silence_feature)
else:
remover = SilenceRemover(n_cmp = cfg.mgc_dim, silence_pattern = cfg.silence_pattern)
remover.remove_silence(in_file_list_dict['mgc'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number], in_gen_label_align_file_list, ref_mgc_list)
valid_spectral_distortion = calculator.compute_distortion(valid_file_id_list, ref_data_dir, gen_dir, cfg.mgc_ext, cfg.mgc_dim)
test_spectral_distortion = calculator.compute_distortion(test_file_id_list , ref_data_dir, gen_dir, cfg.mgc_ext, cfg.mgc_dim)
valid_spectral_distortion *= (10 /numpy.log(10)) * numpy.sqrt(2.0) ##MCD
test_spectral_distortion *= (10 /numpy.log(10)) * numpy.sqrt(2.0) ##MCD
if cfg.in_dimension_dict.has_key('bap'):
if cfg.remove_silence_using_binary_labels:
untrimmed_reference_data = in_file_list_dict['bap'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
trim_silence(untrimmed_reference_data, ref_bap_list, cfg.bap_dim, \
untrimmed_test_labels, lab_dim, silence_feature)
else:
remover = SilenceRemover(n_cmp = cfg.bap_dim, silence_pattern = cfg.silence_pattern)
remover.remove_silence(in_file_list_dict['bap'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number], in_gen_label_align_file_list, ref_bap_list)
valid_bap_mse = calculator.compute_distortion(valid_file_id_list, ref_data_dir, gen_dir, cfg.bap_ext, cfg.bap_dim)
test_bap_mse = calculator.compute_distortion(test_file_id_list , ref_data_dir, gen_dir, cfg.bap_ext, cfg.bap_dim)
valid_bap_mse = valid_bap_mse / 10.0    ##Cassia's bap is computed from 10*log|S(w)|. If using HTS/SPTK style, do the same as MGC
test_bap_mse  = test_bap_mse / 10.0    ##Cassia's bap is computed from 10*log|S(w)|. If using HTS/SPTK style, do the same as MGC
if cfg.in_dimension_dict.has_key('lf0'):
if cfg.remove_silence_using_binary_labels:
untrimmed_reference_data = in_file_list_dict['lf0'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number]
trim_silence(untrimmed_reference_data, ref_lf0_list, cfg.lf0_dim, \
untrimmed_test_labels, lab_dim, silence_feature)
else:
remover = SilenceRemover(n_cmp = cfg.lf0_dim, silence_pattern = cfg.silence_pattern)
remover.remove_silence(in_file_list_dict['lf0'][cfg.train_file_number:cfg.train_file_number+cfg.valid_file_number+cfg.test_file_number], in_gen_label_align_file_list, ref_lf0_list)
valid_f0_mse, valid_f0_corr, valid_vuv_error = calculator.compute_distortion(valid_file_id_list, ref_data_dir, gen_dir, cfg.lf0_ext, cfg.lf0_dim)
test_f0_mse , test_f0_corr, test_vuv_error = calculator.compute_distortion(test_file_id_list , ref_data_dir, gen_dir, cfg.lf0_ext, cfg.lf0_dim)
logger.info('Develop: DNN -- MCD: %.3f dB; BAP: %.3f dB; F0:- RMSE: %.3f Hz; CORR: %.3f; VUV: %.3f%%' \
%(valid_spectral_distortion, valid_bap_mse, valid_f0_mse, valid_f0_corr, valid_vuv_error*100.))
logger.info('Test : DNN -- MCD: %.3f dB; BAP: %.3f dB; F0:- RMSE: %.3f Hz; CORR: %.3f; VUV: %.3f%%' \
%(test_spectral_distortion , test_bap_mse , test_f0_mse , test_f0_corr, test_vuv_error*100.))
if __name__ == '__main__':
# these things should be done even before trying to parse the command line
# create a configuration instance
# and get a short name for this instance
cfg=configuration.cfg
# set up logging to use our custom class
logging.setLoggerClass(LoggerPlotter)
# get a logger for this main function
logger = logging.getLogger("main")
if len(sys.argv) != 2:
logger.critical('usage: run_dnn.sh [config file name]')
sys.exit(1)
config_file = sys.argv[1]
config_file = os.path.abspath(config_file)
cfg.configure(config_file)
if cfg.profile:
logger.info('profiling is activated')
import cProfile, pstats
cProfile.run('main_function(cfg)', 'mainstats')
# create a stream for the profiler to write to
profiling_output = StringIO.StringIO()
p = pstats.Stats('mainstats', stream=profiling_output)
# print stats to that stream
# here we just report the top 10 functions, sorted by total amount of time spent in each
p.strip_dirs().sort_stats('tottime').print_stats(10)
# print the result to the log
logger.info('---Profiling result follows---\n%s' % profiling_output.getvalue() )
profiling_output.close()
logger.info('---End of profiling result---')
else:
main_function(cfg)
# if gnp._boardId is not None:
# import gpu_lock
# gpu_lock.free_lock(gnp._boardId)
sys.exit(0)<|fim▁end|> | # a dictionary of file descriptors, pointing at the required files
required_labels={} |
<|file_name|>factory_generator.hpp<|end_file_name|><|fim▁begin|>/**
* file mirror/doc/factory_generator.hpp
* Documentation only header
*
* @author Matus Chochlik
*
* Copyright 2008-2010 Matus Chochlik. Distributed under the Boost
* Software License, Version 1.0. (See accompanying file
* LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
*/
#ifndef MIRROR_DOC_FACTORY_GENERATOR_1011291729_HPP
#define MIRROR_DOC_FACTORY_GENERATOR_1011291729_HPP
#ifdef MIRROR_DOCUMENTATION_ONLY
#include <mirror/config.hpp>
MIRROR_NAMESPACE_BEGIN
/** @page mirror_factory_generator_utility Factory generator utility
*
* @section mirror_factory_generator_intro Introduction
*
 * The factory generator utility allows one to easily create implementations
* of object factories at compile-time by using the meta-data provided
* by Mirror.
* By object factories we mean here classes, which can create
* instances of various types (@c Products), but do not require that the caller
* supplies the parameters for the construction directly.
* Factories pick or let the application user pick the @c Product 's most
* appropriate constructor, they gather the necessary parameters in
* a generic, application-specific way and use the selected constructor
* to create an instance of the @c Product.
*
 * Obviously, the most interesting feature of these factories is that
* they separate the caller (who just needs to get an instance of the
* specified type) from the actual method of creation which involves
 * choosing the right constructor and supplying the
* required parameters, which in turn can also be constructed or supplied
* in some other way (for example from a pool of existing objects).
*
* This is useful when we need to create instances of (possibly different)
* types having multiple constructors from a single point in code and we want
* the method of construction to be determined by parameters available only
* at run-time.
*
* Here follows a brief list of examples:<|fim▁hole|> * - Creation of objects based on user input from a GUI: The factory object
* creates a GUI component (a dialog for example) having the necessary
* controls for selecting one of the constructors of the constructed type
* and for the input of the values of all parameters for the selected
* constructor.
* - Creation of objects from a database dataset: The factory can go through
* the list of constructors and find the one whose parameters are matching
* to the columns in a database dataset. Then it calls this constructor
* and passes the values of the matching columns as the parameters, doing
* the appropriate type conversions where necessary. By doing this repeatedly
* we can create multiple objects, each representing a different row in
* the dataset.
* - Creation of objects from other external representations: Similar to
* the option above one can create instances from other file or stream-based
* data representations like (XML, JSON, XDR, etc.)
*
* The factories generated by this utility can then be used for example
* for the implementation of the Abstract factory design pattern, where
* even the exact type of the created object is not known.
*
* Because the factory classes are created by a generic meta-program
* at compile-time a good optimizing compiler can generate source
* code, which is as efficient as if the factories were hand-coded.
* This however depends on the implementation of the application-specific
* parts that are supplied to the factory generator.
*
* @section mirror_factory_generator_principles Principles
*
* As we mentioned in the introduction, the factory must handle two important
* tasks during the construction of an instance:
* - Choose the constructor (default, copy, custom).
* - Supply the parameters to the constructor if necessary.
*
* The implementation of these tasks is also the most distinctive thing
* about a particular factory. The rest of the process is the same regardless
* of the @c Product type. This is why the factory generator utility provides
* all the common boilerplate code and the application only specifies how
 * a constructor is selected and how the arguments are supplied to it.
*
 * Furthermore there are two basic ways to supply a parameter value:
* - Create one from scratch by using the same factory with a different
 * @c Product type recursively.
* - Use an existing instance which can be acquired from a pool of instances,
* or be a result of a functor call.
*
* In order to create a factory, the application needs to supply the factory
* generator with two template classes with the following signature:
*
* @code
* template <class Product, class SourceTraits>
* class constructor_parameter_source;
* @endcode
*
* The first one is referred to as a @c Manufacturer and is responsible
* for the creation of new parameter values. The second template is
 * referred to as @c Suppliers and is responsible for returning an existing
* value.
*
* One of the specializations of the @c Manufacturer template, namely the one
* having the @c void type as @c Product is referred to as @c Manager.
 * This @c Manager is responsible for selecting the constructor that is
* to be used.
*
* Both of these templates have the following parameters:
* - @c Product is the type produced by the source. A @c Manufacturer creates
* a new instance of @c Product and @c Suppliers return an
* existing instance of @c Product (one of possibly multiple candidates)
* - @c SourceTraits is an optional type parameter used by some factory
* implementations for the configuration and fine tuning of the factory's
* behavior and appearance.
*
* Whether these sources (@c Manufacturer or @c Suppliers) are going to be used
 * for supplying a constructor parameter and, if so, which of them,
* depends on the selected constructor:
* - If the @c Product 's default constructor is selected, then no parameters
* are required and neither the @c Manufacturer nor the @c Suppliers are used.
* - If the copy constructor is selected then the @c Suppliers template
* (which returns existing instances of the same type) is used.
* - If another constructor was picked, then the @c Manufacturer template is used
* to create the individual parameters.
*
* @subsection mirror_factory_generator_manufacturer The Manufacturer
*
* The application-defined @c Manufacturer template should have several
 * distinct specializations, which serve different purposes.
*
* As mentioned before, a @c Manufacturer with the @c void type as @c Product
* serves as a manager which is responsible for choosing the constructor
* that is ultimately to be used for the construction of an instance
* of the @c Product. This means that besides the regular @c Manufacturer
* which in fact creates the instances, there is one instance of the @c Manager
* for every @c Product in the generated factory. The @c Manager has also
 * a different interface than the other @c Manufacturers.
*
* Other specializations of the @c Manufacturer should handle the creation of
* values of some special types (like the native C++ types; boolean, integers,
 * floating points, strings, etc), considered atomic (i.e. not elaborated)
* by the factory. Values of such types can be input directly by the user
* into some kind of UI, they can be converted from some external representation
* like the value of row/column in a database dataset, or a value of an XML
* attribute, etc.
*
* The default implementation of the @c Manufacturer is for elaborated types and it
* uses a generated factory to recursively create instances of the constructor
* parameters.
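 *
 * As a rough illustration only (the names below are made up, the interface
 * is simplified rather than the exact one expected by the generator, and
 * the primary template from the previous section plus @c <iostream> are
 * assumed), a @c Manufacturer specialization for an atomic type could ask
 * the user for the value directly:
 *
 * @code
 * template <class SourceTraits>
 * class my_manuf<int, SourceTraits>
 * {
 * public:
 *     // construction details omitted; the generator supplies context
 *     // about the constructor parameter being created
 *     int operator()(void)
 *     {
 *         // get the value from the user (or any other external source)
 *         int value = 0;
 *         std::cout << "Enter an int: ";
 *         std::cin >> value;
 *         return value;
 *     }
 * };
 * @endcode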
*
* @subsection mirror_factory_generator_suppliers The Suppliers
*
* The @c Suppliers template is responsible for returning existing instances
* of the type passed as the @c Product parameter. Depending on the specific
* factory and the @c Product, the suppliers may or may not have means
* to get instances of that particular @c Product type and should be implemented
* accordingly. If there are multiple possible sources of values of a type
 * then the specialization of @c Suppliers must provide some means to
* select which of the external sources is to be used.
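 *
 * Purely as an illustration (again with made-up names and a simplified
 * interface), a @c Suppliers specialization could hand out one of the
 * instances kept in an application-managed pool:
 *
 * @code
 * template <class SourceTraits>
 * class my_suppl<my_product_1, SourceTraits>
 * {
 * public:
 *     // construction details omitted
 *     my_product_1& operator()(void)
 *     {
 *         // my_pool is a hypothetical application-level container
 *         // holding already existing my_product_1 instances
 *         return my_pool.front();
 *     }
 * };
 * @endcode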
*
* @subsection mirror_factory_generator_source_traits The parameter source traits
*
* For additional flexibility both the @c Manufacturer and the @c Suppliers
 * template have (besides the @c Product type they create) an additional template
* type parameter called @c SourceTraits. This type is usually defined together
* with the @c Manufacturer and @c Suppliers and is passed to the instantiations
* of these templates by the factory generator when a new factory is created.
* The factory generator treats this type as opaque.
* If a concrete implementation of the parameter sources (@c Manufacturer and
* @c Suppliers) has no use for this additional parameter, the void type
* can be passed to the factory generator.
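 *
 * Because the generator treats it as opaque, a traits type can be any
 * ordinary class bundling whatever configuration the sources need; the
 * members below are invented purely for the sake of the example:
 *
 * @code
 * struct my_traits
 * {
 *     std::string input_prompt_prefix;
 *     bool use_verbose_labels;
 * };
 * @endcode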
*
* @subsection mirror_factory_generator_factory_maker Factory and Factory Maker
*
* The @c Manufacturers, the @c Suppliers, the @c SourceTraits and
* the boilerplate code common to all factories is tied together by
* the @c mirror::factory template, with the following definition:
*
* @code
* template <
* template <class, class> class Manufacturer,
* template <class, class> class Suppliers,
* class SourceTraits,
* typename Product
* > class factory
* {
* public:
* Product operator()(void);
* Product* new_(void);
* };
* @endcode
*
 * Perhaps a more convenient way to create factories, especially when
* one wants to create multiple factories of the same kind (with the same
* @c Manufacturers, @c Suppliers and @c SourceTraits) for constructing
* different @c Product types is to use the @c mirror::factory_maker template
* class defined as follows:
*
* @code
* template <
* template <class, class> class Manufacturer,
* template <class, class> class Suppliers,
* class SourceTraits
* > struct factory_maker
* {
* template <typename Product>
* struct factory
* {
* typedef factory<Manufacturer, Suppliers, SourceTraits, Product> type;
* };
* };
* @endcode
*
* @section mirror_factory_generator_usage Usage
*
* The basic usage of the factory generator utility should be obvious from the above;
* It is necessary to implement the @c Manufacturer and the @c Suppliers
* templates (and possibly also the @c SourceTraits) and then use either the
* @c mirror::factory template directly or the @c mirror::factory_maker 's factory
* nested template to create an instantiation of a factory constructing instances
* of a @c Product type:
*
* @code
* // use the factory directly ...
* mirror::factory<my_manuf, my_suppl, my_traits, my_product_1> f1;
* my_product_1 x1(f1());
* my_product_1* px1(f1.new_());
*
* // ...
* // or use the factory_maker
* typedef mirror::factory_maker<my_manuf, my_suppl, my_traits> my_fact_maker;
* my_fact_maker::factory<my_product_1>::type f1;
* my_fact_maker::factory<my_product_2>::type f2;
 * my_fact_maker::factory<my_product_3>::type f3;
* //
* my_product_1 x1(f1());
* my_product_1* px1(f1.new_());
* my_product_2 x2(f2());
* my_product_3* px3(f3.new_());
* @endcode
*
* @subsection mirror_factory_generator_tutorials Tutorials and other resources
*
* Here follows a list of references to other pages dealing with the factory
* generator utility in depth and also tutorials showing how to write
* the plugins (the Managers, Manufacturers and Suppliers) for the factory
* generator:
*
* - @subpage mirror_fact_gen_in_depth
* - @subpage mirror_sql_fact_gen_tutorial
*
* @section mirror_factory_generator_existing_templates Existing Manufacturers and Suppliers
*
* The Mirror library provides several working sample implementations
* of the @c Manufacturer and @c Suppliers templates.
* The following example shows the usage of the @c wx_gui_factory template
* with the factory generator utility in a simple wxWidgets-based application.
* The generated factory creates a wxDialog containing all necessary widgets
* for the selection of the constructor to be used and for the input of all
* parameters required by the selected constructor. Screenshots of dialogs
* generated by this implementation can be found
* @link mirror_wx_gui_fact_examples here@endlink.
*/
MIRROR_NAMESPACE_END
#endif // DOCUMENTATION_ONLY
#endif //include guard<|fim▁end|> | |
<|file_name|>pluginmanagedlg.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2011 kubtek <[email protected]>
*
* This file is part of StarDict.
*
* StarDict is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* StarDict is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with StarDict. If not, see <http://www.gnu.org/licenses/>.
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include "pluginmanagedlg.h"
#include "desktop.h"
#include <glib/gi18n.h>
#include "stardict.h"
PluginManageDlg::PluginManageDlg()
:
window(NULL),
treeview(NULL),
detail_label(NULL),
pref_button(NULL),
plugin_tree_model(NULL),
dict_changed_(false),
order_changed_(false)
{
}
PluginManageDlg::~PluginManageDlg()
{
g_assert(!window);
g_assert(!treeview);
g_assert(!detail_label);
g_assert(!pref_button);
g_assert(!plugin_tree_model);
}
void PluginManageDlg::response_handler (GtkDialog *dialog, gint res_id, PluginManageDlg *oPluginManageDlg)
{
if (res_id == GTK_RESPONSE_HELP) {
show_help("stardict-plugins");
} else if (res_id == STARDICT_RESPONSE_CONFIGURE) {
GtkTreeSelection *selection = gtk_tree_view_get_selection(GTK_TREE_VIEW(oPluginManageDlg->treeview));
GtkTreeModel *model;
GtkTreeIter iter;
if (! gtk_tree_selection_get_selected (selection, &model, &iter))
return;
if (!gtk_tree_model_iter_has_child(model, &iter)) {
gchar *filename;
StarDictPlugInType plugin_type;
gtk_tree_model_get (model, &iter, 4, &filename, 5, &plugin_type, -1);
gpAppFrame->oStarDictPlugins->configure_plugin(filename, plugin_type);
g_free(filename);
}
}
}
static gboolean get_disable_list(GtkTreeModel *model, GtkTreePath *path, GtkTreeIter *iter, gpointer data)
{
if (!gtk_tree_model_iter_has_child(model, iter)) {
gboolean enable;
gtk_tree_model_get (model, iter, 1, &enable, -1);
if (!enable) {
gchar *filename;
gtk_tree_model_get (model, iter, 4, &filename, -1);
std::list<std::string> *disable_list = (std::list<std::string> *)data;
disable_list->push_back(filename);
g_free(filename);
}
}
return FALSE;
}
void PluginManageDlg::on_plugin_enable_toggled (GtkCellRendererToggle *cell, gchar *path_str, PluginManageDlg *oPluginManageDlg)
{
GtkTreeModel *model = GTK_TREE_MODEL(oPluginManageDlg->plugin_tree_model);
GtkTreePath *path = gtk_tree_path_new_from_string (path_str);
GtkTreeIter iter;
gtk_tree_model_get_iter (model, &iter, path);
gtk_tree_path_free (path);
gboolean enable;
gchar *filename;
StarDictPlugInType plugin_type;
gboolean can_configure;
gtk_tree_model_get (model, &iter, 1, &enable, 4, &filename, 5, &plugin_type, 6, &can_configure, -1);
enable = !enable;
gtk_tree_store_set (GTK_TREE_STORE (model), &iter, 1, enable, -1);
if (enable) {
gpAppFrame->oStarDictPlugins->load_plugin(filename);
} else {
gpAppFrame->oStarDictPlugins->unload_plugin(filename, plugin_type);
}
g_free(filename);
if (enable)
gtk_widget_set_sensitive(oPluginManageDlg->pref_button, can_configure);
else
gtk_widget_set_sensitive(oPluginManageDlg->pref_button, FALSE);
if (plugin_type == StarDictPlugInType_VIRTUALDICT
|| plugin_type == StarDictPlugInType_NETDICT) {
oPluginManageDlg->dict_changed_ = true;
} else if (plugin_type == StarDictPlugInType_TTS) {
gpAppFrame->oMidWin.oToolWin.UpdatePronounceMenu();
}
std::list<std::string> disable_list;
gtk_tree_model_foreach(model, get_disable_list, &disable_list);
#ifdef _WIN32
{
std::list<std::string> disable_list_rel;
rel_path_to_data_dir(disable_list, disable_list_rel);
std::swap(disable_list, disable_list_rel);
}
#endif
conf->set_strlist("/apps/stardict/manage_plugins/plugin_disable_list", disable_list);
}
struct plugininfo_ParseUserData {
gchar *info_str;
gchar *detail_str;
std::string filename;
std::string name;
std::string version;
std::string short_desc;
std::string long_desc;
std::string author;
std::string website;
};
static void plugininfo_parse_start_element(GMarkupParseContext *context, const gchar *element_name, const gchar **attribute_names, const gchar **attribute_values, gpointer user_data, GError **error)
{
if (strcmp(element_name, "plugin_info")==0) {
plugininfo_ParseUserData *Data = (plugininfo_ParseUserData *)user_data;
Data->name.clear();
Data->version.clear();
Data->short_desc.clear();
Data->long_desc.clear();
Data->author.clear();
Data->website.clear();
}
}
static void plugininfo_parse_end_element(GMarkupParseContext *context, const gchar *element_name, gpointer user_data, GError **error)
{
if (strcmp(element_name, "plugin_info")==0) {
plugininfo_ParseUserData *Data = (plugininfo_ParseUserData *)user_data;
Data->info_str = g_markup_printf_escaped("<b>%s</b> %s\n%s", Data->name.c_str(), Data->version.c_str(), Data->short_desc.c_str());
Data->detail_str = g_markup_printf_escaped(_("%s\n\n<b>Author:</b>\t%s\n<b>Website:</b>\t%s\n<b>Filename:</b>\t%s"), Data->long_desc.c_str(), Data->author.c_str(), Data->website.c_str(), Data->filename.c_str());
}
}
static void plugininfo_parse_text(GMarkupParseContext *context, const gchar *text, gsize text_len, gpointer user_data, GError **error)
{
const gchar *element = g_markup_parse_context_get_element(context);
if (!element)
return;
plugininfo_ParseUserData *Data = (plugininfo_ParseUserData *)user_data;
if (strcmp(element, "name")==0) {
Data->name.assign(text, text_len);
} else if (strcmp(element, "version")==0) {
Data->version.assign(text, text_len);
} else if (strcmp(element, "short_desc")==0) {
Data->short_desc.assign(text, text_len);
} else if (strcmp(element, "long_desc")==0) {
Data->long_desc.assign(text, text_len);
} else if (strcmp(element, "author")==0) {
Data->author.assign(text, text_len);
} else if (strcmp(element, "website")==0) {
Data->website.assign(text, text_len);
}
}
static void add_tree_model(GtkTreeStore *tree_model, GtkTreeIter*parent, const std::list<StarDictPluginInfo> &infolist)
{
plugininfo_ParseUserData Data;
GMarkupParser parser;
parser.start_element = plugininfo_parse_start_element;
parser.end_element = plugininfo_parse_end_element;
parser.text = plugininfo_parse_text;
parser.passthrough = NULL;
parser.error = NULL;
GtkTreeIter iter;
for (std::list<StarDictPluginInfo>::const_iterator i = infolist.begin(); i != infolist.end(); ++i) {
Data.info_str = NULL;
Data.detail_str = NULL;
Data.filename = i->filename;
GMarkupParseContext* context = g_markup_parse_context_new(&parser, (GMarkupParseFlags)0, &Data, NULL);
g_markup_parse_context_parse(context, i->info_xml.c_str(), -1, NULL);
g_markup_parse_context_end_parse(context, NULL);
g_markup_parse_context_free(context);
gtk_tree_store_append(tree_model, &iter, parent);
bool loaded = gpAppFrame->oStarDictPlugins->get_loaded(i->filename.c_str());
gtk_tree_store_set(tree_model, &iter, 0, true, 1, loaded, 2, Data.info_str, 3, Data.detail_str, 4, i->filename.c_str(), 5, i->plugin_type, 6, i->can_configure, -1);
g_free(Data.info_str);
g_free(Data.detail_str);
}
}
static void init_tree_model(GtkTreeStore *tree_model)
{
std::list<std::pair<StarDictPlugInType, std::list<StarDictPluginInfo> > > plugin_list;
{
#ifdef _WIN32
std::list<std::string> plugin_order_list;
const std::list<std::string>& plugin_order_list_rel
= conf->get_strlist("/apps/stardict/manage_plugins/plugin_order_list");
abs_path_to_data_dir(plugin_order_list_rel, plugin_order_list);
#else
const std::list<std::string>& plugin_order_list
= conf->get_strlist("/apps/stardict/manage_plugins/plugin_order_list");
#endif
gpAppFrame->oStarDictPlugins->get_plugin_list(plugin_order_list, plugin_list);
}
GtkTreeIter iter;
for (std::list<std::pair<StarDictPlugInType, std::list<StarDictPluginInfo> > >::iterator i = plugin_list.begin(); i != plugin_list.end(); ++i) {
switch (i->first) {
case StarDictPlugInType_VIRTUALDICT:
gtk_tree_store_append(tree_model, &iter, NULL);
gtk_tree_store_set(tree_model, &iter, 0, false, 2, _("<b>Virtual Dictionary</b>"), -1);
add_tree_model(tree_model, &iter, i->second);
break;
case StarDictPlugInType_NETDICT:
gtk_tree_store_append(tree_model, &iter, NULL);
gtk_tree_store_set(tree_model, &iter, 0, false, 2, _("<b>Network Dictionary</b>"), -1);
add_tree_model(tree_model, &iter, i->second);
break;
case StarDictPlugInType_SPECIALDICT:
gtk_tree_store_append(tree_model, &iter, NULL);
gtk_tree_store_set(tree_model, &iter, 0, false, 2, _("<b>Special Dictionary</b>"), -1);
add_tree_model(tree_model, &iter, i->second);
break;
case StarDictPlugInType_TTS:
gtk_tree_store_append(tree_model, &iter, NULL);
gtk_tree_store_set(tree_model, &iter, 0, false, 2, _("<b>TTS Engine</b>"), -1);
add_tree_model(tree_model, &iter, i->second);
break;
case StarDictPlugInType_PARSEDATA:
gtk_tree_store_append(tree_model, &iter, NULL);
gtk_tree_store_set(tree_model, &iter, 0, false, 2, _("<b>Data Parsing Engine</b>"), -1);
add_tree_model(tree_model, &iter, i->second);
break;
case StarDictPlugInType_MISC:
gtk_tree_store_append(tree_model, &iter, NULL);
gtk_tree_store_set(tree_model, &iter, 0, false, 2, _("<b>Misc</b>"), -1);
add_tree_model(tree_model, &iter, i->second);
break;
default:
break;
}
}
}
void PluginManageDlg::on_plugin_treeview_selection_changed(GtkTreeSelection *selection, PluginManageDlg *oPluginManageDlg)
{
GtkTreeModel *model;
GtkTreeIter iter;
if (! gtk_tree_selection_get_selected (selection, &model, &iter))
return;
if (gtk_tree_model_iter_has_child(model, &iter)) {
gtk_widget_set_sensitive(oPluginManageDlg->pref_button, FALSE);
} else {
gboolean loaded;
gchar *detail;
gboolean can_configure;
gtk_tree_model_get (model, &iter, 1, &loaded, 3, &detail, 6, &can_configure, -1);
gtk_label_set_markup(GTK_LABEL(oPluginManageDlg->detail_label), detail);
g_free(detail);
if (loaded)
gtk_widget_set_sensitive(oPluginManageDlg->pref_button, can_configure);
else
gtk_widget_set_sensitive(oPluginManageDlg->pref_button, FALSE);
}
}
gboolean PluginManageDlg::on_treeview_button_press(GtkWidget * widget, GdkEventButton * event, PluginManageDlg *oPluginManageDlg)
{
if (event->type==GDK_2BUTTON_PRESS) {
if (gtk_widget_get_sensitive(GTK_WIDGET(oPluginManageDlg->pref_button)))
gtk_dialog_response(GTK_DIALOG(oPluginManageDlg->window), STARDICT_RESPONSE_CONFIGURE);
return true;
} else {<|fim▁hole|> return false;
}
}
static void add_order_list(std::list<std::string> &order_list, GtkTreeModel *now_tree_model, GtkTreeIter *parent)
{
gboolean have_iter;
GtkTreeIter iter;
have_iter = gtk_tree_model_iter_children(now_tree_model, &iter, parent);
gchar *filename;
while (have_iter) {
gtk_tree_model_get (now_tree_model, &iter, 4, &filename, -1);
order_list.push_back(filename);
g_free(filename);
have_iter = gtk_tree_model_iter_next(now_tree_model, &iter);
}
}
void PluginManageDlg::write_order_list()
{
std::list<std::string> order_list;
GtkTreeModel *now_tree_model = GTK_TREE_MODEL(plugin_tree_model);
gboolean have_iter;
GtkTreeIter iter;
have_iter = gtk_tree_model_get_iter_first(now_tree_model, &iter);
while (have_iter) {
if (gtk_tree_model_iter_has_child(now_tree_model, &iter)) {
add_order_list(order_list, now_tree_model, &iter);
}
have_iter = gtk_tree_model_iter_next(now_tree_model, &iter);
}
#ifdef _WIN32
{
std::list<std::string> order_list_rel;
rel_path_to_data_dir(order_list, order_list_rel);
std::swap(order_list, order_list_rel);
}
#endif
conf->set_strlist("/apps/stardict/manage_plugins/plugin_order_list", order_list);
}
void PluginManageDlg::drag_data_get_cb(GtkWidget *widget, GdkDragContext *ctx, GtkSelectionData *data, guint info, guint time, PluginManageDlg *oPluginManageDlg)
{
if (gtk_selection_data_get_target(data) == gdk_atom_intern("STARDICT_PLUGINMANAGE", FALSE)) {
GtkTreeRowReference *ref;
GtkTreePath *source_row;
ref = (GtkTreeRowReference *)g_object_get_data(G_OBJECT(ctx), "gtk-tree-view-source-row");
source_row = gtk_tree_row_reference_get_path(ref);
if (source_row == NULL)
return;
GtkTreeIter iter;
gtk_tree_model_get_iter(GTK_TREE_MODEL(oPluginManageDlg->plugin_tree_model), &iter, source_row);
gtk_selection_data_set(data, gdk_atom_intern("STARDICT_PLUGINMANAGE", FALSE), 8, (const guchar *)&iter, sizeof(iter));
gtk_tree_path_free(source_row);
}
}
void PluginManageDlg::drag_data_received_cb(GtkWidget *widget, GdkDragContext *ctx, guint x, guint y, GtkSelectionData *sd, guint info, guint t, PluginManageDlg *oPluginManageDlg)
{
if (gtk_selection_data_get_target(sd) == gdk_atom_intern("STARDICT_PLUGINMANAGE", FALSE) && gtk_selection_data_get_data(sd)) {
GtkTreePath *path = NULL;
GtkTreeViewDropPosition position;
GtkTreeIter drag_iter;
memcpy(&drag_iter, gtk_selection_data_get_data(sd), sizeof(drag_iter));
if (gtk_tree_view_get_dest_row_at_pos(GTK_TREE_VIEW(widget), x, y, &path, &position)) {
GtkTreeIter iter;
GtkTreeModel *model = GTK_TREE_MODEL(oPluginManageDlg->plugin_tree_model);
gtk_tree_model_get_iter(model, &iter, path);
if (gtk_tree_model_iter_has_child(model, &iter)) {
gtk_drag_finish (ctx, FALSE, FALSE, t);
return;
}
if (gtk_tree_model_iter_has_child(model, &drag_iter)) {
gtk_drag_finish (ctx, FALSE, FALSE, t);
return;
}
GtkTreeIter parent_iter;
if (!gtk_tree_model_iter_parent(model, &parent_iter, &iter)) {
gtk_drag_finish (ctx, FALSE, FALSE, t);
return;
}
GtkTreeIter drag_parent_iter;
if (!gtk_tree_model_iter_parent(model, &drag_parent_iter, &drag_iter)) {
gtk_drag_finish (ctx, FALSE, FALSE, t);
return;
}
char *iter_str, *drag_iter_str;
iter_str = gtk_tree_model_get_string_from_iter(model, &parent_iter);
drag_iter_str = gtk_tree_model_get_string_from_iter(model, &drag_parent_iter);
if (strcmp(iter_str, drag_iter_str) != 0) {
g_free(iter_str);
g_free(drag_iter_str);
gtk_drag_finish (ctx, FALSE, FALSE, t);
return;
}
g_free(iter_str);
g_free(drag_iter_str);
switch (position) {
case GTK_TREE_VIEW_DROP_AFTER:
case GTK_TREE_VIEW_DROP_INTO_OR_AFTER:
gtk_tree_store_move_after(GTK_TREE_STORE(model), &drag_iter, &iter);
break;
case GTK_TREE_VIEW_DROP_BEFORE:
case GTK_TREE_VIEW_DROP_INTO_OR_BEFORE:
gtk_tree_store_move_before(GTK_TREE_STORE(model), &drag_iter, &iter);
break;
default: {
gtk_drag_finish (ctx, FALSE, FALSE, t);
return;
}
}
oPluginManageDlg->write_order_list();
oPluginManageDlg->order_changed_ = true;
gtk_drag_finish (ctx, TRUE, FALSE, t);
}
}
}
GtkWidget *PluginManageDlg::create_plugin_list()
{
GtkWidget *sw;
sw = gtk_scrolled_window_new (NULL, NULL);
gtk_scrolled_window_set_shadow_type (GTK_SCROLLED_WINDOW (sw), GTK_SHADOW_IN);
gtk_scrolled_window_set_policy (GTK_SCROLLED_WINDOW (sw), GTK_POLICY_AUTOMATIC, GTK_POLICY_AUTOMATIC);
plugin_tree_model = gtk_tree_store_new(7, G_TYPE_BOOLEAN, G_TYPE_BOOLEAN, G_TYPE_STRING, G_TYPE_STRING, G_TYPE_STRING, G_TYPE_INT, G_TYPE_BOOLEAN);
init_tree_model(plugin_tree_model);
treeview = gtk_tree_view_new_with_model (GTK_TREE_MODEL(plugin_tree_model));
g_object_unref (G_OBJECT (plugin_tree_model));
#if GTK_MAJOR_VERSION >= 3
#else
gtk_tree_view_set_rules_hint (GTK_TREE_VIEW (treeview), TRUE);
#endif
g_signal_connect (G_OBJECT (treeview), "button_press_event", G_CALLBACK (on_treeview_button_press), this);
GtkTreeSelection *selection;
selection = gtk_tree_view_get_selection (GTK_TREE_VIEW (treeview));
gtk_tree_selection_set_mode (selection, GTK_SELECTION_SINGLE);
g_signal_connect (selection, "changed", G_CALLBACK (on_plugin_treeview_selection_changed), this);
GtkCellRenderer *renderer;
GtkTreeViewColumn *column;
renderer = gtk_cell_renderer_toggle_new ();
g_signal_connect (renderer, "toggled", G_CALLBACK (on_plugin_enable_toggled), this);
column = gtk_tree_view_column_new_with_attributes (_("Enable"), renderer, "visible", 0, "active", 1, NULL);
gtk_tree_view_append_column (GTK_TREE_VIEW(treeview), column);
gtk_tree_view_column_set_expand(GTK_TREE_VIEW_COLUMN (column), FALSE);
gtk_tree_view_column_set_clickable (GTK_TREE_VIEW_COLUMN (column), FALSE);
renderer = gtk_cell_renderer_text_new ();
g_object_set (G_OBJECT (renderer), "xalign", 0.0, NULL);
column = gtk_tree_view_column_new_with_attributes (_("Plug-in Name"), renderer, "markup", 2, NULL);
gtk_tree_view_append_column (GTK_TREE_VIEW(treeview), column);
gtk_tree_view_column_set_expand(GTK_TREE_VIEW_COLUMN (column), TRUE);
gtk_tree_view_column_set_clickable (GTK_TREE_VIEW_COLUMN (column), FALSE);
GtkTargetEntry gte[] = {{(gchar *)"STARDICT_PLUGINMANAGE", GTK_TARGET_SAME_APP, 0}};
gtk_tree_view_enable_model_drag_source(GTK_TREE_VIEW(treeview), GDK_BUTTON1_MASK, gte, 1, GDK_ACTION_COPY);
gtk_tree_view_enable_model_drag_dest(GTK_TREE_VIEW(treeview), gte, 1, (GdkDragAction)(GDK_ACTION_COPY | GDK_ACTION_MOVE));
g_signal_connect(G_OBJECT(treeview), "drag-data-received", G_CALLBACK(drag_data_received_cb), this);
g_signal_connect(G_OBJECT(treeview), "drag-data-get", G_CALLBACK(drag_data_get_cb), this);
gtk_tree_view_expand_all(GTK_TREE_VIEW (treeview));
gtk_container_add (GTK_CONTAINER (sw), treeview);
return sw;
}
bool PluginManageDlg::ShowModal(GtkWindow *parent_win, bool &dict_changed, bool &order_changed)
{
window = gtk_dialog_new();
oStarDictPluginSystemInfo.pluginwin = window;
gtk_window_set_transient_for(GTK_WINDOW(window), parent_win);
//gtk_dialog_set_has_separator(GTK_DIALOG(window), false);
gtk_dialog_add_button(GTK_DIALOG(window), GTK_STOCK_HELP, GTK_RESPONSE_HELP);
pref_button = gtk_dialog_add_button(GTK_DIALOG(window), _("Configure Pl_ug-in"), STARDICT_RESPONSE_CONFIGURE);
gtk_widget_set_sensitive(pref_button, FALSE);
gtk_dialog_add_button(GTK_DIALOG(window), GTK_STOCK_CLOSE, GTK_RESPONSE_CLOSE);
gtk_dialog_set_default_response(GTK_DIALOG(window), GTK_RESPONSE_CLOSE);
g_signal_connect(G_OBJECT(window), "response", G_CALLBACK(response_handler), this);
GtkWidget *vbox;
#if GTK_MAJOR_VERSION >= 3
vbox = gtk_box_new (GTK_ORIENTATION_VERTICAL, 5);
#else
vbox = gtk_vbox_new (FALSE, 5);
#endif
gtk_container_set_border_width (GTK_CONTAINER (vbox), 2);
GtkWidget *pluginlist = create_plugin_list();
gtk_box_pack_start (GTK_BOX (vbox), pluginlist, true, true, 0);
GtkWidget *expander = gtk_expander_new (_("<b>Plug-in Details</b>"));
gtk_expander_set_use_markup(GTK_EXPANDER(expander), TRUE);
gtk_box_pack_start (GTK_BOX (vbox), expander, false, false, 0);
detail_label = gtk_label_new (NULL);
gtk_label_set_line_wrap(GTK_LABEL(detail_label), TRUE);
gtk_label_set_selectable(GTK_LABEL (detail_label), TRUE);
gtk_container_add (GTK_CONTAINER (expander), detail_label);
gtk_box_pack_start (GTK_BOX(gtk_dialog_get_content_area(GTK_DIALOG (window))), vbox, true, true, 0);
gtk_widget_show_all (gtk_dialog_get_content_area(GTK_DIALOG (window)));
gtk_window_set_title (GTK_WINDOW (window), _("Manage Plugins"));
gtk_window_set_default_size(GTK_WINDOW(window), 250, 350);
dict_changed_ = false;
order_changed_ = false;
gint result;
while (true) {
result = gtk_dialog_run(GTK_DIALOG(window));
if (result ==GTK_RESPONSE_HELP || result == STARDICT_RESPONSE_CONFIGURE) {
} else {
break;
}
}
/* When do we get GTK_RESPONSE_NONE response? Unable to reproduce. */
if (result != GTK_RESPONSE_NONE) {
dict_changed = dict_changed_;
order_changed = order_changed_;
gtk_widget_destroy(GTK_WIDGET(window));
}
window = NULL;
treeview = NULL;
detail_label = NULL;
pref_button = NULL;
plugin_tree_model = NULL;
oStarDictPluginSystemInfo.pluginwin = NULL;
return result == GTK_RESPONSE_NONE;
}<|fim▁end|> | |
<|file_name|>TransitionIntoState0.py<|end_file_name|><|fim▁begin|>"""
Wind Turbine Company - 2013
Author: Stephan Rayner
Email: [email protected]
"""
import time
from test.Base_Test import Base_Test
class Maintenance_153Validation(Base_Test):
def setUp(self):
self.WindSpeedValue = "4.5"
self.interface.reset()
self.interface.write("Yaw_Generation", "2")
self.interface.expect("Emergency_Stop", "off")
self.interface.expect("Maintenance_Mode", "off")
def test_MaintenanceSD46(self):
'''
Moving into Maintenance Mode while the turbine is running (State 2 or
Higher) causes SD_46 to fire before any other shutdowns. In other words,
SD_46 should fire, and only SD_46.
'''
self._State2Setup()
self.interface.expect("Maintenance_Mode", "on")
self.TEST_CONDITION = self.interface.Shutdown.read(self.interface.Shutdown_List, return_onlyHigh = True)
print self.TEST_CONDITION
self.assertTrue("@GV.SD_46" in self.TEST_CONDITION,"Shutdown 46 did not fire")
self.assertEqual(self.TEST_CONDITION.keys()[0], "@GV.SD_46","Shutdown did not fire first")
self.assertEqual(len(self.TEST_CONDITION), 1,"More than one shutdown is present.")
self.TEST_CONDITION = self.interface.read("Turbine_State")
self.assertEqual(self.TEST_CONDITION,"0")
def test_MaintenanceHardwareControl(self):
'''
DO_BypLineProtRelMaintMode and DO_BypassRotorOverSpeed should be 0
When Maintenance Mode is activated, SD_46 goes high, and 1 minute later
DO_BypLineProtRelMaintMode and DO_BypassRotorOverSpeed should be 1
//I am using a running counter with a read to check time not a wait and read.
//This ensures that the values don't flip early.
'''
self._State2Setup()
read_Vars = ["@GV.DO_BypLineProtRelMaintMode","@GV.DO_BypassRotorOverSpeed"]
#
self.assertEqual(self._readpxUtils(read_Vars),["0","0"])
self.interface.expect("Maintenance_Mode","on")
elapseTime = 0.0
initialTime = time.time()
self.TEST_CONDITION = self.interface.Shutdown.read(self.interface.Shutdown_List, return_onlyHigh = True)
#
self.assertTrue("@GV.SD_46" in self.TEST_CONDITION,"Shutdown 46 did not fire")
print "\nPlease Wait One Minute\n"
while((self._readpxUtils(read_Vars) == ["0","0"]) and (elapseTime < 120)):
elapseTime = time.time() - initialTime
expectedRunningTime = 60
tollerance = 10
self.TEST_CONDITION = self._readpxUtils(read_Vars)<|fim▁hole|> #
self.assertEqual(self.TEST_CONDITION,["1","1"])
#
self.assertLessEqual(abs(expectedRunningTime-elapseTime),tollerance,"The hardware does not retain control over the UPR and the Smartplug unitil the breaks apply as expected:\nElapse Time: %s\n%s : %s\n%s : %s\n" % (str(elapseTime), read_Vars[0], self.TEST_CONDITION[0], read_Vars[1], self.TEST_CONDITION[1]))
#Helper Functions
def _State2Setup(self):
self.interface.write("Wind_Speed",self.WindSpeedValue)
self.interface.write("Yaw_Generation", "2")
print ("Waiting for 2 minutes")
time.sleep(70)# must hold this here for the Minute averages to hold
self.interface.Shutdown.bypass([24, 31])
self.interface.Shutdown.reset()
self.interface.start()
def _readpxUtils(self,List):
a = self.interface.mcc.read(List)
tmp=[]
for x in List:
tmp.append(a[x])
return tmp<|fim▁end|> | |
<|file_name|>youku.js<|end_file_name|><|fim▁begin|>/**
* Created by jiangli on 15/1/6.
*/
"use strict";
var request = require('request');
var iconv = require('iconv-lite');
var crypto = require('crypto');
var Buffer = require('buffer').Buffer;
/**
 * [_parseYouku parse a Youku video page URL]
* @param [type] $url [description]
* @return [type] [description]
*/
module.exports = function($url,callback){
var $matches = $url.match(/id\_([\w=]+)/);
if ($matches&&$matches.length>1){
return _getYouku($matches[1].trim(),callback);
}else{
return null;
}
}
function _getYouku($vid,callback){
var $base = "http://v.youku.com/player/getPlaylist/VideoIDS/";
var $blink = $base+$vid;
var $link = $blink+"/Pf/4/ctype/12/ev/1";
request($link, function(er, response,body) {
if (er)
return callback(er);
var $retval = body;
if($retval){
var $rs = JSON.parse($retval);
request($blink, function(er, response,body) {
if (er)
return callback(er);
var $data = {
'1080Phd3':[],
'超清hd2':[],
'高清mp4':[],
'高清flvhd':[],
'标清flv':[],
'高清3gphd':[],
'3gp':[]
};
var $bretval = body;
var $brs = JSON.parse($bretval);
var $rs_data = $rs.data[0];
var $brs_data = $brs.data[0];
if($rs_data.error){
return callback(null, $data['error'] = $rs_data.error);
}
var $streamtypes = $rs_data.streamtypes; // video quality levels that can be output
var $streamfileids = $rs_data.streamfileids;
var $seed = $rs_data.seed;
var $segs = $rs_data.segs;
var $ip = $rs_data.ip;
var $bsegs = $brs_data.segs;
var yk_e_result = yk_e('becaf9be', yk_na($rs_data.ep)).split('_');
var $sid = yk_e_result[0], $token = yk_e_result[1];
for(var $key in $segs){
if(in_array($key,$streamtypes)){
var $segs_key_val = $segs[$key];
for(var kk=0;kk<$segs_key_val.length;kk++){
var $v = $segs_key_val[kk];
var $no = $v.no.toString(16).toUpperCase(); // convert to hexadecimal, uppercase
if($no.length == 1){
$no ="0"+$no; //no 为每段视频序号
}
// build the K value for the video URL
var $_k = $v.k;
if ((!$_k || $_k == '') || $_k == '-1') {
$_k = $bsegs[$key][kk].k;
}
var $fileId = getFileid($streamfileids[$key],$seed);
$fileId = $fileId.substr(0,8)+$no+$fileId.substr(10);
var m0 = yk_e('bf7e5f01', $sid + '_' + $fileId + '_' + $token);
var m1 = yk_d(m0);
var iconv_result = iconv.decode(new Buffer(m1), 'UTF-8');
if(iconv_result!=""){
var $ep = urlencode(iconv_result);
var $typeArray = [];
$typeArray['flv']= 'flv';
$typeArray['mp4']= 'mp4';
$typeArray['hd2']= 'flv';
$typeArray['3gphd']= 'mp4';
$typeArray['3gp']= 'flv';
$typeArray['hd3']= 'flv';
// determine the video quality level
var $sharpness = []; // quality (sharpness) array
$sharpness['flv']= '标清flv';
$sharpness['flvhd']= '高清flvhd';
$sharpness['mp4']= '高清mp4';
$sharpness['hd2']= '超清hd2';
$sharpness['3gphd']= '高清3gphd';
$sharpness['3gp']= '3gp';
$sharpness['hd3']= '1080Phd3';
var $fileType = $typeArray[$key];
$data[$sharpness[$key]][kk] = "http://k.youku.com/player/getFlvPath/sid/"+$sid+"_00/st/"+$fileType+"/fileid/"+$fileId+"?K="+$_k+"&hd=1&myp=0&ts="+((((($v['seconds']+'&ypp=0&ctype=12&ev=1&token=')+$token)+'&oip=')+$ip)+'&ep=')+$ep;
}
}
}
}
// return cover image, title, link, duration, and video URLs
$data['coverImg'] = $rs['data'][0]['logo'];
$data['title'] = $rs['data'][0]['title'];
$data['seconds'] = $rs['data'][0]['seconds'];
return callback(null,$data);
});
}else{
return callback(null,null);
}
})
}
function urlencode(str) {
str = (str + '').toString();
return encodeURIComponent(str)
.replace(/!/g, '%21')
.replace(/'/g, '%27')
.replace(/\(/g, '%28')
.replace(/\)/g, '%29')
.replace(/\*/g, '%2A')
.replace(/%20/g, '+');
};
function in_array(needle, haystack, argStrict) {
var key = '',
strict = !! argStrict;
if (strict) {
for (key in haystack) {
if (haystack[key] === needle) {
return true;
}
}
} else {
for (key in haystack) {
if (haystack[key] == needle) {
return true;
}
}
}
return false;
};
//start 获得优酷视频需要用到的方法
function getSid(){
var $sid = new Date().getTime()+(Math.random() * 9001+10000);
return $sid;
}
function getKey($key1,$key2){
var $a = parseInt($key1,16);
var $b = $a ^0xA55AA5A5;
var $b = $b.toString(16);
return $key2+$b;
}
function getFileid($fileId,$seed){
var $mixed = getMixString($seed);
var $ids = $fileId.replace(/(\**$)/g, "").split('*'); // strip the trailing * characters and split into an array
var $realId = "";
for (var $i=0;$i<$ids.length;$i++){
var $idx = $ids[$i];
$realId += $mixed.substr($idx,1);
}
return $realId;
}
function getMixString($seed){
var $mixed = "";
var $source = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ/\\:._-1234567890";
var $len = $source.length;
for(var $i=0;$i<$len;$i++){
$seed = ($seed * 211 + 30031)%65536;
var $index = ($seed / 65536 * $source.length);
var $c = $source.substr($index,1);
$mixed += $c;
$source = $source.replace($c,"");
}
return $mixed;
}
function yk_d($a){
if (!$a) {
return '';
}
var $f = $a.length;
var $b = 0;
var $str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
for (var $c = ''; $b < $f;) {
var $e = charCodeAt($a, $b++) & 255;
if ($b == $f) {
$c += charAt($str, $e >> 2);
$c += charAt($str, ($e & 3) << 4);
$c += '==';
break;
}
var $g = charCodeAt($a, $b++);
if ($b == $f) {
$c += charAt($str, $e >> 2);
$c += charAt($str, ($e & 3) << 4 | ($g & 240) >> 4);
$c += charAt($str, ($g & 15) << 2);
$c += '=';
break;
}
var $h = charCodeAt($a, $b++);
$c += charAt($str, $e >> 2);
$c += charAt($str, ($e & 3) << 4 | ($g & 240) >> 4);
$c += charAt($str, ($g & 15) << 2 | ($h & 192) >> 6);
$c += charAt($str, $h & 63);
}
return $c;
}
function yk_na($a){
if (!$a) {
return '';
}
var $sz = '-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,62,-1,-1,-1,63,52,53,54,55,56,57,58,59,60,61,-1,-1,-1,-1,-1,-1,-1,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,-1,-1,-1,-1,-1,-1,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,-1,-1,-1,-1,-1';
var $h = $sz.split(',');
var $i = $a.length;
var $f = 0;
for (var $e = ''; $f < $i;) {
var $c;
do {
$c = $h[charCodeAt($a, $f++) & 255];
} while ($f < $i && -1 == $c);
if (-1 == $c) {
break;
}
var $b;
do {
$b = $h[charCodeAt($a, $f++) & 255];
} while ($f < $i && -1 == $b);
if (-1 == $b) {
break;
}
$e += String.fromCharCode($c << 2 | ($b & 48) >> 4);
do {
$c = charCodeAt($a, $f++) & 255;
if (61 == $c) {
return $e;
}
$c = $h[$c];
} while ($f < $i && -1 == $c);
if (-1 == $c) {
break;
}
$e += String.fromCharCode(($b & 15) << 4 | ($c & 60) >> 2);
do {
$b = charCodeAt($a, $f++) & 255;
if (61 == $b) {
return $e;
}
$b = $h[$b];
} while ($f < $i && -1 == $b);
if (-1 == $b) {
break;
}
$e += String.fromCharCode(($c & 3) << 6 | $b);
}
return $e;
}
function yk_e($a, $c){
var $b = [];
for (var $f = 0, $i, $e = '', $h = 0; 256 > $h; $h++) {
$b[$h] = $h;
}
for ($h = 0; 256 > $h; $h++) {
$f = (($f + $b[$h]) + charCodeAt($a, $h % $a.length)) % 256;
$i = $b[$h];
$b[$h] = $b[$f];
$b[$f] = $i;
}
for (var $q = ($f = ($h = 0)); $q < $c.length; $q++) {
$h = ($h + 1) % 256;
$f = ($f + $b[$h]) % 256;
$i = $b[$h];
$b[$h] = $b[$f];
$b[$f] = $i;
$e += String.fromCharCode(charCodeAt($c, $q) ^ $b[($b[$h] + $b[$f]) % 256]);
}
return $e;
}
function md5(str){
var shasum = crypto.createHash('md5');
shasum.update(str);
return shasum.digest('hex');
}
function charCodeAt($str, $index){
var $charCode = [];
var $key = md5($str);<|fim▁hole|> }
$charCode[$key] = unpack('C*', $str);
return $charCode[$key][$index];
}
function charAt($str, $index){
return $str.substr($index, 1);
}
function unpack(format, data) {
var formatPointer = 0, dataPointer = 0, result = {}, instruction = '',
quantifier = '', label = '', currentData = '', i = 0, j = 0,
word = '', fbits = 0, ebits = 0, dataByteLength = 0;
var fromIEEE754 = function(bytes, ebits, fbits) {
// Bytes to bits
var bits = [];
for (var i = bytes.length; i; i -= 1) {
var m_byte = bytes[i - 1];
for (var j = 8; j; j -= 1) {
bits.push(m_byte % 2 ? 1 : 0); m_byte = m_byte >> 1;
}
}
bits.reverse();
var str = bits.join('');
// Unpack sign, exponent, fraction
var bias = (1 << (ebits - 1)) - 1;
var s = parseInt(str.substring(0, 1), 2) ? -1 : 1;
var e = parseInt(str.substring(1, 1 + ebits), 2);
var f = parseInt(str.substring(1 + ebits), 2);
// Produce number
if (e === (1 << ebits) - 1) {
return f !== 0 ? NaN : s * Infinity;
}
else if (e > 0) {
return s * Math.pow(2, e - bias) * (1 + f / Math.pow(2, fbits));
}
else if (f !== 0) {
return s * Math.pow(2, -(bias-1)) * (f / Math.pow(2, fbits));
}
else {
return s * 0;
}
}
while (formatPointer < format.length) {
instruction = format.charAt(formatPointer);
// Start reading 'quantifier'
quantifier = '';
formatPointer++;
while ((formatPointer < format.length) &&
(format.charAt(formatPointer).match(/[\d\*]/) !== null)) {
quantifier += format.charAt(formatPointer);
formatPointer++;
}
if (quantifier === '') {
quantifier = '1';
}
// Start reading label
label = '';
while ((formatPointer < format.length) &&
(format.charAt(formatPointer) !== '/')) {
label += format.charAt(formatPointer);
formatPointer++;
}
if (format.charAt(formatPointer) === '/') {
formatPointer++;
}
// Process given instruction
switch (instruction) {
case 'a': // NUL-padded string
case 'A': // SPACE-padded string
if (quantifier === '*') {
quantifier = data.length - dataPointer;
} else {
quantifier = parseInt(quantifier, 10);
}
currentData = data.substr(dataPointer, quantifier);
dataPointer += quantifier;
var currentResult;
if (instruction === 'a') {
currentResult = currentData.replace(/\0+$/, '');
} else {
currentResult = currentData.replace(/ +$/, '');
}
result[label] = currentResult;
break;
case 'h': // Hex string, low nibble first
case 'H': // Hex string, high nibble first
if (quantifier === '*') {
quantifier = data.length - dataPointer;
} else {
quantifier = parseInt(quantifier, 10);
}
currentData = data.substr(dataPointer, quantifier);
dataPointer += quantifier;
if (quantifier > currentData.length) {
throw new Error('Warning: unpack(): Type ' + instruction +
': not enough input, need ' + quantifier);
}
currentResult = '';
for (i = 0; i < currentData.length; i++) {
word = currentData.charCodeAt(i).toString(16);
if (instruction === 'h') {
word = word[1] + word[0];
}
currentResult += word;
}
result[label] = currentResult;
break;
case 'c': // signed char
case 'C': // unsigned c
if (quantifier === '*') {
quantifier = data.length - dataPointer;
} else {
quantifier = parseInt(quantifier, 10);
}
currentData = data.substr(dataPointer, quantifier);
dataPointer += quantifier;
for (i = 0; i < currentData.length; i++) {
currentResult = currentData.charCodeAt(i);
if ((instruction === 'c') && (currentResult >= 128)) {
currentResult -= 256;
}
result[label + (quantifier > 1 ?
(i + 1) :
'')] = currentResult;
}
break;
case 'S': // unsigned short (always 16 bit, machine byte order)
case 's': // signed short (always 16 bit, machine byte order)
case 'v': // unsigned short (always 16 bit, little endian byte order)
if (quantifier === '*') {
quantifier = (data.length - dataPointer) / 2;
} else {
quantifier = parseInt(quantifier, 10);
}
currentData = data.substr(dataPointer, quantifier * 2);
dataPointer += quantifier * 2;
for (i = 0; i < currentData.length; i += 2) {
// sum per word;
currentResult = ((currentData.charCodeAt(i + 1) & 0xFF) << 8) +
(currentData.charCodeAt(i) & 0xFF);
if ((instruction === 's') && (currentResult >= 32768)) {
currentResult -= 65536;
}
result[label + (quantifier > 1 ?
((i / 2) + 1) :
'')] = currentResult;
}
break;
case 'n': // unsigned short (always 16 bit, big endian byte order)
if (quantifier === '*') {
quantifier = (data.length - dataPointer) / 2;
} else {
quantifier = parseInt(quantifier, 10);
}
currentData = data.substr(dataPointer, quantifier * 2);
dataPointer += quantifier * 2;
for (i = 0; i < currentData.length; i += 2) {
// sum per word;
currentResult = ((currentData.charCodeAt(i) & 0xFF) << 8) +
(currentData.charCodeAt(i + 1) & 0xFF);
result[label + (quantifier > 1 ?
((i / 2) + 1) :
'')] = currentResult;
}
break;
case 'i': // signed integer (machine dependent size and byte order)
case 'I': // unsigned integer (machine dependent size & byte order)
case 'l': // signed long (always 32 bit, machine byte order)
case 'L': // unsigned long (always 32 bit, machine byte order)
case 'V': // unsigned long (always 32 bit, little endian byte order)
if (quantifier === '*') {
quantifier = (data.length - dataPointer) / 4;
} else {
quantifier = parseInt(quantifier, 10);
}
currentData = data.substr(dataPointer, quantifier * 4);
dataPointer += quantifier * 4;
for (i = 0; i < currentData.length; i += 4) {
currentResult =
((currentData.charCodeAt(i + 3) & 0xFF) << 24) +
((currentData.charCodeAt(i + 2) & 0xFF) << 16) +
((currentData.charCodeAt(i + 1) & 0xFF) << 8) +
((currentData.charCodeAt(i) & 0xFF));
result[label + (quantifier > 1 ?
((i / 4) + 1) :
'')] = currentResult;
}
break;
case 'N': // unsigned long (always 32 bit, little endian byte order)
if (quantifier === '*') {
quantifier = (data.length - dataPointer) / 4;
} else {
quantifier = parseInt(quantifier, 10);
}
currentData = data.substr(dataPointer, quantifier * 4);
dataPointer += quantifier * 4;
for (i = 0; i < currentData.length; i += 4) {
currentResult =
((currentData.charCodeAt(i) & 0xFF) << 24) +
((currentData.charCodeAt(i + 1) & 0xFF) << 16) +
((currentData.charCodeAt(i + 2) & 0xFF) << 8) +
((currentData.charCodeAt(i + 3) & 0xFF));
result[label + (quantifier > 1 ?
((i / 4) + 1) :
'')] = currentResult;
}
break;
case 'f': //float
case 'd': //double
ebits = 8;
fbits = (instruction === 'f') ? 23 : 52;
dataByteLength = 4;
if (instruction === 'd') {
ebits = 11;
dataByteLength = 8;
}
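        // fromIEEE754 rebuilds the number from the exponent/fraction widths set above
        // (8/23 bits for float, 11/52 for double); bytes are collected in reverse order below.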
if (quantifier === '*') {
quantifier = (data.length - dataPointer) / dataByteLength;
} else {
quantifier = parseInt(quantifier, 10);
}
currentData = data.substr(dataPointer, quantifier * dataByteLength);
dataPointer += quantifier * dataByteLength;
for (i = 0; i < currentData.length; i += dataByteLength) {
          var chunk = currentData.substr(i, dataByteLength);
          var bytes = [];
          for (j = chunk.length - 1; j >= 0; --j) {
            bytes.push(chunk.charCodeAt(j));
          }
          result[label + (quantifier > 1 ?
              ((i / dataByteLength) + 1) :
              '')] = fromIEEE754(bytes, ebits, fbits);
}
break;
case 'x': // NUL byte
case 'X': // Back up one byte
      case '@': // NUL-fill to absolute position
if (quantifier === '*') {
quantifier = data.length - dataPointer;
} else {
quantifier = parseInt(quantifier, 10);
}
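        // 'X' rewinds the data pointer, 'x' skips forward, and '@' jumps to an absolute offset.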
if (quantifier > 0) {
if (instruction === 'X') {
dataPointer -= quantifier;
} else {
if (instruction === 'x') {
dataPointer += quantifier;
} else {
dataPointer = quantifier;
}
}
}
break;
default:
throw new Error('Warning: unpack() Type ' + instruction +
': unknown format code');
}
}
return result;
}<|fim▁end|> | $index = $index + 1;
if ($charCode[$key]) {
return $charCode[$key][$index]; |
<|file_name|>top-level-alternation.rs<|end_file_name|><|fim▁begin|>#![feature(let_else)]
#![deny(unreachable_patterns)]
fn main() {
while let 0..=2 | 1 = 0 {} //~ ERROR unreachable pattern
if let 0..=2 | 1 = 0 {} //~ ERROR unreachable pattern
match 0u8 {
0
| 0 => {} //~ ERROR unreachable pattern
_ => {}
}
match Some(0u8) {
Some(0)
| Some(0) => {} //~ ERROR unreachable pattern
_ => {}
}
match (0u8, 0u8) {
(0, _) | (_, 0) => {}
(0, 0) => {} //~ ERROR unreachable pattern
(1, 1) => {}
_ => {}
}
match (0u8, 0u8) {
(0, 1) | (2, 3) => {}
(0, 3) => {}
(2, 1) => {}
_ => {}
}
match (0u8, 0u8) {
(_, 0) | (_, 1) => {}
_ => {}
}
match (0u8, 0u8) {
(0, _) | (1, _) => {}
_ => {}
}
match Some(0u8) {
None | Some(_) => {}
_ => {} //~ ERROR unreachable pattern
}
match Some(0u8) {
None | Some(_) => {}
Some(_) => {} //~ ERROR unreachable pattern
None => {} //~ ERROR unreachable pattern
}
match Some(0u8) {
Some(_) => {}
None => {}
None | Some(_) => {} //~ ERROR unreachable pattern
}
match 0u8 {
1 | 2 => {},
1..=2 => {}, //~ ERROR unreachable pattern
_ => {},<|fim▁hole|><|fim▁end|> | }
let (0 | 0) = 0 else { return }; //~ ERROR unreachable pattern
} |
<|file_name|>media_player.py<|end_file_name|><|fim▁begin|>"""Vizio SmartCast Device support."""
from datetime import timedelta
import logging
from typing import Any, Callable, Dict, List, Optional, Union
from pyvizio import VizioAsync
from pyvizio.api.apps import find_app_name
from pyvizio.const import APP_HOME, INPUT_APPS, NO_APP_RUNNING, UNKNOWN_APP
from homeassistant.components.media_player import (
DEVICE_CLASS_SPEAKER,
DEVICE_CLASS_TV,
SUPPORT_SELECT_SOUND_MODE,
MediaPlayerEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_ACCESS_TOKEN,
CONF_DEVICE_CLASS,
CONF_EXCLUDE,
CONF_HOST,
CONF_INCLUDE,
CONF_NAME,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import entity_platform
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import (
CONF_ADDITIONAL_CONFIGS,
CONF_APPS,
CONF_VOLUME_STEP,
DEFAULT_TIMEOUT,
DEFAULT_VOLUME_STEP,
DEVICE_ID,
DOMAIN,
ICON,
SERVICE_UPDATE_SETTING,
SUPPORTED_COMMANDS,
UPDATE_SETTING_SCHEMA,
VIZIO_AUDIO_SETTINGS,
VIZIO_DEVICE_CLASSES,
VIZIO_MUTE,
VIZIO_MUTE_ON,
VIZIO_SOUND_MODE,
VIZIO_VOLUME,
)
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=30)
PARALLEL_UPDATES = 0
async def async_setup_entry(
hass: HomeAssistantType,
config_entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up a Vizio media player entry."""
host = config_entry.data[CONF_HOST]
token = config_entry.data.get(CONF_ACCESS_TOKEN)
name = config_entry.data[CONF_NAME]
device_class = config_entry.data[CONF_DEVICE_CLASS]
# If config entry options not set up, set them up, otherwise assign values managed in options
volume_step = config_entry.options.get(
CONF_VOLUME_STEP, config_entry.data.get(CONF_VOLUME_STEP, DEFAULT_VOLUME_STEP)
)
params = {}
if not config_entry.options:
params["options"] = {CONF_VOLUME_STEP: volume_step}
include_or_exclude_key = next(
(
key
for key in config_entry.data.get(CONF_APPS, {})
if key in [CONF_INCLUDE, CONF_EXCLUDE]
),
None,
)
if include_or_exclude_key:
params["options"][CONF_APPS] = {
include_or_exclude_key: config_entry.data[CONF_APPS][
include_or_exclude_key
].copy()
}
if not config_entry.data.get(CONF_VOLUME_STEP):
new_data = config_entry.data.copy()
new_data.update({CONF_VOLUME_STEP: volume_step})
params["data"] = new_data
if params:
hass.config_entries.async_update_entry(config_entry, **params)
device = VizioAsync(
DEVICE_ID,
host,
name,
auth_token=token,
device_type=VIZIO_DEVICE_CLASSES[device_class],
session=async_get_clientsession(hass, False),
timeout=DEFAULT_TIMEOUT,
)
if not await device.can_connect_with_auth_check():
_LOGGER.warning("Failed to connect to %s", host)
raise PlatformNotReady
apps_coordinator = hass.data[DOMAIN].get(CONF_APPS)
entity = VizioDevice(config_entry, device, name, device_class, apps_coordinator)
async_add_entities([entity], update_before_add=True)
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_UPDATE_SETTING, UPDATE_SETTING_SCHEMA, "async_update_setting"
)
class VizioDevice(MediaPlayerEntity):
"""Media Player implementation which performs REST requests to device."""
def __init__(
self,
config_entry: ConfigEntry,
device: VizioAsync,
name: str,
device_class: str,
apps_coordinator: DataUpdateCoordinator,
) -> None:
"""Initialize Vizio device."""
self._config_entry = config_entry
self._apps_coordinator = apps_coordinator
self._name = name
self._state = None
self._volume_level = None
self._volume_step = config_entry.options[CONF_VOLUME_STEP]
self._is_volume_muted = None
self._current_input = None
self._current_app = None
self._current_app_config = None
self._current_sound_mode = None
self._available_sound_modes = []
self._available_inputs = []
self._available_apps = []
self._all_apps = apps_coordinator.data if apps_coordinator else None
self._conf_apps = config_entry.options.get(CONF_APPS, {})
self._additional_app_configs = config_entry.data.get(CONF_APPS, {}).get(
CONF_ADDITIONAL_CONFIGS, []
)
self._device_class = device_class
self._supported_commands = SUPPORTED_COMMANDS[device_class]
self._device = device
self._max_volume = float(self._device.get_max_volume())
self._icon = ICON[device_class]
self._available = True
self._model = None
self._sw_version = None
def _apps_list(self, apps: List[str]) -> List[str]:
"""Return process apps list based on configured filters."""
if self._conf_apps.get(CONF_INCLUDE):
return [app for app in apps if app in self._conf_apps[CONF_INCLUDE]]
if self._conf_apps.get(CONF_EXCLUDE):
return [app for app in apps if app not in self._conf_apps[CONF_EXCLUDE]]
return apps
async def async_update(self) -> None:
"""Retrieve latest state of the device."""
if not self._model:
self._model = await self._device.get_model_name()
if not self._sw_version:
self._sw_version = await self._device.get_version()
is_on = await self._device.get_power_state(log_api_exception=False)
if is_on is None:
if self._available:
_LOGGER.warning(
"Lost connection to %s", self._config_entry.data[CONF_HOST]
)
self._available = False
return
if not self._available:
_LOGGER.info(
"Restored connection to %s", self._config_entry.data[CONF_HOST]
)
self._available = True
if not is_on:
self._state = STATE_OFF
self._volume_level = None
self._is_volume_muted = None
self._current_input = None
self._current_app = None
self._current_app_config = None
self._current_sound_mode = None
return
self._state = STATE_ON
audio_settings = await self._device.get_all_settings(
VIZIO_AUDIO_SETTINGS, log_api_exception=False
)
if audio_settings:
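            # Normalize the device's raw volume value to Home Assistant's 0..1 scale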
self._volume_level = float(audio_settings[VIZIO_VOLUME]) / self._max_volume
if VIZIO_MUTE in audio_settings:
self._is_volume_muted = (
audio_settings[VIZIO_MUTE].lower() == VIZIO_MUTE_ON
)
else:
self._is_volume_muted = None
if VIZIO_SOUND_MODE in audio_settings:
self._supported_commands |= SUPPORT_SELECT_SOUND_MODE
self._current_sound_mode = audio_settings[VIZIO_SOUND_MODE]
if not self._available_sound_modes:
self._available_sound_modes = (
await self._device.get_setting_options(
VIZIO_AUDIO_SETTINGS, VIZIO_SOUND_MODE
)
)
else:
# Explicitly remove SUPPORT_SELECT_SOUND_MODE from supported features
self._supported_commands &= ~SUPPORT_SELECT_SOUND_MODE
input_ = await self._device.get_current_input(log_api_exception=False)
if input_:
self._current_input = input_
inputs = await self._device.get_inputs_list(log_api_exception=False)
# If no inputs returned, end update
if not inputs:
return
self._available_inputs = [input_.name for input_ in inputs]
# Return before setting app variables if INPUT_APPS isn't in available inputs
if self._device_class == DEVICE_CLASS_SPEAKER or not any(
app for app in INPUT_APPS if app in self._available_inputs
):
return
# Create list of available known apps from known app list after
# filtering by CONF_INCLUDE/CONF_EXCLUDE
self._available_apps = self._apps_list([app["name"] for app in self._all_apps])
self._current_app_config = await self._device.get_current_app_config(
log_api_exception=False
)
self._current_app = find_app_name(
self._current_app_config,
[APP_HOME, *self._all_apps, *self._additional_app_configs],
)
if self._current_app == NO_APP_RUNNING:
self._current_app = None
def _get_additional_app_names(self) -> List[Dict[str, Any]]:
"""Return list of additional apps that were included in configuration.yaml."""
return [
additional_app["name"] for additional_app in self._additional_app_configs
]
@staticmethod
async def _async_send_update_options_signal(
hass: HomeAssistantType, config_entry: ConfigEntry
) -> None:
"""Send update event when Vizio config entry is updated."""
# Move this method to component level if another entity ever gets added for a single config entry.
# See here: https://github.com/home-assistant/core/pull/30653#discussion_r366426121
async_dispatcher_send(hass, config_entry.entry_id, config_entry)
async def _async_update_options(self, config_entry: ConfigEntry) -> None:
"""Update options if the update signal comes from this entity."""
self._volume_step = config_entry.options[CONF_VOLUME_STEP]
# Update so that CONF_ADDITIONAL_CONFIGS gets retained for imports
self._conf_apps.update(config_entry.options.get(CONF_APPS, {}))
async def async_update_setting(
self, setting_type: str, setting_name: str, new_value: Union[int, str]
) -> None:
"""Update a setting when update_setting service is called."""
await self._device.set_setting(
setting_type,
setting_name,
new_value,
)
async def async_added_to_hass(self) -> None:
"""Register callbacks when entity is added."""
# Register callback for when config entry is updated.
self.async_on_remove(
self._config_entry.add_update_listener(
self._async_send_update_options_signal
)
)
# Register callback for update event
self.async_on_remove(
async_dispatcher_connect(
self.hass, self._config_entry.entry_id, self._async_update_options
)
)
# Register callback for app list updates if device is a TV
@callback
def apps_list_update():
"""Update list of all apps."""
self._all_apps = self._apps_coordinator.data
self.async_write_ha_state()
if self._device_class == DEVICE_CLASS_TV:
self.async_on_remove(
self._apps_coordinator.async_add_listener(apps_list_update)
)
@property
def available(self) -> bool:
"""Return the availabiliity of the device."""
return self._available
@property
def state(self) -> Optional[str]:
"""Return the state of the device."""
return self._state
@property
def name(self) -> str:
"""Return the name of the device."""
return self._name
<|fim▁hole|> """Return the icon of the device."""
return self._icon
@property
def volume_level(self) -> Optional[float]:
"""Return the volume level of the device."""
return self._volume_level
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._is_volume_muted
@property
def source(self) -> Optional[str]:
"""Return current input of the device."""
if self._current_app is not None and self._current_input in INPUT_APPS:
return self._current_app
return self._current_input
@property
def source_list(self) -> List[str]:
"""Return list of available inputs of the device."""
        # If a Smartcast app is in the input list and the app list has been retrieved,
        # show the combined list of inputs and apps, otherwise just return inputs
if self._available_apps:
return [
*[
_input
for _input in self._available_inputs
if _input not in INPUT_APPS
],
*self._available_apps,
*[
app
for app in self._get_additional_app_names()
if app not in self._available_apps
],
]
return self._available_inputs
@property
def app_id(self) -> Optional[str]:
"""Return the ID of the current app if it is unknown by pyvizio."""
if self._current_app_config and self.app_name == UNKNOWN_APP:
return {
"APP_ID": self._current_app_config.APP_ID,
"NAME_SPACE": self._current_app_config.NAME_SPACE,
"MESSAGE": self._current_app_config.MESSAGE,
}
return None
@property
def app_name(self) -> Optional[str]:
"""Return the friendly name of the current app."""
return self._current_app
@property
def supported_features(self) -> int:
"""Flag device features that are supported."""
return self._supported_commands
@property
def unique_id(self) -> str:
"""Return the unique id of the device."""
return self._config_entry.unique_id
@property
def device_info(self) -> Dict[str, Any]:
"""Return device registry information."""
return {
"identifiers": {(DOMAIN, self._config_entry.unique_id)},
"name": self.name,
"manufacturer": "VIZIO",
"model": self._model,
"sw_version": self._sw_version,
}
@property
def device_class(self) -> str:
"""Return device class for entity."""
return self._device_class
@property
def sound_mode(self) -> Optional[str]:
"""Name of the current sound mode."""
return self._current_sound_mode
@property
def sound_mode_list(self) -> Optional[List[str]]:
"""List of available sound modes."""
return self._available_sound_modes
async def async_select_sound_mode(self, sound_mode):
"""Select sound mode."""
if sound_mode in self._available_sound_modes:
await self._device.set_setting(
VIZIO_AUDIO_SETTINGS, VIZIO_SOUND_MODE, sound_mode
)
async def async_turn_on(self) -> None:
"""Turn the device on."""
await self._device.pow_on()
async def async_turn_off(self) -> None:
"""Turn the device off."""
await self._device.pow_off()
async def async_mute_volume(self, mute: bool) -> None:
"""Mute the volume."""
if mute:
await self._device.mute_on()
self._is_volume_muted = True
else:
await self._device.mute_off()
self._is_volume_muted = False
async def async_media_previous_track(self) -> None:
"""Send previous channel command."""
await self._device.ch_down()
async def async_media_next_track(self) -> None:
"""Send next channel command."""
await self._device.ch_up()
async def async_select_source(self, source: str) -> None:
"""Select input source."""
if source in self._available_inputs:
await self._device.set_input(source)
elif source in self._get_additional_app_names():
await self._device.launch_app_config(
**next(
app["config"]
for app in self._additional_app_configs
if app["name"] == source
)
)
elif source in self._available_apps:
await self._device.launch_app(source, self._all_apps)
async def async_volume_up(self) -> None:
"""Increase volume of the device."""
await self._device.vol_up(num=self._volume_step)
if self._volume_level is not None:
self._volume_level = min(
1.0, self._volume_level + self._volume_step / self._max_volume
)
async def async_volume_down(self) -> None:
"""Decrease volume of the device."""
await self._device.vol_down(num=self._volume_step)
if self._volume_level is not None:
self._volume_level = max(
0.0, self._volume_level - self._volume_step / self._max_volume
)
async def async_set_volume_level(self, volume: float) -> None:
"""Set volume level."""
if self._volume_level is not None:
if volume > self._volume_level:
num = int(self._max_volume * (volume - self._volume_level))
await self._device.vol_up(num=num)
self._volume_level = volume
elif volume < self._volume_level:
num = int(self._max_volume * (self._volume_level - volume))
await self._device.vol_down(num=num)
self._volume_level = volume<|fim▁end|> | @property
def icon(self) -> str: |
<|file_name|>295.js<|end_file_name|><|fim▁begin|>"use strict";
var SHOW_DATA = {
"venue_name": "Merriweather Post Pavilion, Columbia, Maryland",
"venue_id": 76,
"show_date": "26th of Jun, 1984",
"sets": [<|fim▁hole|> "encore": false,
"songs": [
{"name": "Casey Jones", "length":"5:59", "trans":"/"},
{"name": "Feel Like A Stranger", "length":"7:32", "trans":"/"},
{"name": "Althea", "length":"8:00", "trans":"/"},
{"name": "Cassidy", "length":"6:08", "trans":"/"},
{"name": "Tennessee Jed", "length":"7:58", "trans":"/"},
{"name": "Looks Like Rain", "length":"6:32", "trans":"/"},
{"name": "Might As Well", "length":"4:47", "trans":"/"},
]},
{"set_title": "2nd set",
"encore": false,
"songs": [
{"name": "China Cat Sunflower", "length":"3:32", "trans":">"},
{"name": "Weir's Segue", "length":"3:53", "trans":">"},
{"name": "I Know You Rider", "length":"5:37", "trans":"/"},
{"name": "Man Smart, Woman Smarter", "length":"6:30", "trans":"/"},
{"name": "He's Gone", "length":"12:02", "trans":">"},
{"name": "Improv", "length":"5:16", "trans":">"},
{"name": "Drums", "length":"9:49", "trans":">"},
{"name": "Space", "length":"10:01", "trans":">"},
{"name": "Don't Need Love", "length":"8:07", "trans":">"},
{"name": "Prelude", "length":"0:47", "trans":">"},
{"name": "Truckin'", "length":"5:11", "trans":">"},
{"name": "Truckin' Jam", "length":"1:21", "trans":">"},
{"name": "Improv", "length":"0:53", "trans":">"},
{"name": "Wang Dang Doodle", "length":"3:24", "trans":">"},
{"name": "Stella Blue", "length":"9:13", "trans":">"},
{"name": "Around & Around", "length":"3:44", "trans":"/"},
{"name": "Good Lovin'", "length":"7:28", "trans":"/"},
]},
{"set_title": "3rd set",
"encore": false,
"songs": [
{"name": "Keep Your Day Job", "length":"4:14", "trans":"/"},
]},
],
};<|fim▁end|> | {"set_title": "1st set", |
<|file_name|>error_code.rs<|end_file_name|><|fim▁begin|>/*!
* gash : error_code.rs
* cs4414 ps2
* Jeremy Letang / free student
*/
<|fim▁hole|>pub enum ErrorCode {
Exit,
Break,
Continue
}<|fim▁end|> |
/// Error return for commands |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>extern crate pkg_config;
<|fim▁hole|> std::env::var("TARGET").unwrap()
}
fn main() {
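    // Link against Avahi's dns_sd compatibility layer on non-macOS targets;
    // macOS provides Bonjour natively, so no extra library is needed there.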
if !get_target().contains("darwin") {
pkg_config::find_library("avahi-compat-libdns_sd").unwrap();
}
}<|fim▁end|> | fn get_target() -> String { |
<|file_name|>tut-worker-3d.py<|end_file_name|><|fim▁begin|>import bee
from bee import *
import dragonfly
from dragonfly.commandhive import commandhive, commandapp
from dragonfly.sys import exitactuator
from dragonfly.io import display, commandsensor
from dragonfly.std import variable, transistor, test
from dragonfly.sys import on_next_tick
from components.workers.chessprocessor2 import chessprocessor2
from components.workers.chesskeeper import chesskeeper
from components.workers.chessboard2 import chessboard2
from components.workers.except_valueerror import except_valueerror
from components.workers.human import human
from components.workers.computer2b import computer2b
from direct.showbase.ShowBase import taskMgr
from panda3d.core import getModelPath
import os
getModelPath().prependPath(os.getcwd())
from bee import hivemodule
class myapp(commandapp):
def on_tick(self):
taskMgr.step()
taskMgr.step()
class myhive(commandhive):
_hivecontext = hivemodule.appcontext(myapp)
g = chessprocessor2()
exc_v = except_valueerror()
connect(g.evexc, exc_v)
com = commandsensor()
turn = variable("str")("White")
t_turn = transistor("str")()
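    # After each move, copy the processor's current turn into the turn variable on the next tick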
connect(g.turn, t_turn)
connect(t_turn, turn)
on_next = on_next_tick()
connect(on_next, t_turn)
connect(g.made_move, on_next)
on_next2 = on_next_tick()
connect(g.made_move, on_next2)
p1 = computer2b(("White", "glaurung"))
connect(g.turn, p1.turn)
connect(on_next, p1.trigger_move)
p2 = computer2b(("Black", "glaurung"))
connect(g.turn, p2.turn)
connect(on_next2, p2.trigger_move)
k = chesskeeper()
connect(k, g)
connect(p1.move, g)
connect(p2.move, g)
connect(g, k)
connect(g, p1.make_move)
connect(g, p2.make_move)
b = chessboard2(None)
connect(turn, b.turn)
connect(b.get_move, g)
connect(g, b.make_move)
<|fim▁hole|> connect(g, d)
ex = exitactuator()
connect(g.finished, ex)
raiser = bee.raiser()
bee.connect("evexc", raiser)
m = myhive().getinstance()
m.build("m")
m.place()
m.close()
m.init()
m.run()<|fim▁end|> | d = display("str")() |
<|file_name|>zoodefs.rs<|end_file_name|><|fim▁begin|>pub mod perms {
pub const READ: i32 = 1;
pub const WRITE: i32 = 1 << 1;
pub const CREATE: i32 = 1 << 2;
pub const DELETE: i32 = 1 << 3;
pub const ADMIN: i32 = 1 << 4;
pub const ALL: i32 = READ | WRITE | CREATE | DELETE | ADMIN;
}
pub mod acls {
use perms;
use proto::Acl;
fn acl(perm: i32, scheme: &str, id: &str) -> Vec<Acl> {
vec![Acl {
perms: perm,
scheme: scheme.to_owned(),
id: id.to_owned(),
}]
}
<|fim▁hole|> pub static ref OPEN_ACL_UNSAFE: Vec<Acl> = acl(perms::ALL, "world", "anyone");
pub static ref READ_ACL_UNSAFE: Vec<Acl> = acl(perms::READ, "world", "anyone");
}
}<|fim▁end|> | lazy_static!{
pub static ref CREATOR_ALL_ACL: Vec<Acl> = acl(perms::ALL, "auth", ""); |
<|file_name|>question_baseselect.ts<|end_file_name|><|fim▁begin|>import { Serializer } from "./jsonobject";
import { Question } from "./question";
import { Base, SurveyError, ISurveyImpl } from "./base";
import { ItemValue } from "./itemvalue";
import { Helpers, HashTable } from "./helpers";
import { surveyLocalization } from "./surveyStrings";
import { OtherEmptyError } from "./error";
import { ChoicesRestfull } from "./choicesRestfull";
import { LocalizableString } from "./localizablestring";
import { ConditionRunner } from "./conditions";
import { settings } from "./settings";
/**
* It is a base class for checkbox, dropdown and radiogroup questions.
*/
export class QuestionSelectBase extends Question {
public visibleChoicesChangedCallback: () => void;
private filteredChoicesValue: Array<ItemValue> = null;
private conditionChoicesVisibleIfRunner: ConditionRunner;
private conditionChoicesEnableIfRunner: ConditionRunner;
private commentValue: string;
private prevCommentValue: string;
private otherItemValue: ItemValue = new ItemValue("other");
private choicesFromUrl: Array<ItemValue> = null;
private cachedValueForUrlRequests: any = null;
private isChoicesLoaded: boolean = false;
private enableOnLoadingChoices: boolean = false;
constructor(name: string) {
super(name);
var self = this;
this.createItemValues("choices");
this.registerFunctionOnPropertyValueChanged("choices", function () {
if (!self.filterItems()) {
self.onVisibleChoicesChanged();
}
});
this.registerFunctionOnPropertyValueChanged(
"hideIfChoicesEmpty",
function () {
self.updateVisibilityBasedOnChoices();
}
);
this.createNewArray("visibleChoices");
this.setPropertyValue("choicesByUrl", this.createRestfull());
this.choicesByUrl.owner = this;
this.choicesByUrl.loadingOwner = this;
var locOtherText = this.createLocalizableString("otherText", this, true);
this.createLocalizableString("otherErrorText", this, true);
this.otherItemValue.locOwner = this;
this.otherItemValue.setLocText(locOtherText);
locOtherText.onGetTextCallback = function (text) {
return !!text ? text : surveyLocalization.getString("otherItemText");
};
this.choicesByUrl.beforeSendRequestCallback = function () {
self.onBeforeSendRequest();
};
this.choicesByUrl.getResultCallback = function (items: Array<ItemValue>) {
self.onLoadChoicesFromUrl(items);
};
this.choicesByUrl.updateResultCallback = function (
items: Array<ItemValue>,
serverResult: any
): Array<ItemValue> {
if (self.survey) {
return self.survey.updateChoicesFromServer(self, items, serverResult);
}
return items;
};
this.createLocalizableString("otherPlaceHolder", this);
}
public getType(): string {
return "selectbase";
}
public supportGoNextPageError() {
return !this.isOtherSelected || !!this.comment;
}
isLayoutTypeSupported(layoutType: string): boolean {
return true;
}
/**
   * Returns the other item. By using this property, you may programmatically change its value and text.
* @see hasOther
*/
public get otherItem(): ItemValue {
return this.otherItemValue;
}
/**
   * Returns true if a user selects the 'other' item.
*/
public get isOtherSelected(): boolean {
return this.hasOther && this.getHasOther(this.renderedValue);
}
/**
   * An expression that returns true or false. It runs against each choice item; if it returns true for the item, the item is visible, otherwise the item becomes invisible. Please use {item} to get the current item value in the expression.
* @see visibleIf
* @see choicesEnableIf
*/
public get choicesVisibleIf(): string {
return this.getPropertyValue("choicesVisibleIf", "");
}
public set choicesVisibleIf(val: string) {
this.setPropertyValue("choicesVisibleIf", val);
this.filterItems();
}
/**
   * An expression that returns true or false. It runs against each choice item; if it returns true for the item, the item is enabled, otherwise the item becomes disabled. Please use {item} to get the current item value in the expression.
* @see choicesVisibleIf
*/
public get choicesEnableIf(): string {
return this.getPropertyValue("choicesEnableIf", "");
}
public set choicesEnableIf(val: string) {
this.setPropertyValue("choicesEnableIf", val);
this.filterItems();
}
public runCondition(values: HashTable<any>, properties: HashTable<any>) {
super.runCondition(values, properties);
this.runItemsEnableCondition(values, properties);
this.runItemsCondition(values, properties);
}
protected isTextValue(): boolean {
return true; //for comments and others
}
private isSettingDefaultValue: boolean = false;
protected setDefaultValue() {
this.isSettingDefaultValue =
!this.isValueEmpty(this.defaultValue) &&
this.hasUnknownValue(this.defaultValue);
super.setDefaultValue();
this.isSettingDefaultValue = false;
}
protected getIsMultipleValue(): boolean {
return false;
}
protected convertDefaultValue(val: any): any {
if (val == null || val == undefined) return val;
if (this.getIsMultipleValue()) {
if (!Array.isArray(val)) return [val];
} else {
if (Array.isArray(val) && val.length > 0) return val[0];
}
return val;
}
protected filterItems(): boolean {
if (
this.isLoadingFromJson ||
!this.data ||
this.areInvisibleElementsShowing
)
return false;
var values = this.getDataFilteredValues();
var properties = this.getDataFilteredProperties();
this.runItemsEnableCondition(values, properties);
return this.runItemsCondition(values, properties);
}
protected runItemsCondition(
values: HashTable<any>,
properties: HashTable<any>
): boolean {
this.setConditionalChoicesRunner();
var hasChanges = this.runConditionsForItems(values, properties);
if (
!!this.filteredChoicesValue &&
this.filteredChoicesValue.length === this.activeChoices.length
) {
this.filteredChoicesValue = null;
}
if (hasChanges) {
this.onVisibleChoicesChanged();
this.clearIncorrectValues();
}
return hasChanges;
}
protected runItemsEnableCondition(
values: HashTable<any>,
properties: HashTable<any>
): any {
this.setConditionalEnableChoicesRunner();
var hasChanged = ItemValue.runEnabledConditionsForItems(
this.activeChoices,
this.conditionChoicesEnableIfRunner,
values,
properties,
(item: ItemValue): boolean => {
return this.onEnableItemCallBack(item);
}
);
if (hasChanged) {
this.clearDisabledValues();
}
this.onAfterRunItemsEnableCondition();
}
protected onAfterRunItemsEnableCondition() {}
protected onEnableItemCallBack(item: ItemValue): boolean {
return true;
}
private setConditionalChoicesRunner() {
if (this.choicesVisibleIf) {
if (!this.conditionChoicesVisibleIfRunner) {
this.conditionChoicesVisibleIfRunner = new ConditionRunner(
this.choicesVisibleIf
);
}
this.conditionChoicesVisibleIfRunner.expression = this.choicesVisibleIf;
} else {
this.conditionChoicesVisibleIfRunner = null;
}
}
private setConditionalEnableChoicesRunner() {
if (this.choicesEnableIf) {
if (!this.conditionChoicesEnableIfRunner) {
this.conditionChoicesEnableIfRunner = new ConditionRunner(
this.choicesEnableIf
);
}
this.conditionChoicesEnableIfRunner.expression = this.choicesEnableIf;
} else {
this.conditionChoicesEnableIfRunner = null;
}
}
private runConditionsForItems(
values: HashTable<any>,
properties: HashTable<any>
): boolean {
this.filteredChoicesValue = [];
return ItemValue.runConditionsForItems(
this.activeChoices,
this.filteredChoices,
this.areInvisibleElementsShowing
? null
: this.conditionChoicesVisibleIfRunner,
values,
properties,
!this.survey || !this.survey.areInvisibleElementsShowing
);
}
protected getHasOther(val: any): boolean {
return val === this.otherItem.value;
}
get validatedValue(): any {
return this.rendredValueToDataCore(this.value);
}
protected createRestfull(): ChoicesRestfull {
return new ChoicesRestfull();
}
protected getQuestionComment(): string {
if (!!this.commentValue) return this.commentValue;
if (this.hasComment || this.getStoreOthersAsComment())
return super.getQuestionComment();
return this.commentValue;
}
private isSettingComment: boolean = false;
protected setQuestionComment(newValue: string) {
if (this.hasComment || this.getStoreOthersAsComment())
super.setQuestionComment(newValue);
else {
if (!this.isSettingComment && newValue != this.commentValue) {
this.isSettingComment = true;
this.commentValue = newValue;
if (this.isOtherSelected && !this.isRenderedValueSetting) {
this.value = this.rendredValueToData(this.renderedValue);
}
this.isSettingComment = false;
}
}
}
public get renderedValue(): any {
return this.getPropertyValue("renderedValue", null);
}
public set renderedValue(val: any) {
this.setPropertyValue("renderedValue", val);
this.value = this.rendredValueToData(val);
}
protected setQuestionValue(newValue: any, updateIsAnswered: boolean = true) {
if (
this.isLoadingFromJson ||
Helpers.isTwoValueEquals(this.value, newValue)
)
return;
super.setQuestionValue(newValue, updateIsAnswered);
this.setPropertyValue("renderedValue", this.rendredValueFromData(newValue));
if (this.hasComment) return;
var isOtherSel = this.isOtherSelected;
if (isOtherSel && !!this.prevCommentValue) {
var oldComment = this.prevCommentValue;
this.prevCommentValue = "";
this.comment = oldComment;
}
if (!isOtherSel && !!this.comment) {
if (this.getStoreOthersAsComment()) {
this.prevCommentValue = this.comment;
}
this.comment = "";
}<|fim▁hole|> (!this.choicesByUrl.isRunning &&
!this.choicesByUrl.isWaitingForParameters) ||
!this.isValueEmpty(newValue)
) {
this.cachedValueForUrlRequests = newValue;
}
super.setNewValue(newValue);
}
protected valueFromData(val: any): any {
let choiceitem = ItemValue.getItemByValue(this.activeChoices, val);
if (!!choiceitem) {
return choiceitem.value;
}
return super.valueFromData(val);
}
protected rendredValueFromData(val: any): any {
if (this.getStoreOthersAsComment()) return val;
return this.renderedValueFromDataCore(val);
}
protected rendredValueToData(val: any): any {
if (this.getStoreOthersAsComment()) return val;
return this.rendredValueToDataCore(val);
}
protected renderedValueFromDataCore(val: any): any {
if (!this.hasUnknownValue(val, true)) return this.valueFromData(val);
this.comment = val;
return this.otherItem.value;
}
protected rendredValueToDataCore(val: any): any {
if (val == this.otherItem.value && this.getQuestionComment()) {
val = this.getQuestionComment();
}
return val;
}
protected hasUnknownValue(val: any, includeOther: boolean = false): boolean {
if (Helpers.isValueEmpty(val)) return false;
if (includeOther && val == this.otherItem.value) return false;
return ItemValue.getItemByValue(this.filteredChoices, val) == null;
}
protected isValueDisabled(val: any): boolean {
var itemValue = ItemValue.getItemByValue(this.filteredChoices, val);
return !!itemValue && !itemValue.isEnabled;
}
/**
* If the clearIncorrectValuesCallback is set, it is used to clear incorrect values instead of default behaviour.
*/
public clearIncorrectValuesCallback: () => void;
/**
* Use this property to fill the choices from a restful service.
* @see choices
*/
public get choicesByUrl(): ChoicesRestfull {
return this.getPropertyValue("choicesByUrl");
}
/**
* The list of items. Every item has value and text. If text is empty, the value is rendered. The item text supports markdown.
* @see choicesByUrl
*/
public get choices(): Array<any> {
return this.getPropertyValue("choices");
}
public set choices(newValue: Array<any>) {
this.setPropertyValue("choices", newValue);
}
/**
* Set this property to true to hide the question if there is no visible choices.
*/
public get hideIfChoicesEmpty(): boolean {
return this.getPropertyValue("hideIfChoicesEmpty", false);
}
public set hideIfChoicesEmpty(val: boolean) {
this.setPropertyValue("hideIfChoicesEmpty", val);
}
public get keepIncorrectValues(): boolean {
return this.getPropertyValue("keepIncorrectValues", false);
}
public set keepIncorrectValues(val: boolean) {
this.setPropertyValue("keepIncorrectValues", val);
}
/**
   * Please use survey.storeOthersAsComment to change the behavior on the survey level. This property is deprecated and invisible in Survey Creator.
   * By default the text entered into the others input of the checkbox/radiogroup/dropdown is stored as "question name " + "-Comment". The value itself is "question name": "others". Set this property to false to store the entered text directly in the "question name" key.
* Possible values are: "default", true, false
* @see SurveyModel.storeOthersAsComment
*/
public get storeOthersAsComment(): any {
return this.getPropertyValue("storeOthersAsComment", "default");
}
public set storeOthersAsComment(val: any) {
this.setPropertyValue("storeOthersAsComment", val);
}
protected hasOtherChanged() {
this.onVisibleChoicesChanged();
}
/**
* Use this property to render items in a specific order: "asc", "desc", "random". Default value is "none".
*/
public get choicesOrder(): string {
return this.getPropertyValue("choicesOrder");
}
public set choicesOrder(val: string) {
val = val.toLowerCase();
if (val == this.choicesOrder) return;
this.setPropertyValue("choicesOrder", val);
this.onVisibleChoicesChanged();
}
/**
   * Use this property to set a different text for the other item.
*/
public get otherText(): string {
return this.getLocalizableStringText(
"otherText",
surveyLocalization.getString("otherItemText")
);
}
public set otherText(val: string) {
this.setLocalizableStringText("otherText", val);
this.onVisibleChoicesChanged();
}
get locOtherText(): LocalizableString {
return this.getLocalizableString("otherText");
}
/**
   * Use this property to set the placeholder text for the other or comment field.
*/
public get otherPlaceHolder(): string {
return this.getLocalizableStringText("otherPlaceHolder");
}
public set otherPlaceHolder(val: string) {
this.setLocalizableStringText("otherPlaceHolder", val);
}
get locOtherPlaceHolder(): LocalizableString {
return this.getLocalizableString("otherPlaceHolder");
}
/**
   * The text that shows when the other item is chosen but the other input is left empty.
*/
public get otherErrorText(): string {
return this.getLocalizableStringText(
"otherErrorText",
surveyLocalization.getString("otherRequiredError")
);
}
public set otherErrorText(val: string) {
this.setLocalizableStringText("otherErrorText", val);
}
get locOtherErrorText(): LocalizableString {
return this.getLocalizableString("otherErrorText");
}
/**
   * The list of items as they will be rendered. If needed, the items are sorted and the other item is added.
* @see hasOther
* @see choicesOrder
* @see enabledChoices
*/
public get visibleChoices(): Array<ItemValue> {
return this.getPropertyValue("visibleChoices", []);
}
/**
* The list of enabled items as they will be rendered. The disabled items are not included
* @see hasOther
* @see choicesOrder
* @see visibleChoices
*/
public get enabledChoices(): Array<ItemValue> {
var res = [];
var items = this.visibleChoices;
for (var i = 0; i < items.length; i++) {
if (items[i].isEnabled) res.push(items[i]);
}
return res;
}
protected updateVisibleChoices() {
if (this.isLoadingFromJson) return;
var newValue = new Array<ItemValue>();
var calcValue = this.calcVisibleChoices();
if (!calcValue) calcValue = [];
for (var i = 0; i < calcValue.length; i++) {
newValue.push(calcValue[i]);
}
this.setPropertyValue("visibleChoices", newValue);
}
private calcVisibleChoices(): Array<ItemValue> {
if (this.canUseFilteredChoices()) return this.filteredChoices;
var res = this.sortVisibleChoices(this.filteredChoices.slice());
this.addToVisibleChoices(res);
return res;
}
protected canUseFilteredChoices(): boolean {
return !this.hasOther && this.choicesOrder == "none";
}
protected addToVisibleChoices(items: Array<ItemValue>) {
if (this.hasOther) {
items.push(this.otherItem);
}
}
public getPlainData(
options: {
includeEmpty?: boolean;
includeQuestionTypes?: boolean;
calculations?: Array<{
propertyName: string;
}>;
} = {
includeEmpty: true,
includeQuestionTypes: false,
}
) {
var questionPlainData = super.getPlainData(options);
if (!!questionPlainData) {
var values = Array.isArray(this.value) ? this.value : [this.value];
questionPlainData.isNode = true;
questionPlainData.data = (questionPlainData.data || []).concat(
values.map((dataValue, index) => {
var choice = ItemValue.getItemByValue(this.visibleChoices, dataValue);
var choiceDataItem = <any>{
name: index,
title: "Choice",
value: dataValue,
displayValue: this.getChoicesDisplayValue(
this.visibleChoices,
dataValue
),
getString: (val: any) =>
typeof val === "object" ? JSON.stringify(val) : val,
isNode: false,
};
if (!!choice) {
(options.calculations || []).forEach((calculation) => {
choiceDataItem[calculation.propertyName] =
choice[calculation.propertyName];
});
}
if (this.isOtherSelected && this.otherItemValue === choice) {
choiceDataItem.isOther = true;
choiceDataItem.displayValue = this.comment;
}
return choiceDataItem;
})
);
}
return questionPlainData;
}
/**
* Returns the text for the current value. If the value is null then returns empty string. If 'other' is selected then returns the text for other value.
*/
protected getDisplayValueCore(keysAsText: boolean, value: any): any {
return this.getChoicesDisplayValue(this.visibleChoices, value);
}
protected getChoicesDisplayValue(items: ItemValue[], val: any): any {
if (val == this.otherItemValue.value)
return this.comment ? this.comment : this.locOtherText.textOrHtml;
var str = ItemValue.getTextOrHtmlByValue(items, val);
return str == "" && val ? val : str;
}
private get filteredChoices(): Array<ItemValue> {
return this.filteredChoicesValue
? this.filteredChoicesValue
: this.activeChoices;
}
protected get activeChoices(): Array<ItemValue> {
return this.choicesFromUrl ? this.choicesFromUrl : this.getChoices();
}
protected getChoices(): Array<ItemValue> {
return this.choices;
}
public supportComment(): boolean {
return true;
}
public supportOther(): boolean {
return true;
}
protected onCheckForErrors(
errors: Array<SurveyError>,
isOnValueChanged: boolean
) {
super.onCheckForErrors(errors, isOnValueChanged);
if (!this.hasOther || !this.isOtherSelected || this.comment) return;
errors.push(new OtherEmptyError(this.otherErrorText, this));
}
public setSurveyImpl(value: ISurveyImpl) {
super.setSurveyImpl(value);
this.runChoicesByUrl();
}
protected getStoreOthersAsComment() {
if (this.isSettingDefaultValue) return false;
return (
this.storeOthersAsComment === true ||
(this.storeOthersAsComment == "default" &&
(this.survey != null ? this.survey.storeOthersAsComment : true)) ||
(!this.choicesByUrl.isEmpty && !this.choicesFromUrl)
);
}
onSurveyLoad() {
super.onSurveyLoad();
this.runChoicesByUrl();
this.onVisibleChoicesChanged();
}
onAnyValueChanged(name: string) {
super.onAnyValueChanged(name);
if (name != this.getValueName()) {
this.runChoicesByUrl();
}
}
updateValueFromSurvey(newValue: any) {
var newComment = "";
if (
this.hasOther &&
this.getStoreOthersAsComment() &&
this.hasUnknownValue(newValue) &&
!this.getHasOther(newValue)
) {
newComment = this.getCommentFromValue(newValue);
newValue = this.setOtherValueIntoValue(newValue);
}
super.updateValueFromSurvey(newValue);
if (!!newComment) {
this.setNewComment(newComment);
}
}
protected getCommentFromValue(newValue: any): string {
return newValue;
}
protected setOtherValueIntoValue(newValue: any): any {
return this.otherItem.value;
}
private isRunningChoices: boolean = false;
private runChoicesByUrl() {
if (!this.choicesByUrl || this.isLoadingFromJson || this.isRunningChoices)
return;
var processor = this.surveyImpl
? this.surveyImpl.getTextProcessor()
: this.textProcessor;
if (!processor) processor = this.survey;
if (!processor) return;
this.isReadyValue = this.isChoicesLoaded || this.choicesByUrl.isEmpty;
this.isRunningChoices = true;
this.choicesByUrl.run(processor);
this.isRunningChoices = false;
}
private isFirstLoadChoicesFromUrl = true;
protected onBeforeSendRequest() {
if (settings.disableOnGettingChoicesFromWeb === true && !this.isReadOnly) {
this.enableOnLoadingChoices = true;
this.readOnly = true;
}
}
protected onLoadChoicesFromUrl(array: Array<ItemValue>) {
if (this.enableOnLoadingChoices) {
this.readOnly = false;
}
if (!this.isReadOnly) {
var errors = [];
if (this.choicesByUrl && this.choicesByUrl.error) {
errors.push(this.choicesByUrl.error);
}
this.errors = errors;
}
var newChoices = null;
var checkCachedValuesOnExisting = true;
if (
this.isFirstLoadChoicesFromUrl &&
!this.cachedValueForUrlRequests &&
this.defaultValue
) {
this.cachedValueForUrlRequests = this.defaultValue;
checkCachedValuesOnExisting = false;
}
if (this.isValueEmpty(this.cachedValueForUrlRequests)) {
this.cachedValueForUrlRequests = this.value;
}
this.isFirstLoadChoicesFromUrl = false;
var cachedValues = this.createCachedValueForUrlRequests(
this.cachedValueForUrlRequests,
checkCachedValuesOnExisting
);
if (array && array.length > 0) {
newChoices = new Array<ItemValue>();
ItemValue.setData(newChoices, array);
}
this.choicesFromUrl = newChoices;
this.filterItems();
this.onVisibleChoicesChanged();
if (newChoices) {
var newValue = this.updateCachedValueForUrlRequests(
cachedValues,
newChoices
);
if (!!newValue && !this.isReadOnly) {
var hasChanged = !Helpers.isTwoValueEquals(this.value, newValue.value);
try {
if (!Helpers.isValueEmpty(newValue.value)) {
this.allowNotifyValueChanged = false;
this.locNotificationInData = true;
this.value = undefined;
this.locNotificationInData = false;
}
this.allowNotifyValueChanged = hasChanged;
this.value = newValue.value;
} finally {
this.allowNotifyValueChanged = true;
}
}
}
this.choicesLoaded();
}
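  // Wraps the cached value (or each element of an array value) as {value, isExists}
  // so that values can be validated and remapped once choices arrive from the web service.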
private createCachedValueForUrlRequests(
val: any,
checkOnExisting: boolean
): any {
if (this.isValueEmpty(val)) return null;
if (Array.isArray(val)) {
var res = [];
for (var i = 0; i < val.length; i++) {
res.push(this.createCachedValueForUrlRequests(val[i], true));
}
return res;
}
var isExists = checkOnExisting ? !this.hasUnknownValue(val) : true;
return { value: val, isExists: isExists };
}
private updateCachedValueForUrlRequests(
val: any,
newChoices: Array<ItemValue>
): any {
if (this.isValueEmpty(val)) return null;
if (Array.isArray(val)) {
var res = [];
for (var i = 0; i < val.length; i++) {
var updatedValue = this.updateCachedValueForUrlRequests(
val[i],
newChoices
);
if (updatedValue && !this.isValueEmpty(updatedValue.value)) {
var newValue = updatedValue.value;
var item = ItemValue.getItemByValue(newChoices, updatedValue.value);
if (!!item) {
newValue = item.value;
}
res.push(newValue);
}
}
return { value: res };
}
var value =
val.isExists && this.hasUnknownValue(val.value) ? null : val.value;
var item = ItemValue.getItemByValue(newChoices, value);
if (!!item) {
value = item.value;
}
return { value: value };
}
protected onVisibleChoicesChanged() {
if (this.isLoadingFromJson) return;
this.updateVisibleChoices();
this.updateVisibilityBasedOnChoices();
if (!!this.visibleChoicesChangedCallback)
this.visibleChoicesChangedCallback();
}
private updateVisibilityBasedOnChoices() {
if (this.hideIfChoicesEmpty) {
this.visible = !this.filteredChoices || this.filteredChoices.length > 0;
}
}
private sortVisibleChoices(array: Array<ItemValue>): Array<ItemValue> {
var order = this.choicesOrder.toLowerCase();
if (order == "asc") return this.sortArray(array, 1);
if (order == "desc") return this.sortArray(array, -1);
if (order == "random") return this.randomizeArray(array);
return array;
}
private sortArray(array: Array<ItemValue>, mult: number): Array<ItemValue> {
return array.sort(function (a, b) {
if (a.calculatedText < b.calculatedText) return -1 * mult;
if (a.calculatedText > b.calculatedText) return 1 * mult;
return 0;
});
}
private randomizeArray(array: Array<ItemValue>): Array<ItemValue> {
return Helpers.randomizeArray<ItemValue>(array);
}
public clearIncorrectValues() {
if (this.keepIncorrectValues) return;
if (
!!this.survey &&
this.survey.questionCountByValueName(this.getValueName()) > 1
)
return;
if (!!this.choicesByUrl && !this.choicesByUrl.isEmpty) return;
if (this.clearIncorrectValuesCallback) {
this.clearIncorrectValuesCallback();
} else {
this.clearIncorrectValuesCore();
}
}
public clearValueIfInvisible() {
super.clearValueIfInvisible();
this.clearIncorrectValues();
}
/**
* Returns true if item is selected
* @param item checkbox or radio item value
*/
public isItemSelected(item: ItemValue): boolean {
return item.value === this.value;
}
private clearDisabledValues() {
if (!this.survey || !this.survey.clearValueOnDisableItems) return;
this.clearDisabledValuesCore();
}
protected clearIncorrectValuesCore() {
var val = this.value;
if (this.canClearValueAnUnknow(val)) {
this.clearValue();
}
}
protected canClearValueAnUnknow(val: any): boolean {
if (!this.getStoreOthersAsComment() && this.isOtherSelected) return false;
return this.hasUnknownValue(val, true);
}
protected clearDisabledValuesCore() {
if (this.isValueDisabled(this.value)) {
this.clearValue();
}
}
clearUnusedValues() {
super.clearUnusedValues();
if (!this.isOtherSelected && !this.hasComment) {
this.comment = "";
}
}
getColumnClass() {
var columnClass = this.cssClasses.column;
if (this.hasColumns) {
columnClass += " sv-q-column-" + this.colCount;
}
return columnClass;
}
getLabelClass(item: ItemValue) {
var labelClass = this.cssClasses.label;
if (this.isItemSelected(item)) {
labelClass += " " + this.cssClasses.labelChecked;
}
return labelClass;
}
getControlLabelClass(item: ItemValue) {
var controlLabelClass = this.cssClasses.controlLabel;
if (this.isItemSelected(item)) {
controlLabelClass += " " + this.cssClasses.controlLabelChecked;
}
return controlLabelClass;
}
get columns() {
var columns = [];
var colCount = this.colCount;
if (this.hasColumns && this.visibleChoices.length > 0) {
if (settings.showItemsInOrder == "column") {
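        // Fill columns top-to-bottom: each column gets floor(n / colCount) items,
        // and the first (n % colCount) columns receive one extra item.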
var prevIndex = 0;
var leftElementsCount = this.visibleChoices.length % colCount;
for (var i = 0; i < colCount; i++) {
var column = [];
for (
var j = prevIndex;
j < prevIndex + Math.floor(this.visibleChoices.length / colCount);
j++
) {
column.push(this.visibleChoices[j]);
}
if (leftElementsCount > 0) {
leftElementsCount--;
column.push(this.visibleChoices[j]);
j++;
}
prevIndex = j;
columns.push(column);
}
} else {
for (var i = 0; i < colCount; i++) {
var column = [];
for (var j = i; j < this.visibleChoices.length; j += colCount) {
column.push(this.visibleChoices[j]);
}
columns.push(column);
}
}
}
return columns;
}
get hasColumns() {
return this.colCount > 1;
}
public choicesLoaded(): void {
this.isChoicesLoaded = true;
let oldIsReady: boolean = this.isReadyValue;
this.isReadyValue = true;
this.onReadyChanged &&
this.onReadyChanged.fire(this, {
question: this,
isReady: true,
oldIsReady: oldIsReady,
});
}
}
/**
 * A base class for checkbox and radiogroup questions. It introduces a colCount property.
*/
export class QuestionCheckboxBase extends QuestionSelectBase {
colCountChangedCallback: () => void;
constructor(public name: string) {
super(name);
}
/**
   * The number of columns for radiogroup and checkbox questions. Items are rendered in one line if the value is 0.
*/
public get colCount(): number {
return this.getPropertyValue("colCount", this.isFlowLayout ? 0 : 1);
}
public set colCount(value: number) {
if (value < 0 || value > 5 || this.isFlowLayout) return;
this.setPropertyValue("colCount", value);
this.fireCallback(this.colCountChangedCallback);
}
getItemIndex(item: any) {
return this.visibleChoices.indexOf(item);
}
protected onParentChanged() {
super.onParentChanged();
if (this.isFlowLayout) {
this.setPropertyValue("colCount", null);
}
}
}
Serializer.addClass(
"selectbase",
[
{ name: "hasComment:switch", layout: "row" },
{
name: "commentText",
dependsOn: "hasComment",
visibleIf: function (obj: any) {
return obj.hasComment;
},
serializationProperty: "locCommentText",
layout: "row",
},
{
name: "choices:itemvalue[]",
baseValue: function () {
return surveyLocalization.getString("choices_Item");
},
},
{
name: "choicesOrder",
default: "none",
choices: ["none", "asc", "desc", "random"],
},
{
name: "choicesByUrl:restfull",
className: "ChoicesRestfull",
onGetValue: function (obj: any) {
return obj.choicesByUrl.getData();
},
onSetValue: function (obj: any, value: any) {
obj.choicesByUrl.setData(value);
},
},
"hideIfChoicesEmpty:boolean",
"choicesVisibleIf:condition",
"choicesEnableIf:condition",
"hasOther:boolean",
{
name: "otherPlaceHolder",
serializationProperty: "locOtherPlaceHolder",
dependsOn: "hasOther",
visibleIf: function (obj: any) {
return obj.hasOther;
},
},
{
name: "otherText",
serializationProperty: "locOtherText",
dependsOn: "hasOther",
visibleIf: function (obj: any) {
return obj.hasOther;
},
},
{
name: "otherErrorText",
serializationProperty: "locOtherErrorText",
dependsOn: "hasOther",
visibleIf: function (obj: any) {
return obj.hasOther;
},
},
{
name: "storeOthersAsComment",
default: "default",
choices: ["default", true, false],
visible: false,
},
],
null,
"question"
);
Serializer.addClass(
"checkboxbase",
[
{
name: "colCount:number",
default: 1,
choices: [0, 1, 2, 3, 4, 5],
layout: "row",
},
],
null,
"selectbase"
);<|fim▁end|> | }
protected setNewValue(newValue: any) {
newValue = this.valueFromData(newValue);
if ( |
<|file_name|>PluginListener.java<|end_file_name|><|fim▁begin|>/**
*
*/
package jframe.core.plugin;
import java.util.EventListener;
/**
* @author dzh
* @date Sep 12, 2013 9:42:33 PM
* @since 1.0
*/
public interface PluginListener extends EventListener {
<|fim▁hole|> void pluginChanged(PluginEvent event);
}<|fim▁end|> | |
<|file_name|>formatsvgz.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*-
# Copyright 2014 Lukas Kemmer
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Load and save functions for zipped svg files."""
import faint.svg.parse_svg as parse_svg
import faint.svg.write_svg as write_svg
def load(filename, imageprops):
"""Load image from the zipped svg file."""
parse_svg.parse_svgz_file(filename, imageprops, "en")
def save(filename, canvas):
"""Save the image to the specified file as zipped svg."""
write_svg.write_svgz(filename, canvas)<|fim▁end|> | #!/usr/bin/env python3 |
<|file_name|>trait-bounds-impl-comparison-1.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// Make sure rustc checks the type parameter bounds in implementations of traits,
// see #2687
trait A {}
trait B: A {}
trait C: A {}
trait Foo {
fn test_error1_fn<T: Eq>(&self);
fn test_error2_fn<T: Eq + Ord>(&self);
fn test_error3_fn<T: Eq + Ord>(&self);
fn test3_fn<T: Eq + Ord>(&self);
fn test4_fn<T: Eq + Ord>(&self);
fn test_error5_fn<T: A>(&self);
fn test6_fn<T: A + Eq>(&self);
fn test_error7_fn<T: A>(&self);
fn test_error8_fn<T: B>(&self);
}
impl Foo for isize {
// invalid bound for T, was defined as Eq in trait
fn test_error1_fn<T: Ord>(&self) {}
//~^ ERROR the requirement `T : core::cmp::Ord` appears on the impl
// invalid bound for T, was defined as Eq + Ord in trait
fn test_error2_fn<T: Eq + B>(&self) {}
//~^ ERROR the requirement `T : B` appears on the impl
// invalid bound for T, was defined as Eq + Ord in trait
fn test_error3_fn<T: B + Eq>(&self) {}
//~^ ERROR the requirement `T : B` appears on the impl
// multiple bounds, same order as in trait
fn test3_fn<T: Ord + Eq>(&self) {}
// multiple bounds, different order as in trait
fn test4_fn<T: Eq + Ord>(&self) {}
// parameters in impls must be equal or more general than in the defining trait
fn test_error5_fn<T: B>(&self) {}
//~^ ERROR the requirement `T : B` appears on the impl
// bound `std::cmp::Eq` not enforced by this implementation, but this is OK
fn test6_fn<T: A>(&self) {}
fn test_error7_fn<T: A + Eq>(&self) {}
//~^ ERROR the requirement `T : core::cmp::Eq` appears on the impl
fn test_error8_fn<T: C>(&self) {}
//~^ ERROR the requirement `T : C` appears on the impl
}
trait Getter<T> { }
trait Trait {
fn method<G:Getter<isize>>();
}
<|fim▁hole|> fn method<G: Getter<usize>>() {}
//~^ G : Getter<usize>` appears on the impl method but not on the corresponding trait method
}
fn main() {}<|fim▁end|> | impl Trait for usize { |
<|file_name|>field_isempty.rs<|end_file_name|><|fim▁begin|>//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA<|fim▁hole|>//
use libimagstore::store::Entry;
use toml_query::read::TomlValueReadExt;
use builtin::header::field_path::FieldPath;
use filters::filter::Filter;
use toml::Value;
pub struct FieldIsEmpty {
header_field_path: FieldPath,
}
impl FieldIsEmpty {
pub fn new(path: FieldPath) -> FieldIsEmpty {
FieldIsEmpty {
header_field_path: path,
}
}
}
impl Filter<Entry> for FieldIsEmpty {
fn filter(&self, e: &Entry) -> bool {
e.get_header()
.read(&self.header_field_path[..])
.map(|v| {
match v {
Some(&Value::Array(ref a)) => a.is_empty(),
Some(&Value::String(ref s)) => s.is_empty(),
Some(&Value::Table(ref t)) => t.is_empty(),
Some(&Value::Boolean(_)) |
Some(&Value::Float(_)) |
Some(&Value::Integer(_)) => false,
_ => true,
}
})
.unwrap_or(false)
}
}<|fim▁end|> | |
<|file_name|>order.ts<|end_file_name|><|fim▁begin|>// Copyright 2017 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import {describe, it, beforeEach, afterEach} from 'mocha';
import {expect} from 'chai';
import {google} from '../protos/firestore_v1_proto_api';
import {
Firestore,
QueryDocumentSnapshot,
setLogFunction,
Timestamp,
} from '../src';
import {GeoPoint} from '../src';
import {DocumentReference} from '../src';
import * as order from '../src/order';
import {QualifiedResourcePath} from '../src/path';
import {createInstance, InvalidApiUsage, verifyInstance} from './util/helpers';
import api = google.firestore.v1;
// Change the argument to 'console.log' to enable debug output.
setLogFunction(null);
describe('Order', () => {
let firestore: Firestore;
beforeEach(() => {
return createInstance().then(firestoreInstance => {
firestore = firestoreInstance;
});
});
afterEach(() => verifyInstance(firestore));
/** Converts a value into its proto representation. */
function wrap(value: unknown): api.IValue {
const val = firestore._serializer!.encodeValue(value);
expect(val).to.not.be.null;
return val!;
}
function blob(data: number[]): api.IValue {
return wrap(Buffer.from(data));
}
function resource(pathString: string): api.IValue {
return wrap(
new DocumentReference(
firestore,
QualifiedResourcePath.fromSlashSeparatedString(pathString)
)
);
}
function geopoint(lat: number, lng: number): api.IValue {
return wrap(new GeoPoint(lat, lng));
}
function int(n: number): api.IValue {
return {
integerValue: n,
};
}
function double(n: number): api.IValue {
return {
doubleValue: n,
};
}
it('throws on invalid value', () => {
expect(() => {
order.compare(
{valueType: 'foo'} as InvalidApiUsage,
{valueType: 'foo'} as InvalidApiUsage
);
}).to.throw('Unexpected value type: foo');
});
it('throws on invalid blob', () => {
expect(() => {
order.compare(
{
bytesValue: new Uint8Array([1, 2, 3]),
},
{
bytesValue: new Uint8Array([1, 2, 3]),
}
);
}).to.throw('Blobs can only be compared if they are Buffers');
});
it('compares document snapshots by name', () => {
const docs = [
new QueryDocumentSnapshot(
firestore.doc('col/doc3'),
{},
Timestamp.now(),
Timestamp.now(),
Timestamp.now()
),
new QueryDocumentSnapshot(
firestore.doc('col/doc2'),
{},
Timestamp.now(),
Timestamp.now(),
Timestamp.now()
),
new QueryDocumentSnapshot(
firestore.doc('col/doc2'),
{},
Timestamp.now(),
Timestamp.now(),
Timestamp.now()
),
new QueryDocumentSnapshot(
firestore.doc('col/doc1'),
{},
Timestamp.now(),
Timestamp.now(),
Timestamp.now()
),
];
docs.sort(firestore.collection('col').comparator());
expect(docs.map(doc => doc.id)).to.deep.eq([
'doc1',
'doc2',
'doc2',
'doc3',
]);
});
it('is correct', () => {
const groups = [
// null first
[wrap(null)],
// booleans
[wrap(false)],
[wrap(true)],
// numbers
[double(NaN), double(NaN)],
[double(-Infinity)],
[double(-Number.MAX_VALUE)],
[int(Number.MIN_SAFE_INTEGER - 1)],
[int(Number.MIN_SAFE_INTEGER)],
[double(-1.1)],
// Integers and Doubles order the same.
[int(-1), double(-1.0)],
[double(-Number.MIN_VALUE)],
// zeros all compare the same.
[int(0), double(0.0), double(-0)],
[double(Number.MIN_VALUE)],
[int(1), double(1.0)],
[double(1.1)],
[int(2)],
[int(10)],
[int(Number.MAX_SAFE_INTEGER)],
[int(Number.MAX_SAFE_INTEGER + 1)],
[double(Infinity)],
// timestamps
[wrap(new Date(2016, 5, 20, 10, 20))],
[wrap(new Date(2016, 10, 21, 15, 32))],
// strings
[wrap('')],
[wrap('\u0000\ud7ff\ue000\uffff')],
[wrap('(╯°□°)╯︵ ┻━┻')],
[wrap('a')],
[wrap('abc def')],
// latin small letter e + combining acute accent + latin small letter b
[wrap('e\u0301b')],
[wrap('æ')],
// latin small letter e with acute accent + latin small letter a
[wrap('\u00e9a')],
// blobs
[blob([])],
[blob([0])],
[blob([0, 1, 2, 3, 4])],
[blob([0, 1, 2, 4, 3])],
[blob([255])],
<|fim▁hole|> // resource names
[resource('projects/p1/databases/d1/documents/c1/doc1')],
[resource('projects/p1/databases/d1/documents/c1/doc2')],
[resource('projects/p1/databases/d1/documents/c1/doc2/c2/doc1')],
[resource('projects/p1/databases/d1/documents/c1/doc2/c2/doc2')],
[resource('projects/p1/databases/d1/documents/c10/doc1')],
[resource('projects/p1/databases/d1/documents/c2/doc1')],
[resource('projects/p2/databases/d2/documents/c1/doc1')],
[resource('projects/p2/databases/d2/documents/c1-/doc1')],
[resource('projects/p2/databases/d3/documents/c1-/doc1')],
// geo points
[geopoint(-90, -180)],
[geopoint(-90, 0)],
[geopoint(-90, 180)],
[geopoint(0, -180)],
[geopoint(0, 0)],
[geopoint(0, 180)],
[geopoint(1, -180)],
[geopoint(1, 0)],
[geopoint(1, 180)],
[geopoint(90, -180)],
[geopoint(90, 0)],
[geopoint(90, 180)],
// arrays
[wrap([])],
[wrap(['bar'])],
[wrap(['foo'])],
[wrap(['foo', 1])],
[wrap(['foo', 2])],
[wrap(['foo', '0'])],
// objects
[wrap({bar: 0})],
[wrap({bar: 0, foo: 1})],
[wrap({foo: 1})],
[wrap({foo: 2})],
[wrap({foo: '0'})],
];
for (let i = 0; i < groups.length; i++) {
for (const left of groups[i]) {
for (let j = 0; j < groups.length; j++) {
for (const right of groups[j]) {
let expected = order.primitiveComparator(i, j);
expect(order.compare(left, right)).to.equal(
expected,
'comparing ' +
left +
' (' +
JSON.stringify(left) +
') to ' +
right +
' (' +
JSON.stringify(right) +
') at (' +
i +
', ' +
j +
')'
);
expected = order.primitiveComparator(j, i);
expect(order.compare(right, left)).to.equal(
expected,
'comparing ' +
right +
' (' +
JSON.stringify(right) +
') to ' +
left +
' (' +
JSON.stringify(left) +
') at (' +
j +
', ' +
i +
')'
);
}
}
}
}
});
});<|fim▁end|> | |
<|file_name|>nl.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
const u = undefined;
function plural(n: number): number {
let i = Math.floor(Math.abs(n)), v = n.toString().replace(/^[^.]*\.?/, '').length;
if (i === 1 && v === 0) return 1;
return 5;
}
export default [<|fim▁hole|> ['zondag', 'maandag', 'dinsdag', 'woensdag', 'donderdag', 'vrijdag', 'zaterdag'],
['zo', 'ma', 'di', 'wo', 'do', 'vr', 'za']
],
u,
[
['J', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'],
['jan.', 'feb.', 'mrt.', 'apr.', 'mei', 'jun.', 'jul.', 'aug.', 'sep.', 'okt.', 'nov.', 'dec.'],
[
'januari', 'februari', 'maart', 'april', 'mei', 'juni', 'juli', 'augustus', 'september',
'oktober', 'november', 'december'
]
],
u, [['v.C.', 'n.C.'], ['v.Chr.', 'n.Chr.'], ['voor Christus', 'na Christus']], 1, [6, 0],
['dd-MM-y', 'd MMM y', 'd MMMM y', 'EEEE d MMMM y'],
['HH:mm', 'HH:mm:ss', 'HH:mm:ss z', 'HH:mm:ss zzzz'], ['{1} {0}', u, '{1} \'om\' {0}', u],
[',', '.', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
['#,##0.###', '#,##0%', '¤ #,##0.00;¤ -#,##0.00', '#E0'], '€', 'Euro', {
'AUD': ['AU$', '$'],
'CAD': ['C$', '$'],
'FJD': ['FJ$', '$'],
'JPY': ['JP¥', '¥'],
'SBD': ['SI$', '$'],
'THB': ['฿'],
'TWD': ['NT$'],
'USD': ['US$', '$'],
'XPF': [],
'XXX': []
},
plural
];<|fim▁end|> | 'nl', [['a.m.', 'p.m.'], u, u], u,
[
['Z', 'M', 'D', 'W', 'D', 'V', 'Z'], ['zo', 'ma', 'di', 'wo', 'do', 'vr', 'za'], |
<|file_name|>shortest_path.py<|end_file_name|><|fim▁begin|># coding: utf-8
# C++
# http://www.e-olimp.com/articles/21
# http://hardfire.ru/Dij_sparse
# Week 5
# TODO: hear lession + lession about heap<|fim▁hole|>- X = [S] [vert. processed so far]
- A[S] = 0 [computed shortest path distances]
[- B[S] = []] [path - help only]
- while X != V:
- among all edges (v, w) in E, with v in X, w not in X
- pick the one that minimize A[v] + l_vw [call it (v*, w*)]
- add w* to X
- set A[w*] := A[v*] + l_v*w*
[- set B[w*] := B[v*] or (v*, w*)]
WITH HEAP:
"""<|fim▁end|> |
"""
NAIVE:
Initialize: |
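
Editor's illustrative sketch (not part of the original shortest_path.py entry above): the docstring in that entry describes the naive O(n*m) form of Dijkstra's algorithm and leaves the heap-based variant as a TODO. The snippet below is a minimal, hedged implementation of exactly that naive description; the adjacency-dict graph format, the function name dijkstra_naive, and the tiny demo graph are assumptions made for the example, not taken from the original file.

def dijkstra_naive(graph, source):
    """Return shortest-path distances from source, scanning all crossing edges.

    graph: {vertex: [(neighbor, edge_length), ...]} -- assumed format.
    """
    A = {source: 0}          # A[v] = computed shortest-path distance to v
    X = {source}             # vertices processed so far
    while X != set(graph):
        best = None          # (score, v, w) minimizing A[v] + l_vw with v in X, w not in X
        for v in X:
            for w, length in graph[v]:
                if w not in X:
                    score = A[v] + length
                    if best is None or score < best[0]:
                        best = (score, v, w)
        if best is None:     # remaining vertices are unreachable from source
            break
        score, _, w_star = best
        X.add(w_star)        # add w* to X
        A[w_star] = score    # set A[w*] := A[v*] + l_v*w*
    return A

if __name__ == "__main__":
    g = {1: [(2, 1), (3, 4)], 2: [(3, 2), (4, 6)], 3: [(4, 3)], 4: []}
    print(dijkstra_naive(g, 1))   # expected: {1: 0, 2: 1, 3: 3, 4: 6}

The heap-based variant mentioned under "WITH HEAP:" would replace the inner double loop with a priority queue (e.g. Python's heapq) keyed on the tentative score, bringing the running time down to O(m log n); it is omitted here because the original docstring does not spell it out.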
<|file_name|>webpack.base.config.js<|end_file_name|><|fim▁begin|>var webpack = require('webpack');
module.exports = {
devtool: 'inline-source-map',
entry: {
'react-bootstrap-table': './src/index.js'
},
output: {
path: './dist',
filename: '[name].js',
library: 'ReactBootstrapTable',
libraryTarget: 'umd'
},<|fim▁hole|> {
'react': {
root: 'React',
commonjs2: 'react',
commonjs: 'react',
amd: 'react'
}
},
{
'react-dom': {
root: 'ReactDOM',
commonjs2: 'react-dom',
commonjs: 'react-dom',
amd: 'react-dom'
}
}
],
module: {
loaders: [{
test: /\.js$/,
exclude: /node_modules/,
loaders: ['babel']
}]
}
};<|fim▁end|> | externals: [ |
<|file_name|>files_77.js<|end_file_name|><|fim▁begin|>var searchData=
[
['wizard_2ec',['wizard.c',['../wizard_8c.html',1,'']]],
['wrapper_2ecpp',['wrapper.cpp',['../wrapper_8cpp.html',1,'']]],
['wxforms_2ec',['wxforms.c',['../wxforms_8c.html',1,'']]]<|fim▁hole|><|fim▁end|> | ]; |
<|file_name|>information.tsx<|end_file_name|><|fim▁begin|>import shx from 'shelljs'
import React from 'react'
import { render, Text, Box } from 'ink'
import Settings from '../../classes/settings'
import Pacman from '../../classes/pacman'
import Utils from '../../classes/utils'
import Title from './title'
/**
*
*/
export default async function information(verbose = false): Promise<void> {
const echo = Utils.setEcho(verbose)
console.clear()
const settings = new Settings()
settings.load()
/**
* nest
*/
const Nest = () => (
<Box borderStyle="round" marginRight={2}>
<Box marginRight={2}><Text>nest: <Text color="cyan">{settings.config.snapshot_dir}</Text></Text></Box>
<Box marginRight={2}><Text>name: <Text color="cyan">{settings.config.snapshot_prefix}{settings.config.snapshot_basename}</Text></Text></Box>
<Box marginRight={2}><Text>ovarium: <Text color="cyan">{settings.work_dir.path}</Text></Text></Box>
</Box>
)
//render(<Nest />)
/**
* Boot
* @returns
*/
const Boot = () => (
<Box borderStyle="round" marginRight={2}>
<Box marginRight={2}><Text>kernel: <Text color="cyan">{settings.kernel_image}</Text></Text></Box>
<Box marginRight={2}><Text>initrd.img: <Text color="cyan">{settings.initrd_image}</Text></Text></Box>
</Box>
)
render(<Boot />)
/**
* Live
*/
const Live = () => (
<Box borderStyle="round" marginRight={2}>
<Box marginRight={2}><Text>live user/passwd: <Text color="cyan">{settings.config.user_opt}/{settings.config.user_opt_passwd}</Text></Text></Box>
<Box marginRight={2}><Text>root passwd: <Text color="cyan">{settings.config.root_passwd}</Text></Text></Box>
</Box>
)
render(<Live />)
const distroId = shx.exec('lsb_release -is', { silent: true }).stdout.trim()
const releaseId = shx.exec('lsb_release -rs', { silent: true }).stdout.trim()
const codenameId = shx.exec('lsb_release -cs', { silent: true }).stdout.trim()
const Distro = () => (
<Box flexDirection='column'>
<Box borderStyle="round" marginRight={2} flexDirection='row' >
<Box marginRight={2}><Text>distro: <Text color="cyan">{distroId} {releaseId} {codenameId}</Text></Text></Box>
<Box marginRight={2}><Text>compatible: <Text color="cyan">{settings.distro.distroLike}/{settings.distro.releaseLike}/{settings.distro.codenameLikeId}</Text></Text></Box>
</Box>
</Box>
)
render(<Distro />)
const dependencies = await Pacman.prerequisitesCheck()
const configurations = Pacman.configurationCheck()
let uefi = Pacman.isUefi()
let installer = false
if (await Pacman.isInstalledGui()) {
installer = await Pacman.calamaresCheck()
}
const Ok = () => (
<Text backgroundColor="green">OK</Text>
)
render(<Ok />)
const Ko = () => (
<Text backgroundColor="red" color="white">KO</Text>
)
render(<Ko />)
/**
* CLI va verde se naked, altrimenti giallo
*/
const CLI = () => (
<Text backgroundColor="green">CLI</Text>
)
render(<CLI />)
const GUI = () => (
<Text backgroundColor="green">GUI</Text>
)
render(<GUI />)
let initType = ''
if (Utils.isSysvinit()) {
initType = 'sysvinit'
}
if (Utils.isSystemd()) {
if (initType === 'sysvinit') {
initType += '/'
}
initType = 'systemd'
}
const sysvinit = Utils.isSysvinit()
const systemd = Utils.isSystemd()
const Checks = () => (
<Box borderStyle="round" marginRight={2} flexDirection="row">
<Box marginRight={2}><Text>dependencies: {dependencies ? <Ok /> : <Ko />}</Text></Box>
<Box marginRight={2}><Text>configurations: {configurations ? <Ok /> : <Ko />}</Text></Box>
<Box marginRight={2}><Text>installer: {installer ? <GUI /> : <CLI />}</Text></Box>
<Box marginRight={2}><Text>uefi: {uefi ? <Ok /> : <Ko />}</Text></Box>
<Box marginRight={2}><Text>init: <Text color="cyan">{initType}</Text></Text></Box>
</Box>
)
render(<Checks />)
const Presentation = () => (
<>
<Box ><Text> </Text></Box>
<Box borderStyle="round" marginRight={2} flexDirection="column">
<Box ><Text>ISO images made with eggs can be installed with either the calamares GUI installer or the krill CLI installer. eggs includes krill installer inside.</Text></Box>
<Box><Text>krill installer is an opportunity if you are low on RAM, working on old distros or on architectures not yet supported by calamares.</Text></Box>
<Box><Text>Usage: sudo eggs install will allways run calamares if present, sudo eggs install --cli will force CLI installer.</Text></Box>
<Box ><Text> </Text></Box>
<Box flexDirection="row"><|fim▁hole|> <Box flexDirection="column">
<Box marginRight={2}><Text>blog </Text><Text color="cyan">https://penguins-eggs.net</Text></Box>
<Box marginRight={2}><Text>sources </Text><Text color="cyan">https://github.com/pieroproietti/penguins-eggs</Text></Box>
<Box marginRight={2}><Text>meeting </Text><Text color="cyan">https://meet.jit.si/PenguinsEggsMeeting</Text></Box>
</Box>
</Box>
</Box>
</>
)
render(<Presentation />)
/**
*
*/
const Main = () => (
<>
<Title />
<Box >
<Live />
<Nest />
<Boot />
</Box>
<Box>
<Distro />
<Checks />
</Box>
<Presentation />
</>
)
render(<Main />)
}<|fim▁end|> | <Box marginRight={1}><Text>Info: </Text></Box> |
<|file_name|>BenchmarkTest04982.java<|end_file_name|><|fim▁begin|>/**
* OWASP Benchmark Project v1.1
*
* This file is part of the Open Web Application Security Project (OWASP)
* Benchmark Project. For details, please see
* <a href="https://www.owasp.org/index.php/Benchmark">https://www.owasp.org/index.php/Benchmark</a>.
*
* The Benchmark is free software: you can redistribute it and/or modify it under the terms
* of the GNU General Public License as published by the Free Software Foundation, version 2.
*
* The Benchmark is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details
*
* @author Nick Sanidas <a href="https://www.aspectsecurity.com">Aspect Security</a>
* @created 2015
*/
package org.owasp.benchmark.testcode;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@WebServlet("/BenchmarkTest04982")
public class BenchmarkTest04982 extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doPost(request, response);
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
org.owasp.benchmark.helpers.SeparateClassRequest scr = new org.owasp.benchmark.helpers.SeparateClassRequest( request );
String param = scr.getTheParameter("foo");
String bar;
String guess = "ABC";
char switchTarget = guess.charAt(1); // condition 'B', which is safe
// Simple case statement that assigns param to bar on conditions 'A' or 'C'
switch (switchTarget) {
case 'A':
bar = param;
break;
case 'B':
bar = "bob";
break;
case 'C':
case 'D':
bar = param;
break;
default:
bar = "bob's your uncle";
break;
}
<|fim▁hole|>
Object[] obj = { bar, "b"};
response.getWriter().printf("notfoo",obj);
}
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>module.exports = {
addAttribute: require("./addAttribute"),
addAttributes: require("./addAttributes"),
addClass: require("./addClass"),
alignElement: require("./alignElement"),
appendChild: require("./appendChild"),
createElement: require("./createElement"),
createElementNS: require("./createElementNS"),
filterElements: require("./filterElements"),
findElement: require("./findElement"),
findElements: require("./findElements"),
findLastElement: require("./findLastElement"),
findNextElement: require("./findNextElement"),
findNextElements: require("./findNextElements"),
findParentElement: require("./findParentElement"),
findPreviousElement: require("./findPreviousElement"),
findPreviousElements: require("./findPreviousElements"),
findSiblingElement: require("./findSiblingElement"),
findSiblingElements: require("./findSiblingElements"),
getAllNextElements: require("./getAllNextElements"),
getAllPreviousElements: require("./getAllPreviousElements"),
getAllSiblingElements: require("./getAllSiblingElements"),
getAttribute: require("./getAttribute"),
getAttributes: require("./getAttributes"),
getBoundings: require("./getBoundings"),
getChildren: require("./getChildren"),
getDistributedElement: require("./getDistributedElement"),
getDistributedElements: require("./getDistributedElements"),
getElement: require("./getElement"),
getElementById: require("./getElementById"),
getElements: require("./getElements"),
getHTML: require("./getHTML"),
getHeight: require("./getHeight"),
getMargin: require("./getMargin"),
getNextElement: require("./getNextElement"),
getNode: require("./getNode"),
getNodes: require("./getNodes"),
getPadding: require("./getPadding"),
getParentElement: require("./getParentElement"),
getPreviousElement: require("./getPreviousElement"),
getSiblingElements: require("./getSiblingElements"),
getStyle: require("./getStyle"),<|fim▁hole|> getText: require("./getText"),
getValue: require("./getValue"),
getWidth: require("./getWidth"),
hasAttribute: require("./hasAttribute"),
hasChild: require("./hasChild"),
hasClass: require("./hasClass"),
listen: require("./listen"),
matches: require("./matches"),
onMutation: require("./onMutation"),
prependChild: require("./prependChild"),
preventDefault: require("./preventDefault"),
removeAttribute: require("./removeAttribute"),
removeAttributes: require("./removeAttributes"),
removeChild: require("./removeChild"),
removeClass: require("./removeClass"),
removeStyle: require("./removeStyle"),
removeStyles: require("./removeStyles"),
renameElement: require("./renameElement"),
replaceNode: require("./replaceNode"),
requestAnimationFrame: require("./requestAnimationFrame"),
setAttribute: require("./setAttribute"),
setAttributes: require("./setAttributes"),
setChildren: require("./setChildren"),
setHTML: require("./setHTML"),
setStyle: require("./setStyle"),
setStyles: require("./setStyles"),
setText: require("./setText"),
stop: require("./stop"),
stopPropagation: require("./stopPropagation"),
toggleAttribute: require("./toggleAttribute"),
toggleClass: require("./toggleClass"),
unlisten: require("./unlisten"),
updateElement: require("./updateElement"),
willBleedBottom: require("./willBleedBottom"),
willBleedHorizontally: require("./willBleedHorizontally"),
willBleedLeft: require("./willBleedLeft"),
willBleedRight: require("./willBleedRight"),
willBleedTop: require("./willBleedTop"),
willBleedVertically: require("./willBleedVertically")
};<|fim▁end|> | getStyles: require("./getStyles"),
getTag: require("./getTag"), |
<|file_name|>stanza.py<|end_file_name|><|fim▁begin|>import logging
from ..directives import directives_by_section
logger = logging.getLogger(__name__)
class Stanza(object):
"""
Subclass for config file stanzas.
In an HAProxy config file, a stanza is in the form of::
stanza header
directive
directive
directive
Stanza instances have a `header` attribute for the header and a list of
`lines`, one for each directive line.
"""
def __init__(self, section_name):
self.section_name = section_name
self.header = section_name
self.lines = []
def add_lines(self, lines):
"""
Simple helper method for adding multiple lines at once.
"""
for line in lines:
self.add_line(line)
def add_line(self, line):
"""
Adds a given line string to the list of lines, validating the line
first.
"""
if not self.is_valid_line(line):<|fim▁hole|> "Invalid line for %s section: '%s'",
self.section_name, line
)
return
self.lines.append(line)
def is_valid_line(self, line):
"""
Validates a given line against the associated "section" (e.g. 'global'
or 'frontend', etc.) of a stanza.
If a line represents a directive that shouldn't be within the stanza
it is rejected. See the `directives.json` file for a condensed look
at valid directives based on section.
"""
adjusted_line = line.strip().lower()
return any([
adjusted_line.startswith(directive)
for directive in directives_by_section[self.section_name]
])
def __str__(self):
"""
Returns the string representation of a Stanza, meant for use in
config file content.
if no lines are defined an empty string is returned.
"""
if not self.lines:
return ""
return self.header + "\n" + "\n".join([
"\t" + line
for line in self.lines
])<|fim▁end|> | logger.warn( |
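
Editor's illustrative sketch (not part of the original stanza.py entry above): the Stanza docstring describes how an HAProxy config stanza is assembled from a header line plus directive lines. The usage below is a hedged example only; it assumes the package's directives_by_section mapping lists "bind" and "default_backend" under the "frontend" section (that mapping lives in the package's directives.json, which is not shown in this entry).

# Hypothetical usage of the Stanza class defined above.
stanza = Stanza("frontend")
stanza.header = "frontend http-in"
stanza.add_lines([
    "bind *:80",                 # kept only if listed for the "frontend" section
    "default_backend servers",
])
print(stanza)
# Expected output (__str__ tab-indents each directive under the header):
# frontend http-in
#     bind *:80
#     default_backend servers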
<|file_name|>PlanItineraryRoute.js<|end_file_name|><|fim▁begin|>import React from 'react'
import { PlanItineraryContainer } from './index'
const PlanMapRoute = () => {
return (
<div><|fim▁hole|> <PlanItineraryContainer />
</div>
)
}
export default PlanMapRoute<|fim▁end|> | |
<|file_name|>RuDupElimLogRecord.cpp<|end_file_name|><|fim▁begin|>/**********************************************************************
// @@@ START COPYRIGHT @@@
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
// @@@ END COPYRIGHT @@@
//
**********************************************************************/
/* -*-C++-*-
******************************************************************************
*
* File: RuDupElimLogRecord.cpp
* Description: Implementation of class CRUIUDLogRecord
*
* Created: 06/12/2000
* Language: C++
*
*
******************************************************************************
*/
#include "dmprepstatement.h"
#include "ddobject.h"
#include "RuDupElimLogRecord.h"
#include "RuDeltaDef.h"
#include "RuException.h"
//--------------------------------------------------------------------------//
// CLASS CRUIUDLogRecord - PUBLIC AREA
//--------------------------------------------------------------------------//
//--------------------------------------------------------------------------//
// Constructors and destructor
//--------------------------------------------------------------------------//
CRUIUDLogRecord::
CRUIUDLogRecord(CDMSqlTupleDesc &ckDesc, Int32 updateBmpSize) :
// Persistent data members
ckTuple_(ckDesc),
syskey_(0),
epoch_(0),
opType_(0),
ignore_(0),
rangeSize_(0),
pUpdateBitmap_(NULL),
// Non-persistent data members
ckTag_(0),
action_(0)
{
if (0 != updateBmpSize)
{
pUpdateBitmap_ = new CRUUpdateBitmap(updateBmpSize);
}
}<|fim▁hole|>
CRUIUDLogRecord::CRUIUDLogRecord(const CRUIUDLogRecord &other) :
// Persistent data members
ckTuple_(other.ckTuple_),
syskey_(other.syskey_),
epoch_(other.epoch_),
opType_(other.opType_),
ignore_(other.ignore_),
rangeSize_(other.rangeSize_),
pUpdateBitmap_(NULL),
// Non-persistent data members
ckTag_(other.ckTag_)
{
CRUUpdateBitmap *pOtherUpdateBitmap = other.pUpdateBitmap_;
if (NULL != pOtherUpdateBitmap)
{
pUpdateBitmap_ = new CRUUpdateBitmap(*pOtherUpdateBitmap);
}
}
CRUIUDLogRecord::~CRUIUDLogRecord()
{
delete pUpdateBitmap_;
}
//--------------------------------------------------------------------------//
// CRUIUDLogRecord::CopyCKTupleValuesToParams()
//
// Copy the tuple's values to N consecutive parameters
// of the statement: firstParam, ... firstParam + N - 1.
//
//--------------------------------------------------------------------------//
void CRUIUDLogRecord::
CopyCKTupleValuesToParams(CDMPreparedStatement &stmt,
Int32 firstParam) const
{
Lng32 len = GetCKLength();
for (Int32 i=0; i<len; i++)
{
ckTuple_.GetItem(i).SetStatementParam(stmt, firstParam+i);
}
}
//--------------------------------------------------------------------------//
// CRUIUDLogRecord::Build()
//
// Retrieve the tuple's data from the result set and store it.
// The tuple's columns are contiguous in the result set,
// starting from the *startCKColumn* parameter.
//
//--------------------------------------------------------------------------//
void CRUIUDLogRecord::Build(CDMResultSet &rs, Int32 startCKColumn)
{
ReadControlColumns(rs, startCKColumn);
ReadCKColumns(rs, startCKColumn);
}
//--------------------------------------------------------------------------//
// CLASS CRUIUDLogRecord - PRIVATE AREA
//--------------------------------------------------------------------------//
//--------------------------------------------------------------------------//
// CRUIUDLogRecord::ReadControlColumns()
//
// Get the control columns from the result set (epoch, syskey etc).
// The operation_type column is stored as a bitmap, and hence
// requires decoding.
//
//--------------------------------------------------------------------------//
void CRUIUDLogRecord::ReadControlColumns(CDMResultSet &rs, Int32 startCKColumn)
{
// Performance optimization - switch the IsNull check off!
rs.PresetNotNullable(TRUE);
// Read the mandatory columns
epoch_ = rs.GetInt(CRUDupElimConst::OFS_EPOCH+1);
opType_ = rs.GetInt(CRUDupElimConst::OFS_OPTYPE+1);
if (FALSE == IsSingleRowOp())
{
// The range records are logged in the negative epochs.
// Logically, however, they belong to the positive epochs.
epoch_ = -epoch_;
}
if (FALSE == IsSingleRowOp() && FALSE == IsBeginRange())
{
// End-range record
rangeSize_ = rs.GetInt(CRUDupElimConst::OFS_RNGSIZE+1);
}
else
{
rangeSize_ = 0;
}
Int32 numCKCols = startCKColumn-2; // Count from 1 + syskey
if (CRUDupElimConst::NUM_IUD_LOG_CONTROL_COLS_EXTEND == numCKCols)
{
// This is DE level 3, read the optional columns
ignore_ = rs.GetInt(CRUDupElimConst::OFS_IGNORE+1);
// The update bitmap buffer must be setup
RUASSERT(NULL != pUpdateBitmap_);
// The update bitmap can be a null
rs.PresetNotNullable(FALSE);
rs.GetString(CRUDupElimConst::OFS_UPD_BMP+1,
pUpdateBitmap_->GetBuffer(),
pUpdateBitmap_->GetSize());
}
// Syskey is always the last column before the CK
syskey_ = rs.GetLargeInt(startCKColumn-1);
}
//--------------------------------------------------------------------------//
// CRUIUDLogRecord::ReadCKColumns()
//
// Retrieve the values of the clustering key columns and store
// them in an SQL tuple.
//
//--------------------------------------------------------------------------//
void CRUIUDLogRecord::
ReadCKColumns(CDMResultSet &rs, Int32 startCKColumn)
{
// Performance optimization - switch the IsNull check off!
rs.PresetNotNullable(TRUE);
Lng32 len = GetCKLength();
for (Int32 i=0; i<len; i++)
{
Int32 colIndex = i + startCKColumn;
ckTuple_.GetItem(i).Build(rs, colIndex);
}
rs.PresetNotNullable(FALSE);
}
// Define the class CRUIUDLogRecordList with this macro
DEFINE_PTRLIST(CRUIUDLogRecord);<|fim▁end|> | |
<|file_name|>KeyHandler.java<|end_file_name|><|fim▁begin|>package com.zalthrion.zylroth.handler;
import net.minecraft.client.settings.KeyBinding;
<|fim▁hole|>import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class KeyHandler {
@SideOnly(Side.CLIENT) public static KeyBinding openSummonGui;
public static void init() {
openSummonGui = new KeyBinding("key.zylroth:summongui", Keyboard.KEY_Z, "key.categories.zylroth");
ClientRegistry.registerKeyBinding(openSummonGui);
}
}<|fim▁end|> | import org.lwjgl.input.Keyboard;
import cpw.mods.fml.client.registry.ClientRegistry; |
<|file_name|>constref.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
const CONST_REF: &[u8; 3] = b"foo";
trait Foo {
const CONST_REF_DEFAULT: &'static [u8; 3] = b"bar";
const CONST_REF: &'static [u8; 3];
}
impl Foo for i32 {
const CONST_REF: &'static [u8; 3] = b"jjj";
}
impl Foo for i64 {
const CONST_REF_DEFAULT: &'static [u8; 3] = b"ggg";
const CONST_REF: &'static [u8; 3] = b"fff";
}
// Check that (associated and free) const references are not mistaken for a
// non-reference pattern (in which case they would be auto-dereferenced, making
// the types mismatched).
fn const_ref() -> bool {
let f = b"foo";
match f {
CONST_REF => true,
_ => false,
}
}
fn associated_const_ref() -> bool {<|fim▁hole|> }
}
pub fn main() {
assert!(const_ref());
assert!(associated_const_ref());
}<|fim▁end|> | match (b"bar", b"jjj", b"ggg", b"fff") {
(i32::CONST_REF_DEFAULT, i32::CONST_REF, i64::CONST_REF_DEFAULT, i64::CONST_REF) => true,
_ => false, |
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>const LogTestPlugin = require("../../helpers/LogTestPlugin");
/** @type {import("../../../").Configuration} */
module.exports = {
mode: "production",
entry: "./index",
stats: "normal",
plugins: [new LogTestPlugin()]<|fim▁hole|><|fim▁end|> | }; |
<|file_name|>gotoolchain.py<|end_file_name|><|fim▁begin|>#
# SPDX-License-Identifier: MIT
#
import glob
import os
import shutil
import tempfile
from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
class oeGoToolchainSelfTest(OESelftestTestCase):
"""
Test cases for OE's Go toolchain
"""
@staticmethod
def get_sdk_environment(tmpdir_SDKQA):
pattern = os.path.join(tmpdir_SDKQA, "environment-setup-*")
# FIXME: this is a very naive implementation
return glob.glob(pattern)[0]
@staticmethod
def get_sdk_toolchain():
bb_vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAIN_OUTPUTNAME'],
"meta-go-toolchain")
sdk_deploy = bb_vars['SDK_DEPLOY']
toolchain_name = bb_vars['TOOLCHAIN_OUTPUTNAME']
return os.path.join(sdk_deploy, toolchain_name + ".sh")
@classmethod
def setUpClass(cls):
super(oeGoToolchainSelfTest, cls).setUpClass()
cls.tmpdir_SDKQA = tempfile.mkdtemp(prefix='SDKQA')
cls.go_path = os.path.join(cls.tmpdir_SDKQA, "go")
# Build the SDK and locate it in DEPLOYDIR
bitbake("meta-go-toolchain")
cls.sdk_path = oeGoToolchainSelfTest.get_sdk_toolchain()
# Install the SDK into the tmpdir
runCmd("sh %s -y -d \"%s\"" % (cls.sdk_path, cls.tmpdir_SDKQA))
cls.env_SDK = oeGoToolchainSelfTest.get_sdk_environment(cls.tmpdir_SDKQA)
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True)
super(oeGoToolchainSelfTest, cls).tearDownClass()
def run_sdk_go_command(self, gocmd):
cmd = "cd %s; " % self.tmpdir_SDKQA
cmd = cmd + ". %s; " % self.env_SDK
cmd = cmd + "export GOPATH=%s; " % self.go_path<|fim▁hole|> proj = "github.com/golang"
name = "dep"
ver = "v0.3.1"
archive = ".tar.gz"
url = "https://%s/%s/archive/%s%s" % (proj, name, ver, archive)
runCmd("cd %s; wget %s" % (self.tmpdir_SDKQA, url))
runCmd("cd %s; tar -xf %s" % (self.tmpdir_SDKQA, ver+archive))
runCmd("mkdir -p %s/src/%s" % (self.go_path, proj))
runCmd("mv %s/dep-0.3.1 %s/src/%s/%s"
% (self.tmpdir_SDKQA, self.go_path, proj, name))
retv = self.run_sdk_go_command('build %s/%s/cmd/dep'
% (proj, name))
self.assertEqual(retv, 0,
msg="Running go build failed for %s" % name)<|fim▁end|> | cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd
return runCmd(cmd).status
def test_go_dep_build(self): |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2006 Damien Miller <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# $Id$
import editdist
import unittest
import random
test_vectors = (
( 'abc', 'abc', 0 ),
( 'abc', 'ab', 1 ),
( 'abc', 'abcd', 1 ),
( 'abc', 'bc', 1 ),
( 'abc', 'a', 2 ),
( 'abc', '', 3 ),
( '', '', 0 ),
( 'abc', 'acx', 2 ),
( 'abc', 'acxx', 3 ),
( 'abc', 'bcd', 2 ),
( 'a' * 1000, 'a' * 1000, 0 ),
( 'a' * 1000, 'b' * 1000, 1000),
)
def randstring(l):
a = "abcdefghijklmnopqrstuvwxyz"
r = ""
for i in range(0, l):<|fim▁hole|> def test_00__test_vectors(self):
for a, b, score in test_vectors:
self.assertEqual(editdist.distance(a, b), score)
def test_01__reversed_test_vectors(self):
for b, a, score in test_vectors:
self.assertEqual(editdist.distance(a, b), score)
def test_02__fuzz(self):
for i in range(0, 32) + range(128, 1024, 128):
for j in range(0, 32):
a = randstring(i)
b = randstring(j)
dist = editdist.distance(a, b)
self.assert_(dist >= 0)
def main():
unittest.main()
if __name__ == '__main__':
main()<|fim▁end|> | r += a[random.randint(0, len(a) - 1)]
return r
class TestRadix(unittest.TestCase): |
<|file_name|>recipe-181780.py<|end_file_name|><|fim▁begin|>from wxPython.wx import *<|fim▁hole|>from twisted.internet import reactor
class MyApp(wxApp):
def OnInit(self):
# Twisted Reactor Code
reactor.startRunning()
EVT_TIMER(self,999999,self.OnTimer)
self.timer=wxTimer(self,999999)
self.timer.Start(250,False)
# End Twisted Code
# Do whatever you need to do here
return True
def OnTimer(self,event):
reactor.runUntilCurrent()
reactor.doIteration(0)<|fim▁end|> | |
<|file_name|>qmultimediawidgets.py<|end_file_name|><|fim▁begin|>#############################################################################
##
## Copyright (C) 2017 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of the test suite of PySide2.
##
## $QT_BEGIN_LICENSE:GPL-EXCEPT$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3 as published by the Free Software
## Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################
'''Test cases for QtMultimediaWidgets'''
import unittest
from helper import UsesQApplication
from PySide2.QtMultimediaWidgets import QGraphicsVideoItem, QVideoWidget
from PySide2.QtWidgets import QGraphicsScene, QGraphicsView, QVBoxLayout, QWidget
from PySide2.QtCore import QTimer
class MyWidget(QWidget):<|fim▁hole|> layout = QVBoxLayout(self)
layout.addWidget(QVideoWidget())
graphicsScene = QGraphicsScene()
graphicsView = QGraphicsView(graphicsScene)
graphicsScene.addItem(QGraphicsVideoItem())
layout.addWidget(graphicsView)
class QMultimediaWidgetsTest(UsesQApplication):
def testMultimediaWidgets(self):
w = MyWidget()
w.show()
timer = QTimer.singleShot(100, self.app.quit)
self.app.exec_()
if __name__ == '__main__':
unittest.main()<|fim▁end|> | def __init__(self):
QWidget.__init__(self)
|
<|file_name|>app.ts<|end_file_name|><|fim▁begin|>/// <reference types="@argonjs/argon" />
/// <reference types="three" />
/// <reference types="dat-gui" />
/// <reference types="stats" />
// set up Argon
const app = Argon.init();
// set up THREE. Create a scene, a perspective camera and an object
// for the user's location
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera();
const userLocation = new THREE.Object3D();
scene.add(camera);
scene.add(userLocation);
// We use the standard WebGLRenderer when we only need WebGL-based content
const renderer = new THREE.WebGLRenderer({
alpha: true,
logarithmicDepthBuffer: true
});
// account for the pixel density of the device
renderer.setPixelRatio(window.devicePixelRatio);
app.view.element.appendChild(renderer.domElement);
// to easily control stuff on the display
const hud = new (<any>THREE).CSS3DArgonHUD();
// We put some elements in the index.html, for convenience.
// Here, we retrieve the description box and move it to the
// the CSS3DArgonHUD hudElements[0]. We only put it in the left
// hud since we'll be hiding it in stereo
var description = document.getElementById( 'description' );
hud.hudElements[0].appendChild(description);
app.view.element.appendChild(hud.domElement);
// let's show the rendering stats
var stats = new Stats();
hud.hudElements[0].appendChild( stats.dom );
// Tell argon what local coordinate system you want. The default coordinate
// frame used by Argon is Cesium's FIXED frame, which is centered at the center
// of the earth and oriented with the earth's axes.
// The FIXED frame is inconvenient for a number of reasons: the numbers used are
// large and cause issues with rendering, and the orientation of the user's "local
// view of the world" is different that the FIXED orientation (my perception of "up"
// does not correspond to one of the FIXED axes).
// Therefore, Argon uses a local coordinate frame that sits on a plane tangent to
// the earth near the user's current location. This frame automatically changes if the
// user moves more than a few kilometers.
// The EUS frame cooresponds to the typical 3D computer graphics coordinate frame, so we use
// that here. The other option Argon supports is localOriginEastNorthUp, which is
// more similar to what is used in the geospatial industry
app.context.setDefaultReferenceFrame(app.context.localOriginEastUpSouth);
// create a bit of animated 3D text that says "argon.js" to display
var uniforms = {
amplitude: { type: "f", value: 0.0 }
}
var argonTextObject = new THREE.Object3D();
argonTextObject.position.z = -0.5;
userLocation.add(argonTextObject);
var loader = new THREE.FontLoader();
loader.load( '../resources/fonts/helvetiker_bold.typeface.js', function ( font ) {
var textGeometry = new THREE.TextGeometry( "argon.js", {
font: <any>font,
size: 40,
height: 5,
curveSegments: 3,
bevelThickness: 2,
bevelSize: 1,
bevelEnabled: true
});
textGeometry.center();
var tessellateModifier = new (<any>THREE).TessellateModifier( 8 );
for ( var i = 0; i < 6; i ++ ) {
tessellateModifier.modify( textGeometry );
}
var explodeModifier = new (<any>THREE).ExplodeModifier();
explodeModifier.modify( textGeometry );
var numFaces = textGeometry.faces.length;
var bufferGeometry = new THREE.BufferGeometry().fromGeometry( textGeometry );
var colors = new Float32Array( numFaces * 3 * 3 );
var displacement = new Float32Array( numFaces * 3 * 3 );<|fim▁hole|> var color = new THREE.Color();
for ( var f = 0; f < numFaces; f ++ ) {
var index = 9 * f;
var h = 0.07 + 0.1 * Math.random();
var s = 0.5 + 0.5 * Math.random();
var l = 0.6 + 0.4 * Math.random();
color.setHSL( h, s, l );
var d = 5 + 20 * ( 0.5 - Math.random() );
for ( var i = 0; i < 3; i ++ ) {
colors[ index + ( 3 * i ) ] = color.r;
colors[ index + ( 3 * i ) + 1 ] = color.g;
colors[ index + ( 3 * i ) + 2 ] = color.b;
displacement[ index + ( 3 * i ) ] = d;
displacement[ index + ( 3 * i ) + 1 ] = d;
displacement[ index + ( 3 * i ) + 2 ] = d;
}
}
bufferGeometry.addAttribute( 'customColor', new THREE.BufferAttribute( colors, 3 ) );
bufferGeometry.addAttribute( 'displacement', new THREE.BufferAttribute( displacement, 3 ) );
var shaderMaterial = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: `
uniform float amplitude;
attribute vec3 customColor;
attribute vec3 displacement;
varying vec3 vNormal;
varying vec3 vColor;
void main() {
vNormal = normal;
vColor = customColor;
vec3 newPosition = position + normal * amplitude * displacement;
gl_Position = projectionMatrix * modelViewMatrix * vec4( newPosition, 1.0 );
}
`,
fragmentShader: `
varying vec3 vNormal;
varying vec3 vColor;
void main() {
const float ambient = 0.4;
vec3 light = vec3( 1.0 );
light = normalize( light );
float directional = max( dot( vNormal, light ), 0.0 );
gl_FragColor = vec4( ( directional + ambient ) * vColor, 1.0 );
}
`
});
var textMesh = new THREE.Mesh( bufferGeometry, shaderMaterial );
argonTextObject.add( textMesh );
argonTextObject.scale.set (0.001,0.001,0.001);
argonTextObject.position.z = -0.50;
// add an argon updateEvent listener to slowly change the text over time.
// we don't have to pack all our logic into one listener.
app.context.updateEvent.addEventListener(() => {
uniforms.amplitude.value = 1.0 + Math.sin( Date.now() * 0.001 * 0.5 );
});
});
app.vuforia.isAvailable().then(function(available) {
// vuforia not available on this platform
if (!available) {
console.warn("vuforia not available on this platform.");
return;
}
// tell argon to initialize vuforia for our app, using our license information.
app.vuforia.init({
encryptedLicenseData:
`-----BEGIN PGP MESSAGE-----
Version: OpenPGP.js v2.3.2
Comment: http://openpgpjs.org
wcFMA+gV6pi+O8zeARAAssqSfRHFNoDTNaEdU7i6rVRjht5U4fHnwihcmiOR
u15f5zQrYlT+g8xDM69uz0r2PlcoD6DWllgFhokkDmm6775Yg9I7YcguUTLF
V6t+wCp/IgSRl665KXmmHxEd/cXlcL6c9vIFT/heEOgK2hpsPXGfLl1BJKHc
CqFZ3I3uSCqoM2eDymNSWaiF0Ci6fp5LB7i1oVgB9ujI0b2SSf2NHUa0JfP9
GPSgveAc2GTysUCqk3dkgcH272Fzf4ldG48EoM48B7e0FLuEqx9V5nHxP3lh
9VRcAzA3S3LaujA+Kz9/JUOckyL9T/HON/h1iDDmsrScL4PaGWX5EX0yuvBw
FtWDauLbzAn5BSV+pw7dOmpbSGFAKKUnfhj9d1c5TVeaMkcBhxlkt7j7WvxS
uuURU3lrH8ytnQqPJzw2YSmxdeHSsAjAWnCRJSaUBlAMj0QsXkPGmMwN8EFS
9bbkJETuJoVDFfD472iGJi4NJXQ/0Cc4062J5AuYb71QeU8d9nixXlIDXW5U
fxo9/JpnZRSmWB9R6A2H3+e5dShWDxZF/xVpHNQWi3fQaSKWscQSvUJ83BBP
ltCvDo+gpD6tTt+3SnAThLuhl38ud7i1B8e0dOCKpuYeSG0rXQPY53n2+mGK
P1s0e0R7D5jztijwXvGPf45z232cztWsZWvuD2x42DXBwU0DAGn1enGTza0Q
B/j9y72hJrXx/TdOq85QDMBAA+Ocm9MSGylOqMOb9ozC+DVhhVx7doqS3xV9
h3jLf6V+OF6VIPHQBxAzH5svlktEOcTtjrjQxnUMmNuHbNQmZlA7uYsAqUpF
nWqPtJeHMi2F/gYYI/ApK3NGxzJe21dAf2cdp26wf/PoLusotCQH1YVpuR+V
18Mb8hMpPlB1j5SXnBlv98LxiOGlG6/lQWxpMzkMSZZTxMxa1pCsYNJKK9Bg
pFUyp4x0W4bQL1mRlqaO04cfoErfHqQzboS2b7WRrNy7YJ9rcBbmpbSc+GEY
T7ZUPs66EHgdp6uWYPbM1/oajHQBSPALiV65k06XlR4H+QG1ClkSIkbguKnu
mbpgF7wF5bAfjVVK/ST000Dzr09sgfm4wlIHRcezOzUgjIDVAQE63PznhzfZ
PEwOKC9ex9t9G+HjvhxICYFoxJLcHJ8ytTWEguNFqSIRTKWTgvAycvTFkJA/
pasmzov3Nouak8sE28r2NRpWbmI7muLvHfPWgy/rVczF+E1sOkbwtsdOgmym
yC9yB2IB3fhpLgU28cuI26+cx5IIke0jUgftvza8Oqa0gFZzvu8LaR/RsUdp
9/CRpiYFvvamNmCDIxxYKtAFCOkEni/5ht4poI2ZxHeWtjwZ2GBqby7BqpUu
xLXgv+3XpVq1sSUVurKbntDXUy3BwUwDju235GExYfIBEADMsiKpgf0sGKeW
a5uzMKZgnMm1MoRFBJNsjmBZrbsMxn6lf2ry3XM1xw/w15lepn4X/EMDLeRw
1m3vw4JL7dLY6e2oOllWyscCs+qE8Cwwx9x6q/gAMfwyrqMQ5EH8psIrRKZM
eZwGEnSIuUXtJu3ShyqZUqfbpXhr+TxUEXY7n7NuCRJeM70PWPZB5IC1h3Bp
kgxMRP4zHN2VG4PlcX2fLjpYsx1BHtR2T1biYxbk1AZ26s97XEMH7t9oe+8b
G+QZc500MmPOd+62UZmnOf/Dul9q/H/0+IlWlWSUTTZFtlL+LwR56t28xqca
FjUW8TXv6zYUvY7kk5Mlf2iWPA11wJuHaL5DnGaOoNgFVzicNQKy3SfeuYyp
rSwClM37jRKw+ZNGQDPSAhtrwYZxtndCw/jieqdxIbFG9Td+BunpJNE+KICN
jmnvG5JrzdueKAyTGqxNOtQnNDJYcg+p5rZVZHGQMN/22n2aiRpWhVAdJIXE
YgpsFH6R01N3Y55RFNrhusOhuWodj0XuS1EhknU47XyIpNVSZhWG/e+vXMHb
sN5cO0V7iCFrSxKXg6AwVneoWJC5anT9IabIcgAz07SjdjceC2MlW0vdjPks
FNygBlP9fTIjBGRzg5QQCh/LyyFUTr1rYRbF+4k5kBQ3MtD2a/lS3Sk1MK/+
Es9PfWaAoNLB+QGqSi1qtIhds22zelOtc2MGFxgwb/iNZOUccauv6OXThvDD
gzpn7gZi0+N7pOwx9lJM9QgC4hTMlo268vhNd/MMIPMeyp5n5D8p8ewAutZm
AcIJkP3h2tUG1V/RvVLF22F+ilh3h++7TeSfHdTdv6ArwDJXdQunHCp3020f
vhT6XG0ND+UMFtrptJe7+NoRpNg9oZo6kvwDzhPdIa2OlVjXmr25ueC8FlET
cYdFbIisK+std7/XMlkE5wlGkf9G0RoHsxXqB2Nsj8l3qF5UNyWD+/2Wh+L9
CDjUbY1FxwlVJ4UZ7lz+8jWHO5jYY99adPoATpUaWYxm9oPxz/QR4kvgvLjl
9Ti8379Y8qihzqsRmf6YLYyggknlt9Uyl2HjA+1zcwbDnb3I6g/XjTFUPy1D
xZqqSEuCNDLh7m1+GDA3KXQnLIqOdcxOVzyFCDtKI9c6b0D0ezNkxUjgkoIp
mxSSLDjzmHuPLsQVwqxP4KNU1gT7mXTnhlhsG2Vll/WZD+tuzGK8h9anf6/p
4pCk61Dhj1hmb9msTaK4FGhmBMtJ6kQ4SzGOfFKG5IElAHidYgd0iz7AqEzX
GttDkcHGM9iPIYUBY2r/538M/kxeVx5fBiWEkmWz5FMzqPRs3GZWYiAb2tnp
WSDXW3B1mwznwcCkyUP6OP/c6FFmb6Rag/ZaItVAvVjmA7tXICLJPhYIs9hE
I6zJSVZ81YtKg9Nb6Rx49qf18pQ1SWZNGrZrWaTJTLu4cu4c5v/czY5kyT0Y
8RqNUlI5hwWU8G9LpJ5jv8dssrgcweTG/PEbCkzqz0R6W6VgDUyqo6WSGgoS
B9or791lGcDazNT6CJ4/2Z1wBd4BSHkhSwfcPovGOleZFE24gLiG6puHyVjk
WEIir2WXzhypwLkG/dn+ZJW1ezOvTb4gVVILHrWhNh8=
=LoZg
-----END PGP MESSAGE-----`
}).then((api)=>{
// the vuforia API is ready, so we can start using it.
// tell argon to download a vuforia dataset. The .xml and .dat file must be together
// in the web directory, even though we just provide the .xml file url here
api.objectTracker.createDataSet("../resources/datasets/ArgonTutorial.xml").then( (dataSet)=>{
// the data set has been succesfully downloaded
// tell vuforia to load the dataset.
dataSet.load().then(()=>{
// when it is loaded, we retrieve a list of trackables defined in the
// dataset and set up the content for the target
const trackables = dataSet.getTrackables();
// tell argon we want to track a specific trackable. Each trackable
// has a Cesium entity associated with it, and is expressed in a
// coordinate frame relative to the camera. Because they are Cesium
// entities, we can ask for their pose in any coordinate frame we know
// about.
const gvuBrochureEntity = app.context.subscribeToEntityById(trackables["GVUBrochure"].id)
// create a THREE object to put on the trackable
const gvuBrochureObject = new THREE.Object3D;
scene.add(gvuBrochureObject);
// the updateEvent is called each time the 3D world should be
// rendered, before the renderEvent. The state of your application
// should be updated here.
app.context.updateEvent.addEventListener(() => {
// get the pose (in local coordinates) of the gvuBrochure target
const gvuBrochurePose = app.context.getEntityPose(gvuBrochureEntity);
// if the pose is known the target is visible, so set the
// THREE object to the location and orientation
if (gvuBrochurePose.poseStatus & Argon.PoseStatus.KNOWN) {
gvuBrochureObject.position.copy(<any>gvuBrochurePose.position);
gvuBrochureObject.quaternion.copy(<any>gvuBrochurePose.orientation);
}
// when the target is first seen after not being seen, the
// status is FOUND. Here, we move the 3D text object from the
// world to the target.
// when the target is first lost after being seen, the status
// is LOST. Here, we move the 3D text object back to the world
if (gvuBrochurePose.poseStatus & Argon.PoseStatus.FOUND) {
gvuBrochureObject.add(argonTextObject);
argonTextObject.position.z = 0;
} else if (gvuBrochurePose.poseStatus & Argon.PoseStatus.LOST) {
argonTextObject.position.z = -0.50;
userLocation.add(argonTextObject);
}
})
}).catch(function(err) {
console.log("could not load dataset: " + err.message);
});
// activate the dataset.
api.objectTracker.activateDataSet(dataSet);
});
}).catch(function(err) {
console.log("vuforia failed to initialize: " + err.message);
});
});
// the updateEvent is called each time the 3D world should be
// rendered, before the renderEvent. The state of your application
// should be updated here.
app.context.updateEvent.addEventListener(() => {
// get the position and orientation (the "pose") of the user
// in the local coordinate frame.
const userPose = app.context.getEntityPose(app.context.user);
// assuming we know the user's pose, set the position of our
// THREE user object to match it
if (userPose.poseStatus & Argon.PoseStatus.KNOWN) {
userLocation.position.copy(<any>userPose.position);
}
});
// renderEvent is fired whenever argon wants the app to update its display
app.renderEvent.addEventListener(() => {
// update the rendering stats
stats.update();
// if we have 1 subView, we're in mono mode. If more, stereo.
var monoMode = (app.view.getSubviews()).length == 1;
// set the renderer to know the current size of the viewport.
// This is the full size of the viewport, which would include
// both views if we are in stereo viewing mode
const viewport = app.view.getViewport();
renderer.setSize(viewport.width, viewport.height);
hud.setSize(viewport.width, viewport.height);
// there is 1 subview in monocular mode, 2 in stereo mode
for (let subview of app.view.getSubviews()) {
// set the position and orientation of the camera for
// this subview
camera.position.copy(<any>subview.pose.position);
camera.quaternion.copy(<any>subview.pose.orientation);
// the underlying system provide a full projection matrix
// for the camera.
camera.projectionMatrix.fromArray(<any>subview.projectionMatrix);
// set the viewport for this view
let {x,y,width,height} = subview.viewport;
renderer.setViewport(x,y,width,height);
// set the webGL rendering parameters and render this view
renderer.setScissor(x,y,width,height);
renderer.setScissorTest(true);
renderer.render(scene, camera);
// adjust the hud, but only in mono
if (monoMode) {
hud.setViewport(x,y,width,height, subview.index);
hud.render(subview.index);
}
}
})<|fim▁end|> | |
<|file_name|>stochrsi.spec.ts<|end_file_name|><|fim▁begin|>import * as chai from "chai";
import * as path from "path";
import * as indicators from "../../../src/indicators/";
import { TestDataFactory } from "../../testData";
const jsonfile = require("jsonfile");
chai.should();
describe("STOCHRSI Indicator", () => {
let taResultFile: string;
let sourceData: any;
let taResultData: any;
let indicator: indicators.STOCHRSI;
let indicatorResults: Array<{ fastK: number, fastD: number }>;
let indicatorOnDataRasied: boolean = false;
const timePeriod: number = 14;
const fastKTimePeriod: number = 5;
const fastDMAType: indicators.MA_TYPE = indicators.MA_TYPE.SMA;
const fastDTimePeriod: number = 3;
beforeEach(() => {
taResultFile = path.resolve("./test/talib-results/stochrsi.json");
sourceData = TestDataFactory.getInstance().sourceData;
taResultData = jsonfile.readFileSync(taResultFile);
indicatorResults = new Array<{ fastK: number, fastD: number }>(sourceData.close.length - taResultData.begIndex);
});
describe("when constructing", () => {
beforeEach(() => {
indicator = new indicators.STOCHRSI(timePeriod, fastKTimePeriod, fastDTimePeriod, fastDMAType);
});
it("should set the indicator name", () => {
indicator.name.should.equal(indicators.STOCHRSI.INDICATOR_NAME);
});
it("should set the indicator description", () => {
indicator.description.should.equal(indicators.STOCHRSI.INDICATOR_DESCR);
});
it("should match the talib lookback", () => {
taResultData.begIndex.should.equal(indicator.lookback);
});
});
describe("when constructing with explicit non default arguments", () => {
beforeEach(() => {
indicator = new indicators.STOCHRSI(timePeriod + 1, fastKTimePeriod + 1, fastDTimePeriod + 1, indicators.MA_TYPE.EMA);
});
it("should set the timePeriod", () => {
indicator.timePeriod.should.equal(timePeriod + 1);
});
it("should set the fastKTimePeriod", () => {
indicator.fastKTimePeriod.should.equal(fastKTimePeriod + 1);
});
it("should set the slowKTimePeriod", () => {
indicator.fastDTimePeriod.should.equal(fastDTimePeriod + 1);
});
it("should set the fastDMAType", () => {
indicator.fastDMAType.should.equal(indicators.MA_TYPE.EMA);
});
});
describe("when constructing with default arguments", () => {
beforeEach(() => {
indicator = new indicators.STOCHRSI();
});
it("should set the timePeriod", () => {
indicator.timePeriod.should.equal(indicators.STOCHRSI.TIMEPERIOD_DEFAULT);
});
it("should set the fastKTimePeriod", () => {
indicator.fastKTimePeriod.should.equal(indicators.STOCHRSI.FASTKPERIOD_DEFAULT);
});
it("should set the fastDTimePeriod", () => {
indicator.fastDTimePeriod.should.equal(indicators.STOCHRSI.FASTDPERIOD_DEFAULT);
});
it("should set the fastDMAType", () => {
indicator.fastDMAType.should.equal(indicators.STOCHRSI.FASTDMATYPE_DEFAULT);
});
});
describe("when constructing with timePeriod less than the minimum", () => {
let exception: Error;
beforeEach(() => {
try {
indicator = new indicators.STOCHRSI(0);
} catch (error) {
exception = error;
}
});
it("should return a correctly formatted error", () => {
const message = indicators.generateMinTimePeriodError(indicator.name, indicators.STOCHRSI.TIMEPERIOD_MIN, 0);
exception.message.should.equal(message);
});
});
describe("when constructing with fastKTimePeriod less than the minimum", () => {
let exception: Error;
beforeEach(() => {
try {
indicator = new indicators.STOCHRSI(timePeriod, 0);
} catch (error) {
exception = error;
}
});
it("should return a correctly formatted error", () => {
const message = indicators.generateMinTimePeriodError(indicator.name, indicators.STOCHRSI.FASTKPERIOD_MIN, 0);
exception.message.should.equal(message);
});
});
describe("when constructing with fastDTimePeriod less than the minimum", () => {
let exception: Error;
beforeEach(() => {
try {
indicator = new indicators.STOCHRSI(timePeriod, fastKTimePeriod, 0);
} catch (error) {
exception = error;
}
});
it("should return a correctly formatted error", () => {
const message = indicators.generateMinTimePeriodError(indicator.name, indicators.STOCHRSI.FASTDPERIOD_MIN, 0);
exception.message.should.equal(message);
});
});
describe("when receiving all tick data", () => {
beforeEach(() => {
indicator = new indicators.STOCHRSI(timePeriod, fastKTimePeriod, fastDTimePeriod, fastDMAType);
let idx = 0;
sourceData.close.forEach((value: number, index: number) => {
if (indicator.receiveData(sourceData.close[index])) {
indicatorResults[idx] = { "fastK": 0, "fastD": 0 };
indicatorResults[idx].fastD = indicator.fastD;
indicatorResults[idx].fastK = indicator.fastK;
idx++;
}
});
});
it("should match the talib fastD results", () => {
for (let i = 0; i < taResultData.result.outFastD.length; i++) {
isNaN(indicatorResults[i].fastD).should.be.false;
taResultData.result.outFastD[i].should.be.closeTo(indicatorResults[i].fastD, 0.001);
}
});
it("should match the talib fastK results", () => {
for (let i = 0; i < taResultData.result.outFastK.length; i++) {
isNaN(indicatorResults[i].fastK).should.be.false;
taResultData.result.outFastK[i].should.be.closeTo(indicatorResults[i].fastK, 0.001);
}
});
});
describe("when receiving less tick data than the lookback period", () => {
beforeEach(() => {
indicator = new indicators.STOCHRSI(timePeriod, fastKTimePeriod, fastDTimePeriod, fastDMAType);
let idx = 0;
for (let index = 0; index < indicator.lookback; index++) {
if (indicator.receiveData(sourceData.close[index])) {
indicatorResults[idx] = { "fastK": 0, "fastD": 0 };
indicatorResults[idx].fastD = indicator.fastD;
indicatorResults[idx].fastK = indicator.fastK;
idx++;
}
};
});
<|fim▁hole|> it("should not have raised the ondata event", () => {
indicatorOnDataRasied.should.equal(false);
});
});
describe("when receiving tick data equal to the lookback period", () => {
beforeEach(() => {
indicator = new indicators.STOCHRSI(timePeriod, fastKTimePeriod, fastDTimePeriod, fastDMAType);
let idx = 0;
indicatorOnDataRasied = false;
indicator.on("data", () => {
indicatorOnDataRasied = true;
});
for (let index = 0; index <= indicator.lookback; index++) {
if (indicator.receiveData(sourceData.close[index])) {
indicatorResults[idx] = { "fastK": 0, "fastD": 0 };
indicatorResults[idx].fastD = indicator.fastD;
indicatorResults[idx].fastK = indicator.fastK;
idx++;
}
};
});
it("the indicator should indicate that it is ready to be consumed", () => {
indicator.isReady.should.equal(true);
});
it("should have raised the ondata event", () => {
indicatorOnDataRasied.should.equal(true);
});
});
});<|fim▁end|> | it("the indicator should not indicate that it is ready to be consumed", () => {
indicator.isReady.should.equal(false);
});
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# TWX documentation build configuration file, created by
# sphinx-quickstart on Sat Jun 27 15:07:02 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'TWX'
copyright = '2015, Vince Castellano, Phillip Lopo'
author = 'Vince Castellano, Phillip Lopo'<|fim▁hole|>#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0b3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
html_theme_options = {
'github_user': 'datamachine',
'github_repo': 'twx',
'description': 'Telegram Bot API and MTProto Clients',
'github_banner': True,
'github_button': True,
'show_powered_by': False,
#'link': '#0088cc',
#'sidebar_link': '#0088cc',
#'anchor': '#0088cc',
'gray_1': '#0088cc',
'gray_2': '#ecf3f8',
#'gray_3': '#0088cc',
#'pre_bg': '#ecf3f8',
#'font_family': "'Lucida Grande', 'Lucida Sans Unicode', Arial, Helvetica, Verdana, sans-serif",
#'head_font_family': "'Lucida Grande', 'Lucida Sans Unicode', Arial, Helvetica, Verdana, sans-serif"
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'TWXdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'TWX.tex', 'TWX Documentation',
'Vince Castellano, Phillip Lopo', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'twx', 'TWX Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'TWX', 'TWX Documentation',
author, 'TWX', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}<|fim▁end|> |
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents. |
<|file_name|>DrawServiceUtil.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2000-2013 Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package net.sareweb.emg.service;
<|fim▁hole|>import com.liferay.portal.kernel.util.ReferenceRegistry;
import com.liferay.portal.service.InvokableService;
/**
* Provides the remote service utility for Draw. This utility wraps
* {@link net.sareweb.emg.service.impl.DrawServiceImpl} and is the
* primary access point for service operations in application layer code running
* on a remote server. Methods of this service are expected to have security
* checks based on the propagated JAAS credentials because this service can be
* accessed remotely.
*
* @author A.Galdos
* @see DrawService
* @see net.sareweb.emg.service.base.DrawServiceBaseImpl
* @see net.sareweb.emg.service.impl.DrawServiceImpl
* @generated
*/
public class DrawServiceUtil {
/*
* NOTE FOR DEVELOPERS:
*
* Never modify this class directly. Add custom service methods to {@link net.sareweb.emg.service.impl.DrawServiceImpl} and rerun ServiceBuilder to regenerate this class.
*/
/**
* Returns the Spring bean ID for this bean.
*
* @return the Spring bean ID for this bean
*/
public static java.lang.String getBeanIdentifier() {
return getService().getBeanIdentifier();
}
/**
* Sets the Spring bean ID for this bean.
*
* @param beanIdentifier the Spring bean ID for this bean
*/
public static void setBeanIdentifier(java.lang.String beanIdentifier) {
getService().setBeanIdentifier(beanIdentifier);
}
public static java.lang.Object invokeMethod(java.lang.String name,
java.lang.String[] parameterTypes, java.lang.Object[] arguments)
throws java.lang.Throwable {
return getService().invokeMethod(name, parameterTypes, arguments);
}
public static java.util.List<net.sareweb.emg.model.Draw> getDrawsNewerThanDate(
long date) throws com.liferay.portal.kernel.exception.SystemException {
return getService().getDrawsNewerThanDate(date);
}
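	// Illustrative usage sketch (not generated by ServiceBuilder): fetching draws
	// updated since a given timestamp from application-layer code. Treating the
	// date parameter as epoch milliseconds is an assumption; the actual semantics
	// are defined by DrawServiceImpl.
	//
	//     long oneHourAgo = System.currentTimeMillis() - (60 * 60 * 1000L);
	//     java.util.List<net.sareweb.emg.model.Draw> recent =
	//         DrawServiceUtil.getDrawsNewerThanDate(oneHourAgo);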
public static void clearService() {
_service = null;
}
public static DrawService getService() {
if (_service == null) {
InvokableService invokableService = (InvokableService)PortletBeanLocatorUtil.locate(ClpSerializer.getServletContextName(),
DrawService.class.getName());
if (invokableService instanceof DrawService) {
_service = (DrawService)invokableService;
}
else {
_service = new DrawServiceClp(invokableService);
}
ReferenceRegistry.registerReference(DrawServiceUtil.class,
"_service");
}
return _service;
}
/**
* @deprecated As of 6.2.0
*/
public void setService(DrawService service) {
}
private static DrawService _service;
}<|fim▁end|> | import com.liferay.portal.kernel.bean.PortletBeanLocatorUtil; |
<|file_name|>http_tracker_connection.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2014, Arvid Norberg
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the distribution.
* Neither the name of the author nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#include <vector>
#include <list>
#include <cctype>
#include <algorithm>
#include "libtorrent/config.hpp"
#include "libtorrent/gzip.hpp"
#include "libtorrent/socket_io.hpp"
#ifdef _MSC_VER
#pragma warning(push, 1)
#endif
#include <boost/bind.hpp>
#ifdef _MSC_VER
#pragma warning(pop)
#endif
#include "libtorrent/tracker_manager.hpp"
#include "libtorrent/http_tracker_connection.hpp"
#include "libtorrent/http_connection.hpp"
#include "libtorrent/entry.hpp"
#include "libtorrent/bencode.hpp"
#include "libtorrent/torrent.hpp"
#include "libtorrent/io.hpp"
#include "libtorrent/socket.hpp"
#include "libtorrent/aux_/session_impl.hpp"
#include "libtorrent/broadcast_socket.hpp" // for is_local
using namespace libtorrent;
namespace libtorrent
{
#if TORRENT_USE_I2P
// defined in torrent_info.cpp
bool is_i2p_url(std::string const& url);
#endif
http_tracker_connection::http_tracker_connection(
io_service& ios
, connection_queue& cc
, tracker_manager& man
, tracker_request const& req
, boost::weak_ptr<request_callback> c
, aux::session_impl& ses
, std::string const& auth
#if TORRENT_USE_I2P
, i2p_connection* i2p_conn
#endif
)
: tracker_connection(man, req, ios, c)
, m_man(man)
, m_ses(ses)
, m_cc(cc)
, m_ios(ios)
#if TORRENT_USE_I2P
, m_i2p_conn(i2p_conn)
#endif
{}
void http_tracker_connection::start()
{
// TODO: 0 support authentication (i.e. user name and password) in the URL
std::string url = tracker_req().url;
if (tracker_req().kind == tracker_request::scrape_request)
{
// find and replace "announce" with "scrape"
// in request
std::size_t pos = url.find("announce");
if (pos == std::string::npos)
{
tracker_connection::fail(error_code(errors::scrape_not_available));
return;
}
url.replace(pos, 8, "scrape");
}
#if TORRENT_USE_I2P
bool i2p = is_i2p_url(url);
#else
static const bool i2p = false;
#endif
aux::session_settings const& settings = m_ses.settings();
// if request-string already contains
// some parameters, append an ampersand instead
// of a question mark
size_t arguments_start = url.find('?');
if (arguments_start != std::string::npos)
url += "&";
else
url += "?";
if (tracker_req().kind == tracker_request::announce_request)
{
const char* event_string[] = {"completed", "started", "stopped", "paused"};
char str[1024];
const bool stats = tracker_req().send_stats;
snprintf(str, sizeof(str)
, "info_hash=%s"
"&peer_id=%s"
"&port=%d"
"&uploaded=%" PRId64
"&downloaded=%" PRId64
"&left=%" PRId64
"&corrupt=%" PRId64
"&key=%08X"
"%s%s" // event
"&numwant=%d"
"&compact=1"
"&no_peer_id=1"
, escape_string((const char*)&tracker_req().info_hash[0], 20).c_str()
, escape_string((const char*)&tracker_req().pid[0], 20).c_str()
// the i2p tracker seems to verify that the port is not 0,
// even though it ignores it otherwise
, i2p ? 1 : tracker_req().listen_port
, stats ? tracker_req().uploaded : 0
, stats ? tracker_req().downloaded : 0
, stats ? tracker_req().left : 0
, stats ? tracker_req().corrupt : 0
, tracker_req().key
, (tracker_req().event != tracker_request::none) ? "&event=" : ""
, (tracker_req().event != tracker_request::none) ? event_string[tracker_req().event - 1] : ""
, tracker_req().num_want);
url += str;
#ifndef TORRENT_DISABLE_ENCRYPTION
if (m_ses.settings().get_int(settings_pack::in_enc_policy) != settings_pack::pe_disabled
&& m_ses.settings().get_bool(settings_pack::announce_crypto_support))
url += "&supportcrypto=1";
#endif
if (stats && m_ses.settings().get_bool(settings_pack::report_redundant_bytes))
{
url += "&redundant=";
url += to_string(tracker_req().redundant).elems;
}
if (!tracker_req().trackerid.empty())
{
std::string id = tracker_req().trackerid;
url += "&trackerid=";
url += escape_string(id.c_str(), id.length());
}
#if TORRENT_USE_I2P
if (i2p)
{
url += "&ip=";
url += escape_string(m_i2p_conn->local_endpoint().c_str()
, m_i2p_conn->local_endpoint().size());
url += ".i2p";
}
else
#endif
if (!m_ses.settings().get_bool(settings_pack::anonymous_mode))
{
std::string announce_ip = settings.get_str(settings_pack::announce_ip);
if (!announce_ip.empty())
{
url += "&ip=" + escape_string(announce_ip.c_str(), announce_ip.size());
}
else if (m_ses.settings().get_bool(settings_pack::announce_double_nat)
&& is_local(m_ses.listen_address()))
{
// only use the global external listen address here
// if it turned out to be on a local network
// since otherwise the tracker should use our
// source IP to determine our origin
url += "&ip=" + print_address(m_ses.listen_address());
}
}
}
m_tracker_connection.reset(new http_connection(m_ios, m_cc, m_ses.m_host_resolver
, boost::bind(&http_tracker_connection::on_response, self(), _1, _2, _3, _4)
, true, settings.get_int(settings_pack::max_http_recv_buffer_size)
, boost::bind(&http_tracker_connection::on_connect, self(), _1)
, boost::bind(&http_tracker_connection::on_filter, self(), _1, _2)
#ifdef TORRENT_USE_OPENSSL
, tracker_req().ssl_ctx
#endif
));
int timeout = tracker_req().event==tracker_request::stopped
?settings.get_int(settings_pack::stop_tracker_timeout)
:settings.get_int(settings_pack::tracker_completion_timeout);
// when sending stopped requests, prefer the cached DNS entry
// to avoid being blocked for slow or failing responses. Chances
// are that we're shutting down, and this should be a best-effort
// attempt. It's not worth stalling shutdown.
proxy_settings ps = m_ses.proxy();
m_tracker_connection->get(url, seconds(timeout)
, tracker_req().event == tracker_request::stopped ? 2 : 1
, &ps, 5, settings.get_bool(settings_pack::anonymous_mode)
? "" : settings.get_str(settings_pack::user_agent)
, bind_interface()
, tracker_req().event == tracker_request::stopped
? resolver_interface::prefer_cache
: 0
#if TORRENT_USE_I2P
, m_i2p_conn
#endif
);
// the url + 100 estimated header size
sent_bytes(url.size() + 100);
#if defined(TORRENT_VERBOSE_LOGGING) || defined(TORRENT_LOGGING)
boost::shared_ptr<request_callback> cb = requester();
if (cb)
{
cb->debug_log("==> TRACKER_REQUEST [ url: %s ]", url.c_str());
}
#endif
}
void http_tracker_connection::close()
{
if (m_tracker_connection)
{
m_tracker_connection->close();
m_tracker_connection.reset();
}
tracker_connection::close();
}
void http_tracker_connection::on_filter(http_connection& c, std::vector<tcp::endpoint>& endpoints)
{
if (tracker_req().apply_ip_filter == false) return;
// remove endpoints that are filtered by the IP filter
for (std::vector<tcp::endpoint>::iterator i = endpoints.begin();
i != endpoints.end();)
{
if (m_ses.m_ip_filter.access(i->address()) == ip_filter::blocked)
i = endpoints.erase(i);
else
++i;
}
#if defined(TORRENT_VERBOSE_LOGGING) || defined(TORRENT_LOGGING)
boost::shared_ptr<request_callback> cb = requester();
if (cb)
{
cb->debug_log("*** TRACKER_FILTER");
}
#endif
if (endpoints.empty())
fail(error_code(errors::banned_by_ip_filter));
}
void http_tracker_connection::on_connect(http_connection& c)
{
error_code ec;
tcp::endpoint ep = c.socket().remote_endpoint(ec);
m_tracker_ip = ep.address();
boost::shared_ptr<request_callback> cb = requester();
}
void http_tracker_connection::on_response(error_code const& ec
, http_parser const& parser, char const* data, int size)
{
// keep this alive
boost::intrusive_ptr<http_tracker_connection> me(this);
if (ec && ec != asio::error::eof)
{
fail(ec);
return;
}
if (!parser.header_finished())
{
fail(asio::error::eof);
return;
}
if (parser.status_code() != 200)
{
fail(error_code(parser.status_code(), get_http_category())
, parser.status_code(), parser.message().c_str());
return;
}
if (ec && ec != asio::error::eof)
{
fail(ec, parser.status_code());
return;
}
received_bytes(size + parser.body_start());
// handle tracker response
error_code ecode;
boost::shared_ptr<request_callback> cb = requester();
if (!cb)
{
close();
return;
}
tracker_response resp = parse_tracker_response(data, size, ecode
, tracker_req().kind == tracker_request::scrape_request
, tracker_req().info_hash);
if (!resp.warning_message.empty())
cb->tracker_warning(tracker_req(), resp.warning_message);
if (ecode)
{
fail(ecode, parser.status_code());
close();
return;
}
if (!resp.failure_reason.empty())
{
fail(error_code(errors::tracker_failure), parser.status_code()
, resp.failure_reason.c_str(), resp.interval, resp.min_interval);
close();
return;
}
// do slightly different things for scrape requests
if (tracker_req().kind == tracker_request::scrape_request)
{
cb->tracker_scrape_response(tracker_req(), resp.complete
, resp.incomplete, resp.downloaded, resp.downloaders);
}
else
{
std::list<address> ip_list;
if (m_tracker_connection)
{
error_code ec;
ip_list.push_back(
m_tracker_connection->socket().remote_endpoint(ec).address());
std::vector<tcp::endpoint> const& epts = m_tracker_connection->endpoints();
for (std::vector<tcp::endpoint>::const_iterator i = epts.begin()
, end(epts.end()); i != end; ++i)
{
ip_list.push_back(i->address());
}
}
cb->tracker_response(tracker_req(), m_tracker_ip, ip_list, resp);
}
close();
}
bool extract_peer_info(lazy_entry const& info, peer_entry& ret, error_code& ec)
{
// extract peer id (if any)
if (info.type() != lazy_entry::dict_t)
{
ec.assign(errors::invalid_peer_dict, get_libtorrent_category());
return false;
}
lazy_entry const* i = info.dict_find_string("peer id");
if (i != 0 && i->string_length() == 20)
{
std::copy(i->string_ptr(), i->string_ptr()+20, ret.pid.begin());
}
else
{
// if there's no peer_id, just initialize it to a bunch of zeroes
std::fill_n(ret.pid.begin(), 20, 0);<|fim▁hole|> if (i == 0)
{
ec.assign(errors::invalid_tracker_response, get_libtorrent_category());
return false;
}
ret.hostname = i->string_value();
// extract port
i = info.dict_find_int("port");
if (i == 0)
{
ec.assign(errors::invalid_tracker_response, get_libtorrent_category());
return false;
}
ret.port = (unsigned short)i->int_value();
return true;
}
tracker_response parse_tracker_response(char const* data, int size, error_code& ec
, bool scrape_request, sha1_hash scrape_ih)
{
tracker_response resp;
lazy_entry e;
int res = lazy_bdecode(data, data + size, e, ec);
if (ec) return resp;
if (res != 0 || e.type() != lazy_entry::dict_t)
{
ec.assign(errors::invalid_tracker_response, get_libtorrent_category());
return resp;
}
int interval = int(e.dict_find_int_value("interval", 0));
// if no interval is specified, default to 30 minutes
if (interval == 0) interval = 1800;
int min_interval = int(e.dict_find_int_value("min interval", 30));
resp.interval = interval;
resp.min_interval = min_interval;
lazy_entry const* tracker_id = e.dict_find_string("tracker id");
if (tracker_id)
resp.trackerid = tracker_id->string_value();
// parse the response
lazy_entry const* failure = e.dict_find_string("failure reason");
if (failure)
{
resp.failure_reason = failure->string_value();
ec.assign(errors::tracker_failure, get_libtorrent_category());
return resp;
}
lazy_entry const* warning = e.dict_find_string("warning message");
if (warning)
resp.warning_message = warning->string_value();
if (scrape_request)
{
lazy_entry const* files = e.dict_find_dict("files");
if (files == 0)
{
ec.assign(errors::invalid_files_entry, get_libtorrent_category());
return resp;
}
lazy_entry const* scrape_data = files->dict_find_dict(
scrape_ih.to_string());
if (scrape_data == 0)
{
ec.assign(errors::invalid_hash_entry, get_libtorrent_category());
return resp;
}
resp.complete = int(scrape_data->dict_find_int_value("complete", -1));
resp.incomplete = int(scrape_data->dict_find_int_value("incomplete", -1));
resp.downloaded = int(scrape_data->dict_find_int_value("downloaded", -1));
resp.downloaders = int(scrape_data->dict_find_int_value("downloaders", -1));
return resp;
}
// look for optional scrape info
resp.complete = int(e.dict_find_int_value("complete", -1));
resp.incomplete = int(e.dict_find_int_value("incomplete", -1));
resp.downloaded = int(e.dict_find_int_value("downloaded", -1));
lazy_entry const* peers_ent = e.dict_find("peers");
if (peers_ent && peers_ent->type() == lazy_entry::string_t)
{
char const* peers = peers_ent->string_ptr();
int len = peers_ent->string_length();
resp.peers4.reserve(len / 6);
for (int i = 0; i < len; i += 6)
{
if (len - i < 6) break;
ipv4_peer_entry p;
error_code ec;
p.ip = detail::read_v4_address(peers).to_v4().to_bytes();
p.port = detail::read_uint16(peers);
resp.peers4.push_back(p);
}
}
else if (peers_ent && peers_ent->type() == lazy_entry::list_t)
{
int len = peers_ent->list_size();
resp.peers.reserve(len);
error_code parse_error;
for (int i = 0; i < len; ++i)
{
peer_entry p;
if (!extract_peer_info(*peers_ent->list_at(i), p, parse_error))
continue;
resp.peers.push_back(p);
}
// only report an error if all peer entries are invalid
if (resp.peers.empty() && parse_error)
{
ec = parse_error;
return resp;
}
}
else
{
peers_ent = 0;
}
#if TORRENT_USE_IPV6
lazy_entry const* ipv6_peers = e.dict_find_string("peers6");
if (ipv6_peers)
{
char const* peers = ipv6_peers->string_ptr();
int len = ipv6_peers->string_length();
resp.peers6.reserve(len / 18);
for (int i = 0; i < len; i += 18)
{
if (len - i < 18) break;
ipv6_peer_entry p;
p.ip = detail::read_v6_address(peers).to_v6().to_bytes();
p.port = detail::read_uint16(peers);
resp.peers6.push_back(p);
}
}
else
{
ipv6_peers = 0;
}
#else
lazy_entry const* ipv6_peers = 0;
#endif
/*
// if we didn't receive any peers. We don't care if we're stopping anyway
if (peers_ent == 0 && ipv6_peers == 0
&& tracker_req().event != tracker_request::stopped)
{
ec.assign(errors::invalid_peers_entry, get_libtorrent_category());
return resp;
}
*/
lazy_entry const* ip_ent = e.dict_find_string("external ip");
if (ip_ent)
{
char const* p = ip_ent->string_ptr();
if (ip_ent->string_length() == int(address_v4::bytes_type().size()))
resp.external_ip = detail::read_v4_address(p);
#if TORRENT_USE_IPV6
else if (ip_ent->string_length() == int(address_v6::bytes_type().size()))
resp.external_ip = detail::read_v6_address(p);
#endif
}
return resp;
}
}<|fim▁end|> | }
// extract ip
i = info.dict_find_string("ip"); |
<|file_name|>a.py<|end_file_name|><|fim▁begin|># print absolute value of an integer:
a = -100
if a >= 0:
print a
else:<|fim▁hole|><|fim▁end|> | print -a |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# MicroPython documentation build configuration file, created by
# sphinx-quickstart on Sun Sep 21 11:42:03 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
#master_doc = 'index'
# General information about the project.
project = 'MicroPython'
copyright = '2014, Damien P. George'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.4'
# The full version, including alpha/beta/rc tags.
release = '1.4.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
try:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), '.']
except:
html_theme = 'default'
html_theme_path = ['.']
else:
html_theme_path = ['.']
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = ['.']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = '../../logo/trans-logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%d %b %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {"index": "topindex.html"}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MicroPythondoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'MicroPython.tex', 'MicroPython Documentation',
'Damien P. George', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'micropython', 'MicroPython Documentation',
['Damien P. George'], 1),
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'MicroPython', 'MicroPython Documentation',
'Damien P. George', 'MicroPython', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.<|fim▁hole|>
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
# Work out the port to generate the docs for
from collections import OrderedDict
micropy_port = os.getenv('MICROPY_PORT') or 'pyboard'
tags.add('port_' + micropy_port)
ports = OrderedDict((
("unix", "unix"),
("pyboard", "the pyboard"),
("wipy", "the WiPy"),
("esp8266", "esp8266"),
))
# The members of the html_context dict are available inside topindex.html
url_prefix = os.getenv('MICROPY_URL_PREFIX') or '/'
html_context = {
'port':micropy_port,
'port_name':ports[micropy_port],
'all_ports':[(n, url_prefix + p) for p, n in ports.items()],
}
# Append the other ports' specific folders/files to the exclude pattern
exclude_patterns.extend([port + '*' for port in ports if port != micropy_port])
# Specify a custom master document based on the port name
master_doc = micropy_port + '_' + 'index'<|fim▁end|> | #texinfo_no_detailmenu = False
|
<|file_name|>epub_to_txt_parser.py<|end_file_name|><|fim▁begin|>import sys, getopt
import errno
import os.path
import epub
import lxml
from bs4 import BeautifulSoup
class EPubToTxtParser:
# Epub parsing specific code
def get_linear_items_data( self, in_file_name ):
book_items = []
book = epub.open_epub( in_file_name )
for item_id, linear in book.opf.spine.itemrefs:
item = book.get_item( item_id )
if linear:
data = book.read_item( item )
book_items.append( data )
return book_items
def get_narrative( self, linear_items_data ):
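		# Heuristic: keep only spine items at least as long as the running average
		# item length, on the assumption that narrative chapters are longer than
		# front/back matter such as title pages and tables of contents.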
avg_len = 0
count = 0
for data in linear_items_data:
count += 1
avg_len = ( ( avg_len * ( count - 1 ) ) + len( data ) ) / count
book_narrative = [ data for data in linear_items_data if len(data) >= avg_len ]
return book_narrative
def extract_paragraph_text( self, book_narrative ):
paragraph_text = ''
for data in book_narrative:
soup = BeautifulSoup( data, "lxml" )
paragraphs = soup.find_all( 'p' )
# Thanks to Eric Storms for finding the solution
# to some 0kB parse results…
if paragraphs == []:
paragraphs = soup.find_all( 'div' )
for paragraph in paragraphs:
paragraph_text += ( paragraph.get_text() + '\n' )
return paragraph_text
def narrative_from_epub_to_txt( self, in_file_name ):
if os.path.isfile( in_file_name ):
book_items = self.get_linear_items_data( in_file_name )
book_narrative = self.get_narrative( book_items )
paragraph_text = self.extract_paragraph_text( book_narrative )
return( paragraph_text )
else:
raise FileNotFoundError( errno.ENOENT, os.strerror( errno.ENOENT ), in_file_name )
# Command line usage stuff
def print_usage_and_exit():
print( "Usage: %s -i epub_file_in -o txt_file_out" % sys.argv[ 0 ] )
sys.exit( 2 )
def parse_opts( opts ):
in_file_name, out_file_name = None, None
for o, a in opts:
if o == '-i':
in_file_name = a<|fim▁hole|> out_file_name = a
return ( in_file_name, out_file_name )
# Main
if __name__ == '__main__':
try:
opts, args = getopt.getopt(sys.argv[1:], "i:o:")
assert( len(opts) != 0 )
in_file_name, out_file_name = parse_opts( opts )
except getopt.GetoptError as e:
print( str( e ) )
print_usage_and_exit()
except AssertionError:
print_usage_and_exit()
try:
parser = EPubToTxtParser()
narrative_text = parser.narrative_from_epub_to_txt( in_file_name )
if( out_file_name != None ):
with open( out_file_name, "w" ) as out_file:
out_file.write( narrative_text )
out_file.close()
else:
print( narrative_text )
except FileNotFoundError:
print( "File not found: {file_name}".format( file_name = in_file_name ) )<|fim▁end|> | elif o == '-o': |
<|file_name|>imageTest.py<|end_file_name|><|fim▁begin|>f = open("pixels.dat", "r")
pixs = f.readline()
f.close()
print len(pixs)
from PIL import Image
import numpy as np
img = Image.new('RGB', (160, 210), "black") # create a new black image
pixels = img.load() # create the pixel map
# Load the hardcoded grayscale array
from grayscale import getGrayscaleArray
colMat = getGrayscaleArray()
for i in range(len(pixs)/2):
row = i % 160
column = i/160
hex1 = int(pixs[i*2], 16)
# Division by 2 because: http://en.wikipedia.org/wiki/List_of_video_game_console_palettes
hex2 = int(pixs[i*2+1], 16)/2
temp = int(colMat[hex2, hex1])
pixels[row, column] = (temp, temp, temp)
img.show()
# Example 1: take one PIL.Image file, preprocess and get its pixel array
from preprocessing import preprocessImage<|fim▁hole|>img2 = preprocessImage(img)
pixels = img2.load()
# Example 2: take a sequence that DOES contain actions and preprocess the images in-place
from preprocessing import preprocessSequenceWithActions
sequence = [img.copy(), 45, img.copy(), 'thisdoesntmatter', img.copy(), 'this neither'] #,deepcopy(img),'thisdoesntmatter',deepcopy(img),deepcopy(img)]
sequence = preprocessSequenceWithActions(sequence)
# Example 3: take a sequence that DOESN'T contain actions and preprocess the images in-place
from preprocessing import preprocessSequenceNoActions
sequence = [img.copy(), img.copy(), img.copy()]
sequence = preprocessSequenceNoActions(sequence)<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta
from django.db import models
from django.db.models import Max, Min
from tinymce.models import HTMLField
<|fim▁hole|> symbol = models.CharField(max_length=10, blank=True, null=True)
description = HTMLField(blank=True, null=True, default='')
listing_date = models.DateField(blank=True, null=True)
renamed_to = models.ForeignKey('self', blank=True, null=True, default=None, related_name='renamed_from')
order = models.IntegerField(blank=True, default=0)
is_index = models.BooleanField(blank=True, default=False)
is_currently_listed = models.BooleanField(blank=True, default=True)
is_suspended = models.BooleanField(blank=True, default=False)
created_datetime = models.DateTimeField(auto_now_add=True)
updated_datetime = models.DateTimeField(auto_now=True)
class Meta:
ordering = ('symbol',)
verbose_name = 'Company'
verbose_name_plural = 'Companies'
def __unicode__(self):
return self.symbol if self.symbol is not None else self.name
def __str__(self):
return self.symbol if self.symbol is not None else self.name
@property
def readable_name(self):
if self.is_index:
return self.name[1:]
else:
return self.name
@property
def year_high(self):
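		"""Highest 'price_high' among quotes from roughly the last 52 weeks."""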
today = datetime.now()
one_year = timedelta(days=52*7)
if today.isoweekday() == 6:
today = today - timedelta(days=1)
elif today.isoweekday() == 7:
today = today - timedelta(days=2)
last_year = today - one_year
quotes = self.quote_set.filter(quote_date__gt=last_year)
if quotes.count() == 0:
return 0.0
year_high = quotes.aggregate(Max('price_high'))
return ('%f' % year_high['price_high__max']).rstrip('0').rstrip('.')
@property
def year_low(self):
today = datetime.now()
one_year = timedelta(days=52*7)
if today.isoweekday() == 6:
today = today - timedelta(days=1)
elif today.isoweekday() == 7:
today = today - timedelta(days=2)
last_year = today - one_year
quotes = self.quote_set.filter(quote_date__gt=last_year)
if quotes.count() == 0:
return 0.0
year_low = quotes.aggregate(Min('price_low'))
return ('%f' % year_low['price_low__min']).rstrip('0').rstrip('.')
@property
def last_thirty_quotes(self):
quotes = self.quote_set.order_by('-quote_date')[:30]
return quotes<|fim▁end|> | class Company(models.Model):
name = models.CharField(max_length=75, blank=True, null=True) |
<|file_name|>Handler.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from collections import namedtuple
Payload = namedtuple('Payload', ['iden', 'body', 'send_date', 'sender'])
class Handler(object):<|fim▁hole|> @staticmethod
def config():
return
def __init__(self, logger):
self.logger = logger
	def create_translator(self):
		return
	def create_listener(self, task):
		return
	def configure_modules(self, modules, push_config):
		return
class Translator(object):
	def get_recent(self):
		return
	def is_valid(self, message):
		return
	def get_module(self, message, modules):
		return
	def cleanup(self, message):
		return
	def to_payload(self, message):
		return
	def respond(self, message, response):
return<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
//! Infrastructure for compiler plugins.
//!
//! Plugins are Rust libraries which extend the behavior of `rustc`
//! in various ways.
//!
//! Plugin authors will use the `Registry` type re-exported by
//! this module, along with its methods. The rest of the module
//! is for use by `rustc` itself.
//!
//! To define a plugin, build a dylib crate with a
//! `#[plugin_registrar]` function:
//!
//! ```rust,ignore
//! #![crate_name = "myplugin"]
//! #![crate_type = "dylib"]
//! #![feature(plugin_registrar)]
//!
//! extern crate rustc;
//!
//! use rustc::plugin::Registry;
//!
//! #[plugin_registrar]
//! pub fn plugin_registrar(reg: &mut Registry) {
//! reg.register_macro("mymacro", expand_mymacro);
//! }
//!
//! fn expand_mymacro(...) { // details elided
//! ```
//!
//! WARNING: We currently don't check that the registrar function
//! has the appropriate type!
//!
//! To use a plugin while compiling another crate:
//!
//! ```rust
//! #![feature(plugin)]
//!
//! #[plugin]
//! extern crate myplugin;
//! ```
//!
//! If you don't need the plugin crate available at runtime, use
//! `#[no_link]` as well.
//!
//! See [the compiler plugin guide](../../guide-plugin.html)
//! for more examples.
pub use self::registry::Registry;
pub mod registry;
pub mod load;
pub mod build;<|fim▁end|> | |
<|file_name|>jsonWorker.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import URI from 'vs/base/common/uri';
import Severity from 'vs/base/common/severity';
import EditorCommon = require('vs/editor/common/editorCommon');
import Modes = require('vs/editor/common/modes');
import HtmlContent = require('vs/base/common/htmlContent');
import Parser = require('./parser/jsonParser');
import JSONFormatter = require('vs/languages/json/common/features/jsonFormatter');
import SchemaService = require('./jsonSchemaService');
import JSONSchema = require('vs/base/common/jsonSchema');
import JSONIntellisense = require('./jsonIntellisense');
import WinJS = require('vs/base/common/winjs.base');
import Strings = require('vs/base/common/strings');
import ProjectJSONContribution = require('./contributions/projectJSONContribution');
import PackageJSONContribution = require('./contributions/packageJSONContribution');
import BowerJSONContribution = require('./contributions/bowerJSONContribution');
import GlobPatternContribution = require('./contributions/globPatternContribution');
import errors = require('vs/base/common/errors');
import {IMarkerService, IMarkerData} from 'vs/platform/markers/common/markers';
import {IRequestService} from 'vs/platform/request/common/request';
import {IWorkspaceContextService} from 'vs/platform/workspace/common/workspace';
import {ISchemaContributions} from 'vs/platform/jsonschemas/common/jsonContributionRegistry';
import {IResourceService} from 'vs/editor/common/services/resourceService';
import {IInstantiationService} from 'vs/platform/instantiation/common/instantiation';
import {JSONLocation} from './parser/jsonLocation';
import {filterSuggestions} from 'vs/editor/common/modes/supports/suggestSupport';
import {ValidationHelper} from 'vs/editor/common/worker/validationHelper';
export interface IOptionsSchema {
/**
* HTTP schema URL or a relative path to schema file in workspace
*/
url: string;
/**
* The patterns (e.g. *.pack.json) to map files to this schema
*/
fileMatch: string[];
/**
	 * An unresolved schema definition. Optional, to avoid fetching from a URL.
*/
schema?: JSONSchema.IJSONSchema;
/* deprecated */
schemaPath: string;
/* deprecated */
filePattern: string;
}
export interface IOptions {
schemas: IOptionsSchema[];
}
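// Illustrative sketch (assumed values, not shipped defaults) of an IOptions object as
// consumed by JSONWorker._doConfigure below; the deprecated schemaPath/filePattern
// fields are omitted for brevity:
//
//   {
//       schemas: [{
//           url: 'http://json.schemastore.org/tsconfig',
//           fileMatch: ['tsconfig.json']
//       }]
//   }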
export interface ISuggestionsCollector {
add(suggestion: Modes.ISuggestion): void;
setAsIncomplete() : void;
error(message:string): void;
}
export interface IJSONWorkerContribution {
getInfoContribution(resource: URI, location: JSONLocation) : WinJS.TPromise<HtmlContent.IHTMLContentElement[]>;
collectPropertySuggestions(resource: URI, location: JSONLocation, currentWord: string, addValue: boolean, isLast:boolean, result: ISuggestionsCollector) : WinJS.Promise;
collectValueSuggestions(resource: URI, location: JSONLocation, propertyKey: string, result: ISuggestionsCollector): WinJS.Promise;
collectDefaultSuggestions(resource: URI, result: ISuggestionsCollector): WinJS.Promise;
}
export class JSONWorker implements Modes.IExtraInfoSupport {
private schemaService: SchemaService.IJSONSchemaService;
private requestService: IRequestService;
private contextService: IWorkspaceContextService;
private jsonIntellisense : JSONIntellisense.JSONIntellisense;
private contributions: IJSONWorkerContribution[];
private _validationHelper: ValidationHelper;
private resourceService:IResourceService;
private markerService: IMarkerService;
private _modeId: string;
constructor(
modeId: string,
@IResourceService resourceService: IResourceService,
@IMarkerService markerService: IMarkerService,
@IRequestService requestService: IRequestService,
@IWorkspaceContextService contextService: IWorkspaceContextService,
@IInstantiationService instantiationService: IInstantiationService
) {
this._modeId = modeId;
this.resourceService = resourceService;
this.markerService = markerService;
this._validationHelper = new ValidationHelper(
this.resourceService,
this._modeId,<|fim▁hole|> );
this.requestService = requestService;
this.contextService = contextService;
this.schemaService = instantiationService.createInstance(SchemaService.JSONSchemaService);
this.contributions = [
instantiationService.createInstance(ProjectJSONContribution.ProjectJSONContribution),
instantiationService.createInstance(PackageJSONContribution.PackageJSONContribution),
instantiationService.createInstance(BowerJSONContribution.BowerJSONContribution),
instantiationService.createInstance(GlobPatternContribution.GlobPatternContribution)
];
this.jsonIntellisense = new JSONIntellisense.JSONIntellisense(this.schemaService, this.requestService, this.contributions);
}
public navigateValueSet(resource:URI, range:EditorCommon.IRange, up:boolean):WinJS.TPromise<Modes.IInplaceReplaceSupportResult> {
var modelMirror = this.resourceService.get(resource);
var offset = modelMirror.getOffsetFromPosition({ lineNumber: range.startLineNumber, column: range.startColumn });
var parser = new Parser.JSONParser();
var config = new Parser.JSONDocumentConfig();
config.ignoreDanglingComma = true;
var doc = parser.parse(modelMirror.getValue(), config);
var node = doc.getNodeFromOffsetEndInclusive(offset);
if (node && (node.type === 'string' || node.type === 'number' || node.type === 'boolean' || node.type === 'null')) {
return this.schemaService.getSchemaForResource(resource.toString(), doc).then((schema) => {
if (schema) {
var proposals : Modes.ISuggestion[] = [];
var proposed: any = {};
var collector = {
add: (suggestion: Modes.ISuggestion) => {
if (!proposed[suggestion.label]) {
proposed[suggestion.label] = true;
proposals.push(suggestion);
}
},
setAsIncomplete: () => { /* ignore */ },
error: (message: string) => {
errors.onUnexpectedError(message);
}
};
this.jsonIntellisense.getValueSuggestions(resource, schema, doc, node.parent, node.start, collector);
var range = modelMirror.getRangeFromOffsetAndLength(node.start, node.end - node.start);
var text = modelMirror.getValueInRange(range);
for (var i = 0, len = proposals.length; i < len; i++) {
if (Strings.equalsIgnoreCase(proposals[i].label, text)) {
var nextIdx = i;
if (up) {
nextIdx = (i + 1) % len;
} else {
nextIdx = i - 1;
if (nextIdx < 0) {
nextIdx = len - 1;
}
}
return {
value: proposals[nextIdx].label,
range: range
};
}
}
return null;
}
});
}
return null;
}
/**
* @return true if you want to revalidate your models
*/
_doConfigure(options:IOptions): WinJS.TPromise<void> {
if (options && options.schemas) {
this.schemaService.clearExternalSchemas();
options.schemas.forEach((schema) => {
if (schema.url && (schema.fileMatch || schema.schema)) {
var url = schema.url;
if (!Strings.startsWith(url, 'http://') && !Strings.startsWith(url, 'https://') && !Strings.startsWith(url, 'file://')) {
var resourceURL = this.contextService.toResource(url);
if (resourceURL) {
url = resourceURL.toString();
}
}
if (url) {
this.schemaService.registerExternalSchema(url, schema.fileMatch, schema.schema);
}
} else if (schema.filePattern && schema.schemaPath) {
var url = this.contextService.toResource(schema.schemaPath).toString();
var patterns = schema.filePattern ? [ schema.filePattern ] : [];
this.schemaService.registerExternalSchema(url, patterns);
}
});
}
this._validationHelper.triggerDueToConfigurationChange();
return WinJS.TPromise.as(void 0);
}
public setSchemaContributions(contributions:ISchemaContributions): WinJS.TPromise<boolean> {
this.schemaService.setSchemaContributions(contributions);
return WinJS.TPromise.as(true);
}
public enableValidator(): WinJS.TPromise<void> {
this._validationHelper.enable();
return WinJS.TPromise.as(null);
}
public doValidate(resources: URI[]):void {
for (var i = 0; i < resources.length; i++) {
this.doValidate1(resources[i]);
}
}
private doValidate1(resource: URI):void {
var modelMirror = this.resourceService.get(resource);
var parser = new Parser.JSONParser();
var content = modelMirror.getValue();
if (content.length === 0) {
// ignore empty content, no marker can be set anyway
return;
}
var result = parser.parse(content);
this.schemaService.getSchemaForResource(resource.toString(), result).then((schema) => {
if (schema) {
if (schema.errors.length && result.root) {
var property = result.root.type === 'object' ? (<Parser.ObjectASTNode> result.root).getFirstProperty('$schema') : null;
if (property) {
var node = property.value || property;
result.warnings.push({ location: { start: node.start, end: node.end }, message: schema.errors[0] });
} else {
result.warnings.push({ location: { start: result.root.start, end: result.root.start + 1 }, message: schema.errors[0] });
}
} else {
result.validate(schema.schema);
}
}
var added : { [signature:string]: boolean} = {};
var markerData: IMarkerData[] = [];
result.errors.concat(result.warnings).forEach((error, idx) => {
// remove duplicated messages
var signature = error.location.start + ' ' + error.location.end + ' ' + error.message;
if (!added[signature]) {
added[signature] = true;
var startPosition = modelMirror.getPositionFromOffset(error.location.start);
var endPosition = modelMirror.getPositionFromOffset(error.location.end);
markerData.push({
message: error.message,
severity: idx >= result.errors.length ? Severity.Warning : Severity.Error,
startLineNumber: startPosition.lineNumber,
startColumn: startPosition.column,
endLineNumber: endPosition.lineNumber,
endColumn: endPosition.column
});
}
});
this.markerService.changeOne(this._modeId, resource, markerData);
});
}
public suggest(resource:URI, position:EditorCommon.IPosition):WinJS.TPromise<Modes.ISuggestResult[]> {
return this.doSuggest(resource, position).then(value => filterSuggestions(value));
}
private doSuggest(resource:URI, position:EditorCommon.IPosition):WinJS.TPromise<Modes.ISuggestResult> {
var modelMirror = this.resourceService.get(resource);
return this.jsonIntellisense.doSuggest(resource, modelMirror, position);
}
public computeInfo(resource:URI, position:EditorCommon.IPosition): WinJS.TPromise<Modes.IComputeExtraInfoResult> {
var modelMirror = this.resourceService.get(resource);
var parser = new Parser.JSONParser();
var doc = parser.parse(modelMirror.getValue());
var offset = modelMirror.getOffsetFromPosition(position);
var node = doc.getNodeFromOffset(offset);
var originalNode = node;
// use the property description when hovering over an object key
if (node && node.type === 'string') {
var stringNode = <Parser.StringASTNode>node;
if (stringNode.isKey) {
var propertyNode = <Parser.PropertyASTNode>node.parent;
node = propertyNode.value;
}
}
if (!node) {
return WinJS.TPromise.as(null);
}
return this.schemaService.getSchemaForResource(resource.toString(), doc).then((schema) => {
if (schema) {
var matchingSchemas : Parser.IApplicableSchema[] = [];
doc.validate(schema.schema, matchingSchemas, node.start);
var description: string = null;
var contributonId: string = null;
matchingSchemas.every((s) => {
if (s.node === node && !s.inverted && s.schema) {
description = description || s.schema.description;
contributonId = contributonId || s.schema.id;
}
return true;
});
var location = node.getNodeLocation();
for (var i= this.contributions.length -1; i >= 0; i--) {
var contribution = this.contributions[i];
var promise = contribution.getInfoContribution(resource, location);
if (promise) {
return promise.then((htmlContent) => { return this.createInfoResult(htmlContent, originalNode, modelMirror); } );
}
}
if (description) {
var htmlContent = [ {className: 'documentation', text: description } ];
return this.createInfoResult(htmlContent, originalNode, modelMirror);
}
}
return null;
});
}
private createInfoResult(htmlContent : HtmlContent.IHTMLContentElement[], node: Parser.ASTNode, modelMirror: EditorCommon.IMirrorModel) : Modes.IComputeExtraInfoResult {
var range = modelMirror.getRangeFromOffsetAndLength(node.start, node.end - node.start);
var result:Modes.IComputeExtraInfoResult = {
value: '',
htmlContent: htmlContent,
className: 'typeInfo json',
range: range
};
return result;
}
public getOutline(resource:URI):WinJS.TPromise<Modes.IOutlineEntry[]> {
var modelMirror = this.resourceService.get(resource);
var parser = new Parser.JSONParser();
var doc = parser.parse(modelMirror.getValue());
var root = doc.root;
if (!root) {
return WinJS.TPromise.as(null);
}
// special handling for key bindings
var resourceString = resource.toString();
if ((resourceString === 'vscode://defaultsettings/keybindings.json') || Strings.endsWith(resourceString.toLowerCase(), '/user/keybindings.json')) {
if (root.type === 'array') {
var result : Modes.IOutlineEntry[] = [];
(<Parser.ArrayASTNode> root).items.forEach((item) => {
if (item.type === 'object') {
var property = (<Parser.ObjectASTNode> item).getFirstProperty('key');
if (property && property.value) {
var range = modelMirror.getRangeFromOffsetAndLength(item.start, item.end - item.start);
result.push({ label: property.value.getValue(), icon: 'function', type: 'string', range: range, children: []});
}
}
});
return WinJS.TPromise.as(result);
}
}
function collectOutlineEntries(result: Modes.IOutlineEntry[], node: Parser.ASTNode): Modes.IOutlineEntry[] {
if (node.type === 'array') {
(<Parser.ArrayASTNode>node).items.forEach((node:Parser.ASTNode) => {
collectOutlineEntries(result, node);
});
} else if (node.type === 'object') {
var objectNode = <Parser.ObjectASTNode>node;
objectNode.properties.forEach((property:Parser.PropertyASTNode) => {
var range = modelMirror.getRangeFromOffsetAndLength(property.start, property.end - property.start);
var valueNode = property.value;
if (valueNode) {
var children = collectOutlineEntries([], valueNode);
var icon = valueNode.type === 'object' ? 'module' : valueNode.type;
result.push({ label: property.key.getValue(), icon: icon, type: valueNode.type, range: range, children: children});
}
});
}
return result;
}
var result = collectOutlineEntries([], root);
return WinJS.TPromise.as(result);
}
public format(resource: URI, range: EditorCommon.IRange, options: Modes.IFormattingOptions): WinJS.TPromise<EditorCommon.ISingleEditOperation[]> {
var model = this.resourceService.get(resource);
return WinJS.TPromise.as(JSONFormatter.format(model, range, options));
}
}<|fim▁end|> | (toValidate) => this.doValidate(toValidate) |
<|file_name|>test_instance_mapping.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from nova import context
from nova import exception
from nova.objects import cell_mapping
from nova.objects import instance_mapping
from nova import test
from nova.tests import fixtures
sample_mapping = {'instance_uuid': '',
'cell_id': 3,
'project_id': 'fake-project'}
sample_cell_mapping = {'id': 3,
'uuid': '',
'name': 'fake-cell',
'transport_url': 'rabbit:///',
'database_connection': 'mysql:///'}
def create_cell_mapping(**kwargs):
args = sample_cell_mapping.copy()
if 'uuid' not in kwargs:
args['uuid'] = uuidutils.generate_uuid()
args.update(kwargs)
ctxt = context.RequestContext('fake-user', 'fake-project')
return cell_mapping.CellMapping._create_in_db(ctxt, args)
def create_mapping(**kwargs):
args = sample_mapping.copy()
if 'instance_uuid' not in kwargs:
args['instance_uuid'] = uuidutils.generate_uuid()
args.update(kwargs)
ctxt = context.RequestContext('fake-user', 'fake-project')
return instance_mapping.InstanceMapping._create_in_db(ctxt, args)
class InstanceMappingTestCase(test.NoDBTestCase):
USES_DB_SELF = True
def setUp(self):
super(InstanceMappingTestCase, self).setUp()
self.useFixture(fixtures.Database(database='api'))
self.context = context.RequestContext('fake-user', 'fake-project')
self.mapping_obj = instance_mapping.InstanceMapping()
def test_get_by_instance_uuid(self):
cell_mapping = create_cell_mapping()
mapping = create_mapping()
db_mapping = self.mapping_obj._get_by_instance_uuid_from_db(
self.context, mapping['instance_uuid'])
for key in [key for key in self.mapping_obj.fields.keys()
if key != 'cell_mapping']:
self.assertEqual(db_mapping[key], mapping[key])
self.assertEqual(db_mapping['cell_mapping']['id'], cell_mapping['id'])
def test_get_by_instance_uuid_not_found(self):
self.assertRaises(exception.InstanceMappingNotFound,
self.mapping_obj._get_by_instance_uuid_from_db, self.context,
uuidutils.generate_uuid())
def test_save_in_db(self):
mapping = create_mapping()
cell_mapping = create_cell_mapping()
self.mapping_obj._save_in_db(self.context, mapping['instance_uuid'],
{'cell_id': cell_mapping['id']})
db_mapping = self.mapping_obj._get_by_instance_uuid_from_db(
self.context, mapping['instance_uuid'])
for key in [key for key in self.mapping_obj.fields.keys()
if key not in ['cell_id', 'cell_mapping', 'updated_at']]:
self.assertEqual(db_mapping[key], mapping[key])
self.assertEqual(db_mapping['cell_id'], cell_mapping['id'])
def test_destroy_in_db(self):
mapping = create_mapping()
self.mapping_obj._get_by_instance_uuid_from_db(self.context,
mapping['instance_uuid'])
self.mapping_obj._destroy_in_db(self.context, mapping['instance_uuid'])
self.assertRaises(exception.InstanceMappingNotFound,
self.mapping_obj._get_by_instance_uuid_from_db, self.context,
mapping['instance_uuid'])
def test_cell_id_nullable(self):
# Just ensure this doesn't raise
create_mapping(cell_id=None)
def test_modify_cell_mapping(self):
inst_mapping = instance_mapping.InstanceMapping(context=self.context)
inst_mapping.instance_uuid = uuidutils.generate_uuid()
inst_mapping.project_id = self.context.project_id
inst_mapping.cell_mapping = None
inst_mapping.create()
c_mapping = cell_mapping.CellMapping(
self.context,
uuid=uuidutils.generate_uuid(),
name="cell0",
transport_url="none:///",
database_connection="fake:///")
c_mapping.create()
inst_mapping.cell_mapping = c_mapping
inst_mapping.save()
result_mapping = instance_mapping.InstanceMapping.get_by_instance_uuid(
self.context, inst_mapping.instance_uuid)
self.assertEqual(result_mapping.cell_mapping.id,
c_mapping.id)
class InstanceMappingListTestCase(test.NoDBTestCase):
USES_DB_SELF = True
def setUp(self):
super(InstanceMappingListTestCase, self).setUp()
self.useFixture(fixtures.Database(database='api'))
self.context = context.RequestContext('fake-user', 'fake-project')
self.list_obj = instance_mapping.InstanceMappingList()
def test_get_by_project_id_from_db(self):
project_id = 'fake-project'
mappings = {}
mapping = create_mapping(project_id=project_id)
mappings[mapping['instance_uuid']] = mapping
mapping = create_mapping(project_id=project_id)
mappings[mapping['instance_uuid']] = mapping
db_mappings = self.list_obj._get_by_project_id_from_db(
self.context, project_id)
for db_mapping in db_mappings:
mapping = mappings[db_mapping.instance_uuid]
for key in instance_mapping.InstanceMapping.fields.keys():
self.assertEqual(db_mapping[key], mapping[key])
def test_instance_mapping_list_get_by_cell_id(self):
"""Tests getting all of the InstanceMappings for a given CellMapping id
"""
# we shouldn't have any instance mappings yet
inst_mapping_list = (
instance_mapping.InstanceMappingList.get_by_cell_id(
self.context, sample_cell_mapping['id'])
)
self.assertEqual(0, len(inst_mapping_list))
# now create an instance mapping in a cell<|fim▁hole|> # sure our filtering is working
db_inst_mapping2 = create_mapping(cell_id=None)
self.assertIsNone(db_inst_mapping2['cell_id'])
# now we should list out one instance mapping for the cell
inst_mapping_list = (
instance_mapping.InstanceMappingList.get_by_cell_id(
self.context, db_inst_mapping1['cell_id'])
)
self.assertEqual(1, len(inst_mapping_list))
self.assertEqual(db_inst_mapping1['id'], inst_mapping_list[0].id)<|fim▁end|> | db_inst_mapping1 = create_mapping()
# let's also create an instance mapping that's not in a cell to make |
<|file_name|>opdelete.go<|end_file_name|><|fim▁begin|>/*
Copyright 2021 Gravitational, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package protocol
import (
"fmt"
"strings"<|fim▁hole|>
"github.com/gravitational/trace"
)
// MessageOpDelete represents parsed OP_DELETE wire message.
//
// https://docs.mongodb.com/manual/reference/mongodb-wire-protocol/#op_delete
//
// struct {
// MsgHeader header; // standard message header
// int32 ZERO; // 0 - reserved for future use
// cstring fullCollectionName; // "dbname.collectionname"
// int32 flags; // bit vector - see below for details.
// document selector; // query object. See below for details.
// }
//
// OP_DELETE is deprecated starting MongoDB 5.0 in favor of OP_MSG.
type MessageOpDelete struct {
Header MessageHeader
Zero int32
FullCollectionName string
Flags int32
Selector bsoncore.Document
// bytes is the full wire message bytes (incl. header) read from the connection.
bytes []byte
}
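// Illustrative sketch (reader's note, not from the original source): for a
// hypothetical delete of {"a": 1} from "db.coll", the payload handed to
// readOpDelete below is laid out as
//
//	int32 0 | "db.coll\x00" | int32 flags | BSON {"a": 1}
//
// preceded by the standard 16-byte MsgHeader, which is parsed separately.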
// GetHeader returns the wire message header.
func (m *MessageOpDelete) GetHeader() MessageHeader {
return m.Header
}
// GetBytes returns the message raw bytes read from the connection.
func (m *MessageOpDelete) GetBytes() []byte {
return m.bytes
}
// GetDatabase returns the command's database.
func (m *MessageOpDelete) GetDatabase() (string, error) {
// Full collection name has "<db>.<collection>" format.
return strings.Split(m.FullCollectionName, ".")[0], nil
}
// GetCommand returns the message's command.
func (m *MessageOpDelete) GetCommand() (string, error) {
return "delete", nil
}
// String returns the message string representation.
func (m *MessageOpDelete) String() string {
return fmt.Sprintf("OpDelete(FullCollectionName=%v, Selector=%v, Flags=%v)",
m.FullCollectionName, m.Selector.String(), m.Flags)
}
// MoreToCome is whether sender will send another message right after this one.
func (m *MessageOpDelete) MoreToCome(_ Message) bool {
return true
}
// readOpDelete converts OP_DELETE wire message bytes to a structured form.
func readOpDelete(header MessageHeader, payload []byte) (*MessageOpDelete, error) {
zero, rem, ok := readInt32(payload)
if !ok {
return nil, trace.BadParameter("malformed OP_DELETE: missing zero %v", payload)
}
fullCollectionName, rem, ok := readCString(rem)
if !ok {
return nil, trace.BadParameter("malformed OP_DELETE: missing full collection name %v", payload)
}
flags, rem, ok := readInt32(rem)
if !ok {
return nil, trace.BadParameter("malformed OP_DELETE: missing flags %v", payload)
}
selector, _, ok := bsoncore.ReadDocument(rem)
if !ok {
return nil, trace.BadParameter("malformed OP_DELETE: missing selector %v", payload)
}
return &MessageOpDelete{
Header: header,
Zero: zero,
FullCollectionName: fullCollectionName,
Flags: flags,
Selector: selector,
bytes: append(header.bytes[:], payload...),
}, nil
}
// ToWire converts this message to wire protocol message bytes.
func (m *MessageOpDelete) ToWire(responseTo int32) (dst []byte) {
var idx int32
idx, dst = wiremessage.AppendHeaderStart(dst, m.Header.RequestID, responseTo, wiremessage.OpDelete)
dst = appendInt32(dst, m.Zero)
dst = appendCString(dst, m.FullCollectionName)
dst = appendInt32(dst, m.Flags)
dst = bsoncore.AppendDocument(dst, m.Selector)
return bsoncore.UpdateLength(dst, idx, int32(len(dst[idx:])))
}<|fim▁end|> |
"go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
"go.mongodb.org/mongo-driver/x/mongo/driver/wiremessage" |
<|file_name|>test-xy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import pygtk
pygtk.require('2.0')
import pynotify
import sys
if __name__ == '__main__':<|fim▁hole|>
n = pynotify.Notification("X, Y Test",
"This notification should point to 150, 10")
n.set_hint("x", 150)
n.set_hint("y", 10)
if not n.show():
print "Failed to send notification"
sys.exit(1)<|fim▁end|> | if not pynotify.init("XY"):
sys.exit(1) |
<|file_name|>parse.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <stdint.h>
#include <map>
#include <set>
#include "macros.h"
#include <assert.h>
using namespace std;
#include <fstream>
struct defsetcmp {
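    // comparator for (address, instruction) map keys; must define a strict weak ordering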
bool operator() (const pair<uint64_t, uint64_t> &l, const pair<uint64_t, uint64_t> &r) {
        return (l.first < r.first || (l.first == r.first && l.second < r.second));
}
};
int main(int argc, char *argv[])
{
if (argc != 2) {
cerr << "Usage: ./parse file" << endl;
return(1);
}
ifstream inFile(argv[1]);
map<uint64_t, uint64_t> latestWrite;
typedef map<pair<uint64_t, uint64_t>, set<uint64_t>, defsetcmp> defsett;
defsett defset;
map<uint64_t, bool> localread;
map<uint64_t, bool> nonlocalread;
map<uint64_t, uint64_t> ins2tid;
map<uint64_t, map<uint64_t, bool> > has_read;
map<uint64_t, bool> follower;
while (inFile.good()) {
uint64_t ins, tid, rw, addr;
inFile >> ins >> tid >> rw >> addr;
if (ins == END && addr == END) {
latestWrite.clear();
ins2tid.clear();
has_read.clear();
} else if (rw == READ) {
ins2tid[ins] = tid;
//if there are no writes to current variable yet then continue
if (latestWrite.find(addr) == latestWrite.end()) {
continue;
}
uint64_t latestWriteIns = latestWrite[addr];
//defset
defset[make_pair(addr,ins)].insert(latestWriteIns);
//local non-local
bool isLocal = (tid == ins2tid[latestWriteIns]);
if (localread.find(ins) == localread.end()) {
localread[ins] = isLocal;
nonlocalread[ins] = !isLocal;
} else {
localread[ins] = localread[ins] && isLocal;
nonlocalread[ins] = nonlocalread[ins] && !isLocal;
}
//follower
if (has_read.find(addr) != has_read.end()
&& has_read[addr].find(tid) != has_read[addr].end()) {
if (follower.find(ins) != follower.end()) {
follower[ins] = follower[ins] && has_read[addr][tid];
} else {
follower[ins] = has_read[addr][tid];
}
}
has_read[addr][tid] = true;<|fim▁hole|>
} else {
assert(rw == WRITE);
ins2tid[ins] = tid;
latestWrite[addr] = ins;
if(has_read.find(addr) != has_read.end()) {
for(map<uint64_t, bool>::iterator titr = has_read[addr].begin();
titr != has_read[addr].end();
++titr) {
titr->second = false;
}
}
}
}
inFile.close();
//print defset
//variable read_ins_addr #writes write_ins_add0 ...
for (defsett::const_iterator defsetitr = defset.begin();
defsetitr != defset.end();
++defsetitr) {
cout << defsetitr->first.first << " " << defsetitr->first.second << " ";
cout << (defsetitr->second).size() << " ";
for (set<uint64_t>::const_iterator witr = (defsetitr->second).begin();
witr != (defsetitr->second).end();
++witr) {
cout << *witr << " ";
}
cout << endl;
}
//print local and non local
cout << "#local: " << endl;
for(map<uint64_t, bool>::const_iterator litr = localread.begin();
litr != localread.end();
++litr) {
if (litr->second) {
cout << litr->first << endl;
}
}
cout << "#nonlocal: " << endl;
for(map<uint64_t, bool>::const_iterator nlitr = nonlocalread.begin();
nlitr != nonlocalread.end();
++nlitr) {
if (nlitr->second) {
cout << nlitr->first << endl;
}
}
//print follower
cout << "#follower: " << endl;
for(map<uint64_t, bool>::const_iterator fitr = follower.begin();
fitr != follower.end();
++fitr) {
if (fitr->second) {
cout << fitr->first << endl;
}
}
return 0;
}<|fim▁end|> | |
<|file_name|>convert_basti.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import glob,re,sys,math,pyfits
import numpy as np
import utils
if len( sys.argv ) < 2:
print '\nconvert basti SSP models to ez_gal fits format'
print 'Run in directory with SED models for one metallicity'
print 'Usage: convert_basti.py ez_gal.ascii\n'
sys.exit(2)
fileout = sys.argv[1]
# try to extract meta data out of fileout
sfh = ''; tau = ''; met = ''; imf = ''
# split on _ but get rid of the extension
parts = '.'.join( fileout.split( '.' )[:-1] ).split( '_' )
# look for sfh
for (check,val) in zip( ['ssp','exp'], ['SSP','Exponential'] ):
if parts.count( check ):
sfh = val
sfh_index = parts.index( check )
break
# tau?
if sfh:
	tau = parts[sfh_index+1] if sfh == 'Exponential' else ''
# metallicity
if parts.count( 'z' ):
met = parts[ parts.index( 'z' ) + 1 ]
# imf
for (check,val) in zip( ['krou','salp','chab'], ['Kroupa', 'Salpeter', 'Chabrier'] ):
if parts.count( check ):
imf = val
break
if parts.count( 'n' ):
n = parts[ parts.index( 'n' ) + 1 ]
ae = False
if parts.count( 'ae' ): ae = True
# does the file with masses exist?<|fim▁hole|>has_masses = False
mass_file = glob.glob( 'MLR*.txt' )
if len( mass_file ):
# read it in!
print 'Loading masses from %s' % mass_file[0]
data = utils.rascii( mass_file[0], silent=True )
masses = data[:,10:14].sum( axis=1 )
has_masses = True
files = glob.glob( 'SPEC*agb*' )
nages = len( files )
ages = []
for (i,file) in enumerate(files):
ls = []
this = []
# extract the age from the filename and convert to years
m = re.search( 't60*(\d+)$', file )
ages.append( int( m.group(1) )*1e6 )
# read in this file
fp = open( file, 'r' )
for line in fp:
parts = line.strip().split()
ls.append( float( parts[0].strip() ) )
this.append( float( parts[1].strip() ) )
if i == 0:
# if this is the first file, generate the data table
nls = len( ls )
seds = np.empty( (nls,nages) )
# convert to ergs/s/angstrom
seds[:,i] = np.array( this )/4.3607e-33/1e10
# convert to numpy
ages = np.array( ages )
ls = np.array( ls )*10.0
# make sure we are sorted in age
sinds = ages.argsort()
ages = ages[sinds]
seds = seds[:,sinds]
# speed of light
c = utils.convert_length( utils.c, incoming='m', outgoing='a' )
# convert from angstroms to hertz
vs = c/ls
# convert from ergs/s/A to ergs/s/Hz
seds *= ls.reshape( (ls.size,1) )**2.0/c
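# note: the line above applies F_nu = F_lambda * lambda**2 / c, with lambda in
# angstroms and c in angstroms/s, so no additional unit factors are needed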
# and now from ergs/s/Hz to ergs/s/Hz/cm^2.0
seds /= (4.0*math.pi*utils.convert_length( 10, incoming='pc', outgoing='cm' )**2.0)
# sort in frequency space
sinds = vs.argsort()
# generate fits frame with sed in it
primary_hdu = pyfits.PrimaryHDU(seds[sinds,:])
primary_hdu.header.update( 'units', 'ergs/s/cm^2/Hz' )
primary_hdu.header.update( 'has_seds', True )
primary_hdu.header.update( 'nfilters', 0 )
primary_hdu.header.update( 'nzfs', 0 )
# store meta data
if sfh and met and imf:
primary_hdu.header.update( 'has_meta', True )
primary_hdu.header.update( 'model', 'BaSTI', comment='meta data' )
primary_hdu.header.update( 'met', met, comment='meta data' )
primary_hdu.header.update( 'imf', imf, comment='meta data' )
primary_hdu.header.update( 'sfh', sfh, comment='meta data' )
if sfh == 'Exponential': primary_hdu.header.update( 'tau', tau, comment='meta data' )
primary_hdu.header.update( 'n', n, comment='meta data' )
primary_hdu.header.update( 'ae', ae, comment='meta data' )
# store the list of frequencies in a table
vs_hdu = pyfits.new_table(pyfits.ColDefs([pyfits.Column(name='vs', array=vs[sinds], format='D', unit='hertz')]))
vs_hdu.header.update( 'units', 'hertz' )
# and the list of ages
cols = [pyfits.Column(name='ages', array=ages, format='D', unit='years')]
# and masses
if has_masses: cols.append( pyfits.Column(name='masses', array=masses, format='D', unit='m_sun') )
ages_hdu = pyfits.new_table(pyfits.ColDefs( cols ))
if has_masses: ages_hdu.header.update( 'has_mass', True )
# make the fits file in memory
hdulist = pyfits.HDUList( [primary_hdu,vs_hdu,ages_hdu] )
# and write it out
hdulist.writeto( fileout, clobber=True )<|fim▁end|> | |
<|file_name|>handlers.go<|end_file_name|><|fim▁begin|>package proxy
import (
"log"
"strconv"
"../util"
)
func (s *ProxyServer) handleGetWorkRPC(cs *Session, diff, id string) (reply []string, errorReply *ErrorReply) {
t := s.currentBlockTemplate()
if len(t.Header) == 0 {
return nil, &ErrorReply{Code: -1, Message: "Work not ready"}
}
targetHex := t.Target
if !s.rpc().Pool {
minerDifficulty, err := strconv.ParseFloat(diff, 64)
if err != nil {
log.Printf("Invalid difficulty %v from %v@%v ", diff, id, cs.ip)
minerDifficulty = 5
}
targetHex = util.MakeTargetHex(minerDifficulty)
}
reply = []string{t.Header, t.Seed, targetHex}
return
}
func (s *ProxyServer) handleSubmitRPC(cs *Session, diff string, id string, params []string) (reply bool, errorReply *ErrorReply) {
miner, ok := s.miners.Get(id)
if !ok {
miner = NewMiner(id, cs.ip)
s.registerMiner(miner)
}
t := s.currentBlockTemplate()
reply = miner.processShare(s, t, diff, params)
return
}
func (s *ProxyServer) handleSubmitHashrate(cs *Session, req *JSONRpcReq) bool {
reply, _ := s.rpc().SubmitHashrate(req.Params)
return reply
}
func (s *ProxyServer) handleUnknownRPC(cs *Session, req *JSONRpcReq) *ErrorReply {<|fim▁hole|>}<|fim▁end|> | log.Printf("Unknown RPC method: %v", req)
return &ErrorReply{Code: -1, Message: "Invalid method"} |
<|file_name|>_hasnotematchingsubstringof.py<|end_file_name|><|fim▁begin|>#
# gPrime - A web-based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or<|fim▁hole|># but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gprime modules
#
#-------------------------------------------------------------------------
from .._hasnotesubstrbase import HasNoteSubstrBase
#-------------------------------------------------------------------------
# "Media having notes that contain a substring"
#-------------------------------------------------------------------------
class HasNoteMatchingSubstringOf(HasNoteSubstrBase):
"""Media having notes containing <substring>"""
name = _('Media objects having notes containing <substring>')
description = _("Matches media objects whose notes contain text "
"matching a substring")<|fim▁end|> | # (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, |
<|file_name|>pksig_ps03.py<|end_file_name|><|fim▁begin|>"""
Identity Based Signature
| From: "David Pointcheval and Olivier Sanders. Short Randomizable Signatures"
| Published in: 2015
| Available from: https://eprint.iacr.org/2015/525.pdf
* type: signature (identity-based)
* setting: bilinear groups (asymmetric)
:Authors: Lovesh Harchandani
:Date: 6/2018
"""
from functools import reduce
from charm.toolbox.pairinggroup import PairingGroup, ZR, G1, G2, pair
debug = False
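# Reader's note (not from the original source): the flow implemented below is
#   keygen -> commitment -> sign (on the commitment) -> unblind_signature -> verify,
# where commitment() forms C = g1^t * prod(Y1_i^H(m_i)), sign() returns the
# blinded pair (g1^u, (X1 * C)^u), and verify() checks
# e(s1, X2 * prod(Y2_i^H(m_i))) == e(g2, s2).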
class PS01:
"""
Signatures over committed messages, section 6.1 of the paper
"""
def __init__(self, groupObj):
global group
group = groupObj
@staticmethod
def keygen(num_messages=1):
x = group.random(ZR)
g1 = group.random(G1)
sk = {'x': x, 'X1': g1 ** x}
g2 = group.random(G2)
ys = [group.random(ZR) for _ in range(num_messages)]
X2 = g2 ** x
y1s = [g1 ** y for y in ys]
y2s = [g2 ** y for y in ys]
pk = {'X2': X2, 'Y2': y2s, 'Y1': y1s, 'g2': g2, 'g1': g1}
return pk, sk
def commitment(self, pk, *messages):
t = group.random(ZR)
return t, (pk['g1'] ** t) * self.product([y1 ** group.hash(m, ZR) for (y1, m) in zip(pk['Y1'], messages)])
def sign(self, sk, pk, commitment):
u = group.random(ZR)
return pk['g1'] ** u, (sk['X1'] * commitment) ** u
@staticmethod
def unblind_signature(t, sig):
s1, s2 = sig
return s1, (s2 / (s1 ** t))
def verify(self, pk, sig, *messages):<|fim▁hole|> l2 = pk['X2'] * self.product([pk['Y2'][i] ** ms[i] for i in range(len(messages))])
return pair(s1, l2) == pair(pk['g2'], s2)
def randomize_sig(self, sig):
s1, s2 = sig
t = group.random(ZR)
return s1 ** t, s2 ** t
@staticmethod
def product(seq):
return reduce(lambda x, y: x * y, seq)
def main():
grp = PairingGroup('MNT224')
ps = PS01(grp)
messages = ['Hi there', 'Not there', 'Some message ................', 'Dont know .............']
(pk, sk) = ps.keygen(len(messages))
if debug:
print("Keygen...")
print("pk :=", pk)
print("sk :=", sk)
t, commitment = ps.commitment(pk, *messages)
sig = ps.sign(sk, pk, commitment)
if debug:
print("Signature: ", sig)
sig = ps.unblind_signature(t, sig)
result = ps.verify(pk, sig, *messages)
assert result, "INVALID signature!"
if debug:
print("Successful Verification!!!")
rand_sig = ps.randomize_sig(sig)
assert sig != rand_sig
if debug:
print("Randomized Signature: ", rand_sig)
result = ps.verify(pk, rand_sig, *messages)
assert result, "INVALID signature!"
if debug:
print("Successful Verification!!!")
if __name__ == "__main__":
debug = True
main()<|fim▁end|> | ms = [group.hash(m, ZR) for m in messages]
s1, s2 = sig
if group.init(G1) == s1:
return False |
<|file_name|>SeaGlassTextPaneUI.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2009 Kathryn Huxtable and Kenneth Orr.
*
* This file is part of the SeaGlass Pluggable Look and Feel.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* $Id: org.eclipse.jdt.ui.prefs 172 2009-10-06 18:31:12Z [email protected] $
*/
package com.seaglasslookandfeel.ui;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.beans.PropertyChangeEvent;
import javax.swing.JComponent;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.synth.SynthContext;
import javax.swing.text.Style;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyleContext;
import javax.swing.text.StyledDocument;
import com.seaglasslookandfeel.SeaGlassContext;
/**
* SeaGlass TextPaneUI delegate.
*
* Based on SynthTextPaneUI by Georges Saab and David Karlton.
*
* The only reason this exists is that we had to modify SynthTextPaneUI.
*
* @see javax.swing.plaf.synth.SynthTextPaneUI
*/
public class SeaGlassTextPaneUI extends SeaGlassEditorPaneUI {
/**
* Creates a UI for the JTextPane.
*
* @param c the JTextPane object
* @return the UI object
*/
public static ComponentUI createUI(JComponent c) {
return new SeaGlassTextPaneUI();
}
/**
* Fetches the name used as a key to lookup properties through the
* UIManager. This is used as a prefix to all the standard
* text properties.
*
* @return the name ("TextPane")
*/
@Override
protected String getPropertyPrefix() {
return "TextPane";
}
/**
* Installs the UI for a component. This does the following
* things.
* <ol>
* <li>
* Sets opaqueness of the associated component according to its style,
<|fim▁hole|> * Installs the default caret and highlighter into the
* associated component. These properties are only set if their
* current value is either {@code null} or an instance of
* {@link UIResource}.
* <li>
* Attaches to the editor and model. If there is no
* model, a default one is created.
* <li>
* Creates the view factory and the view hierarchy used
* to represent the model.
* </ol>
*
* @param c the editor component
* @see javax.swing.plaf.basic.BasicTextUI#installUI
* @see ComponentUI#installUI
*/
@Override
public void installUI(JComponent c) {
super.installUI(c);
updateForeground(c.getForeground());
updateFont(c.getFont());
}
/**
* This method gets called when a bound property is changed
* on the associated JTextComponent. This is a hook
* which UI implementations may change to reflect how the
* UI displays bound properties of JTextComponent subclasses.
* If the font, foreground or document has changed, the
* the appropriate property is set in the default style of
* the document.
*
* @param evt the property change event
*/
@Override
protected void propertyChange(PropertyChangeEvent evt) {
super.propertyChange(evt);
String name = evt.getPropertyName();
if (name.equals("foreground")) {
updateForeground((Color)evt.getNewValue());
} else if (name.equals("font")) {
updateFont((Font)evt.getNewValue());
} else if (name.equals("document")) {
JComponent comp = getComponent();
updateForeground(comp.getForeground());
updateFont(comp.getFont());
}
}
/**
* Update the color in the default style of the document.
*
* @param color the new color to use or null to remove the color attribute
* from the document's style
*/
private void updateForeground(Color color) {
StyledDocument doc = (StyledDocument)getComponent().getDocument();
Style style = doc.getStyle(StyleContext.DEFAULT_STYLE);
if (style == null) {
return;
}
if (color == null) {
style.removeAttribute(StyleConstants.Foreground);
} else {
StyleConstants.setForeground(style, color);
}
}
/**
* Update the font in the default style of the document.
*
* @param font the new font to use or null to remove the font attribute
* from the document's style
*/
private void updateFont(Font font) {
StyledDocument doc = (StyledDocument)getComponent().getDocument();
Style style = doc.getStyle(StyleContext.DEFAULT_STYLE);
if (style == null) {
return;
}
if (font == null) {
style.removeAttribute(StyleConstants.FontFamily);
style.removeAttribute(StyleConstants.FontSize);
style.removeAttribute(StyleConstants.Bold);
style.removeAttribute(StyleConstants.Italic);
} else {
StyleConstants.setFontFamily(style, font.getName());
StyleConstants.setFontSize(style, font.getSize());
StyleConstants.setBold(style, font.isBold());
StyleConstants.setItalic(style, font.isItalic());
}
}
@Override
void paintBackground(SynthContext context, Graphics g, JComponent c) {
((SeaGlassContext)context).getPainter().paintTextPaneBackground(context, g, 0, 0,
c.getWidth(), c.getHeight());
}
/**
* @inheritDoc
*/
@Override
public void paintBorder(SynthContext context, Graphics g, int x,
int y, int w, int h) {
((SeaGlassContext)context).getPainter().paintTextPaneBorder(context, g, x, y, w, h);
}
}<|fim▁end|> | * if the opaque property has not already been set by the client program.
* <li>
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from datetime import datetime
import json
import boto.cloudformation
from moto.core import BaseBackend
from .parsing import ResourceMap, OutputMap
from .utils import generate_stack_id
from .exceptions import ValidationError
class FakeStack(object):
def __init__(self, stack_id, name, template, parameters, region_name, notification_arns=None, tags=None):
self.stack_id = stack_id
self.name = name
self.template = template
self.template_dict = json.loads(self.template)
self.parameters = parameters
self.region_name = region_name
self.notification_arns = notification_arns if notification_arns else []
self.tags = tags if tags else {}
self.events = []
self._add_stack_event("CREATE_IN_PROGRESS", resource_status_reason="User Initiated")
self.description = self.template_dict.get('Description')
self.resource_map = self._create_resource_map()
self.output_map = self._create_output_map()
self._add_stack_event("CREATE_COMPLETE")
self.status = 'CREATE_COMPLETE'
def _create_resource_map(self):
resource_map = ResourceMap(self.stack_id, self.name, self.parameters, self.tags, self.region_name, self.template_dict)
resource_map.create()
return resource_map
def _create_output_map(self):
output_map = OutputMap(self.resource_map, self.template_dict)
output_map.create()
return output_map
def _add_stack_event(self, resource_status, resource_status_reason=None, resource_properties=None):
self.events.append(FakeEvent(
stack_id=self.stack_id,
stack_name=self.name,
logical_resource_id=self.name,
physical_resource_id=self.stack_id,
resource_type="AWS::CloudFormation::Stack",
resource_status=resource_status,
resource_status_reason=resource_status_reason,
resource_properties=resource_properties,
))
def _add_resource_event(self, logical_resource_id, resource_status, resource_status_reason=None, resource_properties=None):
# not used yet... feel free to help yourself
resource = self.resource_map[logical_resource_id]
self.events.append(FakeEvent(
stack_id=self.stack_id,
stack_name=self.name,
logical_resource_id=logical_resource_id,
physical_resource_id=resource.physical_resource_id,
resource_type=resource.type,
resource_status=resource_status,
resource_status_reason=resource_status_reason,
resource_properties=resource_properties,
))
@property
def stack_parameters(self):
return self.resource_map.resolved_parameters
@property
def stack_resources(self):
return self.resource_map.values()
@property
def stack_outputs(self):
return self.output_map.values()
def update(self, template):
self._add_stack_event("UPDATE_IN_PROGRESS", resource_status_reason="User Initiated")
self.template = template
self.resource_map.update(json.loads(template))
self.output_map = self._create_output_map()
self._add_stack_event("UPDATE_COMPLETE")
self.status = "UPDATE_COMPLETE"
def delete(self):
self._add_stack_event("DELETE_IN_PROGRESS", resource_status_reason="User Initiated")
self.resource_map.delete()
self._add_stack_event("DELETE_COMPLETE")
self.status = "DELETE_COMPLETE"
class FakeEvent(object):
def __init__(self, stack_id, stack_name, logical_resource_id, physical_resource_id, resource_type, resource_status, resource_status_reason=None, resource_properties=None):
self.stack_id = stack_id
self.stack_name = stack_name
self.logical_resource_id = logical_resource_id
self.physical_resource_id = physical_resource_id
self.resource_type = resource_type
self.resource_status = resource_status
self.resource_status_reason = resource_status_reason
self.resource_properties = resource_properties
self.timestamp = datetime.utcnow()
class CloudFormationBackend(BaseBackend):
def __init__(self):
self.stacks = {}
self.deleted_stacks = {}
def create_stack(self, name, template, parameters, region_name, notification_arns=None, tags=None):
stack_id = generate_stack_id(name)
new_stack = FakeStack(
stack_id=stack_id,
name=name,
template=template,
parameters=parameters,
region_name=region_name,<|fim▁hole|> return new_stack
def describe_stacks(self, name_or_stack_id):
stacks = self.stacks.values()
if name_or_stack_id:
for stack in stacks:
if stack.name == name_or_stack_id or stack.stack_id == name_or_stack_id:
return [stack]
if self.deleted_stacks:
deleted_stacks = self.deleted_stacks.values()
for stack in deleted_stacks:
if stack.stack_id == name_or_stack_id:
return [stack]
raise ValidationError(name_or_stack_id)
else:
return stacks
def list_stacks(self):
return self.stacks.values()
def get_stack(self, name_or_stack_id):
all_stacks = dict(self.deleted_stacks, **self.stacks)
if name_or_stack_id in all_stacks:
            # Lookup by stack id - deleted stacks included
return all_stacks[name_or_stack_id]
else:
# Lookup by stack name - undeleted stacks only
for stack in self.stacks.values():
if stack.name == name_or_stack_id:
return stack
def update_stack(self, name, template):
stack = self.get_stack(name)
stack.update(template)
return stack
def list_stack_resources(self, stack_name_or_id):
stack = self.get_stack(stack_name_or_id)
return stack.stack_resources
def delete_stack(self, name_or_stack_id):
if name_or_stack_id in self.stacks:
# Delete by stack id
stack = self.stacks.pop(name_or_stack_id, None)
stack.delete()
self.deleted_stacks[stack.stack_id] = stack
return self.stacks.pop(name_or_stack_id, None)
else:
# Delete by stack name
for stack in list(self.stacks.values()):
if stack.name == name_or_stack_id:
self.delete_stack(stack.stack_id)
cloudformation_backends = {}
for region in boto.cloudformation.regions():
cloudformation_backends[region.name] = CloudFormationBackend()<|fim▁end|> | notification_arns=notification_arns,
tags=tags,
)
self.stacks[stack_id] = new_stack |
<|file_name|>file_tools.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import codecs
import distutils.dir_util
import os
import shutil
import sys
def touch_file(file_path):
"""
Create a new empty file at file_path.
"""
parent_dir = os.path.abspath(os.path.join(file_path, os.pardir))
if not os.path.isdir(parent_dir):
os.makedirs(parent_dir)
with codecs.open(file_path, 'a'):
os.utime(file_path, None)
def copy_file(input_file, output_file, overwrite=False):
"""
Helper function to copy a file that adds an overwrite parameter.
"""
if os.path.isfile(output_file):
if overwrite:
print('File exists, overwriting')
shutil.copyfile(input_file, output_file)
else:
sys.exit('File exists, unable to continue: %s' % output_file)
else:
shutil.copyfile(input_file, output_file)
<|fim▁hole|> Helper function to copy a directory tree that adds an overwrite parameter.
"""
if os.path.isdir(output_dir):
if overwrite:
print('Directory exists, overwriting')
distutils.dir_util.copy_tree(input_dir, output_dir)
else:
sys.exit('Directory exists, unable to continue: %s' % output_dir)
else:
distutils.dir_util.copy_tree(input_dir, output_dir)
def get_file_paths_from_directory(dir_path):
"""
Walk a directory and create a list of all contained file_paths in all sub-directories.
"""
file_paths = []
for root, dirs, files in os.walk(dir_path):
for f in files:
file_paths.append(os.path.join(root, f))
return file_paths
def clean_dsstore(dir_path):
"""
Walk a directory and get rid of all those useless hidden .DS_Store files.
"""
for root, dirs, files in os.walk(dir_path):
for f in files:
if f == '.DS_Store':
os.remove(os.path.join(dir_path, root, f))<|fim▁end|> |
def copy_tree(input_dir, output_dir, overwrite=False):
""" |
<|file_name|>test_app.py<|end_file_name|><|fim▁begin|># coding: utf8
from __future__ import unicode_literals
from flask import abort, make_response, request
from flask_api.decorators import set_renderers
from flask_api import exceptions, renderers, status, FlaskAPI
import json
import unittest
app = FlaskAPI(__name__)
app.config['TESTING'] = True
class JSONVersion1(renderers.JSONRenderer):
media_type = 'application/json; api-version="1.0"'
class JSONVersion2(renderers.JSONRenderer):
media_type = 'application/json; api-version="2.0"'
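# The two renderer subclasses above differ only in media type; the
# content-negotiation test at the end of this module selects between them
# via the Accept header.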
@app.route('/set_status_and_headers/')
def set_status_and_headers():
headers = {'Location': 'http://example.com/456'}
return {'example': 'content'}, status.HTTP_201_CREATED, headers
@app.route('/set_headers/')
def set_headers():
headers = {'Location': 'http://example.com/456'}
return {'example': 'content'}, headers
@app.route('/make_response_view/')
def make_response_view():
response = make_response({'example': 'content'})
response.headers['Location'] = 'http://example.com/456'
return response
@app.route('/api_exception/')
def api_exception():
raise exceptions.PermissionDenied()
@app.route('/abort_view/')
def abort_view():
abort(status.HTTP_403_FORBIDDEN)
@app.route('/options/')
def options_view():<|fim▁hole|>
@app.route('/accepted_media_type/')
@set_renderers([JSONVersion2, JSONVersion1])
def accepted_media_type():
return {'accepted_media_type': str(request.accepted_media_type)}
class AppTests(unittest.TestCase):
def test_set_status_and_headers(self):
with app.test_client() as client:
response = client.get('/set_status_and_headers/')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.headers['Location'], 'http://example.com/456')
self.assertEqual(response.content_type, 'application/json')
expected = '{"example": "content"}'
self.assertEqual(response.get_data().decode('utf8'), expected)
def test_set_headers(self):
with app.test_client() as client:
response = client.get('/set_headers/')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.headers['Location'], 'http://example.com/456')
self.assertEqual(response.content_type, 'application/json')
expected = '{"example": "content"}'
self.assertEqual(response.get_data().decode('utf8'), expected)
def test_make_response(self):
with app.test_client() as client:
response = client.get('/make_response_view/')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.headers['Location'], 'http://example.com/456')
self.assertEqual(response.content_type, 'application/json')
expected = '{"example": "content"}'
self.assertEqual(response.get_data().decode('utf8'), expected)
def test_api_exception(self):
with app.test_client() as client:
response = client.get('/api_exception/')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.content_type, 'application/json')
expected = '{"message": "You do not have permission to perform this action."}'
self.assertEqual(response.get_data().decode('utf8'), expected)
def test_abort_view(self):
with app.test_client() as client:
response = client.get('/abort_view/')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_options_view(self):
with app.test_client() as client:
response = client.options('/options/')
# Errors if `response.response` is `None`
response.get_data()
def test_accepted_media_type_property(self):
with app.test_client() as client:
# Explicitly request the "api-version 1.0" renderer.
headers = {'Accept': 'application/json; api-version="1.0"'}
response = client.get('/accepted_media_type/', headers=headers)
data = json.loads(response.get_data().decode('utf8'))
expected = {'accepted_media_type': 'application/json; api-version="1.0"'}
self.assertEqual(data, expected)
# Request the default renderer, which is "api-version 2.0".
headers = {'Accept': '*/*'}
response = client.get('/accepted_media_type/', headers=headers)
data = json.loads(response.get_data().decode('utf8'))
expected = {'accepted_media_type': 'application/json; api-version="2.0"'}
self.assertEqual(data, expected)<|fim▁end|> | return {}
|
<|file_name|>MinaComponentBuilderFactory.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;<|fim▁hole|>import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.mina.MinaComponent;
/**
* Socket level networking using TCP or UDP with Apache Mina 2.x.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface MinaComponentBuilderFactory {
/**
* Mina (camel-mina)
* Socket level networking using TCP or UDP with Apache Mina 2.x.
*
* Category: networking,tcp,udp
* Since: 2.10
* Maven coordinates: org.apache.camel:camel-mina
*
* @return the dsl builder
*/
static MinaComponentBuilder mina() {
return new MinaComponentBuilderImpl();
}
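    // Usage sketch (reader's note, not part of the generated file; assumes a
    // CamelContext named "context"):
    //
    //     MinaComponentBuilderFactory.mina()
    //             .sync(true)
    //             .timeout(30000)
    //             .register(context, "mina");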
/**
* Builder for the Mina component.
*/
interface MinaComponentBuilder extends ComponentBuilder<MinaComponent> {
/**
* Whether or not to disconnect(close) from Mina session right after
* use. Can be used for both consumer and producer.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disconnect the value to set
* @return the dsl builder
*/
default MinaComponentBuilder disconnect(boolean disconnect) {
doSetProperty("disconnect", disconnect);
return this;
}
/**
* You can enable the Apache MINA logging filter. Apache MINA uses slf4j
* logging at INFO level to log all input and output.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param minaLogger the value to set
* @return the dsl builder
*/
default MinaComponentBuilder minaLogger(boolean minaLogger) {
doSetProperty("minaLogger", minaLogger);
return this;
}
/**
* Setting to set endpoint as one-way or request-response.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param sync the value to set
* @return the dsl builder
*/
default MinaComponentBuilder sync(boolean sync) {
doSetProperty("sync", sync);
return this;
}
/**
* You can configure the timeout that specifies how long to wait for a
* response from a remote server. The timeout unit is in milliseconds,
* so 60000 is 60 seconds.
*
* The option is a: <code>long</code> type.
*
* Default: 30000
* Group: common
*
* @param timeout the value to set
* @return the dsl builder
*/
default MinaComponentBuilder timeout(long timeout) {
doSetProperty("timeout", timeout);
return this;
}
/**
* Maximum amount of time it should take to send data to the MINA
* session. Default is 10000 milliseconds.
*
* The option is a: <code>long</code> type.
*
* Default: 10000
* Group: common
*
* @param writeTimeout the value to set
* @return the dsl builder
*/
default MinaComponentBuilder writeTimeout(long writeTimeout) {
doSetProperty("writeTimeout", writeTimeout);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default MinaComponentBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* If the clientMode is true, mina consumer will connect the address as
* a TCP client.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param clientMode the value to set
* @return the dsl builder
*/
default MinaComponentBuilder clientMode(boolean clientMode) {
doSetProperty("clientMode", clientMode);
return this;
}
/**
* If sync is enabled then this option dictates MinaConsumer if it
* should disconnect where there is no reply to send back.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer (advanced)
*
* @param disconnectOnNoReply the value to set
* @return the dsl builder
*/
default MinaComponentBuilder disconnectOnNoReply(
boolean disconnectOnNoReply) {
doSetProperty("disconnectOnNoReply", disconnectOnNoReply);
return this;
}
/**
* If sync is enabled this option dictates MinaConsumer which logging
* level to use when logging a there is no reply to send back.
*
* The option is a:
* <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: WARN
* Group: consumer (advanced)
*
* @param noReplyLogLevel the value to set
* @return the dsl builder
*/
default MinaComponentBuilder noReplyLogLevel(
org.apache.camel.LoggingLevel noReplyLogLevel) {
doSetProperty("noReplyLogLevel", noReplyLogLevel);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default MinaComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether to create the InetAddress once and reuse. Setting this to
* false allows to pickup DNS changes in the network.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer (advanced)
*
* @param cachedAddress the value to set
* @return the dsl builder
*/
default MinaComponentBuilder cachedAddress(boolean cachedAddress) {
doSetProperty("cachedAddress", cachedAddress);
return this;
}
/**
* Sessions can be lazily created to avoid exceptions, if the remote
* server is not up and running when the Camel producer is started.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer (advanced)
*
* @param lazySessionCreation the value to set
* @return the dsl builder
*/
default MinaComponentBuilder lazySessionCreation(
boolean lazySessionCreation) {
doSetProperty("lazySessionCreation", lazySessionCreation);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default MinaComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* To use the shared mina configuration.
*
* The option is a:
* <code>org.apache.camel.component.mina.MinaConfiguration</code> type.
*
* Group: advanced
*
* @param configuration the value to set
* @return the dsl builder
*/
default MinaComponentBuilder configuration(
org.apache.camel.component.mina.MinaConfiguration configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
* Number of worker threads in the worker pool for TCP and UDP.
*
* The option is a: <code>int</code> type.
*
* Default: 16
* Group: advanced
*
* @param maximumPoolSize the value to set
* @return the dsl builder
*/
default MinaComponentBuilder maximumPoolSize(int maximumPoolSize) {
doSetProperty("maximumPoolSize", maximumPoolSize);
return this;
}
/**
* Whether to use ordered thread pool, to ensure events are processed
* orderly on the same channel.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param orderedThreadPoolExecutor the value to set
* @return the dsl builder
*/
default MinaComponentBuilder orderedThreadPoolExecutor(
boolean orderedThreadPoolExecutor) {
doSetProperty("orderedThreadPoolExecutor", orderedThreadPoolExecutor);
return this;
}
/**
* Only used for TCP. You can transfer the exchange over the wire
* instead of just the body. The following fields are transferred: In
* body, Out body, fault body, In headers, Out headers, fault headers,
* exchange properties, exchange exception. This requires that the
* objects are serializable. Camel will exclude any non-serializable
* objects and log it at WARN level.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param transferExchange the value to set
* @return the dsl builder
*/
default MinaComponentBuilder transferExchange(boolean transferExchange) {
doSetProperty("transferExchange", transferExchange);
return this;
}
/**
* The mina component installs a default codec if both, codec is null
* and textline is false. Setting allowDefaultCodec to false prevents
* the mina component from installing a default codec as the first
* element in the filter chain. This is useful in scenarios where
* another filter must be the first in the filter chain, like the SSL
* filter.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: codec
*
* @param allowDefaultCodec the value to set
* @return the dsl builder
*/
default MinaComponentBuilder allowDefaultCodec(boolean allowDefaultCodec) {
doSetProperty("allowDefaultCodec", allowDefaultCodec);
return this;
}
/**
* To use a custom minda codec implementation.
*
* The option is a:
* <code>org.apache.mina.filter.codec.ProtocolCodecFactory</code> type.
*
* Group: codec
*
* @param codec the value to set
* @return the dsl builder
*/
default MinaComponentBuilder codec(
org.apache.mina.filter.codec.ProtocolCodecFactory codec) {
doSetProperty("codec", codec);
return this;
}
/**
* To set the textline protocol decoder max line length. By default the
* default value of Mina itself is used which are 1024.
*
* The option is a: <code>int</code> type.
*
* Default: 1024
* Group: codec
*
* @param decoderMaxLineLength the value to set
* @return the dsl builder
*/
default MinaComponentBuilder decoderMaxLineLength(
int decoderMaxLineLength) {
doSetProperty("decoderMaxLineLength", decoderMaxLineLength);
return this;
}
/**
* To set the textline protocol encoder max line length. By default the
* default value of Mina itself is used which are Integer.MAX_VALUE.
*
* The option is a: <code>int</code> type.
*
* Default: -1
* Group: codec
*
* @param encoderMaxLineLength the value to set
* @return the dsl builder
*/
default MinaComponentBuilder encoderMaxLineLength(
int encoderMaxLineLength) {
doSetProperty("encoderMaxLineLength", encoderMaxLineLength);
return this;
}
/**
* You can configure the encoding (a charset name) to use for the TCP
* textline codec and the UDP protocol. If not provided, Camel will use
* the JVM default Charset.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: codec
*
* @param encoding the value to set
* @return the dsl builder
*/
default MinaComponentBuilder encoding(java.lang.String encoding) {
doSetProperty("encoding", encoding);
return this;
}
/**
* You can set a list of Mina IoFilters to use.
*
* The option is a:
* <code>java.util.List&lt;org.apache.mina.core.filterchain.IoFilter&gt;</code> type.
*
* Group: codec
*
* @param filters the value to set
* @return the dsl builder
*/
default MinaComponentBuilder filters(
java.util.List<org.apache.mina.core.filterchain.IoFilter> filters) {
doSetProperty("filters", filters);
return this;
}
/**
* Only used for TCP. If no codec is specified, you can use this flag to
* indicate a text line based codec; if not specified or the value is
* false, then Object Serialization is assumed over TCP.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: codec
*
* @param textline the value to set
* @return the dsl builder
*/
default MinaComponentBuilder textline(boolean textline) {
doSetProperty("textline", textline);
return this;
}
/**
* Only used for TCP and if textline=true. Sets the text line delimiter
* to use. If none provided, Camel will use DEFAULT. This delimiter is
* used to mark the end of text.
*
* The option is a:
* <code>org.apache.camel.component.mina.MinaTextLineDelimiter</code> type.
*
* Group: codec
*
* @param textlineDelimiter the value to set
* @return the dsl builder
*/
default MinaComponentBuilder textlineDelimiter(
org.apache.camel.component.mina.MinaTextLineDelimiter textlineDelimiter) {
doSetProperty("textlineDelimiter", textlineDelimiter);
return this;
}
/**
* Whether to auto start SSL handshake.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: security
*
* @param autoStartTls the value to set
* @return the dsl builder
*/
default MinaComponentBuilder autoStartTls(boolean autoStartTls) {
doSetProperty("autoStartTls", autoStartTls);
return this;
}
/**
* To configure SSL security.
*
* The option is a:
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: security
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default MinaComponentBuilder sslContextParameters(
org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* Enable usage of global SSL context parameters.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param useGlobalSslContextParameters the value to set
* @return the dsl builder
*/
default MinaComponentBuilder useGlobalSslContextParameters(
boolean useGlobalSslContextParameters) {
doSetProperty("useGlobalSslContextParameters", useGlobalSslContextParameters);
return this;
}
}
class MinaComponentBuilderImpl
extends
AbstractComponentBuilder<MinaComponent>
implements
MinaComponentBuilder {
@Override
protected MinaComponent buildConcreteComponent() {
return new MinaComponent();
}
private org.apache.camel.component.mina.MinaConfiguration getOrCreateConfiguration(
org.apache.camel.component.mina.MinaComponent component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.mina.MinaConfiguration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "disconnect": getOrCreateConfiguration((MinaComponent) component).setDisconnect((boolean) value); return true;
case "minaLogger": getOrCreateConfiguration((MinaComponent) component).setMinaLogger((boolean) value); return true;
case "sync": getOrCreateConfiguration((MinaComponent) component).setSync((boolean) value); return true;
case "timeout": getOrCreateConfiguration((MinaComponent) component).setTimeout((long) value); return true;
case "writeTimeout": getOrCreateConfiguration((MinaComponent) component).setWriteTimeout((long) value); return true;
case "bridgeErrorHandler": ((MinaComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "clientMode": getOrCreateConfiguration((MinaComponent) component).setClientMode((boolean) value); return true;
case "disconnectOnNoReply": getOrCreateConfiguration((MinaComponent) component).setDisconnectOnNoReply((boolean) value); return true;
case "noReplyLogLevel": getOrCreateConfiguration((MinaComponent) component).setNoReplyLogLevel((org.apache.camel.LoggingLevel) value); return true;
case "lazyStartProducer": ((MinaComponent) component).setLazyStartProducer((boolean) value); return true;
case "cachedAddress": getOrCreateConfiguration((MinaComponent) component).setCachedAddress((boolean) value); return true;
case "lazySessionCreation": getOrCreateConfiguration((MinaComponent) component).setLazySessionCreation((boolean) value); return true;
case "autowiredEnabled": ((MinaComponent) component).setAutowiredEnabled((boolean) value); return true;
case "configuration": ((MinaComponent) component).setConfiguration((org.apache.camel.component.mina.MinaConfiguration) value); return true;
case "maximumPoolSize": getOrCreateConfiguration((MinaComponent) component).setMaximumPoolSize((int) value); return true;
case "orderedThreadPoolExecutor": getOrCreateConfiguration((MinaComponent) component).setOrderedThreadPoolExecutor((boolean) value); return true;
case "transferExchange": getOrCreateConfiguration((MinaComponent) component).setTransferExchange((boolean) value); return true;
case "allowDefaultCodec": getOrCreateConfiguration((MinaComponent) component).setAllowDefaultCodec((boolean) value); return true;
case "codec": getOrCreateConfiguration((MinaComponent) component).setCodec((org.apache.mina.filter.codec.ProtocolCodecFactory) value); return true;
case "decoderMaxLineLength": getOrCreateConfiguration((MinaComponent) component).setDecoderMaxLineLength((int) value); return true;
case "encoderMaxLineLength": getOrCreateConfiguration((MinaComponent) component).setEncoderMaxLineLength((int) value); return true;
case "encoding": getOrCreateConfiguration((MinaComponent) component).setEncoding((java.lang.String) value); return true;
case "filters": getOrCreateConfiguration((MinaComponent) component).setFilters((java.util.List) value); return true;
case "textline": getOrCreateConfiguration((MinaComponent) component).setTextline((boolean) value); return true;
case "textlineDelimiter": getOrCreateConfiguration((MinaComponent) component).setTextlineDelimiter((org.apache.camel.component.mina.MinaTextLineDelimiter) value); return true;
case "autoStartTls": getOrCreateConfiguration((MinaComponent) component).setAutoStartTls((boolean) value); return true;
case "sslContextParameters": getOrCreateConfiguration((MinaComponent) component).setSslContextParameters((org.apache.camel.support.jsse.SSLContextParameters) value); return true;
case "useGlobalSslContextParameters": ((MinaComponent) component).setUseGlobalSslContextParameters((boolean) value); return true;
default: return false;
}
}
}
}<|fim▁end|> |
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder; |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod color;
pub mod scale;
pub mod canvas;
pub mod decorator;
pub mod sketch;
pub use self::color::Color;
pub use self::scale::Scale;
pub use self::canvas::Canvas;<|fim▁hole|>pub use self::decorator::Decorator;<|fim▁end|> | pub use self::sketch::Sketch; |
<|file_name|>MapPrismTest.java<|end_file_name|><|fim▁begin|>package com.jnape.palatable.lambda.optics.prisms;
import org.junit.Test;
import java.util.HashMap;
import java.util.LinkedHashMap;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;
import static testsupport.assertion.PrismAssert.assertPrismLawfulness;
public class MapPrismTest {
@Test
public void valueAtWithConstructor() {
assertPrismLawfulness(MapPrism.valueAt(LinkedHashMap::new, "foo"),
asList(new LinkedHashMap<>(),
new LinkedHashMap<>(singletonMap("foo", 1)),
new LinkedHashMap<>(singletonMap("bar", 2))),
singleton(1));
}
@Test
public void valueAtWithoutConstructor() {
assertPrismLawfulness(MapPrism.valueAt("foo"),
asList(new HashMap<>(),<|fim▁hole|> new HashMap<>(singletonMap("bar", 2))),
singleton(1));
}
}<|fim▁end|> | new HashMap<>(singletonMap("foo", 1)), |
<|file_name|>ObjectSettings.java<|end_file_name|><|fim▁begin|>package addonloader.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;
/**
 * Simple wrapper around {@link Properties},
 * allowing easy access to typed values.
* @author Enginecrafter77
*/
public class ObjectSettings extends Properties{
private static final long serialVersionUID = -8939834947658913650L;
private final File path;
public ObjectSettings(File path) throws FileNotFoundException, IOException
{
this.path = path;
this.load(new FileReader(path));
}
public ObjectSettings(String path) throws FileNotFoundException, IOException
{
this(new File(path));
}
<|fim▁hole|> }
public int getInteger(String key, int def)
{
return Integer.parseInt(this.getProperty(key, String.valueOf(def)));
}
public float getFloat(String key, float def)
{
return Float.parseFloat(this.getProperty(key, String.valueOf(def)));
}
public void set(String key, Object val)
{
this.setProperty(key, val.toString());
}
public void store(String comment)
{
try
{
this.store(new FileOutputStream(path), comment);
}
catch(IOException e)
{
e.printStackTrace();
}
}
}<|fim▁end|> |
public boolean getBoolean(String key, boolean def)
{
return Boolean.parseBoolean(this.getProperty(key, String.valueOf(def)));
|
<|file_name|>desktop.py<|end_file_name|><|fim▁begin|># Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
import subprocess
from typing import Sequence
from pants.util.osutil import get_os_name
class IdeaNotFoundException(Exception):
"""Could not find Idea executable."""
class OpenError(Exception):
"""Indicates an error opening a file in a desktop application."""
def _mac_open_with_idea(file_: str, lookup_paths: list) -> None:
ideaPath = next((path for path in lookup_paths if os.path.isdir(path)), None)
if ideaPath is not None:
subprocess.call(["open", "-a", ideaPath, file_])
else:
raise IdeaNotFoundException(
"Could not find Idea executable in the following locations:\n{}".format(
"\n".join(lookup_paths)
)
)
def _mac_open(files: Sequence[str]) -> None:
subprocess.call(["open"] + list(files))
def _linux_open_with_idea(file_: str, lookup_paths: list) -> None:<|fim▁hole|> "goal.".format(cmd)
)
subprocess.Popen(["nohup", cmd, file_])
def _linux_open(files: Sequence[str]) -> None:
cmd = "xdg-open"
if not _cmd_exists(cmd):
raise OpenError(
"The program '{}' isn't in your PATH. Please install and re-run this "
"goal.".format(cmd)
)
for f in list(files):
subprocess.call([cmd, f])
# From: http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def _cmd_exists(cmd: str) -> bool:
return (
subprocess.call(
["/usr/bin/which", cmd], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
== 0
)
_OPENER_BY_OS = {"darwin": _mac_open, "linux": _linux_open}
_IDEA_BY_OS = {"darwin": _mac_open_with_idea, "linux": _linux_open_with_idea}
def idea_open(file_: str, lookup_paths: list) -> None:
"""Attempts to open the given files using the preferred desktop viewer or editor.
:raises :class:`OpenError`: if there is a problem opening any of the files.
"""
if file_:
osname = get_os_name()
opener = _IDEA_BY_OS.get(osname)
if opener:
opener(file_, lookup_paths)
else:
raise OpenError("Open currently not supported for " + osname)
def ui_open(*files: str) -> None:
"""Attempts to open the given files using the preferred desktop viewer or editor.
:raises :class:`OpenError`: if there is a problem opening any of the files.
"""
if files:
osname = get_os_name()
opener = _OPENER_BY_OS.get(osname)
if opener:
opener(files)
else:
raise OpenError("Open currently not supported for " + osname)<|fim▁end|> | cmd = "idea"
if not _cmd_exists(cmd):
raise OpenError(
"The program '{}' isn't in your PATH. Please install and re-run this " |
<|file_name|>shared.ts<|end_file_name|><|fim▁begin|>import { lighten } from 'polished';
import { css } from '@emotion/react';
import styled from '@emotion/styled';
import { colors } from './colors';
export const outer = css`
position: relative;
padding: 0 5vw;
`;
// Centered content container blocks
export const inner = css`
margin: 0 auto;
max-width: 1040px;
width: 100%;
`;
export const SiteNavMain = css`
position: fixed;
top: 0;
right: 0;
left: 0;
z-index: 1000;
/* background: color(var(--darkgrey) l(-5%)); */
background: ${lighten('-0.05', colors.darkgrey)};
`;
export const SiteMain = css`
flex-grow: 1;
@media (prefers-color-scheme: dark) {
background: ${colors.darkmode};
}
`;
export const SiteTitle = styled.h1`
z-index: 10;
margin: 0 0 0 -2px;
padding: 0;
font-size: 5rem;
line-height: 1em;
font-weight: 600;
@media (max-width: 500px) {
font-size: 4.2rem;
}
`;
export const SiteDescription = styled.h2`
z-index: 10;
margin: 0;
padding: 5px 0;
font-size: 2.1rem;
line-height: 1.4em;
font-weight: 400;
opacity: 0.8;
@media (max-width: 500px) {
font-size: 1.8rem;
}
`;
export const Posts = css`
overflow-x: hidden;
`;
export const PostFeed = css`
position: relative;
display: flex;
flex-wrap: wrap;
margin: 0 -20px;
padding: 50px 0 0;
background: #fff;
/* Special Template Styles */
padding: 40px 0 5vw;
border-top-left-radius: 3px;
border-top-right-radius: 3px;
@media (prefers-color-scheme: dark) {
background: ${colors.darkmode};
}
`;
export const SocialLink = css`
display: inline-block;
margin: 0;
padding: 10px;
opacity: 0.8;
:hover {
opacity: 1;
}
svg {
height: 1.8rem;
fill: #fff;
}
`;
export const SocialLinkFb = css`
svg {
height: 1.6rem;
}
`;
export const SiteHeader = css``;
export const SiteHeaderContent = styled.div`
z-index: 100;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
padding: 6vw 3vw;
min-height: 200px;
max-height: 340px;
`;
export const SiteHeaderStyles = css`
position: relative;
/* margin-top: 64px; */
padding-bottom: 12px;
color: #fff;
/* background: color(var(--darkgrey) l(-5%)) no-repeat center center; */
background: ${lighten('-0.05', colors.darkgrey)} no-repeat center center;
background-size: cover;
:before {
content: '';
position: absolute;
top: 0;
right: 0;
bottom: 0;
left: 0;
z-index: 10;
display: block;
background: rgba(0, 0, 0, 0.18);
}
:after {
content: '';
position: absolute;
top: 0;
right: 0;
bottom: auto;
left: 0;
z-index: 10;
display: block;
height: 140px;
background: linear-gradient(rgba(0, 0, 0, 0.15), rgba(0, 0, 0, 0));
}
@media (prefers-color-scheme: dark) {
:before {
background: rgba(0, 0, 0, 0.6);
}
}
`;
export const AuthorProfileImage = css`
flex: 0 0 60px;
margin: 0;
width: 60px;
height: 60px;
border: none;
@media (prefers-color-scheme: dark) {
box-shadow: 0 0 0 6px hsla(0, 0%, 100%, 0.04);
background: ${colors.darkmode};
}
`;
// tag and author post lists
export const SiteArchiveHeader = css`
.site-header-content {
position: relative;
align-items: stretch;
padding: 12vw 0 20px;
min-height: 200px;
max-height: 600px;
}
`;
export const SiteHeaderBackground = css`
margin-top: 64px;
`;
export const ResponsiveHeaderBackground = styled.div<{ backgroundImage?: string }>`
${p =>
p.backgroundImage
&& `
position: relative;
margin-top: 64px;
padding-bottom: 12px;
color: #fff;
background-size: cover;
/* background: color(var(--darkgrey) l(-5%)) no-repeat center center; */
background: #090a0b no-repeat 50%;
background-image: url(${p.backgroundImage});
:before {
content: '';
position: absolute;
top: 0;
right: 0;
bottom: 0;
left: 0;
z-index: 10;
display: block;
background: rgba(0, 0, 0, 0.18);
}
:after {
content: '';
position: absolute;
top: 0;
right: 0;
bottom: auto;
left: 0;
z-index: 10;
display: block;
height: 140px;
background: linear-gradient(rgba(0, 0, 0, 0.15), rgba(0, 0, 0, 0));
}
@media (prefers-color-scheme: dark) {
&:before {
background: rgba(0, 0, 0, 0.6);
}
}
`}
${p =>
!p.backgroundImage
&& `
padding-top: 0;
padding-bottom: 0;
/* color: var(--darkgrey); */
color: ${colors.darkgrey};
background: #fff;
opacity: 1;
.site-description {
/* color: var(--midgrey); */
color: ${colors.midgrey};
opacity: 1;
}
.site-header-content {
padding: 5vw 0 10px;
/* border-bottom: 1px solid color(var(--lightgrey) l(+12%)); */
border-bottom: 1px solid ${lighten('0.12', colors.lightgrey)};
}
.author-bio {
/* color: var(--midgrey); */
color: ${colors.midgrey};
opacity: 1;
}
.author-meta {
/* color: var(--midgrey); */
color: ${colors.midgrey};
opacity: 1;
}
.author-social-link a {
/* color: var(--darkgrey); */
color: ${colors.darkgrey};
}
.author-social-link a:before {
/* color: var(--midgrey); */
color: ${colors.midgrey};
}
.author-location + .author-stats:before,
.author-stats + .author-social-link:before,
.author-social-link + .author-social-link:before {
/* color: var(--midgrey); */
color: ${colors.midgrey};
}
.author-header {
padding-bottom: 20px;
}
@media (max-width: 500px) {
.site-header-content {
flex-direction: column;
align-items: center;
min-height: unset;
}
.site-title {
font-size: 4.2rem;
text-align: center;
}
.site-header-content {
padding: 12vw 0 20px;
}
.author-header {
padding-bottom: 10px;
}
}
@media (prefers-color-scheme: dark) {
color: rgba(255, 255, 255, 0.9);
/* background: var(--darkmode); */
background: ${colors.darkmode};
.site-header-content {
/* border-bottom-color: color(var(--darkmode) l(+15%)); */
/* border-bottom-color: ${lighten('0.15', colors.darkmode)}; */
border-bottom-color: #272a30;
}
.author-social-link a {
color: rgba(255, 255, 255, 0.75);
}
}
`}<|fim▁hole|>export const NoImage = css`
.no-image {
padding-top: 0;
padding-bottom: 0;
/* color: var(--darkgrey); */
color: ${colors.darkgrey};
background: #fff;
opacity: 1;
}
.no-image .site-description {
/* color: var(--midgrey); */
color: ${colors.midgrey};
opacity: 1;
}
.no-image .site-header-content {
padding: 5vw 0 10px;
/* border-bottom: 1px solid color(var(--lightgrey) l(+12%)); */
border-bottom: 1px solid ${lighten('0.12', colors.lightgrey)};
}
.no-image .author-bio {
/* color: var(--midgrey); */
color: ${colors.midgrey};
opacity: 1;
}
.no-image .author-meta {
/* color: var(--midgrey); */
color: ${colors.midgrey};
opacity: 1;
}
.no-image .author-social-link a {
/* color: var(--darkgrey); */
color: ${colors.darkgrey};
}
.no-image .author-social-link a:before {
/* color: var(--midgrey); */
color: ${colors.midgrey};
}
.no-image .author-location + .author-stats:before,
.no-image .author-stats + .author-social-link:before,
.no-image .author-social-link + .author-social-link:before {
/* color: var(--midgrey); */
color: ${colors.midgrey};
}
@media (max-width: 500px) {
.site-header-content {
flex-direction: column;
align-items: center;
min-height: unset;
}
.site-title {
font-size: 4.2rem;
text-align: center;
}
.no-image .site-header-content {
padding: 12vw 0 20px;
}
}
@media (prefers-color-scheme: dark) {
.no-image {
color: rgba(255, 255, 255, 0.9);
/* background: var(--darkmode); */
background: ${colors.darkmode};
}
.no-image .site-header-content {
/* border-bottom-color: color(var(--darkmode) l(+15%)); */
border-bottom-color: ${lighten('0.15', colors.darkmode)};
}
.no-image .author-social-link a {
color: rgba(255, 255, 255, 0.75);
}
}
`;<|fim▁end|> | `;
|
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|>"""
Test the views of jurisdiction models
"""
# Django
from django.test import TestCase
# Third Party
from nose.tools import assert_is_not, eq_
# MuckRock
from muckrock.core.test_utils import http_get_response
from muckrock.jurisdiction import factories, views
class TestExemptionDetailView(TestCase):
"""The exemption detail view provides information about the exemption at a standalone url."""
def setUp(self):
self.view = views.ExemptionDetailView.as_view()
def test_ok(self):
"""The view should return a 200 OK status."""
exemption = factories.ExemptionFactory()
url = exemption.get_absolute_url()
kwargs = exemption.jurisdiction.get_slugs()
kwargs.update({"slug": exemption.slug, "pk": exemption.pk})<|fim▁hole|> def test_unique_for_jurisdiction(self):
"""Two exemptions may have the same name,
as long as they belong to different jurisdictions."""
exemption = factories.ExemptionFactory()
url = exemption.get_absolute_url()
kwargs = exemption.jurisdiction.get_slugs()
kwargs.update({"slug": exemption.slug, "pk": exemption.pk})
another_jurisdiction = factories.StateJurisdictionFactory(
parent=exemption.jurisdiction.parent
)
assert_is_not(exemption.jurisdiction, another_jurisdiction)
factories.ExemptionFactory(jurisdiction=another_jurisdiction)
response = http_get_response(url, self.view, **kwargs)
eq_(response.status_code, 200)
def test_local_exemptions(self):
"""An exemption at the local level should return 200."""
local = factories.LocalJurisdictionFactory()
exemption = factories.ExemptionFactory(jurisdiction=local)
url = exemption.get_absolute_url()
kwargs = exemption.jurisdiction.get_slugs()
kwargs.update({"slug": exemption.slug, "pk": exemption.pk})
response = http_get_response(url, self.view, **kwargs)
eq_(response.status_code, 200)
def test_state_exemptions(self):
"""An exemption at the state level should return 200."""
state = factories.StateJurisdictionFactory()
exemption = factories.ExemptionFactory(jurisdiction=state)
url = exemption.get_absolute_url()
kwargs = exemption.jurisdiction.get_slugs()
kwargs.update({"slug": exemption.slug, "pk": exemption.pk})
response = http_get_response(url, self.view, **kwargs)
eq_(response.status_code, 200)
def test_federal_exemptions(self):
"""An exemption at the federal level should return 200."""
fed = factories.FederalJurisdictionFactory()
exemption = factories.ExemptionFactory(jurisdiction=fed)
url = exemption.get_absolute_url()
kwargs = exemption.jurisdiction.get_slugs()
kwargs.update({"slug": exemption.slug, "pk": exemption.pk})
response = http_get_response(url, self.view, **kwargs)
eq_(response.status_code, 200)<|fim▁end|> |
response = http_get_response(url, self.view, **kwargs)
eq_(response.status_code, 200)
|
<|file_name|>service-585.service.spec.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license<|fim▁hole|>import { Service585Service } from './service-585.service';
describe('Service585Service', () => {
beforeEach(() => {
TestBed.configureTestingModule({
providers: [Service585Service]
});
});
it('should ...', inject([Service585Service], (service: Service585Service) => {
expect(service).toBeTruthy();
}));
});<|fim▁end|> | */
import { TestBed, inject } from '@angular/core/testing';
|
<|file_name|>protein.py<|end_file_name|><|fim▁begin|>from . import sqldb
class aminoAcid():
def __init__(self,abbr1,abbr3,name):
self.abbr3 = abbr3<|fim▁hole|> def __str__(self):
return self.name
def __repr__(self):
return self.name
def getOne(self):
return self.abbr1
def getThree(self):
return self.abbr3
class aminoAcidDB():
def __init__(self):
self.db = sqldb.sqliteDB('bioChemData/data.sql','protein')
def getAA3(self,abbr3):
abbr1 = self.db.getItem(abbr3,'one')
name = self.db.getItem(abbr3,'name')
return aminoAcid(abbr1,abbr3,name)
class translateDB():
def __init__(self):
self.db = sqldb.sqliteDB('bioChemData/data.sql','translate')
def getAA3(self,codon):
return self.db.getItem(codon,'protein')
def codonTranslate(codon,codonDB,aaDB):
return aaDB.getAA3(codonDB.getAA3(codon))
def nucleotideTranslation(posStrand):
pointer = 0
result = ''
lastAA = 'M'
adb = aminoAcidDB()
cdb = translateDB()
while posStrand[pointer:pointer+3] != 'ATG' and pointer <= len(posStrand)-3:
pointer += 1
while pointer <= len(posStrand)-3 and lastAA != 'X':
lastAA = adb.getAA3(cdb.getAA3(posStrand[pointer:pointer+3])).getOne()
result += lastAA
pointer += 3
return result<|fim▁end|> | self.abbr1 = abbr1
self.name = name |
<|file_name|>bagImageDemo.js<|end_file_name|><|fim▁begin|>/**
* Created by liushuo on 17/2/20.
*/
import React , {Component} from 'react';
import {AppRegistry , ListView , Text , View, StyleSheet,TouchableOpacity, Image, Dimensions} from 'react-native';
let badgeDatas = require('../Json/BadgeData.json')
let {width,height,scale} = Dimensions.get("window");
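// layout constants: 3 badges per row; hM and vM are the horizontal / vertical margins between items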
let cols = 3;
let bWidth = 100;
let hM = (width - 3 * bWidth) / (cols + 1)
let vM = 25;
export default class BagImageDemo extends Component {
renderAllBadge(){
let allBadgeDatas = [];
for (let i = 0; i<badgeDatas.data.length;i++){
let data = badgeDatas.data[i];
allBadgeDatas.push(
<View key={i} style={styles.outViewStyle}>
<Image style={styles.imageStyle} source={{uri:data.icon}}></Image>
<Text style={styles.bottomTextsStyle}>{data.title}</Text>
</View>
);
}
return allBadgeDatas;
}
render() {
return (
<View style={styles.contain}>
                {/* return all badge items */}
{this.renderAllBadge()}
</View>
)
}
}
const styles = StyleSheet.create({
contain:{
backgroundColor:'#F5FCFF',
flexDirection:"row",
flexWrap:'wrap',<|fim▁hole|> },
outViewStyle:{
width:bWidth,
height: bWidth,
marginLeft:hM,
marginTop:vM,
backgroundColor:'red',
alignItems:"center"
},
imageStyle:{
width:80,
height:80
},
bottomTextsStyle:{
fontSize:12
}
});<|fim▁end|> | |
<|file_name|>UmbracoClientManager.js<|end_file_name|><|fim▁begin|>/// <reference path="/umbraco_client/Application/NamespaceManager.js" />
/// <reference path="/umbraco_client/Application/HistoryManager.js" />
/// <reference path="/umbraco_client/ui/jquery.js" />
/// <reference path="/umbraco_client/Tree/UmbracoTree.js" />
/// <reference name="MicrosoftAjax.js"/>
Umbraco.Sys.registerNamespace("Umbraco.Application");
(function($) {
Umbraco.Application.ClientManager = function() {
/// <summary>
/// A class which ensures that all calls made to the objects that it owns are done in the context
/// of the main Umbraco application window.
/// </summary>
return {
_isDirty: false,
_isDebug: false,
_mainTree: null,
_appActions: null,
_historyMgr: null,
_rootPath: "/umbraco", //this is the default
_modal: new Array(), //track all modal window objects (they get stacked)
historyManager: function() {
if (!this._historyMgr) {
this._historyMgr = new Umbraco.Controls.HistoryManager();
}
return this._historyMgr;
},
setUmbracoPath: function(strPath) {
/// <summary>
/// sets the Umbraco root path folder
/// </summary>
this._debug("setUmbracoPath: " + strPath);
this._rootPath = strPath;
},
mainWindow: function() {
/// <summary>
/// Returns a reference to the main frame of the application
/// </summary>
return top;
},
mainTree: function() {
/// <summary>
/// Returns a reference to the main UmbracoTree API object.
/// Sometimes an Umbraco page will need to be opened without being contained in the iFrame from the main window
                /// so this method will construct a false tree to be returned in that case so as to avoid errors.
/// </summary>
/// <returns type="Umbraco.Controls.UmbracoTree" />
if (this._mainTree == null) {
this._mainTree = top.UmbClientMgr.mainTree();
}
return this._mainTree;
},
appActions: function() {
/// <summary>
/// Returns a reference to the application actions object
/// </summary>
//if the main window has no actions, we'll create some
if (this._appActions == null) {
if (typeof this.mainWindow().appActions == 'undefined') {
this._appActions = new Umbraco.Application.Actions();
}
else this._appActions = this.mainWindow().appActions;
}
return this._appActions;
},
uiKeys: function() {
/// <summary>
                /// Returns a reference to the main window's uiKeys object for globalization
/// </summary>
//TODO: If there is no main window, we need to go retrieve the appActions from the server!
return this.mainWindow().uiKeys;
},
// windowMgr: function()
// return null;
// },
contentFrameAndSection: function(app, rightFrameUrl) {
//this.appActions().shiftApp(app, this.uiKeys()['sections_' + app]);
var self = this;
self.mainWindow().UmbClientMgr.historyManager().addHistory(app, true);
window.setTimeout(function() {
self.mainWindow().UmbClientMgr.contentFrame(rightFrameUrl);
}, 200);
},
contentFrame: function (strLocation) {
/// <summary>
/// This will return the reference to the right content frame if strLocation is null or empty,
                /// or set the right content frame's location to the one specified by strLocation.
/// </summary>
this._debug("contentFrame: " + strLocation);
if (strLocation == null || strLocation == "") {
if (typeof this.mainWindow().right != "undefined") {
return this.mainWindow().right;
}
else {
return this.mainWindow(); //return the current window if the content frame doesn't exist in the current context
}
}
else {
//its a hash change so process that like angular
if (strLocation.substr(0, 1) !== "#") {
if (strLocation.substr(0, 1) != "/") {
//if the path doesn't start with "/" or with the root path then
//prepend the root path
strLocation = this._rootPath + "/" + strLocation;
}
else if (strLocation.length >= this._rootPath.length
&& strLocation.substr(0, this._rootPath.length) != this._rootPath) {
strLocation = this._rootPath + "/" + strLocation;
}
}
this._debug("contentFrame: parsed location: " + strLocation);
if (!this.mainWindow().UmbClientMgr) {
window.setTimeout(function() {
var self = this;
self.mainWindow().location.href = strLocation;
}, 200);
}
else {
this.mainWindow().UmbClientMgr.contentFrame(strLocation);
}
}
},
reloadContentFrameUrlIfPathLoaded: function (url) {
var contentFrame;
if (typeof this.mainWindow().right != "undefined") {
contentFrame = this.mainWindow().right;
}
else {
contentFrame = this.mainWindow();
}
var currentPath = contentFrame.location.pathname + (contentFrame.location.search ? contentFrame.location.search : "");
if (currentPath == url) {
contentFrame.location.reload();
}
},
/** This is used to launch an angular based modal window instead of the legacy window */
openAngularModalWindow: function (options) {
if (!this.mainWindow().UmbClientMgr) {
throw "An angular modal window can only be launched when the modal is running within the main Umbraco application";
}
else {
this.mainWindow().UmbClientMgr.openAngularModalWindow.apply(this.mainWindow().UmbClientMgr, [options]);
}
},
/** This is used to launch an angular based modal window instead of the legacy window */
rootScope: function () {
if (!this.mainWindow().UmbClientMgr) {
throw "An angular modal window can only be launched when the modal is running within the main Umbraco application";
}
else {
return this.mainWindow().UmbClientMgr.rootScope();
}
},
openModalWindow: function(url, name, showHeader, width, height, top, leftOffset, closeTriggers, onCloseCallback) {
//need to create the modal on the top window if the top window has a client manager, if not, create it on the current window
//if this is the top window, or if the top window doesn't have a client manager, create the modal in this manager
if (window == this.mainWindow() || !this.mainWindow().UmbClientMgr) {<|fim▁hole|> this._modal.push(m);
m.open(url, name, showHeader, width, height, top, leftOffset, closeTriggers, onCloseCallback);
}
else {
//if the main window has a client manager, then call the main window's open modal method whilst keeping the context of it's manager.
if (this.mainWindow().UmbClientMgr) {
this.mainWindow().UmbClientMgr.openModalWindow.apply(this.mainWindow().UmbClientMgr,
[url, name, showHeader, width, height, top, leftOffset, closeTriggers, onCloseCallback]);
}
else {
return; //exit recurse.
}
}
},
openModalWindowForContent: function (jQueryElement, name, showHeader, width, height, top, leftOffset, closeTriggers, onCloseCallback) {
//need to create the modal on the top window if the top window has a client manager, if not, create it on the current window
//if this is the top window, or if the top window doesn't have a client manager, create the modal in this manager
if (window == this.mainWindow() || !this.mainWindow().UmbClientMgr) {
var m = new Umbraco.Controls.ModalWindow();
this._modal.push(m);
m.show(jQueryElement, name, showHeader, width, height, top, leftOffset, closeTriggers, onCloseCallback);
}
else {
//if the main window has a client manager, then call the main window's open modal method whilst keeping the context of it's manager.
if (this.mainWindow().UmbClientMgr) {
this.mainWindow().UmbClientMgr.openModalWindowForContent.apply(this.mainWindow().UmbClientMgr,
[jQueryElement, name, showHeader, width, height, top, leftOffset, closeTriggers, onCloseCallback]);
}
else {
return; //exit recurse.
}
}
},
closeModalWindow: function(rVal) {
/// <summary>
/// will close the latest open modal window.
/// if an rVal is passed in, then this will be sent to the onCloseCallback method if it was specified.
/// </summary>
if (this._modal != null && this._modal.length > 0) {
this._modal.pop().close(rVal);
}
else {
//this will recursively try to close a modal window until the parent window has a modal object or the window is the top and has the modal object
var mgr = null;
if (window.parent == null || window.parent == window) {
//we are at the root window, check if we can close the modal window from here
if (window.UmbClientMgr != null && window.UmbClientMgr._modal != null && window.UmbClientMgr._modal.length > 0) {
mgr = window.UmbClientMgr;
}
else {
return; //exit recursion.
}
}
else if (typeof window.parent.UmbClientMgr != "undefined") {
mgr = window.parent.UmbClientMgr;
}
mgr.closeModalWindow.call(mgr, rVal);
}
},
_debug: function(strMsg) {
if (this._isDebug) {
Sys.Debug.trace("UmbClientMgr: " + strMsg);
}
},
get_isDirty: function() {
return this._isDirty;
},
set_isDirty: function(value) {
this._isDirty = value;
}
};
};
})(jQuery);
//define alias for use throughout application
var UmbClientMgr = new Umbraco.Application.ClientManager();<|fim▁end|> | var m = new Umbraco.Controls.ModalWindow(); |
<|file_name|>localrepocache_tests.py<|end_file_name|><|fim▁begin|># Copyright (C) 2012-2015 Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#<|fim▁hole|>#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import urllib2
import os
import cliapp
import fs.memoryfs
import morphlib
import morphlib.gitdir_tests
class FakeApplication(object):
def __init__(self):
self.settings = {
'verbose': True
}
def status(self, msg):
pass
class LocalRepoCacheTests(unittest.TestCase):
def setUp(self):
aliases = ['upstream=git://example.com/#example.com:%s.git']
repo_resolver = morphlib.repoaliasresolver.RepoAliasResolver(aliases)
tarball_base_url = 'http://lorry.example.com/tarballs/'
self.reponame = 'upstream:reponame'
self.repourl = 'git://example.com/reponame'
escaped_url = 'git___example_com_reponame'
self.tarball_url = '%s%s.tar' % (tarball_base_url, escaped_url)
self.cachedir = '/cache/dir'
self.cache_path = '%s/%s' % (self.cachedir, escaped_url)
self.remotes = {}
self.fetched = []
self.removed = []
self.lrc = morphlib.localrepocache.LocalRepoCache(
FakeApplication(), self.cachedir, repo_resolver, tarball_base_url)
self.lrc.fs = fs.memoryfs.MemoryFS()
self.lrc._git = self.fake_git
self.lrc._fetch = self.not_found
self.lrc._mkdtemp = self.fake_mkdtemp
self.lrc._new_cached_repo_instance = self.new_cached_repo_instance
self._mkdtemp_count = 0
def fake_git(self, args, **kwargs):
if args[0] == 'clone':
self.assertEqual(len(args), 5)
remote = args[3]
local = args[4]
self.remotes['origin'] = {'url': remote, 'updates': 0}
self.lrc.fs.makedir(local, recursive=True)
elif args[0:2] == ['remote', 'set-url']:
remote = args[2]
url = args[3]
self.remotes[remote] = {'url': url}
elif args[0:2] == ['config', 'remote.origin.url']:
remote = 'origin'
url = args[2]
self.remotes[remote] = {'url': url}
elif args[0:2] == ['config', 'remote.origin.mirror']:
remote = 'origin'
elif args[0:2] == ['config', 'remote.origin.fetch']:
remote = 'origin'
else:
raise NotImplementedError()
def fake_mkdtemp(self, dirname):
thing = "foo"+str(self._mkdtemp_count)
self._mkdtemp_count += 1
self.lrc.fs.makedir(dirname+"/"+thing)
return thing
def new_cached_repo_instance(self, *args):
with morphlib.gitdir_tests.allow_nonexistant_git_repos():
return morphlib.cachedrepo.CachedRepo(
FakeApplication(), *args)
def not_found(self, url, path):
raise cliapp.AppException('Not found')
def test_has_not_got_shortened_repo_initially(self):
self.assertFalse(self.lrc.has_repo(self.reponame))
def test_has_not_got_absolute_repo_initially(self):
self.assertFalse(self.lrc.has_repo(self.repourl))
def test_caches_shortened_repository_on_request(self):
self.lrc.cache_repo(self.reponame)
self.assertTrue(self.lrc.has_repo(self.reponame))
self.assertTrue(self.lrc.has_repo(self.repourl))
def test_caches_absolute_repository_on_request(self):
self.lrc.cache_repo(self.repourl)
self.assertTrue(self.lrc.has_repo(self.reponame))
self.assertTrue(self.lrc.has_repo(self.repourl))
def test_cachedir_does_not_exist_initially(self):
self.assertFalse(self.lrc.fs.exists(self.cachedir))
def test_creates_cachedir_if_missing(self):
self.lrc.cache_repo(self.repourl)
self.assertTrue(self.lrc.fs.exists(self.cachedir))
def test_happily_caches_same_repo_twice(self):
self.lrc.cache_repo(self.repourl)
self.lrc.cache_repo(self.repourl)
def test_fails_to_cache_when_remote_does_not_exist(self):
def fail(args, **kwargs):
self.lrc.fs.makedir(args[4])
raise cliapp.AppException('')
self.lrc._git = fail
self.assertRaises(morphlib.localrepocache.NoRemote,
self.lrc.cache_repo, self.repourl)
def test_does_not_mind_a_missing_tarball(self):
self.lrc.cache_repo(self.repourl)
self.assertEqual(self.fetched, [])
def test_fetches_tarball_when_it_exists(self):
self.lrc._fetch = lambda url, path: self.fetched.append(url)
self.unpacked_tar = ""
self.mkdir_path = ""
with morphlib.gitdir_tests.monkeypatch(
morphlib.cachedrepo.CachedRepo, 'update', lambda self: None):
self.lrc.cache_repo(self.repourl)
self.assertEqual(self.fetched, [self.tarball_url])
self.assertFalse(self.lrc.fs.exists(self.cache_path + '.tar'))
self.assertEqual(self.remotes['origin']['url'], self.repourl)
def test_gets_cached_shortened_repo(self):
self.lrc.cache_repo(self.reponame)
cached = self.lrc.get_repo(self.reponame)
self.assertTrue(cached is not None)
def test_gets_cached_absolute_repo(self):
self.lrc.cache_repo(self.repourl)
cached = self.lrc.get_repo(self.repourl)
self.assertTrue(cached is not None)
def test_get_repo_raises_exception_if_repo_is_not_cached(self):
self.assertRaises(Exception, self.lrc.get_repo, self.repourl)
def test_escapes_repourl_as_filename(self):
escaped = self.lrc._escape(self.repourl)
self.assertFalse('/' in escaped)
def test_noremote_error_message_contains_repo_name(self):
e = morphlib.localrepocache.NoRemote(self.repourl, [])
self.assertTrue(self.repourl in str(e))
def test_avoids_caching_local_repo(self):
self.lrc.fs.makedir('/local/repo', recursive=True)
self.lrc.cache_repo('file:///local/repo')
cached = self.lrc.get_repo('file:///local/repo')
assert cached.path == '/local/repo'<|fim▁end|> | # This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details. |
<|file_name|>CWNPropertyCheckView.java<|end_file_name|><|fim▁begin|>package de.uni.freiburg.iig.telematik.wolfgang.properties.check;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import de.invation.code.toval.graphic.component.DisplayFrame;
import de.invation.code.toval.graphic.util.SpringUtilities;
import de.uni.freiburg.iig.telematik.sepia.petrinet.cpn.properties.cwn.CWNProperties;
import de.uni.freiburg.iig.telematik.sepia.petrinet.properties.PropertyCheckingResult;
import javax.swing.SpringLayout;
public class CWNPropertyCheckView extends AbstractPropertyCheckView<CWNProperties> {<|fim▁hole|> private PropertyCheckResultLabel lblStructure;
private PropertyCheckResultLabel lblInOutPlaces;
private PropertyCheckResultLabel lblConnectedness;
private PropertyCheckResultLabel lblValidMarking;
private PropertyCheckResultLabel lblCFDependency;
private PropertyCheckResultLabel lblNoDeadTransitions;
private PropertyCheckResultLabel lblCompletion;
private PropertyCheckResultLabel lblOptionComplete;
private PropertyCheckResultLabel lblBounded;
@Override
protected String getHeadline() {
return "Colored WF Net Check";
}
@Override
protected void addSpecificFields(JPanel pnl) {
lblStructure = new PropertyCheckResultLabel("\u2022 CWN Structure", PropertyCheckingResult.UNKNOWN);
pnl.add(lblStructure);
JPanel pnlStructureSub = new JPanel(new SpringLayout());
pnl.add(pnlStructureSub);
lblInOutPlaces = new PropertyCheckResultLabel("\u2022 Valid InOut Places", PropertyCheckingResult.UNKNOWN);
pnlStructureSub.add(lblInOutPlaces);
lblConnectedness = new PropertyCheckResultLabel("\u2022 Strong Connectedness", PropertyCheckingResult.UNKNOWN);
pnlStructureSub.add(lblConnectedness);
lblValidMarking = new PropertyCheckResultLabel("\u2022 Valid Initial Marking", PropertyCheckingResult.UNKNOWN);
pnlStructureSub.add(lblValidMarking);
lblCFDependency = new PropertyCheckResultLabel("\u2022 Control Flow Dependency", PropertyCheckingResult.UNKNOWN);
pnlStructureSub.add(lblCFDependency);
SpringUtilities.makeCompactGrid(pnlStructureSub, pnlStructureSub.getComponentCount(), 1, 15, 0, 0, 0);
pnl.add(new JPopupMenu.Separator());
lblBounded = new PropertyCheckResultLabel("\u2022 Is Bounded", PropertyCheckingResult.UNKNOWN);
pnl.add(lblBounded);
pnl.add(new JPopupMenu.Separator());
lblOptionComplete = new PropertyCheckResultLabel("\u2022 Option To Complete", PropertyCheckingResult.UNKNOWN);
pnl.add(lblOptionComplete);
pnl.add(new JPopupMenu.Separator());
lblCompletion = new PropertyCheckResultLabel("\u2022 Proper Completion", PropertyCheckingResult.UNKNOWN);
pnl.add(lblCompletion);
pnl.add(new JPopupMenu.Separator());
lblNoDeadTransitions = new PropertyCheckResultLabel("\u2022 No Dead Transitions", PropertyCheckingResult.UNKNOWN);
pnl.add(lblNoDeadTransitions);
}
@Override
public void resetFieldContent() {
super.updateFieldContent(null, null);
lblStructure.updatePropertyCheckingResult(PropertyCheckingResult.UNKNOWN);
lblInOutPlaces.updatePropertyCheckingResult(PropertyCheckingResult.UNKNOWN);
lblConnectedness.updatePropertyCheckingResult(PropertyCheckingResult.UNKNOWN);
lblValidMarking.updatePropertyCheckingResult(PropertyCheckingResult.UNKNOWN);
lblCFDependency.updatePropertyCheckingResult(PropertyCheckingResult.UNKNOWN);
lblBounded.updatePropertyCheckingResult(PropertyCheckingResult.UNKNOWN);
lblOptionComplete.updatePropertyCheckingResult(PropertyCheckingResult.UNKNOWN);
lblCompletion.updatePropertyCheckingResult(PropertyCheckingResult.UNKNOWN);
lblNoDeadTransitions.updatePropertyCheckingResult(PropertyCheckingResult.UNKNOWN);
}
@Override
public void updateFieldContent(CWNProperties checkResult, Exception exception) {
super.updateFieldContent(checkResult, exception);
lblStructure.updatePropertyCheckingResult(checkResult.hasCWNStructure);
lblInOutPlaces.updatePropertyCheckingResult(checkResult.validInOutPlaces);
lblConnectedness.updatePropertyCheckingResult(checkResult.strongConnectedness);
lblValidMarking.updatePropertyCheckingResult(checkResult.validInitialMarking);
lblCFDependency.updatePropertyCheckingResult(checkResult.controlFlowDependency);
lblBounded.updatePropertyCheckingResult(checkResult.isBounded);
lblOptionComplete.updatePropertyCheckingResult(checkResult.optionToCompleteAndProperCompletion);
lblCompletion.updatePropertyCheckingResult(checkResult.optionToCompleteAndProperCompletion);
lblNoDeadTransitions.updatePropertyCheckingResult(checkResult.noDeadTransitions);
}
public static void main(String[] args) {
CWNPropertyCheckView view = new CWNPropertyCheckView();
view.setUpGui();
view.updateFieldContent(new CWNProperties(), null);
new DisplayFrame(view, true);
}
}<|fim▁end|> |
private static final long serialVersionUID = -950169446391727139L;
|
<|file_name|>git.py<|end_file_name|><|fim▁begin|># The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
import logging
import os.path
import re
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.utils.misc import display_path, hide_url
from pip._internal.utils.subprocess import make_command
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.vcs.versioncontrol import (
RemoteNotFoundError,
VersionControl,
find_path_to_setup_from_repo_root,
vcs,
)
if MYPY_CHECK_RUNNING:
from typing import Optional, Tuple
from pip._internal.utils.misc import HiddenText
from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions
urlsplit = urllib_parse.urlsplit
urlunsplit = urllib_parse.urlunsplit
logger = logging.getLogger(__name__)
HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$')
def looks_like_hash(sha):
return bool(HASH_REGEX.match(sha))
class Git(VersionControl):
name = 'git'
dirname = '.git'
repo_name = 'clone'
schemes = (
'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
)
# Prevent the user's environment variables from interfering with pip:
# https://github.com/pypa/pip/issues/1130
unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
default_arg_rev = 'HEAD'
@staticmethod
def get_base_rev_args(rev):
return [rev]
def is_immutable_rev_checkout(self, url, dest):
# type: (str, str) -> bool
_, rev_options = self.get_url_rev_options(hide_url(url))
if not rev_options.rev:
return False
if not self.is_commit_id_equal(dest, rev_options.rev):
# the current commit is different from rev,
# which means rev was something else than a commit hash
return False
# return False in the rare case rev is both a commit hash
# and a tag or a branch; we don't want to cache in that case
# because that branch/tag could point to something else in the future
is_tag_or_branch = bool(
self.get_revision_sha(dest, rev_options.rev)[0]
)
return not is_tag_or_branch
def get_git_version(self):
VERSION_PFX = 'git version '
version = self.run_command(
['version'], show_stdout=False, stdout_only=True
)
if version.startswith(VERSION_PFX):
version = version[len(VERSION_PFX):].split()[0]
else:
version = ''
# get first 3 positions of the git version because
# on windows it is x.y.z.windows.t, and this parses as
# LegacyVersion which always smaller than a Version.
version = '.'.join(version.split('.')[:3])
return parse_version(version)
@classmethod
def get_current_branch(cls, location):
"""
Return the current branch, or None if HEAD isn't at a branch
(e.g. detached HEAD).
"""
# git-symbolic-ref exits with empty stdout if "HEAD" is a detached
# HEAD rather than a symbolic ref. In addition, the -q causes the
# command to exit with status code 1 instead of 128 in this case
# and to suppress the message to stderr.
args = ['symbolic-ref', '-q', 'HEAD']
output = cls.run_command(
args,
extra_ok_returncodes=(1, ),
show_stdout=False,
stdout_only=True,
cwd=location,
)
ref = output.strip()
if ref.startswith('refs/heads/'):
return ref[len('refs/heads/'):]
return None
def export(self, location, url):
# type: (str, HiddenText) -> None
"""Export the Git repository at the url to the destination location"""
if not location.endswith('/'):
location = location + '/'
with TempDirectory(kind="export") as temp_dir:
self.unpack(temp_dir.path, url=url)
self.run_command(
['checkout-index', '-a', '-f', '--prefix', location],
show_stdout=False, cwd=temp_dir.path
)
@classmethod
def get_revision_sha(cls, dest, rev):
"""
Return (sha_or_none, is_branch), where sha_or_none is a commit hash
if the revision names a remote branch or tag, otherwise None.
Args:
dest: the repository directory.
rev: the revision name.
"""
# Pass rev to pre-filter the list.
output = cls.run_command(
['show-ref', rev],
cwd=dest,
show_stdout=False,
stdout_only=True,
on_returncode='ignore',
)
refs = {}
for line in output.strip().splitlines():
try:
sha, ref = line.split()
except ValueError:
# Include the offending line to simplify troubleshooting if
# this error ever occurs.
raise ValueError('unexpected show-ref line: {!r}'.format(line))
refs[ref] = sha
branch_ref = 'refs/remotes/origin/{}'.format(rev)
tag_ref = 'refs/tags/{}'.format(rev)
sha = refs.get(branch_ref)
if sha is not None:
return (sha, True)
sha = refs.get(tag_ref)
return (sha, False)
@classmethod
def _should_fetch(cls, dest, rev):
"""
Return true if rev is a ref or is a commit that we don't have locally.
Branches and tags are not considered in this method because they are
assumed to be always available locally (which is a normal outcome of
``git clone`` and ``git fetch --tags``).
"""
if rev.startswith("refs/"):
# Always fetch remote refs.
return True
if not looks_like_hash(rev):
# Git fetch would fail with abbreviated commits.
return False
if cls.has_commit(dest, rev):
# Don't fetch if we have the commit locally.
return False
return True
@classmethod
def resolve_revision(cls, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> RevOptions
"""
Resolve a revision to a new RevOptions object with the SHA1 of the
branch, tag, or ref if found.
Args:
rev_options: a RevOptions object.
"""
rev = rev_options.arg_rev
# The arg_rev property's implementation for Git ensures that the
# rev return value is always non-None.
assert rev is not None
sha, is_branch = cls.get_revision_sha(dest, rev)
if sha is not None:
rev_options = rev_options.make_new(sha)
rev_options.branch_name = rev if is_branch else None
return rev_options
# Do not show a warning for the common case of something that has
# the form of a Git commit hash.
if not looks_like_hash(rev):
logger.warning(
"Did not find branch or tag '%s', assuming revision or ref.",
rev,
)
if not cls._should_fetch(dest, rev):
return rev_options
# fetch the requested revision
cls.run_command(
make_command('fetch', '-q', url, rev_options.to_args()),
cwd=dest,
)
# Change the revision to the SHA of the ref we fetched
sha = cls.get_revision(dest, rev='FETCH_HEAD')
rev_options = rev_options.make_new(sha)
return rev_options
@classmethod
def is_commit_id_equal(cls, dest, name):
"""
Return whether the current commit hash equals the given name.
Args:
dest: the repository directory.
name: a string name.
"""
if not name:
# Then avoid an unnecessary subprocess call.
return False
return cls.get_revision(dest) == name
def fetch_new(self, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> None
rev_display = rev_options.to_display()
logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest))
self.run_command(make_command('clone', '-q', url, dest))
if rev_options.rev:
# Then a specific revision was requested.
rev_options = self.resolve_revision(dest, url, rev_options)
branch_name = getattr(rev_options, 'branch_name', None)
if branch_name is None:
# Only do a checkout if the current commit id doesn't match
# the requested revision.
if not self.is_commit_id_equal(dest, rev_options.rev):
cmd_args = make_command(
'checkout', '-q', rev_options.to_args(),
)
self.run_command(cmd_args, cwd=dest)
elif self.get_current_branch(dest) != branch_name:
# Then a specific branch was requested, and that branch
# is not yet checked out.
track_branch = 'origin/{}'.format(branch_name)
cmd_args = [
'checkout', '-b', branch_name, '--track', track_branch,
]
self.run_command(cmd_args, cwd=dest)
#: repo may contain submodules
self.update_submodules(dest)
def switch(self, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> None
self.run_command(
make_command('config', 'remote.origin.url', url),
cwd=dest,
)
cmd_args = make_command('checkout', '-q', rev_options.to_args())
self.run_command(cmd_args, cwd=dest)
self.update_submodules(dest)
def update(self, dest, url, rev_options):
# type: (str, HiddenText, RevOptions) -> None
# First fetch changes from the default remote
if self.get_git_version() >= parse_version('1.9.0'):
# fetch tags in addition to everything else
self.run_command(['fetch', '-q', '--tags'], cwd=dest)
else:
self.run_command(['fetch', '-q'], cwd=dest)
# Then reset to wanted revision (maybe even origin/master)
rev_options = self.resolve_revision(dest, url, rev_options)
cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args())
self.run_command(cmd_args, cwd=dest)
#: update submodules
self.update_submodules(dest)
@classmethod
def get_remote_url(cls, location):
"""
Return URL of the first remote encountered.
Raises RemoteNotFoundError if the repository does not have a remote
url configured.
"""
# We need to pass 1 for extra_ok_returncodes since the command
# exits with return code 1 if there are no matching lines.
stdout = cls.run_command(
['config', '--get-regexp', r'remote\..*\.url'],
extra_ok_returncodes=(1, ),
show_stdout=False,
stdout_only=True,
cwd=location,
)
remotes = stdout.splitlines()
try:
found_remote = remotes[0]
except IndexError:
raise RemoteNotFoundError
for remote in remotes:
if remote.startswith('remote.origin.url '):
found_remote = remote
break
url = found_remote.split(' ')[1]
return url.strip()
@classmethod
def has_commit(cls, location, rev):
"""
Check if rev is a commit that is available in the local repository.
"""
try:
cls.run_command(
['rev-parse', '-q', '--verify', "sha^" + rev],
cwd=location,
log_failed_cmd=False,
)
except InstallationError:
return False
else:
return True
@classmethod
def get_revision(cls, location, rev=None):
if rev is None:
rev = 'HEAD'
current_rev = cls.run_command(
['rev-parse', rev],
show_stdout=False,
stdout_only=True,
cwd=location,
)
return current_rev.strip()
@classmethod
def get_subdirectory(cls, location):
"""
Return the path to setup.py, relative to the repo root.
Return None if setup.py is in the repo root.
"""
# find the repo root
git_dir = cls.run_command(
['rev-parse', '--git-dir'],
show_stdout=False,
stdout_only=True,
cwd=location,
).strip()
if not os.path.isabs(git_dir):
git_dir = os.path.join(location, git_dir)
repo_root = os.path.abspath(os.path.join(git_dir, '..'))
return find_path_to_setup_from_repo_root(location, repo_root)
@classmethod
def get_url_rev_and_auth(cls, url):
# type: (str) -> Tuple[str, Optional[str], AuthInfo]
"""
Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
That's required because although they use SSH they sometimes don't
work with a ssh:// scheme (e.g. GitHub). But we need a scheme for
parsing. Hence we remove it again afterwards and return it as a stub.
"""
# Works around an apparent Git bug
# (see https://article.gmane.org/gmane.comp.version-control.git/146500)
scheme, netloc, path, query, fragment = urlsplit(url)
if scheme.endswith('file'):
initial_slashes = path[:-len(path.lstrip('/'))]
newpath = (
initial_slashes +
urllib_request.url2pathname(path)
.replace('\\', '/').lstrip('/')
)
after_plus = scheme.find('+') + 1
url = scheme[:after_plus] + urlunsplit(<|fim▁hole|> if '://' not in url:
assert 'file:' not in url
url = url.replace('git+', 'git+ssh://')
url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
url = url.replace('ssh://', '')
else:
url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
return url, rev, user_pass
@classmethod
def update_submodules(cls, location):
if not os.path.exists(os.path.join(location, '.gitmodules')):
return
cls.run_command(
['submodule', 'update', '--init', '--recursive', '-q'],
cwd=location,
)
@classmethod
def get_repository_root(cls, location):
loc = super(Git, cls).get_repository_root(location)
if loc:
return loc
try:
r = cls.run_command(
['rev-parse', '--show-toplevel'],
cwd=location,
show_stdout=False,
stdout_only=True,
on_returncode='raise',
log_failed_cmd=False,
)
except BadCommand:
logger.debug("could not determine if %s is under git control "
"because git is not available", location)
return None
except InstallationError:
return None
return os.path.normpath(r.rstrip('\r\n'))
vcs.register(Git)<|fim▁end|> | (scheme[after_plus:], netloc, newpath, query, fragment),
)
|
<|file_name|>ekloges.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
import csv
import codecs
import re
import argparse
import os
from prettytable import PrettyTable
report08_schools = {}
report08_employees = {}
report08_school_employees = {}
report16_employee = None
# The following (combined with report16_absence_reasons) is used when an employee is absent, has multiple assignments and not all schools have input this absence
report16_absents = {}
# we will store employee school exclusion in the employee_school_exclusions dict
# format: key -> employee afm
employee_school_exclusions = {}
# school exclusions
excluced_schools = list()
# employee exclusions
excluced_employees = dict()
def filterAFM(rawAFM):
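    # extract the numeric AFM from an Excel-style quoted value such as ="012345678"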
return re.search('=\"(\d*)\"', rawAFM).group(1)
def csv_unireader(f, encoding="utf-8"):
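    # re-encode the stream from its source encoding to UTF-8 for csv.reader, then yield each row's cells decoded back to unicode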
for row in csv.reader(codecs.iterencode(codecs.iterdecode(f, encoding), "utf-8"), delimiter=';', quotechar='"'):
yield [e.decode("utf-8") for e in row]
def parseEmployeeExclusionList(reportPath):
"""
Parses a CSV which in the first column contains the IDs of all employees that need to be excluded from
processing
:param reportPath:
:return: a list of schools ids to exclude
"""
result = dict()
with open(reportPath, 'rb') as report_csvfile:
reader = csv_unireader(report_csvfile, encoding='iso8859-7')
for row in reader:
afm = str(row[0])
afm = afm if len(afm)==9 else '0'+afm
result[afm]=(row[1] if len(row)>1 and row[1] != u'' else u'Άγνωστος λόγος εξαίρεσεις')
return result
def parseSchoolExclusionList(reportPath):
"""
Parses a CSV which in the first column contains the IDs of all schools that need to be excluded from
processing
:param reportPath:
    :return: a list of school ids to exclude
"""
result = list()
with open(reportPath, 'rb') as report_csvfile:
reader = csv_unireader(report_csvfile, encoding='iso8859-7')
for row in reader:
result.append(row[0])
return result
def parseReport16(reportPath='/Users/slavikos/Downloads/CSV_2015-06-03-100905.csv'):
"""
Parse report 16 (Κατάλογος Εκπαιδευτικών που Απουσιάζουν από Σχολικές Μονάδες)
:param reportPath:
:return:
"""
report16_absence_reasons = [u'ΜΑΚΡΟΧΡΟΝΙΑ ΑΔΕΙΑ (>10 ημέρες)',u'ΑΠΟΣΠΑΣΗ ΣΤΟ ΕΞΩΤΕΡΙΚΟ',u'ΑΠΟΣΠΑΣΗ ΣΕ ΦΟΡΕΑ ΥΠ. ΠΑΙΔΕΙΑΣ',u'ΑΠΟΣΠΑΣΗ ΣΕ ΑΛΛΟ ΠΥΣΠΕ / ΠΥΣΔΕ',u'ΑΠΟΣΠΑΣΗ ΣΕ ΦΟΡΕΑ ΕΚΤΟΣ ΥΠ. ΠΑΙΔΕΙΑΣ',u'ΟΛΙΚΗ ΔΙΑΘΕΣΗ ΣΕ ΑΠΟΚΕΝΤΡΩΜΕΝΕΣ ΥΠΗΡΕΣΙΕΣ ΥΠ. ΠΑΙΔΕΙΑΣ']
result = {}
with open(reportPath, 'rb') as report_csvfile:
reader = csv_unireader(report_csvfile, encoding='iso8859-7')
firstRow = True
for row in reader:
if firstRow:
# first row contains
firstRow = False
continue
# note that employee with employeeAfm is missing from school schoolId
result[filterAFM(row[12])] = { "schoolId": row[6], "reason": "%s (%s)" % (row[22], row[23]) }
# check if generally absent (in case of multiple assignments) and insert in report16_absents
if row[24] in report16_absence_reasons or unicode(row[24]).startswith(u'ΜΑΚΡΟΧΡΟΝΙΑ ΑΔΕΙΑ (>10 ημέρες)'):
report16_absents[filterAFM(row[12])] = row[24]
return result
def parseReport08(reportPath='/Users/slavikos/Downloads/CSV_2015-06-02-130003.csv'):
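    """
    Parse report 08 (employee assignments per school unit) and populate the
    module-level report08_schools and report08_employees dictionaries.
    :param reportPath:
    """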
excluded_school_types = [u'Νηπιαγωγεία']
with open(reportPath, 'rb') as report08_csvfile:
spamreader = csv_unireader(report08_csvfile, encoding='iso8859-7')
firstRow = True
for row in spamreader:
if firstRow:
firstRow = False
continue
#exclude some school types
if row[4] in excluded_school_types:
continue
# check if the school id is excluded
if row[6] in excluced_schools:
continue
# get school object
schoolObj = report08_schools.get(row[6], None)
if not schoolObj:
# first time we see that school
schoolObj = {
'id': row[6],
'title': row[7],
'email': row[10],
'employees': list()
}
# add school to dict
report08_schools[row[6]] = schoolObj
# fetch employee from cache
employeeAfm = filterAFM(row[16])
employeeObj = report08_employees.get(employeeAfm, None)
if not employeeObj:
# first time we see that employee
employeeObj = {
'id': row[15] if row[15] else '',
'afm': employeeAfm,
'name': row[19],
'surname': row[18],
'fatherName': row[20],
'specialization': row[28],
'assigments': list()
}
# add the employee in the dict
report08_employees[employeeObj.get('afm')] = employeeObj
# add to the school as dict as well
schoolObj['employees'].append(employeeObj)
else:
# employee exists in the report08_employee dict, so add it
# (if he does not exist) in the schools dict as well
if employeeObj not in schoolObj['employees']:
schoolObj['employees'].append(employeeObj)
assigmentObj = {
'schoolId': schoolObj['id'],
'type': row[33],
'assigment': row[34],
'isMaster': True if row[35] == u'Ναι' else False,
'hours': int(row[44]) if row[44] else 0, # Ώρες Υποχ. Διδακτικού Ωραρίου Υπηρέτησης στο Φορέα
'teachingHours': (int(row[46]) if row[46] else 0) + (int(row[47]) if row[47] else 0),
}
employeeObj['assigments'].append(assigmentObj)
# report08_school_employees[schoolObj['id']].append(assigmentObj)
def isExcluded(employeeAfm, schoolId):
"""
Determines if an employee is excluded from school unit id. If the schoolId is None, then
the operation will check the general exclusion list. The operation will
return None if the employee is not excluded or a description if the employee
should be excluded
:param employeeAfm: The employee's AFM
:type employeeAfm: str
:param schoolId: The school ID to check for exclusion
:type schoolId: str
:return: None if the employee is not excluded or a description if the employee should be excluded
"""
if schoolId is None:
return excluced_employees.get(employeeAfm, None)
if len(employee_school_exclusions) > 0:
exclusion = employee_school_exclusions.get(employeeAfm, None)
if exclusion:
# employee is probably excluded
if exclusion.get('schoolId', '') == schoolId:
return exclusion.get('reason', u"Άγνωστος λόγος εξαίρεσεις")
else:
return None
else:
return None
else:
return None
def processSchool(id, filter0=False):
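    """
    Decide which employees of the given school are accepted and which are rejected,
    based on their selected assignment, the exclusion lists and the absence report.
    :param id: the school id to process
    :param filter0: when True, also reject employees without teaching hours in the unit
    :return: a dict holding the school object and the sorted 'accepted'/'rejected' lists
    """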
schoolObj = report08_schools.get(id, None)
acceptedList = list()
rejectedList = list()
# fetch school employees, if school is not excluded
schoolEmployees = schoolObj.get('employees', list()) if id not in excluced_schools else list()
for employee in schoolEmployees:
# check if the employee is in the general exclusion list
excludedReason = isExcluded(employeeAfm=employee['afm'], schoolId=None)
# check if the employee is in the exclusion list (for the given school)
if excludedReason is None:
excludedReason = isExcluded(employeeAfm=employee['afm'], schoolId=schoolObj['id'])
if excludedReason:
# employee has been excluded
rejectedList.append(
{
'employee': employee,
'excludedReason': excludedReason,
}
)
continue
if report16_absents and employee['afm'] in report16_absents:
# exclude report16_absents from all schools (if they have more than one assignments)
continue
        # some employees (in our case PE05, PE07) may have multiple secondary assignments whose hours are equal
        # to each other and greater than the hours of the main assignment; if this happens, select and enroll
        # them in their main assignment school (as instructed by the ministry of education)
foundAssigment = None
mainAssigment = None
mainAssigmentHours = None
assigmentHours = list()
if len(employee['assigments']) > 2:
for assigment in employee['assigments']:
if assigment['assigment'] == u'Από Διάθεση ΠΥΣΠΕ/ΠΥΣΔΕ':
mainAssigment = assigment
mainAssigmentHours = assigment['hours']
continue
else:
assigmentHours.append (assigment['hours'])
continue
maxHours = max(assigmentHours)
if assigmentHours.count(maxHours)>1:
foundAssigment = mainAssigment
# end of multi max assignments
primaryAssignemtns = [ u'Από Διάθεση ΠΥΣΠΕ/ΠΥΣΔΕ', u'Απόσπαση (με αίτηση - κύριος φορέας)', u'Οργανικά', u'Οργανικά από Άρση Υπεραριθμίας' ]
selectedAssigment = None
for assigment in employee['assigments']:
if foundAssigment:
selectedAssigment = foundAssigment
break
if not selectedAssigment:
selectedAssigment = employee['assigments'][0]
continue
if assigment['hours'] > selectedAssigment['hours']:
# found an assigment with more hours, check the
# new assigment
selectedAssigment = assigment
elif assigment['hours'] == selectedAssigment['hours']:
# deal with same hour assignments
# selected assigment will be accepted if the type is a primary assignment
if assigment['assigment'] in primaryAssignemtns:
selectedAssigment = assigment
else:
pass
# we've checked all assignments and we have the selected assignment
# in the selectedAssigment variable. Check if the assignment references
# the current school and the hours attribute is > 0
if selectedAssigment['schoolId'] == id and selectedAssigment['hours'] > 0:
if filter0 and selectedAssigment['teachingHours'] == 0:
# we've been asked to filter out employees with assignments
# in the current school but without teaching hours
rejectedList.append({
'employee': employee,
'excludedReason': u"Αποκλεισμός λόγο μη ανάθεσης διδακτικού έργου στην μονάδα",
})
continue
# woooo! we have a winner !
acceptedList.append(
{
'employee': employee,
'assigment': selectedAssigment,
}
)
else:
# ok, employee is rejected
schName = report08_schools.get(selectedAssigment['schoolId'], None)['title']
rejectedList.append(
{
'employee': employee,
'excludedReason': u"Τοποθετημένος για '%s' ώρες στην μονάδα '%s' (%s)\n με σχέση '%s'(Σχ.Έργ.: '%s')" % (selectedAssigment['hours'], selectedAssigment['schoolId'], schName, selectedAssigment['assigment'], selectedAssigment['type']),
}
)
return {
'school' : schoolObj,
'accepted': sorted(acceptedList, key=lambda employee: employee['employee']['surname']),
'rejected': sorted(rejectedList, key=lambda employee: employee['employee']['surname']),
}
def writeReportToFile(reportName, resultStr, basePath='/tmp', encoding="utf-8"):
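    """Write resultStr to a file named reportName under basePath, using the given encoding, and return the full path."""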
filePath = os.path.join(basePath, reportName)
with codecs.open(filePath, mode="w", encoding=encoding) as textFile:
textFile.write(resultStr)
return filePath
def replace_all(text, dic):
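    """Return text with every key of dic replaced by its corresponding value."""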
for i, j in dic.iteritems():
text = text.replace(i, j)
return text
def shortenTitle(schName):
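    """Abbreviate common words of a school title so it can be used as a short filename."""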
shortenDic = {u'ΟΛΟΗΜΕΡΟ' : u'ΟΛ', u'ΔΗΜΟΤΙΚΟ' : u'Δ.', u'ΣΧΟΛΕΙΟ' : u'Σ.', u'/' : ''}
return replace_all(schName, shortenDic)
def printTabularResults(result, includeRejected=False):
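    """Render the outcome of processSchool() as pretty-printed text tables, optionally including the rejected employees."""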
schoolObj = result.get('school', dict())
resultString = "\n"
resultString = resultString + "::::::::::::::::::::::::::::::::::::::::::::::::\n"
resultString = resultString + ":: %s - (%s) ::\n" % (schoolObj['title'], schoolObj['id'])
resultString = resultString + "::::::::::::::::::::::::::::::::::::::::::::::::\n"
resultString = resultString + "\n\n"
x = PrettyTable(["#","ΑΜ", "ΑΦΜ", u"ΕΠΩΝΥΜΟ", u"ΟΝΟΜΑ", u"ΠΑΤΡΩΝΥΜΟ", u"ΕΙΔΙΚΟΤΗΤΑ", u"ΣΧΕΣΗ ΕΡΓΑΣΙΑΣ", u"ΤΟΠΟΘΕΤΗΣΗ ΣΤΗΝ ΜΟΝΑΔΑ", u"ΩΡΑΡΙΟ", u"ΑΝΑΘΕΣΕΙΣ"])
x.align[u"#"] = "l"
x.align[u"ΕΠΩΝΥΜΟ"] = "r"
x.align[u"ΟΝΟΜΑ"] = "r"
x.align[u"ΠΑΤΡΩΝΥΜΟ"] = "r"
x.align[u"ΕΙΔΙΚΟΤΗΤΑ"] = "r"
x.align[u"ΣΧΕΣΗ ΕΡΓΑΣΙΑΣ"] = "r"
x.align[u"ΤΟΠΟΘΕΤΗΣΗ ΣΤΗΝ ΜΟΝΑΔΑ"] = "r"
x.align[u"ΩΡΑΡΙΟ"] = "r"
x.align[u"ΑΝΑΘΕΣΕΙΣ"] = "r"
counter = 1
for r in result.get('accepted', list()):
e = r['employee']
a = r['assigment']
x.add_row([counter, e['id'], e['afm'], e['surname'], e['name'], e['fatherName'], e['specialization'], a['type'], a['assigment'], a['hours'], a['teachingHours']])
counter = counter + 1
resultString = resultString + x.get_string()
if includeRejected:
x = PrettyTable(["#","ΑΜ", "ΑΦΜ", u"ΕΠΩΝΥΜΟ", u"ΟΝΟΜΑ", u"ΠΑΤΡΩΝΥΜΟ", u"ΕΙΔΙΚΟΤΗΤΑ", u"ΑΠΟΚΛΕΙΣΜΟΣ ΑΠΟ ΨΗΦΟΦΟΡΙΑ"])
x.align[u"#"] = "l"
x.align[u"ΕΠΩΝΥΜΟ"] = "r"
x.align[u"ΟΝΟΜΑ"] = "r"
x.align[u"ΠΑΤΡΩΝΥΜΟ"] = "r"
x.align[u"ΕΙΔΙΚΟΤΗΤΑ"] = "r"
x.align[u"ΑΠΟΚΛΕΙΣΜΟΣ ΑΠΟ ΨΗΦΟΦΟΡΙΑ"] = "l"
counter = 1
for r in result.get('rejected', list()):
e = r['employee']
x.add_row([counter, e['id'], e['afm'], e['surname'], e['name'], e['fatherName'], e['specialization'], r['excludedReason'] ])
counter = counter + 1
resultString = resultString + "\n\n"
resultString = resultString + u"###############################\n"
resultString = resultString + u"##### Λίστα Αποκλεισμένων #####\n"
resultString = resultString + u"###############################\n"
resultString = resultString + "\n\n"
resultString = resultString + x.get_string()
return resultString
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-r8', "--report8", help="path to myschool report 8", required=True, type=str)
parser.add_argument('-r16', "--report16", help="path to myschool report 16", type=str)
parser.add_argument('-se', "--schoolExclusion", help="path to school exclusion list", type=str)
    parser.add_argument('-ee', "--employeeExclusion", help="path to employee exclusion list", type=str)
parser.add_argument('--schoolId', type=str, help='generate report for the given school id')
parser.add_argument('--filter0', action='store_true', default=False, help='filter employees without teaching hour(s)')
parser.add_argument('--rejected', action='store_true', default=False, help='print rejected employees in results')
parser.add_argument('--outputDir', type=str, help='the base path where output files should be placed')
parser.add_argument('--titleFiles', action='store_true', default=False, help='output school titles as filenames')
    parser.add_argument('--outputEncoding', default='utf-8', help='set output encoding')
args = parser.parse_args()
if args.schoolExclusion:
# path to school exclusion has been specified, so go and parse
excluced_schools = parseSchoolExclusionList(reportPath=args.schoolExclusion)<|fim▁hole|>
if args.employeeExclusion:
excluced_employees = parseEmployeeExclusionList(reportPath=args.employeeExclusion)
# parse report 08 as it is mandatory !
parseReport08(reportPath=args.report8)
if args.report16:
# path to report 16 has been specified, so parse!
employee_school_exclusions.update(parseReport16(reportPath=args.report16))
if args.schoolId:
schoolObj = report08_schools[args.schoolId]
result = processSchool(id=args.schoolId, filter0=args.filter0)
r = printTabularResults(result, includeRejected=args.rejected)
if args.outputDir:
outputFileName = shortenTitle(schoolObj['title']) if args.titleFiles else args.schoolId
path = writeReportToFile(reportName=("%s.txt" % outputFileName), resultStr=r, basePath=args.outputDir, encoding=args.outputEncoding)
print "[*] School '%s' (%s) report has been written to file '%s'" % (args.schoolId,schoolObj['title'], path)
else:
print r
exit()
for school in report08_schools:
schoolObj = report08_schools[school]
result = processSchool(id=school, filter0=args.filter0)
r = printTabularResults(result, includeRejected=args.rejected)
if args.outputDir:
outputFileName = shortenTitle(schoolObj['title']) if args.titleFiles else school
path = writeReportToFile(reportName=("%s.txt" % outputFileName), resultStr=r, basePath=args.outputDir, encoding=args.outputEncoding)
print "[*] School '%s' (%s) report has been written to file '%s'" % (school,schoolObj['title'], path)
else:
print r<|fim▁end|> | |
<|file_name|>webroot.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import cherrypy
import mako
import os
from girder import constants
from girder.utility import config
class WebrootBase(object):
"""
Serves a template file in response to GET requests.
This will typically be the base class of any non-API endpoints.
"""
exposed = True
def __init__(self, templatePath):
with open(templatePath) as templateFile:
# This may raise an IOError, but there's no way to recover
self.template = templateFile.read()
# Rendering occurs lazily on the first GET request
self.indexHtml = None
self.vars = {}
self.config = config.getConfig()
def updateHtmlVars(self, vars):
"""
If any of the variables in the index html need to change, call this
with the updated set of variables to render the template with.
"""
self.vars.update(vars)
self.indexHtml = None
def _renderHTML(self):
return mako.template.Template(self.template).render(**self.vars)
def GET(self, **params):
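        # In development mode re-render on every request; otherwise serve the cached HTML.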
if self.indexHtml is None or self.config['server']['mode'] == 'development':
self.indexHtml = self._renderHTML()
return self.indexHtml
def DELETE(self, **params):
raise cherrypy.HTTPError(405)
def PATCH(self, **params):
raise cherrypy.HTTPError(405)
def POST(self, **params):
raise cherrypy.HTTPError(405)
def PUT(self, **params):
raise cherrypy.HTTPError(405)
class Webroot(WebrootBase):
"""
The webroot endpoint simply serves the main index HTML file.
"""
def __init__(self, templatePath=None):
if not templatePath:
templatePath = os.path.join(constants.PACKAGE_DIR,
'utility', 'webroot.mako')
super(Webroot, self).__init__(templatePath)
self.vars = {<|fim▁hole|> 'apiRoot': '',
'staticRoot': '',
'title': 'Girder'
}
def _renderHTML(self):
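        # Collect the built CSS/JS bundles of the enabled plugins so the template can include them.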
self.vars['pluginCss'] = []
self.vars['pluginJs'] = []
builtDir = os.path.join(constants.STATIC_ROOT_DIR, 'clients', 'web',
'static', 'built', 'plugins')
for plugin in self.vars['plugins']:
if os.path.exists(os.path.join(builtDir, plugin, 'plugin.min.css')):
self.vars['pluginCss'].append(plugin)
if os.path.exists(os.path.join(builtDir, plugin, 'plugin.min.js')):
self.vars['pluginJs'].append(plugin)
return super(Webroot, self)._renderHTML()<|fim▁end|> | 'plugins': [], |
<|file_name|>popup.spec.js<|end_file_name|><|fim▁begin|>describe('$materialPopup service', function() {
beforeEach(module('material.services.popup', 'ngAnimateMock'));
function setup(options) {
var popup;
inject(function($materialPopup, $rootScope) {
$materialPopup(options).then(function(p) {
popup = p;
});
$rootScope.$apply();<|fim▁hole|> describe('enter()', function() {
it('should append to options.appendTo', inject(function($animate, $rootScope) {
var parent = angular.element('<div id="parent">');
var popup = setup({
appendTo: parent,
template: '<div id="element"></div>'
});
popup.enter();
$rootScope.$digest();
expect($animate.queue.shift().event).toBe('enter');
expect(popup.element.parent()[0]).toBe(parent[0]); //fails
}));
it('should append to $rootElement by default', inject(function($rootScope, $document, $rootElement) {
var popup = setup({
template: '<div id="element"></div>'
});
popup.enter();
$rootScope.$digest();
expect(popup.element.parent()[0]).toBe($rootElement[0]);
}));
});
describe('destroy()', function() {
it('should leave and then destroy scope', inject(function($rootScope, $animate) {
var popup = setup({
template: '<div>'
});
popup.enter();
$rootScope.$apply();
var scope = popup.element.scope();
spyOn($animate, 'leave').andCallFake(function(element, cb) { cb(); });
spyOn(scope, '$destroy');
popup.destroy();
expect($animate.leave).toHaveBeenCalled();
expect(scope.$destroy).toHaveBeenCalled();
}));
});
});<|fim▁end|> | });
return popup;
}
|
<|file_name|>scheduler_predicates.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package e2e
import (
"fmt"
"time"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/resource"
"k8s.io/kubernetes/pkg/api/unversioned"
client "k8s.io/kubernetes/pkg/client/unversioned"
"k8s.io/kubernetes/pkg/fields"
"k8s.io/kubernetes/pkg/util"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Returns a number of currently scheduled and not scheduled Pods.
func getPodsScheduled(pods *api.PodList) (scheduledPods, notScheduledPods []api.Pod) {
for _, pod := range pods.Items {
if pod.Spec.NodeName != "" {
scheduledPods = append(scheduledPods, pod)
} else {
notScheduledPods = append(notScheduledPods, pod)
}
}
return<|fim▁hole|>// requires passing whole Pod definition, which is needed to test various Scheduler predicates.
func startPods(c *client.Client, replicas int, ns string, podNamePrefix string, pod api.Pod) {
allPods, err := c.Pods(api.NamespaceAll).List(api.ListOptions{})
expectNoError(err)
podsScheduledBefore, _ := getPodsScheduled(allPods)
for i := 0; i < replicas; i++ {
podName := fmt.Sprintf("%v-%v", podNamePrefix, i)
pod.ObjectMeta.Name = podName
pod.ObjectMeta.Labels["name"] = podName
pod.Spec.Containers[0].Name = podName
_, err = c.Pods(ns).Create(&pod)
expectNoError(err)
}
// Wait for pods to start running. Note: this is a functional
// test, not a performance test, so the timeout needs to be
// sufficiently long that it's only triggered if things are
// completely broken vs. running slowly.
timeout := 10 * time.Minute
startTime := time.Now()
currentlyScheduledPods := 0
for len(podsScheduledBefore)+replicas != currentlyScheduledPods {
allPods, err := c.Pods(api.NamespaceAll).List(api.ListOptions{})
expectNoError(err)
scheduledPods := 0
for _, pod := range allPods.Items {
if pod.Spec.NodeName != "" {
scheduledPods += 1
}
}
currentlyScheduledPods = scheduledPods
Logf("%v pods running", currentlyScheduledPods)
if startTime.Add(timeout).Before(time.Now()) {
Logf("Timed out after %v waiting for pods to start running.", timeout)
break
}
time.Sleep(5 * time.Second)
}
Expect(currentlyScheduledPods).To(Equal(len(podsScheduledBefore) + replicas))
}
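// getRequestedCPU returns the total CPU requested by all containers of the pod, in millicores.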
func getRequestedCPU(pod api.Pod) int64 {
var result int64
for _, container := range pod.Spec.Containers {
result += container.Resources.Requests.Cpu().MilliValue()
}
return result
}
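// verifyResult checks that exactly one pod was left unscheduled and that the
// scheduler recorded a FailedScheduling event for it.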
func verifyResult(c *client.Client, podName string, ns string) {
allPods, err := c.Pods(api.NamespaceAll).List(api.ListOptions{})
expectNoError(err)
scheduledPods, notScheduledPods := getPodsScheduled(allPods)
selector := fields.Set{
"involvedObject.kind": "Pod",
"involvedObject.name": podName,
"involvedObject.namespace": ns,
"source": "scheduler",
"reason": "FailedScheduling",
}.AsSelector()
options := api.ListOptions{FieldSelector: selector}
schedEvents, err := c.Events(ns).List(options)
expectNoError(err)
	// If we failed to find an event whose reason starts with a capital letter,
	// try looking for one starting with a lowercase letter, for backward compatibility.
	// If we don't do this we end up in #15806.
// TODO: remove this block when we don't care about supporting v1.0 too much.
if len(schedEvents.Items) == 0 {
selector := fields.Set{
"involvedObject.kind": "Pod",
"involvedObject.name": podName,
"involvedObject.namespace": ns,
"source": "scheduler",
"reason": "failedScheduling",
}.AsSelector()
options := api.ListOptions{FieldSelector: selector}
schedEvents, err = c.Events(ns).List(options)
expectNoError(err)
}
printed := false
printOnce := func(msg string) string {
if !printed {
printed = true
return msg
} else {
return ""
}
}
Expect(len(notScheduledPods)).To(Equal(1), printOnce(fmt.Sprintf("Not scheduled Pods: %#v", notScheduledPods)))
Expect(schedEvents.Items).ToNot(BeEmpty(), printOnce(fmt.Sprintf("Scheduled Pods: %#v", scheduledPods)))
}
func cleanupPods(c *client.Client, ns string) {
By("Removing all pods in namespace " + ns)
pods, err := c.Pods(ns).List(api.ListOptions{})
expectNoError(err)
opt := api.NewDeleteOptions(0)
for _, p := range pods.Items {
expectNoError(c.Pods(ns).Delete(p.ObjectMeta.Name, opt))
}
}
// Waits until all existing pods are scheduled and returns their amount.
func waitForStableCluster(c *client.Client) int {
timeout := 10 * time.Minute
startTime := time.Now()
allPods, err := c.Pods(api.NamespaceAll).List(api.ListOptions{})
expectNoError(err)
scheduledPods, currentlyNotScheduledPods := getPodsScheduled(allPods)
for len(currentlyNotScheduledPods) != 0 {
time.Sleep(2 * time.Second)
allPods, err := c.Pods(api.NamespaceAll).List(api.ListOptions{})
expectNoError(err)
scheduledPods, currentlyNotScheduledPods = getPodsScheduled(allPods)
if startTime.Add(timeout).Before(time.Now()) {
Failf("Timed out after %v waiting for stable cluster.", timeout)
break
}
}
return len(scheduledPods)
}
var _ = Describe("SchedulerPredicates [Serial]", func() {
var c *client.Client
var nodeList *api.NodeList
var totalPodCapacity int64
var RCName string
var ns string
AfterEach(func() {
rc, err := c.ReplicationControllers(ns).Get(RCName)
if err == nil && rc.Spec.Replicas != 0 {
By("Cleaning up the replication controller")
err := DeleteRC(c, ns, RCName)
expectNoError(err)
}
})
framework := NewFramework("sched-pred")
BeforeEach(func() {
c = framework.Client
ns = framework.Namespace.Name
nodeList = ListSchedulableNodesOrDie(c)
})
	// This test verifies that the max-pods flag works as advertised. It assumes that cluster add-on pods stay stable
	// and cannot be run in parallel with any other test that touches Nodes or Pods. This is because, to check
	// whether max-pods is working, we need to fully saturate the cluster and keep it in this state for a few seconds.
It("validates MaxPods limit number of pods that are allowed to run", func() {
totalPodCapacity = 0
for _, node := range nodeList.Items {
podCapacity, found := node.Status.Capacity["pods"]
Expect(found).To(Equal(true))
totalPodCapacity += podCapacity.Value()
Logf("Node: %v", node)
}
currentlyScheduledPods := waitForStableCluster(c)
podsNeededForSaturation := int(totalPodCapacity) - currentlyScheduledPods
By(fmt.Sprintf("Starting additional %v Pods to fully saturate the cluster max pods and trying to start another one", podsNeededForSaturation))
startPods(c, podsNeededForSaturation, ns, "maxp", api.Pod{
TypeMeta: unversioned.TypeMeta{
Kind: "Pod",
},
ObjectMeta: api.ObjectMeta{
Name: "",
Labels: map[string]string{"name": ""},
},
Spec: api.PodSpec{
Containers: []api.Container{
{
Name: "",
Image: "gcr.io/google_containers/pause:2.0",
},
},
},
})
podName := "additional-pod"
_, err := c.Pods(ns).Create(&api.Pod{
TypeMeta: unversioned.TypeMeta{
Kind: "Pod",
},
ObjectMeta: api.ObjectMeta{
Name: podName,
Labels: map[string]string{"name": "additional"},
},
Spec: api.PodSpec{
Containers: []api.Container{
{
Name: podName,
Image: "gcr.io/google_containers/pause:2.0",
},
},
},
})
expectNoError(err)
// Wait a bit to allow scheduler to do its thing
// TODO: this is brittle; there's no guarantee the scheduler will have run in 10 seconds.
Logf("Sleeping 10 seconds and crossing our fingers that scheduler will run in that time.")
time.Sleep(10 * time.Second)
verifyResult(c, podName, ns)
cleanupPods(c, ns)
})
	// This test verifies that we don't allow scheduling of pods in a way that the sum of the pods' resource limits is greater than the machines' capacity.
	// It assumes that cluster add-on pods stay stable and cannot be run in parallel with any other test that touches Nodes or Pods.
	// This is because we need to have precise control over what's running in the cluster.
It("validates resource limits of pods that are allowed to run [Conformance]", func() {
nodeToCapacityMap := make(map[string]int64)
for _, node := range nodeList.Items {
capacity, found := node.Status.Capacity["cpu"]
Expect(found).To(Equal(true))
nodeToCapacityMap[node.Name] = capacity.MilliValue()
}
waitForStableCluster(c)
pods, err := c.Pods(api.NamespaceAll).List(api.ListOptions{})
expectNoError(err)
for _, pod := range pods.Items {
_, found := nodeToCapacityMap[pod.Spec.NodeName]
Expect(found).To(Equal(true))
if pod.Status.Phase == api.PodRunning {
Logf("Pod %v requesting capacity %v on Node %v", pod.Name, getRequestedCPU(pod), pod.Spec.NodeName)
nodeToCapacityMap[pod.Spec.NodeName] -= getRequestedCPU(pod)
}
}
var podsNeededForSaturation int
milliCpuPerPod := int64(500)
for name, leftCapacity := range nodeToCapacityMap {
Logf("Node: %v has capacity: %v", name, leftCapacity)
podsNeededForSaturation += (int)(leftCapacity / milliCpuPerPod)
}
By(fmt.Sprintf("Starting additional %v Pods to fully saturate the cluster CPU and trying to start another one", podsNeededForSaturation))
startPods(c, podsNeededForSaturation, ns, "overcommit", api.Pod{
TypeMeta: unversioned.TypeMeta{
Kind: "Pod",
},
ObjectMeta: api.ObjectMeta{
Name: "",
Labels: map[string]string{"name": ""},
},
Spec: api.PodSpec{
Containers: []api.Container{
{
Name: "",
Image: "gcr.io/google_containers/pause:2.0",
Resources: api.ResourceRequirements{
Limits: api.ResourceList{
"cpu": *resource.NewMilliQuantity(milliCpuPerPod, "DecimalSI"),
},
Requests: api.ResourceList{
"cpu": *resource.NewMilliQuantity(milliCpuPerPod, "DecimalSI"),
},
},
},
},
},
})
podName := "additional-pod"
_, err = c.Pods(ns).Create(&api.Pod{
TypeMeta: unversioned.TypeMeta{
Kind: "Pod",
},
ObjectMeta: api.ObjectMeta{
Name: podName,
Labels: map[string]string{"name": "additional"},
},
Spec: api.PodSpec{
Containers: []api.Container{
{
Name: podName,
Image: "gcr.io/google_containers/pause:2.0",
Resources: api.ResourceRequirements{
Limits: api.ResourceList{
"cpu": *resource.NewMilliQuantity(milliCpuPerPod, "DecimalSI"),
},
},
},
},
},
})
expectNoError(err)
// Wait a bit to allow scheduler to do its thing
// TODO: this is brittle; there's no guarantee the scheduler will have run in 10 seconds.
Logf("Sleeping 10 seconds and crossing our fingers that scheduler will run in that time.")
time.Sleep(10 * time.Second)
verifyResult(c, podName, ns)
cleanupPods(c, ns)
})
// Test Nodes does not have any label, hence it should be impossible to schedule Pod with
// nonempty Selector set.
It("validates that NodeSelector is respected if not matching [Conformance]", func() {
By("Trying to schedule Pod with nonempty NodeSelector.")
podName := "restricted-pod"
waitForStableCluster(c)
_, err := c.Pods(ns).Create(&api.Pod{
TypeMeta: unversioned.TypeMeta{
Kind: "Pod",
},
ObjectMeta: api.ObjectMeta{
Name: podName,
Labels: map[string]string{"name": "restricted"},
},
Spec: api.PodSpec{
Containers: []api.Container{
{
Name: podName,
Image: "gcr.io/google_containers/pause:2.0",
},
},
NodeSelector: map[string]string{
"label": "nonempty",
},
},
})
expectNoError(err)
// Wait a bit to allow scheduler to do its thing
// TODO: this is brittle; there's no guarantee the scheduler will have run in 10 seconds.
Logf("Sleeping 10 seconds and crossing our fingers that scheduler will run in that time.")
time.Sleep(10 * time.Second)
verifyResult(c, podName, ns)
cleanupPods(c, ns)
})
It("validates that NodeSelector is respected if matching [Conformance]", func() {
// launch a pod to find a node which can launch a pod. We intentionally do
// not just take the node list and choose the first of them. Depending on the
// cluster and the scheduler it might be that a "normal" pod cannot be
// scheduled onto it.
By("Trying to launch a pod without a label to get a node which can launch it.")
podName := "without-label"
_, err := c.Pods(ns).Create(&api.Pod{
TypeMeta: unversioned.TypeMeta{
Kind: "Pod",
},
ObjectMeta: api.ObjectMeta{
Name: podName,
},
Spec: api.PodSpec{
Containers: []api.Container{
{
Name: podName,
Image: "gcr.io/google_containers/pause:2.0",
},
},
},
})
expectNoError(err)
expectNoError(waitForPodRunningInNamespace(c, podName, ns))
pod, err := c.Pods(ns).Get(podName)
expectNoError(err)
nodeName := pod.Spec.NodeName
err = c.Pods(ns).Delete(podName, api.NewDeleteOptions(0))
expectNoError(err)
By("Trying to apply a random label on the found node.")
k := fmt.Sprintf("kubernetes.io/e2e-%s", string(util.NewUUID()))
v := "42"
patch := fmt.Sprintf(`{"metadata":{"labels":{"%s":"%s"}}}`, k, v)
err = c.Patch(api.MergePatchType).Resource("nodes").Name(nodeName).Body([]byte(patch)).Do().Error()
expectNoError(err)
node, err := c.Nodes().Get(nodeName)
expectNoError(err)
Expect(node.Labels[k]).To(Equal(v))
By("Trying to relaunch the pod, now with labels.")
labelPodName := "with-labels"
_, err = c.Pods(ns).Create(&api.Pod{
TypeMeta: unversioned.TypeMeta{
Kind: "Pod",
},
ObjectMeta: api.ObjectMeta{
Name: labelPodName,
},
Spec: api.PodSpec{
Containers: []api.Container{
{
Name: labelPodName,
Image: "gcr.io/google_containers/pause:2.0",
},
},
NodeSelector: map[string]string{
"kubernetes.io/hostname": nodeName,
k: v,
},
},
})
expectNoError(err)
defer c.Pods(ns).Delete(labelPodName, api.NewDeleteOptions(0))
// check that pod got scheduled. We intentionally DO NOT check that the
// pod is running because this will create a race condition with the
// kubelet and the scheduler: the scheduler might have scheduled a pod
// already when the kubelet does not know about its new label yet. The
// kubelet will then refuse to launch the pod.
expectNoError(waitForPodNotPending(c, ns, labelPodName))
labelPod, err := c.Pods(ns).Get(labelPodName)
expectNoError(err)
Expect(labelPod.Spec.NodeName).To(Equal(nodeName))
})
})<|fim▁end|> | }
// Simplified version of RunRC, that does not create RC, but creates plain Pods and |
<|file_name|>test_pytables.py<|end_file_name|><|fim▁begin|>import nose
import sys
import os
import warnings
import tempfile
from contextlib import contextmanager
import datetime
import numpy as np
import pandas
import pandas as pd
from pandas import (Series, DataFrame, Panel, MultiIndex, Categorical, bdate_range,
date_range, Index, DatetimeIndex, isnull)
from pandas.io.pytables import _tables
try:
_tables()
except ImportError as e:
raise nose.SkipTest(e)
from pandas.io.pytables import (HDFStore, get_store, Term, read_hdf,
IncompatibilityWarning, PerformanceWarning,
AttributeConflictWarning, DuplicateWarning,
PossibleDataLossError, ClosedFileError)
from pandas.io import pytables as pytables
import pandas.util.testing as tm
from pandas.util.testing import (assert_panel4d_equal,
assert_panel_equal,
assert_frame_equal,
assert_series_equal)
from pandas import concat, Timestamp
from pandas import compat
from pandas.compat import range, lrange, u
from pandas.util.testing import assert_produces_warning
from numpy.testing.decorators import slow
try:
import tables
except ImportError:
raise nose.SkipTest('no pytables')
from distutils.version import LooseVersion
_default_compressor = LooseVersion(tables.__version__) >= '2.2' \
and 'blosc' or 'zlib'
_multiprocess_can_split_ = False
# contextmanager to ensure the file cleanup
def safe_remove(path):
if path is not None:
try:
os.remove(path)
except:
pass
def safe_close(store):
try:
if store is not None:
store.close()
except:
pass
def create_tempfile(path):
""" create an unopened named temporary file """
return os.path.join(tempfile.gettempdir(),path)
@contextmanager
def ensure_clean_store(path, mode='a', complevel=None, complib=None,
fletcher32=False):
try:
# put in the temporary path if we don't have one already
if not len(os.path.dirname(path)):
path = create_tempfile(path)
store = HDFStore(path, mode=mode, complevel=complevel,
complib=complib, fletcher32=False)
yield store
finally:
safe_close(store)
if mode == 'w' or mode == 'a':
safe_remove(path)
@contextmanager
def ensure_clean_path(path):
"""
return essentially a named temporary file that is not opened
    and deleted on exiting; if path is a list, then create and
return list of filenames
"""
try:
if isinstance(path, list):
filenames = [ create_tempfile(p) for p in path ]
yield filenames
else:
filenames = [ create_tempfile(path) ]
yield filenames[0]
finally:
for f in filenames:
safe_remove(f)
# set these parameters so we don't have file sharing
tables.parameters.MAX_NUMEXPR_THREADS = 1
tables.parameters.MAX_BLOSC_THREADS = 1
tables.parameters.MAX_THREADS = 1
def _maybe_remove(store, key):
"""For tests using tables, try removing the table to be sure there is
no content from previous tests using the same table name."""
try:
store.remove(key)
except:
pass
def compat_assert_produces_warning(w,f):
""" don't produce a warning under PY3 """
if compat.PY3:
f()
else:
with tm.assert_produces_warning(expected_warning=w):
f()
class TestHDFStore(tm.TestCase):
@classmethod
def setUpClass(cls):
super(TestHDFStore, cls).setUpClass()
# Pytables 3.0.0 deprecates lots of things
tm.reset_testing_mode()
@classmethod
def tearDownClass(cls):
super(TestHDFStore, cls).tearDownClass()
# Pytables 3.0.0 deprecates lots of things
tm.set_testing_mode()
def setUp(self):
warnings.filterwarnings(action='ignore', category=FutureWarning)
self.path = 'tmp.__%s__.h5' % tm.rands(10)
def tearDown(self):
pass
def test_factory_fun(self):
try:
with get_store(self.path) as tbl:
raise ValueError('blah')
except ValueError:
pass
finally:
safe_remove(self.path)
try:
with get_store(self.path) as tbl:
tbl['a'] = tm.makeDataFrame()
with get_store(self.path) as tbl:
self.assertEqual(len(tbl), 1)
self.assertEqual(type(tbl['a']), DataFrame)
finally:
safe_remove(self.path)
def test_context(self):
try:
with HDFStore(self.path) as tbl:
raise ValueError('blah')
except ValueError:
pass
finally:
safe_remove(self.path)
try:
with HDFStore(self.path) as tbl:
tbl['a'] = tm.makeDataFrame()
with HDFStore(self.path) as tbl:
self.assertEqual(len(tbl), 1)
self.assertEqual(type(tbl['a']), DataFrame)
finally:
safe_remove(self.path)
def test_conv_read_write(self):
try:
def roundtrip(key, obj,**kwargs):
obj.to_hdf(self.path, key,**kwargs)
return read_hdf(self.path, key)
o = tm.makeTimeSeries()
assert_series_equal(o, roundtrip('series',o))
o = tm.makeStringSeries()
assert_series_equal(o, roundtrip('string_series',o))
o = tm.makeDataFrame()
assert_frame_equal(o, roundtrip('frame',o))
o = tm.makePanel()
assert_panel_equal(o, roundtrip('panel',o))
# table
df = DataFrame(dict(A=lrange(5), B=lrange(5)))
df.to_hdf(self.path,'table',append=True)
result = read_hdf(self.path, 'table', where = ['index>2'])
assert_frame_equal(df[df.index>2],result)
finally:
safe_remove(self.path)
def test_long_strings(self):
# GH6166
# unconversion of long strings was being chopped in earlier
# versions of numpy < 1.7.2
df = DataFrame({'a': tm.rands_array(100, size=10)},
index=tm.rands_array(100, size=10))
with ensure_clean_store(self.path) as store:
store.append('df', df, data_columns=['a'])
result = store.select('df')
assert_frame_equal(df, result)
def test_api(self):
# GH4584
        # API issue when to_hdf doesn't accept append AND format args
with ensure_clean_path(self.path) as path:
df = tm.makeDataFrame()
df.iloc[:10].to_hdf(path,'df',append=True,format='table')
df.iloc[10:].to_hdf(path,'df',append=True,format='table')
assert_frame_equal(read_hdf(path,'df'),df)
# append to False
df.iloc[:10].to_hdf(path,'df',append=False,format='table')
df.iloc[10:].to_hdf(path,'df',append=True,format='table')
assert_frame_equal(read_hdf(path,'df'),df)
with ensure_clean_path(self.path) as path:
df = tm.makeDataFrame()
df.iloc[:10].to_hdf(path,'df',append=True)
df.iloc[10:].to_hdf(path,'df',append=True,format='table')
assert_frame_equal(read_hdf(path,'df'),df)
# append to False
df.iloc[:10].to_hdf(path,'df',append=False,format='table')
df.iloc[10:].to_hdf(path,'df',append=True)
assert_frame_equal(read_hdf(path,'df'),df)
with ensure_clean_path(self.path) as path:
df = tm.makeDataFrame()
df.to_hdf(path,'df',append=False,format='fixed')
assert_frame_equal(read_hdf(path,'df'),df)
df.to_hdf(path,'df',append=False,format='f')
assert_frame_equal(read_hdf(path,'df'),df)
df.to_hdf(path,'df',append=False)
assert_frame_equal(read_hdf(path,'df'),df)
df.to_hdf(path,'df')
assert_frame_equal(read_hdf(path,'df'),df)
with ensure_clean_store(self.path) as store:
path = store._path
df = tm.makeDataFrame()
_maybe_remove(store,'df')
store.append('df',df.iloc[:10],append=True,format='table')
store.append('df',df.iloc[10:],append=True,format='table')
assert_frame_equal(store.select('df'),df)
# append to False
_maybe_remove(store,'df')
store.append('df',df.iloc[:10],append=False,format='table')
store.append('df',df.iloc[10:],append=True,format='table')
assert_frame_equal(store.select('df'),df)
# formats
_maybe_remove(store,'df')
store.append('df',df.iloc[:10],append=False,format='table')
store.append('df',df.iloc[10:],append=True,format='table')
assert_frame_equal(store.select('df'),df)
_maybe_remove(store,'df')
store.append('df',df.iloc[:10],append=False,format='table')
store.append('df',df.iloc[10:],append=True,format=None)
assert_frame_equal(store.select('df'),df)
with ensure_clean_path(self.path) as path:
# invalid
df = tm.makeDataFrame()
self.assertRaises(ValueError, df.to_hdf, path,'df',append=True,format='f')
self.assertRaises(ValueError, df.to_hdf, path,'df',append=True,format='fixed')
self.assertRaises(TypeError, df.to_hdf, path,'df',append=True,format='foo')
self.assertRaises(TypeError, df.to_hdf, path,'df',append=False,format='bar')
#File path doesn't exist
path = ""
self.assertRaises(IOError, read_hdf, path, 'df')
def test_api_default_format(self):
# default_format option
with ensure_clean_store(self.path) as store:
df = tm.makeDataFrame()
pandas.set_option('io.hdf.default_format','fixed')
_maybe_remove(store,'df')
store.put('df',df)
self.assertFalse(store.get_storer('df').is_table)
self.assertRaises(ValueError, store.append, 'df2',df)
pandas.set_option('io.hdf.default_format','table')
_maybe_remove(store,'df')
store.put('df',df)
self.assertTrue(store.get_storer('df').is_table)
_maybe_remove(store,'df2')
store.append('df2',df)
self.assertTrue(store.get_storer('df').is_table)
pandas.set_option('io.hdf.default_format',None)
with ensure_clean_path(self.path) as path:
df = tm.makeDataFrame()
pandas.set_option('io.hdf.default_format','fixed')
df.to_hdf(path,'df')
with get_store(path) as store:
self.assertFalse(store.get_storer('df').is_table)
self.assertRaises(ValueError, df.to_hdf, path,'df2', append=True)
pandas.set_option('io.hdf.default_format','table')
df.to_hdf(path,'df3')
with HDFStore(path) as store:
self.assertTrue(store.get_storer('df3').is_table)
df.to_hdf(path,'df4',append=True)
with HDFStore(path) as store:
self.assertTrue(store.get_storer('df4').is_table)
pandas.set_option('io.hdf.default_format',None)
def test_keys(self):
with ensure_clean_store(self.path) as store:
store['a'] = tm.makeTimeSeries()
store['b'] = tm.makeStringSeries()
store['c'] = tm.makeDataFrame()
store['d'] = tm.makePanel()
store['foo/bar'] = tm.makePanel()
self.assertEqual(len(store), 5)
self.assertTrue(set(
store.keys()) == set(['/a', '/b', '/c', '/d', '/foo/bar']))
def test_repr(self):
with ensure_clean_store(self.path) as store:
repr(store)
store['a'] = tm.makeTimeSeries()
store['b'] = tm.makeStringSeries()
store['c'] = tm.makeDataFrame()
store['d'] = tm.makePanel()
store['foo/bar'] = tm.makePanel()
store.append('e', tm.makePanel())
df = tm.makeDataFrame()
df['obj1'] = 'foo'
df['obj2'] = 'bar'
df['bool1'] = df['A'] > 0
df['bool2'] = df['B'] > 0
df['bool3'] = True
df['int1'] = 1
df['int2'] = 2
df['timestamp1'] = Timestamp('20010102')
df['timestamp2'] = Timestamp('20010103')
df['datetime1'] = datetime.datetime(2001,1,2,0,0)
df['datetime2'] = datetime.datetime(2001,1,3,0,0)
df.ix[3:6,['obj1']] = np.nan
df = df.consolidate().convert_objects()
warnings.filterwarnings('ignore', category=PerformanceWarning)
store['df'] = df
warnings.filterwarnings('always', category=PerformanceWarning)
# make a random group in hdf space
store._handle.create_group(store._handle.root,'bah')
repr(store)
str(store)
# storers
with ensure_clean_store(self.path) as store:
df = tm.makeDataFrame()
store.append('df',df)
s = store.get_storer('df')
repr(s)
str(s)
def test_contains(self):
with ensure_clean_store(self.path) as store:
store['a'] = tm.makeTimeSeries()
store['b'] = tm.makeDataFrame()
store['foo/bar'] = tm.makeDataFrame()
self.assertIn('a', store)
self.assertIn('b', store)
self.assertNotIn('c', store)
self.assertIn('foo/bar', store)
self.assertIn('/foo/bar', store)
self.assertNotIn('/foo/b', store)
self.assertNotIn('bar', store)
# GH 2694
warnings.filterwarnings('ignore', category=tables.NaturalNameWarning)
store['node())'] = tm.makeDataFrame()
self.assertIn('node())', store)
def test_versioning(self):
with ensure_clean_store(self.path) as store:
store['a'] = tm.makeTimeSeries()
store['b'] = tm.makeDataFrame()
df = tm.makeTimeDataFrame()
_maybe_remove(store, 'df1')
store.append('df1', df[:10])
store.append('df1', df[10:])
self.assertEqual(store.root.a._v_attrs.pandas_version, '0.15.2')
self.assertEqual(store.root.b._v_attrs.pandas_version, '0.15.2')
self.assertEqual(store.root.df1._v_attrs.pandas_version, '0.15.2')
# write a file and wipe its versioning
_maybe_remove(store, 'df2')
store.append('df2', df)
            # this is an error because its table_type is appendable, but it has
            # no version info
store.get_node('df2')._v_attrs.pandas_version = None
self.assertRaises(Exception, store.select, 'df2')
def test_mode(self):
df = tm.makeTimeDataFrame()
def check(mode):
with ensure_clean_path(self.path) as path:
# constructor
if mode in ['r','r+']:
self.assertRaises(IOError, HDFStore, path, mode=mode)
else:
store = HDFStore(path,mode=mode)
self.assertEqual(store._handle.mode, mode)
store.close()
with ensure_clean_path(self.path) as path:
# context
if mode in ['r','r+']:
def f():
with HDFStore(path,mode=mode) as store:
pass
self.assertRaises(IOError, f)
else:
with HDFStore(path,mode=mode) as store:
self.assertEqual(store._handle.mode, mode)
with ensure_clean_path(self.path) as path:
# conv write
if mode in ['r','r+']:
self.assertRaises(IOError, df.to_hdf, path, 'df', mode=mode)
df.to_hdf(path,'df',mode='w')
else:
df.to_hdf(path,'df',mode=mode)
# conv read
if mode in ['w']:
self.assertRaises(KeyError, read_hdf, path, 'df', mode=mode)
else:
result = read_hdf(path,'df',mode=mode)
assert_frame_equal(result,df)
check('r')
check('r+')
check('a')
check('w')
def test_reopen_handle(self):
with ensure_clean_path(self.path) as path:
store = HDFStore(path,mode='a')
store['a'] = tm.makeTimeSeries()
# invalid mode change
self.assertRaises(PossibleDataLossError, store.open, 'w')
store.close()
self.assertFalse(store.is_open)
# truncation ok here
store.open('w')
self.assertTrue(store.is_open)
self.assertEqual(len(store), 0)
store.close()
self.assertFalse(store.is_open)
store = HDFStore(path,mode='a')
store['a'] = tm.makeTimeSeries()
# reopen as read
store.open('r')
self.assertTrue(store.is_open)
self.assertEqual(len(store), 1)
self.assertEqual(store._mode, 'r')
store.close()
self.assertFalse(store.is_open)
# reopen as append
store.open('a')
self.assertTrue(store.is_open)
self.assertEqual(len(store), 1)
self.assertEqual(store._mode, 'a')
store.close()
self.assertFalse(store.is_open)
# reopen as append (again)
store.open('a')
self.assertTrue(store.is_open)
self.assertEqual(len(store), 1)
self.assertEqual(store._mode, 'a')
store.close()
self.assertFalse(store.is_open)
def test_open_args(self):
with ensure_clean_path(self.path) as path:
df = tm.makeDataFrame()
# create an in memory store
store = HDFStore(path,mode='a',driver='H5FD_CORE',driver_core_backing_store=0)
store['df'] = df
store.append('df2',df)
tm.assert_frame_equal(store['df'],df)
tm.assert_frame_equal(store['df2'],df)
store.close()
# the file should not have actually been written
self.assertFalse(os.path.exists(path))
def test_flush(self):
with ensure_clean_store(self.path) as store:
store['a'] = tm.makeTimeSeries()
store.flush()
store.flush(fsync=True)
def test_get(self):
with ensure_clean_store(self.path) as store:
store['a'] = tm.makeTimeSeries()
left = store.get('a')
right = store['a']
tm.assert_series_equal(left, right)
left = store.get('/a')
right = store['/a']
tm.assert_series_equal(left, right)
self.assertRaises(KeyError, store.get, 'b')
def test_getattr(self):
with ensure_clean_store(self.path) as store:
s = tm.makeTimeSeries()
store['a'] = s
# test attribute access
result = store.a
tm.assert_series_equal(result, s)
result = getattr(store,'a')
tm.assert_series_equal(result, s)
df = tm.makeTimeDataFrame()
store['df'] = df
result = store.df
tm.assert_frame_equal(result, df)
# errors
self.assertRaises(AttributeError, getattr, store, 'd')
for x in ['mode','path','handle','complib']:
self.assertRaises(AttributeError, getattr, store, x)
# not stores
for x in ['mode','path','handle','complib']:
getattr(store,"_%s" % x)
def test_put(self):
with ensure_clean_store(self.path) as store:
ts = tm.makeTimeSeries()
df = tm.makeTimeDataFrame()
store['a'] = ts
store['b'] = df[:10]
store['foo/bar/bah'] = df[:10]
store['foo'] = df[:10]
store['/foo'] = df[:10]
store.put('c', df[:10], format='table')
# not OK, not a table
self.assertRaises(
ValueError, store.put, 'b', df[10:], append=True)
# node does not currently exist, test _is_table_type returns False in
# this case
# _maybe_remove(store, 'f')
# self.assertRaises(ValueError, store.put, 'f', df[10:], append=True)
# can't put to a table (use append instead)
self.assertRaises(ValueError, store.put, 'c', df[10:], append=True)
# overwrite table
store.put('c', df[:10], format='table', append=False)
tm.assert_frame_equal(df[:10], store['c'])
def test_put_string_index(self):
with ensure_clean_store(self.path) as store:
index = Index(
["I am a very long string index: %s" % i for i in range(20)])
s = Series(np.arange(20), index=index)
df = DataFrame({'A': s, 'B': s})
store['a'] = s
tm.assert_series_equal(store['a'], s)
store['b'] = df
tm.assert_frame_equal(store['b'], df)
# mixed length
index = Index(['abcdefghijklmnopqrstuvwxyz1234567890'] + ["I am a very long string index: %s" % i for i in range(20)])
s = Series(np.arange(21), index=index)
df = DataFrame({'A': s, 'B': s})
store['a'] = s
tm.assert_series_equal(store['a'], s)
store['b'] = df
tm.assert_frame_equal(store['b'], df)
def test_put_compression(self):
with ensure_clean_store(self.path) as store:
df = tm.makeTimeDataFrame()
store.put('c', df, format='table', complib='zlib')
tm.assert_frame_equal(store['c'], df)
# can't compress if format='fixed'
self.assertRaises(ValueError, store.put, 'b', df,
format='fixed', complib='zlib')
def test_put_compression_blosc(self):
tm.skip_if_no_package('tables', '2.2', app='blosc support')
df = tm.makeTimeDataFrame()
with ensure_clean_store(self.path) as store:
# can't compress if format='fixed'
self.assertRaises(ValueError, store.put, 'b', df,
format='fixed', complib='blosc')
store.put('c', df, format='table', complib='blosc')
tm.assert_frame_equal(store['c'], df)
def test_put_integer(self):
# non-date, non-string index
df = DataFrame(np.random.randn(50, 100))
self._check_roundtrip(df, tm.assert_frame_equal)
def test_put_mixed_type(self):
df = tm.makeTimeDataFrame()
df['obj1'] = 'foo'
df['obj2'] = 'bar'
df['bool1'] = df['A'] > 0
df['bool2'] = df['B'] > 0
df['bool3'] = True
df['int1'] = 1
df['int2'] = 2
df['timestamp1'] = Timestamp('20010102')
df['timestamp2'] = Timestamp('20010103')
df['datetime1'] = datetime.datetime(2001, 1, 2, 0, 0)
df['datetime2'] = datetime.datetime(2001, 1, 3, 0, 0)
df.ix[3:6, ['obj1']] = np.nan
df = df.consolidate().convert_objects()
with ensure_clean_store(self.path) as store:
_maybe_remove(store, 'df')
# cannot use assert_produces_warning here for some reason
# a PendingDeprecationWarning is also raised?
warnings.filterwarnings('ignore', category=PerformanceWarning)
store.put('df',df)
warnings.filterwarnings('always', category=PerformanceWarning)
expected = store.get('df')
tm.assert_frame_equal(expected,df)
def test_append(self):
with ensure_clean_store(self.path) as store:
df = tm.makeTimeDataFrame()
_maybe_remove(store, 'df1')
store.append('df1', df[:10])
store.append('df1', df[10:])
tm.assert_frame_equal(store['df1'], df)
_maybe_remove(store, 'df2')
store.put('df2', df[:10], format='table')
store.append('df2', df[10:])
tm.assert_frame_equal(store['df2'], df)
_maybe_remove(store, 'df3')
store.append('/df3', df[:10])
store.append('/df3', df[10:])
tm.assert_frame_equal(store['df3'], df)
            # this is allowed, but you almost always don't want to do it
with tm.assert_produces_warning(expected_warning=tables.NaturalNameWarning):
_maybe_remove(store, '/df3 foo')
store.append('/df3 foo', df[:10])
store.append('/df3 foo', df[10:])
tm.assert_frame_equal(store['df3 foo'], df)
# panel
wp = tm.makePanel()
_maybe_remove(store, 'wp1')
store.append('wp1', wp.ix[:, :10, :])
store.append('wp1', wp.ix[:, 10:, :])
assert_panel_equal(store['wp1'], wp)
# ndim
p4d = tm.makePanel4D()
_maybe_remove(store, 'p4d')
store.append('p4d', p4d.ix[:, :, :10, :])
store.append('p4d', p4d.ix[:, :, 10:, :])
assert_panel4d_equal(store['p4d'], p4d)
# test using axis labels
_maybe_remove(store, 'p4d')
store.append('p4d', p4d.ix[:, :, :10, :], axes=[
'items', 'major_axis', 'minor_axis'])
store.append('p4d', p4d.ix[:, :, 10:, :], axes=[
'items', 'major_axis', 'minor_axis'])
assert_panel4d_equal(store['p4d'], p4d)
            # test using a different number of items on each axis
p4d2 = p4d.copy()
p4d2['l4'] = p4d['l1']
p4d2['l5'] = p4d['l1']
_maybe_remove(store, 'p4d2')
store.append(
'p4d2', p4d2, axes=['items', 'major_axis', 'minor_axis'])
assert_panel4d_equal(store['p4d2'], p4d2)
            # test using a different order of items on the non-index axes
_maybe_remove(store, 'wp1')
wp_append1 = wp.ix[:, :10, :]
store.append('wp1', wp_append1)
wp_append2 = wp.ix[:, 10:, :].reindex(items=wp.items[::-1])
store.append('wp1', wp_append2)
assert_panel_equal(store['wp1'], wp)
            # dtype issues - mixed type in a single object column
df = DataFrame(data=[[1, 2], [0, 1], [1, 2], [0, 0]])
df['mixed_column'] = 'testing'
df.ix[2, 'mixed_column'] = np.nan
_maybe_remove(store, 'df')
store.append('df', df)
tm.assert_frame_equal(store['df'], df)
# uints - test storage of uints
uint_data = DataFrame({'u08' : Series(np.random.random_integers(0, high=255, size=5), dtype=np.uint8),
'u16' : Series(np.random.random_integers(0, high=65535, size=5), dtype=np.uint16),
'u32' : Series(np.random.random_integers(0, high=2**30, size=5), dtype=np.uint32),
'u64' : Series([2**58, 2**59, 2**60, 2**61, 2**62], dtype=np.uint64)},
index=np.arange(5))
_maybe_remove(store, 'uints')
store.append('uints', uint_data)
tm.assert_frame_equal(store['uints'], uint_data)
# uints - test storage of uints in indexable columns
_maybe_remove(store, 'uints')
store.append('uints', uint_data, data_columns=['u08','u16','u32']) # 64-bit indices not yet supported
tm.assert_frame_equal(store['uints'], uint_data)
def test_append_series(self):
with ensure_clean_store(self.path) as store:
# basic
ss = tm.makeStringSeries()
ts = tm.makeTimeSeries()
ns = Series(np.arange(100))
store.append('ss', ss)
result = store['ss']
tm.assert_series_equal(result, ss)
self.assertIsNone(result.name)
store.append('ts', ts)
result = store['ts']
tm.assert_series_equal(result, ts)
self.assertIsNone(result.name)
ns.name = 'foo'
store.append('ns', ns)
result = store['ns']
tm.assert_series_equal(result, ns)
self.assertEqual(result.name, ns.name)
# select on the values
expected = ns[ns>60]
result = store.select('ns',Term('foo>60'))
tm.assert_series_equal(result,expected)
# select on the index and values
expected = ns[(ns>70) & (ns.index<90)]
result = store.select('ns',[Term('foo>70'), Term('index<90')])
tm.assert_series_equal(result,expected)
# multi-index
mi = DataFrame(np.random.randn(5,1),columns=['A'])
mi['B'] = np.arange(len(mi))
mi['C'] = 'foo'
mi.loc[3:5,'C'] = 'bar'
mi.set_index(['C','B'],inplace=True)
s = mi.stack()
s.index = s.index.droplevel(2)
store.append('mi', s)
tm.assert_series_equal(store['mi'], s)
def test_store_index_types(self):
# GH5386
# test storing various index types
with ensure_clean_store(self.path) as store:
def check(format,index):
df = DataFrame(np.random.randn(10,2),columns=list('AB'))
df.index = index(len(df))
_maybe_remove(store, 'df')
store.put('df',df,format=format)
assert_frame_equal(df,store['df'])
for index in [ tm.makeFloatIndex, tm.makeStringIndex, tm.makeIntIndex,
tm.makeDateIndex ]:
check('table',index)
check('fixed',index)
# period index currently broken for table
            # see GH7796 FIXME
check('fixed',tm.makePeriodIndex)
#check('table',tm.makePeriodIndex)
# unicode
index = tm.makeUnicodeIndex
if compat.PY3:
check('table',index)
check('fixed',index)
else:
# only support for fixed types (and they have a perf warning)
self.assertRaises(TypeError, check, 'table', index)
with tm.assert_produces_warning(expected_warning=PerformanceWarning):
check('fixed',index)
def test_encoding(self):
if sys.byteorder != 'little':
raise nose.SkipTest('system byteorder is not little')
with ensure_clean_store(self.path) as store:
df = DataFrame(dict(A='foo',B='bar'),index=range(5))
df.loc[2,'A'] = np.nan
df.loc[3,'B'] = np.nan
_maybe_remove(store, 'df')
store.append('df', df, encoding='ascii')
tm.assert_frame_equal(store['df'], df)
expected = df.reindex(columns=['A'])
result = store.select('df',Term('columns=A',encoding='ascii'))
tm.assert_frame_equal(result,expected)
def test_append_some_nans(self):
with ensure_clean_store(self.path) as store:
df = DataFrame({'A' : Series(np.random.randn(20)).astype('int32'),
'A1' : np.random.randn(20),
'A2' : np.random.randn(20),
'B' : 'foo', 'C' : 'bar', 'D' : Timestamp("20010101"), 'E' : datetime.datetime(2001,1,2,0,0) },
index=np.arange(20))
# some nans
_maybe_remove(store, 'df1')
df.ix[0:15,['A1','B','D','E']] = np.nan
store.append('df1', df[:10])
store.append('df1', df[10:])
tm.assert_frame_equal(store['df1'], df)
# first column
df1 = df.copy()
df1.ix[:,'A1'] = np.nan
_maybe_remove(store, 'df1')
store.append('df1', df1[:10])
store.append('df1', df1[10:])
tm.assert_frame_equal(store['df1'], df1)
# 2nd column
df2 = df.copy()
df2.ix[:,'A2'] = np.nan
_maybe_remove(store, 'df2')
store.append('df2', df2[:10])
store.append('df2', df2[10:])
tm.assert_frame_equal(store['df2'], df2)
# datetimes
df3 = df.copy()
df3.ix[:,'E'] = np.nan
_maybe_remove(store, 'df3')
store.append('df3', df3[:10])
store.append('df3', df3[10:])
tm.assert_frame_equal(store['df3'], df3)
def test_append_all_nans(self):
with ensure_clean_store(self.path) as store:
df = DataFrame({'A1' : np.random.randn(20),
'A2' : np.random.randn(20)},
index=np.arange(20))
df.ix[0:15,:] = np.nan
# nan some entire rows (dropna=True)
_maybe_remove(store, 'df')
store.append('df', df[:10], dropna=True)
store.append('df', df[10:], dropna=True)
tm.assert_frame_equal(store['df'], df[-4:])
# nan some entire rows (dropna=False)
_maybe_remove(store, 'df2')
store.append('df2', df[:10], dropna=False)
store.append('df2', df[10:], dropna=False)
tm.assert_frame_equal(store['df2'], df)
# tests the option io.hdf.dropna_table
pandas.set_option('io.hdf.dropna_table',False)
_maybe_remove(store, 'df3')
store.append('df3', df[:10])
store.append('df3', df[10:])
tm.assert_frame_equal(store['df3'], df)
pandas.set_option('io.hdf.dropna_table',True)
_maybe_remove(store, 'df4')
store.append('df4', df[:10])
store.append('df4', df[10:])
tm.assert_frame_equal(store['df4'], df[-4:])
# nan some entire rows (strings are still written!)
df = DataFrame({'A1' : np.random.randn(20),
'A2' : np.random.randn(20),
'B' : 'foo', 'C' : 'bar'},
index=np.arange(20))
df.ix[0:15,:] = np.nan
_maybe_remove(store, 'df')
store.append('df', df[:10], dropna=True)
store.append('df', df[10:], dropna=True)
tm.assert_frame_equal(store['df'], df)
_maybe_remove(store, 'df2')
store.append('df2', df[:10], dropna=False)
store.append('df2', df[10:], dropna=False)
tm.assert_frame_equal(store['df2'], df)
# nan some entire rows (but since we have dates they are still written!)
df = DataFrame({'A1' : np.random.randn(20),
'A2' : np.random.randn(20),
'B' : 'foo', 'C' : 'bar', 'D' : Timestamp("20010101"), 'E' : datetime.datetime(2001,1,2,0,0) },
index=np.arange(20))
df.ix[0:15,:] = np.nan
_maybe_remove(store, 'df')
store.append('df', df[:10], dropna=True)
store.append('df', df[10:], dropna=True)
tm.assert_frame_equal(store['df'], df)
_maybe_remove(store, 'df2')
store.append('df2', df[:10], dropna=False)
store.append('df2', df[10:], dropna=False)
tm.assert_frame_equal(store['df2'], df)
def test_append_frame_column_oriented(self):
with ensure_clean_store(self.path) as store:
# column oriented
df = tm.makeTimeDataFrame()
_maybe_remove(store, 'df1')
store.append('df1', df.ix[:, :2], axes=['columns'])
store.append('df1', df.ix[:, 2:])
tm.assert_frame_equal(store['df1'], df)
result = store.select('df1', 'columns=A')
expected = df.reindex(columns=['A'])
tm.assert_frame_equal(expected, result)
# selection on the non-indexable
result = store.select(
'df1', ('columns=A', Term('index=df.index[0:4]')))
expected = df.reindex(columns=['A'], index=df.index[0:4])
tm.assert_frame_equal(expected, result)
# this isn't supported
self.assertRaises(TypeError, store.select, 'df1', (
'columns=A', Term('index>df.index[4]')))
def test_append_with_different_block_ordering(self):
#GH 4096; using same frames, but different block orderings
with ensure_clean_store(self.path) as store:
for i in range(10):
df = DataFrame(np.random.randn(10,2),columns=list('AB'))
df['index'] = range(10)
df['index'] += i*10
df['int64'] = Series([1]*len(df),dtype='int64')
df['int16'] = Series([1]*len(df),dtype='int16')
if i % 2 == 0:
del df['int64']
df['int64'] = Series([1]*len(df),dtype='int64')
if i % 3 == 0:
a = df.pop('A')
df['A'] = a
df.set_index('index',inplace=True)
store.append('df',df)
# test a different ordering but with more fields (an invalid combination)
with ensure_clean_store(self.path) as store:
df = DataFrame(np.random.randn(10,2),columns=list('AB'), dtype='float64')
df['int64'] = Series([1]*len(df),dtype='int64')
df['int16'] = Series([1]*len(df),dtype='int16')
store.append('df',df)
# store additional fields in different blocks
df['int16_2'] = Series([1]*len(df),dtype='int16')
self.assertRaises(ValueError, store.append, 'df', df)
# store multiple additional fields in different blocks
df['float_3'] = Series([1.]*len(df),dtype='float64')
self.assertRaises(ValueError, store.append, 'df', df)
def test_ndim_indexables(self):
""" test using ndim tables in new ways"""
with ensure_clean_store(self.path) as store:
p4d = tm.makePanel4D()
def check_indexers(key, indexers):
for i, idx in enumerate(indexers):
self.assertTrue(getattr(getattr(
store.root, key).table.description, idx)._v_pos == i)
# append then change (will take existing schema)
indexers = ['items', 'major_axis', 'minor_axis']
_maybe_remove(store, 'p4d')
store.append('p4d', p4d.ix[:, :, :10, :], axes=indexers)
store.append('p4d', p4d.ix[:, :, 10:, :])
assert_panel4d_equal(store.select('p4d'), p4d)
check_indexers('p4d', indexers)
# same as above, but try to append with different axes
_maybe_remove(store, 'p4d')
store.append('p4d', p4d.ix[:, :, :10, :], axes=indexers)
store.append('p4d', p4d.ix[:, :, 10:, :], axes=[
'labels', 'items', 'major_axis'])
assert_panel4d_equal(store.select('p4d'), p4d)
check_indexers('p4d', indexers)
# pass incorrect number of axes
_maybe_remove(store, 'p4d')
self.assertRaises(ValueError, store.append, 'p4d', p4d.ix[
:, :, :10, :], axes=['major_axis', 'minor_axis'])
# different than default indexables #1
indexers = ['labels', 'major_axis', 'minor_axis']
_maybe_remove(store, 'p4d')
store.append('p4d', p4d.ix[:, :, :10, :], axes=indexers)
store.append('p4d', p4d.ix[:, :, 10:, :])
assert_panel4d_equal(store['p4d'], p4d)
check_indexers('p4d', indexers)
# different than default indexables #2
indexers = ['major_axis', 'labels', 'minor_axis']
_maybe_remove(store, 'p4d')
store.append('p4d', p4d.ix[:, :, :10, :], axes=indexers)
store.append('p4d', p4d.ix[:, :, 10:, :])
assert_panel4d_equal(store['p4d'], p4d)
check_indexers('p4d', indexers)
# partial selection
result = store.select('p4d', ['labels=l1'])
expected = p4d.reindex(labels=['l1'])
assert_panel4d_equal(result, expected)
# partial selection2
result = store.select('p4d', [Term(
'labels=l1'), Term('items=ItemA'), Term('minor_axis=B')])
expected = p4d.reindex(
labels=['l1'], items=['ItemA'], minor_axis=['B'])
assert_panel4d_equal(result, expected)
# non-existent partial selection
result = store.select('p4d', [Term(
'labels=l1'), Term('items=Item1'), Term('minor_axis=B')])
expected = p4d.reindex(labels=['l1'], items=[], minor_axis=['B'])
assert_panel4d_equal(result, expected)
def test_append_with_strings(self):
with ensure_clean_store(self.path) as store:
wp = tm.makePanel()
wp2 = wp.rename_axis(
dict([(x, "%s_extra" % x) for x in wp.minor_axis]), axis=2)
def check_col(key,name,size):
self.assertEqual(getattr(store.get_storer(key).table.description,name).itemsize, size)
store.append('s1', wp, min_itemsize=20)
store.append('s1', wp2)
expected = concat([wp, wp2], axis=2)
expected = expected.reindex(minor_axis=sorted(expected.minor_axis))
assert_panel_equal(store['s1'], expected)
check_col('s1', 'minor_axis', 20)
# test dict format
store.append('s2', wp, min_itemsize={'minor_axis': 20})
store.append('s2', wp2)
expected = concat([wp, wp2], axis=2)
expected = expected.reindex(minor_axis=sorted(expected.minor_axis))
assert_panel_equal(store['s2'], expected)
check_col('s2', 'minor_axis', 20)
# apply the wrong field (similar to #1)
store.append('s3', wp, min_itemsize={'major_axis': 20})
self.assertRaises(ValueError, store.append, 's3', wp2)
# test truncation of bigger strings
store.append('s4', wp)
self.assertRaises(ValueError, store.append, 's4', wp2)
# avoid truncation on elements
df = DataFrame([[123, 'asdqwerty'], [345, 'dggnhebbsdfbdfb']])
store.append('df_big', df)
tm.assert_frame_equal(store.select('df_big'), df)
check_col('df_big', 'values_block_1', 15)
# appending smaller string ok
df2 = DataFrame([[124, 'asdqy'], [346, 'dggnhefbdfb']])
store.append('df_big', df2)
expected = concat([df, df2])
tm.assert_frame_equal(store.select('df_big'), expected)
check_col('df_big', 'values_block_1', 15)
# avoid truncation on elements
df = DataFrame([[123, 'asdqwerty'], [345, 'dggnhebbsdfbdfb']])
store.append('df_big2', df, min_itemsize={'values': 50})
tm.assert_frame_equal(store.select('df_big2'), df)
check_col('df_big2', 'values_block_1', 50)
# bigger string on next append
store.append('df_new', df)
df_new = DataFrame(
[[124, 'abcdefqhij'], [346, 'abcdefghijklmnopqrtsuvwxyz']])
self.assertRaises(ValueError, store.append, 'df_new', df_new)
# with nans
_maybe_remove(store, 'df')
df = tm.makeTimeDataFrame()
df['string'] = 'foo'
df.ix[1:4, 'string'] = np.nan
df['string2'] = 'bar'
df.ix[4:8, 'string2'] = np.nan
df['string3'] = 'bah'
df.ix[1:, 'string3'] = np.nan
store.append('df', df)
result = store.select('df')
tm.assert_frame_equal(result, df)
with ensure_clean_store(self.path) as store:
def check_col(key,name,size):
self.assertEqual(getattr(store.get_storer(key).table.description,name).itemsize, size)
df = DataFrame(dict(A = 'foo', B = 'bar'),index=range(10))
# a min_itemsize that creates a data_column
_maybe_remove(store, 'df')
store.append('df', df, min_itemsize={'A' : 200 })
check_col('df', 'A', 200)
self.assertEqual(store.get_storer('df').data_columns, ['A'])
# a min_itemsize key also adds that column to the existing data_columns
_maybe_remove(store, 'df')
store.append('df', df, data_columns = ['B'], min_itemsize={'A' : 200 })
check_col('df', 'A', 200)
self.assertEqual(store.get_storer('df').data_columns, ['B','A'])
# min_itemsize on 'values' sizes the data columns and the values block without adding data_columns
_maybe_remove(store, 'df')
store.append('df', df, data_columns = ['B'], min_itemsize={'values' : 200 })
check_col('df', 'B', 200)
check_col('df', 'values_block_0', 200)
self.assertEqual(store.get_storer('df').data_columns, ['B'])
# infer the .typ on subsequent appends
_maybe_remove(store, 'df')
store.append('df', df[:5], min_itemsize=200)
store.append('df', df[5:], min_itemsize=200)
tm.assert_frame_equal(store['df'], df)
# invalid min_itemsize keys
df = DataFrame(['foo','foo','foo','barh','barh','barh'],columns=['A'])
_maybe_remove(store, 'df')
self.assertRaises(ValueError, store.append, 'df', df, min_itemsize={'foo' : 20, 'foobar' : 20})
def test_append_with_data_columns(self):
with ensure_clean_store(self.path) as store:
df = tm.makeTimeDataFrame()
df.loc[:,'B'].iloc[0] = 1.
_maybe_remove(store, 'df')
store.append('df', df[:2], data_columns=['B'])
store.append('df', df[2:])
tm.assert_frame_equal(store['df'], df)
# check that we have indices created
assert(store._handle.root.df.table.cols.index.is_indexed is True)
assert(store._handle.root.df.table.cols.B.is_indexed is True)
# data column searching
result = store.select('df', [Term('B>0')])
expected = df[df.B > 0]
tm.assert_frame_equal(result, expected)
# data column searching (with an indexable and a data_columns)
result = store.select(
'df', [Term('B>0'), Term('index>df.index[3]')])
df_new = df.reindex(index=df.index[4:])
expected = df_new[df_new.B > 0]
tm.assert_frame_equal(result, expected)
# data column selection with a string data_column
df_new = df.copy()
df_new['string'] = 'foo'
df_new.loc[1:4,'string'] = np.nan
df_new.loc[5:6,'string'] = 'bar'
_maybe_remove(store, 'df')
store.append('df', df_new, data_columns=['string'])
result = store.select('df', [Term('string=foo')])
expected = df_new[df_new.string == 'foo']
tm.assert_frame_equal(result, expected)
# using min_itemsize and a data column
def check_col(key,name,size):
self.assertEqual(getattr(store.get_storer(key).table.description,name).itemsize, size)
with ensure_clean_store(self.path) as store:
_maybe_remove(store, 'df')
store.append('df', df_new, data_columns=['string'],
min_itemsize={'string': 30})
check_col('df', 'string', 30)
_maybe_remove(store, 'df')
store.append(
'df', df_new, data_columns=['string'], min_itemsize=30)
check_col('df', 'string', 30)
_maybe_remove(store, 'df')
store.append('df', df_new, data_columns=['string'],
min_itemsize={'values': 30})
check_col('df', 'string', 30)
with ensure_clean_store(self.path) as store:
df_new['string2'] = 'foobarbah'
df_new['string_block1'] = 'foobarbah1'
df_new['string_block2'] = 'foobarbah2'
_maybe_remove(store, 'df')
store.append('df', df_new, data_columns=['string', 'string2'], min_itemsize={'string': 30, 'string2': 40, 'values': 50})
check_col('df', 'string', 30)
check_col('df', 'string2', 40)
check_col('df', 'values_block_1', 50)
with ensure_clean_store(self.path) as store:
# multiple data columns
df_new = df.copy()
df_new.ix[0,'A'] = 1.
df_new.ix[0,'B'] = -1.
df_new['string'] = 'foo'
df_new.loc[1:4,'string'] = np.nan
df_new.loc[5:6,'string'] = 'bar'
df_new['string2'] = 'foo'
df_new.loc[2:5,'string2'] = np.nan
df_new.loc[7:8,'string2'] = 'bar'
_maybe_remove(store, 'df')
store.append(
'df', df_new, data_columns=['A', 'B', 'string', 'string2'])
result = store.select('df', [Term('string=foo'), Term(
'string2=foo'), Term('A>0'), Term('B<0')])
expected = df_new[(df_new.string == 'foo') & (
df_new.string2 == 'foo') & (df_new.A > 0) & (df_new.B < 0)]
tm.assert_frame_equal(result, expected, check_index_type=False)
# yield an empty frame
result = store.select('df', [Term('string=foo'), Term(
'string2=cool')])
expected = df_new[(df_new.string == 'foo') & (
df_new.string2 == 'cool')]
tm.assert_frame_equal(result, expected, check_index_type=False)
with ensure_clean_store(self.path) as store:
# doc example
df_dc = df.copy()
df_dc['string'] = 'foo'
df_dc.ix[4:6, 'string'] = np.nan
df_dc.ix[7:9, 'string'] = 'bar'
df_dc['string2'] = 'cool'
df_dc['datetime'] = Timestamp('20010102')
df_dc = df_dc.convert_objects()
df_dc.ix[3:5, ['A', 'B', 'datetime']] = np.nan
_maybe_remove(store, 'df_dc')
store.append('df_dc', df_dc, data_columns=['B', 'C',
'string', 'string2', 'datetime'])
result = store.select('df_dc', [Term('B>0')])
expected = df_dc[df_dc.B > 0]
tm.assert_frame_equal(result, expected, check_index_type=False)
result = store.select(
'df_dc', ['B > 0', 'C > 0', 'string == foo'])
expected = df_dc[(df_dc.B > 0) & (df_dc.C > 0) & (
df_dc.string == 'foo')]
tm.assert_frame_equal(result, expected, check_index_type=False)
with ensure_clean_store(self.path) as store:
# doc example part 2
np.random.seed(1234)
index = date_range('1/1/2000', periods=8)
df_dc = DataFrame(np.random.randn(8, 3), index=index,
columns=['A', 'B', 'C'])
df_dc['string'] = 'foo'
df_dc.ix[4:6,'string'] = np.nan
df_dc.ix[7:9,'string'] = 'bar'
df_dc.ix[:,['B','C']] = df_dc.ix[:,['B','C']].abs()
df_dc['string2'] = 'cool'
# on-disk operations
store.append('df_dc', df_dc, data_columns = ['B', 'C', 'string', 'string2'])
result = store.select('df_dc', [ Term('B>0') ])
expected = df_dc[df_dc.B>0]
tm.assert_frame_equal(result,expected)
result = store.select('df_dc', ['B > 0', 'C > 0', 'string == "foo"'])
expected = df_dc[(df_dc.B > 0) & (df_dc.C > 0) & (df_dc.string == 'foo')]
tm.assert_frame_equal(result,expected)
with ensure_clean_store(self.path) as store:
# panel
# GH5717 not handling data_columns
np.random.seed(1234)
p = tm.makePanel()
store.append('p1',p)
tm.assert_panel_equal(store.select('p1'),p)
store.append('p2',p,data_columns=True)
tm.assert_panel_equal(store.select('p2'),p)
result = store.select('p2',where='ItemA>0')
expected = p.to_frame()
expected = expected[expected['ItemA']>0]
tm.assert_frame_equal(result.to_frame(),expected)
result = store.select('p2',where='ItemA>0 & minor_axis=["A","B"]')
expected = p.to_frame()
expected = expected[expected['ItemA']>0]
expected = expected[expected.reset_index(level=['major']).index.isin(['A','B'])]
tm.assert_frame_equal(result.to_frame(),expected)
def test_create_table_index(self):
with ensure_clean_store(self.path) as store:
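# helper: fetch the underlying PyTables column object so its index properties can be inspected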
def col(t,column):
return getattr(store.get_storer(t).table.cols,column)
# index=False
wp = tm.makePanel()
store.append('p5', wp, index=False)
store.create_table_index('p5', columns=['major_axis'])
assert(col('p5', 'major_axis').is_indexed is True)
assert(col('p5', 'minor_axis').is_indexed is False)
# index=True
store.append('p5i', wp, index=True)
assert(col('p5i', 'major_axis').is_indexed is True)
assert(col('p5i', 'minor_axis').is_indexed is True)
# default optlevels
store.get_storer('p5').create_index()
assert(col('p5', 'major_axis').index.optlevel == 6)
assert(col('p5', 'minor_axis').index.kind == 'medium')
# let's change the indexing scheme
store.create_table_index('p5')
assert(col('p5', 'major_axis').index.optlevel == 6)
assert(col('p5', 'minor_axis').index.kind == 'medium')
store.create_table_index('p5', optlevel=9)
assert(col('p5', 'major_axis').index.optlevel == 9)
assert(col('p5', 'minor_axis').index.kind == 'medium')
store.create_table_index('p5', kind='full')
assert(col('p5', 'major_axis').index.optlevel == 9)
assert(col('p5', 'minor_axis').index.kind == 'full')
store.create_table_index('p5', optlevel=1, kind='light')
assert(col('p5', 'major_axis').index.optlevel == 1)
assert(col('p5', 'minor_axis').index.kind == 'light')
# data columns
df = tm.makeTimeDataFrame()
df['string'] = 'foo'
df['string2'] = 'bar'
store.append('f', df, data_columns=['string', 'string2'])
assert(col('f', 'index').is_indexed is True)
assert(col('f', 'string').is_indexed is True)
assert(col('f', 'string2').is_indexed is True)
# specify index=columns
store.append(
'f2', df, index=['string'], data_columns=['string', 'string2'])
assert(col('f2', 'index').is_indexed is False)
assert(col('f2', 'string').is_indexed is True)
assert(col('f2', 'string2').is_indexed is False)
# try to index a non-table
_maybe_remove(store, 'f2')
store.put('f2', df)
self.assertRaises(TypeError, store.create_table_index, 'f2')
def test_append_diff_item_order(self):
wp = tm.makePanel()
wp1 = wp.ix[:, :10, :]
wp2 = wp.ix[['ItemC', 'ItemB', 'ItemA'], 10:, :]
with ensure_clean_store(self.path) as store:
store.put('panel', wp1, format='table')
self.assertRaises(ValueError, store.put, 'panel', wp2,
append=True)
def test_append_hierarchical(self):
index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'],
['one', 'two', 'three']],
labels=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
[0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
names=['foo', 'bar'])
df = DataFrame(np.random.randn(10, 3), index=index,
columns=['A', 'B', 'C'])
with ensure_clean_store(self.path) as store:
store.append('mi', df)
result = store.select('mi')
tm.assert_frame_equal(result, df)
# GH 3748
result = store.select('mi',columns=['A','B'])
expected = df.reindex(columns=['A','B'])
tm.assert_frame_equal(result,expected)
with ensure_clean_path('test.hdf') as path:
df.to_hdf(path,'df',format='table')
result = read_hdf(path,'df',columns=['A','B'])
expected = df.reindex(columns=['A','B'])
tm.assert_frame_equal(result,expected)
def test_column_multiindex(self):
# GH 4710
# recreate multi-indexes properly
index = MultiIndex.from_tuples([('A','a'), ('A','b'), ('B','a'), ('B','b')], names=['first','second'])
df = DataFrame(np.arange(12).reshape(3,4), columns=index)
with ensure_clean_store(self.path) as store:
store.put('df',df)
tm.assert_frame_equal(store['df'],df,check_index_type=True,check_column_type=True)
store.put('df1',df,format='table')
tm.assert_frame_equal(store['df1'],df,check_index_type=True,check_column_type=True)
self.assertRaises(ValueError, store.put, 'df2',df,format='table',data_columns=['A'])
self.assertRaises(ValueError, store.put, 'df3',df,format='table',data_columns=True)
# appending a frame with MultiIndex columns to an existing table (see GH 6167)
with ensure_clean_store(self.path) as store:
store.append('df2', df)
store.append('df2', df)
tm.assert_frame_equal(store['df2'], concat((df,df)))
# non_index_axes name
df = DataFrame(np.arange(12).reshape(3,4), columns=Index(list('ABCD'),name='foo'))
with ensure_clean_store(self.path) as store:
store.put('df1',df,format='table')
tm.assert_frame_equal(store['df1'],df,check_index_type=True,check_column_type=True)
def test_store_multiindex(self):
# validate multi-index names
# GH 5527
with ensure_clean_store(self.path) as store:
def make_index(names=None):
return MultiIndex.from_tuples([( datetime.datetime(2013,12,d), s, t) for d in range(1,3) for s in range(2) for t in range(3)],
names=names)
# no names
_maybe_remove(store, 'df')
df = DataFrame(np.zeros((12,2)), columns=['a','b'], index=make_index())
store.append('df',df)
tm.assert_frame_equal(store.select('df'),df)
# partial names
_maybe_remove(store, 'df')
df = DataFrame(np.zeros((12,2)), columns=['a','b'], index=make_index(['date',None,None]))
store.append('df',df)
tm.assert_frame_equal(store.select('df'),df)
# series
_maybe_remove(store, 's')
s = Series(np.zeros(12), index=make_index(['date',None,None]))
store.append('s',s)
tm.assert_series_equal(store.select('s'),s)
# dup with column
_maybe_remove(store, 'df')
df = DataFrame(np.zeros((12,2)), columns=['a','b'], index=make_index(['date','a','t']))
self.assertRaises(ValueError, store.append, 'df',df)
# dup within level
_maybe_remove(store, 'df')
df = DataFrame(np.zeros((12,2)), columns=['a','b'], index=make_index(['date','date','date']))
self.assertRaises(ValueError, store.append, 'df',df)
# fully named
_maybe_remove(store, 'df')
df = DataFrame(np.zeros((12,2)), columns=['a','b'], index=make_index(['date','s','t']))
store.append('df',df)
tm.assert_frame_equal(store.select('df'),df)
def test_select_columns_in_where(self):
# GH 6169
# recreate multi-indexes when columns is passed
# in the `where` argument
index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'],
['one', 'two', 'three']],
labels=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
[0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
names=['foo_name', 'bar_name'])
# With a DataFrame
df = DataFrame(np.random.randn(10, 3), index=index,
columns=['A', 'B', 'C'])
with ensure_clean_store(self.path) as store:
store.put('df', df, format='table')
expected = df[['A']]
tm.assert_frame_equal(store.select('df', columns=['A']), expected)
tm.assert_frame_equal(store.select('df', where="columns=['A']"), expected)
# With a Series
s = Series(np.random.randn(10), index=index,
name='A')
with ensure_clean_store(self.path) as store:
store.put('s', s, format='table')
tm.assert_series_equal(store.select('s', where="columns=['A']"),s)
def test_pass_spec_to_storer(self):
df = tm.makeDataFrame()
with ensure_clean_store(self.path) as store:
store.put('df',df)
self.assertRaises(TypeError, store.select, 'df', columns=['A'])
self.assertRaises(TypeError, store.select, 'df',where=[('columns=A')])
def test_append_misc(self):
with ensure_clean_store(self.path) as store:
# unsupported data types for non-tables
p4d = tm.makePanel4D()
self.assertRaises(TypeError, store.put,'p4d',p4d)
# unsupported data types
self.assertRaises(TypeError, store.put,'abc',None)
self.assertRaises(TypeError, store.put,'abc','123')
self.assertRaises(TypeError, store.put,'abc',123)
self.assertRaises(TypeError, store.put,'abc',np.arange(5))
df = tm.makeDataFrame()
store.append('df', df, chunksize=1)
result = store.select('df')
tm.assert_frame_equal(result, df)
store.append('df1', df, expectedrows=10)
result = store.select('df1')
tm.assert_frame_equal(result, df)
# more chunksize in append tests
def check(obj, comparator):
for c in [10, 200, 1000]:
with ensure_clean_store(self.path,mode='w') as store:
store.append('obj', obj, chunksize=c)
result = store.select('obj')
comparator(result,obj)
df = tm.makeDataFrame()
df['string'] = 'foo'
df['float322'] = 1.
df['float322'] = df['float322'].astype('float32')
df['bool'] = df['float322'] > 0
df['time1'] = Timestamp('20130101')
df['time2'] = Timestamp('20130102')
check(df, tm.assert_frame_equal)
p = tm.makePanel()
check(p, assert_panel_equal)
p4d = tm.makePanel4D()
check(p4d, assert_panel4d_equal)
# empty frame, GH4273
with ensure_clean_store(self.path) as store:
# 0 len
df_empty = DataFrame(columns=list('ABC'))
store.append('df',df_empty)
self.assertRaises(KeyError,store.select, 'df')
# repeated append of 0/non-zero frames
df = DataFrame(np.random.rand(10,3),columns=list('ABC'))
store.append('df',df)
assert_frame_equal(store.select('df'),df)
store.append('df',df_empty)
assert_frame_equal(store.select('df'),df)
# store
df = DataFrame(columns=list('ABC'))
store.put('df2',df)
assert_frame_equal(store.select('df2'),df)
# 0 len
p_empty = Panel(items=list('ABC'))
store.append('p',p_empty)
self.assertRaises(KeyError,store.select, 'p')
# repeated append of 0/non-zero frames
p = Panel(np.random.randn(3,4,5),items=list('ABC'))
store.append('p',p)
assert_panel_equal(store.select('p'),p)
store.append('p',p_empty)
assert_panel_equal(store.select('p'),p)
# store
store.put('p2',p_empty)
assert_panel_equal(store.select('p2'),p_empty)
def test_append_raise(self):
with ensure_clean_store(self.path) as store:
# test append with invalid input to get good error messages
# list in column
df = tm.makeDataFrame()
df['invalid'] = [['a']] * len(df)
self.assertEqual(df.dtypes['invalid'], np.object_)
self.assertRaises(TypeError, store.append,'df',df)
# multiple invalid columns
df['invalid2'] = [['a']] * len(df)
df['invalid3'] = [['a']] * len(df)
self.assertRaises(TypeError, store.append,'df',df)
# datetime with embedded nans as object
df = tm.makeDataFrame()
s = Series(datetime.datetime(2001,1,2),index=df.index)
s = s.astype(object)
s[0:5] = np.nan
df['invalid'] = s
self.assertEqual(df.dtypes['invalid'], np.object_)
self.assertRaises(TypeError, store.append,'df', df)
# ndarray passed directly
self.assertRaises(TypeError, store.append,'df',np.arange(10))
# series directly
self.assertRaises(TypeError, store.append,'df',Series(np.arange(10)))
# appending an incompatible table
df = tm.makeDataFrame()
store.append('df',df)
df['foo'] = 'foo'
self.assertRaises(ValueError, store.append,'df',df)
def test_table_index_incompatible_dtypes(self):
df1 = DataFrame({'a': [1, 2, 3]})
df2 = DataFrame({'a': [4, 5, 6]},
index=date_range('1/1/2000', periods=3))
with ensure_clean_store(self.path) as store:
store.put('frame', df1, format='table')
self.assertRaises(TypeError, store.put, 'frame', df2,
format='table', append=True)
def test_table_values_dtypes_roundtrip(self):
with ensure_clean_store(self.path) as store:
df1 = DataFrame({'a': [1, 2, 3]}, dtype='f8')
store.append('df_f8', df1)
assert_series_equal(df1.dtypes,store['df_f8'].dtypes)
df2 = DataFrame({'a': [1, 2, 3]}, dtype='i8')
store.append('df_i8', df2)
assert_series_equal(df2.dtypes,store['df_i8'].dtypes)
# incompatible dtype
self.assertRaises(ValueError, store.append, 'df_i8', df1)
# check creation/storage/retrieval of float32 (a bit hacky to actually create them though)
df1 = DataFrame(np.array([[1],[2],[3]],dtype='f4'),columns = ['A'])
store.append('df_f4', df1)
assert_series_equal(df1.dtypes,store['df_f4'].dtypes)
assert df1.dtypes[0] == 'float32'
# check with mixed dtypes
df1 = DataFrame(dict([ (c,Series(np.random.randn(5),dtype=c)) for c in
['float32','float64','int32','int64','int16','int8'] ]))
df1['string'] = 'foo'
df1['float322'] = 1.
df1['float322'] = df1['float322'].astype('float32')
df1['bool'] = df1['float32'] > 0
df1['time1'] = Timestamp('20130101')
df1['time2'] = Timestamp('20130102')
store.append('df_mixed_dtypes1', df1)
result = store.select('df_mixed_dtypes1').get_dtype_counts()
expected = Series({ 'float32' : 2, 'float64' : 1,'int32' : 1, 'bool' : 1,
'int16' : 1, 'int8' : 1, 'int64' : 1, 'object' : 1,
'datetime64[ns]' : 2})
result.sort()
expected.sort()
tm.assert_series_equal(result,expected)
def test_table_mixed_dtypes(self):
# frame
df = tm.makeDataFrame()
df['obj1'] = 'foo'
df['obj2'] = 'bar'
df['bool1'] = df['A'] > 0
df['bool2'] = df['B'] > 0
df['bool3'] = True
df['int1'] = 1
df['int2'] = 2
df['timestamp1'] = Timestamp('20010102')
df['timestamp2'] = Timestamp('20010103')
df['datetime1'] = datetime.datetime(2001, 1, 2, 0, 0)
df['datetime2'] = datetime.datetime(2001, 1, 3, 0, 0)
df.ix[3:6, ['obj1']] = np.nan
df = df.consolidate().convert_objects()
with ensure_clean_store(self.path) as store:
store.append('df1_mixed', df)
tm.assert_frame_equal(store.select('df1_mixed'), df)
# panel
wp = tm.makePanel()
wp['obj1'] = 'foo'
wp['obj2'] = 'bar'
wp['bool1'] = wp['ItemA'] > 0
wp['bool2'] = wp['ItemB'] > 0
wp['int1'] = 1
wp['int2'] = 2
wp = wp.consolidate()
with ensure_clean_store(self.path) as store:
store.append('p1_mixed', wp)
assert_panel_equal(store.select('p1_mixed'), wp)
# ndim
wp = tm.makePanel4D()
wp['obj1'] = 'foo'
wp['obj2'] = 'bar'
wp['bool1'] = wp['l1'] > 0
wp['bool2'] = wp['l2'] > 0
wp['int1'] = 1
wp['int2'] = 2
wp = wp.consolidate()
with ensure_clean_store(self.path) as store:
store.append('p4d_mixed', wp)
assert_panel4d_equal(store.select('p4d_mixed'), wp)
def test_unimplemented_dtypes_table_columns(self):
with ensure_clean_store(self.path) as store:
l = [('date', datetime.date(2001, 1, 2))]
# py3 ok for unicode
if not compat.PY3:
l.append(('unicode', u('\\u03c3')))
### currently not supported dtypes ####
for n, f in l:
df = tm.makeDataFrame()
df[n] = f
self.assertRaises(
TypeError, store.append, 'df1_%s' % n, df)
# frame
df = tm.makeDataFrame()
df['obj1'] = 'foo'
df['obj2'] = 'bar'
df['datetime1'] = datetime.date(2001, 1, 2)
df = df.consolidate().convert_objects()
with ensure_clean_store(self.path) as store:
# this fails because we have a date in the object block
self.assertRaises(TypeError, store.append, 'df_unimplemented', df)
def test_append_with_timezones_pytz(self):
from datetime import timedelta
def compare(a,b):
tm.assert_frame_equal(a,b)
# compare the zones on each element
for c in a.columns:
for i in a.index:
a_e = a[c][i]
b_e = b[c][i]
if not (a_e == b_e and a_e.tz == b_e.tz):
raise AssertionError("invalid tz comparsion [%s] [%s]" % (a_e,b_e))
# as columns
with ensure_clean_store(self.path) as store:
_maybe_remove(store, 'df_tz')
df = DataFrame(dict(A = [ Timestamp('20130102 2:00:00',tz='US/Eastern') + timedelta(hours=1)*i for i in range(5) ]))
store.append('df_tz',df,data_columns=['A'])
result = store['df_tz']
compare(result,df)
assert_frame_equal(result,df)
# select with tz aware
compare(store.select('df_tz',where=Term('A>=df.A[3]')),df[df.A>=df.A[3]])
_maybe_remove(store, 'df_tz')
# ensure we include dates in DST and STD time here.
df = DataFrame(dict(A = Timestamp('20130102',tz='US/Eastern'), B = Timestamp('20130603',tz='US/Eastern')),index=range(5))
store.append('df_tz',df)
result = store['df_tz']
compare(result,df)
assert_frame_equal(result,df)
_maybe_remove(store, 'df_tz')
df = DataFrame(dict(A = Timestamp('20130102',tz='US/Eastern'), B = Timestamp('20130102',tz='EET')),index=range(5))
self.assertRaises(TypeError, store.append, 'df_tz', df)
# this is ok
_maybe_remove(store, 'df_tz')
store.append('df_tz',df,data_columns=['A','B'])
result = store['df_tz']
compare(result,df)
assert_frame_equal(result,df)
# can't append with diff timezone
df = DataFrame(dict(A = Timestamp('20130102',tz='US/Eastern'), B = Timestamp('20130102',tz='CET')),index=range(5))
self.assertRaises(ValueError, store.append, 'df_tz', df)
# as index
with ensure_clean_store(self.path) as store:
# GH 4098 example
df = DataFrame(dict(A = Series(lrange(3), index=date_range('2000-1-1',periods=3,freq='H', tz='US/Eastern'))))
_maybe_remove(store, 'df')
store.put('df',df)
result = store.select('df')
assert_frame_equal(result,df)
_maybe_remove(store, 'df')
store.append('df',df)
result = store.select('df')
assert_frame_equal(result,df)
def test_calendar_roundtrip_issue(self):
# 8591
# doc example from tseries holiday section
weekmask_egypt = 'Sun Mon Tue Wed Thu'
holidays = ['2012-05-01', datetime.datetime(2013, 5, 1), np.datetime64('2014-05-01')]
bday_egypt = pandas.offsets.CustomBusinessDay(holidays=holidays, weekmask=weekmask_egypt)
dt = datetime.datetime(2013, 4, 30)
dts = date_range(dt, periods=5, freq=bday_egypt)
s = (Series(dts.weekday, dts).map(Series('Mon Tue Wed Thu Fri Sat Sun'.split())))
with ensure_clean_store(self.path) as store:
store.put('fixed',s)
result = store.select('fixed')
assert_series_equal(result, s)
store.append('table',s)
result = store.select('table')
assert_series_equal(result, s)
def test_append_with_timezones_dateutil(self):
from datetime import timedelta
tm._skip_if_no_dateutil()
# use maybe_get_tz instead of dateutil.tz.gettz to handle the windows filename issues.
from pandas.tslib import maybe_get_tz
gettz = lambda x: maybe_get_tz('dateutil/' + x)
def compare(a, b):
tm.assert_frame_equal(a, b)
# compare the zones on each element
for c in a.columns:
for i in a.index:
a_e = a[c][i]
b_e = b[c][i]
if not (a_e == b_e and a_e.tz == b_e.tz):
raise AssertionError("invalid tz comparsion [%s] [%s]" % (a_e, b_e))
# as columns
with ensure_clean_store(self.path) as store:
_maybe_remove(store, 'df_tz')
df = DataFrame(dict(A=[ Timestamp('20130102 2:00:00', tz=gettz('US/Eastern')) + timedelta(hours=1) * i for i in range(5) ]))
store.append('df_tz', df, data_columns=['A'])
result = store['df_tz']
compare(result, df)
assert_frame_equal(result, df)
# select with tz aware
compare(store.select('df_tz', where=Term('A>=df.A[3]')), df[df.A >= df.A[3]])
_maybe_remove(store, 'df_tz')
# ensure we include dates in DST and STD time here.
df = DataFrame(dict(A=Timestamp('20130102', tz=gettz('US/Eastern')), B=Timestamp('20130603', tz=gettz('US/Eastern'))), index=range(5))
store.append('df_tz', df)
result = store['df_tz']
compare(result, df)
assert_frame_equal(result, df)
_maybe_remove(store, 'df_tz')
df = DataFrame(dict(A=Timestamp('20130102', tz=gettz('US/Eastern')), B=Timestamp('20130102', tz=gettz('EET'))), index=range(5))
self.assertRaises(TypeError, store.append, 'df_tz', df)
# this is ok
_maybe_remove(store, 'df_tz')
store.append('df_tz', df, data_columns=['A', 'B'])
result = store['df_tz']
compare(result, df)
assert_frame_equal(result, df)
# can't append with diff timezone
df = DataFrame(dict(A=Timestamp('20130102', tz=gettz('US/Eastern')), B=Timestamp('20130102', tz=gettz('CET'))), index=range(5))
self.assertRaises(ValueError, store.append, 'df_tz', df)
# as index
with ensure_clean_store(self.path) as store:
# GH 4098 example
df = DataFrame(dict(A=Series(lrange(3), index=date_range('2000-1-1', periods=3, freq='H', tz=gettz('US/Eastern')))))
_maybe_remove(store, 'df')
store.put('df', df)
result = store.select('df')
assert_frame_equal(result, df)
_maybe_remove(store, 'df')
store.append('df', df)
result = store.select('df')
assert_frame_equal(result, df)
def test_store_timezone(self):
# GH2852
# issue storing datetime.date with a timezone as it resets when read back in a new timezone
import platform
if platform.system() == "Windows":
raise nose.SkipTest("timezone setting not supported on windows")
import datetime
import time
import os
# original method
with ensure_clean_store(self.path) as store:
today = datetime.date(2013,9,10)
df = DataFrame([1,2,3], index = [today, today, today])
store['obj1'] = df
result = store['obj1']
assert_frame_equal(result, df)
# with tz setting
orig_tz = os.environ.get('TZ')
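# helper: set or clear the process TZ environment variable and apply it via time.tzset()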
def setTZ(tz):
if tz is None:
try:
del os.environ['TZ']
except:
pass
else:
os.environ['TZ']=tz
time.tzset()
try:
with ensure_clean_store(self.path) as store:
setTZ('EST5EDT')
today = datetime.date(2013,9,10)
df = DataFrame([1,2,3], index = [today, today, today])
store['obj1'] = df
setTZ('CST6CDT')
result = store['obj1']
assert_frame_equal(result, df)
finally:
setTZ(orig_tz)
def test_append_with_timedelta(self):
# GH 3577
# append timedelta
from datetime import timedelta
df = DataFrame(dict(A = Timestamp('20130101'), B = [ Timestamp('20130101') + timedelta(days=i,seconds=10) for i in range(10) ]))
df['C'] = df['A']-df['B']
df.ix[3:5,'C'] = np.nan
with ensure_clean_store(self.path) as store:
# table
_maybe_remove(store, 'df')
store.append('df',df,data_columns=True)
result = store.select('df')
assert_frame_equal(result,df)
result = store.select('df',Term("C<100000"))
assert_frame_equal(result,df)
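# timedelta terms can also be given as integer seconds: -3*86400 == -3 days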
result = store.select('df',Term("C","<",-3*86400))
assert_frame_equal(result,df.iloc[3:])
result = store.select('df',"C<'-3D'")
assert_frame_equal(result,df.iloc[3:])
# a bit hacky here as we don't really deal with the NaT properly
result = store.select('df',"C<'-500000s'")
result = result.dropna(subset=['C'])
assert_frame_equal(result,df.iloc[6:])
result = store.select('df',"C<'-3.5D'")
result = result.iloc[1:]
assert_frame_equal(result,df.iloc[4:])
<|fim▁hole|> assert_frame_equal(result,df)
def test_remove(self):
with ensure_clean_store(self.path) as store:
ts = tm.makeTimeSeries()
df = tm.makeDataFrame()
store['a'] = ts
store['b'] = df
_maybe_remove(store, 'a')
self.assertEqual(len(store), 1)
tm.assert_frame_equal(df, store['b'])
_maybe_remove(store, 'b')
self.assertEqual(len(store), 0)
# nonexistence
self.assertRaises(KeyError, store.remove, 'a_nonexistent_store')
# pathing
store['a'] = ts
store['b/foo'] = df
_maybe_remove(store, 'foo')
_maybe_remove(store, 'b/foo')
self.assertEqual(len(store), 1)
store['a'] = ts
store['b/foo'] = df
_maybe_remove(store, 'b')
self.assertEqual(len(store), 1)
# __delitem__
store['a'] = ts
store['b'] = df
del store['a']
del store['b']
self.assertEqual(len(store), 0)
def test_remove_where(self):
with ensure_clean_store(self.path) as store:
# non-existence
crit1 = Term('index>foo')
self.assertRaises(KeyError, store.remove, 'a', [crit1])
# try to remove non-table (with crit)
# non-table ok (where = None)
wp = tm.makePanel(30)
store.put('wp', wp, format='table')
store.remove('wp', ["minor_axis=['A', 'D']"])
rs = store.select('wp')
expected = wp.reindex(minor_axis=['B', 'C'])
assert_panel_equal(rs, expected)
# empty where
_maybe_remove(store, 'wp')
store.put('wp', wp, format='table')
# n is the number of rows deleted (here the entire table)
n = store.remove('wp', [])
self.assertTrue(n == 120)
# non-empty where
_maybe_remove(store, 'wp')
store.put('wp', wp, format='table')
self.assertRaises(ValueError, store.remove,
'wp', ['foo'])
# selecting non-table with a where
# store.put('wp2', wp, format='f')
# self.assertRaises(ValueError, store.remove,
# 'wp2', [('column', ['A', 'D'])])
def test_remove_startstop(self):
# GH #4835 and #6177
with ensure_clean_store(self.path) as store:
wp = tm.makePanel(30)
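# stored as a table the panel has 30 major_axis x 4 minor_axis = 120 rows; start/stop count table rows, so row counts map to major_axis entries via //4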
# start
_maybe_remove(store, 'wp1')
store.put('wp1', wp, format='t')
n = store.remove('wp1', start=32)
self.assertTrue(n == 120-32)
result = store.select('wp1')
expected = wp.reindex(major_axis=wp.major_axis[:32//4])
assert_panel_equal(result, expected)
_maybe_remove(store, 'wp2')
store.put('wp2', wp, format='t')
n = store.remove('wp2', start=-32)
self.assertTrue(n == 32)
result = store.select('wp2')
expected = wp.reindex(major_axis=wp.major_axis[:-32//4])
assert_panel_equal(result, expected)
# stop
_maybe_remove(store, 'wp3')
store.put('wp3', wp, format='t')
n = store.remove('wp3', stop=32)
self.assertTrue(n == 32)
result = store.select('wp3')
expected = wp.reindex(major_axis=wp.major_axis[32//4:])
assert_panel_equal(result, expected)
_maybe_remove(store, 'wp4')
store.put('wp4', wp, format='t')
n = store.remove('wp4', stop=-32)
self.assertTrue(n == 120-32)
result = store.select('wp4')
expected = wp.reindex(major_axis=wp.major_axis[-32//4:])
assert_panel_equal(result, expected)
# start and stop
_maybe_remove(store, 'wp5')
store.put('wp5', wp, format='t')
n = store.remove('wp5', start=16, stop=-16)
self.assertTrue(n == 120-32)
result = store.select('wp5')
expected = wp.reindex(major_axis=wp.major_axis[:16//4].union(wp.major_axis[-16//4:]))
assert_panel_equal(result, expected)
_maybe_remove(store, 'wp6')
store.put('wp6', wp, format='t')
n = store.remove('wp6', start=16, stop=16)
self.assertTrue(n == 0)
result = store.select('wp6')
expected = wp.reindex(major_axis=wp.major_axis)
assert_panel_equal(result, expected)
# with where
_maybe_remove(store, 'wp7')
date = wp.major_axis.take(np.arange(0,30,3))
crit = Term('major_axis=date')
store.put('wp7', wp, format='t')
n = store.remove('wp7', where=[crit], stop=80)
self.assertTrue(n == 28)
result = store.select('wp7')
expected = wp.reindex(major_axis=wp.major_axis.difference(wp.major_axis[np.arange(0,20,3)]))
assert_panel_equal(result, expected)
def test_remove_crit(self):
with ensure_clean_store(self.path) as store:
wp = tm.makePanel(30)
# group row removal
_maybe_remove(store, 'wp3')
date4 = wp.major_axis.take([0, 1, 2, 4, 5, 6, 8, 9, 10])
crit4 = Term('major_axis=date4')
store.put('wp3', wp, format='t')
n = store.remove('wp3', where=[crit4])
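# 9 major_axis dates x 4 minor_axis columns = 36 table rows removed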
self.assertTrue(n == 36)
result = store.select('wp3')
expected = wp.reindex(major_axis=wp.major_axis.difference(date4))
assert_panel_equal(result, expected)
# upper half
_maybe_remove(store, 'wp')
store.put('wp', wp, format='table')
date = wp.major_axis[len(wp.major_axis) // 2]
crit1 = Term('major_axis>date')
crit2 = Term("minor_axis=['A', 'D']")
n = store.remove('wp', where=[crit1])
self.assertTrue(n == 56)
n = store.remove('wp', where=[crit2])
self.assertTrue(n == 32)
result = store['wp']
expected = wp.truncate(after=date).reindex(minor=['B', 'C'])
assert_panel_equal(result, expected)
# individual row elements
_maybe_remove(store, 'wp2')
store.put('wp2', wp, format='table')
date1 = wp.major_axis[1:3]
crit1 = Term('major_axis=date1')
store.remove('wp2', where=[crit1])
result = store.select('wp2')
expected = wp.reindex(major_axis=wp.major_axis.difference(date1))
assert_panel_equal(result, expected)
date2 = wp.major_axis[5]
crit2 = Term('major_axis=date2')
store.remove('wp2', where=[crit2])
result = store['wp2']
expected = wp.reindex(
major_axis=wp.major_axis.difference(date1).difference(Index([date2])))
assert_panel_equal(result, expected)
date3 = [wp.major_axis[7], wp.major_axis[9]]
crit3 = Term('major_axis=date3')
store.remove('wp2', where=[crit3])
result = store['wp2']
expected = wp.reindex(
major_axis=wp.major_axis.difference(date1).difference(Index([date2])).difference(Index(date3)))
assert_panel_equal(result, expected)
# corners
_maybe_remove(store, 'wp4')
store.put('wp4', wp, format='table')
n = store.remove(
'wp4', where=[Term('major_axis>wp.major_axis[-1]')])
result = store.select('wp4')
assert_panel_equal(result, wp)
def test_invalid_terms(self):
with ensure_clean_store(self.path) as store:
df = tm.makeTimeDataFrame()
df['string'] = 'foo'
df.ix[0:4,'string'] = 'bar'
wp = tm.makePanel()
p4d = tm.makePanel4D()
store.put('df', df, format='table')
store.put('wp', wp, format='table')
store.put('p4d', p4d, format='table')
# some invalid terms
self.assertRaises(ValueError, store.select, 'wp', "minor=['A', 'B']")
self.assertRaises(ValueError, store.select, 'wp', ["index=['20121114']"])
self.assertRaises(ValueError, store.select, 'wp', ["index=['20121114', '20121114']"])
self.assertRaises(TypeError, Term)
# more invalid
self.assertRaises(ValueError, store.select, 'df','df.index[3]')
self.assertRaises(SyntaxError, store.select, 'df','index>')
self.assertRaises(ValueError, store.select, 'wp', "major_axis<'20000108' & minor_axis['A', 'B']")
# from the docs
with ensure_clean_path(self.path) as path:
dfq = DataFrame(np.random.randn(10,4),columns=list('ABCD'),index=date_range('20130101',periods=10))
dfq.to_hdf(path,'dfq',format='table',data_columns=True)
# check ok
read_hdf(path,'dfq',where="index>Timestamp('20130104') & columns=['A', 'B']")
read_hdf(path,'dfq',where="A>0 or C>0")
# catch the invalid reference
with ensure_clean_path(self.path) as path:
dfq = DataFrame(np.random.randn(10,4),columns=list('ABCD'),index=date_range('20130101',periods=10))
dfq.to_hdf(path,'dfq',format='table')
self.assertRaises(ValueError, read_hdf, path,'dfq',where="A>0 or C>0")
def test_terms(self):
with ensure_clean_store(self.path) as store:
wp = tm.makePanel()
p4d = tm.makePanel4D()
wpneg = Panel.fromDict({-1: tm.makeDataFrame(), 0: tm.makeDataFrame(),
1: tm.makeDataFrame()})
store.put('wp', wp, table=True)
store.put('p4d', p4d, table=True)
store.put('wpneg', wpneg, table=True)
# panel
result = store.select('wp', [Term(
'major_axis<"20000108"'), Term("minor_axis=['A', 'B']")])
expected = wp.truncate(after='20000108').reindex(minor=['A', 'B'])
assert_panel_equal(result, expected)
# with deprecation
result = store.select('wp', [Term(
'major_axis','<',"20000108"), Term("minor_axis=['A', 'B']")])
expected = wp.truncate(after='20000108').reindex(minor=['A', 'B'])
tm.assert_panel_equal(result, expected)
# p4d
result = store.select('p4d', [Term('major_axis<"20000108"'),
Term("minor_axis=['A', 'B']"),
Term("items=['ItemA', 'ItemB']")])
expected = p4d.truncate(after='20000108').reindex(
minor=['A', 'B'], items=['ItemA', 'ItemB'])
assert_panel4d_equal(result, expected)
# back compat invalid terms
terms = [
dict(field='major_axis', op='>', value='20121114'),
[ dict(field='major_axis', op='>', value='20121114') ],
[ "minor_axis=['A','B']", dict(field='major_axis', op='>', value='20121114') ]
]
for t in terms:
with tm.assert_produces_warning(expected_warning=DeprecationWarning):
Term(t)
# valid terms
terms = [
('major_axis=20121114'),
('major_axis>20121114'),
(("major_axis=['20121114', '20121114']"),),
('major_axis=datetime.datetime(2012, 11, 14)'),
'major_axis> 20121114',
'major_axis >20121114',
'major_axis > 20121114',
(("minor_axis=['A', 'B']"),),
(("minor_axis=['A', 'B']"),),
((("minor_axis==['A', 'B']"),),),
(("items=['ItemA', 'ItemB']"),),
('items=ItemA'),
]
for t in terms:
store.select('wp', t)
store.select('p4d', t)
# valid for p4d only
terms = [
(("labels=['l1', 'l2']"),),
Term("labels=['l1', 'l2']"),
]
for t in terms:
store.select('p4d', t)
with tm.assertRaisesRegexp(TypeError, 'Only named functions are supported'):
store.select('wp', Term('major_axis == (lambda x: x)("20130101")'))
# check USub node parsing
res = store.select('wpneg', Term('items == -1'))
expected = Panel({-1: wpneg[-1]})
tm.assert_panel_equal(res, expected)
with tm.assertRaisesRegexp(NotImplementedError,
'Unary addition not supported'):
store.select('wpneg', Term('items == +1'))
def test_term_compat(self):
with ensure_clean_store(self.path) as store:
wp = Panel(np.random.randn(2, 5, 4), items=['Item1', 'Item2'],
major_axis=date_range('1/1/2000', periods=5),
minor_axis=['A', 'B', 'C', 'D'])
store.append('wp',wp)
result = store.select('wp', [Term('major_axis>20000102'),
Term('minor_axis', '=', ['A','B']) ])
expected = wp.loc[:,wp.major_axis>Timestamp('20000102'),['A','B']]
assert_panel_equal(result, expected)
store.remove('wp', Term('major_axis>20000103'))
result = store.select('wp')
expected = wp.loc[:,wp.major_axis<=Timestamp('20000103'),:]
assert_panel_equal(result, expected)
with ensure_clean_store(self.path) as store:
wp = Panel(np.random.randn(2, 5, 4), items=['Item1', 'Item2'],
major_axis=date_range('1/1/2000', periods=5),
minor_axis=['A', 'B', 'C', 'D'])
store.append('wp',wp)
# stringified datetimes
result = store.select('wp', [Term('major_axis','>',datetime.datetime(2000,1,2))])
expected = wp.loc[:,wp.major_axis>Timestamp('20000102')]
assert_panel_equal(result, expected)
result = store.select('wp', [Term('major_axis','>',datetime.datetime(2000,1,2,0,0))])
expected = wp.loc[:,wp.major_axis>Timestamp('20000102')]
assert_panel_equal(result, expected)
result = store.select('wp', [Term('major_axis','=',[datetime.datetime(2000,1,2,0,0),datetime.datetime(2000,1,3,0,0)])])
expected = wp.loc[:,[Timestamp('20000102'),Timestamp('20000103')]]
assert_panel_equal(result, expected)
result = store.select('wp', [Term('minor_axis','=',['A','B'])])
expected = wp.loc[:,:,['A','B']]
assert_panel_equal(result, expected)
def test_backwards_compat_without_term_object(self):
with ensure_clean_store(self.path) as store:
wp = Panel(np.random.randn(2, 5, 4), items=['Item1', 'Item2'],
major_axis=date_range('1/1/2000', periods=5),
minor_axis=['A', 'B', 'C', 'D'])
store.append('wp',wp)
with tm.assert_produces_warning(expected_warning=DeprecationWarning):
result = store.select('wp', [('major_axis>20000102'),
('minor_axis', '=', ['A','B']) ])
expected = wp.loc[:,wp.major_axis>Timestamp('20000102'),['A','B']]
assert_panel_equal(result, expected)
store.remove('wp', ('major_axis>20000103'))
result = store.select('wp')
expected = wp.loc[:,wp.major_axis<=Timestamp('20000103'),:]
assert_panel_equal(result, expected)
with ensure_clean_store(self.path) as store:
wp = Panel(np.random.randn(2, 5, 4), items=['Item1', 'Item2'],
major_axis=date_range('1/1/2000', periods=5),
minor_axis=['A', 'B', 'C', 'D'])
store.append('wp',wp)
# stringified datetimes
with tm.assert_produces_warning(expected_warning=DeprecationWarning):
result = store.select('wp', [('major_axis','>',datetime.datetime(2000,1,2))])
expected = wp.loc[:,wp.major_axis>Timestamp('20000102')]
assert_panel_equal(result, expected)
with tm.assert_produces_warning(expected_warning=DeprecationWarning):
result = store.select('wp', [('major_axis','>',datetime.datetime(2000,1,2,0,0))])
expected = wp.loc[:,wp.major_axis>Timestamp('20000102')]
assert_panel_equal(result, expected)
with tm.assert_produces_warning(expected_warning=DeprecationWarning):
result = store.select('wp', [('major_axis','=',[datetime.datetime(2000,1,2,0,0),
datetime.datetime(2000,1,3,0,0)])])
expected = wp.loc[:,[Timestamp('20000102'),Timestamp('20000103')]]
assert_panel_equal(result, expected)
with tm.assert_produces_warning(expected_warning=DeprecationWarning):
result = store.select('wp', [('minor_axis','=',['A','B'])])
expected = wp.loc[:,:,['A','B']]
assert_panel_equal(result, expected)
def test_same_name_scoping(self):
with ensure_clean_store(self.path) as store:
import pandas as pd
df = DataFrame(np.random.randn(20, 2),index=pd.date_range('20130101',periods=20))
store.put('df', df, table=True)
expected = df[df.index>pd.Timestamp('20130105')]
import datetime
result = store.select('df','index>datetime.datetime(2013,1,5)')
assert_frame_equal(result,expected)
from datetime import datetime
# technically an error, but allow it
result = store.select('df','index>datetime.datetime(2013,1,5)')
assert_frame_equal(result,expected)
result = store.select('df','index>datetime(2013,1,5)')
assert_frame_equal(result,expected)
def test_series(self):
s = tm.makeStringSeries()
self._check_roundtrip(s, tm.assert_series_equal)
ts = tm.makeTimeSeries()
self._check_roundtrip(ts, tm.assert_series_equal)
ts2 = Series(ts.index, Index(ts.index, dtype=object))
self._check_roundtrip(ts2, tm.assert_series_equal)
ts3 = Series(ts.values, Index(np.asarray(ts.index, dtype=object),
dtype=object))
self._check_roundtrip(ts3, tm.assert_series_equal)
def test_sparse_series(self):
s = tm.makeStringSeries()
s[3:5] = np.nan
ss = s.to_sparse()
self._check_roundtrip(ss, tm.assert_series_equal,
check_series_type=True)
ss2 = s.to_sparse(kind='integer')
self._check_roundtrip(ss2, tm.assert_series_equal,
check_series_type=True)
ss3 = s.to_sparse(fill_value=0)
self._check_roundtrip(ss3, tm.assert_series_equal,
check_series_type=True)
def test_sparse_frame(self):
s = tm.makeDataFrame()
s.ix[3:5, 1:3] = np.nan
s.ix[8:10, -2] = np.nan
ss = s.to_sparse()
self._check_double_roundtrip(ss, tm.assert_frame_equal,
check_frame_type=True)
ss2 = s.to_sparse(kind='integer')
self._check_double_roundtrip(ss2, tm.assert_frame_equal,
check_frame_type=True)
ss3 = s.to_sparse(fill_value=0)
self._check_double_roundtrip(ss3, tm.assert_frame_equal,
check_frame_type=True)
def test_sparse_panel(self):
items = ['x', 'y', 'z']
p = Panel(dict((i, tm.makeDataFrame().ix[:2, :2]) for i in items))
sp = p.to_sparse()
self._check_double_roundtrip(sp, assert_panel_equal,
check_panel_type=True)
sp2 = p.to_sparse(kind='integer')
self._check_double_roundtrip(sp2, assert_panel_equal,
check_panel_type=True)
sp3 = p.to_sparse(fill_value=0)
self._check_double_roundtrip(sp3, assert_panel_equal,
check_panel_type=True)
def test_float_index(self):
# GH #454
index = np.random.randn(10)
s = Series(np.random.randn(10), index=index)
self._check_roundtrip(s, tm.assert_series_equal)
def test_tuple_index(self):
# GH #492
col = np.arange(10)
idx = [(0., 1.), (2., 3.), (4., 5.)]
data = np.random.randn(30).reshape((3, 10))
DF = DataFrame(data, index=idx, columns=col)
with tm.assert_produces_warning(expected_warning=PerformanceWarning):
self._check_roundtrip(DF, tm.assert_frame_equal)
def test_index_types(self):
values = np.random.randn(2)
func = lambda l, r: tm.assert_series_equal(l, r,
check_dtype=True,
check_index_type=True,
check_series_type=True)
with tm.assert_produces_warning(expected_warning=PerformanceWarning):
ser = Series(values, [0, 'y'])
self._check_roundtrip(ser, func)
with tm.assert_produces_warning(expected_warning=PerformanceWarning):
ser = Series(values, [datetime.datetime.today(), 0])
self._check_roundtrip(ser, func)
with tm.assert_produces_warning(expected_warning=PerformanceWarning):
ser = Series(values, ['y', 0])
self._check_roundtrip(ser, func)
with tm.assert_produces_warning(expected_warning=PerformanceWarning):
ser = Series(values, [datetime.date.today(), 'a'])
self._check_roundtrip(ser, func)
with tm.assert_produces_warning(expected_warning=PerformanceWarning):
ser = Series(values, [1.23, 'b'])
self._check_roundtrip(ser, func)
ser = Series(values, [1, 1.53])
self._check_roundtrip(ser, func)
ser = Series(values, [1, 5])
self._check_roundtrip(ser, func)
ser = Series(values, [datetime.datetime(
2012, 1, 1), datetime.datetime(2012, 1, 2)])
self._check_roundtrip(ser, func)
def test_timeseries_preepoch(self):
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
raise nose.SkipTest("won't work on Python < 2.7")
dr = bdate_range('1/1/1940', '1/1/1960')
ts = Series(np.random.randn(len(dr)), index=dr)
try:
self._check_roundtrip(ts, tm.assert_series_equal)
except OverflowError:
raise nose.SkipTest('known failure on some windows platforms')
def test_frame(self):
df = tm.makeDataFrame()
# put in some random NAs
df.values[0, 0] = np.nan
df.values[5, 3] = np.nan
self._check_roundtrip_table(df, tm.assert_frame_equal)
self._check_roundtrip(df, tm.assert_frame_equal)
self._check_roundtrip_table(df, tm.assert_frame_equal,
compression=True)
self._check_roundtrip(df, tm.assert_frame_equal,
compression=True)
tdf = tm.makeTimeDataFrame()
self._check_roundtrip(tdf, tm.assert_frame_equal)
self._check_roundtrip(tdf, tm.assert_frame_equal,
compression=True)
with ensure_clean_store(self.path) as store:
# not consolidated
df['foo'] = np.random.randn(len(df))
store['df'] = df
recons = store['df']
self.assertTrue(recons._data.is_consolidated())
# empty
self._check_roundtrip(df[:0], tm.assert_frame_equal)
def test_empty_series_frame(self):
s0 = Series()
s1 = Series(name='myseries')
df0 = DataFrame()
df1 = DataFrame(index=['a', 'b', 'c'])
df2 = DataFrame(columns=['d', 'e', 'f'])
self._check_roundtrip(s0, tm.assert_series_equal)
self._check_roundtrip(s1, tm.assert_series_equal)
self._check_roundtrip(df0, tm.assert_frame_equal)
self._check_roundtrip(df1, tm.assert_frame_equal)
self._check_roundtrip(df2, tm.assert_frame_equal)
def test_empty_series(self):
for dtype in [np.int64, np.float64, np.object, 'm8[ns]', 'M8[ns]']:
s = Series(dtype=dtype)
self._check_roundtrip(s, tm.assert_series_equal)
def test_can_serialize_dates(self):
rng = [x.date() for x in bdate_range('1/1/2000', '1/30/2000')]
frame = DataFrame(np.random.randn(len(rng), 4), index=rng)
self._check_roundtrip(frame, tm.assert_frame_equal)
def test_timezones(self):
rng = date_range('1/1/2000', '1/30/2000', tz='US/Eastern')
frame = DataFrame(np.random.randn(len(rng), 4), index=rng)
with ensure_clean_store(self.path) as store:
store['frame'] = frame
recons = store['frame']
self.assertTrue(recons.index.equals(rng))
self.assertEqual(rng.tz, recons.index.tz)
def test_fixed_offset_tz(self):
rng = date_range('1/1/2000 00:00:00-07:00', '1/30/2000 00:00:00-07:00')
frame = DataFrame(np.random.randn(len(rng), 4), index=rng)
with ensure_clean_store(self.path) as store:
store['frame'] = frame
recons = store['frame']
self.assertTrue(recons.index.equals(rng))
self.assertEqual(rng.tz, recons.index.tz)
def test_store_hierarchical(self):
index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'],
['one', 'two', 'three']],
labels=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
[0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
names=['foo', 'bar'])
frame = DataFrame(np.random.randn(10, 3), index=index,
columns=['A', 'B', 'C'])
self._check_roundtrip(frame, tm.assert_frame_equal)
self._check_roundtrip(frame.T, tm.assert_frame_equal)
self._check_roundtrip(frame['A'], tm.assert_series_equal)
# check that the names are stored
with ensure_clean_store(self.path) as store:
store['frame'] = frame
recons = store['frame']
assert(recons.index.names == ('foo', 'bar'))
def test_store_index_name(self):
df = tm.makeDataFrame()
df.index.name = 'foo'
with ensure_clean_store(self.path) as store:
store['frame'] = df
recons = store['frame']
assert(recons.index.name == 'foo')
def test_store_series_name(self):
df = tm.makeDataFrame()
series = df['A']
with ensure_clean_store(self.path) as store:
store['series'] = series
recons = store['series']
assert(recons.name == 'A')
def test_store_mixed(self):
def _make_one():
df = tm.makeDataFrame()
df['obj1'] = 'foo'
df['obj2'] = 'bar'
df['bool1'] = df['A'] > 0
df['bool2'] = df['B'] > 0
df['int1'] = 1
df['int2'] = 2
return df.consolidate()
df1 = _make_one()
df2 = _make_one()
self._check_roundtrip(df1, tm.assert_frame_equal)
self._check_roundtrip(df2, tm.assert_frame_equal)
with ensure_clean_store(self.path) as store:
store['obj'] = df1
tm.assert_frame_equal(store['obj'], df1)
store['obj'] = df2
tm.assert_frame_equal(store['obj'], df2)
# check that we can store Series of all of these types
self._check_roundtrip(df1['obj1'], tm.assert_series_equal)
self._check_roundtrip(df1['bool1'], tm.assert_series_equal)
self._check_roundtrip(df1['int1'], tm.assert_series_equal)
# try with compression
self._check_roundtrip(df1['obj1'], tm.assert_series_equal,
compression=True)
self._check_roundtrip(df1['bool1'], tm.assert_series_equal,
compression=True)
self._check_roundtrip(df1['int1'], tm.assert_series_equal,
compression=True)
self._check_roundtrip(df1, tm.assert_frame_equal,
compression=True)
def test_wide(self):
wp = tm.makePanel()
self._check_roundtrip(wp, assert_panel_equal)
def test_wide_table(self):
wp = tm.makePanel()
self._check_roundtrip_table(wp, assert_panel_equal)
def test_select_with_dups(self):
# single dtypes
df = DataFrame(np.random.randn(10,4),columns=['A','A','B','B'])
df.index = date_range('20130101 9:30',periods=10,freq='T')
with ensure_clean_store(self.path) as store:
store.append('df',df)
result = store.select('df')
expected = df
assert_frame_equal(result,expected,by_blocks=True)
result = store.select('df',columns=df.columns)
expected = df
assert_frame_equal(result,expected,by_blocks=True)
result = store.select('df',columns=['A'])
expected = df.loc[:,['A']]
assert_frame_equal(result,expected)
# dups across dtypes
df = concat([DataFrame(np.random.randn(10,4),columns=['A','A','B','B']),
DataFrame(np.random.randint(0,10,size=20).reshape(10,2),columns=['A','C'])],
axis=1)
df.index = date_range('20130101 9:30',periods=10,freq='T')
with ensure_clean_store(self.path) as store:
store.append('df',df)
result = store.select('df')
expected = df
assert_frame_equal(result,expected,by_blocks=True)
result = store.select('df',columns=df.columns)
expected = df
assert_frame_equal(result,expected,by_blocks=True)
expected = df.loc[:,['A']]
result = store.select('df',columns=['A'])
assert_frame_equal(result,expected,by_blocks=True)
expected = df.loc[:,['B','A']]
result = store.select('df',columns=['B','A'])
assert_frame_equal(result,expected,by_blocks=True)
# duplicates on both index and columns
with ensure_clean_store(self.path) as store:
store.append('df',df)
store.append('df',df)
expected = df.loc[:,['B','A']]
expected = concat([expected, expected])
result = store.select('df',columns=['B','A'])
assert_frame_equal(result,expected,by_blocks=True)
def test_wide_table_dups(self):
wp = tm.makePanel()
with ensure_clean_store(self.path) as store:
store.put('panel', wp, format='table')
store.put('panel', wp, format='table', append=True)
with tm.assert_produces_warning(expected_warning=DuplicateWarning):
recons = store['panel']
assert_panel_equal(recons, wp)
def test_long(self):
def _check(left, right):
assert_panel_equal(left.to_panel(), right.to_panel())
wp = tm.makePanel()
self._check_roundtrip(wp.to_frame(), _check)
# empty
# self._check_roundtrip(wp.to_frame()[:0], _check)
def test_longpanel(self):
pass
def test_overwrite_node(self):
with ensure_clean_store(self.path) as store:
store['a'] = tm.makeTimeDataFrame()
ts = tm.makeTimeSeries()
store['a'] = ts
tm.assert_series_equal(store['a'], ts)
def test_sparse_with_compression(self):
# GH 2931
# make sparse dataframe
df = DataFrame(np.random.binomial(n=1, p=.01, size=(1e3, 10))).to_sparse(fill_value=0)
# case 1: store uncompressed
self._check_double_roundtrip(df, tm.assert_frame_equal,
compression = False,
check_frame_type=True)
# case 2: store compressed (works)
self._check_double_roundtrip(df, tm.assert_frame_equal,
compression = 'zlib',
check_frame_type=True)
# set one series to be completely sparse
df[0] = np.zeros(1e3)
# case 3: store df with completely sparse series uncompressed
self._check_double_roundtrip(df, tm.assert_frame_equal,
compression = False,
check_frame_type=True)
# case 4: try storing df with completely sparse series compressed (fails)
self._check_double_roundtrip(df, tm.assert_frame_equal,
compression = 'zlib',
check_frame_type=True)
def test_select(self):
wp = tm.makePanel()
with ensure_clean_store(self.path) as store:
# put/select ok
_maybe_remove(store, 'wp')
store.put('wp', wp, format='table')
store.select('wp')
# non-table ok (where = None)
_maybe_remove(store, 'wp')
store.put('wp2', wp)
store.select('wp2')
# selection on the non-indexable with a large number of columns
wp = Panel(
np.random.randn(100, 100, 100), items=['Item%03d' % i for i in range(100)],
major_axis=date_range('1/1/2000', periods=100), minor_axis=['E%03d' % i for i in range(100)])
_maybe_remove(store, 'wp')
store.append('wp', wp)
items = ['Item%03d' % i for i in range(80)]
result = store.select('wp', Term('items=items'))
expected = wp.reindex(items=items)
assert_panel_equal(expected, result)
# selecting non-table with a where
# self.assertRaises(ValueError, store.select,
# 'wp2', ('column', ['A', 'D']))
# select with columns=
df = tm.makeTimeDataFrame()
_maybe_remove(store, 'df')
store.append('df', df)
result = store.select('df', columns=['A', 'B'])
expected = df.reindex(columns=['A', 'B'])
tm.assert_frame_equal(expected, result)
# equivalently
result = store.select('df', [("columns=['A', 'B']")])
expected = df.reindex(columns=['A', 'B'])
tm.assert_frame_equal(expected, result)
# with a data column
_maybe_remove(store, 'df')
store.append('df', df, data_columns=['A'])
result = store.select('df', ['A > 0'], columns=['A', 'B'])
expected = df[df.A > 0].reindex(columns=['A', 'B'])
tm.assert_frame_equal(expected, result)
# all as data columns
_maybe_remove(store, 'df')
store.append('df', df, data_columns=True)
result = store.select('df', ['A > 0'], columns=['A', 'B'])
expected = df[df.A > 0].reindex(columns=['A', 'B'])
tm.assert_frame_equal(expected, result)
# with a data column, but different columns
_maybe_remove(store, 'df')
store.append('df', df, data_columns=['A'])
result = store.select('df', ['A > 0'], columns=['C', 'D'])
expected = df[df.A > 0].reindex(columns=['C', 'D'])
tm.assert_frame_equal(expected, result)
def test_select_dtypes(self):
with ensure_clean_store(self.path) as store:
# with a Timestamp data column (GH #2637)
df = DataFrame(dict(ts=bdate_range('2012-01-01', periods=300), A=np.random.randn(300)))
_maybe_remove(store, 'df')
store.append('df', df, data_columns=['ts', 'A'])
result = store.select('df', [Term("ts>=Timestamp('2012-02-01')")])
expected = df[df.ts >= Timestamp('2012-02-01')]
tm.assert_frame_equal(expected, result)
# bool columns (GH #2849)
df = DataFrame(np.random.randn(5,2), columns =['A','B'])
df['object'] = 'foo'
df.ix[4:5,'object'] = 'bar'
df['boolv'] = df['A'] > 0
_maybe_remove(store, 'df')
store.append('df', df, data_columns = True)
expected = df[df.boolv == True].reindex(columns=['A','boolv'])
for v in [True,'true',1]:
result = store.select('df', Term('boolv == %s' % str(v)), columns = ['A','boolv'])
tm.assert_frame_equal(expected, result)
expected = df[df.boolv == False ].reindex(columns=['A','boolv'])
for v in [False,'false',0]:
result = store.select('df', Term('boolv == %s' % str(v)), columns = ['A','boolv'])
tm.assert_frame_equal(expected, result)
# integer index
df = DataFrame(dict(A=np.random.rand(20), B=np.random.rand(20)))
_maybe_remove(store, 'df_int')
store.append('df_int', df)
result = store.select(
'df_int', [Term("index<10"), Term("columns=['A']")])
expected = df.reindex(index=list(df.index)[0:10],columns=['A'])
tm.assert_frame_equal(expected, result)
# float index
df = DataFrame(dict(A=np.random.rand(
20), B=np.random.rand(20), index=np.arange(20, dtype='f8')))
_maybe_remove(store, 'df_float')
store.append('df_float', df)
result = store.select(
'df_float', [Term("index<10.0"), Term("columns=['A']")])
expected = df.reindex(index=list(df.index)[0:10],columns=['A'])
tm.assert_frame_equal(expected, result)
with ensure_clean_store(self.path) as store:
# floats w/o NaN
df = DataFrame(dict(cols = range(11), values = range(11)),dtype='float64')
df['cols'] = (df['cols']+10).apply(str)
store.append('df1',df,data_columns=True)
result = store.select(
'df1', where='values>2.0')
expected = df[df['values']>2.0]
tm.assert_frame_equal(expected, result)
# floats with NaN
df.iloc[0] = np.nan
expected = df[df['values']>2.0]
store.append('df2',df,data_columns=True,index=False)
result = store.select(
'df2', where='values>2.0')
tm.assert_frame_equal(expected, result)
# https://github.com/PyTables/PyTables/issues/282
# bug in selection when 0th row has a np.nan and an index
#store.append('df3',df,data_columns=True)
#result = store.select(
# 'df3', where='values>2.0')
#tm.assert_frame_equal(expected, result)
# a NaN not in the first position of a float column is ok too
df = DataFrame(dict(cols = range(11), values = range(11)),dtype='float64')
df['cols'] = (df['cols']+10).apply(str)
df.iloc[1] = np.nan
expected = df[df['values']>2.0]
store.append('df4',df,data_columns=True)
result = store.select(
'df4', where='values>2.0')
tm.assert_frame_equal(expected, result)
def test_select_with_many_inputs(self):
with ensure_clean_store(self.path) as store:
df = DataFrame(dict(ts=bdate_range('2012-01-01', periods=300),
A=np.random.randn(300),
B=range(300),
users = ['a']*50 + ['b']*50 + ['c']*100 + ['a%03d' % i for i in range(100)]))
_maybe_remove(store, 'df')
store.append('df', df, data_columns=['ts', 'A', 'B', 'users'])
# regular select
result = store.select('df', [Term("ts>=Timestamp('2012-02-01')")])
expected = df[df.ts >= Timestamp('2012-02-01')]
tm.assert_frame_equal(expected, result)
# small selector
result = store.select('df', [Term("ts>=Timestamp('2012-02-01') & users=['a','b','c']")])
expected = df[ (df.ts >= Timestamp('2012-02-01')) & df.users.isin(['a','b','c']) ]
tm.assert_frame_equal(expected, result)
# big selector along the columns
selector = [ 'a','b','c' ] + [ 'a%03d' % i for i in range(60) ]
result = store.select('df', [Term("ts>=Timestamp('2012-02-01')"),Term('users=selector')])
expected = df[ (df.ts >= Timestamp('2012-02-01')) & df.users.isin(selector) ]
tm.assert_frame_equal(expected, result)
selector = range(100,200)
result = store.select('df', [Term('B=selector')])
expected = df[ df.B.isin(selector) ]
tm.assert_frame_equal(expected, result)
self.assertEqual(len(result), 100)
# big selector along the index
selector = Index(df.ts[0:100].values)
result = store.select('df', [Term('ts=selector')])
expected = df[ df.ts.isin(selector.values) ]
tm.assert_frame_equal(expected, result)
self.assertEqual(len(result), 100)
def test_select_iterator(self):
# single table
with ensure_clean_store(self.path) as store:
df = tm.makeTimeDataFrame(500)
_maybe_remove(store, 'df')
store.append('df', df)
expected = store.select('df')
results = [ s for s in store.select('df',iterator=True) ]
result = concat(results)
tm.assert_frame_equal(expected, result)
results = [ s for s in store.select('df',chunksize=100) ]
self.assertEqual(len(results), 5)
result = concat(results)
tm.assert_frame_equal(expected, result)
results = [ s for s in store.select('df',chunksize=150) ]
result = concat(results)
tm.assert_frame_equal(result, expected)
with ensure_clean_path(self.path) as path:
df = tm.makeTimeDataFrame(500)
df.to_hdf(path,'df_non_table')
self.assertRaises(TypeError, read_hdf, path,'df_non_table',chunksize=100)
self.assertRaises(TypeError, read_hdf, path,'df_non_table',iterator=True)
with ensure_clean_path(self.path) as path:
df = tm.makeTimeDataFrame(500)
df.to_hdf(path,'df',format='table')
results = [ s for s in read_hdf(path,'df',chunksize=100) ]
result = concat(results)
self.assertEqual(len(results), 5)
tm.assert_frame_equal(result, df)
tm.assert_frame_equal(result, read_hdf(path,'df'))
# multiple
with ensure_clean_store(self.path) as store:
df1 = tm.makeTimeDataFrame(500)
store.append('df1',df1,data_columns=True)
df2 = tm.makeTimeDataFrame(500).rename(columns=lambda x: "%s_2" % x)
df2['foo'] = 'bar'
store.append('df2',df2)
df = concat([df1, df2], axis=1)
# full selection
expected = store.select_as_multiple(
['df1', 'df2'], selector='df1')
results = [ s for s in store.select_as_multiple(
['df1', 'df2'], selector='df1', chunksize=150) ]
result = concat(results)
tm.assert_frame_equal(expected, result)
# where selection
#expected = store.select_as_multiple(
# ['df1', 'df2'], where= Term('A>0'), selector='df1')
#results = []
#for s in store.select_as_multiple(
# ['df1', 'df2'], where= Term('A>0'), selector='df1', chunksize=25):
# results.append(s)
#result = concat(results)
#tm.assert_frame_equal(expected, result)
def test_select_iterator_complete_8014(self):
# GH 8014
# using iterator and where clause
chunksize=1e4
# no iterator
with ensure_clean_store(self.path) as store:
expected = tm.makeTimeDataFrame(100064, 'S')
_maybe_remove(store, 'df')
store.append('df',expected)
beg_dt = expected.index[0]
end_dt = expected.index[-1]
# select w/o iteration and no where clause works
result = store.select('df')
tm.assert_frame_equal(expected, result)
# select w/o iterator and where clause, single term, begin
# of range, works
where = "index >= '%s'" % beg_dt
result = store.select('df',where=where)
tm.assert_frame_equal(expected, result)
# select w/o iterator and where clause, single term, end
# of range, works
where = "index <= '%s'" % end_dt
result = store.select('df',where=where)
tm.assert_frame_equal(expected, result)
# select w/o iterator and where clause, inclusive range,
# works
where = "index >= '%s' & index <= '%s'" % (beg_dt, end_dt)
result = store.select('df',where=where)
tm.assert_frame_equal(expected, result)
# with iterator, full range
with ensure_clean_store(self.path) as store:
expected = tm.makeTimeDataFrame(100064, 'S')
_maybe_remove(store, 'df')
store.append('df',expected)
beg_dt = expected.index[0]
end_dt = expected.index[-1]
# select w/iterator and no where clause works
results = [ s for s in store.select('df',chunksize=chunksize) ]
result = concat(results)
tm.assert_frame_equal(expected, result)
# select w/iterator and where clause, single term, begin of range
where = "index >= '%s'" % beg_dt
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
result = concat(results)
tm.assert_frame_equal(expected, result)
# select w/iterator and where clause, single term, end of range
where = "index <= '%s'" % end_dt
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
result = concat(results)
tm.assert_frame_equal(expected, result)
# select w/iterator and where clause, inclusive range
where = "index >= '%s' & index <= '%s'" % (beg_dt, end_dt)
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
result = concat(results)
tm.assert_frame_equal(expected, result)
def test_select_iterator_non_complete_8014(self):
# GH 8014
# using iterator and where clause
chunksize=1e4
# with iterator, non complete range
with ensure_clean_store(self.path) as store:
expected = tm.makeTimeDataFrame(100064, 'S')
_maybe_remove(store, 'df')
store.append('df',expected)
beg_dt = expected.index[1]
end_dt = expected.index[-2]
# select w/iterator and where clause, single term, begin of range
where = "index >= '%s'" % beg_dt
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
result = concat(results)
rexpected = expected[expected.index >= beg_dt]
tm.assert_frame_equal(rexpected, result)
# select w/iterator and where clause, single term, end of range
where = "index <= '%s'" % end_dt
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
result = concat(results)
rexpected = expected[expected.index <= end_dt]
tm.assert_frame_equal(rexpected, result)
# select w/iterator and where clause, inclusive range
where = "index >= '%s' & index <= '%s'" % (beg_dt, end_dt)
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
result = concat(results)
rexpected = expected[(expected.index >= beg_dt) & (expected.index <= end_dt)]
tm.assert_frame_equal(rexpected, result)
# with iterator, empty where
with ensure_clean_store(self.path) as store:
expected = tm.makeTimeDataFrame(100064, 'S')
_maybe_remove(store, 'df')
store.append('df',expected)
end_dt = expected.index[-1]
# select w/iterator and where clause, single term, begin of range
where = "index > '%s'" % end_dt
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
self.assertEqual(0, len(results))
def test_select_iterator_many_empty_frames(self):
# GH 8014
# using iterator and where clause can return many empty
# frames.
chunksize=int(1e4)
# with iterator, range limited to the first chunk
with ensure_clean_store(self.path) as store:
expected = tm.makeTimeDataFrame(100000, 'S')
_maybe_remove(store, 'df')
store.append('df',expected)
beg_dt = expected.index[0]
end_dt = expected.index[chunksize-1]
# select w/iterator and where clause, single term, begin of range
where = "index >= '%s'" % beg_dt
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
result = concat(results)
rexpected = expected[expected.index >= beg_dt]
tm.assert_frame_equal(rexpected, result)
# select w/iterator and where clause, single term, end of range
where = "index <= '%s'" % end_dt
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
tm.assert_equal(1, len(results))
result = concat(results)
rexpected = expected[expected.index <= end_dt]
tm.assert_frame_equal(rexpected, result)
# select w/iterator and where clause, inclusive range
where = "index >= '%s' & index <= '%s'" % (beg_dt, end_dt)
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
# should be 1, is 10
tm.assert_equal(1, len(results))
result = concat(results)
rexpected = expected[(expected.index >= beg_dt) & (expected.index <= end_dt)]
tm.assert_frame_equal(rexpected, result)
# select w/iterator and where clause which selects
# *nothing*.
#
# To be consistent with Python idiom I suggest this should
# return [] e.g. `for e in []: print True` never prints
# True.
where = "index <= '%s' & index >= '%s'" % (beg_dt, end_dt)
results = [ s for s in store.select('df',where=where,chunksize=chunksize) ]
# should be []
tm.assert_equal(0, len(results))
def test_retain_index_attributes(self):
# GH 3499, losing frequency info on index recreation
df = DataFrame(dict(A = Series(lrange(3),
index=date_range('2000-1-1',periods=3,freq='H'))))
with ensure_clean_store(self.path) as store:
_maybe_remove(store,'data')
store.put('data', df, format='table')
result = store.get('data')
tm.assert_frame_equal(df,result)
for attr in ['freq','tz','name']:
for idx in ['index','columns']:
self.assertEqual(getattr(getattr(df,idx),attr,None),
getattr(getattr(result,idx),attr,None))
# try to append a table with a different frequency
with tm.assert_produces_warning(expected_warning=AttributeConflictWarning):
df2 = DataFrame(dict(A = Series(lrange(3),
index=date_range('2002-1-1',periods=3,freq='D'))))
store.append('data',df2)
self.assertIsNone(store.get_storer('data').info['index']['freq'])
# this is ok
_maybe_remove(store,'df2')
df2 = DataFrame(dict(A = Series(lrange(3),
index=[Timestamp('20010101'),Timestamp('20010102'),Timestamp('20020101')])))
store.append('df2',df2)
df3 = DataFrame(dict(A = Series(lrange(3),index=date_range('2002-1-1',periods=3,freq='D'))))
store.append('df2',df3)
def test_retain_index_attributes2(self):
with ensure_clean_path(self.path) as path:
with tm.assert_produces_warning(expected_warning=AttributeConflictWarning):
df = DataFrame(dict(A = Series(lrange(3), index=date_range('2000-1-1',periods=3,freq='H'))))
df.to_hdf(path,'data',mode='w',append=True)
df2 = DataFrame(dict(A = Series(lrange(3), index=date_range('2002-1-1',periods=3,freq='D'))))
df2.to_hdf(path,'data',append=True)
idx = date_range('2000-1-1',periods=3,freq='H')
idx.name = 'foo'
df = DataFrame(dict(A = Series(lrange(3), index=idx)))
df.to_hdf(path,'data',mode='w',append=True)
self.assertEqual(read_hdf(path,'data').index.name, 'foo')
with tm.assert_produces_warning(expected_warning=AttributeConflictWarning):
idx2 = date_range('2001-1-1',periods=3,freq='H')
idx2.name = 'bar'
df2 = DataFrame(dict(A = Series(lrange(3), index=idx2)))
df2.to_hdf(path,'data',append=True)
self.assertIsNone(read_hdf(path,'data').index.name)
def test_panel_select(self):
wp = tm.makePanel()
with ensure_clean_store(self.path) as store:
store.put('wp', wp, format='table')
date = wp.major_axis[len(wp.major_axis) // 2]
crit1 = ('major_axis>=date')
crit2 = ("minor_axis=['A', 'D']")
result = store.select('wp', [crit1, crit2])
expected = wp.truncate(before=date).reindex(minor=['A', 'D'])
assert_panel_equal(result, expected)
result = store.select(
'wp', ['major_axis>="20000124"', ("minor_axis=['A', 'B']")])
expected = wp.truncate(before='20000124').reindex(minor=['A', 'B'])
assert_panel_equal(result, expected)
def test_frame_select(self):
df = tm.makeTimeDataFrame()
with ensure_clean_store(self.path) as store:
store.put('frame', df,format='table')
date = df.index[len(df) // 2]
crit1 = Term('index>=date')
self.assertEqual(crit1.env.scope['date'], date)
crit2 = ("columns=['A', 'D']")
crit3 = ('columns=A')
result = store.select('frame', [crit1, crit2])
expected = df.ix[date:, ['A', 'D']]
tm.assert_frame_equal(result, expected)
result = store.select('frame', [crit3])
expected = df.ix[:, ['A']]
tm.assert_frame_equal(result, expected)
# invalid terms
df = tm.makeTimeDataFrame()
store.append('df_time', df)
self.assertRaises(
ValueError, store.select, 'df_time', [Term("index>0")])
# can't select if not written as table
# store['frame'] = df
# self.assertRaises(ValueError, store.select,
# 'frame', [crit1, crit2])
def test_frame_select_complex(self):
# select via complex criteria
df = tm.makeTimeDataFrame()
df['string'] = 'foo'
df.loc[df.index[0:4],'string'] = 'bar'
with ensure_clean_store(self.path) as store:
store.put('df', df, table=True, data_columns=['string'])
# empty
result = store.select('df', 'index>df.index[3] & string="bar"')
expected = df.loc[(df.index>df.index[3]) & (df.string=='bar')]
tm.assert_frame_equal(result, expected)
result = store.select('df', 'index>df.index[3] & string="foo"')
expected = df.loc[(df.index>df.index[3]) & (df.string=='foo')]
tm.assert_frame_equal(result, expected)
# or
result = store.select('df', 'index>df.index[3] | string="bar"')
expected = df.loc[(df.index>df.index[3]) | (df.string=='bar')]
tm.assert_frame_equal(result, expected)
result = store.select('df', '(index>df.index[3] & index<=df.index[6]) | string="bar"')
expected = df.loc[((df.index>df.index[3]) & (df.index<=df.index[6])) | (df.string=='bar')]
tm.assert_frame_equal(result, expected)
# invert
result = store.select('df', 'string!="bar"')
expected = df.loc[df.string!='bar']
tm.assert_frame_equal(result, expected)
# invert not implemented in numexpr :(
self.assertRaises(NotImplementedError, store.select, 'df', '~(string="bar")')
# invert ok for filters
result = store.select('df', "~(columns=['A','B'])")
expected = df.loc[:,df.columns-['A','B']]
tm.assert_frame_equal(result, expected)
# in
result = store.select('df', "index>df.index[3] & columns in ['A','B']")
expected = df.loc[df.index>df.index[3]].reindex(columns=['A','B'])
tm.assert_frame_equal(result, expected)
def test_frame_select_complex2(self):
with ensure_clean_path(['parms.hdf','hist.hdf']) as paths:
pp, hh = paths
# use non-trivial selection criteria
parms = DataFrame({ 'A' : [1,1,2,2,3] })
parms.to_hdf(pp,'df',mode='w',format='table',data_columns=['A'])
selection = read_hdf(pp,'df',where='A=[2,3]')
hist = DataFrame(np.random.randn(25,1),columns=['data'],
index=MultiIndex.from_tuples([ (i,j) for i in range(5) for j in range(5) ],
names=['l1','l2']))
hist.to_hdf(hh,'df',mode='w',format='table')
expected = read_hdf(hh,'df',where=Term('l1','=',[2,3,4]))
# list like
result = read_hdf(hh,'df',where=Term('l1','=',selection.index.tolist()))
assert_frame_equal(result, expected)
l = selection.index.tolist()
# scope with list-like
store = HDFStore(hh)
result = store.select('df',where='l1=l')
assert_frame_equal(result, expected)
store.close()
result = read_hdf(hh,'df',where='l1=l')
assert_frame_equal(result, expected)
# index
index = selection.index
result = read_hdf(hh,'df',where='l1=index')
assert_frame_equal(result, expected)
result = read_hdf(hh,'df',where='l1=selection.index')
assert_frame_equal(result, expected)
result = read_hdf(hh,'df',where='l1=selection.index.tolist()')
assert_frame_equal(result, expected)
result = read_hdf(hh,'df',where='l1=list(selection.index)')
assert_frame_equal(result, expected)
# scope with index
store = HDFStore(hh)
result = store.select('df',where='l1=index')
assert_frame_equal(result, expected)
result = store.select('df',where='l1=selection.index')
assert_frame_equal(result, expected)
result = store.select('df',where='l1=selection.index.tolist()')
assert_frame_equal(result, expected)
result = store.select('df',where='l1=list(selection.index)')
assert_frame_equal(result, expected)
store.close()
def test_invalid_filtering(self):
# can't use more than one filter (atm)
df = tm.makeTimeDataFrame()
with ensure_clean_store(self.path) as store:
store.put('df', df, table=True)
# not implemented
self.assertRaises(NotImplementedError, store.select, 'df', "columns=['A'] | columns=['B']")
# in theory we could deal with this
self.assertRaises(NotImplementedError, store.select, 'df', "columns=['A','B'] & columns=['C']")
def test_string_select(self):
# GH 2973
with ensure_clean_store(self.path) as store:
df = tm.makeTimeDataFrame()
# test string ==/!=
df['x'] = 'none'
df.ix[2:7,'x'] = ''
store.append('df',df,data_columns=['x'])
result = store.select('df',Term('x=none'))
expected = df[df.x == 'none']
assert_frame_equal(result,expected)
try:
result = store.select('df',Term('x!=none'))
expected = df[df.x != 'none']
assert_frame_equal(result,expected)
except Exception as detail:
com.pprint_thing("[{0}]".format(detail))
com.pprint_thing(store)
com.pprint_thing(expected)
df2 = df.copy()
df2.loc[df2.x=='','x'] = np.nan
store.append('df2',df2,data_columns=['x'])
result = store.select('df2',Term('x!=none'))
expected = df2[isnull(df2.x)]
assert_frame_equal(result,expected)
# int ==/!=
df['int'] = 1
df.ix[2:7,'int'] = 2
store.append('df3',df,data_columns=['int'])
result = store.select('df3',Term('int=2'))
expected = df[df.int==2]
assert_frame_equal(result,expected)
result = store.select('df3',Term('int!=2'))
expected = df[df.int!=2]
assert_frame_equal(result,expected)
def test_read_column(self):
df = tm.makeTimeDataFrame()
with ensure_clean_store(self.path) as store:
_maybe_remove(store, 'df')
store.append('df', df)
# error
self.assertRaises(KeyError, store.select_column, 'df', 'foo')
def f():
store.select_column('df', 'index', where = ['index>5'])
self.assertRaises(Exception, f)
# valid
result = store.select_column('df', 'index')
tm.assert_almost_equal(result.values, Series(df.index).values)
self.assertIsInstance(result,Series)
# not a data indexable column
self.assertRaises(
ValueError, store.select_column, 'df', 'values_block_0')
# a data column
df2 = df.copy()
df2['string'] = 'foo'
store.append('df2', df2, data_columns=['string'])
result = store.select_column('df2', 'string')
tm.assert_almost_equal(result.values, df2['string'].values)
# a data column with NaNs, result excludes the NaNs
df3 = df.copy()
df3['string'] = 'foo'
df3.ix[4:6, 'string'] = np.nan
store.append('df3', df3, data_columns=['string'])
result = store.select_column('df3', 'string')
tm.assert_almost_equal(result.values, df3['string'].values)
# start/stop
result = store.select_column('df3', 'string', start=2)
tm.assert_almost_equal(result.values, df3['string'].values[2:])
result = store.select_column('df3', 'string', start=-2)
tm.assert_almost_equal(result.values, df3['string'].values[-2:])
result = store.select_column('df3', 'string', stop=2)
tm.assert_almost_equal(result.values, df3['string'].values[:2])
result = store.select_column('df3', 'string', stop=-2)
tm.assert_almost_equal(result.values, df3['string'].values[:-2])
result = store.select_column('df3', 'string', start=2, stop=-2)
tm.assert_almost_equal(result.values, df3['string'].values[2:-2])
result = store.select_column('df3', 'string', start=-2, stop=2)
tm.assert_almost_equal(result.values, df3['string'].values[-2:2])
def test_coordinates(self):
df = tm.makeTimeDataFrame()
with ensure_clean_store(self.path) as store:
_maybe_remove(store, 'df')
store.append('df', df)
# all
c = store.select_as_coordinates('df')
assert((c.values == np.arange(len(df.index))).all() == True)
# get coordinates back & test vs frame
_maybe_remove(store, 'df')
df = DataFrame(dict(A=lrange(5), B=lrange(5)))
store.append('df', df)
c = store.select_as_coordinates('df', ['index<3'])
assert((c.values == np.arange(3)).all() == True)
result = store.select('df', where=c)
expected = df.ix[0:2, :]
tm.assert_frame_equal(result, expected)
c = store.select_as_coordinates('df', ['index>=3', 'index<=4'])
assert((c.values == np.arange(2) + 3).all() == True)
result = store.select('df', where=c)
expected = df.ix[3:4, :]
tm.assert_frame_equal(result, expected)
self.assertIsInstance(c, Index)
# multiple tables
_maybe_remove(store, 'df1')
_maybe_remove(store, 'df2')
df1 = tm.makeTimeDataFrame()
df2 = tm.makeTimeDataFrame().rename(columns=lambda x: "%s_2" % x)
store.append('df1', df1, data_columns=['A', 'B'])
store.append('df2', df2)
c = store.select_as_coordinates('df1', ['A>0', 'B>0'])
df1_result = store.select('df1', c)
df2_result = store.select('df2', c)
result = concat([df1_result, df2_result], axis=1)
expected = concat([df1, df2], axis=1)
expected = expected[(expected.A > 0) & (expected.B > 0)]
tm.assert_frame_equal(result, expected)
# pass array/mask as the coordinates
with ensure_clean_store(self.path) as store:
df = DataFrame(np.random.randn(1000,2),index=date_range('20000101',periods=1000))
store.append('df',df)
c = store.select_column('df','index')
where = c[DatetimeIndex(c).month==5].index
expected = df.iloc[where]
# locations
result = store.select('df',where=where)
tm.assert_frame_equal(result,expected)
# boolean
result = store.select('df',where=where)
tm.assert_frame_equal(result,expected)
# invalid
self.assertRaises(ValueError, store.select, 'df',where=np.arange(len(df),dtype='float64'))
self.assertRaises(ValueError, store.select, 'df',where=np.arange(len(df)+1))
self.assertRaises(ValueError, store.select, 'df',where=np.arange(len(df)),start=5)
self.assertRaises(ValueError, store.select, 'df',where=np.arange(len(df)),start=5,stop=10)
# selection with filter
selection = date_range('20000101',periods=500)
result = store.select('df', where='index in selection')
expected = df[df.index.isin(selection)]
tm.assert_frame_equal(result,expected)
# list
df = DataFrame(np.random.randn(10,2))
store.append('df2',df)
result = store.select('df2',where=[0,3,5])
expected = df.iloc[[0,3,5]]
tm.assert_frame_equal(result,expected)
# boolean
where = [True] * 10
where[-2] = False
result = store.select('df2',where=where)
expected = df.loc[where]
tm.assert_frame_equal(result,expected)
# start/stop
result = store.select('df2', start=5, stop=10)
expected = df[5:10]
tm.assert_frame_equal(result,expected)
def test_append_to_multiple(self):
df1 = tm.makeTimeDataFrame()
df2 = tm.makeTimeDataFrame().rename(columns=lambda x: "%s_2" % x)
df2['foo'] = 'bar'
df = concat([df1, df2], axis=1)
with ensure_clean_store(self.path) as store:
# exceptions
self.assertRaises(ValueError, store.append_to_multiple,
{'df1': ['A', 'B'], 'df2': None}, df, selector='df3')
self.assertRaises(ValueError, store.append_to_multiple,
{'df1': None, 'df2': None}, df, selector='df3')
self.assertRaises(
ValueError, store.append_to_multiple, 'df1', df, 'df1')
# regular operation
store.append_to_multiple(
{'df1': ['A', 'B'], 'df2': None}, df, selector='df1')
result = store.select_as_multiple(
['df1', 'df2'], where=['A>0', 'B>0'], selector='df1')
expected = df[(df.A > 0) & (df.B > 0)]
tm.assert_frame_equal(result, expected)
def test_append_to_multiple_dropna(self):
df1 = tm.makeTimeDataFrame()
df2 = tm.makeTimeDataFrame().rename(columns=lambda x: "%s_2" % x)
df1.ix[1, ['A', 'B']] = np.nan
df = concat([df1, df2], axis=1)
with ensure_clean_store(self.path) as store:
# dropna=True should guarantee rows are synchronized
store.append_to_multiple(
{'df1': ['A', 'B'], 'df2': None}, df, selector='df1',
dropna=True)
result = store.select_as_multiple(['df1', 'df2'])
expected = df.dropna()
tm.assert_frame_equal(result, expected)
tm.assert_index_equal(store.select('df1').index,
store.select('df2').index)
# dropna=False shouldn't synchronize row indexes
store.append_to_multiple(
{'df1': ['A', 'B'], 'df2': None}, df, selector='df1',
dropna=False)
self.assertRaises(
ValueError, store.select_as_multiple, ['df1', 'df2'])
assert not store.select('df1').index.equals(
store.select('df2').index)
def test_select_as_multiple(self):
df1 = tm.makeTimeDataFrame()
df2 = tm.makeTimeDataFrame().rename(columns=lambda x: "%s_2" % x)
df2['foo'] = 'bar'
with ensure_clean_store(self.path) as store:
# no tables stored
self.assertRaises(Exception, store.select_as_multiple,
None, where=['A>0', 'B>0'], selector='df1')
store.append('df1', df1, data_columns=['A', 'B'])
store.append('df2', df2)
# exceptions
self.assertRaises(Exception, store.select_as_multiple,
None, where=['A>0', 'B>0'], selector='df1')
self.assertRaises(Exception, store.select_as_multiple,
[None], where=['A>0', 'B>0'], selector='df1')
self.assertRaises(KeyError, store.select_as_multiple,
['df1','df3'], where=['A>0', 'B>0'], selector='df1')
self.assertRaises(KeyError, store.select_as_multiple,
['df3'], where=['A>0', 'B>0'], selector='df1')
self.assertRaises(KeyError, store.select_as_multiple,
['df1','df2'], where=['A>0', 'B>0'], selector='df4')
# default select
result = store.select('df1', ['A>0', 'B>0'])
expected = store.select_as_multiple(
['df1'], where=['A>0', 'B>0'], selector='df1')
tm.assert_frame_equal(result, expected)
expected = store.select_as_multiple(
'df1', where=['A>0', 'B>0'], selector='df1')
tm.assert_frame_equal(result, expected)
# multiple
result = store.select_as_multiple(
['df1', 'df2'], where=['A>0', 'B>0'], selector='df1')
expected = concat([df1, df2], axis=1)
expected = expected[(expected.A > 0) & (expected.B > 0)]
tm.assert_frame_equal(result, expected)
# multiple (diff selector)
result = store.select_as_multiple(['df1', 'df2'], where=[Term(
'index>df2.index[4]')], selector='df2')
expected = concat([df1, df2], axis=1)
expected = expected[5:]
tm.assert_frame_equal(result, expected)
# test exception for diff rows
store.append('df3', tm.makeTimeDataFrame(nper=50))
self.assertRaises(ValueError, store.select_as_multiple,
['df1','df3'], where=['A>0', 'B>0'], selector='df1')
def test_nan_selection_bug_4858(self):
# GH 4858; nan selection bug, only works for pytables >= 3.1
if LooseVersion(tables.__version__) < '3.1.0':
raise nose.SkipTest('tables version does not support fix for nan selection bug: GH 4858')
with ensure_clean_store(self.path) as store:
df = DataFrame(dict(cols = range(6), values = range(6)), dtype='float64')
df['cols'] = (df['cols']+10).apply(str)
df.iloc[0] = np.nan
expected = DataFrame(dict(cols = ['13.0','14.0','15.0'], values = [3.,4.,5.]), index=[3,4,5])
# write w/o the index on that particular column
store.append('df',df, data_columns=True,index=['cols'])
result = store.select('df',where='values>2.0')
assert_frame_equal(result,expected)
def test_start_stop(self):
with ensure_clean_store(self.path) as store:
df = DataFrame(dict(A=np.random.rand(20), B=np.random.rand(20)))
store.append('df', df)
result = store.select(
'df', [Term("columns=['A']")], start=0, stop=5)
expected = df.ix[0:4, ['A']]
tm.assert_frame_equal(result, expected)
# out of range
result = store.select(
'df', [Term("columns=['A']")], start=30, stop=40)
assert(len(result) == 0)
assert(type(result) == DataFrame)
def test_select_filter_corner(self):
df = DataFrame(np.random.randn(50, 100))
df.index = ['%.3d' % c for c in df.index]
df.columns = ['%.3d' % c for c in df.columns]
with ensure_clean_store(self.path) as store:
store.put('frame', df, format='table')
crit = Term('columns=df.columns[:75]')
result = store.select('frame', [crit])
tm.assert_frame_equal(result, df.ix[:, df.columns[:75]])
crit = Term('columns=df.columns[:75:2]')
result = store.select('frame', [crit])
tm.assert_frame_equal(result, df.ix[:, df.columns[:75:2]])
def _check_roundtrip(self, obj, comparator, compression=False, **kwargs):
options = {}
if compression:
options['complib'] = _default_compressor
with ensure_clean_store(self.path, 'w', **options) as store:
store['obj'] = obj
retrieved = store['obj']
comparator(retrieved, obj, **kwargs)
def _check_double_roundtrip(self, obj, comparator, compression=False,
**kwargs):
options = {}
if compression:
options['complib'] = compression or _default_compressor
with ensure_clean_store(self.path, 'w', **options) as store:
store['obj'] = obj
retrieved = store['obj']
comparator(retrieved, obj, **kwargs)
store['obj'] = retrieved
again = store['obj']
comparator(again, obj, **kwargs)
def _check_roundtrip_table(self, obj, comparator, compression=False):
options = {}
if compression:
options['complib'] = _default_compressor
with ensure_clean_store(self.path, 'w', **options) as store:
store.put('obj', obj, format='table')
retrieved = store['obj']
# sorted_obj = _test_sort(obj)
comparator(retrieved, obj)
def test_multiple_open_close(self):
# GH 4409, open & close multiple times
with ensure_clean_path(self.path) as path:
df = tm.makeDataFrame()
df.to_hdf(path,'df',mode='w',format='table')
# single
store = HDFStore(path)
self.assertNotIn('CLOSED', str(store))
self.assertTrue(store.is_open)
store.close()
self.assertIn('CLOSED', str(store))
self.assertFalse(store.is_open)
with ensure_clean_path(self.path) as path:
if pytables._table_file_open_policy_is_strict:
# multiples
store1 = HDFStore(path)
def f():
HDFStore(path)
self.assertRaises(ValueError, f)
store1.close()
else:
# multiples
store1 = HDFStore(path)
store2 = HDFStore(path)
self.assertNotIn('CLOSED', str(store1))
self.assertNotIn('CLOSED', str(store2))
self.assertTrue(store1.is_open)
self.assertTrue(store2.is_open)
store1.close()
self.assertIn('CLOSED', str(store1))
self.assertFalse(store1.is_open)
self.assertNotIn('CLOSED', str(store2))
self.assertTrue(store2.is_open)
store2.close()
self.assertIn('CLOSED', str(store1))
self.assertIn('CLOSED', str(store2))
self.assertFalse(store1.is_open)
self.assertFalse(store2.is_open)
# nested close
store = HDFStore(path,mode='w')
store.append('df',df)
store2 = HDFStore(path)
store2.append('df2',df)
store2.close()
self.assertIn('CLOSED', str(store2))
self.assertFalse(store2.is_open)
store.close()
self.assertIn('CLOSED', str(store))
self.assertFalse(store.is_open)
# double closing
store = HDFStore(path,mode='w')
store.append('df', df)
store2 = HDFStore(path)
store.close()
self.assertIn('CLOSED', str(store))
self.assertFalse(store.is_open)
store2.close()
self.assertIn('CLOSED', str(store2))
self.assertFalse(store2.is_open)
# ops on a closed store
with ensure_clean_path(self.path) as path:
df = tm.makeDataFrame()
df.to_hdf(path,'df',mode='w',format='table')
store = HDFStore(path)
store.close()
self.assertRaises(ClosedFileError, store.keys)
self.assertRaises(ClosedFileError, lambda : 'df' in store)
self.assertRaises(ClosedFileError, lambda : len(store))
self.assertRaises(ClosedFileError, lambda : store['df'])
self.assertRaises(ClosedFileError, lambda : store.df)
self.assertRaises(ClosedFileError, store.select, 'df')
self.assertRaises(ClosedFileError, store.get, 'df')
self.assertRaises(ClosedFileError, store.append, 'df2', df)
self.assertRaises(ClosedFileError, store.put, 'df3', df)
self.assertRaises(ClosedFileError, store.get_storer, 'df2')
self.assertRaises(ClosedFileError, store.remove, 'df2')
def f():
store.select('df')
tm.assertRaisesRegexp(ClosedFileError, 'file is not open', f)
def test_pytables_native_read(self):
try:
store = HDFStore(tm.get_data_path('legacy_hdf/pytables_native.h5'), 'r')
d2 = store['detector/readout']
assert isinstance(d2, DataFrame)
finally:
safe_close(store)
try:
store = HDFStore(tm.get_data_path('legacy_hdf/pytables_native2.h5'), 'r')
str(store)
d1 = store['detector']
assert isinstance(d1, DataFrame)
finally:
safe_close(store)
def test_legacy_read(self):
try:
store = HDFStore(tm.get_data_path('legacy_hdf/legacy.h5'), 'r')
store['a']
store['b']
store['c']
store['d']
finally:
safe_close(store)
def test_legacy_table_read(self):
# legacy table types
try:
store = HDFStore(tm.get_data_path('legacy_hdf/legacy_table.h5'), 'r')
store.select('df1')
store.select('df2')
store.select('wp1')
# force the frame
store.select('df2', typ='legacy_frame')
# old version warning
with tm.assert_produces_warning(expected_warning=IncompatibilityWarning):
self.assertRaises(
Exception, store.select, 'wp1', Term('minor_axis=B'))
df2 = store.select('df2')
result = store.select('df2', Term('index>df2.index[2]'))
expected = df2[df2.index > df2.index[2]]
assert_frame_equal(expected, result)
finally:
safe_close(store)
def test_legacy_0_10_read(self):
# legacy from 0.10
try:
store = HDFStore(tm.get_data_path('legacy_hdf/legacy_0.10.h5'), 'r')
str(store)
for k in store.keys():
store.select(k)
finally:
safe_close(store)
def test_legacy_0_11_read(self):
# legacy from 0.11
try:
path = os.path.join('legacy_hdf', 'legacy_table_0.11.h5')
store = HDFStore(tm.get_data_path(path), 'r')
str(store)
assert 'df' in store
assert 'df1' in store
assert 'mi' in store
df = store.select('df')
df1 = store.select('df1')
mi = store.select('mi')
assert isinstance(df, DataFrame)
assert isinstance(df1, DataFrame)
assert isinstance(mi, DataFrame)
finally:
safe_close(store)
def test_copy(self):
def do_copy(f = None, new_f = None, keys = None, propindexes = True, **kwargs):
try:
if f is None:
f = tm.get_data_path(os.path.join('legacy_hdf',
'legacy_0.10.h5'))
store = HDFStore(f, 'r')
if new_f is None:
import tempfile
fd, new_f = tempfile.mkstemp()
tstore = store.copy(new_f, keys = keys, propindexes = propindexes, **kwargs)
# check keys
if keys is None:
keys = store.keys()
self.assertEqual(set(keys), set(tstore.keys()))
# check indices & nrows
for k in tstore.keys():
if tstore.get_storer(k).is_table:
new_t = tstore.get_storer(k)
orig_t = store.get_storer(k)
self.assertEqual(orig_t.nrows, new_t.nrows)
# check propindexes
if propindexes:
for a in orig_t.axes:
if a.is_indexed:
self.assertTrue(new_t[a.name].is_indexed)
finally:
safe_close(store)
safe_close(tstore)
try:
os.close(fd)
except:
pass
safe_remove(new_f)
do_copy()
do_copy(keys = ['/a','/b','/df1_mixed'])
do_copy(propindexes = False)
# new table
df = tm.makeDataFrame()
try:
st = HDFStore(self.path)
st.append('df', df, data_columns = ['A'])
st.close()
do_copy(f = self.path)
do_copy(f = self.path, propindexes = False)
finally:
safe_remove(self.path)
def test_legacy_table_write(self):
raise nose.SkipTest("cannot write legacy tables")
store = HDFStore(tm.get_data_path('legacy_hdf/legacy_table_%s.h5' % pandas.__version__), 'a')
df = tm.makeDataFrame()
wp = tm.makePanel()
index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'],
['one', 'two', 'three']],
labels=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
[0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
names=['foo', 'bar'])
df = DataFrame(np.random.randn(10, 3), index=index,
columns=['A', 'B', 'C'])
store.append('mi', df)
df = DataFrame(dict(A = 'foo', B = 'bar'),index=lrange(10))
store.append('df', df, data_columns = ['B'], min_itemsize={'A' : 200 })
store.append('wp', wp)
store.close()
def test_store_datetime_fractional_secs(self):
with ensure_clean_store(self.path) as store:
dt = datetime.datetime(2012, 1, 2, 3, 4, 5, 123456)
series = Series([0], [dt])
store['a'] = series
self.assertEqual(store['a'].index[0], dt)
def test_tseries_indices_series(self):
with ensure_clean_store(self.path) as store:
idx = tm.makeDateIndex(10)
ser = Series(np.random.randn(len(idx)), idx)
store['a'] = ser
result = store['a']
assert_series_equal(result, ser)
self.assertEqual(type(result.index), type(ser.index))
self.assertEqual(result.index.freq, ser.index.freq)
idx = tm.makePeriodIndex(10)
ser = Series(np.random.randn(len(idx)), idx)
store['a'] = ser
result = store['a']
assert_series_equal(result, ser)
self.assertEqual(type(result.index), type(ser.index))
self.assertEqual(result.index.freq, ser.index.freq)
def test_tseries_indices_frame(self):
with ensure_clean_store(self.path) as store:
idx = tm.makeDateIndex(10)
df = DataFrame(np.random.randn(len(idx), 3), index=idx)
store['a'] = df
result = store['a']
assert_frame_equal(result, df)
self.assertEqual(type(result.index), type(df.index))
self.assertEqual(result.index.freq, df.index.freq)
idx = tm.makePeriodIndex(10)
df = DataFrame(np.random.randn(len(idx), 3), idx)
store['a'] = df
result = store['a']
assert_frame_equal(result, df)
self.assertEqual(type(result.index), type(df.index))
self.assertEqual(result.index.freq, df.index.freq)
def test_tseries_select_index_column(self):
# GH7777
# selecting a UTC datetimeindex column did
# not preserve UTC tzinfo set before storing
# check that no tz still works
rng = date_range('1/1/2000', '1/30/2000')
frame = DataFrame(np.random.randn(len(rng), 4), index=rng)
with ensure_clean_store(self.path) as store:
store.append('frame', frame)
result = store.select_column('frame', 'index')
self.assertEqual(rng.tz, DatetimeIndex(result.values).tz)
# check utc
rng = date_range('1/1/2000', '1/30/2000', tz='UTC')
frame = DataFrame(np.random.randn(len(rng), 4), index=rng)
with ensure_clean_store(self.path) as store:
store.append('frame', frame)
result = store.select_column('frame', 'index')
self.assertEqual(rng.tz, DatetimeIndex(result.values).tz)
# double check non-utc
rng = date_range('1/1/2000', '1/30/2000', tz='US/Eastern')
frame = DataFrame(np.random.randn(len(rng), 4), index=rng)
with ensure_clean_store(self.path) as store:
store.append('frame', frame)
result = store.select_column('frame', 'index')
self.assertEqual(rng.tz, DatetimeIndex(result.values).tz)
def test_unicode_index(self):
unicode_values = [u('\u03c3'), u('\u03c3\u03c3')]
def f():
s = Series(np.random.randn(len(unicode_values)), unicode_values)
self._check_roundtrip(s, tm.assert_series_equal)
compat_assert_produces_warning(PerformanceWarning,f)
def test_store_datetime_mixed(self):
df = DataFrame(
{'a': [1, 2, 3], 'b': [1., 2., 3.], 'c': ['a', 'b', 'c']})
ts = tm.makeTimeSeries()
df['d'] = ts.index[:3]
self._check_roundtrip(df, tm.assert_frame_equal)
# def test_cant_write_multiindex_table(self):
# # for now, #1848
# df = DataFrame(np.random.randn(10, 4),
# index=[np.arange(5).repeat(2),
# np.tile(np.arange(2), 5)])
# self.assertRaises(Exception, store.put, 'foo', df, format='table')
def test_append_with_diff_col_name_types_raises_value_error(self):
df = DataFrame(np.random.randn(10, 1))
df2 = DataFrame({'a': np.random.randn(10)})
df3 = DataFrame({(1, 2): np.random.randn(10)})
df4 = DataFrame({('1', 2): np.random.randn(10)})
df5 = DataFrame({('1', 2, object): np.random.randn(10)})
with ensure_clean_store(self.path) as store:
name = 'df_%s' % tm.rands(10)
store.append(name, df)
for d in (df2, df3, df4, df5):
with tm.assertRaises(ValueError):
store.append(name, d)
def test_query_with_nested_special_character(self):
df = DataFrame({'a': ['a', 'a', 'c', 'b', 'test & test', 'c' , 'b', 'e'],
'b': [1, 2, 3, 4, 5, 6, 7, 8]})
expected = df[df.a == 'test & test']
with ensure_clean_store(self.path) as store:
store.append('test', df, format='table', data_columns=True)
result = store.select('test', 'a = "test & test"')
tm.assert_frame_equal(expected, result)
def test_categorical(self):
with ensure_clean_store(self.path) as store:
# basic
s = Series(Categorical(['a', 'b', 'b', 'a', 'a', 'c'], categories=['a','b','c','d'], ordered=False))
store.append('s', s, format='table')
result = store.select('s')
tm.assert_series_equal(s, result)
s = Series(Categorical(['a', 'b', 'b', 'a', 'a', 'c'], categories=['a','b','c','d'], ordered=True))
store.append('s_ordered', s, format='table')
result = store.select('s_ordered')
tm.assert_series_equal(s, result)
df = DataFrame({"s":s, "vals":[1,2,3,4,5,6]})
store.append('df', df, format='table')
result = store.select('df')
tm.assert_frame_equal(result, df)
# dtypes
s = Series([1,1,2,2,3,4,5]).astype('category')
store.append('si',s)
result = store.select('si')
tm.assert_series_equal(result, s)
s = Series([1,1,np.nan,2,3,4,5]).astype('category')
store.append('si2',s)
result = store.select('si2')
tm.assert_series_equal(result, s)
# multiple
df2 = df.copy()
df2['s2'] = Series(list('abcdefg')).astype('category')
store.append('df2',df2)
result = store.select('df2')
tm.assert_frame_equal(result, df2)
# make sure the metadata is ok
self.assertTrue('/df2 ' in str(store))
self.assertTrue('/df2/meta/values_block_0/meta' in str(store))
self.assertTrue('/df2/meta/values_block_1/meta' in str(store))
# unordered
s = Series(Categorical(['a', 'b', 'b', 'a', 'a', 'c'], categories=['a','b','c','d'],ordered=False))
store.append('s2', s, format='table')
result = store.select('s2')
tm.assert_series_equal(result, s)
# query
store.append('df3', df, data_columns=['s'])
expected = df[df.s.isin(['b','c'])]
result = store.select('df3', where = ['s in ["b","c"]'])
tm.assert_frame_equal(result, expected)
expected = df[df.s.isin(['b','c'])]
result = store.select('df3', where = ['s = ["b","c"]'])
tm.assert_frame_equal(result, expected)
expected = df[df.s.isin(['d'])]
result = store.select('df3', where = ['s in ["d"]'])
tm.assert_frame_equal(result, expected)
expected = df[df.s.isin(['f'])]
result = store.select('df3', where = ['s in ["f"]'])
tm.assert_frame_equal(result, expected)
# appending with same categories is ok
store.append('df3', df)
df = concat([df,df])
expected = df[df.s.isin(['b','c'])]
result = store.select('df3', where = ['s in ["b","c"]'])
tm.assert_frame_equal(result, expected)
# appending must have the same categories
df3 = df.copy()
df3['s'].cat.remove_unused_categories(inplace=True)
self.assertRaises(ValueError, lambda : store.append('df3', df3))
# remove
# make sure metadata is removed (it's a recursive removal, so it should be)
result = store.select('df3/meta/s/meta')
self.assertIsNotNone(result)
store.remove('df3')
self.assertRaises(KeyError, lambda : store.select('df3/meta/s/meta'))
def test_duplicate_column_name(self):
df = DataFrame(columns=["a", "a"], data=[[0, 0]])
with ensure_clean_path(self.path) as path:
self.assertRaises(ValueError, df.to_hdf, path, 'df', format='fixed')
df.to_hdf(path, 'df', format='table')
other = read_hdf(path, 'df')
tm.assert_frame_equal(df, other)
def _test_sort(obj):
if isinstance(obj, DataFrame):
return obj.reindex(sorted(obj.index))
elif isinstance(obj, Panel):
return obj.reindex(major=sorted(obj.major_axis))
else:
raise ValueError('type not supported here')
if __name__ == '__main__':
import nose
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)<|fim▁end|> | # fixed
_maybe_remove(store, 'df2')
store.put('df2',df)
result = store.select('df2') |
<|file_name|>attributes.ts<|end_file_name|><|fim▁begin|>namespace $ {
export function $mol_dom_render_attributes (
el : Element ,
attrs : { [ key : string ] : string|number|boolean|null }
) {
for( let name in attrs ) {
<|fim▁hole|> let val = attrs[ name ] as any
if( val === null || val === false ) {
if( !el.hasAttribute( name ) ) continue
el.removeAttribute( name )
} else {
const str = String( val )
if( el.getAttribute( name ) === str ) continue
el.setAttribute( name , str )
}
}
}
}<|fim▁end|> | |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Odoo, an open source suite of business apps
# This module copyright (C) 2014-2015 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.<|fim▁hole|>##############################################################################
{
"name": "Reset a chart of accounts",
"summary": ("Delete the accounting setup from an otherwise reusable "
"database"),
"version": "1.0",
"author": "Therp BV,Odoo Community Association (OCA)",
"category": 'Accounting & Finance',
"depends": [
'account',
],
'license': 'AGPL-3'
}<|fim▁end|> | #
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# |
<|file_name|>JPEGXREncoderOptions.ts<|end_file_name|><|fim▁begin|>/**
* Copyright 2014 Mozilla Foundation
* <|fim▁hole|> * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Class: JPEGXREncoderOptions
module Shumway.AVMX.AS.flash.display {
import notImplemented = Shumway.Debug.notImplemented;
import axCoerceString = Shumway.AVMX.axCoerceString;
export class JPEGXREncoderOptions extends ASObject {
// Called whenever the class is initialized.
static classInitializer: any = null;
// List of static symbols to link.
static classSymbols: string [] = null; // [];
// List of instance symbols to link.
static instanceSymbols: string [] = null; // ["quantization", "colorSpace", "trimFlexBits"];
constructor (quantization: number /*uint*/ = 20, colorSpace: string = "auto", trimFlexBits: number /*uint*/ = 0) {
super();
this.quantization = quantization >>> 0;
this.colorSpace = axCoerceString(colorSpace);
this.trimFlexBits = trimFlexBits >>> 0;
}
quantization: number /*uint*/;
colorSpace: string;
trimFlexBits: number /*uint*/;
}
}<|fim▁end|> | * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. |
<|file_name|>wiki2man.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
import sys
WorkList = None
def SH(i):
"""reformatting .SH"""
global WorkList
string = WorkList[i]
l = len(string) - 2
r = 0
while string[0] == '=' and string[l] == '=':
WorkList[i] = string[1:l]
string = WorkList[i]
l = len(string) - 1
r = r + 1
if r == 2:
WorkList[i] = '\n.SH "' + string + '"\n.PP\n'
else:
WorkList[i] = '\n.SS "' + string + '"\n.PP\n'
#---------------------------------------------------------------------------
def TP(i):
"""reformatting .TP"""
global WorkList
string = WorkList[i]
l=0
string1 = WorkList[i + l]
while string1 != '' and string1[0] == ';':
j=0
finish = 0
nexcl = 1
s = 0
while len(string) > j and finish == 0:
if string[j:j+8] == '<nowiki>':
nexcl = 0
j = j + 7
elif string[j:j+9] == '</nowiki>':
nexcl = 1
j = j + 8
elif string[j:j+4] == '<!--':
nexcl = 0
j = j + 3
elif string[j:j+3] == '-->':
nexcl = 1
j = j + 2
if string[j] == ':':
s = 1
finish = nexcl * s
s = 0
j = j + 1
if len(string) == j:
WorkList[i] = '.TP\n.B ' + string[1:]
elif string[j-1] == ':':
WorkList[i] = '.TP\n.B ' + string[1:j-1] + '\n' + string[j:]
l = l + 1
string1 = WorkList[i+l]
while string1 != '' and string1[0] == ':' and string1[1] <> ':' and string1[1] <> ';':
WorkList[i + l] = '.br\n' + string1[1:]
l = l + 1
string1 = WorkList[i + l]
#---------------------------------------------------------------------------
def wiki2man(content):
global WorkList
string = '\n'
string = unicode(string, 'utf-8')
WorkList = [string]
cpt = 0
while string != '' and cpt < len(content):
string = content[cpt]
cpt += 1
WorkList.append(string)
path = sys.argv[0]
n = len(path)
n = n - 11
path = path[:n]
########## Reformatting from wiki to roff ##########
# TH:
string = WorkList[1];
if len(string) > 2 and string[0] != '=' and string[:4] != '<!--' and string[:2] != '{{':
i = 0
while len(string) > i and string[i] != '(':
i = i + 1
WorkList.pop(1)
WorkList.pop(0)
i = 0
tabacc = -1
tab = 0
tab2 = 0
col = 0
nf = 0
nr = 0
excl = 0
nowiki = 0
RS=0
strng = unicode('{{MAN индекс}}', 'utf-8')
while len(WorkList) > i:
string = WorkList[i]
if len(string) > 1:
# reformatting "nowiki"
if string[:9] == '</nowiki>':
WorkList[i] = string[9:]
nowiki = 0
if nowiki == 0:
# reformatting "pre"
if string[:6] == '</pre>':
WorkList[i] = '\n.fi\n.RE\n' + string[6:]
nf = 0
# reformatting "tt"
elif string[:5] == '</tt>':
if string[6:7] == '. ':
WorkList[i] = '\n.fi\n.RE\n' + string[7:]
elif len(string) > 6 and string[6] == '.':
WorkList[i] = '\n.fi\n.RE\n' + string[6:]
else:
WorkList[i] = '\n.fi\n.RE\n' + string[5:]
nf = 0
# reformatting " "
if string[0] == ' ':
if nf == 0:
nf = 1
WorkList[i] = '\n.RS\n.nf\n' + string
elif nf == 1:
WorkList[i] = string
else:
if nf == 1:
nf = 0
WorkList[i] = '\n.fi\n.RE\n'
WorkList.insert(i+1, string)
string = WorkList[i]
if nf != 2 and nowiki == 0:
# reformatting excluded text <!-- * -->
if excl == 1:
WorkList[i] = '.\" ' + string[0:]
string = WorkList[i]
if nf == 0:
# format titles
if string[0] == '=' and string[len(string)-2] == '=':
SH(i)
# format ";"
elif string[0] == ';':
TP(i)
# format ":..."
elif string[0] == ':':
l = 1
s = ''
while string[l] == ':':
l = l + 1;
if RS == l:
s = '\n.br\n'
elif RS < l:
while RS < l:
s = s + '.RS\n'
RS = RS + 1
if string[RS] == ';':
WorkList[i] = s + '.TP\n.B ' + string[RS+1:]
else:
WorkList[i] = s + string[RS:]
string = WorkList[i]
stri = WorkList[i+1]
if RS > 0 and stri[0] <> ':':
while RS > 0:
WorkList[i] = string + '\n.RE\n'
RS = RS - 1
string = WorkList[i]
else:
while RS > 0 and len(stri) > RS-1 and stri[RS-1] <> ':':
RS = RS - 1
WorkList[i] = string + '\n.RE\n'
string = WorkList[i]
# format "*..."
elif string[0] == '*':
WorkList[i] = '.br\n * ' + string[1:]
# format tables 2
elif string[:2] == '{|':
if tab2 > 0:
WorkList[i] = '.RS\n'
tab2 = tab2 + 1
col = 0
else:
WorkList[i] = ''
tab2 = 1
elif string[:2] == '|-' and tab2 > 0:
WorkList[i] = ''
col = 0
elif string[:2] == '|}':
if tab2 == 1:
WorkList[i] = ''
col = 0
tab2 = 0
elif tab2 > 1:
WorkList[i] = '\n.RE\n'
col = 0
tab2 = tab2 - 1
elif string[:8] == '|valign=' and tab2 > 0:
j = 9
while len(string) > j and string[j]!='|':
j = j + 1
if string[j] == '|':
if col == 0:
WorkList[i] = '\n.TP\n' + string[j+1:]
col = 1
elif col > 0:
WorkList[i] = string[j+1:]
col = 2
elif col > 1:
WorkList[i] = '.PP\n' + string[j+1:]
col = col + 1
elif string[:1] == '|' and tab2 > 0:
if col == 0:
WorkList[i] = '\n.TP\n' + string[1:]
col = 1
elif col == 1:
WorkList[i] = string[1:]
col = col + 1
elif col > 1:
WorkList[i] = '\n' + string[1:]
col = col + 1
# delete wiki "Category:"
elif string[:11] == '[[Category:':
WorkList[i] = ''
# delete wiki {{MAN индекс}}
elif string[:14] == strng:
WorkList[i] = ''
# delete wiki [[en:Man ...]]
elif string[:9] == '[[en:Man ':
WorkList[i] = ''
string = WorkList[i]
j = 0
B = -1
I = -1
U = -1
K = -1
K1 = -1
while len(string) > j:
# reformatting excluded text <!-- * -->
if string[j:j+4] == '<!--':
string = string[:j] + '\n.\"' + string[j+4:]
excl = 1
j = j + 1
elif string[j:j+3] == '-->':
string = string[:j] + '\n' + string[j+3:]
excl = 0
j = j - 1
if excl == 0:
                    # Decode some HTML entities: &horbar; &laquo; &raquo; &mdash; &copy; &quot; &nbsp; &amp; &lt; &gt;
                    if string[j:j+8] == '&horbar;':
                        string = string[:j] + unicode('—', 'utf-8') + string[j+8:]
                    elif string[j:j+7] == '&laquo;':
                        string = string[:j] + unicode('«', 'utf-8') + string[j+7:]
                    elif string[j:j+7] == '&raquo;':
                        string = string[:j] + unicode('»', 'utf-8') + string[j+7:]
                    elif string[j:j+7] == '&mdash;':
                        string = string[:j] + unicode('—', 'utf-8') + string[j+7:]
                    elif string[j:j+6] == '&copy;':
                        string = string[:j] + unicode('©', 'utf-8') + string[j+6:]
                    elif string[j:j+6] == '&quot;':
                        string = string[:j] + unicode('“', 'utf-8') + string[j+6:]
                    elif string[j:j+6] == '&nbsp;':
                        string = string[:j] + unicode(' ', 'utf-8') + string[j+6:]
                    elif string[j:j+5] == '&amp;':
                        string = string[:j] + unicode('&', 'utf-8') + string[j+5:]
                    elif string[j:j+4] == '&lt;':
                        string = string[:j] + unicode('<', 'utf-8') + string[j+4:]
                    elif string[j:j+4] == '&gt;':
                        string = string[:j] + unicode('>', 'utf-8') + string[j+4:]
# reformatting "-" or "\"
elif string[j:j+1] == '-':
string = string[0:j] + '\\' + string[j:]
j = j + 1
elif string[j:j+1] == '\\':
string = string[0:j] + '\e' + string[j+1:]
j = j + 1
# reformatting "nowiki"
elif string[j:j+8] == '<nowiki>':
nowiki = 1
if nf != 2:
string = string[:j] + string[j+8:]
j = j
elif string[j:j+9] == '</nowiki>':
nowiki = 0
if nf != 2:
string = string[:j] + string[j+9:]
j = j
if nowiki == 0:
if string[j:j+5] == "'''''":
if B != -1 and I == -1 :
if tabacc == 1:
string = string[:B] + '"' + string[B+3:j] + '"' + string[j+3:]
j = j - 4
B =- 1
else:
string = string[:B] + '\\fB' + string[B+3:j] + '\\fR' + string[j+3:]
j = j + 1
B =- 1
if I != -1 and B == -1:
string = string[:I] + '\\fI' + string[I+2:j] + '\\fR' + string[j+2:]
j = j + 2
I =- 1
                        # reformatting bold text 1 (wiki ''')
elif string[j:j+3] == "'''":
if B == -1:
B = j
else:
if tabacc == 1:
string = string[:B] + '"' + string[B+3:j] + '"' + string[j+3:]
j = j - 4
B =- 1
elif j+3-B > 5:
string = string[:B] + '\\fB' + string[B+3:j] + '\\fR' + string[j+3:]
j = j + 1
B =- 1
# reformatting italic text 1
elif string[j:j+2] == "''" and B == -1:
if I == -1:
I = j
else:
if j+3-I > 2:
string = string[:I] + '\\fI' + string[I+2:j] + '\\fR' + string[j+2:]
j = j + 2
I =- 1
# reformatting "pre"
elif string[j:j+5] == '<pre>':
string = string[:j] + '\n.RS\n.nf\n' + string[j+5:]
nf = 2
j = j + 3
elif string[j:j+6] == '</pre>':
string = string[:j] + '\n.fi\n.RE\n' + string[j+6:]
nf = 0
j = j + 3
# reformatting "code"
elif string[j:j+6] == '<code>':
string = string[:j] + '\n.nf\n' + string[j+6:]
nf = 2
j = j + 3
elif string[j:j+7] == '</code>':
string = string[:j] + '\n.fi\n' + string[j+7:]
nf = 0
j = j + 3
# reformatting "tt"
elif string[j:j+4] == '<tt>':
string = string[:j] + '\n.RS\n.nf\n' + string[j+4:]
nf = 2
j = j + 3
elif string[j:j+5] == '</tt>':
if string[j+5] == '.':
string = string[:j] + '\n.fi\n.RE\n' + string[j+6:]
else:
string = string[:j] + '\n.fi\n.RE\n' + string[j+5:]
nf = 0
j = j + 3
# reformatting "...}}"
elif string[j:j+2] == '}}':
if nr == 1:
string = string[:j] + '\\fR' + string[j+2:]
nr = 0
j = j + 2
elif nr == 2:
string = string[:j] + '\n.RE\n' + string[j+2:]
nr = 0
j = j + 3
# reformatting "{{Codeline|...}}"
elif string[j:j+11] == '{{Codeline|':
string = string[:j] + '\\fB' + string[j+11:]
nr = 1
j = j + 2
# reformatting "{{Warning|...}}"
elif string[j:j+10] == '{{Warning|':
string = string[:j] + '\\fB' + string[j+10:]
nr = 1
j = j + 2
# reformatting "{{Note|...}}"
elif string[j:j+7] == '{{Note|':
string = string[:j] + '\\fI' + string[j+7:]
nr = 1
j = j + 2
# reformatting "{{Discussion|...}}"
elif string[j:j+13] == '{{Discussion|':
string = string[:j] + '\\fI' + string[j+13:]
nr = 1
j = j + 2
# reformatting "{{Filename|...}}"
elif string[j:j+11] == '{{Filename|':
string = string[:j] + '\\fI' + string[j+11:]
nr = 1
j = j + 2
# reformatting "[mailto:...]"
elif string[j:j+8] == '[mailto:':
a = j + 8
while string[a] <> ' ':
a = a + 1
b = a + 1
while string[b] <> ']':
b = b + 1
string = string[:j] + string[a+1:b] + ' <' + string[j+8:a] + '>'
# reformatting "{{Box File|...|...}}"
elif string[j:j+11] == '{{Box File|':
a = j + 11
while string[a] <> '|':
a = a + 1
string = string[:j] + '\n.TP\n.B ' + string[j+11:a] + '\n.RS\n' + string[a+1:]
nr = 2
if nf == 0:
                    # reformatting bold text 2 (<b>...</b>)
if string[j:j+3] == '<b>':
string = string[:j] + '\\fB' + string[j+3:]
j = j + 2
elif string[j:j+4] == '</b>':
string = string[:j] + '\\fR' + string[j+4:]
j = j + 2
# reformatting italic text 2
elif string[j:j+3] == '<i>':
string = string[:j] + '\\fI' + string[j+3:]
j = j + 2
elif string[j:j+4] == '</i>':
string = string[:j] + '\\fR' + string[j+4:]
j = j + 2
# format underlined text
elif string[j:j+3] == '<u>':
U = j
elif string[j:j+4] == '</u>' and U != -1:
string = string[:U] + '\\fB\\fI' + string[U+3:j] + '\\fB\\fR' + string[j+4:]
j = j + 7
U =- 1
                    # break line 1
elif string[j:j+4] == '<br>':
string = string[0:j] + '\n.br\n' + string[j+4:]
j = j + 2
                    # break line 2
elif string[j:j+6] == '<br />':
string = string[0:j] + '\n.PP\n' + string[j+6:]
j = j + 2
# format tables 1
elif string[j:j+6] == '<table':
tab = j
while len(string) > j and string[j] != '>':
j = j + 1
if string[j] == '>':
string = string[:tab] + string[j+1:]
j = tab - 1
tab = 1
else:
j = tab
tab = 0
elif string[j:j+3] == '<tr':
Ktab = j
while len(string) > j and string[j] != '>':
j = j + 1
if string[j] == '>':
tabacc = 0
string = string[:Ktab] + '\n.SS ' + string[j+1:]
j = Ktab + 4
else:
j = Ktab
elif string[j:j+4] == '</tr':
Ktab = j
while len(string) > j and string[j] != '>':
j = j + 1
if string[j] == '>':
tabacc =- 1
string = string[:Ktab] + string[j+1:]
j = Ktab - 1
else:
j = Ktab
elif string[j:j+3] == '<td':
Ktab = j
while len(string) > j and string[j] != '>':
j = j + 1
if string[j] == '>':
tabacc = tabacc + 1
if tabacc == 1:
string = string[:Ktab] + string[j+1:]
j = Ktab - 1
else:
string = string[:Ktab] + '\n.PP\n' + string[j+1:]
j = Ktab + 3
else:
j = Ktab
elif string[j:j+4] == '</td':
Ktab = j
while len(string) > j and string[j] != '>':
j = j + 1
if string[j] == '>':
string = string[:Ktab] + string[j+1:]
j = Ktab - 1
else:
j = Ktab
elif string[j:j+7] == '</table':
tab = j
while len(string) > j and string[j] != '>':
j = j + 1
if string[j] == '>':
string = string[:tab] + string[j+1:]
j = tab - 1
tab = 0
else:
j = tab
tab = 1
# format table 2 {| |- | || |}
elif string[j:j+2] == '||' and tab2 > 0 and col > 0:
string = string[:j] + '\n' + string[j+2:]
col = col + 1
# format div????
elif string[j:j+4] == '<div':
div = j
while len(string) > j and string[j] != '>':
j = j + 1
if string[j] == '>':
string = string[:div] + string[j+1:]
j = div - 1
else:
j = div
elif string[j:j+5] == '</div':
div = j
while len(string) > j and string[j] != '>':
j = j + 1
if string[j] == '>':
string = string[:div] + string[j+1:]
j = div - 1
else:
j = div
# format internal links
elif string[j:j+2] == '[[':
K = j
elif string[j] == '|':
if K != -1:
K1 = j
elif string[j:j+2] == ']]':
if K != -1 and K1 != -1:
string = string[:K] + string[K1+1:j] + string[j+2:]
j = j - K1 + K - 2
K =- 1
K1 =- 1
elif K != -1 and K1 == -1:
string = string[:K] + string[K+2:j] + string[j+2:]
j = j - 4
K =- 1
j = j + 1
WorkList[i] = string
i = i + 1
# Make title .TH
string = '\n'
string = string.encode('utf-8')
string = unicode(string, 'utf-8')
WorkList.insert(0, string)
########## Output roff formatted file ##########
# Output encoded symbols:
string = ''
for i in range(len(WorkList)):
string = string + WorkList[i]
    # Delete empty lines and a few other leftovers, just to make the roff output cleaner:
i = 0
while len(string) > i:
if string[i:i+8] == '.RE\n\n.RS':
string = string[:i+3] + string[i+4:]
if string[i:i+8] == '.RE\n\n.br':
string = string[:i+3] + string[i+4:]
if string[i:i+6] == '\n.SS\n':
string = string[:i+5] + string[i+6:]
if string[i:i+5] == '\n\n.RE':
string = string[:i+1] + string[i+2:]
if string[i:i+5] == '\n\n\n\n\n':
string = string[:i] + string[i+3:]
if string[i:i+4] == '\n\n\n\n':
string = string[:i] + string[i+2:]<|fim▁hole|>#---------------------------------------------------------------------------<|fim▁end|> | if string[i:i+3] == '\n\n\n':
string = string[:i] + string[i+1:]
i = i + 1
return string |
<|file_name|>module.rs<|end_file_name|><|fim▁begin|>use std::collections::{HashMap, HashSet};
use super::{Expression, Call, TreePrinter, TypeDeclaration, Import, ImportName, Symbol, SymbolType, GlobalBinding, Function, ExternalFunction, prefix};
use target::Target;
use compileerror::CompileResult;
pub struct Module
{
pub name: String,
pub globals: HashMap<String, GlobalBinding>,
pub functions: HashMap<String, Function>,
pub externals: HashMap<String, ExternalFunction>,
pub types: HashMap<String, TypeDeclaration>,
pub import_names: HashSet<ImportName>,
pub type_checked: bool,
}
impl Module
{
pub fn new(name: &str) -> Module
{
Module{
name: name.into(),
globals: HashMap::new(),
functions: HashMap::new(),
externals: HashMap::new(),
types: HashMap::new(),
import_names: HashSet::new(),
type_checked: false,
}
}
fn is_imported_call(&self, call: &Call) -> bool {
!self.functions.contains_key(&call.callee.name) &&
!self.externals.contains_key(&call.callee.name)
}
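    /// Collect a symbol for every call to a function that is not defined in this
    /// module (assumed to be satisfied by an import), keyed by callee name.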
fn get_imported_symbols(&self, target: &Target) -> HashMap<String, Symbol>
{
let mut symbols = HashMap::new();
for func in self.functions.values() {
let mut find_imported_calls = |e: &Expression| -> CompileResult<()> {
match *e {
Expression::Call(ref call) if self.is_imported_call(call) => {
let typ = call.callee_type(target.int_size);
let symbol = Symbol::new(&call.callee.name, &typ, false, &call.span, SymbolType::External);
symbols.insert(call.callee.name.clone(), symbol);
}
_ => (),
}
Ok(())
};
let _ = func.expression.visit(&mut find_imported_calls);
}
symbols
}
pub fn get_exported_symbols(&self, target: &Target) -> Import
{
let mut import = Import::new(self.name.clone());
for (name, binding) in &self.globals {
import.symbols.insert(name.clone(), Symbol::new(name, &binding.typ, binding.mutable, &binding.span, SymbolType::Global));
}
for (name, function) in &self.functions {
import.symbols.insert(name.clone(), Symbol::new(name, &function.sig.typ, false, &function.span, SymbolType::Normal));
if function.is_generic() {
import.generics.insert(name.clone(), function.clone());
}
}
for (name, function) in &self.externals {
import.symbols.insert(name.clone(), Symbol::new(name, &function.sig.typ, false, &function.span, SymbolType::External));
}
for (name, type_decl) in &self.types {
import.symbols.insert(name.clone(), Symbol::new(name, &type_decl.get_type(), false, &type_decl.span(), SymbolType::Normal));
}
import.imported_symbols = self.get_imported_symbols(target);
import
}
}
impl TreePrinter for Module
{
fn print(&self, level: usize)
{
let p = prefix(level);
println!("{}Module: {}", p, self.name);
for i in &self.import_names {
println!("{} import {}", p, i.to_namespace_string());
}
println!("{}", p);
for t in self.types.values() {
t.print(level + 1);
println!("{}", p);
}
for global in self.globals.values() {
global.print(level + 1);
println!("{}", p);
}
for func in self.externals.values() {
func.print(level + 1);
println!("{}", p);
}
for func in self.functions.values() {
func.print(level + 1);
println!("{}", p);
}
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>canihaz.js<|end_file_name|><|fim▁begin|>'use strict';
//<|fim▁hole|>// Third party modules.
//
module.exports = require('canihaz')({
location: __dirname,
dot: 'smithy'
});<|fim▁end|> | |
<|file_name|>GlobalAction.java<|end_file_name|><|fim▁begin|>/*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.server.batch;
import org.apache.commons.io.IOUtils;
import org.sonar.api.server.ws.Request;
import org.sonar.api.server.ws.Response;
import org.sonar.api.server.ws.WebService;
import org.sonar.batch.protocol.input.GlobalRepositories;
import org.sonar.db.metric.MetricDto;
import org.sonar.core.permission.GlobalPermissions;
import org.sonar.db.DbSession;
import org.sonar.db.MyBatis;
import org.sonar.db.property.PropertiesDao;
import org.sonar.db.property.PropertyDto;
import org.sonar.server.db.DbClient;
import org.sonar.server.exceptions.ForbiddenException;
import org.sonar.server.plugins.MimeTypes;
import org.sonar.server.user.UserSession;
public class GlobalAction implements BatchWsAction {
private final DbClient dbClient;
private final PropertiesDao propertiesDao;
private final UserSession userSession;
public GlobalAction(DbClient dbClient, PropertiesDao propertiesDao, UserSession userSession) {
this.dbClient = dbClient;
this.propertiesDao = propertiesDao;
this.userSession = userSession;
}
@Override
public void define(WebService.NewController controller) {
controller.createAction("global")
.setDescription("Return metrics and global properties")
.setSince("4.5")
.setInternal(true)
.setHandler(this);
}
@Override
public void handle(Request request, Response response) throws Exception {
boolean hasScanPerm = userSession.hasGlobalPermission(GlobalPermissions.SCAN_EXECUTION);
boolean hasPreviewPerm = userSession.hasGlobalPermission(GlobalPermissions.PREVIEW_EXECUTION);
if (!hasPreviewPerm && !hasScanPerm) {
throw new ForbiddenException(Messages.NO_PERMISSION);
}
DbSession session = dbClient.openSession(false);
try {
GlobalRepositories ref = new GlobalRepositories();
addMetrics(ref, session);
addSettings(ref, hasScanPerm, hasPreviewPerm, session);
response.stream().setMediaType(MimeTypes.JSON);
IOUtils.write(ref.toJson(), response.stream().output());
} finally {
MyBatis.closeQuietly(session);
}
}
private void addMetrics(GlobalRepositories ref, DbSession session) {
for (MetricDto metric : dbClient.metricDao().selectEnabled(session)) {
ref.addMetric(
new org.sonar.batch.protocol.input.Metric(metric.getId(), metric.getKey(),
metric.getValueType(),
metric.getDescription(),
metric.getDirection(),
metric.getKey(),
metric.isQualitative(),
metric.isUserManaged(),
metric.getWorstValue(),
metric.getBestValue(),
metric.isOptimizedBestValue()));
}
}
private void addSettings(GlobalRepositories ref, boolean hasScanPerm, boolean hasPreviewPerm, DbSession session) {
for (PropertyDto propertyDto : propertiesDao.selectGlobalProperties(session)) {
String key = propertyDto.getKey();<|fim▁hole|> String value = propertyDto.getValue();
if (isPropertyAllowed(key, hasScanPerm, hasPreviewPerm)) {
ref.addGlobalSetting(key, value);
}
}
}
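  // Secured settings ("*.secured") are only returned to users with scan permission;
  // ".license"-secured settings are also returned when the user has preview permission.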
private static boolean isPropertyAllowed(String key, boolean hasScanPerm, boolean hasPreviewPerm) {
return !key.contains(".secured") || hasScanPerm || (key.contains(".license") && hasPreviewPerm);
}
}<|fim▁end|> | |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>/*
Copyright 2021 The Kubeflow Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"encoding/json"
"fmt"
"os"
"strings"
"github.com/go-openapi/spec"
"github.com/kubeflow/katib/pkg/apis/v1beta1"
"k8s.io/klog"
"k8s.io/kube-openapi/pkg/common"
)
// Generate OpenAPI spec definitions for Katib Resource
func main() {
if len(os.Args) <= 2 {
klog.Fatal("Supply Swagger version and Katib Version")
}
version := os.Args[1]
if !strings.HasPrefix(version, "v") {
version = "v" + version
}
refCallback := func(name string) spec.Ref {
return spec.MustCreateRef("#/definitions/" + common.EscapeJsonPointer(swaggify(name)))
}
katibVersion := os.Args[2]
oAPIDefs := make(map[string]common.OpenAPIDefinition)
if katibVersion == "v1beta1" {
oAPIDefs = v1beta1.GetOpenAPIDefinitions(refCallback)
} else {
klog.Fatalf("Katib version %v is not supported", katibVersion)
}
defs := spec.Definitions{}
for defName, val := range oAPIDefs {
defs[swaggify(defName)] = val.Schema
}
swagger := spec.Swagger{
SwaggerProps: spec.SwaggerProps{
Swagger: "2.0",
Definitions: defs,
Paths: &spec.Paths{Paths: map[string]spec.PathItem{}},
Info: &spec.Info{
InfoProps: spec.InfoProps{
Title: "Katib",
Description: "Swagger description for Katib",
Version: version,
},
},
},
}
jsonBytes, err := json.MarshalIndent(swagger, "", " ")
if err != nil {
klog.Fatal(err.Error())
}
fmt.Println(string(jsonBytes))
}
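// swaggify strips the Go package paths from a generated definition name so it can
// be used as a flat Swagger definition key, e.g. (illustrative)
// "github.com/kubeflow/katib/pkg/apis/controller/common/v1beta1.ObjectiveSpec"
// becomes "v1beta1.ObjectiveSpec".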
func swaggify(name string) string {
name = strings.Replace(name, "github.com/kubeflow/katib/pkg/apis/controller/common/", "", -1)
name = strings.Replace(name, "github.com/kubeflow/katib/pkg/apis/controller/experiments/", "", -1)
name = strings.Replace(name, "github.com/kubeflow/katib/pkg/apis/controller/suggestions", "", -1)
name = strings.Replace(name, "github.com/kubeflow/katib/pkg/apis/controller/trials", "", -1)<|fim▁hole|> name = strings.Replace(name, "/", ".", -1)
return name
}<|fim▁end|> | name = strings.Replace(name, "k8s.io/api/core/", "", -1)
name = strings.Replace(name, "k8s.io/apimachinery/pkg/apis/meta/", "", -1) |
<|file_name|>averesultgraph-consumer-bread.py<|end_file_name|><|fim▁begin|># Draw a graph whose x-axis is the number of nodes in the network.
import re
import sys
import os
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import NullFormatter # useful for `logit` scale
from mininet.log import setLogLevel, output, info
# Read the experimental result data files in a specified directory and generate a list of data file names.
def sortDataFile(dataFilePath, dataFileDir):
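    # Data file names are assumed to look like "statResult-<nodes>.dat"; the text
    # between "-" and "." (the node count) is used as the numeric sort key below.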
dataFileList = os.listdir("%s/data/%s" % (dataFilePath, dataFileDir))
    # keep only the statistic result files (avoid removing items from the list
    # while iterating over it, which would skip entries)
    dataFileList = [name for name in dataFileList if "statResult" in name]
    # Sort data files by file name (the number of nodes) using bubble sort
i = 0
while i < len(dataFileList)-1:
try:
j = 0
while j < len(dataFileList)-1-i:
fileName = dataFileList[j].strip()
startChar = fileName.index("-") + len("-")
endChar = fileName.index(".", startChar)
nodeNumber_j = fileName[startChar : endChar]
# after j
fileName=dataFileList[j+1].strip()
startChar = fileName.index("-") + len("-")
endChar = fileName.index(".", startChar)
nodeNumber_j1 = fileName[startChar : endChar]
if int(nodeNumber_j1.strip()) < int(nodeNumber_j.strip()):
tmp = dataFileList[j]
dataFileList[j] = dataFileList[j+1]
dataFileList[j+1] = tmp
j = j+1
except:
pass
i = i+1
return dataFileList
# Read a data file and convert it to a two-dimensional list.
def readFileData(dataFilePath, dataFileDir, dataFileName):
data_file = open ("%s/data/%s/%s" % (dataFilePath, dataFileDir, dataFileName), "r")
lineField = []
dataLine = []
for line in data_file:
lineString = ""
j=0
# read a line data and generate list of fields
while j < len(line):
if not (line[j] == " "):
lineString = lineString + str(line[j].strip())
if j == len(line)-1:
lineField.append(lineString.strip())
else:
lineField.append(lineString.strip())
lineString = ""
j = j+1
dataLine.append(lineField)
lineField = []
return dataLine
# Sort a two-dimensional list
def listSort(dataList, sortCol):
"sortCol: the specified colume used for sorting."
i = 0
while i < len(dataList)-1:
try:
j = 0
while j < len(dataList)-1-i:
if float(dataList[j+1][sortCol]) < float(dataList[j][sortCol].strip()):
tmp = dataList[j]
dataList[j] = dataList[j+1]
dataList[j+1] = tmp
j = j+1
except:
pass
i = i+1
return dataList
#Calculate average statistic result in one experiment.
def aveStatResult(dataFilePath, dataFileDir, dataFileName, aveCol):
"aveCol: the specified colume used for calculating a average value"
    # Calculate the average value according to a specified column
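    # Assumed row layout (whitespace-separated), based on the column indices used below:
    #   col 0: number of consumer nodes (the grouping key when aveCol=0)
    #   col 2: interests sent by consumers   col 3: delay   col 4: total interests on the network
    #   col 8: interest PLR   col 9: data PLR   col 10: overall PLR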
# Read data file and generate a list
dataList = readFileData(dataFilePath, dataFileDir, dataFileName)
sortDataList = listSort(dataList, aveCol)
conNodes = []
aveConNumOutInt = []
aveDelay = []
aveNumOutInt = []
aveIntPLR = []
aveDataPLR = []
avePLR = []
i = 0
while i < len(sortDataList):
conNumOutInt = float(sortDataList[i][2].strip())
Delay = float(sortDataList[i][3].strip())
numOutInt = float(sortDataList[i][4].strip())
IntPLR = float(sortDataList[i][8].strip())
DataPLR = float(sortDataList[i][9].strip())
PLR = float(sortDataList[i][10].strip())
tmp = sortDataList[i][aveCol].strip()
j = i+1
n = 1
flag = True
while (j < len(sortDataList)) and flag:
if sortDataList[j][aveCol] == tmp:
n = n + 1
conNumOutInt = conNumOutInt + float(sortDataList[j][2].strip())
Delay = Delay + float(sortDataList[j][3].strip())
numOutInt = numOutInt + float(sortDataList[j][4].strip())
IntPLR = IntPLR + float(sortDataList[j][8].strip())
DataPLR = DataPLR + float(sortDataList[j][9].strip())
PLR = PLR + float(sortDataList[j][10].strip())
j = j+1
else:
flag = False
i = j
conNodes.append(int(tmp))
aveConNumOutInt.append(conNumOutInt/n)
aveDelay.append(Delay/n)
aveNumOutInt.append(numOutInt/n)
aveIntPLR.append(IntPLR/n)
aveDataPLR.append(DataPLR/n)
avePLR.append(PLR/n)
return conNodes, aveConNumOutInt, aveDelay, aveNumOutInt, aveIntPLR, aveDataPLR, avePLR
# randomly generate color
def get_cmap(n, name='hsv'):
'''Returns a function that maps each index in 0, 1, ..., n-1 to a distinct
RGB color; the keyword argument name must be a standard mpl colormap name.'''
return plt.cm.get_cmap(name, n)
# Draw statistical graph
def drawStatGraph(dataFilePath, dataFileDir, aveCol):
# setting a style to use
plt.style.use('fivethirtyeight')
# create a figure
fig = plt.figure()
# define subplots and their positions in figure
plt1 = fig.add_subplot(221, axisbg='white')
plt2 = fig.add_subplot(222, axisbg='white')
plt3 = fig.add_subplot(223, axisbg='white')
plt4 = fig.add_subplot(224, axisbg='white')
# plotting the line 1 points<|fim▁hole|> plt3.axis([5, 35, 0, 200])
plt4.axis([5, 35, 0, 2.5])
dataFileList = sortDataFile (dataFilePath, dataFileDir)
colors = ["b","g","r","y","black"]
nodes = [10, 15, 20, 25,30]
    # open all data files for the experiment and average the data by the number of consumers;
    # n iterates over the 5 consumer counts (here {1, 3, 5, 7, 9}).
    # For each count, extract one column from every experimental result and draw one curve of the averaged values.
n = 0
while n < 5:
col = n
        # generate data for drawing the curves
conNum = []
        aveCNOI = [] # interest packets sent by consumers
aveDLY = [] # delay
        aveNOI = [] # the total number of interest packets transmitted on the network
aveIPLR = [] # Packet loss rate of interest packet
for dataFileName in dataFileList:
# calculate average value according to the number of consumers in statResult-xx.dat
# the returned results are saved in some List.
conNodes, aveConNumOutInt, aveDelay, aveNumOutInt, aveIntPLR, aveDataPLR, avePLR = \
aveStatResult(dataFilePath, dataFileDir, dataFileName, aveCol)
startChar = dataFileName.index("-") + len("-")
endChar = dataFileName.index(".", startChar)
nodeNumber = dataFileName[startChar : endChar]
            # extract the data of the specified column.
conNum = conNodes[col]
aveCNOI.append(aveConNumOutInt[col])
aveDLY.append(aveDelay[col])
aveNOI.append(aveNumOutInt[col])
aveIPLR.append(aveIntPLR[col])
# draw one cave line according the specified number of consumers, for example 1,3,5,7, or 9
labelChar = "CN=" + str(conNum)
colorN = int((n+1)*5)/5 - 1
color = colors[colorN]
plt1.plot(nodes, aveCNOI, color=color, linestyle='solid', label = labelChar,marker='s',markerfacecolor=color, markersize=10)
plt2.plot(nodes, aveDLY, color=color, linestyle='solid', label = labelChar,marker='s', markerfacecolor=color, markersize=10)
plt3.plot(nodes, aveNOI, color=color, linestyle='solid',label=labelChar, marker='s', markerfacecolor=color, markersize=10)
plt4.plot(nodes, aveIPLR, color=color, linestyle='solid', label = labelChar,marker='s', markerfacecolor=color, markersize=10)
n = n+1
plt1.set_title('The Number of Interest Packet')
plt2.set_title('Average Delay')
plt3.set_title('The Total Number of Interest Packet')
plt4.set_title('Packet Loss Rate of Interest Packet')
#plt1.xlabel('nodes')
#plt1.ylabel('ISR')
#plt1.title('Average Delay')
plt1.legend(loc='upper left')
plt2.legend(loc='upper right')
plt3.legend(loc='upper left')
plt4.legend(loc='upper left')
plt.show()
if __name__ == '__main__':
setLogLevel('info')
dataFilePath = os.path.abspath(os.path.dirname(sys.argv[0]))
# drawStatGraph(dataFilePath, 'oppo', 0)
drawStatGraph(dataFilePath, 'bread', 0)<|fim▁end|> | plt1.axis([5, 35, 4, 10])
plt2.axis([5, 35, 0, 15]) |
<|file_name|>udprelay.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# SOCKS5 UDP Request
# +----+------+------+----------+----------+----------+
# |RSV | FRAG | ATYP | DST.ADDR | DST.PORT | DATA |
# +----+------+------+----------+----------+----------+
# | 2 | 1 | 1 | Variable | 2 | Variable |
# +----+------+------+----------+----------+----------+
# SOCKS5 UDP Response
# +----+------+------+----------+----------+----------+
# |RSV | FRAG | ATYP | DST.ADDR | DST.PORT | DATA |
# +----+------+------+----------+----------+----------+
# | 2 | 1 | 1 | Variable | 2 | Variable |
# +----+------+------+----------+----------+----------+
# shadowsocks UDP Request (before encrypted)
# +------+----------+----------+----------+
# | ATYP | DST.ADDR | DST.PORT | DATA |
# +------+----------+----------+----------+
# | 1 | Variable | 2 | Variable |
# +------+----------+----------+----------+
# shadowsocks UDP Response (before encrypted)
# +------+----------+----------+----------+
# | ATYP | DST.ADDR | DST.PORT | DATA |
# +------+----------+----------+----------+
# | 1 | Variable | 2 | Variable |
# +------+----------+----------+----------+
# shadowsocks UDP Request and Response (after encrypted)
# +-------+--------------+
# | IV | PAYLOAD |
# +-------+--------------+
# | Fixed | Variable |
# +-------+--------------+
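# Illustrative example (not part of the original comments above): a plaintext
# shadowsocks UDP request carrying a DNS query to 8.8.8.8:53 would look like
#   ATYP     = 0x01 (IPv4)
#   DST.ADDR = 08 08 08 08
#   DST.PORT = 00 35
#   DATA     = <raw DNS query bytes>
# i.e. roughly b'\x01' + socket.inet_aton('8.8.8.8') + struct.pack('>H', 53) + dns_query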
# HOW TO NAME THINGS
# ------------------
# `dest` means destination server, which is from DST fields in the SOCKS5
# request
# `local` means local server of shadowsocks
# `remote` means remote server of shadowsocks
# `client` means UDP clients that connects to other servers
# `server` means the UDP server that handles user requests
from __future__ import absolute_import, division, print_function, \
with_statement
import time
import socket
import logging
import struct
import errno
import random
import binascii
import traceback
import threading
from shadowsocks import encrypt, obfs, eventloop, lru_cache, common, shell
from shadowsocks.common import pre_parse_header, parse_header, pack_addr
# for each handler, we have 2 stream directions:
# upstream: from client to server direction
# read local and write to remote
# downstream: from server to client direction
# read remote and write to local
STREAM_UP = 0
STREAM_DOWN = 1
# for each stream, it's waiting for reading, or writing, or both
WAIT_STATUS_INIT = 0
WAIT_STATUS_READING = 1
WAIT_STATUS_WRITING = 2
WAIT_STATUS_READWRITING = WAIT_STATUS_READING | WAIT_STATUS_WRITING
BUF_SIZE = 65536
DOUBLE_SEND_BEG_IDS = 16
POST_MTU_MIN = 500
POST_MTU_MAX = 1400
SENDING_WINDOW_SIZE = 8192
STAGE_INIT = 0
STAGE_RSP_ID = 1
STAGE_DNS = 2
STAGE_CONNECTING = 3
STAGE_STREAM = 4
STAGE_DESTROYED = -1
CMD_CONNECT = 0
CMD_RSP_CONNECT = 1
CMD_CONNECT_REMOTE = 2
CMD_RSP_CONNECT_REMOTE = 3
CMD_POST = 4
CMD_SYN_STATUS = 5
CMD_POST_64 = 6
CMD_SYN_STATUS_64 = 7
CMD_DISCONNECT = 8
CMD_VER_STR = b"\x08"
RSP_STATE_EMPTY = b""
RSP_STATE_REJECT = b"\x00"
RSP_STATE_CONNECTED = b"\x01"
RSP_STATE_CONNECTEDREMOTE = b"\x02"
RSP_STATE_ERROR = b"\x03"
RSP_STATE_DISCONNECT = b"\x04"
RSP_STATE_REDIRECT = b"\x05"
def client_key(source_addr, server_af):
# notice this is server af, not dest af
return '%s:%s:%d' % (source_addr[0], source_addr[1], server_af)
class UDPRelay(object):
def __init__(self, config, dns_resolver, is_local, stat_callback=None, stat_counter=None):
self._config = config
if config.get('connect_verbose_info', 0) > 0:
common.connect_log = logging.info
if is_local:
self._listen_addr = config['local_address']
self._listen_port = config['local_port']
self._remote_addr = config['server']
self._remote_port = config['server_port']
else:
self._listen_addr = config['server']
self._listen_port = config['server_port']
self._remote_addr = None
self._remote_port = None
self._dns_resolver = dns_resolver
self._password = common.to_bytes(config['password'])
self._method = config['method']
self._timeout = config['timeout']
self._is_local = is_local
self._udp_cache_size = config['udp_cache']
self._cache = lru_cache.LRUCache(timeout=config['udp_timeout'],
close_callback=self._close_client_pair)
self._cache_dns_client = lru_cache.LRUCache(timeout=10,
close_callback=self._close_client_pair)
self._client_fd_to_server_addr = {}
#self._dns_cache = lru_cache.LRUCache(timeout=1800)
self._eventloop = None
self._closed = False
self.server_transfer_ul = 0
self.server_transfer_dl = 0
self.server_users = {}
self.server_user_transfer_ul = {}
self.server_user_transfer_dl = {}
if common.to_str(config['protocol']) in obfs.mu_protocol():
self._update_users(None, None)
self.protocol_data = obfs.obfs(config['protocol']).init_data()
self._protocol = obfs.obfs(config['protocol'])
server_info = obfs.server_info(self.protocol_data)
server_info.host = self._listen_addr
server_info.port = self._listen_port
server_info.users = self.server_users
server_info.protocol_param = config['protocol_param']
server_info.obfs_param = ''
server_info.iv = b''
server_info.recv_iv = b''
server_info.key_str = common.to_bytes(config['password'])
server_info.key = encrypt.encrypt_key(self._password, self._method)
server_info.head_len = 30
server_info.tcp_mss = 1452
server_info.buffer_size = BUF_SIZE
server_info.overhead = 0
self._protocol.set_server_info(server_info)
self._sockets = set()
self._fd_to_handlers = {}
self._reqid_to_hd = {}
self._data_to_write_to_server_socket = []
self._timeout_cache = lru_cache.LRUCache(timeout=self._timeout,
close_callback=self._close_tcp_client)
self._bind = config.get('out_bind', '')
self._bindv6 = config.get('out_bindv6', '')
self._ignore_bind_list = config.get('ignore_bind', [])
if 'forbidden_ip' in config:
self._forbidden_iplist = config['forbidden_ip']
else:
self._forbidden_iplist = None
if 'forbidden_port' in config:
self._forbidden_portset = config['forbidden_port']
else:
self._forbidden_portset = None
addrs = socket.getaddrinfo(self._listen_addr, self._listen_port, 0,
socket.SOCK_DGRAM, socket.SOL_UDP)
if len(addrs) == 0:
raise Exception("can't get addrinfo for %s:%d" %
(self._listen_addr, self._listen_port))
af, socktype, proto, canonname, sa = addrs[0]
server_socket = socket.socket(af, socktype, proto)
server_socket.bind((self._listen_addr, self._listen_port))
server_socket.setblocking(False)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024 * 1024)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1024 * 1024)
self._server_socket = server_socket
self._stat_callback = stat_callback
def _get_a_server(self):
server = self._config['server']
server_port = self._config['server_port']
if type(server_port) == list:
server_port = random.choice(server_port)
if type(server) == list:
server = random.choice(server)
logging.debug('chosen server: %s:%d', server, server_port)
return server, server_port
def get_ud(self):
return (self.server_transfer_ul, self.server_transfer_dl)
def get_users_ud(self):
ret = (self.server_user_transfer_ul.copy(), self.server_user_transfer_dl.copy())
return ret
def _update_users(self, protocol_param, acl):
if protocol_param is None:
protocol_param = self._config['protocol_param']
param = common.to_bytes(protocol_param).split(b'#')
if len(param) == 2:
user_list = param[1].split(b',')
if user_list:
for user in user_list:
items = user.split(b':')
if len(items) == 2:
user_int_id = int(items[0])
uid = struct.pack('<I', user_int_id)
if acl is not None and user_int_id not in acl:
self.del_user(uid)
else:
passwd = items[1]
self.add_user(uid, {'password':passwd})
def _update_user(self, id, passwd):
uid = struct.pack('<I', id)
self.add_user(uid, passwd)
def update_users(self, users):
for uid in list(self.server_users.keys()):
id = struct.unpack('<I', uid)[0]
if id not in users:
self.del_user(uid)
for id in users:
uid = struct.pack('<I', id)
self.add_user(uid, users[id])
def add_user(self, uid, cfg): # user: binstr[4], passwd: str
passwd = cfg['password']
self.server_users[uid] = common.to_bytes(passwd)
def del_user(self, uid):
if uid in self.server_users:
del self.server_users[uid]
def add_transfer_u(self, user, transfer):
if user is None:
self.server_transfer_ul += transfer
else:
if user not in self.server_user_transfer_ul:
self.server_user_transfer_ul[user] = 0
self.server_user_transfer_ul[user] += transfer + self.server_transfer_ul
self.server_transfer_ul = 0
def add_transfer_d(self, user, transfer):
if user is None:
self.server_transfer_dl += transfer
else:
if user not in self.server_user_transfer_dl:
self.server_user_transfer_dl[user] = 0
self.server_user_transfer_dl[user] += transfer + self.server_transfer_dl
self.server_transfer_dl = 0
def _close_client_pair(self, client_pair):
client, uid = client_pair
self._close_client(client)
def _close_client(self, client):
if hasattr(client, 'close'):
if not self._is_local:
if client.fileno() in self._client_fd_to_server_addr:
logging.debug('close_client: %s' %
(self._client_fd_to_server_addr[client.fileno()],))
else:
client.info('close_client')
self._sockets.remove(client.fileno())
self._eventloop.remove(client)
del self._client_fd_to_server_addr[client.fileno()]
client.close()
else:
# just an address
client.info('close_client pass %s' % client)
pass
def _handel_protocol_error(self, client_address, ogn_data):
#raise Exception('can not parse header')
logging.warn("Protocol ERROR, UDP ogn data %s from %s:%d" % (binascii.hexlify(ogn_data), client_address[0], client_address[1]))
def _socket_bind_addr(self, sock, af):
bind_addr = ''
if self._bind and af == socket.AF_INET:
bind_addr = self._bind
elif self._bindv6 and af == socket.AF_INET6:
bind_addr = self._bindv6
bind_addr = bind_addr.replace("::ffff:", "")
if bind_addr in self._ignore_bind_list:
bind_addr = None
if bind_addr:
local_addrs = socket.getaddrinfo(bind_addr, 0, 0, socket.SOCK_DGRAM, socket.SOL_UDP)
if local_addrs[0][0] == af:
logging.debug("bind %s" % (bind_addr,))
try:
sock.bind((bind_addr, 0))
except Exception as e:
logging.warn("bind %s fail" % (bind_addr,))
def _handle_server(self):
server = self._server_socket
data, r_addr = server.recvfrom(BUF_SIZE)
ogn_data = data
if not data:
logging.debug('UDP handle_server: data is empty')
if self._stat_callback:
self._stat_callback(self._listen_port, len(data))
uid = None
if self._is_local:
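            # on the local (SOCKS5) side the datagram still carries the 3-byte
            # RSV/FRAG header shown at the top of this file; it is stripped below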
frag = common.ord(data[2])
if frag != 0:
logging.warn('drop a message since frag is not 0')
return
else:
data = data[3:]
else:
ref_iv = [0]
data = encrypt.encrypt_all_iv(self._protocol.obfs.server_info.key, self._method, 0, data, ref_iv)
# decrypt data
if not data:
logging.debug('UDP handle_server: data is empty after decrypt')
return
self._protocol.obfs.server_info.recv_iv = ref_iv[0]
data, uid = self._protocol.server_udp_post_decrypt(data)
#logging.info("UDP data %s" % (binascii.hexlify(data),))
if not self._is_local:
data = pre_parse_header(data)
if data is None:
return
try:
header_result = parse_header(data)
except:
self._handel_protocol_error(r_addr, ogn_data)
return
if header_result is None:
self._handel_protocol_error(r_addr, ogn_data)
return
connecttype, addrtype, dest_addr, dest_port, header_length = header_result
if self._is_local:
addrtype = 3
server_addr, server_port = self._get_a_server()
else:
server_addr, server_port = dest_addr, dest_port
if (addrtype & 7) == 3:
af = common.is_ip(server_addr)
if af == False:
handler = common.UDPAsyncDNSHandler((data, r_addr, uid, header_length))
handler.resolve(self._dns_resolver, (server_addr, server_port), self._handle_server_dns_resolved)
else:
self._handle_server_dns_resolved("", (server_addr, server_port), server_addr, (data, r_addr, uid, header_length))
else:
self._handle_server_dns_resolved("", (server_addr, server_port), server_addr, (data, r_addr, uid, header_length))
def _handle_server_dns_resolved(self, error, remote_addr, server_addr, params):
if error:
return
data, r_addr, uid, header_length = params
user_id = self._listen_port
try:
server_port = remote_addr[1]
addrs = socket.getaddrinfo(server_addr, server_port, 0,
socket.SOCK_DGRAM, socket.SOL_UDP)
if not addrs: # drop
return
af, socktype, proto, canonname, sa = addrs[0]
server_addr = sa[0]
key = client_key(r_addr, af)
client_pair = self._cache.get(key, None)
if client_pair is None:
client_pair = self._cache_dns_client.get(key, None)
if client_pair is None:
if self._forbidden_iplist:
if common.to_str(sa[0]) in self._forbidden_iplist:
logging.debug('IP %s is in forbidden list, drop' % common.to_str(sa[0]))
# drop
return
if self._forbidden_portset:
if sa[1] in self._forbidden_portset:
logging.debug('Port %d is in forbidden list, reject' % sa[1])
# drop
return
client = socket.socket(af, socktype, proto)
client_uid = uid
client.setblocking(False)
self._socket_bind_addr(client, af)
is_dns = False
if len(data) > header_length + 13 and data[header_length + 4 : header_length + 12] == b"\x00\x01\x00\x00\x00\x00\x00\x00":
is_dns = True
else:
pass
if sa[1] == 53 and is_dns: #DNS
logging.debug("DNS query %s from %s:%d" % (common.to_str(sa[0]), r_addr[0], r_addr[1]))
self._cache_dns_client[key] = (client, uid)
else:
self._cache[key] = (client, uid)
self._client_fd_to_server_addr[client.fileno()] = (r_addr, af)
self._sockets.add(client.fileno())
self._eventloop.add(client, eventloop.POLL_IN, self)
logging.debug('UDP port %5d sockets %d' % (self._listen_port, len(self._sockets)))
if uid is not None:
user_id = struct.unpack('<I', client_uid)[0]
else:
client, client_uid = client_pair
self._cache.clear(self._udp_cache_size)
self._cache_dns_client.clear(16)
if self._is_local:
ref_iv = [encrypt.encrypt_new_iv(self._method)]
self._protocol.obfs.server_info.iv = ref_iv[0]
data = self._protocol.client_udp_pre_encrypt(data)
#logging.debug("%s" % (binascii.hexlify(data),))
data = encrypt.encrypt_all_iv(self._protocol.obfs.server_info.key, self._method, 1, data, ref_iv)
if not data:
return
else:
data = data[header_length:]
if not data:
return
except Exception as e:
shell.print_exception(e)
logging.error("exception from user %d" % (user_id,))
try:
client.sendto(data, (server_addr, server_port))
self.add_transfer_u(client_uid, len(data))
if client_pair is None: # new request
addr, port = client.getsockname()[:2]
common.connect_log('UDP data to %s(%s):%d from %s:%d by user %d' %
(common.to_str(remote_addr[0]), common.to_str(server_addr), server_port, addr, port, user_id))
except IOError as e:
err = eventloop.errno_from_exception(e)
logging.warning('IOError sendto %s:%d by user %d' % (server_addr, server_port, user_id))
if err in (errno.EINPROGRESS, errno.EAGAIN):
pass
else:
shell.print_exception(e)
def _handle_client(self, sock):
data, r_addr = sock.recvfrom(BUF_SIZE)
if not data:
logging.debug('UDP handle_client: data is empty')
return
if self._stat_callback:
self._stat_callback(self._listen_port, len(data))
client_addr = self._client_fd_to_server_addr.get(sock.fileno())
client_uid = None
if client_addr:
key = client_key(client_addr[0], client_addr[1])
client_pair = self._cache.get(key, None)
client_dns_pair = self._cache_dns_client.get(key, None)
if client_pair:
client, client_uid = client_pair
elif client_dns_pair:
client, client_uid = client_dns_pair
if not self._is_local:
addrlen = len(r_addr[0])
if addrlen > 255:
# drop
return
data = pack_addr(r_addr[0]) + struct.pack('>H', r_addr[1]) + data
ref_iv = [encrypt.encrypt_new_iv(self._method)]
self._protocol.obfs.server_info.iv = ref_iv[0]
data = self._protocol.server_udp_pre_encrypt(data, client_uid)
response = encrypt.encrypt_all_iv(self._protocol.obfs.server_info.key, self._method, 1,
data, ref_iv)
if not response:
return
else:
ref_iv = [0]
data = encrypt.encrypt_all_iv(self._protocol.obfs.server_info.key, self._method, 0,
data, ref_iv)
if not data:
return
self._protocol.obfs.server_info.recv_iv = ref_iv[0]
data = self._protocol.client_udp_post_decrypt(data)
header_result = parse_header(data)
if header_result is None:
return
#connecttype, dest_addr, dest_port, header_length = header_result
#logging.debug('UDP handle_client %s:%d to %s:%d' % (common.to_str(r_addr[0]), r_addr[1], dest_addr, dest_port))
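            # prepend the 3-byte SOCKS5 UDP response header (RSV = 0x0000, FRAG = 0)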
            response = b'\x00\x00\x00' + data
if client_addr:
if client_uid:
self.add_transfer_d(client_uid, len(response))
else:
self.server_transfer_dl += len(response)
self.write_to_server_socket(response, client_addr[0])
if client_dns_pair:
logging.debug("remove dns client %s:%d" % (client_addr[0][0], client_addr[0][1]))
del self._cache_dns_client[key]<|fim▁hole|> else:
# this packet is from somewhere else we know
# simply drop that packet
pass
def write_to_server_socket(self, data, addr):
uncomplete = False
retry = 0
try:
self._server_socket.sendto(data, addr)
data = None
while self._data_to_write_to_server_socket:
data_buf = self._data_to_write_to_server_socket[0]
retry = data_buf[1] + 1
del self._data_to_write_to_server_socket[0]
data, addr = data_buf[0]
self._server_socket.sendto(data, addr)
except (OSError, IOError) as e:
error_no = eventloop.errno_from_exception(e)
uncomplete = True
if error_no in (errno.EWOULDBLOCK,):
pass
else:
shell.print_exception(e)
return False
#if uncomplete and data is not None and retry < 3:
# self._data_to_write_to_server_socket.append([(data, addr), retry])
#'''
def add_to_loop(self, loop):
if self._eventloop:
raise Exception('already add to loop')
if self._closed:
raise Exception('already closed')
self._eventloop = loop
server_socket = self._server_socket
self._eventloop.add(server_socket,
eventloop.POLL_IN | eventloop.POLL_ERR, self)
loop.add_periodic(self.handle_periodic)
def remove_handler(self, client):
if hash(client) in self._timeout_cache:
del self._timeout_cache[hash(client)]
def update_activity(self, client):
self._timeout_cache[hash(client)] = client
def _sweep_timeout(self):
self._timeout_cache.sweep()
def _close_tcp_client(self, client):
if client.remote_address:
logging.debug('timed out: %s:%d' %
client.remote_address)
else:
logging.debug('timed out')
client.destroy()
client.destroy_local()
def handle_event(self, sock, fd, event):
if sock == self._server_socket:
if event & eventloop.POLL_ERR:
logging.error('UDP server_socket err')
try:
self._handle_server()
except Exception as e:
shell.print_exception(e)
if self._config['verbose']:
traceback.print_exc()
elif sock and (fd in self._sockets):
if event & eventloop.POLL_ERR:
logging.error('UDP client_socket err')
try:
self._handle_client(sock)
except Exception as e:
shell.print_exception(e)
if self._config['verbose']:
traceback.print_exc()
else:
if sock:
handler = self._fd_to_handlers.get(fd, None)
if handler:
handler.handle_event(sock, event)
else:
logging.warn('poll removed fd')
def handle_periodic(self):
if self._closed:
self._cache.clear(0)
self._cache_dns_client.clear(0)
if self._eventloop:
self._eventloop.remove_periodic(self.handle_periodic)
self._eventloop.remove(self._server_socket)
if self._server_socket:
self._server_socket.close()
self._server_socket = None
logging.info('closed UDP port %d', self._listen_port)
else:
before_sweep_size = len(self._sockets)
self._cache.sweep()
self._cache_dns_client.sweep()
if before_sweep_size != len(self._sockets):
logging.debug('UDP port %5d sockets %d' % (self._listen_port, len(self._sockets)))
self._sweep_timeout()
def close(self, next_tick=False):
logging.debug('UDP close')
self._closed = True
if not next_tick:
if self._eventloop:
self._eventloop.remove_periodic(self.handle_periodic)
self._eventloop.remove(self._server_socket)
self._server_socket.close()
self._cache.clear(0)
self._cache_dns_client.clear(0)<|fim▁end|> | self._close_client(client_dns_pair[0]) |
<|file_name|>into_iterator.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code, unused_imports)]
#[macro_use]
extern crate derive_more;
#[derive(IntoIterator)]
#[into_iterator(owned, ref, ref_mut)]
struct MyVec(Vec<i32>);
#[derive(IntoIterator)]
#[into_iterator(owned, ref, ref_mut)]
struct Numbers {
numbers: Vec<i32>,
}
#[derive(IntoIterator)]
struct Numbers2 {<|fim▁hole|> #[into_iterator(owned, ref, ref_mut)]
numbers: Vec<i32>,
useless: bool,
useless2: bool,
}
#[derive(IntoIterator)]
struct Numbers3 {
#[into_iterator(ref, ref_mut)]
numbers: Vec<i32>,
useless: bool,
useless2: bool,
}
// Test that owned is not enabled when ref/ref_mut are enabled without owned
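// (If the derive had also generated an owned IntoIterator impl for Numbers3, the
// hand-written impl below would conflict and this file would fail to compile.)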
impl ::core::iter::IntoIterator for Numbers3 {
type Item = <Vec<i32> as ::core::iter::IntoIterator>::Item;
type IntoIter = <Vec<i32> as ::core::iter::IntoIterator>::IntoIter;
#[inline]
fn into_iter(self) -> Self::IntoIter {
<Vec<i32> as ::core::iter::IntoIterator>::into_iter(self.numbers)
}
}<|fim▁end|> | |
<|file_name|>_mod1_1_1_0_0_1.py<|end_file_name|><|fim▁begin|>name1_1_1_0_0_1_0 = None
name1_1_1_0_0_1_1 = None
name1_1_1_0_0_1_2 = None
name1_1_1_0_0_1_3 = None<|fim▁hole|><|fim▁end|> |
name1_1_1_0_0_1_4 = None |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>extern crate cfg_if;
use cfg_if::cfg_if;
cfg_if! {
if #[cfg(all(windows, feature = "neon-sys"))] {
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;
// Extract linker metadata from neon-sys and save it in a text file.
// The neon-build lib.rs will textually include them into constants.
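        // For example (illustrative): save("DEP_NEON_NODE_ARCH", "node_arch") writes
        // the value of the DEP_NEON_NODE_ARCH env var to $OUT_DIR/node_arch.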
fn save(var: &str, filename: &str) {
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(filename);
let mut buffer = File::create(path).unwrap();<|fim▁hole|>
fn main() {
save("DEP_NEON_NODE_ROOT_DIR", "node_root_dir");
save("DEP_NEON_NODE_ARCH", "node_arch");
save("DEP_NEON_NODE_LIB_FILE", "node_lib_file");
}
} else {
fn main() { }
}
}<|fim▁end|> | write!(buffer, "{}", env::var(var).unwrap()).unwrap();
} |
<|file_name|>test_media_player.py<|end_file_name|><|fim▁begin|>"""The tests for the Yamaha Media player platform."""
from unittest.mock import MagicMock, PropertyMock, call, patch
import pytest
import homeassistant.components.media_player as mp
from homeassistant.components.yamaha import media_player as yamaha
from homeassistant.components.yamaha.const import DOMAIN
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.setup import async_setup_component
CONFIG = {"media_player": {"platform": "yamaha", "host": "127.0.0.1"}}
def _create_zone_mock(name, url):
zone = MagicMock()
zone.ctrl_url = url
zone.zone = name
return zone
class FakeYamahaDevice:
"""A fake Yamaha device."""
def __init__(self, ctrl_url, name, zones=None):
"""Initialize the fake Yamaha device."""
self.ctrl_url = ctrl_url
self.name = name
self._zones = zones or []
def zone_controllers(self):
"""Return controllers for all available zones."""
return self._zones
@pytest.fixture(name="main_zone")
def main_zone_fixture():
"""Mock the main zone."""
return _create_zone_mock("Main zone", "http://main")
@pytest.fixture(name="device")
def device_fixture(main_zone):
"""Mock the yamaha device."""
device = FakeYamahaDevice("http://receiver", "Receiver", zones=[main_zone])
with patch("rxv.RXV", return_value=device):
yield device
async def test_setup_host(hass, device, main_zone):
"""Test set up integration with host."""
assert await async_setup_component(hass, mp.DOMAIN, CONFIG)
await hass.async_block_till_done()
state = hass.states.get("media_player.yamaha_receiver_main_zone")
assert state is not None
assert state.state == "off"
async def test_setup_no_host(hass, device, main_zone):
"""Test set up integration without host."""
with patch("rxv.find", return_value=[device]):
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "yamaha"}}
)
await hass.async_block_till_done()
state = hass.states.get("media_player.yamaha_receiver_main_zone")
<|fim▁hole|>async def test_setup_discovery(hass, device, main_zone):
"""Test set up integration via discovery."""
discovery_info = {
"name": "Yamaha Receiver",
"model_name": "Yamaha",
"control_url": "http://receiver",
"description_url": "http://receiver/description",
}
await async_load_platform(
hass, mp.DOMAIN, "yamaha", discovery_info, {mp.DOMAIN: {}}
)
await hass.async_block_till_done()
state = hass.states.get("media_player.yamaha_receiver_main_zone")
assert state is not None
assert state.state == "off"
async def test_setup_zone_ignore(hass, device, main_zone):
"""Test set up integration without host."""
assert await async_setup_component(
hass,
mp.DOMAIN,
{
"media_player": {
"platform": "yamaha",
"host": "127.0.0.1",
"zone_ignore": "Main zone",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("media_player.yamaha_receiver_main_zone")
assert state is None
async def test_enable_output(hass, device, main_zone):
"""Test enable output service."""
assert await async_setup_component(hass, mp.DOMAIN, CONFIG)
await hass.async_block_till_done()
port = "hdmi1"
enabled = True
data = {
"entity_id": "media_player.yamaha_receiver_main_zone",
"port": port,
"enabled": enabled,
}
await hass.services.async_call(DOMAIN, yamaha.SERVICE_ENABLE_OUTPUT, data, True)
assert main_zone.enable_output.call_count == 1
assert main_zone.enable_output.call_args == call(port, enabled)
@pytest.mark.parametrize(
"cursor,method",
[
(yamaha.CURSOR_TYPE_DOWN, "menu_down"),
(yamaha.CURSOR_TYPE_LEFT, "menu_left"),
(yamaha.CURSOR_TYPE_RETURN, "menu_return"),
(yamaha.CURSOR_TYPE_RIGHT, "menu_right"),
(yamaha.CURSOR_TYPE_SELECT, "menu_sel"),
(yamaha.CURSOR_TYPE_UP, "menu_up"),
],
)
@pytest.mark.usefixtures("device")
async def test_menu_cursor(hass, main_zone, cursor, method):
"""Verify that the correct menu method is called for the menu_cursor service."""
assert await async_setup_component(hass, mp.DOMAIN, CONFIG)
await hass.async_block_till_done()
data = {
"entity_id": "media_player.yamaha_receiver_main_zone",
"cursor": cursor,
}
await hass.services.async_call(DOMAIN, yamaha.SERVICE_MENU_CURSOR, data, True)
getattr(main_zone, method).assert_called_once_with()
async def test_select_scene(hass, device, main_zone, caplog):
"""Test select scene service."""
scene_prop = PropertyMock(return_value=None)
type(main_zone).scene = scene_prop
assert await async_setup_component(hass, mp.DOMAIN, CONFIG)
await hass.async_block_till_done()
scene = "TV Viewing"
data = {
"entity_id": "media_player.yamaha_receiver_main_zone",
"scene": scene,
}
await hass.services.async_call(DOMAIN, yamaha.SERVICE_SELECT_SCENE, data, True)
assert scene_prop.call_count == 1
assert scene_prop.call_args == call(scene)
scene = "BD/DVD Movie Viewing"
data["scene"] = scene
await hass.services.async_call(DOMAIN, yamaha.SERVICE_SELECT_SCENE, data, True)
assert scene_prop.call_count == 2
assert scene_prop.call_args == call(scene)
scene_prop.side_effect = AssertionError()
missing_scene = "Missing scene"
data["scene"] = missing_scene
await hass.services.async_call(DOMAIN, yamaha.SERVICE_SELECT_SCENE, data, True)
assert f"Scene '{missing_scene}' does not exist!" in caplog.text<|fim▁end|> | assert state is not None
assert state.state == "off"
|