# ===== Jumpscale/core9 :: JumpScale9/clients/tarantool/templates/python/model.py (license: apache-2.0) =====
from js9 import j
import os
import capnp
# import msgpack
import base64
ModelBaseCollection = j.data.capnp.getModelBaseClassCollection()
ModelBase = j.data.capnp.getModelBaseClass()
# from JumpScale9.clients.tarantool.KVSInterface import KVSTarantool
class $NameModel(ModelBase):
'''
'''
def __init__(self):
ModelBase.__init__(self)
def index(self):
        # no need to put indexes because this will be done by capnp
pass
def save(self):
self.reSerialize()
self._pre_save()
buff = self.dbobj.to_bytes()
key=self.key
# key=msgpack.dumps(self.key)
# key=base64.b64encode(self.key.encode())
return self.collection.client.call("model_$name_set",(key,buff))
def delete(self):
key=self.key
# key=base64.b64encode(self.key.encode())
return self.collection.client.call("model_$name_del",(key))
class $NameCollection(ModelBaseCollection):
'''
    This class represents a collection of $Names
    It's used to list/find/create new instances of the $Name model object
'''
def __init__(self):
category = '$name'
namespace = ""
        # instantiate the KVS interface on top of tarantool
# cl = j.clients.tarantool.client_get() # will get the tarantool from the config file, the main connection
# db = KVSTarantool(cl, category)
# mpath = j.sal.fs.getDirName(os.path.abspath(__file__)) + "/model.capnp"
# SchemaCapnp = j.data.capnp.getSchemaFromPath(mpath, name='$Name')
self.client = j.clients.tarantool.client_get() #will get the tarantool from the config file, the main connection
mpath=j.sal.fs.getDirName(os.path.abspath(__file__))+"/model.capnp"
SchemaCapnp=j.data.capnp.getSchemaFromPath(mpath,name='$Name')
super().__init__(SchemaCapnp, category=category, namespace=namespace, modelBaseClass=$NameModel, db=self.client, indexDb=self.client)
self.client.db.encoding=None
def new(self):
return $NameModel(collection=self, new=True)
def get(self,key):
resp=self.client.call("model_$name_get",key)
if len(resp.data) <= 1 and len(resp.data[0]) > 2:
raise KeyError("value for %s not found" % key)
value = resp.data[0][1]
return $NameModel(key=key,collection=self, new=False,data=value)
# BELOW IS ALL EXAMPLE CODE WHICH NEEDS TO BE REPLACED
def list(self):
resp=self.client.call("model_$name_list")
return [item.decode() for item in resp[0]]
# def list(self, actor="", service="", action="", state="", serviceKey="", fromEpoch=0, toEpoch=9999999999999,tags=[]):
# raise NotImplementedError()
# return res
# def find(self, actor="", service="", action="", state="", serviceKey="", fromEpoch=0, toEpoch=9999999999999, tags=[]):
# raise NotImplementedError()
# res = []
# for key in self.list(actor, service, action, state, serviceKey, fromEpoch, toEpoch, tags):
# if self.get(key):
# res.append(self.get(key))
# return res
# ===== t3dev/odoo :: addons/purchase_requisition/tests/test_purchase_requisition.py (license: gpl-3.0) =====
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.purchase_requisition.tests.common import TestPurchaseRequisitionCommon
class TestPurchaseRequisition(TestPurchaseRequisitionCommon):
def test_00_purchase_requisition_users(self):
self.assertTrue(self.user_purchase_requisition_manager, 'Manager Should be created')
self.assertTrue(self.user_purchase_requisition_user, 'User Should be created')
def test_01_cancel_purchase_requisition(self):
self.requisition1.sudo(self.user_purchase_requisition_user.id).action_cancel()
# Check requisition after cancelled.
self.assertEqual(self.requisition1.state, 'cancel', 'Requisition should be in cancelled state.')
# I reset requisition as "New".
self.requisition1.sudo(self.user_purchase_requisition_user.id).action_draft()
# I duplicate requisition.
self.requisition1.sudo(self.user_purchase_requisition_user.id).copy()
def test_02_purchase_requisition(self):
price_product09 = 34
price_product13 = 62
quantity = 26
        # Create a purchase requisition with type blanket order and two products
line1 = (0, 0, {'product_id': self.product_09.id, 'product_qty': quantity, 'product_uom_id': self.product_uom_id.id, 'price_unit': price_product09})
line2 = (0, 0, {'product_id': self.product_13.id, 'product_qty': quantity, 'product_uom_id': self.product_uom_id.id, 'price_unit': price_product13})
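        # The (0, 0, {...}) triples above are standard Odoo ORM one2many commands:
        # "create a new record with these values and link it to the requisition".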
requisition_type = self.env['purchase.requisition.type'].create({
'name': 'Blanket test',
'quantity_copy': 'none'
})
requisition_blanket = self.env['purchase.requisition'].create({
'line_ids': [line1, line2],
'type_id': requisition_type.id,
'vendor_id': self.res_partner_1.id,
})
# confirm the requisition
requisition_blanket.action_in_progress()
        # Check for both products that the new supplier info (purchase.requisition.vendor_id) is added to the purchase tab
# and check the quantity
seller_partner1 = self.res_partner_1
supplierinfo09 = self.env['product.supplierinfo'].search([
('name', '=', seller_partner1.id),
('product_id', '=', self.product_09.id),
('purchase_requisition_id', '=', requisition_blanket.id),
])
self.assertEqual(supplierinfo09.name, seller_partner1, 'The supplierinfo is not the good one')
self.assertEqual(supplierinfo09.price, price_product09, 'The supplierinfo is not the good one')
supplierinfo13 = self.env['product.supplierinfo'].search([
('name', '=', seller_partner1.id),
('product_id', '=', self.product_13.id),
('purchase_requisition_id', '=', requisition_blanket.id),
])
self.assertEqual(supplierinfo13.name, seller_partner1, 'The supplierinfo is not the good one')
self.assertEqual(supplierinfo13.price, price_product13, 'The supplierinfo is not the good one')
# Put the requisition in done Status
requisition_blanket.action_in_progress()
requisition_blanket.action_done()
self.assertFalse(self.env['product.supplierinfo'].search([('id', '=', supplierinfo09.id)]), 'The supplier info should be removed')
self.assertFalse(self.env['product.supplierinfo'].search([('id', '=', supplierinfo13.id)]), 'The supplier info should be removed')
def test_06_purchase_requisition(self):
""" Create a blanquet order for a product and a vendor already linked via
a supplier info"""
product = self.env['product.product'].create({
'name': 'test6',
})
product2 = self.env['product.product'].create({
'name': 'test6',
})
vendor = self.env['res.partner'].create({
'name': 'vendor6',
})
supplier_info = self.env['product.supplierinfo'].create({
'product_id': product.id,
'name': vendor.id,
})
        # create an empty blanket order
requisition_type = self.env['purchase.requisition.type'].create({
'name': 'Blanket test',
'quantity_copy': 'none'
})
line1 = (0, 0, {
'product_id': product2.id,
'product_uom_id': product2.uom_po_id.id,
'price_unit': 41,
'product_qty': 10,
})
requisition_blanket = self.env['purchase.requisition'].create({
'line_ids': [line1],
'type_id': requisition_type.id,
'vendor_id': vendor.id,
})
requisition_blanket.action_in_progress()
self.env['purchase.requisition.line'].create({
'product_id': product.id,
'product_qty': 14.0,
'requisition_id': requisition_blanket.id,
'price_unit': 10,
})
new_si = self.env['product.supplierinfo'].search([
('product_id', '=', product.id),
('name', '=', vendor.id)
]) - supplier_info
self.assertEqual(new_si.purchase_requisition_id, requisition_blanket, 'the blanket order is not linked to the supplier info')
# ===== dann/python-classpluggable :: setup.py (license: bsd-3-clause) =====
import os
import sys
from setuptools import setup, find_packages
if sys.version_info[:2] < (2, 6):
raise RuntimeError('Requires Python 2.6 or better')
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
except IOError:
README = CHANGES = ''
install_requires=[
'setuptools',
]
tests_require = install_requires + [
'nose',
]
setup(name='classpluggable',
version='0.01',
description=('classpluggable project'),
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Intended Audience :: Developers",
"Operating System :: Unix",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: BSD License",
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules'
],
keywords='',
author="dann",
author_email="[email protected] ",
url="http://github.com/dann/python-classpluggable",
license="New BSD License",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires = install_requires,
tests_require = tests_require,
test_suite = 'nose.collector',
entry_points = """\
"""
      )
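# Typical developer commands for this setup.py (illustrative, not part of the
# package itself):
#   python setup.py develop   # editable install for local hacking
#   python setup.py test      # runs the nose collector configured above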
# ===== timpalpant/KaggleTSTextClassification :: scripts/practice/predict.6.py (license: gpl-3.0) =====
#!/usr/bin/env python
'''
Make predictions for the test data
6. Use logistic regression
'''
import argparse, multiprocessing
from common import *
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import OneHotEncoder, StandardScaler
def prepare_features(data, enc=None, scaler=None):
'''
One-hot encode all boolean/string (categorical) features,
and shift/scale integer/float features
'''
# X needs to contain only non-negative integers
bfs = data['bfeatures'] + 1
sfs = data['sfeatures'] + 1
# Shift/scale integer and float features to have mean=0, std=1
ifs = data['ifeatures']
ffs = data['ffeatures']
x2 = np.hstack((ifs,ffs))
if scaler is None:
scaler = StandardScaler()
x2 = scaler.fit_transform(x2)
print "Training features have mean: %s" % scaler.mean_
print "and standard deviation: %s" % scaler.std_
else:
x2 = scaler.transform(x2, copy=False)
# one-hot encode categorical features
X = np.hstack((bfs,sfs,x2))
categorical = np.arange(bfs.shape[1]+sfs.shape[1])
if enc is None:
enc = OneHotEncoder(n_values='auto', categorical_features=categorical)
X = enc.fit_transform(X)
print "One-hot encoded features have dimension %d" % X.shape[1]
else:
X = enc.transform(X)
return X, enc, scaler
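# Small self-contained illustration of the input layout prepare_features()
# expects. The arrays below are synthetic toy data (not competition data) and
# the shapes are arbitrary; this is only a sketch.
def _prepare_features_example():
    toy = {'bfeatures': np.array([[0, 1], [1, -1]]),
           'sfeatures': np.array([[2, 0], [0, 1]]),
           'ifeatures': np.array([[3], [5]]),
           'ffeatures': np.array([[0.1], [0.7]])}
    X, enc, scaler = prepare_features(toy)
    return X.shape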
def opts():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('train', type=load_npz,
help='Training features (npz)')
parser.add_argument('labels', type=load_npz,
help='Training labels (npz)')
parser.add_argument('test', type=load_npz,
help='Test features (npz)')
parser.add_argument('output',
help='Output label predictions (npz)')
return parser
def cross_validate_classifier(clf, features, labels, cv=5):
kf = cross_validation.KFold(len(features), n_folds=cv)
scores = []
for train, test in kf:
clf.fit(features[train], labels[train])
Y = np.vstack([v[:,0] for v in clf.predict_proba(features[test])])
pred = 1 - Y.T
        score = score_predictions(labels[test], pred)
        scores.append(score)
    return scores
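# For reference: the per-label fitting loop in __main__ below is what sklearn's
# OneVsRestClassifier automates. A sketch of the same model using that wrapper
# (it assumes every label column contains both classes, which is the situation
# the try/except fallback below works around):
def _one_vs_rest_sketch(X_train, Y_train, X_test):
    from sklearn.multiclass import OneVsRestClassifier
    ovr = OneVsRestClassifier(LogisticRegression(C=1.0, tol=0.001, random_state=42))
    ovr.fit(X_train, Y_train)
    return ovr.predict_proba(X_test)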
if __name__ == "__main__":
args = opts().parse_args()
print "Loading and preparing data"
X, enc, scaler = prepare_features(args.train)
Y = args.labels['labels'][:1111]
label_header = args.labels['header']
print "Training classifier"
clfs = [LogisticRegression(C=1.0, tol=0.001, random_state=42) for y in Y.T]
for i, (clf, y) in enumerate(zip(clfs, Y.T)):
print "Fitting label %s" % label_header[i]
try: clf.fit(X, y)
except: clfs[i] = y[0]
del X, Y
print "Predicting"
    # prepare_features returns (X, enc, scaler); only the encoded features are needed here
    X, _, _ = prepare_features(args.test, enc, scaler)
p = []
for i, clf in enumerate(clfs):
print "Predicting label %s" % label_header[i]
        try:
            print clf.predict_proba(X)
            p.append(clf.predict_proba(X)[:, 0])
        except:
            # clf may be a plain constant (the fallback stored when fitting failed above)
            p.append(np.array([clf] * X.shape[0]))
p = np.vstack(p).T
Y = 1 - p
print "Saving predictions"
save_npz(args.output, ids=args.test['ids'],
header=args.labels['header'], labels=Y)
# ===== Onager/l2tdevtools :: l2tdevtools/dependency_writers/dpkg.py (license: apache-2.0) =====
# -*- coding: utf-8 -*-
"""Writer for Debian packaging (dpkg) files."""
from __future__ import unicode_literals
import io
import os
from l2tdevtools.dependency_writers import interface
class DPKGCompatWriter(interface.DependencyFileWriter):
"""Dpkg compat file writer."""
PATH = os.path.join('config', 'dpkg', 'compat')
_FILE_CONTENT = '9\n'
def Write(self):
"""Writes a dpkg control file."""
with io.open(self.PATH, 'w', encoding='utf-8') as file_object:
file_object.write(self._FILE_CONTENT)
class DPKGControlWriter(interface.DependencyFileWriter):
"""Dpkg control file writer."""
PATH = os.path.join('config', 'dpkg', 'control')
_PYTHON3_FILE_HEADER = [
'Source: {project_name:s}',
'Section: python',
'Priority: extra',
'Maintainer: {maintainer:s}',
'Build-Depends: debhelper (>= 9), dh-python, {build_dependencies:s}',
'Standards-Version: 4.1.4',
'X-Python3-Version: >= 3.5',
'Homepage: {homepage_url:s}',
''] # yapf: disable
_DATA_PACKAGE = [
'Package: {project_name:s}-data',
'Architecture: all',
'Depends: ${{misc:Depends}}',
'Description: Data files for {name_description:s}',
'{description_long:s}',
''] # yapf: disable
_PYTHON3_PACKAGE = [
'Package: python3-{project_name:s}',
'Architecture: all',
('Depends: {python3_dependencies:s}'
'${{python3:Depends}}, ${{misc:Depends}}'),
'Description: Python 3 module of {name_description:s}',
'{description_long:s}',
''] # yapf: disable
_TOOLS_PACKAGE = [
'Package: {project_name:s}-tools',
'Architecture: all',
('Depends: python3-{project_name:s} (>= ${{binary:Version}}), '
'${{python3:Depends}}, ${{misc:Depends}}'),
'Description: Tools of {name_description:s}',
'{description_long:s}',
''] # yapf: disable
def Write(self):
"""Writes a dpkg control file."""
file_content = []
file_content.extend(self._PYTHON3_FILE_HEADER)
data_dependency = ''
if os.path.isdir('data'):
data_dependency = '{0:s}-data (>= ${{binary:Version}})'.format(
self._project_definition.name)
file_content.extend(self._DATA_PACKAGE)
file_content.extend(self._PYTHON3_PACKAGE)
if (os.path.isdir('scripts') or os.path.isdir('tools') or
self._project_definition.name == 'timesketch'):
file_content.extend(self._TOOLS_PACKAGE)
description_long = self._project_definition.description_long
description_long = '\n'.join(
[' {0:s}'.format(line) for line in description_long.split('\n')])
python3_dependencies = self._dependency_helper.GetDPKGDepends(
python_version=3)
if data_dependency:
python3_dependencies.insert(0, data_dependency)
python3_dependencies = ', '.join(python3_dependencies)
if python3_dependencies:
python3_dependencies = '{0:s}, '.format(python3_dependencies)
build_dependencies = ['python3-all (>= 3.5~)', 'python3-setuptools']
if self._project_definition.name == 'timesketch':
build_dependencies.insert(0, 'dh-systemd (>= 1.5)')
build_dependencies.append('python3-pip')
build_dependencies = ', '.join(build_dependencies)
template_mappings = {
'build_dependencies': build_dependencies,
'description_long': description_long,
'description_short': self._project_definition.description_short,
'homepage_url': self._project_definition.homepage_url,
'maintainer': self._project_definition.maintainer,
'name_description': self._project_definition.name_description,
'project_name': self._project_definition.name,
'python3_dependencies': python3_dependencies}
file_content = '\n'.join(file_content)
file_content = file_content.format(**template_mappings)
with io.open(self.PATH, 'w', encoding='utf-8') as file_object:
file_object.write(file_content)
class DPKGRulesWriter(interface.DependencyFileWriter):
"""Dpkg rules file writer."""
PATH = os.path.join('config', 'dpkg', 'rules')
_FILE_CONTENT = [
'#!/usr/bin/make -f',
'',
'%:',
'\tdh $@ --buildsystem=pybuild --with=python3',
'',
'.PHONY: override_dh_auto_test',
'override_dh_auto_test:',
'',
'']
def Write(self):
"""Writes a dpkg control file."""
template_mappings = {
'project_name': self._project_definition.name}
file_content = '\n'.join(self._FILE_CONTENT)
file_content = file_content.format(**template_mappings)
with io.open(self.PATH, 'w', encoding='utf-8') as file_object:
file_object.write(file_content)
# ===== redhat-cip/dci-control-server :: tests/api/v1/test_jobs_update.py (license: apache-2.0) =====
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def test_update_jobs(admin, remoteci_context, job_user_id, topic_user_id):
# test update schedule latest components
data = {
'name': 'pname',
'type': 'type_1',
'url': 'http://example.com/',
'topic_id': topic_user_id,
'state': 'active'}
c1 = admin.post('/api/v1/components', data=data).data['component']['id']
data.update({'type': 'type_2', 'name': 'pname1'})
c2 = admin.post('/api/v1/components', data=data).data['component']['id']
data.update({'type': 'type_3', 'name': 'pname2'})
c3 = admin.post('/api/v1/components', data=data).data['component']['id']
latest_components = {c1, c2, c3}
r = remoteci_context.post('/api/v1/jobs/%s/update' % job_user_id)
assert r.status_code == 201
update_job = r.data['job']
assert update_job['update_previous_job_id'] == job_user_id
assert update_job['topic_id'] == topic_user_id
update_cmpts = admin.get('/api/v1/jobs/%s/components' % update_job['id'])
update_cmpts = {cmpt['id'] for cmpt in update_cmpts.data['components']}
assert latest_components == update_cmpts
# ===== pli3/enigma2-git :: lib/python/Tools/Directories.py (license: gpl-2.0) =====
# -*- coding: utf-8 -*-
import os
from os import mkdir, rmdir, system, walk, stat as os_stat, listdir, readlink, makedirs, error as os_error, symlink, access, F_OK, R_OK, W_OK
from stat import S_IMODE
from re import compile
from enigma import eEnv
try:
from os import chmod
have_chmod = True
except:
have_chmod = False
try:
from os import utime
have_utime = True
except:
have_utime = False
SCOPE_TRANSPONDERDATA = 0
SCOPE_SYSETC = 1
SCOPE_FONTS = 2
SCOPE_SKIN = 3
SCOPE_SKIN_IMAGE = 4
SCOPE_USERETC = 5
SCOPE_CONFIG = 6
SCOPE_LANGUAGE = 7
SCOPE_HDD = 8
SCOPE_PLUGINS = 9
SCOPE_MEDIA = 10
SCOPE_PLAYLIST = 11
SCOPE_CURRENT_SKIN = 12
SCOPE_DEFAULTDIR = 13
SCOPE_DEFAULTPARTITION = 14
SCOPE_DEFAULTPARTITIONMOUNTDIR = 15
SCOPE_METADIR = 16
SCOPE_CURRENT_PLUGIN = 17
PATH_CREATE = 0
PATH_DONTCREATE = 1
PATH_FALLBACK = 2
# songingeun - [
HDD_PATH="/hdd/movie/"
from Tools.HardwareInfo import HardwareInfo
if HardwareInfo().get_device_name() in ('ios300hd', 'tmnanooe', 'mediabox'):
HDD_PATH="/media/usb/movie/"
# ]
defaultPaths = {
SCOPE_TRANSPONDERDATA: (eEnv.resolve("${sysconfdir}/"), PATH_DONTCREATE),
SCOPE_SYSETC: (eEnv.resolve("${sysconfdir}/"), PATH_DONTCREATE),
SCOPE_FONTS: (eEnv.resolve("${datadir}/fonts/"), PATH_DONTCREATE),
SCOPE_CONFIG: (eEnv.resolve("${sysconfdir}/enigma2/"), PATH_CREATE),
SCOPE_PLUGINS: (eEnv.resolve("${libdir}/enigma2/python/Plugins/"), PATH_CREATE),
SCOPE_LANGUAGE: (eEnv.resolve("${datadir}/enigma2/po/"), PATH_DONTCREATE),
SCOPE_SKIN: (eEnv.resolve("${datadir}/enigma2/"), PATH_DONTCREATE),
SCOPE_SKIN_IMAGE: (eEnv.resolve("${datadir}/enigma2/"), PATH_DONTCREATE),
# SCOPE_HDD: ("/hdd/movie/", PATH_DONTCREATE),
SCOPE_HDD: (HDD_PATH, PATH_DONTCREATE),
SCOPE_MEDIA: ("/media/", PATH_DONTCREATE),
SCOPE_PLAYLIST: (eEnv.resolve("${sysconfdir}/enigma2/playlist/"), PATH_CREATE),
SCOPE_USERETC: ("", PATH_DONTCREATE), # user home directory
SCOPE_DEFAULTDIR: (eEnv.resolve("${datadir}/enigma2/defaults/"), PATH_CREATE),
SCOPE_DEFAULTPARTITION: ("/dev/mtdblock6", PATH_DONTCREATE),
SCOPE_DEFAULTPARTITIONMOUNTDIR: (eEnv.resolve("${datadir}/enigma2/dealer"), PATH_CREATE),
SCOPE_METADIR: (eEnv.resolve("${datadir}/meta"), PATH_CREATE),
}
FILE_COPY = 0 # copy files from fallback dir to the basedir
FILE_MOVE = 1 # move files
PATH_COPY = 2 # copy the complete fallback dir to the basedir
PATH_MOVE = 3 # move the fallback dir to the basedir (can be used for changes in paths)
fallbackPaths = {
SCOPE_CONFIG: [("/home/root/", FILE_MOVE),
(eEnv.resolve("${datadir}/enigma2/defaults/"), FILE_COPY)],
SCOPE_HDD: [("/hdd/movies", PATH_MOVE)]
}
def resolveFilename(scope, base = "", path_prefix = None):
if base.startswith("~/"):
# you can only use the ~/ if we have a prefix directory
assert path_prefix is not None
base = os.path.join(path_prefix, base[2:])
# don't resolve absolute paths
if base.startswith('/'):
return base
if scope == SCOPE_CURRENT_SKIN:
from Components.config import config
# allow files in the config directory to replace skin files
tmp = defaultPaths[SCOPE_CONFIG][0]
if base and pathExists(tmp + base):
path = tmp
else:
tmp = defaultPaths[SCOPE_SKIN][0]
pos = config.skin.primary_skin.value.rfind('/')
if pos != -1:
#if basefile is not available use default skin path as fallback
tmpfile = tmp+config.skin.primary_skin.value[:pos+1] + base
if pathExists(tmpfile):
path = tmp+config.skin.primary_skin.value[:pos+1]
else:
path = tmp
else:
path = tmp
elif scope == SCOPE_CURRENT_PLUGIN:
tmp = defaultPaths[SCOPE_PLUGINS]
from Components.config import config
skintmp = defaultPaths[SCOPE_SKIN]
pos = config.skin.primary_skin.value.rfind('/')
if pos != -1:
#if basefile is not available inside current skin path, use the original provided file as fallback
skintmpfile = skintmp[0]+config.skin.primary_skin.value[:pos+1] + base
if fileExists(skintmpfile):
path = skintmp[0]+config.skin.primary_skin.value[:pos+1]
else:
path = tmp[0]
else:
path = tmp[0]
else:
tmp = defaultPaths[scope]
path = tmp[0]
flags = tmp[1]
if flags == PATH_CREATE:
if not pathExists(path):
try:
mkdir(path)
except OSError:
print "resolveFilename: Couldn't create %s" % path
return None
fallbackPath = fallbackPaths.get(scope)
if fallbackPath and not fileExists(path + base):
for x in fallbackPath:
try:
if x[1] == FILE_COPY:
if fileExists(x[0] + base):
try:
os.link(x[0] + base, path + base)
except:
system("cp " + x[0] + base + " " + path + base)
break
elif x[1] == FILE_MOVE:
if fileExists(x[0] + base):
os.rename(x[0] + base, path + base)
break
elif x[1] == PATH_COPY:
if pathExists(x[0]):
if not pathExists(defaultPaths[scope][0]):
mkdir(path)
system("cp -a " + x[0] + "* " + path)
break
elif x[1] == PATH_MOVE:
if pathExists(x[0]):
os.rename(x[0], path + base)
break
except Exception, e:
print "[D] Failed to recover %s:" % (path+base), e
# FIXME: we also have to handle DATADIR etc. here.
return path + base
# this is only the BASE - an extension must be added later.
pathExists = os.path.exists
isMount = os.path.ismount
def createDir(path, makeParents = False):
try:
if makeParents:
makedirs(path)
else:
mkdir(path)
except:
return 0
else:
return 1
def removeDir(path):
try:
rmdir(path)
except:
return 0
else:
return 1
def fileExists(f, mode='r'):
if mode == 'r':
acc_mode = R_OK
elif mode == 'w':
acc_mode = W_OK
else:
acc_mode = F_OK
return access(f, acc_mode)
def getRecordingFilename(basename, dirname = None):
# filter out non-allowed characters
non_allowed_characters = "/.\\:*?<>|\""
filename = ""
basename = basename.replace('\xc2\x86', '').replace('\xc2\x87', '')
for c in basename:
if c in non_allowed_characters or ord(c) < 32:
c = "_"
filename += c
if dirname is not None:
filename = os.path.join(dirname, filename)
while len(filename) > 240:
filename = filename.decode('UTF-8')
filename = filename[:-1]
filename = filename.encode('UTF-8')
i = 0
while True:
path = resolveFilename(SCOPE_HDD, filename)
if i > 0:
path += "_%03d" % i
try:
open(path + ".ts")
i += 1
except IOError:
return path
# this is clearly a hack:
def InitFallbackFiles():
resolveFilename(SCOPE_CONFIG, "userbouquet.favourites.tv")
resolveFilename(SCOPE_CONFIG, "bouquets.tv")
resolveFilename(SCOPE_CONFIG, "userbouquet.favourites.radio")
resolveFilename(SCOPE_CONFIG, "bouquets.radio")
# returns a list of tuples containing pathname and filename matching the given pattern
# example-pattern: match all txt-files: ".*\.txt$"
def crawlDirectory(directory, pattern):
list = []
if directory:
expression = compile(pattern)
for root, dirs, files in walk(directory):
for file in files:
if expression.match(file) is not None:
list.append((root, file))
return list
def copyfile(src, dst):
try:
f1 = open(src, "rb")
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
f2 = open(dst, "w+b")
while True:
buf = f1.read(16*1024)
if not buf:
break
f2.write(buf)
st = os_stat(src)
mode = S_IMODE(st.st_mode)
if have_chmod:
chmod(dst, mode)
if have_utime:
utime(dst, (st.st_atime, st.st_mtime))
except:
print "copy", src, "to", dst, "failed!"
return -1
return 0
def copytree(src, dst, symlinks=False):
names = listdir(src)
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
if not os.path.isdir(dst):
mkdir(dst)
else:
makedirs(dst)
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = readlink(srcname)
symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks)
else:
copyfile(srcname, dstname)
except:
print "dont copy srcname (no file or link or folder)"
try:
st = os_stat(src)
mode = S_IMODE(st.st_mode)
if have_chmod:
chmod(dst, mode)
if have_utime:
utime(dst, (st.st_atime, st.st_mtime))
except:
print "copy stats for", src, "failed!"
def getSize(path, pattern=".*"):
path_size = 0
if os.path.isdir(path):
files = crawlDirectory(path, pattern)
for file in files:
filepath = os.path.join(file[0], file[1])
path_size += os.path.getsize(filepath)
elif os.path.isfile(path):
path_size = os.path.getsize(path)
return path_size
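# Illustrative usage only (assumes a running Enigma2 environment where the
# default paths above exist; the file and recording names are made up):
def _exampleUsage():
	playlist_dir = resolveFilename(SCOPE_PLAYLIST)                # created on demand
	skin_file = resolveFilename(SCOPE_CURRENT_SKIN, "skin.xml")   # falls back to the config/skin dirs
	recording = getRecordingFilename("News: 20/04")               # '/' and ':' are replaced by '_'
	return playlist_dir, skin_file, recording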
# ===== fossfreedom/alternative-toolbar :: alttoolbar_sidebar.py (license: gpl-3.0) =====
# -*- Mode: python; coding: utf-8; tab-width: 4; indent-tabs-mode: nil; -*-
#
# Copyright (C) 2015 - 2020 David Mohammed <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
import gettext
from alttoolbar_controller import AltControllerCategory
from alttoolbar_preferences import CoverLocale
from alttoolbar_preferences import GSetting
from gi.repository import GLib
from gi.repository import GObject
from gi.repository import Gdk
from gi.repository import Gio
from gi.repository import Gtk
from gi.repository import Pango
from gi.repository import RB
class AltToolbarSidebar(Gtk.TreeView):
expanders = GObject.property(type=str, default='{1:True}')
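    # 'expanders' persists (via GSettings, see the bind() call in __init__) a
    # repr() of a {category: bool} dict recording which sidebar headers were
    # expanded in the previous session.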
def __init__(self, toolbar, rbtree):
"""
Initialises the object.
"""
super(AltToolbarSidebar, self).__init__()
self.shell = toolbar.shell
self.toolbar = toolbar
self.plugin = toolbar.plugin
self.rbtree = rbtree
self._drag_dest_source = None
self._drag_motion_counter = -1
# locale stuff
cl = CoverLocale()
cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
self.set_name("AltToolbarSideBar")
self._category = {}
self._last_click_source = None
self._user_clicked = False
gs = GSetting()
plugin_settings = gs.get_setting(gs.Path.PLUGIN)
plugin_settings.bind(gs.PluginKey.EXPANDERS, self, 'expanders',
Gio.SettingsBindFlags.DEFAULT)
# title, source, visible
self.treestore = Gtk.TreeStore.new([str, GObject.Object, bool])
self.treestore_filter = self.treestore.filter_new(root=None)
self.treestore_filter.set_visible_column(2)
self.set_model(self.treestore_filter)
context = self.get_style_context()
context.add_class(Gtk.STYLE_CLASS_SIDEBAR)
self.set_headers_visible(False)
# define the headers - not visible by default
def define_category(text, category):
local = self.treestore.append(None)
self.treestore[local] = [text, None, False]
self._category[category] = local
define_category(_("Local collection"), AltControllerCategory.LOCAL)
define_category(_("Online sources"), AltControllerCategory.ONLINE)
define_category(_("Other sources"), AltControllerCategory.OTHER)
define_category(_("Playlists"), AltControllerCategory.PLAYLIST)
def delayed(*args):
model = self.shell.props.display_page_model
rootiter = model.get_iter_first()
depth = 0
self._traverse_rows(model, rootiter, None, depth)
# switch on/off headers depending upon what's in the model
self._refresh_headers()
# tidy up syncing by connecting signals
self._connect_signals()
# now expand or collapse each expander that we have saved from a
# previous session
expanders = eval(self.expanders)
print(expanders)
print(self.expanders)
for category in expanders:
print(category)
path = self.treestore.get_path(self._category[category])
if path and expanders[category]:
# self._user_clicked = True
self.expand_row(path, False) # expanders[category])
return False
GLib.timeout_add_seconds(1, delayed)
column = Gtk.TreeViewColumn.new()
column.set_fixed_width(5)
column.set_sizing(Gtk.TreeViewColumnSizing.FIXED)
self.append_column(column)
column = Gtk.TreeViewColumn.new()
pixbuf_renderer = Gtk.CellRendererPixbuf()
column.pack_start(pixbuf_renderer, False)
renderer = Gtk.CellRendererText()
renderer.connect('edited', self.on_renderertext_edited)
self.text_renderer = renderer
column.pack_start(renderer, False)
column.set_cell_data_func(pixbuf_renderer, self._set_pixbuf)
column.set_cell_data_func(renderer, self._set_text)
self.tree_column = column
self.append_column(column)
self.set_expander_column(column)
self.show_all()
self.set_can_focus(True)
cl = CoverLocale()
cl.switch_locale(cl.Locale.RB)
def _connect_signals(self):
# display_page_model signals to keep the sidebar model in sync
model = self.shell.props.display_page_model
self._cpi = model.connect('page-inserted', self._model_page_inserted)
self._crd = model.connect('row-deleted', self._model_page_deleted)
# self._crc = model.connect('row-changed', self._model_page_changed)
# when we click on the sidebar -
# need to keep the display_page_tree in sync
self.connect('button-press-event', self._row_click)
        # and vice versa
tree = self.shell.props.display_page_tree
tree.props.model.connect('row-inserted', self._tree_inserted)
tree.connect('selected',
self._display_page_tree_selected)
self.shell.props.shell_player.connect('playing-song-changed',
self._on_playing_song_changed)
# drag drop
self.enable_model_drag_dest([], Gdk.DragAction.COPY)
self.drag_dest_add_uri_targets()
self.connect('drag-drop', self.on_drag_drop)
self.connect('drag-data-received',
self.on_drag_data_received)
self.connect('drag-motion', self.on_drag_motion)
def cleanup(self):
model = self.shell.props.display_page_model
model.disconnect(self._cpi)
model.disconnect(self._crd)
# model.disconnect(self._crc)
def on_drag_drop(self, widget, context, x, y, time):
"""
Callback called when a drag operation finishes over the treeview
It decides if the dropped item can be processed.
"""
print("on_drag_drop")
# stop the propagation of the signal (deactivates superclass callback)
widget.stop_emission_by_name('drag-drop')
target = self.drag_dest_find_target(context, None)
widget.drag_get_data(context, target, time)
self._drag_dest_source = None
return True
def on_drag_motion(self, widget, drag_context, x, y, time):
path = False
try:
path, pos = widget.get_dest_row_at_pos(x, y)
except:
pass
result = False
if path and (
pos == Gtk.TreeViewDropPosition.BEFORE or pos == Gtk.TreeViewDropPosition.AFTER):
if pos == Gtk.TreeViewDropPosition.BEFORE:
drop_pos = Gtk.TreeViewDropPosition.INTO_OR_BEFORE
else:
drop_pos = Gtk.TreeViewDropPosition.INTO_OR_AFTER
widget.set_drag_dest_row(None, drop_pos)
# Gdk.drag_status(drag_context, 0, time)
path = None
if path:
dest_source = self.treestore_filter[path][1]
try:
# note - some sources dont have a can_paste method so need to
# trap this case
if not dest_source:
result = False
elif dest_source.can_paste():
result = True
except:
result = False
if dest_source and result:
if dest_source != self._drag_dest_source:
if self._drag_motion_counter != -1:
self._drag_motion_counter = 0
self._drag_dest_source = dest_source
def delayed(*args):
if self._drag_motion_counter < 2 and \
self._drag_dest_source:
self._drag_motion_counter += 1
return True
if self._drag_dest_source \
and self._drag_motion_counter >= 2:
tree = self.shell.props.display_page_tree
if tree:
tree.select(self._drag_dest_source)
self.rbtree.expand_all()
self._drag_motion_counter = -1
return False
if self._drag_motion_counter == -1:
self._drag_motion_counter = 0
GLib.timeout_add_seconds(1, delayed)
if result:
Gdk.drag_status(drag_context, Gdk.DragAction.COPY, time)
else:
Gdk.drag_status(drag_context, 0, time)
self._drag_dest_source = None
return not result
def on_drag_data_received(self, widget, drag_context, x, y, data, info,
time):
"""
Callback called when the drag source has prepared the data (pixbuf)
for us to use.
"""
print("on_drag_data_received")
# stop the propagation of the signal (deactivates superclass callback)
widget.stop_emission_by_name('drag-data-received')
path, pos = widget.get_dest_row_at_pos(x, y)
dest_source = self.treestore_filter[path][1]
drag_context.finish(True, False, time)
uris = data.get_uris()
entries = []
for uri in uris:
entry = self.shell.props.db.entry_lookup_by_location(uri)
if entry:
entries.append(entry)
dest_source.paste(entries)
def _on_playing_song_changed(self, *args):
"""
signal when a playing song changes - need to invoke a tree-refresh
to ensure the user can see which source
:param args:
:return:
"""
print("playing song changed")
        if hasattr(self.plugin, "db"): # curious crash when exiting - let's not
# send the queue_draw in this case
print("queuing")
self.queue_draw()
def on_renderertext_edited(self, renderer, path, new_text):
print("edited")
print(path)
print(new_text)
self.treestore_filter[path][1].props.name = new_text
def _traverse_rows(self, store, treeiter, new_parent_iter, depth):
while treeiter is not None:
# print(depth, store[treeiter][1])
# print(depth, store[treeiter][1].props.name)
if isinstance(store[treeiter][1], RB.DisplayPageGroup):
if store.iter_has_child(treeiter):
childiter = store.iter_children(treeiter)
self._traverse_rows(store, childiter, treeiter, depth)
treeiter = store.iter_next(treeiter)
continue
if depth == 0:
category_iter = self._get_category_iter(store[treeiter][1])
leaf_iter = self.treestore.append(category_iter)
else:
leaf_iter = self.treestore.append(new_parent_iter)
self.treestore[leaf_iter][1] = store[treeiter][1]
self.treestore[leaf_iter][0] = ""
self.treestore[leaf_iter][2] = True
if store.iter_has_child(treeiter):
childiter = store.iter_children(treeiter)
self._traverse_rows(store, childiter, leaf_iter, depth + 1)
treeiter = store.iter_next(treeiter)
# def _model_page_changed(self, model, path, page_iter):
# print(model[page_iter][1].props.name)
# print(path)
# # self._model_page_inserted(model, path, page_iter)
def _tree_inserted(self, model, path, page_iter):
print(path)
print(page_iter)
print(model[path][1].props.name)
print(model[path][1])
self._model_page_inserted(model, model[path][1], page_iter)
def _model_page_inserted(self, model, page, page_iter):
if page and not page.props.visibility:
return # we don't display sources that are marked as hidden
print(page)
print(page_iter)
parent_iter = model.iter_parent(page_iter)
print(parent_iter)
def find_lookup_rows(store, treeiter, page):
while treeiter is not None:
found_page = store[treeiter][1]
print(found_page)
if found_page is not None and found_page == page:
# print("found %s" % found_page.props.name)
return treeiter
if store.iter_has_child(treeiter):
childiter = store.iter_children(treeiter)
ret = find_lookup_rows(store, childiter, page)
if ret:
return ret
treeiter = store.iter_next(treeiter)
print("nothing found")
return None
# first check if we've already got the page in the model
rootiter = self.treestore.get_iter_first()
if find_lookup_rows(self.treestore, rootiter, page):
return
if (parent_iter and isinstance(model[parent_iter][1],
RB.DisplayPageGroup)) or \
not parent_iter:
# the parent of the inserted row is a top-level item in the
# display-page-model
# print("top level")
category_iter = self._get_category_iter(page)
leaf_iter = self.treestore.append(category_iter)
else:
# the parent is another source so we need to find the iter in our
# model to hang it off
# print("child level")
searchpage = model[parent_iter][1]
# print("####", searchpage)
leaf_iter = find_lookup_rows(self.treestore, rootiter, searchpage)
# print("##2", leaf_iter)
leaf_iter = self.treestore.append(leaf_iter)
self.treestore[leaf_iter][1] = page
self.treestore[leaf_iter][0] = ""
self.treestore[leaf_iter][2] = True
self._refresh_headers()
if "PlaylistSource" in type(page).__name__:
            # a playlist of some sort has been added - so let's put the user into
# edit mode
self.edit_playlist(leaf_iter)
self.rbtree.expand_all()
def edit_playlist(self, leaf_iter):
"""
edit the playlist
:param leaf_iter: treestore iter
:return:
"""
print("edit_playlist")
self.text_renderer.props.editable = True
path = self.treestore.get_path(leaf_iter)
path = self.treestore_filter.convert_child_path_to_path(path)
print(path)
self.grab_focus()
def delayed(*args):
self.set_cursor_on_cell(path,
self.tree_column, self.text_renderer, True)
GLib.timeout_add_seconds(1, delayed, None)
def _model_page_deleted(self, model, path):
"""
        signal from the displaytreemodel - we don't actually know what is
deleted ... just that something has been
:param model:
:param path:
:return:
"""
# first do a reverse lookup so that we can search quicker later
# dict of sources in the sidebar model with their treeiter
lookup = {}
rootiter = self.treestore.get_iter_first()
def find_lookup_rows(store, treeiter):
while treeiter is not None:
# if store[treeiter][0] == "":
# lookup[store[treeiter][1]] = treeiter
if store[treeiter][1] is not None:
lookup[store[treeiter][1]] = treeiter
if store.iter_has_child(treeiter):
childiter = store.iter_children(treeiter)
find_lookup_rows(store, childiter)
treeiter = store.iter_next(treeiter)
find_lookup_rows(self.treestore, rootiter)
# next iterate through the displaytreemodel - where we have a matching
# source, delete it from our lookup
def find_rows(store, treeiter):
while treeiter is not None:
if store[treeiter][1] in lookup:
del lookup[store[treeiter][1]]
if store.iter_has_child(treeiter):
childiter = store.iter_children(treeiter)
find_rows(store, childiter)
treeiter = store.iter_next(treeiter)
rootiter = model.get_iter_first()
find_rows(model, rootiter)
        # whatever is left is the stuff to remove from our treeview
# (treestore)
for source in lookup:
self.treestore.remove(lookup[source])
self._refresh_headers()
def _row_click(self, widget, event):
"""
event called when clicking on a row
"""
print('_row_click')
try:
treepath, treecolumn, cellx, celly = \
widget.get_path_at_pos(event.x, event.y)
except:
print("exit")
return
active_object = self.treestore_filter[treepath][1]
print(active_object)
if active_object:
# we have a source
self._user_clicked = True
self.shell.props.display_page_tree.select(active_object)
self.rbtree.expand_all()
if self._last_click_source == active_object:
self.text_renderer.props.editable = \
"PlaylistSource" in type(active_object).__name__
else:
self.text_renderer.props.editable = False
self._last_click_source = active_object
def delayed(*args):
# save current state of each category in the treeview
cat_vals = {}
for category in self._category:
path = self.treestore.get_path(self._category[category])
if path:
cat_vals[category] = self.row_expanded(path)
self.expanders = str(cat_vals)
print(self.expanders)
GLib.timeout_add_seconds(1, delayed)
def _display_page_tree_selected(self, display_page_tree, page):
"""
signal from when a page is selected in the display-page-tree -
we need to sync with our tree
:param display_page_tree:
:param page:
:return:
"""
if self._user_clicked:
self._user_clicked = False
return
# first do a reverse lookup so that we can search quicker later
# dict of sources in the sidebar model with their treeiter
lookup = {}
rootiter = self.treestore_filter.get_iter_first()
def find_lookup_rows(store, treeiter):
while treeiter is not None:
if store[treeiter][1] is not None:
lookup[store[treeiter][1]] = treeiter
print(store[treeiter][1].props.name)
if store.iter_has_child(treeiter):
childiter = store.iter_children(treeiter)
find_lookup_rows(store, childiter)
treeiter = store.iter_next(treeiter)
find_lookup_rows(self.treestore_filter, rootiter)
if page in lookup:
path = self.treestore_filter.get_path(lookup[page])
self.expand_to_path(path)
self.set_cursor(path)
def _set_text(self, column, renderer, model, treeiter, arg):
if treeiter is None:
return
if model is None:
return
source = model[treeiter][1]
if source is None:
renderer.props.weight = Pango.Weight.BOLD
renderer.props.text = model[treeiter][0]
print(renderer.props.text)
renderer.props.visible = model[treeiter][2]
else:
renderer.props.visible = True
player = self.shell.props.shell_player
playing = \
player.get_playing and player.get_playing_source() == source
if (source.props.name):
cl = CoverLocale()
cl.switch_locale(cl.Locale.LOCALE_DOMAIN)
translation = gettext.gettext(source.props.name)
cl.switch_locale(cl.Locale.RB)
renderer.props.text = translation
else:
renderer.props.text = ""
if playing:
renderer.props.weight = Pango.Weight.BOLD
else:
renderer.props.weight = Pango.Weight.NORMAL
renderer.props.ypad = 3
path = model.get_path(treeiter)
if path.get_depth() == 1:
renderer.props.ypad = 6
renderer.props.xpad = 3
else:
renderer.props.ypad = 3
renderer.props.xpad = 0
renderer.props.ellipsize = Pango.EllipsizeMode.END
def _refresh_headers(self):
treeiter = self.treestore.get_iter_first()
while treeiter is not None:
self.treestore[treeiter][2] = \
self.treestore.iter_has_child(treeiter)
treeiter = self.treestore.iter_next(treeiter)
def _set_pixbuf(self, column, renderer, model, treeiter, arg):
source = model[treeiter][1]
if source is None:
renderer.props.pixbuf = None
else:
ret_bool, controller = self.toolbar.is_controlled(source)
renderer.props.gicon = controller.get_gicon(source)
renderer.props.follow_state = True
path = model.get_path(treeiter)
if path.get_depth() == 2:
renderer.props.visible = True # must be a child so show the
# pixbuf renderer
else:
renderer.props.visible = False # headers or children of child
# dont have pixbuf's so no renderer to display
renderer.props.xpad = 3
def _get_category_iter(self, source):
ret_bool, controller = self.toolbar.is_controlled(source)
category = AltControllerCategory.OTHER
if ret_bool:
category = controller.get_category()
return self._category[category]
# ===== ebt-hpc/cca :: cca/scripts/outlining_queries_fortran.py (license: apache-2.0) =====
#!/usr/bin/env python3
'''
A script for outlining Fortran programs
Copyright 2013-2018 RIKEN
Copyright 2018-2019 Chiba Institute of Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
__author__ = 'Masatomo Hashimoto <[email protected]>'
from ns import NS_TBL
OMITTED = ['execution-part','do-block']
SUBPROGS = set([
'subroutine-external-subprogram',
'subroutine-internal-subprogram',
'subroutine-module-subprogram',
'function-external-subprogram',
'function-internal-subprogram',
'function-module-subprogram',
])
LOOPS = set(['do-construct','do-stmt','end-do-stmt','do-block'])
CALLS = set(['call-stmt','function-reference','part-name','call-stmt*','mpi-call'])
TYPE_TBL = { # cat -> type
'file' : 'file',
'do-construct' : 'loop',
'if-construct' : 'branch',
'case-construct' : 'branch',
'select-type-construct' : 'branch',
'where-construct' : 'branch',
'call-stmt' : 'call',
'function-reference' : 'call',
'part-name' : 'call',
'main-program' : 'main',
'subroutine-external-subprogram' : 'subroutine',
'subroutine-internal-subprogram' : 'subroutine',
'subroutine-module-subprogram' : 'subroutine',
'function-external-subprogram' : 'function',
'function-internal-subprogram' : 'function',
'function-module-subprogram' : 'function',
'execution-part' : 'part',
'if-then-block' : 'block',
'else-if-block' : 'block',
'else-block' : 'block',
'case-block' : 'block',
'type-guard-block' : 'block',
'where-block' : 'block',
'do-block' : 'block',
'block-construct' : 'block',
'pp-branch' : 'pp',
'pp-branch-do' : 'pp',
'pp-branch-end-do' : 'pp',
'pp-branch-if' : 'pp',
'pp-branch-end-if' : 'pp',
'pp-branch-forall' : 'pp',
'pp-branch-end-forall' : 'pp',
'pp-branch-select' : 'pp',
'pp-branch-end-select' : 'pp',
'pp-branch-where' : 'pp',
'pp-branch-end-where' : 'pp',
'pp-branch-pu' : 'pp',
'pp-branch-end-pu' : 'pp',
'pp-branch-function' : 'pp',
'pp-branch-end-function' : 'pp',
'pp-branch-subroutine' : 'pp',
'pp-branch-end-subroutine' : 'pp',
'pp-section-elif' : 'pp',
'pp-section-else' : 'pp',
'pp-section-if' : 'pp',
'pp-section-ifdef' : 'pp',
'pp-section-ifndef' : 'pp',
'mpi-call' : 'mpi',
'call-stmt*' : 'call*'
}
Q_AA_IN_LOOP_F = '''DEFINE input:inference "ont.cpi"
PREFIX f: <%(f_ns)s>
PREFIX ver: <%(ver_ns)s>
PREFIX src: <%(src_ns)s>
SELECT DISTINCT ?ver ?loc ?pu_name ?vpu_name ?loop ?aa ?pn ?dtor ?dtor_loc
WHERE {
GRAPH <%%(proj)s> {
{
SELECT DISTINCT ?pn ?aa ?loop ?pu_name ?vpu_name ?loc ?ver
WHERE {
?pn a f:PartName ;
src:parent ?aa .
?aa a f:ArrayAccess ;
f:inDoConstruct ?loop .
?loop a f:DoConstruct ;
f:inProgramUnitOrSubprogram ?pu_or_sp ;
f:inProgramUnit ?pu .
?pu_or_sp src:inFile/src:location ?loc .
?pu a f:ProgramUnit ;
src:inFile/src:location ?pu_loc ;
ver:version ?ver .
OPTIONAL {
?pu f:name ?pu_name
}
OPTIONAL {
?pu f:includedInProgramUnit ?vpu .
?vpu f:name ?vpu_name .
}
} GROUP BY ?pn ?aa ?loop ?pu_name ?vpu_name ?loc ?ver
}
OPTIONAL {
?pn f:declarator ?dtor .
?dtor a f:Declarator ;
f:inProgramUnitOrFragment/src:inFile ?dtor_file .
?dtor_file a src:File ;
src:location ?dtor_loc ;
ver:version ?ver .
}
}
}
''' % NS_TBL
Q_OTHER_CALLS_F = '''DEFINE input:inference "ont.cpi"
PREFIX f: <%(f_ns)s>
PREFIX ver: <%(ver_ns)s>
PREFIX src: <%(src_ns)s>
SELECT DISTINCT ?ver ?loc ?pu_name ?vpu_name ?sp ?sp_cat ?sub ?main ?prog ?call ?callee_name ?constr
WHERE {
GRAPH <%%(proj)s> {
{
SELECT DISTINCT ?ver ?loc ?pu ?pu_name ?vpu_name ?call ?callee_name
WHERE {
?call a f:CallStmt ;
f:name ?callee_name ;
f:inProgramUnitOrSubprogram ?pu_or_sp ;
f:inProgramUnit ?pu .
?pu_or_sp src:inFile/src:location ?loc .
?pu a f:ProgramUnit ;
src:inFile/src:location ?pu_loc ;
ver:version ?ver .
OPTIONAL {
?pu f:name ?pu_name
}
OPTIONAL {
?pu f:includedInProgramUnit ?vpu .
?vpu f:name ?vpu_name .
}
FILTER NOT EXISTS {
?call f:mayCall ?callee .
}
} GROUP BY ?ver ?loc ?pu ?pu_name ?vpu_name ?call ?callee_name
}
OPTIONAL {
?call f:inSubprogram ?sp .
?sp a f:Subprogram ;
a ?sp_cat0 OPTION (INFERENCE NONE) ;
f:name ?sub .
FILTER NOT EXISTS {
?call f:inSubprogram ?sp0 .
?sp0 f:inSubprogram ?sp .
FILTER (?sp != ?sp0)
}
GRAPH <http://codinuum.com/ont/cpi> {
?sp_cat0 rdfs:label ?sp_cat
}
}
OPTIONAL {
?call f:inContainerUnit ?constr .
?constr a f:ContainerUnit .
FILTER EXISTS {
{
?constr f:inProgramUnit ?pu .
FILTER NOT EXISTS {
?call f:inSubprogram/f:inContainerUnit ?constr .
}
}
UNION
{
?call f:inSubprogram ?sp0 .
?constr f:inSubprogram ?sp0 .
}
}
FILTER NOT EXISTS {
?c a f:ContainerUnit ;
f:inContainerUnit ?constr .
?call f:inContainerUnit ?c .
FILTER (?c != ?constr)
}
}
OPTIONAL {
?call f:inMainProgram ?main .
?main a f:MainProgram .
OPTIONAL {
?main f:name ?prog .
}
}
}
}
''' % NS_TBL
Q_DIRECTIVES_F = '''DEFINE input:inference "ont.cpi"
PREFIX f: <%(f_ns)s>
PREFIX ver: <%(ver_ns)s>
PREFIX src: <%(src_ns)s>
SELECT DISTINCT ?ver ?loc ?pu_name ?vpu_name ?sp ?sp_cat ?sub ?main ?prog ?dtv ?cat ?constr
WHERE {
GRAPH <%%(proj)s> {
{
SELECT DISTINCT ?ver ?loc ?pu ?pu_name ?vpu_name ?dtv ?cat
WHERE {
?dtv a f:CompilerDirective ;
a ?cat0 OPTION (INFERENCE NONE) ;
f:inProgramUnitOrSubprogram ?pu_or_sp ;
f:inProgramUnit ?pu .
?pu_or_sp src:inFile/src:location ?loc .
?pu a f:ProgramUnit ;
src:inFile/src:location ?pu_loc ;
ver:version ?ver .
OPTIONAL {
?pu f:name ?pu_name
}
OPTIONAL {
?pu f:includedInProgramUnit ?vpu .
?vpu f:name ?vpu_name .
}
GRAPH <http://codinuum.com/ont/cpi> {
?cat0 rdfs:label ?cat .
}
} GROUP BY ?ver ?loc ?pu ?pu_name ?vpu_name ?dtv ?cat
}
OPTIONAL {
?dtv f:inSubprogram ?sp .
?sp a f:Subprogram ;
a ?sp_cat0 OPTION (INFERENCE NONE) ;
f:name ?sub .
FILTER NOT EXISTS {
?dtv f:inSubprogram ?sp0 .
?sp0 f:inSubprogram ?sp .
FILTER (?sp != ?sp0)
}
GRAPH <http://codinuum.com/ont/cpi> {
?sp_cat0 rdfs:label ?sp_cat
}
}
OPTIONAL {
?dtv f:inContainerUnit ?constr .
?constr a f:ContainerUnit .
FILTER EXISTS {
{
?constr f:inProgramUnit ?pu .
FILTER NOT EXISTS {
?dtv f:inSubprogram/f:inContainerUnit ?constr .
}
}
UNION
{
?dtv f:inSubprogram ?sp0 .
?constr f:inSubprogram ?sp0 .
}
}
FILTER NOT EXISTS {
?c a f:ContainerUnit ;
f:inContainerUnit ?constr .
?dtv f:inContainerUnit ?c .
FILTER (?c != ?constr)
}
}
OPTIONAL {
?dtv f:inMainProgram ?main .
?main a f:MainProgram .
OPTIONAL {
?main f:name ?prog .
}
}
}
}
''' % NS_TBL
Q_CONSTR_CONSTR_F = '''DEFINE input:inference "ont.cpi"
PREFIX f: <%(f_ns)s>
PREFIX ver: <%(ver_ns)s>
PREFIX src: <%(src_ns)s>
SELECT DISTINCT ?ver ?loc ?pu_name ?vpu_name ?sp ?sp_cat ?sub ?main ?prog
?constr ?cat
?parent_constr ?parent_cat ?parent_sub ?parent_prog ?parent_pu_name ?parent_vpu_name
WHERE {
GRAPH <%%(proj)s> {
{
SELECT DISTINCT ?ver ?loc ?pu ?pu_name ?vpu_name ?constr
WHERE {
?constr a f:ContainerUnit ;
f:inProgramUnitOrSubprogram ?pu_or_sp ;
f:inProgramUnit ?pu .
?pu_or_sp src:inFile/src:location ?loc .
?pu a f:ProgramUnit ;
src:inFile/src:location ?pu_loc ;
ver:version ?ver .
OPTIONAL {
?pu f:name ?pu_name
}
OPTIONAL {
?pu f:includedInProgramUnit ?vpu .
?vpu f:name ?vpu_name .
}
} GROUP BY ?ver ?loc ?pu ?pu_name ?vpu_name ?constr
}
OPTIONAL {
SELECT DISTINCT ?constr (GROUP_CONCAT(DISTINCT ?c; SEPARATOR="&") AS ?cat)
WHERE {
?constr a ?cat0 OPTION (INFERENCE NONE) .
GRAPH <http://codinuum.com/ont/cpi> {
?cat0 rdfs:label ?c .
}
} GROUP BY ?constr
}
OPTIONAL {
?constr f:inSubprogram ?sp .
?sp a f:Subprogram ;
a ?sp_cat0 OPTION (INFERENCE NONE) ;
f:name ?sub .
FILTER NOT EXISTS {
?constr f:inSubprogram ?sp0 .
?sp0 f:inSubprogram ?sp .
FILTER (?sp != ?sp0)
}
# FILTER NOT EXISTS {
# ?constr f:inMainProgram ?m0 .
# ?m0 f:inContainerUnit ?parent_constr .
# FILTER (?m0 != ?constr && ?m0 != ?parent_constr)
# }
GRAPH <http://codinuum.com/ont/cpi> {
?sp_cat0 rdfs:label ?sp_cat .
}
}
OPTIONAL {
?constr f:inMainProgram ?main .
?main a f:MainProgram .
OPTIONAL {
?main f:name ?prog .
}
}
OPTIONAL {
?constr f:inContainerUnit ?parent_constr .
?parent_constr a f:ContainerUnit .
FILTER (?constr != ?parent_constr)
FILTER NOT EXISTS {
?constr f:inContainerUnit ?p0 .
?p0 a f:ContainerUnit ;
f:inContainerUnit ?parent_constr .
FILTER (?p0 != ?constr && ?p0 != ?parent_constr)
}
FILTER NOT EXISTS {
?constr f:inSubprogram ?sp0 .
?sp0 f:inContainerUnit ?parent_constr .
FILTER (?sp0 != ?constr && ?sp0 != ?parent_constr)
}
{
SELECT DISTINCT ?parent_constr (GROUP_CONCAT(DISTINCT ?c0; SEPARATOR="&") AS ?parent_cat)
WHERE {
?parent_constr a ?parent_cat0 OPTION (INFERENCE NONE) .
GRAPH <http://codinuum.com/ont/cpi> {
?parent_cat0 rdfs:label ?c0 .
}
} GROUP BY ?parent_constr
}
OPTIONAL {
?parent_constr f:inProgramUnit ?parent_pu .
?parent_pu f:name ?parent_pu_name .
}
OPTIONAL {
?parent_constr f:inProgramUnit/f:includedInProgramUnit ?parent_vpu .
?parent_vpu f:name ?parent_vpu_name .
}
OPTIONAL {
?parent_constr f:inMainProgram ?parent_main .
?parent_main a f:MainProgram .
OPTIONAL {
?parent_main f:name ?parent_prog .
}
}
OPTIONAL {
?parent_constr f:inSubprogram ?parent_sp .
?parent_sp a f:Subprogram ;
f:name ?parent_sub .
FILTER NOT EXISTS {
?parent_constr f:inSubprogram ?sp0 .
?sp0 f:inSubprogram ?parent_sp .
FILTER (?parent_sp != ?sp0)
}
}
}
}
}
''' % NS_TBL
Q_CONSTR_SP_F = '''DEFINE input:inference "ont.cpi"
PREFIX f: <%(f_ns)s>
PREFIX ver: <%(ver_ns)s>
PREFIX src: <%(src_ns)s>
SELECT DISTINCT ?ver ?loc ?pu_name ?vpu_name ?sp ?sp_cat ?sub ?main ?prog
?constr ?cat ?call ?call_cat
?callee ?callee_name ?callee_loc ?callee_cat ?callee_pu_name
WHERE {
GRAPH <%%(proj)s> {
{
SELECT DISTINCT ?ver ?loc ?pu_name ?vpu_name ?constr ?callee ?cat ?call ?call_cat
WHERE {
?call a ?call_cat0 OPTION (INFERENCE NONE) ;
f:inContainerUnit ?constr ;
f:mayCall ?callee .
FILTER (?call_cat0 IN (f:CallStmt, f:FunctionReference, f:PartName))
?constr a f:ContainerUnit ;
a ?cat0 OPTION (INFERENCE NONE) ;
f:inProgramUnitOrSubprogram ?pu_or_sp ;
f:inProgramUnit ?pu .
?pu_or_sp src:inFile/src:location ?loc .
FILTER NOT EXISTS {
?c a f:ContainerUnit ;
f:inContainerUnit+ ?constr .
?call f:inContainerUnit+ ?c .
FILTER (?c != ?constr)
}
?pu a f:ProgramUnit ;
ver:version ?ver ;
src:inFile/src:location ?pu_loc .
OPTIONAL {
?pu f:name ?pu_name
}
OPTIONAL {
?pu f:includedInProgramUnit ?vpu .
?vpu f:name ?vpu_name .
}
GRAPH <http://codinuum.com/ont/cpi> {
?cat0 rdfs:label ?cat .
?call_cat0 rdfs:label ?call_cat .
}
} GROUP BY ?ver ?loc ?pu_name ?vpu_name ?constr ?callee ?cat ?call ?call_cat
}
{
SELECT DISTINCT ?callee ?callee_cat ?callee_loc ?ver ?callee_pu_name
(GROUP_CONCAT(DISTINCT ?cn; SEPARATOR=":") AS ?callee_name)
WHERE {
?callee a f:Subprogram ;
a ?callee_cat0 OPTION (INFERENCE NONE) ;
f:name ?cn ;
src:inFile ?callee_file .
?callee_file a src:File ;
src:location ?callee_loc ;
ver:version ?ver .
GRAPH <http://codinuum.com/ont/cpi> {
?callee_cat0 rdfs:label ?callee_cat
}
OPTIONAL {
?callee f:inProgramUnit/f:name ?callee_pu_name .
}
} GROUP BY ?callee ?callee_cat ?callee_loc ?ver ?callee_pu_name
}
OPTIONAL {
?constr f:inSubprogram ?sp .
?sp a f:Subprogram ;
a ?sp_cat0 OPTION (INFERENCE NONE) ;
f:name ?sub .
FILTER NOT EXISTS {
?constr f:inSubprogram ?sp0 .
?sp0 f:inSubprogram ?sp .
FILTER (?sp != ?sp0)
}
GRAPH <http://codinuum.com/ont/cpi> {
?sp_cat0 rdfs:label ?sp_cat
}
}
OPTIONAL {
?constr f:inMainProgram ?main .
?main a f:MainProgram .
OPTIONAL {
?main f:name ?prog .
}
}
}
}
''' % NS_TBL
Q_SP_SP_F = '''DEFINE input:inference "ont.cpi"
PREFIX f: <%(f_ns)s>
PREFIX ver: <%(ver_ns)s>
PREFIX src: <%(src_ns)s>
SELECT DISTINCT ?ver ?loc ?pu_name ?vpu_name ?sp ?sp_cat ?sub ?main ?prog
?callee ?callee_name ?callee_loc ?callee_cat ?call ?call_cat ?constr ?callee_pu_name
WHERE {
GRAPH <%%(proj)s> {
{
SELECT DISTINCT ?ver ?loc ?pu ?pu_name ?vpu_name ?callee ?call ?call_cat
WHERE {
?call a ?call_cat0 OPTION (INFERENCE NONE) ;
f:inProgramUnitOrSubprogram ?pu_or_sp ;
f:inProgramUnit ?pu ;
f:mayCall ?callee .
?pu_or_sp src:inFile/src:location ?loc .
?pu a f:ProgramUnit ;
src:inFile/src:location ?pu_loc ;
ver:version ?ver .
OPTIONAL {
?pu f:name ?pu_name
}
OPTIONAL {
?pu f:includedInProgramUnit ?vpu .
?vpu f:name ?vpu_name .
}
FILTER (?call_cat0 IN (f:CallStmt, f:FunctionReference, f:PartName))
FILTER NOT EXISTS {
?call f:inContainerUnit [] .
}
GRAPH <http://codinuum.com/ont/cpi> {
?call_cat0 rdfs:label ?call_cat .
}
} GROUP BY ?ver ?loc ?pu ?pu_name ?vpu_name ?callee ?call ?call_cat
}
{
SELECT DISTINCT ?callee ?callee_cat ?callee_loc ?ver ?callee_pu_name
(GROUP_CONCAT(DISTINCT ?cn; SEPARATOR=":") AS ?callee_name)
WHERE {
?callee a f:Subprogram ;
a ?callee_cat0 OPTION (INFERENCE NONE) ;
f:name ?cn ;
src:inFile ?callee_file .
?callee_file a src:File ;
src:location ?callee_loc ;
ver:version ?ver .
GRAPH <http://codinuum.com/ont/cpi> {
?callee_cat0 rdfs:label ?callee_cat
}
OPTIONAL {
?callee f:inProgramUnit/f:name ?callee_pu_name .
}
} GROUP BY ?callee ?callee_cat ?callee_loc ?ver ?callee_pu_name
}
OPTIONAL {
?call f:inSubprogram ?sp .
?sp a f:Subprogram ;
a ?sp_cat0 OPTION (INFERENCE NONE) ;
f:name ?sub .
FILTER NOT EXISTS {
?call f:inSubprogram ?sp0 .
?sp0 f:inSubprogram ?sp .
FILTER (?sp != ?sp0)
}
GRAPH <http://codinuum.com/ont/cpi> {
?sp_cat0 rdfs:label ?sp_cat
}
}
OPTIONAL {
?call f:inMainProgram ?main .
?main a f:MainProgram .
OPTIONAL {
?main f:name ?prog .
}
}
}
}
''' % NS_TBL
Q_CONSTR_QSPN_F = '''DEFINE input:inference "ont.cpi"
PREFIX f: <%(f_ns)s>
PREFIX ver: <%(ver_ns)s>
PREFIX src: <%(src_ns)s>
SELECT DISTINCT ?ver ?loc ?pu_name ?vpu_name ?qspn ?constr
WHERE {
GRAPH <%%(proj)s> {
{
SELECT DISTINCT ?ver ?loc ?pu_name ?vpu_name ?sp0 ?constr
(GROUP_CONCAT(DISTINCT CONCAT(STR(?dist), ?n); SEPARATOR=",") AS ?qspn)
WHERE {
?constr a f:ContainerUnit ;
f:inSubprogram ?sp0 ;
f:inProgramUnit ?pu .
?sp0 src:inFile/src:location ?loc .
FILTER NOT EXISTS {
?constr f:inSubprogram/f:inSubprogram ?sp0 .
}
?pu a f:ProgramUnit ;
src:inFile/src:location ?pu_loc ;
ver:version ?ver .
OPTIONAL {
?pu f:name ?pu_name .
}
OPTIONAL {
?pu f:includedInProgramUnit ?vpu .
?vpu f:name ?vpu_name .
}
?sp0 a f:Subprogram ;
f:name ?sp0_name .
?spx f:name ?n .
{
SELECT ?x ?sp
WHERE {
?x a f:Subprogram ;
f:inSubprogram ?sp .
}
} OPTION(TRANSITIVE,
T_IN(?x),
T_OUT(?sp),
T_DISTINCT,
T_MIN(0),
T_NO_CYCLES,
T_STEP (?x) AS ?spx,
T_STEP ('step_no') AS ?dist
)
FILTER (?x = ?sp0)
} GROUP BY ?ver ?loc ?sp0 ?constr ?pu_name ?vpu_name
}
}
}
''' % NS_TBL
QUERY_TBL = {
'aa_in_loop' : Q_AA_IN_LOOP_F,
'other_calls' : Q_OTHER_CALLS_F,
'directives' : Q_DIRECTIVES_F,
'constr_constr' : Q_CONSTR_CONSTR_F,
'constr_sp' : Q_CONSTR_SP_F,
'sp_sp' : Q_SP_SP_F,
'constr_qspn' : Q_CONSTR_QSPN_F,
}
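    # Each entry above maps an analysis name to its SPARQL query template. The
    # query strings are already formatted with NS_TBL, so only the %(proj)s
    # placeholder (the target graph, presumably the project IRI) remains to be
    # filled in when the query is run.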
def get_root_entities(full=False):
s = set(['main-program'])
if full:
s |= set([
'subroutine-external-subprogram',
'subroutine-module-subprogram',
'function-external-subprogram',
'function-module-subprogram',
])
return s
| apache-2.0 | -5,018,884,159,477,743,000 | 23.583548 | 100 | 0.536547 | false |
kpbochenek/empireofcode | non_unique.py | 1 | 1121 | # [email protected]
def non_unique(data):
result = []
diff = 32
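    # 32 is the ASCII offset between upper- and lower-case letters; the elif
    # branches below use chr(ord(d) +/- 32) so case-insensitive duplicates are
    # also reported.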
for d in data:
if data.count(d) > 1:
result.append(d)
elif type(d) is str and data.count(d) + data.count(chr(ord(d) + diff)) > 1:
result.append(d)
elif type(d) is str and data.count(d) + data.count(chr(ord(d) - diff)) > 1:
result.append(d)
return result
if __name__ == "__main__":
# These "asserts" using only for self-checking and not necessary for auto-testing
# Rank 1
assert isinstance(non_unique([1]), list), "The result must be a list"
assert non_unique([1, 2, 3, 1, 3]) == [1, 3, 1, 3], "1st example"
assert non_unique([1, 2, 3, 4, 5]) == [], "2nd example"
assert non_unique([5, 5, 5, 5, 5]) == [5, 5, 5, 5, 5], "3rd example"
assert non_unique([10, 9, 10, 10, 9, 8]) == [10, 9, 10, 10, 9], "4th example"
# Rank 2
assert non_unique(['P', 7, 'j', 'A', 'P', 'N', 'Z', 'i',
'A', 'X', 'j', 'L', 'y', 's', 'K', 'g',
'p', 'r', 7, 'b']) == ['P', 7, 'j', 'A', 'P', 'A', 'j', 'p', 7], "Letters"
| apache-2.0 | -4,089,713,566,800,570,000 | 39.035714 | 97 | 0.484389 | false |
cloud9ers/j25framework | j25/loaders/reloader.py | 1 | 5047 | # Python Module Reloader
#
# Copyright (c) 2009, 2010 Jon Parise <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import Importer
import j25
import logging
import os
import sys
import threading
import time
try:
import queue
except ImportError:
#python 2.x
import Queue as queue
_win32 = (sys.platform == 'win32')
logger = logging.getLogger("Module Reloader")
class ModuleMonitor(threading.Thread):
"""Monitor module source file changes"""
def __init__(self, interval=1):
threading.Thread.__init__(self)
self.daemon = True
self.mtimes = {}
self.queue = queue.Queue()
self.interval = interval
self.is_running = True
def terminate(self):
self.is_running = False
def run(self):
while self.is_running:
self._scan()
time.sleep(self.interval)
logger.info("ModuleMonitor terminated")
def _scan(self):
# We're only interested in file-based modules (not C extensions).
# We are only interested in project files changes
modules = [m.__file__ for m in sys.modules.values()
if m and '__file__' in m.__dict__ and m.__file__.startswith(j25.project_directory)]
for filename in modules:
# We're only interested in the source .py files.
if filename.endswith('.pyc') or filename.endswith('.pyo'):
filename = filename[:-1]
# stat() the file. This might fail if the module is part of a
# bundle (.egg). We simply skip those modules because they're
# not really reloadable anyway.
try:
stat = os.stat(filename)
except OSError:
continue
# Check the modification time. We need to adjust on Windows.
mtime = stat.st_mtime
if _win32:
mtime -= stat.st_ctime
# Check if we've seen this file before. We don't need to do
# anything for new files.
if filename in self.mtimes:
# If this file's mtime has changed, queue it for reload.
if mtime != self.mtimes[filename]:
print "file %s enqueued" % filename
self.queue.put(filename)
# Record this filename's current mtime.
self.mtimes[filename] = mtime
class Reloader(threading.Thread):
def __init__(self, interval=1):
threading.Thread.__init__(self)
self.monitor = ModuleMonitor(interval=interval)
self.monitor.start()
self.interval = interval
self.is_running = True
logging.info("Module Monitor Started")
def run(self):
self._logger = logging.getLogger("Reloader")
while self.is_running:
self.poll()
time.sleep(self.interval)
self.monitor.terminate()
self._logger.info("Module Reloader terminated")
def terminate(self):
self.is_running = False
def poll(self):
filenames = set()
while not self.monitor.queue.empty():
try:
filename = self.monitor.queue.get_nowait()
filenames.add(filename)
except queue.Empty:
break
if filenames:
self._reload(filenames)
def _check(self, filenames, module):
mod_file = getattr(module, '__file__', None)
if mod_file:
for filename in filenames:
if mod_file.startswith(filename):
return True
return False
def _reload(self, filenames):
modules = [m for m in sys.modules.values()
if self._check(filenames, m)]
for mod in modules:
self._logger.info("Reloading module %s", mod.__name__)
Importer.reload(mod)
else:
j25._load_routing()
j25._update_mapper()
j25._dispatcher.register_all_apps_router() | lgpl-3.0 | 8,412,264,403,082,464,000 | 33.813793 | 99 | 0.607886 | false |
dskinner/damsel-python | dmsl/test/test_basic.py | 1 | 2247 | # -*- coding: utf-8 -*-
import os.path
import unittest
from _parse import Template
import codecs
class TestBasic(unittest.TestCase):
def setUp(self):
self.t = {
'basic_ending_colon': None,
'basic_html': None,
'basic_indent': None,
'basic_inline': None,
'basic_multilinetext': None,
'basic_tabs': None,
'basic_tag_hashes': None,
'basic_variable_indent': None
}
for k, v in self.t.items():
# template file
a = k+'.dmsl'
# expected output
b = open(os.path.join('', k+'.html')).read()
self.t[k] = (a, b)
def test_basic_ending_colon(self):
parsed, expected = self.t['basic_ending_colon']
parsed = Template(parsed).render()
self.assertEqual(parsed.strip(), expected.strip())
def test_basic_html(self):
parsed, expected = self.t['basic_html']
parsed = Template(parsed).render()
self.assertEqual(parsed.strip(), expected.strip())
def test_basic_indent(self):
parsed, expected = self.t['basic_indent']
parsed = Template(parsed).render()
self.assertEqual(parsed.strip(), expected.strip())
def test_basic_inline(self):
parsed, expected = self.t['basic_inline']
parsed = Template(parsed).render()
self.assertEqual(parsed.strip(), expected.strip())
def test_basic_multilinetext(self):
parsed, expected = self.t['basic_multilinetext']
parsed = Template(parsed).render()
self.assertEqual(parsed.strip(), expected.strip())
def test_basic_tabs(self):
parsed, expected = self.t['basic_tabs']
parsed = Template(parsed).render()
self.assertEqual(parsed.strip(), expected.strip())
def test_basic_tag_hashes(self):
parsed, expected = self.t['basic_tag_hashes']
parsed = Template(parsed).render()
self.assertEqual(parsed.strip(), expected.strip())
def test_basic_variable_indent(self):
parsed, expected = self.t['basic_variable_indent']
parsed = Template(parsed).render()
self.assertEqual(parsed.strip(), expected.strip())
| mit | 6,373,065,993,716,862,000 | 33.045455 | 58 | 0.58389 | false |
arrti/proxypooler | tests/test_pooler.py | 1 | 7492 | import asyncio
import os
import signal
import ssl
from multiprocessing import Process
from random import randint
from time import sleep
from unittest.mock import Mock
import aiohttp
import pytest
import requests
from proxypooler import config
from proxypooler.pooler import ProxyPooler
from proxypooler.ext import serial, deserial
srv = pytest.mark.skipif(
not pytest.config.getoption("--runsrv"),
reason="need --runsrv option to run"
)
if config.ssl_on:
HOST = 'https://localhost:8090'
else:
HOST = 'http://localhost:8090'
MAXSIZE = 10**5
@pytest.fixture
def clear(conn):
yield
conn.get_list(MAXSIZE)
@pytest.fixture
def ssl_context():
if config.ssl_on:
# context = ssl.SSLContext()
# context.load_cert_chain(CERT, KEY)
# context.load_verify_locations(CA_CRT)
# context.verify_mode = ssl.CERT_REQUIRED
# return context
return ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH,
cafile=config.ca_crt)
else:
return None
@pytest.fixture(scope='function')
def pooler(conn, clear):
p = ProxyPooler(saver=conn)
proc = Process(target=p.start)
yield p, proc
os.kill(proc.pid, signal.SIGINT)
proc.join()
@pytest.fixture
def proxy(pooler):
def send_request():
host = 'http://localhost:8088'
count = requests.get('{}/proxies/count'.format(host)).json()['count']
proxy = requests.get('{}/proxies/{}'.format(host, count)).json()['proxies']
return count, proxy
proxy = ["123.163.166.209:808", "175.155.24.9:808", "119.5.1.5:808",
"115.220.150.191:808", "117.43.0.73:808", "166.111.77.32:80",
"111.13.7.121:80", "125.106.248.222:808", "111.13.2.131:80",
"111.13.7.117:80", "119.5.1.35:808", "124.207.82.166:8008",
"121.69.47.126:8080", "123.125.212.171:8080", "36.249.24.157:808",
"121.232.147.114:9000", "144.12.165.38:808", "218.64.93.47:808",
"117.69.7.173:808", "221.229.46.39:808", "113.58.235.73:808",
"182.39.1.200:808", "58.50.64.15:808", "220.113.26.18:8080",
"117.43.1.187:808", "125.106.249.228:808", "58.253.70.149:8080",
"202.108.2.42:80", "106.0.4.116:8081", "175.155.24.22:808",
"123.55.189.10:808", "111.13.7.42:83", "121.237.143.107:808",
"175.155.25.21:808", "119.5.1.44:808", "27.22.49.236:808",
"221.217.34.54:9000", "60.184.174.109:808", "60.184.173.100:808",
"59.56.46.133:808", "101.4.136.34:80", "121.204.102.98:808",
"113.226.65.175:80", "61.178.238.122:63000", "115.220.146.70:808",
"122.241.72.204:808", "175.155.24.2:808", "113.123.127.230:808",
"125.106.224.213:808", "117.43.1.246:808", "119.5.1.33:808",
"119.5.0.4:808", "119.5.0.70:808", "175.155.25.44:808",
"123.55.189.200:808", "180.118.241.227:808", "218.241.234.48:8080",
"175.155.25.28:808", "123.163.130.15:808", "119.5.0.22:808"]
proxy = ["127.0.0.1:{}".format(i+51234) for i in range(300)]
count = len(proxy)
count_proxy = Mock(return_value=(count, proxy))
send_request = count_proxy
count, proxy = send_request()
p, proc = pooler
max_expire = 10
for i in proxy:
p.put(i, randint(2, max_expire))
return p, proc, count
def test_api(conn):
p = ProxyPooler(saver=conn)
for i in range(10):
p.put('127.0.0.1:{}'.format(80+i), i+2)
p.put_list([('127.0.0.1:{}'.format(80+i), i+2) for i in range(10, 20)])
assert p.size == 20
item, expire = p.get()
assert item == '127.0.0.1:80'
p.put('127.0.0.1:100', 1)
item, expire = p._get_item()
assert item['item'] == '127.0.0.1:100'
assert item['expire'] == 1
p._put_item(item, expire)
item, expire = p.get()
assert item == '127.0.0.1:100'
assert expire == 1
items = [p._get_item() for _ in range(10)]
p._put_items(items)
assert p.size == 19
item, expire = p.get()
assert item == '127.0.0.1:81'
item, expire = p._get_item()
assert item['item'] == '127.0.0.1:82'
assert item['expire'] == 4
items = p.get_list(3)
assert len(items) == 3
assert items[0][0] == '127.0.0.1:83'
assert items[0][1] == 5
assert items[1][1] == 6
items, _ = p._get_items(5)
assert len(items) == 5
assert items[0][0]['item'] == '127.0.0.1:86'
assert p.size == 9
items = p.get_list(1)
assert len(items) == 1
assert items[0][0] == '127.0.0.1:91'
items = p.get_list(0)
assert not items
items = p.get_list(-2)
assert not items
items, _ = p._get_items(1)
assert len(items) == 1
assert items[0][0]['item'] == '127.0.0.1:92'
items, _ = p._get_items(0)
assert not items
items = p.get_list(20, rev=True)
assert len(items) == 7
assert items[0][0] == '127.0.0.1:99'
assert p.size == 0
item, expire = p.get()
assert item is None
items, _ = p._get_items(1)
assert not items
def test_connect(pooler, ssl_context):
def client_send(data, queue, ssl_context):
async def _client():
connector = aiohttp.TCPConnector(ssl_context=ssl_context)
session = aiohttp.ClientSession(connector=connector)
async with session.ws_connect('{}/connect'.format(HOST)) as ws:
if isinstance(data, str):
await ws.send_str(data)
elif isinstance(data, bytes):
await ws.send_bytes(data)
async for msg in ws:
if msg.type == aiohttp.WSMsgType.TEXT:
if msg.data == 'close cmd':
await ws.close()
break
elif msg.type == aiohttp.WSMsgType.BINARY:
items = deserial(msg.data)
queue.put_nowait(items)
break
elif (msg.type == aiohttp.WSMsgType.CLOSED or
msg.type == aiohttp.WSMsgType.ERROR):
break
loop = asyncio.get_event_loop()
loop.run_until_complete(_client())
p, proc = pooler
proc.start()
sleep(1) # wait server start
queue = asyncio.Queue()
client_send(serial([('127.0.0.1:2017', 20)]), queue, ssl_context)
assert p.size == 1
client_send(serial([('127.0.0.1:2018', 40)]), queue, ssl_context)
client_send('get', queue, ssl_context)
assert queue.get_nowait()[0] == ('127.0.0.1:2018', 40)
client_send(serial([('127.0.0.1:2018', 30), ('127.0.0.1:2019', 25),
('127.0.0.1:2020', 20)]), queue, ssl_context)
client_send('get 0', queue, ssl_context)
with pytest.raises(asyncio.QueueEmpty):
queue.get_nowait()
assert p.size == 4
client_send('get proxy', queue, ssl_context)
assert p.size == 4
client_send('get 3', queue, ssl_context)
assert queue.get_nowait() == (('127.0.0.1:2018', 30), ('127.0.0.1:2019', 25), ('127.0.0.1:2020', 20))
client_send('get 1', queue, ssl_context)
assert queue.get_nowait() == (('127.0.0.1:2017', 20),)
assert p.size == 0
@srv
def test_server(monkeypatch, proxy, celery_worker):
p, proc, count = proxy
assert p.size == count
proc.start()
proc.join(10 + 5) # 60 = count / VALIDATE_COUNT * max_expire
assert p.size == 0
| apache-2.0 | -9,551,087,619,412,100 | 29.958678 | 105 | 0.557528 | false |
sxjscience/tvm | python/tvm/topi/nn/dilate.py | 1 | 2528 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name
"""Dilation operators"""
import tvm
from tvm import te
from .. import util
from .. import tag
@te.tag_scope(tag=tag.INJECTIVE + ",dilate")
def dilate(data, strides, dilation_value=0.0, name="DilatedInput"):
"""Dilate data with given dilation value (0 by default).
Parameters
----------
data : tvm.te.Tensor
n-D, can be any layout.
strides : list / tuple of n ints
Dilation stride on each dimension, 1 means no dilation.
dilation_value : int/float, optional
Value used to dilate the input.
name : str, optional
The name prefix operators generated
Returns
-------
Output : tvm.te.Tensor
n-D, the same layout as data.
"""
n = len(data.shape)
if len(strides) != n:
raise ValueError("data dimension and strides size dismatch : %d vs %d" % (n, len(strides)))
ana = tvm.arith.Analyzer()
out_shape = tuple(ana.simplify((data.shape[i] - 1) * strides[i] + 1) for i in range(n))
def _dilate(*indices):
not_zero = []
index_tuple = []
idxdiv = tvm.tir.indexdiv
idxmod = tvm.tir.indexmod
for i in range(n):
if not util.equal_const_int(strides[i], 1):
index_tuple.append(idxdiv(indices[i], strides[i]))
not_zero.append(idxmod(indices[i], strides[i]).equal(0))
else:
index_tuple.append(indices[i])
if not_zero:
not_zero = tvm.tir.all(*not_zero)
return tvm.tir.if_then_else(
not_zero, data(*index_tuple), tvm.tir.const(dilation_value, data.dtype)
)
return data(*index_tuple)
return te.compute(out_shape, _dilate, name=name)
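# Illustrative usage (example values, not part of the original module): each output
# dimension is (in_dim - 1) * stride + 1, so dilating a (3, 3) placeholder with
# strides (2, 2) yields a (5, 5) tensor with dilation_value between the elements:
#   data = te.placeholder((3, 3), name="data")
#   out = dilate(data, (2, 2))   # out.shape == [5, 5]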
| apache-2.0 | 205,081,130,815,146,940 | 34.111111 | 99 | 0.645174 | false |
googleapis/python-talent | google/cloud/talent_v4beta1/services/tenant_service/transports/base.py | 1 | 9841 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources
import google.auth # type: ignore
import google.api_core # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.talent_v4beta1.types import tenant
from google.cloud.talent_v4beta1.types import tenant as gct_tenant
from google.cloud.talent_v4beta1.types import tenant_service
from google.protobuf import empty_pb2 # type: ignore
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-cloud-talent",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
try:
# google.auth.__version__ was added in 1.26.0
_GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
try: # try pkg_resources if it is available
_GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
except pkg_resources.DistributionNotFound: # pragma: NO COVER
_GOOGLE_AUTH_VERSION = None
class TenantServiceTransport(abc.ABC):
"""Abstract transport class for TenantService."""
AUTH_SCOPES = (
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/jobs",
)
DEFAULT_HOST: str = "jobs.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials is service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
# TODO(busunkim): This method is in the base transport
# to avoid duplicating code across the transport classes. These functions
# should be deleted once the minimum required versions of google-auth is increased.
# TODO: Remove this function once google-auth >= 1.25.0 is required
@classmethod
def _get_scopes_kwargs(
cls, host: str, scopes: Optional[Sequence[str]]
) -> Dict[str, Optional[Sequence[str]]]:
"""Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
scopes_kwargs = {}
if _GOOGLE_AUTH_VERSION and (
packaging.version.parse(_GOOGLE_AUTH_VERSION)
>= packaging.version.parse("1.25.0")
):
scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
else:
scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
return scopes_kwargs
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.create_tenant: gapic_v1.method.wrap_method(
self.create_tenant, default_timeout=30.0, client_info=client_info,
),
self.get_tenant: gapic_v1.method.wrap_method(
self.get_tenant,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.ServiceUnavailable,
),
deadline=30.0,
),
default_timeout=30.0,
client_info=client_info,
),
self.update_tenant: gapic_v1.method.wrap_method(
self.update_tenant, default_timeout=30.0, client_info=client_info,
),
self.delete_tenant: gapic_v1.method.wrap_method(
self.delete_tenant,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.ServiceUnavailable,
),
deadline=30.0,
),
default_timeout=30.0,
client_info=client_info,
),
self.list_tenants: gapic_v1.method.wrap_method(
self.list_tenants,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.ServiceUnavailable,
),
deadline=30.0,
),
default_timeout=30.0,
client_info=client_info,
),
}
@property
def create_tenant(
self,
) -> Callable[
[tenant_service.CreateTenantRequest],
Union[gct_tenant.Tenant, Awaitable[gct_tenant.Tenant]],
]:
raise NotImplementedError()
@property
def get_tenant(
self,
) -> Callable[
[tenant_service.GetTenantRequest],
Union[tenant.Tenant, Awaitable[tenant.Tenant]],
]:
raise NotImplementedError()
@property
def update_tenant(
self,
) -> Callable[
[tenant_service.UpdateTenantRequest],
Union[gct_tenant.Tenant, Awaitable[gct_tenant.Tenant]],
]:
raise NotImplementedError()
@property
def delete_tenant(
self,
) -> Callable[
[tenant_service.DeleteTenantRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def list_tenants(
self,
) -> Callable[
[tenant_service.ListTenantsRequest],
Union[
tenant_service.ListTenantsResponse,
Awaitable[tenant_service.ListTenantsResponse],
],
]:
raise NotImplementedError()
__all__ = ("TenantServiceTransport",)
| apache-2.0 | 7,331,914,839,858,532,000 | 36.418251 | 103 | 0.603292 | false |
enrimatta/RU_Python_IV | labs/lab_objects.py | 1 | 1675 | #!/usr/bin/env python
# *-* coding:utf-8 *-*
"""
:mod:`lab_objects` -- Objects in Python
=========================================
LAB Objects Learning Objective: Explore objects in Python and how everything in Python
is an object.
a. Fill in the series of functions below that determine the characteristics of an object.
b. Write a print_object_flags function that uses the is_* functions to find the characteristics
of the passed in object and print the characteristics (flags).
"""
def is_callable(obj):
""" returns True if the object is callable """
# __call__
return hasattr(obj, "__call__")
def is_with(obj):
""" returns True if the object can be used in a "with" context """
# __enter__, __exit__
return hasattr(obj, "__enter__") and hasattr(obj, "__exit__")
def is_math(obj):
""" returns True if the object supports +, -, /, and * """
# __add__, ...
return hasattr(obj, "__add__") and hasattr(obj, "__mul__") and \
hasattr(obj, "__sub__") and hasattr(obj, "__div__")
def is_iterable(obj):
""" returns True if the object is iterable """
# __iter__
return hasattr(obj, "__iter__")
def print_object_flags(obj):
""" assess the object for various characteristics and print them """
if(is_callable(obj)):
print "CALLABLE"
if(is_with(obj)):
print "WITH"
if(is_math(obj)):
print "MATH"
if(is_iterable(obj)):
print "ITERABLE"
if __name__ == "__main__":
print_object_flags(1)
print_object_flags("abc")
print_object_flags(print_object_flags)
print_object_flags([1, 2, 3])
print_object_flags(file)
| gpl-2.0 | 3,440,704,479,022,479,000 | 26.016129 | 95 | 0.588657 | false |
mwilliamson/zuice | tests/test_injector.py | 1 | 12416 | from nose.tools import assert_equal
from nose.tools import assert_raises
import zuice
from zuice import Bindings
from zuice import Injector
from zuice import NoSuchBindingException
from zuice import dependency
from zuice import Base
class Apple(object):
pass
default_apple = Apple()
class Banana(object):
pass
def test_bind_type_to_instance():
apple = Apple()
bindings = Bindings()
bindings.bind(Apple).to_instance(apple)
injector = Injector(bindings)
assert injector.get(Apple) is apple
def test_bind_name_to_instance():
apple = Apple()
bindings = Bindings()
bindings.bind("apple").to_instance(apple)
injector = Injector(bindings)
assert injector.get("apple") is apple
def test_bind_type_to_provider():
apple = Apple()
bindings = Bindings()
bindings.bind(Apple).to_provider(lambda injector: apple)
injector = Injector(bindings)
assert injector.get(Apple) is apple
def test_get_throws_exception_if_no_such_binding_exists_and_object_has_init_args():
class Donkey(object):
def __init__(self, legs):
pass
injector = Injector(Bindings())
assert_raises(NoSuchBindingException, lambda: injector.get(Donkey))
def test_get_raises_exception_if_no_such_binding_exists():
injector = Injector(Bindings())
assert_raises(NoSuchBindingException, lambda: injector.get("apple"))
apple = Apple()
bindings = Bindings()
bindings.bind("apple").to_provider(lambda injector: apple)
injector = Injector(bindings)
assert injector.get("apple") is apple
assert_raises(NoSuchBindingException, lambda: injector.get("banana"))
def test_changing_bindings_after_creating_injector_does_not_change_injector():
bindings = Bindings()
injector = Injector(bindings)
bindings.bind("apple").to_instance(Apple())
assert_raises(NoSuchBindingException, lambda: injector.get("apple"))
def test_can_inject_class_with_no_constructor_arguments():
class Coconut(object):
def __init__(self):
self.x = 10
injector = Injector(Bindings())
coconut = injector.get(Coconut)
assert_equal(10, coconut.x)
def test_can_bind_to_names():
apple_to_inject = Apple()
bindings = Bindings()
bindings.bind("apple").to_instance(apple_to_inject)
bindings.bind("another_apple").to_key("apple")
injector = Injector(bindings)
assert injector.get("another_apple") is apple_to_inject
def test_injector_class_is_bound_to_injector():
injector = Injector(Bindings())
assert injector.get(Injector) is injector
def test_classes_that_inherit_from_injectable_have_members_injected():
class Foo(Base):
_tag_fetcher = dependency("tag_fetcher")
tag_fetcher = {'some': 'object'}
bindings = Bindings()
bindings.bind("tag_fetcher").to_instance(tag_fetcher)
injector = Injector(bindings)
assert injector.get(Foo)._tag_fetcher is tag_fetcher
def test_classes_that_inherit_from_injectable_can_be_passed_constructor_arguments_manually_by_name():
class Foo(Base):
fetcher = dependency("tag_fetcher")
tag_fetcher = {'some': 'object'}
assert Foo(fetcher=tag_fetcher).fetcher is tag_fetcher
def test_injectable_members_have_leading_underscores_removed_in_constructor_arg():
class Foo(Base):
_fetcher = dependency("tag_fetcher")
tag_fetcher = {'some': 'object'}
assert Foo(fetcher=tag_fetcher)._fetcher is tag_fetcher
def test_classes_that_inherit_from_injectable_can_be_passed_constructor_arguments_manually_by_position():
class View(Base):
_tag_fetcher = dependency("tag_fetcher")
_post_fetcher = dependency("post_fetcher")
tag_fetcher = {'some': 'object'}
post_fetcher = {'another': 'object'}
view = View(tag_fetcher, post_fetcher)
assert view._tag_fetcher is tag_fetcher
assert view._post_fetcher is post_fetcher
def test_injecting_overspecified_arguments_to_injectable_raises_exception():
class View(Base):
_tag_fetcher = dependency("tag_fetcher")
tag_fetcher = {'some': 'object'}
try:
view = View(tag_fetcher, tag_fetcher=tag_fetcher)
assert False
except TypeError as e:
assert_equal(str(e), "Got multiple values for keyword argument 'tag_fetcher'")
def test_injecting_too_many_positional_arguments_to_injectable_raises_exception():
class View(Base):
_tag_fetcher = dependency("tag_fetcher")
try:
view = View(None, None)
assert False
except TypeError as e:
assert_equal(str(e), "__init__ takes exactly 2 arguments (3 given)")
def test_injectable_injects_attributes_of_sub_classes():
class Parent(Base):
_tag_fetcher = dependency('tag_fetcher')
class Child(Parent):
_blog_post_fetcher = dependency('post_fetcher')
tag_fetcher = {'some': 'object'}
post_fetcher = {'another': 'object'}
bindings = Bindings()
bindings.bind("tag_fetcher").to_instance(tag_fetcher)
bindings.bind("post_fetcher").to_instance(post_fetcher)
injector = Injector(bindings)
child = injector.get(Child)
assert child._tag_fetcher is tag_fetcher
assert child._blog_post_fetcher is post_fetcher
def test_subclassing_injectable_objects_allows_injected_attributes_to_be_overwritten():
class Parent(Base):
_fetcher = dependency('tag_fetcher')
class Child(Parent):
_fetcher = dependency('post_fetcher')
post_fetcher = {'another': 'object'}
bindings = Bindings()
bindings.bind("post_fetcher").to_instance(post_fetcher)
injector = Injector(bindings)
child = injector.get(Child)
assert child._fetcher is post_fetcher
def test_missing_constructor_arguments_in_injectable_raises_type_error():
class Foo(Base):
_tag_fetcher = dependency("tag_fetcher")
_blog_post_fetcher = dependency('post_fetcher')
tag_fetcher = {'some': 'object'}
assert_raises(TypeError, lambda: Foo(_tag_fetcher=tag_fetcher))
def test_injectable_injecting_manually_with_extra_members_raises_type_error():
class Foo(Base):
_tag_fetcher = dependency("tag_fetcher")
tag_fetcher = {'some': 'object'}
post_fetcher = {'another': 'object'}
assert_raises(TypeError, lambda: Foo(_tag_fetcher=tag_fetcher, _post_fetcher=post_fetcher))
def test_can_set_bindings_for_keys_in_call_to_get():
Name = zuice.key("Name")
injector = Injector(Bindings())
assert_equal("Bob", injector.get(Name, {Name: "Bob"}))
def test_unscoped_injectables_are_available_in_any_scope():
Greeting = zuice.key("Greeting")
Name = zuice.key("Name")
class Greeter(zuice.Base):
_greeting = zuice.dependency(Greeting)
_name = zuice.dependency(Name)
def hello(self):
return "{0} {1}".format(self._greeting, self._name)
bindings = Bindings()
bindings.bind(Greeting).to_instance("Hello")
injector = Injector(bindings)
greeter = injector.get(Greeter, {Name: "Bob"})
assert greeter.hello() == "Hello Bob"
def test_scoped_injectables_cannot_depend_on_injectables_in_separate_scope():
Name = zuice.key("Name")
class Greeter(zuice.Base):
_name = zuice.dependency(Name)
bindings = Bindings()
bindings.bind(Greeter).singleton()
injector = Injector(bindings)
error = assert_raises(NoSuchBindingException, lambda: injector.get(Greeter, {Name: "Bob"}))
# TODO
#~ assert_equal(Name, error.key)
def test_can_set_bindings_for_keys_in_call_to_injected_factory():
Name = zuice.key("Name")
class Greeter(zuice.Base):
_name = zuice.dependency(Name)
def hello(self):
return "Hello {0}".format(self._name)
injector = Injector(Bindings())
factory = injector.get(zuice.factory(Greeter))
greeter = factory({Name: "Bob"})
assert greeter.hello() == "Hello Bob"
def test_original_bindings_are_prefered_to_zero_arg_constructors():
class Unit(object):
pass
unit = Unit()
bindings = Bindings()
bindings.bind(Unit).to_instance(unit)
injector = Injector(bindings)
assert injector.get(Unit, {zuice.key("a"): "a"}) is unit
class TestLifetimes(object):
def test_new_instances_are_returned_by_default(self):
x = [0]
class Counter(object):
def __init__(self):
self.x = x[0] = x[0] + 1
bindings = Bindings()
injector = Injector(bindings)
assert_equal(1, injector.get(Counter).x)
assert_equal(2, injector.get(Counter).x)
def test_bindings_can_change_lifetime_to_singleton(self):
x = [0]
class Counter(object):
def __init__(self):
self.x = x[0] = x[0] + 1
bindings = Bindings()
bindings.bind(Counter).singleton()
injector = Injector(bindings)
assert_equal(1, injector.get(Counter).x)
assert_equal(1, injector.get(Counter).x)
def test_bindings_can_change_lifetime_of_provider_to_singleton(self):
x = [0]
def count(injector):
x[0] += 1
return x[0]
counter = zuice.key("counter")
bindings = Bindings()
bindings.bind(counter).to_provider(count).singleton()
injector = Injector(bindings)
assert_equal(1, injector.get(counter))
assert_equal(1, injector.get(counter))
def test_bindings_can_scope_provider_to_value(self):
x = []
Name = zuice.key("Name")
def count(injector):
name = injector.get(Name)
x.append(name)
return x
counter = zuice.key("counter")
bindings = Bindings()
with bindings.scope(Name) as scope_bindings:
scope_bindings.bind(counter).to_provider(count)
injector = Injector(bindings)
assert_equal(["Bob"], injector.get(counter, {Name: "Bob"}))
assert_equal(["Bob"], injector.get(counter, {Name: "Bob"}))
assert_equal(["Bob", "Jim"], injector.get(counter, {Name: "Jim"}))
def test_can_retrieve_scoped_value_when_scoped_value_is_set_alongside_other_values(self):
x = []
Name = zuice.key("Name")
Greeting = zuice.key("Greeting")
def count(injector):
name = injector.get(Name)
x.append(name)
return x
counter = zuice.key("counter")
bindings = Bindings()
with bindings.scope(Name) as scope_bindings:
scope_bindings.bind(counter).to_provider(count)
injector = Injector(bindings)
assert_equal(["Bob"], injector.get(counter, {Name: "Bob", Greeting: "hello"}))
def test_singleton_is_cached_in_singleton_scope_when_injector_is_not_in_singleton_scope(self):
x = [0]
class Counter(object):
def __init__(self):
self.x = x[0] = x[0] + 1
bindings = Bindings()
bindings.bind(Counter).singleton()
injector = Injector(bindings)
assert_equal(1, injector.get(Counter, {"name": "Bob"}).x)
assert_equal(1, injector.get(Counter).x)
def test_cached_singleton_is_available_from_non_singleton_scope(self):
x = [0]
class Counter(object):
def __init__(self):
self.x = x[0] = x[0] + 1
bindings = Bindings()
bindings.bind(Counter).singleton()
injector = Injector(bindings)
assert_equal(1, injector.get(Counter, {"name": "Bob"}).x)
assert_equal(1, injector.get(Counter, {"name": "Jim"}).x)
def test_methods_decorated_with_init_decorator_are_run_after_injection():
class Count(zuice.Base):
@zuice.init
def start(self):
self.x = 1
injector = Injector(Bindings())
assert_equal(1, injector.get(Count).x)
def test_methods_decorated_with_init_decorator_are_run_after_manual_construction():
class Count(zuice.Base):
@zuice.init
def start(self):
self.x = 1
assert_equal(1, Count().x)
| bsd-2-clause | -4,687,835,895,482,636,000 | 29.808933 | 105 | 0.622181 | false |
nesaro/driza | pyrqt/carga/operaciones/anova.py | 1 | 3082 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#Copyright (C) 2006-2008 Inmaculada Luengo Merino, Néstor Arocha Rodríguez
#This file is part of pyrqt.
#
#pyrqt is free software; you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation; either version 2 of the License, or
#(at your option) any later version.
#
#pyrqt is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with pyrqt; if not, write to the Free Software
#Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Anova"""
#TODO: port to the new format
nombre = u"Anova Simple"
#tipo = "Variable"
tipo = "Casos" #FIXME: Tipo incorrecto
etiquetas = ["Otros"]
factor = {"nombre":"Factor", "tipo":"Factores"}
widget = {"tipo":"Variable", "opciones":[factor]}
def funcion(dato, variable, caso, opciones):
"""Funcion que convierte los datos de entrada en los resultados"""
import rpy #pylint: disable=import-error
diccionario = {}
r_data = {"Variable":[], "Factor":[]}
for x in dato.query(variable, caso = caso):
r_data["Variable"].append(float(x))
for x in dato.query(opciones["Factor"], caso = caso):
r_data["Factor"].append(repr(x))
# lista=[float(x) for x in dato.getCol(variable,caso=caso)]
# agrupacion=[x for x in dato.getCasos(opciones["Factor"])]
# agrupacion2=[x for x in dato.getCol(opciones["Factor"],caso=caso)]
# mifuncion=lambda f:agrupacion.index(f)
# agrupacionfinal=map(mifuncion,agrupacion2)
r_data_table = rpy.with_mode(rpy.NO_CONVERSION, rpy.r.data_frame)(r_data)
modelo = rpy.r("Variable ~ Factor")
aov = rpy.with_mode(rpy.NO_CONVERSION, rpy.r.aov)(modelo, r_data_table)
diccionario = rpy.r.summary(aov)
return diccionario
def initresultado(resultado, opciones):
"""Inicializa al objeto resultado, añadiendole lo que crea conveniente"""
resultado.addTablaSimple("resultado")
resultado["resultado"].titulo = u"Anova"
lista = []
if opciones["caso"]:
lista.append("Caso")
lista += [u"Resultado en bruto"]
resultado["resultado"].settitulo(lista)
def interfazresultado(resultado, listaopciones, floatrender = None):
"""Este método dice como introducir los datos en la tabla"""
lista = []
variable = listaopciones[0]
caso = listaopciones[1]
if caso:
lista.append(caso)
diccionario = listaopciones[2]
resultado["resultado"].set(variable, [str(diccionario)])
def comprobarentrada(opciones):
if not opciones["Factor"]:
from pyrqt.excepciones import OpcionesIncorrectaException
raise OpcionesIncorrectaException
def funcionprincipal(): pass
def funcionchequeocondiciones(interfazdato): return False
def funcionchequeoentradausuario(opciones): return False
definicionresultado = []
| gpl-2.0 | 3,244,386,289,570,607,000 | 35.642857 | 77 | 0.707602 | false |
blockstack/blockstack-server | integration_tests/blockstack_integration_tests/scenarios/namespace_preorder_reveal_import_multi_ready.py | 1 | 3316 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Blockstack
~~~~~
copyright: (c) 2014-2015 by Halfmoon Labs, Inc.
copyright: (c) 2016 by Blockstack.org
This file is part of Blockstack
Blockstack is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Blockstack is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Blockstack. If not, see <http://www.gnu.org/licenses/>.
"""
import testlib
import json
import virtualchain
wallets = [
testlib.Wallet( "5JesPiN68qt44Hc2nT8qmyZ1JDwHebfoh9KQ52Lazb1m1LaKNj9", 100000000000 ),
testlib.Wallet( "5KHqsiU9qa77frZb6hQy9ocV7Sus9RWJcQGYYBJJBb2Efj1o77e", 100000000000 ),
testlib.Wallet( "5Kg5kJbQHvk1B64rJniEmgbD83FpZpbw2RjdAZEzTefs9ihN3Bz", 100000000000 ),
testlib.Wallet( "5JuVsoS9NauksSkqEjbUZxWwgGDQbMwPsEfoRBSpLpgDX1RtLX7", 100000000000 ),
testlib.Wallet( "5KEpiSRr1BrT8vRD7LKGCEmudokTh1iMHbiThMQpLdwBwhDJB1T", 100000000000 )
]
consensus = "17ac43c1d8549c3181b200f1bf97eb7d"
def scenario( wallets, **kw ):
testlib.blockstack_namespace_preorder( "test", wallets[1].addr, wallets[0].privkey )
testlib.next_block( **kw )
testlib.blockstack_namespace_reveal( "test", wallets[1].addr, 52595, 250, 4, [6,5,4,3,2,1,0,0,0,0,0,0,0,0,0,0], 10, 10, wallets[0].privkey )
testlib.next_block( **kw )
resp = testlib.blockstack_name_import( "foo.test", wallets[2].addr, "11" * 20, wallets[1].privkey )
if 'error' in resp:
print json.dumps(resp, indent=4 )
testlib.next_block( **kw )
resp = testlib.blockstack_name_import( "foo.test", wallets[3].addr, "22" * 20, wallets[1].privkey )
if 'error' in resp:
print json.dumps(resp, indent=4 )
testlib.next_block( **kw )
resp = testlib.blockstack_name_import( "foo.test", wallets[4].addr, "44" * 20, wallets[1].privkey )
resp = testlib.blockstack_name_import( "foo.test", wallets[4].addr, "55" * 20, wallets[1].privkey )
resp = testlib.blockstack_name_import( "foo.test", wallets[4].addr, "33" * 20, wallets[1].privkey )
if 'error' in resp:
print json.dumps(resp, indent=4 )
testlib.next_block( **kw )
testlib.blockstack_namespace_ready( "test", wallets[1].privkey )
testlib.next_block( **kw )
def check( state_engine ):
# not revealed, but ready
ns = state_engine.get_namespace_reveal( "test" )
if ns is not None:
return False
ns = state_engine.get_namespace( "test" )
if ns is None:
return False
if ns['namespace_id'] != 'test':
return False
# each name must exist
foo = state_engine.get_name( "foo.test" )
if foo is None:
return False
if foo['value_hash'] != "33" * 20:
return False
if foo['address'] != wallets[4].addr or foo['sender'] != virtualchain.make_payment_script(wallets[4].addr):
return False
return True
| gpl-3.0 | -1,843,037,481,361,064,400 | 33.541667 | 144 | 0.676116 | false |
iLoop2/ResInsight | ThirdParty/Ert/devel/python/python/ert/enkf/plot_data/pca_plot_vector.py | 1 | 2288 | from ert.cwrap import BaseCClass, CWrapper
from ert.enkf import ENKF_LIB
from ert.util import Matrix
class PcaPlotVector(BaseCClass):
def __init__(self, component, principal_component_matrix, observation_principal_component_matrix):
assert isinstance(component, int)
assert isinstance(principal_component_matrix, Matrix)
assert isinstance(observation_principal_component_matrix, Matrix)
c_pointer = PcaPlotVector.cNamespace().alloc(component, principal_component_matrix, observation_principal_component_matrix)
super(PcaPlotVector, self).__init__(c_pointer)
def __len__(self):
""" @rtype: int """
return PcaPlotVector.cNamespace().size(self)
def __getitem__(self, index):
"""
@type index: int
@rtype: float
"""
assert isinstance(index, int)
return PcaPlotVector.cNamespace().get(self, index)
def __iter__(self):
cur = 0
while cur < len(self):
yield self[cur]
cur += 1
def getObservation(self):
""" @rtype: float """
return PcaPlotVector.cNamespace().get_obs(self)
def getSingularValue(self):
""" @rtype: float """
return PcaPlotVector.cNamespace().get_singular_value(self)
def free(self):
PcaPlotVector.cNamespace().free(self)
cwrapper = CWrapper(ENKF_LIB)
cwrapper.registerType("pca_plot_vector", PcaPlotVector)
cwrapper.registerType("pca_plot_vector_obj", PcaPlotVector.createPythonObject)
cwrapper.registerType("pca_plot_vector_ref", PcaPlotVector.createCReference)
PcaPlotVector.cNamespace().alloc = cwrapper.prototype("c_void_p pca_plot_vector_alloc(int, matrix, matrix)")
PcaPlotVector.cNamespace().free = cwrapper.prototype("void pca_plot_vector_free(pca_plot_vector)")
PcaPlotVector.cNamespace().size = cwrapper.prototype("int pca_plot_vector_get_size(pca_plot_vector)")
PcaPlotVector.cNamespace().get = cwrapper.prototype("double pca_plot_vector_iget_sim_value(pca_plot_vector, int)")
PcaPlotVector.cNamespace().get_obs = cwrapper.prototype("double pca_plot_vector_get_obs_value(pca_plot_vector)")
PcaPlotVector.cNamespace().get_singular_value = cwrapper.prototype("double pca_plot_vector_get_singular_value(pca_plot_vector)")
| gpl-3.0 | -3,688,779,641,078,196,700 | 35.903226 | 131 | 0.693182 | false |
jeremiah-c-leary/vhdl-style-guide | vsg/tests/code_tags/test_next_line_code_tag.py | 1 | 2440 | import os
import unittest
from vsg.rules import process
from vsg.rules import architecture
from vsg import vhdlFile
from vsg.tests import utils
# Read in test file used for all tests
lFile, eError = vhdlFile.utils.read_vhdlfile(os.path.join(os.path.dirname(__file__), 'next_line_code_tag_test_input.vhd'))
oFile = vhdlFile.vhdlFile(lFile)
class testCodeTags(unittest.TestCase):
def setUp(self):
self.assertIsNone(eError)
def test_rule_process_016(self):
oRule = process.rule_016()
# dExpected = []
# dExpected.append(utils.add_violation(13))
# dExpected.append(utils.add_violation(25))
# oRule.analyze(oFile)
# self.assertEqual(oRule.violations, dExpected)
lExpected = [13, 25]
oRule.analyze(oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_rule_process_018(self):
oRule = process.rule_018()
# dExpected = []
# dExpected.append(utils.add_violation(15))
# dExpected.append(utils.add_violation(27))
# oRule.analyze(oFile)
# self.assertEqual(oRule.violations, dExpected)
lExpected = [15, 27]
oRule.analyze(oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_rule_process_014(self):
oRule = process.rule_014()
# dExpected = []
# dExpected.append(utils.add_violation(19))
# oRule.analyze(oFile)
# self.assertEqual(oRule.violations, dExpected)
lExpected = [19]
oRule.analyze(oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_rule_architecture_024(self):
oRule = architecture.rule_024()
# dExpected = []
# oRule.analyze(oFile)
# self.assertEqual(oRule.violations, dExpected)
lExpected = []
oRule.analyze(oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_rule_process_002(self):
oRule = process.rule_002()
# dExpected = []
# oRule.analyze(oFile)
# self.assertEqual(oRule.violations, dExpected)
lExpected = []
oRule.analyze(oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
| gpl-3.0 | 3,781,427,114,097,436,700 | 27.705882 | 122 | 0.663115 | false |
gauravbose/digital-menu | digimenu2/restaurant/migrations/0030_kitchen_usertable.py | 1 | 1126 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('restaurant', '0029_cart'),
]
operations = [
migrations.CreateModel(
name='Kitchen',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('table', models.IntegerField()),
('status', models.CharField(default=b'RC', max_length=2, choices=[(b'RC', b'recieved'), (b'PG', b'preparing'), (b'PD', b'prepared'), (b'DD', b'delivered')])),
('menu_item', models.ForeignKey(to='restaurant.Menu')),
],
),
migrations.CreateModel(
name='Usertable',
fields=[
('table_no', models.IntegerField(serialize=False, primary_key=True)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
]
| bsd-3-clause | 7,702,231,826,981,313,000 | 34.1875 | 174 | 0.563055 | false |
hammerhorn/working | cjh/letter.py | 1 | 3270 | #coding=utf8
import time
from cjh.music import Pitch, Note
from cjh.misc import speak
"""
translate the Roman alphabet into, e.g.,
radiophonic words, morse code, braille, etc....
"""
class Letter(object):
"""
convert between different forms of Roman-alphabet letters
"""
morse_dict = {
'1':'.----',
'2':'..---',
'3':'...--',
'4':'....-',
'5':'.....',
'6':'-....',
'7':'--...',
'8':'---..',
'9':'----.',
'0':'-----',
'A':'.-',
'B':'-...',
'C':'-.-.',
'D':'-..',
'E':'.',
'F':'..-.',
'G':'--.',
'H':'....',
'I':'..',
'J':'.---',
'K':'-.-',
'L':'.-..',
'M':'--',
'N':'-.',
'O':'---',
'P':'.--.',
'Q':'--.-',
'R':'.-.',
'S':'...',
'T':'-',
'U':'..-',
'V':'...-',
'W':'.--',
'X':'-..-',
'Y':'-.--',
'Z':'--..',
' ':'/', '.':'.-.-.-'}
radio_dict = {
'A':'Alfa',
'B':'Bravo',
'C':'Charlie',
'D':'Delta',
'E':'Echo',
'F':'Foxtrot',
'G':'Golf',
'H':'Hotel',
'I':'India',
'J':'Juliett',
'K':'Kilo',
'L':'Lima',
'M':'Mike',
'N':'November',
'O':'Oscar',
'P':'Papa',
'Q':'Quebec',
'R':'Romeo',
'S':'Sierra',
'T':'Tango',
'U':'Uniform',
'V':'Victor',
'W':'Whiskey',
'X':'Xray',
'Y':'Yankee',
'Z':'Zulu', ' ': '', '.': 'stop'}
braille_dict = {
'A':'⠁',
'B':'⠃',
'C':'⠉',
'D':'⠙',
'E':'⠑',
'F':'⠋',
'G':'⠛',
'H':'⠓',
'I':'⠊',
'J':'⠚',
'K':'⠅',
'L':'⠇',
'M':'⠍',
'N':'⠝',
'O':'⠕',
'P':'⠏',
'Q':'⠟',
'R':'⠗',
'S':'⠎',
'T':'⠞',
'U':'⠥',
'V':'⠧',
'W':'⠺',
'X':'⠭',
'Y':'⠽',
'Z':'⠵', ' ':None, '.':None}
def __init__(self, char):
self.majuscule = char.upper()
self.radio_name = self.__class__.radio_dict[char.upper()]
self.braille = self.__class__.braille_dict[char.upper()]
self.morse = self.__class__.morse_dict[char.upper()]
self.mora = 0.048
self.wpm = 1.2 / self.mora
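        # Standard Morse timing: one word ("PARIS") spans 50 dot units, so
        # words per minute = 60 / (50 * dot_seconds) = 1.2 / mora.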
self.hz = 1000
def __str__(self):
return '{} {} {}'.format(self.radio_name, self.braille, self.morse)
def play_morse(self):
for x in self.morse:
if x == '.':
Note(Pitch(freq=self.hz), self.mora).play()
#time.sleep(.025)
elif x == '-':
Note(Pitch(freq=self.hz), self.mora * 3).play()
elif x == ' ':
time.sleep(7 * self.mora)
time.sleep(self.mora)
time.sleep(3 * self.mora)
def radio_speak(self):
spoken_forms = {
'J': 'Julie-et',
'O': 'Oska',
'P': 'Pawpaw',
'Q': 'Kebec'
}
speak(spoken_forms.get(self.majuscule, self.radio_name))
| gpl-3.0 | 8,146,375,046,475,646,000 | 21.661972 | 75 | 0.306712 | false |
marcelocure/python-librato | tests/test_metrics.py | 1 | 7166 | import logging
import unittest
try:
from unittest.mock import patch
except ImportError:
from mock import patch
import librato
from mock_connection import MockConnect, server
#logging.basicConfig(level=logging.DEBUG)
# Mock the server
librato.HTTPSConnection = MockConnect
class TestLibrato(unittest.TestCase):
def setUp(self):
self.conn = librato.connect('user_test', 'key_test')
server.clean()
def test_list_metrics_when_there_are_no_metrics(self):
metrics = self.conn.list_metrics()
assert len(metrics) == 0
def test_list_all_metrics(self):
def mock_list(**args):
offset = args['offset']
length = args['length']
# I don't care what the metrics are
# this is about testing the logic and the calls
result = [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}]
return result[offset:length+offset]
expected_call_list = [({'length': 5, 'offset': 0},),
({'length': 5, 'offset': 5},),
({'length': 5, 'offset': 10},)]
with patch.object(
self.conn,
'list_metrics',
) as list_prop:
list_prop.side_effect = mock_list
metrics = list(self.conn.list_all_metrics(length=5))
assert len(metrics) == 12
assert list_prop.call_count == 3
assert list_prop.call_args_list == expected_call_list
def test_list_metrics_adding_gauge(self):
""" Notice that the api forces you to send a value even when you are
just trying to create the metric without measurements."""
self.conn.submit('gauge_1', 1, description='desc 1')
self.conn.submit('gauge_2', 2, description='desc 2')
# Get all metrics
metrics = self.conn.list_metrics()
assert len(metrics) == 2
assert isinstance(metrics[0], librato.metrics.Gauge)
assert metrics[0].name == 'gauge_1'
assert metrics[0].description == 'desc 1'
assert isinstance(metrics[1], librato.metrics.Gauge)
assert metrics[1].name == 'gauge_2'
assert metrics[1].description == 'desc 2'
def test_list_metrics_adding_counter_metrics(self):
self.conn.submit('c1', 10, 'counter', description='counter desc 1')
self.conn.submit('c2', 20, 'counter', description='counter desc 2')
# Get all metrics
metrics = self.conn.list_metrics()
assert len(metrics) == 2
assert isinstance(metrics[0], librato.metrics.Counter)
assert metrics[0].name == 'c1'
assert metrics[0].description == 'counter desc 1'
assert isinstance(metrics[1], librato.metrics.Counter)
assert metrics[1].name == 'c2'
assert metrics[1].description == 'counter desc 2'
def test_list_metrics_adding_one_counter_one_gauge(self):
self.conn.submit('gauge1', 10)
self.conn.submit('counter2', 20, type='counter', description="desc c2")
# Get all metrics
metrics = self.conn.list_metrics()
assert isinstance(metrics[0], librato.metrics.Gauge)
assert metrics[0].name == 'gauge1'
assert isinstance(metrics[1], librato.metrics.Counter)
assert metrics[1].name == 'counter2'
assert metrics[1].description == 'desc c2'
def test_deleting_a_gauge(self):
self.conn.submit('test', 100)
assert len(self.conn.list_metrics()) == 1
self.conn.delete('test')
assert len(self.conn.list_metrics()) == 0
def test_deleting_a_batch_of_gauges(self):
self.conn.submit('test', 100)
self.conn.submit('test2', 100)
assert len(self.conn.list_metrics()) == 2
self.conn.delete(['test', 'test2'])
assert len(self.conn.list_metrics()) == 0
def test_deleting_a_counter(self):
self.conn.submit('test', 200, type='counter')
assert len(self.conn.list_metrics()) == 1
self.conn.delete('test')
assert len(self.conn.list_metrics()) == 0
def test_get_gauge_basic(self):
name, desc = '1', 'desc 1'
self.conn.submit(name, 10, description=desc)
gauge = self.conn.get(name)
assert isinstance(gauge, librato.metrics.Gauge)
assert gauge.name == name
assert gauge.description == desc
assert len(gauge.measurements['unassigned']) == 1
assert gauge.measurements['unassigned'][0]['value'] == 10
def test_get_counter_basic(self):
name, desc = 'counter1', 'count desc 1'
self.conn.submit(name, 20, type='counter', description=desc)
counter = self.conn.get(name)
assert isinstance(counter, librato.metrics.Counter)
assert counter.name == name
assert counter.description == desc
assert len(counter.measurements['unassigned']) == 1
assert counter.measurements['unassigned'][0]['value'] == 20
def test_send_single_measurements_for_gauge_with_source(self):
name, desc, src = 'Test', 'A Test Gauge.', 'from_source'
self.conn.submit(name, 10, description=desc, source=src)
gauge = self.conn.get(name)
assert gauge.name == name
assert gauge.description == desc
assert len(gauge.measurements[src]) == 1
assert gauge.measurements[src][0]['value'] == 10
def test_send_single_measurements_for_counter_with_source(self):
name, desc, src = 'Test', 'A Test Counter.', 'from_source'
self.conn.submit(name, 111, type='counter', description=desc, source=src)
counter = self.conn.get(name)
assert counter.name == name
assert counter.description == desc
assert len(counter.measurements[src]) == 1
assert counter.measurements[src][0]['value'] == 111
def test_add_in_counter(self):
name, desc, src = 'Test', 'A Test Counter.', 'from_source'
self.conn.submit(name, 111, type='counter', description=desc, source=src)
counter = self.conn.get(name)
assert counter.name == name
assert counter.description == desc
assert len(counter.measurements[src]) == 1
assert counter.measurements[src][0]['value'] == 111
counter.add(1, source=src)
counter = self.conn.get(name)
assert counter.name == name
assert counter.description == desc
assert len(counter.measurements[src]) == 2
assert counter.measurements[src][-1]['value'] == 1
def test_add_in_gauge(self):
name, desc, src = 'Test', 'A Test Gauge.', 'from_source'
self.conn.submit(name, 10, description=desc, source=src)
gauge = self.conn.get(name)
assert gauge.name == name
assert gauge.description == desc
assert len(gauge.measurements[src]) == 1
assert gauge.measurements[src][0]['value'] == 10
gauge.add(1, source=src)
gauge = self.conn.get(name)
assert gauge.name == name
assert gauge.description == desc
assert len(gauge.measurements[src]) == 2
assert gauge.measurements[src][-1]['value'] == 1
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -7,349,473,628,441,905,000 | 37.945652 | 81 | 0.604382 | false |
MrJohz/snooble | tests/unit/test_ratelimit.py | 1 | 7583 | from snooble import ratelimit
import time # used to monkeypatch this module
from unittest import mock
import pytest
class TestRatelimit(object):
def test_bursty(self):
limiter = ratelimit.RateLimiter(5, 1, bursty=False)
assert limiter.current_bucket == 1
assert limiter.refresh_period == 1 / 5
limiter.take()
assert limiter.current_bucket == 0
def test_bursty_property(self):
limiter = ratelimit.RateLimiter(5, 1, bursty=True)
assert limiter.current_bucket == 5
assert limiter.refresh_period == 1
limiter.bursty = False
assert limiter.bucket_size == 1
assert limiter.current_bucket == 1
assert limiter.refresh_period == 1 / 5
limiter.bursty = False
assert limiter.bucket_size == 1
assert limiter.current_bucket == 1
assert limiter.refresh_period == 1 / 5
limiter.take()
assert limiter.current_bucket == 0
limiter.bursty = True
assert limiter.bucket_size == 5
assert limiter.current_bucket == 0
assert limiter.refresh_period == 1
limiter.bursty = True
assert limiter.bucket_size == 5
assert limiter.current_bucket == 0
assert limiter.refresh_period == 1
def test_able_to_take_when_bucket_filled(self, monkeypatch):
mocker = mock.Mock()
monkeypatch.setattr(time, 'sleep', mocker)
limiter = ratelimit.RateLimiter(5, 1)
assert limiter.current_bucket == 5
limiter.take()
assert limiter.current_bucket == 4
limiter.take(4)
assert limiter.current_bucket == 0
assert not mocker.called
def test_sleeps_until_finished(self, monkeypatch):
sleep_mocker = mock.Mock()
timer_mocker = mock.Mock(side_effect=[0, 0.1, 0.2, 1])
monkeypatch.setattr(time, 'sleep', sleep_mocker)
monkeypatch.setattr(time, 'perf_counter', timer_mocker)
limiter = ratelimit.RateLimiter(1, 1)
assert limiter.current_bucket == 1
limiter.take()
assert limiter.current_bucket == 0
assert not sleep_mocker.called
limiter.take()
assert limiter.current_bucket == 0
assert sleep_mocker.called
assert sleep_mocker.call_args_list == [mock.call(0.9), mock.call(0.8)]
assert len(timer_mocker.call_args_list) == 4
def test_taking_many_at_once_small_bucket(self, monkeypatch):
sleep_mocker = mock.Mock()
timer_mocker = mock.Mock(side_effect=range(100))
monkeypatch.setattr(time, 'sleep', sleep_mocker)
monkeypatch.setattr(time, 'perf_counter', timer_mocker)
small_bucket = ratelimit.RateLimiter(1, 1)
assert small_bucket.current_bucket == 1
small_bucket.take()
assert small_bucket.current_bucket == 0
small_bucket.take(8)
assert small_bucket.current_bucket == 0
assert len(timer_mocker.call_args_list) == 9
def test_taking_many_at_once_big_bucket(self, monkeypatch):
sleep_mocker = mock.Mock()
timer_mocker = mock.Mock(side_effect=range(100))
monkeypatch.setattr(time, 'sleep', sleep_mocker)
monkeypatch.setattr(time, 'perf_counter', timer_mocker)
big_bucket = ratelimit.RateLimiter(3, 1)
assert big_bucket.current_bucket == 3
big_bucket.take()
assert big_bucket.current_bucket == 2
assert not sleep_mocker.called
big_bucket.take(8)
assert big_bucket.current_bucket == 0
assert len(timer_mocker.call_args_list) == 3
def test_equality(self):
limit1 = ratelimit.RateLimiter(rate=60, per=60, bursty=False)
limit2 = ratelimit.RateLimiter(rate=60, per=60, bursty=True)
limit3 = ratelimit.RateLimiter(rate=25, per=50, bursty=True)
limit4 = ratelimit.RateLimiter(rate=60, per=60, bursty=False)
assert limit1 == limit4 and limit4 == limit1
assert limit1 != limit2
assert limit1 != limit3
assert limit1 != (60, 60)
def test_repr(self):
rl = ratelimit.RateLimiter(rate=60, per=60, bursty=True)
assert "rate=60" in repr(rl)
assert "per=60" in repr(rl)
assert "bursty=True" in repr(rl)
assert "current=60" in repr(rl)
rl.take(30)
assert "current=30" in repr(rl)
class TestLimitation(object):
def test_wrapping(self):
take_mocker = mock.Mock(return_value=True)
ratelimiter = ratelimit.RateLimiter(1, 1)
ratelimiter.take = take_mocker
test_object = mock.Mock()
limited_object = ratelimiter.limitate(test_object, ['limited_method', 'limited_uncalled_method'])
limited_object.arbitrary_method()
assert not take_mocker.called
assert test_object.arbitrary_method.called
test_object.reset_mock()
take_mocker.reset_mock()
limited_object.arbitrary_uncalled_method
assert not take_mocker.called
assert not test_object.arbitrary_uncalled_method.called
test_object.reset_mock()
take_mocker.reset_mock()
limited_object.limited_method()
assert take_mocker.called
assert test_object.limited_method.called
test_object.reset_mock()
take_mocker.reset_mock()
limited_object.limited_uncalled_method
assert not take_mocker.called
assert not test_object.limited_uncalled_method.called
test_object.reset_mock()
take_mocker.reset_mock()
test_object = mock.Mock()
test_object.arbitrary_attribute = "arbitrary"
test_object.limited_attribute = "limited"
limited_object = ratelimiter.limitate(test_object, ['limited_attribute'])
limited_object.arbitrary_attribute
assert not take_mocker.called
test_object.reset_mock()
take_mocker.reset_mock()
limited_object.limited_attribute
assert take_mocker.called
def test_wrapper_passes_information_through(self):
take_mocker = mock.Mock(return_value=True)
ratelimiter = ratelimit.RateLimiter(1, 1)
ratelimiter.take = take_mocker
test_object = mock.Mock()
limited_object = ratelimiter.limitate(test_object, ['limited_method'])
limited_object.arbitrary_method("arg1", "arg2", ["args4", "and 5"], name="hello")
assert not take_mocker.called
assert (test_object.arbitrary_method.call_args ==
mock.call("arg1", "arg2", ["args4", "and 5"], name="hello"))
test_object.reset_mock()
limited_object.limited_method("arg1", "arg2", ["args4", "and 5"], name="hello")
assert take_mocker.called
assert (test_object.limited_method.call_args ==
mock.call("arg1", "arg2", ["args4", "and 5"], name="hello"))
@pytest.mark.xfail
def test_wrapper_looks_like_object(self):
take_mocker = mock.Mock(return_value=True)
ratelimiter = ratelimit.RateLimiter(1, 1)
ratelimiter.take = take_mocker
class MyCustomObject(object):
def limited_method(self):
return "limited"
def unlimited_method(self, arg1, arg2="hello"):
return "unlimited"
test_object = MyCustomObject()
limited_object = ratelimiter.limitate(test_object, ['limited_method'])
assert isinstance(limited_object, MyCustomObject)
assert hasattr(limited_object, 'limited_method')
assert hasattr(limited_object, 'unlimited_method')
# TODO: method signatures are alike
| mit | -3,938,698,611,375,506,000 | 33.468182 | 105 | 0.632072 | false |
syuu1228/seastar | configure.py | 1 | 43696 | #!/usr/bin/python3
#
# This file is open source software, licensed to you under the terms
# of the Apache License, Version 2.0 (the "License"). See the NOTICE file
# distributed with this work for additional information regarding copyright
# ownership. You may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os, os.path, textwrap, argparse, sys, shlex, subprocess, tempfile, re
configure_args = str.join(' ', [shlex.quote(x) for x in sys.argv[1:]])
tempfile.tempdir = "./build/tmp"
srcdir = os.getcwd()
def get_flags():
with open('/proc/cpuinfo') as f:
for line in f:
if line.strip():
if line.rstrip('\n').startswith('flags'):
return re.sub(r'^flags\s+: ', '', line).split()
def add_tristate(arg_parser, name, dest, help):
arg_parser.add_argument('--enable-' + name, dest = dest, action = 'store_true', default = None,
help = 'Enable ' + help)
arg_parser.add_argument('--disable-' + name, dest = dest, action = 'store_false', default = None,
help = 'Disable ' + help)
def apply_tristate(var, test, note, missing):
if (var is None) or var:
if test():
return True
elif var == True:
print(missing)
sys.exit(1)
else:
print(note)
return False
return False
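# Illustrative behaviour of the tristate helper (hypothetical --enable-foo /
# --disable-foo pair): with --enable-foo a failing test() aborts configure and
# prints `missing`; with --disable-foo the probe is skipped entirely; with
# neither flag the feature is used only when test() succeeds, otherwise `note`
# is printed and the feature is quietly left out.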
#
# dpdk_cflags - fetch the DPDK specific CFLAGS
#
# Run a simple makefile that "includes" the DPDK main makefile and prints the
# MACHINE_CFLAGS value
#
def dpdk_cflags (dpdk_target):
ensure_tmp_dir_exists()
with tempfile.NamedTemporaryFile() as sfile:
dpdk_target = os.path.abspath(dpdk_target)
dpdk_target = re.sub(r'\/+$', '', dpdk_target)
dpdk_sdk_path = os.path.dirname(dpdk_target)
dpdk_target_name = os.path.basename(dpdk_target)
dpdk_arch = dpdk_target_name.split('-')[0]
if args.dpdk:
dpdk_sdk_path = 'dpdk'
dpdk_target = os.getcwd() + '/build/dpdk'
dpdk_target_name = 'x86_64-{}-linuxapp-gcc'.format(dpdk_machine)
dpdk_arch = 'x86_64'
sfile.file.write(bytes('include ' + dpdk_sdk_path + '/mk/rte.vars.mk' + "\n", 'utf-8'))
sfile.file.write(bytes('all:' + "\n\t", 'utf-8'))
sfile.file.write(bytes('@echo $(MACHINE_CFLAGS)' + "\n", 'utf-8'))
sfile.file.flush()
dpdk_cflags = subprocess.check_output(['make', '--no-print-directory',
'-f', sfile.name,
'RTE_SDK=' + dpdk_sdk_path,
'RTE_OUTPUT=' + dpdk_target,
'RTE_TARGET=' + dpdk_target_name,
'RTE_SDK_BIN=' + dpdk_target,
'RTE_ARCH=' + dpdk_arch])
dpdk_cflags_str = dpdk_cflags.decode('utf-8')
dpdk_cflags_str = re.sub(r'\n+$', '', dpdk_cflags_str)
dpdk_cflags_final = ''
return dpdk_cflags_str
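# Sketch of the throwaway makefile written above (path names are assumptions,
# not captured output):
#
#   include <dpdk_sdk_path>/mk/rte.vars.mk
#   all:
#           @echo $(MACHINE_CFLAGS)
#
# Running it with RTE_SDK/RTE_TARGET/RTE_ARCH overridden on the make command
# line makes DPDK's own build system print just the machine-specific CFLAGS,
# which are later appended to the seastar compile flags when --enable-dpdk or
# --dpdk-target is used.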
def try_compile(compiler, source = '', flags = []):
return try_compile_and_link(compiler, source, flags = flags + ['-c'])
def ensure_tmp_dir_exists():
if not os.path.exists(tempfile.tempdir):
os.makedirs(tempfile.tempdir)
def try_compile_and_link(compiler, source = '', flags = []):
ensure_tmp_dir_exists()
with tempfile.NamedTemporaryFile() as sfile:
ofile = tempfile.mktemp()
try:
sfile.file.write(bytes(source, 'utf-8'))
sfile.file.flush()
# We can't write to /dev/null, since in some cases (-ftest-coverage) gcc will create an auxiliary
# output file based on the name of the output file, and "/dev/null.gcsa" is not a good name
return subprocess.call([compiler, '-x', 'c++', '-o', ofile, sfile.name] + flags,
stdout = subprocess.DEVNULL,
stderr = subprocess.DEVNULL) == 0
finally:
if os.path.exists(ofile):
os.unlink(ofile)
def try_compile_and_run(compiler, flags, source, env = {}):
ensure_tmp_dir_exists()
mktemp = tempfile.NamedTemporaryFile
with mktemp() as sfile, mktemp(mode='rb') as xfile:
sfile.file.write(bytes(source, 'utf-8'))
sfile.file.flush()
xfile.file.close()
if subprocess.call([compiler, '-x', 'c++', '-o', xfile.name, sfile.name] + args.user_cflags.split() + flags,
stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL) != 0:
# The compiler may delete the target on failure, and lead to
# NamedTemporaryFile's destructor throwing an exception.
open(xfile.name, 'a').close()
return False
e = os.environ.copy()
e.update(env)
env = e
return subprocess.call([xfile.name], stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL, env=env) == 0
def warning_supported(warning, compiler, flags):
# gcc ignores -Wno-x even if it is not supported
adjusted = re.sub('^-Wno-', '-W', warning)
return try_compile(flags=flags + [adjusted, '-Werror'], compiler = compiler)
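# Example of the -Wno- flip above (flags are illustrative): probing
# '-Wno-maybe-uninitialized' really test-compiles with
# '-Wmaybe-uninitialized -Werror'. gcc accepts any -Wno-* spelling, so probing
# the negative form directly would make every warning look supported; the
# positive form is rejected by compilers that do not know it.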
def debug_flag(compiler, flags):
src_with_auto = textwrap.dedent('''\
template <typename T>
struct x { auto f() {} };
x<int> a;
''')
if try_compile(source = src_with_auto, flags = flags + ['-g', '-std=gnu++1y'], compiler = compiler):
return '-g'
else:
print('Note: debug information disabled; upgrade your compiler')
return ''
def dialect_supported(dialect, compiler='g++'):
return try_compile(compiler=compiler, source='', flags=['-std=' + dialect])
def detect_membarrier(compiler, flags):
return try_compile(compiler=compiler, flags=flags, source=textwrap.dedent('''\
#include <linux/membarrier.h>
int x = MEMBARRIER_CMD_PRIVATE_EXPEDITED | MEMBARRIER_CMD_REGISTER_PRIVATE_EXPEDITED;
'''))
def sanitize_vptr_flag(compiler, flags):
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67258
if (not try_compile(compiler, flags=flags + ['-fsanitize=vptr'])
or (try_compile_and_run(compiler, flags=flags + ['-fsanitize=undefined', '-fno-sanitize-recover'],
env={'UBSAN_OPTIONS': 'exitcode=1'}, source=textwrap.dedent('''
struct A
{
virtual ~A() {}
};
struct B : virtual A {};
struct C : virtual A {};
struct D : B, virtual C {};
int main()
{
D d;
}
'''))
and False)): # -fsanitize=vptr is broken even when the test above passes
return ''
else:
print('Notice: -fsanitize=vptr is broken, disabling; some debug mode tests are bypassed.')
return '-fno-sanitize=vptr'
def adjust_visibility_flags(compiler, flags):
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80947
flags = flags + ['-fvisibility=hidden', '-std=gnu++1y', '-Werror=attributes']
if not try_compile(compiler, flags=flags, source=textwrap.dedent('''
template <class T>
class MyClass {
public:
MyClass() {
auto outer = [this] ()
{
auto fn = [this] { };
//use fn for something here
};
}
};
int main() {
MyClass<int> r;
}
''')):
print('Notice: disabling -Wattributes due to https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80947')
return '-Wno-attributes'
else:
return ''
modes = {
'debug': {
'sanitize': '-fsanitize=address -fsanitize=leak -fsanitize=undefined',
'sanitize_libs': '-lasan -lubsan',
'opt': '-O0 -DDEBUG -DDEBUG_SHARED_PTR -DDEFAULT_ALLOCATOR -DSEASTAR_THREAD_STACK_GUARDS -DNO_EXCEPTION_HACK',
'libs': '',
'cares_opts': '-DCARES_STATIC=ON -DCARES_SHARED=OFF -DCMAKE_BUILD_TYPE=Debug',
},
'release': {
'sanitize': '',
'sanitize_libs': '',
'opt': '-O2',
'libs': '',
'cares_opts': '-DCARES_STATIC=ON -DCARES_SHARED=OFF -DCMAKE_BUILD_TYPE=Release',
},
}
perf_tests = [
'tests/perf/perf_future_util',
]
tests = [
'tests/fileiotest',
'tests/directory_test',
'tests/linecount',
'tests/echotest',
'tests/l3_test',
'tests/ip_test',
'tests/timertest',
'tests/tcp_test',
'tests/futures_test',
'tests/alloc_test',
'tests/foreign_ptr_test',
'tests/smp_test',
'tests/thread_test',
'tests/thread_context_switch',
'tests/udp_server',
'tests/udp_client',
'tests/blkdiscard_test',
'tests/sstring_test',
'tests/unwind_test',
'tests/defer_test',
'tests/httpd',
'tests/memcached/test_ascii_parser',
'tests/tcp_sctp_server',
'tests/tcp_sctp_client',
'tests/allocator_test',
'tests/output_stream_test',
'tests/udp_zero_copy',
'tests/shared_ptr_test',
'tests/weak_ptr_test',
'tests/checked_ptr_test',
'tests/slab_test',
'tests/fstream_test',
'tests/distributed_test',
'tests/rpc',
'tests/semaphore_test',
'tests/expiring_fifo_test',
'tests/packet_test',
'tests/tls_test',
'tests/fair_queue_test',
'tests/rpc_test',
'tests/connect_test',
'tests/chunked_fifo_test',
'tests/circular_buffer_test',
'tests/perf/perf_fstream',
'tests/json_formatter_test',
'tests/dns_test',
'tests/execution_stage_test',
'tests/lowres_clock_test',
'tests/program_options_test',
'tests/tuple_utils_test',
'tests/tls_echo_server',
'tests/tls_simple_client',
'tests/circular_buffer_fixed_capacity_test',
'tests/noncopyable_function_test',
'tests/netconfig_test',
'tests/abort_source_test',
] + perf_tests
apps = [
'apps/httpd/httpd',
'apps/seawreck/seawreck',
'apps/io_tester/io_tester',
'apps/memcached/memcached',
'apps/iotune/iotune',
'tests/scheduling_group_demo',
]
extralibs = {
'apps/io_tester/io_tester': [ '-lyaml-cpp' ]
}
all_artifacts = apps + tests + ['libseastar.a', 'seastar.pc']
cpp_dialects = ['gnu++17', 'gnu++1z', 'gnu++14', 'gnu++1y']
try:
default_cpp_dialect = [x for x in cpp_dialects if dialect_supported(x, compiler='g++')][0]
except:
# if g++ is not available, fallback to something safe-ish
default_cpp_dialect='gnu++1y'
arg_parser = argparse.ArgumentParser('Configure seastar')
arg_parser.add_argument('--static', dest = 'static', action = 'store_const', default = '',
const = '-static',
                        help = 'Static link (useful for running on hosts outside the build environment)')
arg_parser.add_argument('--pie', dest = 'pie', action = 'store_true',
help = 'Build position-independent executable (PIE)')
arg_parser.add_argument('--so', dest = 'so', action = 'store_true',
help = 'Build shared object (SO) instead of executable')
arg_parser.add_argument('--mode', action='store', choices=list(modes.keys()) + ['all'], default='all')
arg_parser.add_argument('--with', dest='artifacts', action='append', choices=all_artifacts, default=[])
arg_parser.add_argument('--cflags', action = 'store', dest = 'user_cflags', default = '',
help = 'Extra flags for the C++ compiler')
arg_parser.add_argument('--ldflags', action = 'store', dest = 'user_ldflags', default = '',
help = 'Extra flags for the linker')
arg_parser.add_argument('--compiler', action = 'store', dest = 'cxx', default = 'g++',
help = 'C++ compiler path')
arg_parser.add_argument('--c-compiler', action='store', dest='cc', default='gcc',
help = 'C compiler path (for bundled libraries such as dpdk and c-ares)')
arg_parser.add_argument('--c++-dialect', action='store', dest='cpp_dialect', default=default_cpp_dialect,
help='C++ dialect to build with [default: %(default)s]')
arg_parser.add_argument('--with-osv', action = 'store', dest = 'with_osv', default = '',
help = 'Shortcut for compile for OSv')
arg_parser.add_argument('--enable-dpdk', action = 'store_true', dest = 'dpdk', default = False,
help = 'Enable dpdk (from included dpdk sources)')
arg_parser.add_argument('--dpdk-target', action = 'store', dest = 'dpdk_target', default = '',
help = 'Path to DPDK SDK target location (e.g. <DPDK SDK dir>/x86_64-native-linuxapp-gcc)')
arg_parser.add_argument('--debuginfo', action = 'store', dest = 'debuginfo', type = int, default = 1,
                        help = 'Enable(1)/disable(0) compiler debug information generation')
arg_parser.add_argument('--tests-debuginfo', action='store', dest='tests_debuginfo', type=int, default=0,
                        help='Enable(1)/disable(0) compiler debug information generation for tests')
arg_parser.add_argument('--static-stdc++', dest = 'staticcxx', action = 'store_true',
help = 'Link libgcc and libstdc++ statically')
arg_parser.add_argument('--static-boost', dest = 'staticboost', action = 'store_true',
help = 'Link with boost statically')
add_tristate(arg_parser, name = 'hwloc', dest = 'hwloc', help = 'hwloc support')
arg_parser.add_argument('--enable-gcc6-concepts', dest='gcc6_concepts', action='store_true', default=False,
help='enable experimental support for C++ Concepts as implemented in GCC 6')
arg_parser.add_argument('--enable-alloc-failure-injector', dest='alloc_failure_injector', action='store_true', default=False,
help='enable allocation failure injection')
add_tristate(arg_parser, name = 'exception-scalability-workaround', dest='exception_workaround',
help='disabling override of dl_iterate_phdr symbol to workaround C++ exception scalability issues')
arg_parser.add_argument('--allocator-page-size', dest='allocator_page_size', type=int, help='override allocator page size')
arg_parser.add_argument('--protoc-compiler', action = 'store', dest='protoc', default='protoc',
help = 'Path to protoc compiler, the default is protoc')
arg_parser.add_argument('--sed', action = 'store', dest = 'sed', default = 'sed',
help = 'GNU sed path')
args = arg_parser.parse_args()
libnet = [
'net/proxy.cc',
'net/virtio.cc',
'net/dpdk.cc',
'net/ip.cc',
'net/ethernet.cc',
'net/arp.cc',
'net/native-stack.cc',
'net/ip_checksum.cc',
'net/udp.cc',
'net/tcp.cc',
'net/dhcp.cc',
'net/tls.cc',
'net/dns.cc',
'net/config.cc',
]
core = [
'core/reactor.cc',
'core/systemwide_memory_barrier.cc',
'core/fstream.cc',
'core/posix.cc',
'core/memory.cc',
'core/resource.cc',
'core/scollectd.cc',
'core/metrics.cc',
'core/app-template.cc',
'core/thread.cc',
'core/dpdk_rte.cc',
'core/fsqual.cc',
'core/linux-aio.cc',
'util/conversions.cc',
'util/program-options.cc',
'util/log.cc',
'util/backtrace.cc',
'util/alloc_failure_injector.cc',
'net/packet.cc',
'net/posix-stack.cc',
'net/net.cc',
'net/stack.cc',
'net/inet_address.cc',
'rpc/rpc.cc',
'rpc/lz4_compressor.cc',
'core/exception_hacks.cc',
'core/future-util.cc',
]
protobuf = [
'proto/metrics2.proto',
]
prometheus = [
'core/prometheus.cc',
]
http = ['http/transformers.cc',
'http/json_path.cc',
'http/file_handler.cc',
'http/common.cc',
'http/routes.cc',
'json/json_elements.cc',
'json/formatter.cc',
'http/matcher.cc',
'http/mime_types.cc',
'http/httpd.cc',
'http/reply.cc',
'http/request_parser.rl',
'http/api_docs.cc',
]
boost_test_lib = [
'tests/test-utils.cc',
'tests/test_runner.cc',
]
def maybe_static(flag, libs):
if flag and not args.static:
libs = '-Wl,-Bstatic {} -Wl,-Bdynamic'.format(libs)
return libs
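# Worked example (hypothetical call): maybe_static(True, '-lboost_system')
# returns '-Wl,-Bstatic -lboost_system -Wl,-Bdynamic' for a regular build; with
# --static the string is returned unchanged, since the whole link is already
# static.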
defines = ['FMT_HEADER_ONLY']
# Include -lgcc_s before -lunwind to work around for https://savannah.nongnu.org/bugs/?48486. See https://github.com/scylladb/scylla/issues/1725.
libs = ' '.join([maybe_static(args.staticboost,
'-lboost_program_options -lboost_system -lboost_filesystem'),
'-lstdc++ -lm',
maybe_static(args.staticboost, '-lboost_thread'),
'-lcryptopp -lrt -lgnutls -lgnutlsxx -llz4 -lprotobuf -ldl -lgcc_s -lunwind -lyaml-cpp',
])
boost_unit_test_lib = maybe_static(args.staticboost, '-lboost_unit_test_framework')
hwloc_libs = '-lhwloc -lnuma -lpciaccess -lxml2 -lz'
if args.gcc6_concepts:
defines.append('HAVE_GCC6_CONCEPTS')
args.user_cflags += ' -fconcepts'
if args.alloc_failure_injector:
defines.append('SEASTAR_ENABLE_ALLOC_FAILURE_INJECTION')
if not apply_tristate(args.exception_workaround, test = lambda: not args.staticcxx and not args.static,
note = "Note: disabling exception scalability workaround due to static linkage of libgcc and libstdc++",
missing = "Error: cannot enable exception scalability workaround with static linkage of libgcc and libstdc++"):
defines.append('NO_EXCEPTION_HACK')
if args.staticcxx:
libs = libs.replace('-lstdc++', '')
libs += ' -static-libgcc -static-libstdc++'
if args.staticcxx or args.static:
defines.append("NO_EXCEPTION_INTERCEPT");
memcache_base = [
'apps/memcached/ascii.rl'
] + libnet + core
deps = {
'libseastar.a' : core + libnet + http + protobuf + prometheus,
'seastar.pc': [],
'apps/httpd/httpd': ['apps/httpd/demo.json', 'apps/httpd/main.cc'] + http + libnet + core,
'apps/memcached/memcached': ['apps/memcached/memcache.cc'] + memcache_base,
'tests/memcached/test_ascii_parser': ['tests/memcached/test_ascii_parser.cc'] + memcache_base,
'tests/fileiotest': ['tests/fileiotest.cc'] + core,
'tests/directory_test': ['tests/directory_test.cc'] + core,
'tests/linecount': ['tests/linecount.cc'] + core,
'tests/echotest': ['tests/echotest.cc'] + core + libnet,
'tests/l3_test': ['tests/l3_test.cc'] + core + libnet,
'tests/ip_test': ['tests/ip_test.cc'] + core + libnet,
'tests/tcp_test': ['tests/tcp_test.cc'] + core + libnet,
'tests/timertest': ['tests/timertest.cc'] + core,
'tests/futures_test': ['tests/futures_test.cc'] + core,
'tests/alloc_test': ['tests/alloc_test.cc'] + core,
'tests/foreign_ptr_test': ['tests/foreign_ptr_test.cc'] + core,
'tests/semaphore_test': ['tests/semaphore_test.cc'] + core,
'tests/expiring_fifo_test': ['tests/expiring_fifo_test.cc'] + core,
'tests/smp_test': ['tests/smp_test.cc'] + core,
'tests/thread_test': ['tests/thread_test.cc'] + core,
'tests/thread_context_switch': ['tests/thread_context_switch.cc'] + core,
'tests/udp_server': ['tests/udp_server.cc'] + core + libnet,
'tests/udp_client': ['tests/udp_client.cc'] + core + libnet,
'tests/tcp_sctp_server': ['tests/tcp_sctp_server.cc'] + core + libnet,
'tests/tcp_sctp_client': ['tests/tcp_sctp_client.cc'] + core + libnet,
'tests/tls_test': ['tests/tls_test.cc'] + core + libnet,
'tests/fair_queue_test': ['tests/fair_queue_test.cc'] + core,
'apps/seawreck/seawreck': ['apps/seawreck/seawreck.cc', 'http/http_response_parser.rl'] + core + libnet,
'apps/io_tester/io_tester': ['apps/io_tester/io_tester.cc'] + core,
'apps/iotune/iotune': ['apps/iotune/iotune.cc'] + ['core/resource.cc', 'core/fsqual.cc', 'core/linux-aio.cc'],
'tests/blkdiscard_test': ['tests/blkdiscard_test.cc'] + core,
'tests/sstring_test': ['tests/sstring_test.cc'] + core,
'tests/unwind_test': ['tests/unwind_test.cc'] + core,
'tests/defer_test': ['tests/defer_test.cc'] + core,
'tests/httpd': ['tests/httpd.cc'] + http + core,
'tests/allocator_test': ['tests/allocator_test.cc'] + core,
'tests/output_stream_test': ['tests/output_stream_test.cc'] + core + libnet,
'tests/udp_zero_copy': ['tests/udp_zero_copy.cc'] + core + libnet,
'tests/shared_ptr_test': ['tests/shared_ptr_test.cc'] + core,
'tests/weak_ptr_test': ['tests/weak_ptr_test.cc'] + core,
'tests/checked_ptr_test': ['tests/checked_ptr_test.cc'] + core,
'tests/slab_test': ['tests/slab_test.cc'] + core,
'tests/fstream_test': ['tests/fstream_test.cc'] + core,
'tests/distributed_test': ['tests/distributed_test.cc'] + core,
'tests/rpc': ['tests/rpc.cc'] + core + libnet,
'tests/rpc_test': ['tests/rpc_test.cc'] + core + libnet,
'tests/packet_test': ['tests/packet_test.cc'] + core + libnet,
'tests/connect_test': ['tests/connect_test.cc'] + core + libnet,
'tests/chunked_fifo_test': ['tests/chunked_fifo_test.cc'] + core,
'tests/circular_buffer_test': ['tests/circular_buffer_test.cc'] + core,
'tests/perf/perf_fstream': ['tests/perf/perf_fstream.cc'] + core,
'tests/json_formatter_test': ['tests/json_formatter_test.cc'] + core + http,
'tests/dns_test': ['tests/dns_test.cc'] + core + libnet,
'tests/execution_stage_test': ['tests/execution_stage_test.cc'] + core,
'tests/lowres_clock_test': ['tests/lowres_clock_test.cc'] + core,
'tests/program_options_test': ['tests/program_options_test.cc'] + core,
'tests/tuple_utils_test': ['tests/tuple_utils_test.cc'],
'tests/tls_echo_server': ['tests/tls_echo_server.cc'] + core + libnet,
'tests/tls_simple_client': ['tests/tls_simple_client.cc'] + core + libnet,
'tests/circular_buffer_fixed_capacity_test': ['tests/circular_buffer_fixed_capacity_test.cc'],
'tests/scheduling_group_demo': ['tests/scheduling_group_demo.cc'] + core,
'tests/noncopyable_function_test': ['tests/noncopyable_function_test.cc'],
'tests/netconfig_test': ['tests/netconfig_test.cc'] + core + libnet,
'tests/abort_source_test': ['tests/abort_source_test.cc'] + core,
}
boost_tests = [
'tests/memcached/test_ascii_parser',
'tests/fileiotest',
'tests/futures_test',
'tests/alloc_test',
'tests/foreign_ptr_test',
'tests/semaphore_test',
'tests/expiring_fifo_test',
'tests/thread_test',
'tests/tls_test',
'tests/fair_queue_test',
'tests/httpd',
'tests/output_stream_test',
'tests/fstream_test',
'tests/rpc_test',
'tests/connect_test',
'tests/json_formatter_test',
'tests/dns_test',
'tests/execution_stage_test',
'tests/lowres_clock_test',
'tests/abort_source_test',
]
for bt in boost_tests:
deps[bt] += boost_test_lib
for pt in perf_tests:
deps[pt] = [pt + '.cc'] + core + ['tests/perf/perf_tests.cc']
warnings = [
'-Wno-mismatched-tags', # clang-only
'-Wno-pessimizing-move', # clang-only: moving a temporary object prevents copy elision
'-Wno-redundant-move', # clang-only: redundant move in return statement
'-Wno-inconsistent-missing-override', # clang-only: 'x' overrides a member function but is not marked 'override'
'-Wno-unused-private-field', # clang-only: private field 'x' is not used
'-Wno-unknown-attributes', # clang-only: unknown attribute 'x' ignored (x in this case is gnu::externally_visible)
    '-Wno-unneeded-internal-declaration', # clang-only: 'x' function 'x' declared in header file should be declared 'x'
'-Wno-undefined-inline', # clang-only: inline function 'x' is not defined
'-Wno-overloaded-virtual', # clang-only: 'x' hides overloaded virtual functions
'-Wno-maybe-uninitialized',
'-Wno-sign-compare',
    '-Wno-error=cpp', # gcc: allow preprocessor warnings
    '-Wno-error=#warning', # clang: allow preprocessor warnings
]
# The "--with-osv=<path>" parameter is a shortcut for a bunch of other
# settings:
if args.with_osv:
args.so = True
args.hwloc = False
args.user_cflags = (args.user_cflags +
' -DDEFAULT_ALLOCATOR -fvisibility=default -DHAVE_OSV -I' +
args.with_osv + ' -I' + args.with_osv + '/include -I' +
args.with_osv + '/arch/x64')
if args.allocator_page_size:
args.user_cflags += ' -DSEASTAR_OVERRIDE_ALLOCATOR_PAGE_SIZE=' + str(args.allocator_page_size)
dpdk_arch_xlat = {
'native': 'native',
'nehalem': 'nhm',
'westmere': 'wsm',
'sandybridge': 'snb',
'ivybridge': 'ivb',
}
dpdk_machine = 'native'
if args.dpdk:
if not os.path.exists('dpdk') or not os.listdir('dpdk'):
raise Exception('--enable-dpdk: dpdk/ is empty. Run "git submodule update --init".')
cflags = args.user_cflags.split()
dpdk_machine = ([dpdk_arch_xlat[cflag[7:]]
for cflag in cflags
if cflag.startswith('-march')] or ['native'])[0]
subprocess.check_call('make -C dpdk RTE_OUTPUT=$PWD/build/dpdk/ config T=x86_64-native-linuxapp-gcc'.format(
dpdk_machine=dpdk_machine),
shell = True)
    # adjust configuration to taste
dotconfig = 'build/dpdk/.config'
lines = open(dotconfig, encoding='UTF-8').readlines()
def update(lines, vars):
ret = []
for line in lines:
for var, val in vars.items():
if line.startswith(var + '='):
line = var + '=' + val + '\n'
ret.append(line)
return ret
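    # Worked example (made-up input): update(['CONFIG_RTE_LIBRTE_KNI=y\n'],
    # {'CONFIG_RTE_LIBRTE_KNI': 'n'}) returns ['CONFIG_RTE_LIBRTE_KNI=n\n'];
    # lines whose key does not appear in vars pass through untouched.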
lines = update(lines, {'CONFIG_RTE_LIBRTE_PMD_BOND': 'n',
'CONFIG_RTE_MBUF_SCATTER_GATHER': 'n',
'CONFIG_RTE_LIBRTE_IP_FRAG': 'n',
'CONFIG_RTE_APP_TEST': 'n',
'CONFIG_RTE_TEST_PMD': 'n',
'CONFIG_RTE_MBUF_REFCNT_ATOMIC': 'n',
'CONFIG_RTE_MAX_MEMSEG': '8192',
'CONFIG_RTE_EAL_IGB_UIO': 'n',
'CONFIG_RTE_LIBRTE_KNI': 'n',
'CONFIG_RTE_KNI_KMOD': 'n',
'CONFIG_RTE_LIBRTE_JOBSTATS': 'n',
'CONFIG_RTE_LIBRTE_LPM': 'n',
'CONFIG_RTE_LIBRTE_ACL': 'n',
'CONFIG_RTE_LIBRTE_POWER': 'n',
'CONFIG_RTE_LIBRTE_IP_FRAG': 'n',
'CONFIG_RTE_LIBRTE_METER': 'n',
'CONFIG_RTE_LIBRTE_SCHED': 'n',
'CONFIG_RTE_LIBRTE_DISTRIBUTOR': 'n',
'CONFIG_RTE_LIBRTE_PMD_CRYPTO_SCHEDULER': 'n',
'CONFIG_RTE_LIBRTE_REORDER': 'n',
'CONFIG_RTE_LIBRTE_PORT': 'n',
'CONFIG_RTE_LIBRTE_TABLE': 'n',
'CONFIG_RTE_LIBRTE_PIPELINE': 'n',
})
lines += 'CONFIG_RTE_MACHINE={}'.format(dpdk_machine)
open(dotconfig, 'w', encoding='UTF-8').writelines(lines)
args.dpdk_target = os.getcwd() + '/build/dpdk'
if args.dpdk_target:
args.user_cflags = (args.user_cflags +
' -DHAVE_DPDK -I' + args.dpdk_target + '/include ' +
dpdk_cflags(args.dpdk_target) +
' -Wno-error=literal-suffix -Wno-literal-suffix -Wno-invalid-offsetof')
libs += (' -L' + args.dpdk_target + '/lib ')
if args.with_osv:
libs += '-lintel_dpdk -lrt -lm -ldl'
else:
libs += '-Wl,--whole-archive -lrte_pmd_vmxnet3_uio -lrte_pmd_i40e -lrte_pmd_ixgbe -lrte_pmd_e1000 -lrte_pmd_ring -lrte_pmd_bnxt -lrte_pmd_cxgbe -lrte_pmd_ena -lrte_pmd_enic -lrte_pmd_fm10k -lrte_pmd_nfp -lrte_pmd_qede -lrte_pmd_sfc_efx -lrte_hash -lrte_kvargs -lrte_mbuf -lrte_ethdev -lrte_eal -lrte_mempool -lrte_mempool_ring -lrte_ring -lrte_cmdline -lrte_cfgfile -Wl,--no-whole-archive -lrt -lm -ldl'
args.user_cflags += ' -I{srcdir}/fmt'.format(**globals())
if not args.staticboost:
args.user_cflags += ' -DBOOST_TEST_DYN_LINK'
warnings = [w
for w in warnings
if warning_supported(warning = w, compiler = args.cxx, flags=args.user_cflags.split())]
warnings = ' '.join(warnings)
dbgflag = debug_flag(args.cxx, flags=args.user_cflags.split()) if args.debuginfo else ''
tests_link_rule = 'link' if args.tests_debuginfo else 'link_stripped'
sanitize_flags = sanitize_vptr_flag(args.cxx, flags=args.user_cflags.split())
visibility_flags = adjust_visibility_flags(args.cxx, flags=args.user_cflags.split())
if not try_compile(args.cxx, source='#include <gnutls/gnutls.h>', flags=args.user_cflags.split()):
print('Seastar requires gnutls. Install gnutls-devel/libgnutls-dev')
sys.exit(1)
if not try_compile(args.cxx, source='#include <gnutls/gnutls.h>\nint x = GNUTLS_NONBLOCK;', flags=args.user_cflags.split()):
print('Seastar requires gnutls >= 2.8. Install libgnutls28-dev or later.')
sys.exit(1)
if not try_compile(args.cxx, source='#include <experimental/string_view>', flags=['-std=gnu++1y'] + args.user_cflags.split()):
print('Seastar requires g++ >= 4.9. Install g++-4.9 or later (use --compiler option).')
sys.exit(1)
if not try_compile(args.cxx, '''#include <boost/version.hpp>\n\
#if BOOST_VERSION < 105800\n\
#error "Invalid boost version"\n\
#endif''', flags=args.user_cflags.split()):
print("Seastar requires boost >= 1.58")
sys.exit(1)
modes['debug']['sanitize'] += ' ' + sanitize_flags
def have_hwloc():
return try_compile(compiler = args.cxx, source = '#include <hwloc.h>\n#include <numa.h>', flags=args.user_cflags.split())
if apply_tristate(args.hwloc, test = have_hwloc,
note = 'Note: hwloc-devel/numactl-devel not installed. No NUMA support.',
missing = 'Error: required packages hwloc-devel/numactl-devel not installed.'):
libs += ' ' + hwloc_libs
defines.append('HAVE_HWLOC')
defines.append('HAVE_NUMA')
if detect_membarrier(compiler=args.cxx, flags=args.user_cflags.split()):
defines.append('SEASTAR_HAS_MEMBARRIER')
if try_compile(args.cxx, source = textwrap.dedent('''\
#include <lz4.h>
void m() {
LZ4_compress_default(static_cast<const char*>(0), static_cast<char*>(0), 0, 0);
}
'''), flags=args.user_cflags.split()):
defines.append("HAVE_LZ4_COMPRESS_DEFAULT")
if try_compile_and_link(args.cxx, flags=['-fsanitize=address'] + args.user_cflags.split(), source = textwrap.dedent('''\
#include <cstddef>
extern "C" {
void __sanitizer_start_switch_fiber(void**, const void*, size_t);
void __sanitizer_finish_switch_fiber(void*, const void**, size_t*);
}
int main() {
__sanitizer_start_switch_fiber(nullptr, nullptr, 0);
__sanitizer_finish_switch_fiber(nullptr, nullptr, nullptr);
}
''')):
defines.append("HAVE_ASAN_FIBER_SUPPORT")
if args.so:
args.pie = '-shared'
args.fpie = '-fpic'
elif args.pie:
args.pie = '-pie'
args.fpie = '-fpie'
else:
args.pie = ''
args.fpie = ''
defines = ' '.join(['-D' + d for d in defines])
globals().update(vars(args))
total_memory = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
link_pool_depth = max(int(total_memory / 7e9), 1)
build_modes = modes if args.mode == 'all' else [args.mode]
build_artifacts = all_artifacts if not args.artifacts else args.artifacts
protoc = args.protoc
dpdk_sources = []
if args.dpdk:
for root, dirs, files in os.walk('dpdk'):
dpdk_sources += [os.path.join(root, file)
for file in files
if file.endswith('.h') or file.endswith('.c')]
dpdk_sources = ' '.join(dpdk_sources)
# both source and builddir location
cares_dir = 'c-ares'
cares_lib = 'cares-seastar'
cares_src_lib = cares_dir + '/lib/libcares.a'
if not os.path.exists(cares_dir) or not os.listdir(cares_dir):
raise Exception(cares_dir + ' is empty. Run "git submodule update --init".')
cares_sources = []
for root, dirs, files in os.walk('c-ares'):
cares_sources += [os.path.join(root, file)
for file in files
if file.endswith('.h') or file.endswith('.c')]
cares_sources = ' '.join(cares_sources)
libs += ' -l' + cares_lib
# "libs" contains mostly pre-existing libraries, but if we want to add to
# it a library which we built here, we need to ensure that this library
# gets built before actually using "libs". So let's make a list "built_libs"
# of libraries which are targets built here. These libraries are all relative
# to the current mode's build directory.
built_libs = []
built_libs += ['lib' + cares_lib + '.a']
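# Concretely, built_libs currently holds just ['libcares-seastar.a']; each entry
# is later expanded to '$builddir/<mode>/<name>' and listed as an implicit
# dependency on the test and app link lines, so the bundled c-ares archive is
# always built before anything links against it.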
outdir = 'build'
buildfile = 'build.ninja'
os.makedirs(outdir, exist_ok = True)
do_sanitize = True
if args.static:
do_sanitize = False
with open(buildfile, 'w') as f:
dpdk_deps = ''
if args.dpdk:
# fake dependencies on dpdk, so that it is built before anything else
dpdk_deps = ' {dpdk_target}/include/rte_eal.h {dpdk_target}/lib/librte_eal.a'.format(dpdk_target=args.dpdk_target)
f.write(textwrap.dedent('''\
configure_args = {configure_args}
builddir = {outdir}
full_builddir = {srcdir}/$builddir
cxx = {cxx}
# we disable _FORTIFY_SOURCE because it generates false positives with longjmp() (core/thread.cc)
cxxflags = -std={cpp_dialect} {dbgflag} {fpie} -Wall -Werror -Wno-error=deprecated-declarations -fvisibility=hidden {visibility_flags} -pthread -I{srcdir} -U_FORTIFY_SOURCE {user_cflags} {warnings} {defines}
ldflags = {dbgflag} -Wl,--no-as-needed {static} {pie} -fvisibility=hidden {visibility_flags} -pthread {user_ldflags}
libs = {libs}
pool link_pool
depth = {link_pool_depth}
rule ragel
# sed away a bug in ragel 7 that emits some extraneous _nfa* variables
# (the $$ is collapsed to a single one by ninja)
command = ragel -G2 -o $out $in && {sed} -i -e '1h;2,$$H;$$!d;g' -re 's/static const char _nfa[^;]*;//g' $out
description = RAGEL $out
rule gen
command = /bin/echo -e $text > $out
description = GEN $out
rule swagger
command = json/json2code.py -f $in -o $out
description = SWAGGER $out
rule protobuf
command = {protoc} --cpp_out=$outdir $in
description = PROTOC $out
rule copy_file
command = cp $in $out
''').format(**globals()))
if args.dpdk:
f.write(textwrap.dedent('''\
rule dpdkmake
command = make -C build/dpdk CC={args.cc}
build {dpdk_deps} : dpdkmake {dpdk_sources}
''').format(**globals()))
for mode in build_modes:
objdeps = {}
modeval = modes[mode]
if modeval['sanitize'] and not do_sanitize:
print('Note: --static disables debug mode sanitizers')
modeval['sanitize'] = ''
modeval['sanitize_libs'] = ''
elif modeval['sanitize']:
modeval['sanitize'] += ' -DASAN_ENABLED'
f.write(textwrap.dedent('''\
cxxflags_{mode} = {sanitize} {opt} -I$full_builddir/{mode}/gen -I$full_builddir/{mode}/c-ares
libs_{mode} = {sanitize_libs} {libs}
rule cxx.{mode}
command = $cxx -MD -MT $out -MF $out.d $cxxflags_{mode} $cxxflags -c -o $out $in
description = CXX $out
depfile = $out.d
rule link.{mode}
command = $cxx $cxxflags_{mode} -L$builddir/{mode} $ldflags -o $out $in $libs $libs_{mode} $extralibs
description = LINK $out
pool = link_pool
rule link_stripped.{mode}
command = $cxx $cxxflags_{mode} -s -L$builddir/{mode} $ldflags -o $out $in $libs $libs_{mode} $extralibs
description = LINK (stripped) $out
pool = link_pool
rule ar.{mode}
command = rm -f $out; ar cr $out $in; ranlib $out
description = AR $out
''').format(mode = mode, **modeval))
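        # The rules emitted above are suffixed with the mode name ("cxx.release",
        # "link.debug", ...), so a hypothetical `ninja build/release/tests/rpc`
        # resolves to the release-flavoured compile and link flags automatically.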
f.write('build {mode}: phony $builddir/{mode}/lib{cares_lib}.a {artifacts}\n'.format(mode = mode, cares_lib=cares_lib,
artifacts = str.join(' ', ('$builddir/' + mode + '/' + x for x in build_artifacts))))
f.write(textwrap.dedent('''\
rule caresmake_{mode}
command = make -C build/{mode}/{cares_dir} CC={args.cc}
rule carescmake_{mode}
command = mkdir -p $builddir/{mode}/{cares_dir} && cd $builddir/{mode}/{cares_dir} && CC={args.cc} cmake {cares_opts} {srcdir}/$in
build $builddir/{mode}/{cares_dir}/Makefile : carescmake_{mode} {cares_dir}
build $builddir/{mode}/{cares_dir}/ares_build.h : phony $builddir/{mode}/{cares_dir}/Makefile
build $builddir/{mode}/{cares_src_lib} : caresmake_{mode} $builddir/{mode}/{cares_dir}/Makefile | {cares_sources}
build $builddir/{mode}/lib{cares_lib}.a : copy_file $builddir/{mode}/{cares_src_lib}
''').format(cares_opts=(modeval['cares_opts']), **globals()))
objdeps['$builddir/' + mode + '/net/dns.o'] = ' $builddir/' + mode + '/' + cares_dir + '/ares_build.h'
compiles = {}
ragels = {}
swaggers = {}
protobufs = {}
for binary in build_artifacts:
srcs = deps[binary]
objs = ['$builddir/' + mode + '/' + src.replace('.cc', '.o')
for src in srcs
if src.endswith('.cc')]
objs += ['$builddir/' + mode + '/gen/' + src.replace('.proto', '.pb.o')
for src in srcs
if src.endswith('.proto')]
if binary.endswith('.pc'):
vars = modeval.copy()
vars.update(globals())
pc = textwrap.dedent('''\
Name: Seastar
URL: http://seastar-project.org/
Description: Advanced C++ framework for high-performance server applications on modern hardware.
Version: 1.0
                Libs: -L$full_builddir/{mode} -Wl,--whole-archive,-lseastar,--no-whole-archive $cxxflags $cxxflags_{mode} -Wl,--no-as-needed {static} {pie} {user_ldflags} {sanitize_libs} {libs}
Cflags: $cxxflags $cxxflags_{mode}
''').format(**vars)
f.write('build $builddir/{}/{}: gen\n text = {}\n'.format(mode, binary, repr(pc)))
elif binary.endswith('.a'):
f.write('build $builddir/{}/{}: ar.{} {}\n'.format(mode, binary, mode, str.join(' ', objs)))
else:
libdeps = str.join(' ', ('$builddir/{}/{}'.format(mode, i) for i in built_libs))
test_extralibs = ['-lyaml-cpp']
if binary.startswith('tests/'):
if binary in boost_tests:
test_extralibs += [maybe_static(args.staticboost, '-lboost_unit_test_framework')]
# Our code's debugging information is huge, and multiplied
# by many tests yields ridiculous amounts of disk space.
# So we strip the tests by default; The user can very
# quickly re-link the test unstripped by adding a "_g"
# to the test name, e.g., "ninja build/release/testname_g"
f.write('build $builddir/{}/{}: {}.{} {} | {} {}\n'.format(mode, binary, tests_link_rule, mode, str.join(' ', objs), dpdk_deps, libdeps))
f.write(' extralibs = {}\n'.format(' '.join(test_extralibs)))
f.write('build $builddir/{}/{}_g: link.{} {} | {} {}\n'.format(mode, binary, mode, str.join(' ', objs), dpdk_deps, libdeps))
f.write(' extralibs = {}\n'.format(' '.join(test_extralibs)))
else:
f.write('build $builddir/{}/{}: link.{} {} | {} {} $builddir/{}/lib{}.a\n'.format(mode, binary, mode, str.join(' ', objs), dpdk_deps, libdeps, mode, cares_lib))
if binary in extralibs.keys():
app_extralibs = extralibs[binary]
f.write(' extralibs = {}\n'.format(' '.join(app_extralibs)))
for src in srcs:
if src.endswith('.cc'):
obj = '$builddir/' + mode + '/' + src.replace('.cc', '.o')
compiles[obj] = src
elif src.endswith('.proto'):
hh = '$builddir/' + mode + '/gen/' + src.replace('.proto', '.pb.h')
protobufs[hh] = src
compiles[hh.replace('.h', '.o')] = hh.replace('.h', '.cc')
elif src.endswith('.rl'):
hh = '$builddir/' + mode + '/gen/' + src.replace('.rl', '.hh')
ragels[hh] = src
elif src.endswith('.json'):
hh = '$builddir/' + mode + '/gen/' + src + '.hh'
swaggers[hh] = src
else:
raise Exception('No rule for ' + src)
for obj in compiles:
src = compiles[obj]
gen_headers = list(ragels.keys()) + list(swaggers.keys()) + list(protobufs.keys())
f.write('build {}: cxx.{} {} || {} \n'.format(obj, mode, src, ' '.join(gen_headers) + dpdk_deps + objdeps.get(obj, '')))
for hh in ragels:
src = ragels[hh]
f.write('build {}: ragel {}\n'.format(hh, src))
for hh in swaggers:
src = swaggers[hh]
f.write('build {}: swagger {} | json/json2code.py\n'.format(hh,src))
for pb in protobufs:
src = protobufs[pb]
c_pb = pb.replace('.h','.cc')
outd = os.path.dirname(os.path.dirname(pb))
f.write('build {} {}: protobuf {}\n outdir = {}\n'.format(c_pb, pb, src, outd))
f.write(textwrap.dedent('''\
rule configure
command = python3 configure.py $configure_args
generator = 1
build build.ninja: configure | configure.py
rule cscope
command = find -name '*.[chS]' -o -name "*.cc" -o -name "*.hh" | cscope -bq -i-
description = CSCOPE
build cscope: cscope
rule md2html
command = pandoc --self-contained --toc -c doc/template.css -V documentclass=report --chapters --number-sections -f markdown_github+pandoc_title_block --highlight-style tango $in -o $out
description = PANDOC $out
rule md2pdf
command = pandoc -f markdown_github+pandoc_title_block --highlight-style tango --template=doc/template.tex $in -o $out
description = PANDOC $out
build doc/tutorial.html: md2html doc/tutorial.md
build doc/tutorial.pdf: md2pdf doc/tutorial.md
default {modes_list}
''').format(modes_list = ' '.join(build_modes), **globals()))
| apache-2.0 | -1,926,595,404,115,856,600 | 43.406504 | 411 | 0.578749 | false |
Royal-Society-of-New-Zealand/NZ-ORCID-Hub | orcid_api_v3/models/work_summary_v30_rc1.py | 1 | 13920 | # coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.created_date_v30_rc1 import CreatedDateV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.external_i_ds_v30_rc1 import ExternalIDsV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v30_rc1 import LastModifiedDateV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.publication_date_v30_rc1 import PublicationDateV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.source_v30_rc1 import SourceV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.title_v30_rc1 import TitleV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.url_v30_rc1 import UrlV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.work_title_v30_rc1 import WorkTitleV30Rc1 # noqa: F401,E501
class WorkSummaryV30Rc1(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'put_code': 'int',
'created_date': 'CreatedDateV30Rc1',
'last_modified_date': 'LastModifiedDateV30Rc1',
'source': 'SourceV30Rc1',
'title': 'WorkTitleV30Rc1',
'external_ids': 'ExternalIDsV30Rc1',
'url': 'UrlV30Rc1',
'type': 'str',
'publication_date': 'PublicationDateV30Rc1',
'journal_title': 'TitleV30Rc1',
'visibility': 'str',
'path': 'str',
'display_index': 'str'
}
attribute_map = {
'put_code': 'put-code',
'created_date': 'created-date',
'last_modified_date': 'last-modified-date',
'source': 'source',
'title': 'title',
'external_ids': 'external-ids',
'url': 'url',
'type': 'type',
'publication_date': 'publication-date',
'journal_title': 'journal-title',
'visibility': 'visibility',
'path': 'path',
'display_index': 'display-index'
}
def __init__(self, put_code=None, created_date=None, last_modified_date=None, source=None, title=None, external_ids=None, url=None, type=None, publication_date=None, journal_title=None, visibility=None, path=None, display_index=None): # noqa: E501
"""WorkSummaryV30Rc1 - a model defined in Swagger""" # noqa: E501
self._put_code = None
self._created_date = None
self._last_modified_date = None
self._source = None
self._title = None
self._external_ids = None
self._url = None
self._type = None
self._publication_date = None
self._journal_title = None
self._visibility = None
self._path = None
self._display_index = None
self.discriminator = None
if put_code is not None:
self.put_code = put_code
if created_date is not None:
self.created_date = created_date
if last_modified_date is not None:
self.last_modified_date = last_modified_date
if source is not None:
self.source = source
if title is not None:
self.title = title
if external_ids is not None:
self.external_ids = external_ids
if url is not None:
self.url = url
if type is not None:
self.type = type
if publication_date is not None:
self.publication_date = publication_date
if journal_title is not None:
self.journal_title = journal_title
if visibility is not None:
self.visibility = visibility
if path is not None:
self.path = path
if display_index is not None:
self.display_index = display_index
@property
def put_code(self):
"""Gets the put_code of this WorkSummaryV30Rc1. # noqa: E501
:return: The put_code of this WorkSummaryV30Rc1. # noqa: E501
:rtype: int
"""
return self._put_code
@put_code.setter
def put_code(self, put_code):
"""Sets the put_code of this WorkSummaryV30Rc1.
:param put_code: The put_code of this WorkSummaryV30Rc1. # noqa: E501
:type: int
"""
self._put_code = put_code
@property
def created_date(self):
"""Gets the created_date of this WorkSummaryV30Rc1. # noqa: E501
:return: The created_date of this WorkSummaryV30Rc1. # noqa: E501
:rtype: CreatedDateV30Rc1
"""
return self._created_date
@created_date.setter
def created_date(self, created_date):
"""Sets the created_date of this WorkSummaryV30Rc1.
:param created_date: The created_date of this WorkSummaryV30Rc1. # noqa: E501
:type: CreatedDateV30Rc1
"""
self._created_date = created_date
@property
def last_modified_date(self):
"""Gets the last_modified_date of this WorkSummaryV30Rc1. # noqa: E501
:return: The last_modified_date of this WorkSummaryV30Rc1. # noqa: E501
:rtype: LastModifiedDateV30Rc1
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""Sets the last_modified_date of this WorkSummaryV30Rc1.
:param last_modified_date: The last_modified_date of this WorkSummaryV30Rc1. # noqa: E501
:type: LastModifiedDateV30Rc1
"""
self._last_modified_date = last_modified_date
@property
def source(self):
"""Gets the source of this WorkSummaryV30Rc1. # noqa: E501
:return: The source of this WorkSummaryV30Rc1. # noqa: E501
:rtype: SourceV30Rc1
"""
return self._source
@source.setter
def source(self, source):
"""Sets the source of this WorkSummaryV30Rc1.
:param source: The source of this WorkSummaryV30Rc1. # noqa: E501
:type: SourceV30Rc1
"""
self._source = source
@property
def title(self):
"""Gets the title of this WorkSummaryV30Rc1. # noqa: E501
:return: The title of this WorkSummaryV30Rc1. # noqa: E501
:rtype: WorkTitleV30Rc1
"""
return self._title
@title.setter
def title(self, title):
"""Sets the title of this WorkSummaryV30Rc1.
:param title: The title of this WorkSummaryV30Rc1. # noqa: E501
:type: WorkTitleV30Rc1
"""
self._title = title
@property
def external_ids(self):
"""Gets the external_ids of this WorkSummaryV30Rc1. # noqa: E501
:return: The external_ids of this WorkSummaryV30Rc1. # noqa: E501
:rtype: ExternalIDsV30Rc1
"""
return self._external_ids
@external_ids.setter
def external_ids(self, external_ids):
"""Sets the external_ids of this WorkSummaryV30Rc1.
:param external_ids: The external_ids of this WorkSummaryV30Rc1. # noqa: E501
:type: ExternalIDsV30Rc1
"""
self._external_ids = external_ids
@property
def url(self):
"""Gets the url of this WorkSummaryV30Rc1. # noqa: E501
:return: The url of this WorkSummaryV30Rc1. # noqa: E501
:rtype: UrlV30Rc1
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this WorkSummaryV30Rc1.
:param url: The url of this WorkSummaryV30Rc1. # noqa: E501
:type: UrlV30Rc1
"""
self._url = url
@property
def type(self):
"""Gets the type of this WorkSummaryV30Rc1. # noqa: E501
:return: The type of this WorkSummaryV30Rc1. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this WorkSummaryV30Rc1.
:param type: The type of this WorkSummaryV30Rc1. # noqa: E501
:type: str
"""
allowed_values = ["ARTISTIC_PERFORMANCE", "BOOK_CHAPTER", "BOOK_REVIEW", "BOOK", "CONFERENCE_ABSTRACT", "CONFERENCE_PAPER", "CONFERENCE_POSTER", "DATA_SET", "DICTIONARY_ENTRY", "DISCLOSURE", "DISSERTATION", "EDITED_BOOK", "ENCYCLOPEDIA_ENTRY", "INVENTION", "JOURNAL_ARTICLE", "JOURNAL_ISSUE", "LECTURE_SPEECH", "LICENSE", "MAGAZINE_ARTICLE", "MANUAL", "NEWSLETTER_ARTICLE", "NEWSPAPER_ARTICLE", "ONLINE_RESOURCE", "OTHER", "PATENT", "PREPRINT", "REGISTERED_COPYRIGHT", "REPORT", "RESEARCH_TECHNIQUE", "RESEARCH_TOOL", "SOFTWARE", "SPIN_OFF_COMPANY", "STANDARDS_AND_POLICY", "SUPERVISED_STUDENT_PUBLICATION", "TECHNICAL_STANDARD", "TEST", "TRADEMARK", "TRANSLATION", "WEBSITE", "WORKING_PAPER", "UNDEFINED"] # noqa: E501
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
)
self._type = type
@property
def publication_date(self):
"""Gets the publication_date of this WorkSummaryV30Rc1. # noqa: E501
:return: The publication_date of this WorkSummaryV30Rc1. # noqa: E501
:rtype: PublicationDateV30Rc1
"""
return self._publication_date
@publication_date.setter
def publication_date(self, publication_date):
"""Sets the publication_date of this WorkSummaryV30Rc1.
:param publication_date: The publication_date of this WorkSummaryV30Rc1. # noqa: E501
:type: PublicationDateV30Rc1
"""
self._publication_date = publication_date
@property
def journal_title(self):
"""Gets the journal_title of this WorkSummaryV30Rc1. # noqa: E501
:return: The journal_title of this WorkSummaryV30Rc1. # noqa: E501
:rtype: TitleV30Rc1
"""
return self._journal_title
@journal_title.setter
def journal_title(self, journal_title):
"""Sets the journal_title of this WorkSummaryV30Rc1.
:param journal_title: The journal_title of this WorkSummaryV30Rc1. # noqa: E501
:type: TitleV30Rc1
"""
self._journal_title = journal_title
@property
def visibility(self):
"""Gets the visibility of this WorkSummaryV30Rc1. # noqa: E501
:return: The visibility of this WorkSummaryV30Rc1. # noqa: E501
:rtype: str
"""
return self._visibility
@visibility.setter
def visibility(self, visibility):
"""Sets the visibility of this WorkSummaryV30Rc1.
:param visibility: The visibility of this WorkSummaryV30Rc1. # noqa: E501
:type: str
"""
allowed_values = ["LIMITED", "REGISTERED_ONLY", "PUBLIC", "PRIVATE"] # noqa: E501
if visibility not in allowed_values:
raise ValueError(
"Invalid value for `visibility` ({0}), must be one of {1}" # noqa: E501
.format(visibility, allowed_values)
)
self._visibility = visibility
@property
def path(self):
"""Gets the path of this WorkSummaryV30Rc1. # noqa: E501
:return: The path of this WorkSummaryV30Rc1. # noqa: E501
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""Sets the path of this WorkSummaryV30Rc1.
:param path: The path of this WorkSummaryV30Rc1. # noqa: E501
:type: str
"""
self._path = path
@property
def display_index(self):
"""Gets the display_index of this WorkSummaryV30Rc1. # noqa: E501
:return: The display_index of this WorkSummaryV30Rc1. # noqa: E501
:rtype: str
"""
return self._display_index
@display_index.setter
def display_index(self, display_index):
"""Sets the display_index of this WorkSummaryV30Rc1.
:param display_index: The display_index of this WorkSummaryV30Rc1. # noqa: E501
:type: str
"""
self._display_index = display_index
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(WorkSummaryV30Rc1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, WorkSummaryV30Rc1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
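# Minimal usage sketch (field values are made up, not taken from the ORCID
# spec):
#
#   summary = WorkSummaryV30Rc1(put_code=123, type="JOURNAL_ARTICLE",
#                               visibility="PUBLIC")
#   payload = summary.to_dict()
#
# The `type` and `visibility` setters validate against the allowed-value lists
# above and raise ValueError for anything else.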
| mit | 3,248,907,714,498,165,000 | 30.422122 | 728 | 0.599282 | false |
catapult-project/catapult | telemetry/telemetry/internal/image_processing/_bitmap.py | 3 | 8267 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Bitmap is a basic wrapper for image pixels. It includes some basic processing
tools: crop, find bounding box of a color and compute histogram of color values.
"""
from __future__ import absolute_import
import array
try:
from cStringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
import struct
import subprocess
import warnings
from telemetry.internal.util import binary_manager
from telemetry.core import platform
from telemetry.util import color_histogram
from telemetry.util import rgba_color
import png
class _BitmapTools(object):
"""Wraps a child process of bitmaptools and allows for one command."""
CROP_PIXELS = 0
HISTOGRAM = 1
BOUNDING_BOX = 2
def __init__(self, dimensions, pixels):
binary = binary_manager.FetchPath(
'bitmaptools',
platform.GetHostPlatform().GetOSName(),
platform.GetHostPlatform().GetArchName())
assert binary, 'You must build bitmaptools first!'
self._popen = subprocess.Popen([binary],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# dimensions are: bpp, width, height, boxleft, boxtop, boxwidth, boxheight
packed_dims = struct.pack('iiiiiii', *dimensions)
self._popen.stdin.write(packed_dims)
# If we got a list of ints, we need to convert it into a byte buffer.
if not isinstance(pixels, bytearray):
pixels = bytearray(pixels)
self._popen.stdin.write(pixels)
def _RunCommand(self, *command):
assert not self._popen.stdin.closed, (
'Exactly one command allowed per instance of tools.')
packed_command = struct.pack('i' * len(command), *command)
self._popen.stdin.write(packed_command)
self._popen.stdin.close()
length_packed = self._popen.stdout.read(struct.calcsize('i'))
if not length_packed:
raise Exception(self._popen.stderr.read())
length = struct.unpack('i', length_packed)[0]
return self._popen.stdout.read(length)
def CropPixels(self):
return self._RunCommand(_BitmapTools.CROP_PIXELS)
def Histogram(self, ignore_color, tolerance):
ignore_color_int = -1 if ignore_color is None else int(ignore_color)
response = self._RunCommand(_BitmapTools.HISTOGRAM,
ignore_color_int, tolerance)
out = array.array('i')
out.fromstring(response)
assert len(out) == 768, (
'The ColorHistogram has the wrong number of buckets: %s' % len(out))
return color_histogram.ColorHistogram(
out[:256], out[256:512], out[512:], ignore_color)
def BoundingBox(self, color, tolerance):
response = self._RunCommand(_BitmapTools.BOUNDING_BOX, int(color),
tolerance)
unpacked = struct.unpack('iiiii', response)
box, count = unpacked[:4], unpacked[-1]
if box[2] < 0 or box[3] < 0:
box = None
return box, count
class Bitmap(object):
"""Utilities for parsing and inspecting a bitmap."""
def __init__(self, bpp, width, height, pixels, metadata=None):
assert bpp in [3, 4], 'Invalid bytes per pixel'
assert width > 0, 'Invalid width'
assert height > 0, 'Invalid height'
assert pixels, 'Must specify pixels'
assert bpp * width * height == len(pixels), 'Dimensions and pixels mismatch'
self._bpp = bpp
self._width = width
self._height = height
self._pixels = pixels
self._metadata = metadata or {}
self._crop_box = None
@property
def bpp(self):
return self._bpp
@property
def width(self):
return self._crop_box[2] if self._crop_box else self._width
@property
def height(self):
return self._crop_box[3] if self._crop_box else self._height
def _PrepareTools(self):
"""Prepares an instance of _BitmapTools which allows exactly one command.
"""
crop_box = self._crop_box or (0, 0, self._width, self._height)
return _BitmapTools((self._bpp, self._width, self._height) + crop_box,
self._pixels)
@property
def pixels(self):
if self._crop_box:
self._pixels = self._PrepareTools().CropPixels()
# pylint: disable=unpacking-non-sequence
_, _, self._width, self._height = self._crop_box
self._crop_box = None
if not isinstance(self._pixels, bytearray):
self._pixels = bytearray(self._pixels)
return self._pixels
@property
def metadata(self):
self._metadata['size'] = (self.width, self.height)
self._metadata['alpha'] = self.bpp == 4
self._metadata['bitdepth'] = 8
return self._metadata
def GetPixelColor(self, x, y):
pixels = self.pixels
base = self._bpp * (y * self._width + x)
if self._bpp == 4:
return rgba_color.RgbaColor(pixels[base + 0], pixels[base + 1],
pixels[base + 2], pixels[base + 3])
return rgba_color.RgbaColor(pixels[base + 0], pixels[base + 1],
pixels[base + 2])
@staticmethod
def FromPng(png_data):
warnings.warn(
'Using pure python png decoder, which could be very slow. To speed up, '
'consider installing numpy & cv2 (OpenCV).')
width, height, pixels, meta = png.Reader(bytes=png_data).read_flat()
return Bitmap(4 if meta['alpha'] else 3, width, height, pixels, meta)
@staticmethod
def FromPngFile(path):
with open(path, "rb") as f:
return Bitmap.FromPng(f.read())
def WritePngFile(self, path):
with open(path, "wb") as f:
png.Writer(**self.metadata).write_array(f, self.pixels)
def IsEqual(self, other, tolerance=0):
# Dimensions must be equal
if self.width != other.width or self.height != other.height:
return False
# Loop over each pixel and test for equality
if tolerance or self.bpp != other.bpp:
for y in range(self.height):
for x in range(self.width):
c0 = self.GetPixelColor(x, y)
c1 = other.GetPixelColor(x, y)
if not c0.IsEqual(c1, tolerance):
return False
else:
return self.pixels == other.pixels
return True
def Diff(self, other):
# Output dimensions will be the maximum of the two input dimensions
out_width = max(self.width, other.width)
out_height = max(self.height, other.height)
diff = [[0 for x in range(out_width * 3)] for x in range(out_height)]
# Loop over each pixel and write out the difference
for y in range(out_height):
for x in range(out_width):
if x < self.width and y < self.height:
c0 = self.GetPixelColor(x, y)
else:
c0 = rgba_color.RgbaColor(0, 0, 0, 0)
if x < other.width and y < other.height:
c1 = other.GetPixelColor(x, y)
else:
c1 = rgba_color.RgbaColor(0, 0, 0, 0)
offset = x * 3
diff[y][offset] = abs(c0.r - c1.r)
diff[y][offset+1] = abs(c0.g - c1.g)
diff[y][offset+2] = abs(c0.b - c1.b)
# This particular method can only save to a file, so the result will be
# written into an in-memory buffer and read back into a Bitmap
warnings.warn(
'Using pure python png decoder, which could be very slow. To speed up, '
'consider installing numpy & cv2 (OpenCV).')
diff_img = png.from_array(diff, mode='RGB')
output = StringIO()
try:
diff_img.save(output)
diff = Bitmap.FromPng(output.getvalue())
finally:
output.close()
return diff
def GetBoundingBox(self, color, tolerance=0):
return self._PrepareTools().BoundingBox(color, tolerance)
def Crop(self, left, top, width, height):
cur_box = self._crop_box or (0, 0, self._width, self._height)
cur_left, cur_top, cur_width, cur_height = cur_box
if (left < 0 or top < 0 or
(left + width) > cur_width or
(top + height) > cur_height):
raise ValueError('Invalid dimensions')
self._crop_box = cur_left + left, cur_top + top, width, height
return self
def ColorHistogram(self, ignore_color=None, tolerance=0):
return self._PrepareTools().Histogram(ignore_color, tolerance)
| bsd-3-clause | 5,840,911,145,013,504,000 | 32.605691 | 80 | 0.637958 | false |
mdshw5/strandex | setup.py | 1 | 1338 | from setuptools import setup
install_requires = ['six']
def get_version(string):
""" Parse the version number variable __version__ from a script. """
import re
version_re = r"^__version__ = ['\"]([^'\"]*)['\"]"
version_str = re.search(version_re, string, re.M).group(1)
return version_str
setup(
name='strandex',
version=get_version(open('strandex/__init__.py').read()),
author='Matthew Shirley',
author_email='[email protected]',
url='https://github.com/mdshw5/strandex',
description='Strand-anchored regex for expansion or contraction of FASTQ files',
packages=['strandex'],
install_requires=install_requires,
entry_points = { 'console_scripts': [ 'strandex = strandex:main' ] },
license='MIT',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: Unix",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Bio-Informatics",
],
)
| mit | 6,688,157,721,223,414,000 | 37.228571 | 84 | 0.571749 | false |
project-schumann/vmf-converter | tests/vmf_converter_test.py | 1 | 39224 | import unittest
import json
from music21 import converter
from music21 import duration
from music21.chord import Chord
from music21.meter import TimeSignature
from music21.note import Note, Rest
from music21.key import KeySignature
from music21.tempo import MetronomeMark
from vmf_converter.core import vmf_converter_core
class vmfConverterTest(unittest.TestCase):
"""Test Class for vmf_converter_core module"""
def test_convert_score_to_vmf_001(self):
"""
        Tests the conversion of a score stream to a vmf data structure.
"""
score = converter.parse('./fixtures/simple.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/simple.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_002(self):
"""
Tests the conversion of a score stream with ties to a vmf data structure.
"""
score = converter.parse('./fixtures/ties.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/ties.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_003(self):
"""
Tests the conversion of a score stream with triplets to a vmf data structure.
"""
score = converter.parse('./fixtures/triplets.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/triplets.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_004(self):
"""
Tests the conversion of a score stream with duplets to a vmf data structure.
"""
score = converter.parse('./fixtures/duplets.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/duplets.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_005(self):
"""
Tests the conversion of a score stream with quintuplets to a vmf data structure.
"""
score = converter.parse('./fixtures/quintuplets.xml')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/quintuplets.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_006(self):
"""
Tests the conversion of a score stream with a simple to simple meter change to a vmf data structure.
"""
score = converter.parse('./fixtures/SimpleToSimple.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/SimpleToSimple.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
print(actual)
assert expected == actual
def test_convert_score_to_vmf_007(self):
"""
Tests the conversion of a score stream with a compound to compound meter change to a vmf data structure.
"""
score = converter.parse('./fixtures/CompoundToCompound.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/CompoundToCompound.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
print(actual)
assert expected == actual
def test_convert_score_to_vmf_008(self):
"""
Tests the conversion of a score stream with a simple to compound meter change to a vmf data structure.
"""
score = converter.parse('./fixtures/SimpleToCompound.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/SimpleToCompound.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_009(self):
"""
Tests the conversion of a score stream with a compound to simple meter change to a vmf data structure.
"""
score = converter.parse('./fixtures/CompoundToSimple.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/CompoundToSimple.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_010(self):
"""
Tests the conversion of a score stream with chords to a vmf data structure.
"""
score = converter.parse('./fixtures/chords.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/chords.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_011(self):
"""
Tests the conversion of a score stream with multiple voices to a vmf data structure.
"""
score = converter.parse('./fixtures/voices.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/voices.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_012(self):
"""
Tests the conversion of a score stream with dynamics to a vmf data structure.
"""
score = converter.parse('./fixtures/dynamics.mid')
first_phrase = score.measures(0, 5)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/dynamics.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_013(self):
"""
        Tests the conversion of a score stream with dotted rhythms to a vmf data structure.
"""
score = converter.parse('./fixtures/dottedQuarter.mid')
first_phrase = score.measures(0, 2)
actual = vmf_converter_core.convert_score_to_vmf(first_phrase)
with open('./expected/dottedQuarter.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_014(self):
"""
Tests a key signature change.
"""
score = converter.parse('./fixtures/keyChange.mid')
actual = vmf_converter_core.convert_score_to_vmf(score)
with open('./expected/keyChange.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_015(self):
"""
Tests a tempo change.
"""
score = converter.parse('./fixtures/tempoChange.mid')
actual = vmf_converter_core.convert_score_to_vmf(score)
with open('./expected/tempoChange.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_016(self):
"""
Tests an explicit anacrusis.
"""
score = converter.parse('./fixtures/anacrusis2.xml')
actual = vmf_converter_core.convert_score_to_vmf(score)
with open('./expected/anacrusis.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_017(self):
"""
        Tests the conversion of a score stream with chords and sustained notes.
"""
score = converter.parse('./fixtures/chordsAndSustain.xml')
actual = vmf_converter_core.convert_score_to_vmf(score)
with open('./expected/chordsAndSustain.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_convert_score_to_vmf_018(self):
"""
        Tests the conversion of a score stream with syncopated chords.
"""
score = converter.parse('./fixtures/syncopated.xml')
actual = vmf_converter_core.convert_score_to_vmf(score)
with open('./expected/syncopated.vmf', 'r') as expected_file:
expected_json = expected_file.read()
expected = json.loads(expected_json)
assert expected == actual
def test_scan_score_durations_001(self):
"""
Tests the scanning function which pre-analyzes the score to determine the
smallest necessary note value to accurately encode the score as a vmf.
"""
score = converter.parse('./fixtures/aus_meines_herz.mid')
shortest_duration = vmf_converter_core.scan_score_durations(score)
assert shortest_duration == duration.convertTypeToQuarterLength('eighth')
def test_scan_score_for_largest_chord_001(self):
"""
Tests the scanning function which pre-analyzes the score to determine the
largest chord size.
"""
score = converter.parse('./fixtures/chords.mid')
largest_chord_size = vmf_converter_core.scan_score_for_largest_chord(score)
assert largest_chord_size == 3
def test_scan_score_for_number_of_voices_001(self):
"""
Tests the scanning function which pre-analyzes the score to determine the
number of voices in each part.
"""
score = converter.parse('./fixtures/voices.mid')
first_phrase = score.measures(0, 2)
number_of_parts = vmf_converter_core.scan_score_for_number_of_voices(first_phrase)
assert number_of_parts == 3
def test_convert_vmf_to_midi_001(self):
"""
Tests the conversion of a simple vmf file to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/simple.vmf')
expected_score = converter.parse('./fixtures/simple.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_convert_vmf_to_midi_002(self):
"""
Tests the conversion of a vmf file with ties to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/ties.vmf')
expected_score = converter.parse('./fixtures/ties.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_convert_vmf_to_midi_003(self):
"""
Tests the conversion of a vmf file with rhythmic dots to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/dottedQuarter.vmf')
expected_score = converter.parse('./fixtures/dottedQuarter.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_convert_vmf_to_midi_004(self):
"""
Tests the conversion of a vmf file with triplets to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/triplets.vmf')
expected_score = converter.parse('./fixtures/triplets.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_convert_vmf_to_midi_005(self):
"""
Tests the conversion of a vmf file with duplets to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/duplets.vmf')
expected_score = converter.parse('./fixtures/duplets.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_convert_vmf_to_midi_006(self):
"""
Tests the conversion of a vmf file with a simple to simple meter change to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/SimpleToSimple.vmf')
expected_score = converter.parse('./fixtures/SimpleToSimple.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
# Check that the time signatures are encoded.
expected_time_signatures = expected_score.flat.getElementsByClass(TimeSignature)
actual_time_signatures = actual_score.flat.getElementsByClass(TimeSignature)
# Ensure we have the right number of time signatures.
assert len(expected_time_signatures) == len(actual_time_signatures)
for expected, actual in zip(expected_time_signatures, actual_time_signatures):
assert expected.ratioString == actual.ratioString
assert expected.offset == actual.offset
def test_convert_vmf_to_midi_007(self):
"""
Tests the conversion of a vmf file with a compound to compound meter change to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/CompoundToCompound.vmf')
expected_score = converter.parse('./fixtures/CompoundToCompound.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
# Check that the time signatures are encoded.
expected_time_signatures = expected_score.flat.getElementsByClass(TimeSignature)
actual_time_signatures = actual_score.flat.getElementsByClass(TimeSignature)
# Ensure we have the right number of time signatures.
assert len(expected_time_signatures) == len(actual_time_signatures)
for expected, actual in zip(expected_time_signatures, actual_time_signatures):
assert expected.ratioString == actual.ratioString
assert expected.offset == actual.offset
def test_convert_vmf_to_midi_008(self):
"""
Tests the conversion of a vmf file with a simple to compound meter change to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/SimpleToCompound.vmf')
expected_score = converter.parse('./fixtures/SimpleToCompound.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
# Check that the time signatures are encoded.
expected_time_signatures = expected_score.flat.getElementsByClass(TimeSignature)
actual_time_signatures = actual_score.flat.getElementsByClass(TimeSignature)
# Ensure we have the right number of time signatures.
assert len(expected_time_signatures) == len(actual_time_signatures)
for expected, actual in zip(expected_time_signatures, actual_time_signatures):
assert expected.ratioString == actual.ratioString
assert expected.offset == actual.offset
def test_convert_vmf_to_midi_009(self):
"""
Tests the conversion of a vmf file with a compound to simple meter change to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/CompoundToSimple.vmf')
expected_score = converter.parse('./fixtures/CompoundToSimple.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
# Check that the time signatures are encoded.
expected_time_signatures = expected_score.flat.getElementsByClass(TimeSignature)
actual_time_signatures = actual_score.flat.getElementsByClass(TimeSignature)
# Ensure we have the right number of time signatures.
assert len(expected_time_signatures) == len(actual_time_signatures)
for expected, actual in zip(expected_time_signatures, actual_time_signatures):
assert expected.ratioString == actual.ratioString
assert expected.offset == actual.offset
def test_convert_vmf_to_midi_010(self):
"""
Tests the conversion of a vmf file with chords to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/chords.vmf')
expected_score = converter.parse('./fixtures/chords.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Chord:
assert len(expected_element.pitches) == len(actual_element.pitches)
assert expected_element.quarterLength == actual_element.quarterLength
for actual_pitch, expected_pitch in zip(expected_element.pitches, actual_element.pitches):
assert expected_pitch.pitchClass == actual_pitch.pitchClass
assert expected_pitch.octave == actual_pitch.octave
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
# Check that the time signatures are encoded.
expected_time_signatures = expected_score.flat.getElementsByClass(TimeSignature)
actual_time_signatures = actual_score.flat.getElementsByClass(TimeSignature)
# Ensure we have the right number of time signatures.
assert len(expected_time_signatures) == len(actual_time_signatures)
for expected, actual in zip(expected_time_signatures, actual_time_signatures):
assert expected.ratioString == actual.ratioString
assert expected.offset == actual.offset
def test_convert_vmf_to_midi_011(self):
"""
Tests the conversion of a vmf file with multiple voices to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/voices.vmf')
expected_score = converter.parse('./fixtures/voices.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Chord:
assert len(expected_element.pitches) == len(actual_element.pitches)
assert expected_element.quarterLength == actual_element.quarterLength
for actual_pitch, expected_pitch in zip(expected_element.pitches, actual_element.pitches):
assert expected_pitch.pitchClass == actual_pitch.pitchClass
assert expected_pitch.octave == actual_pitch.octave
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_convert_vmf_to_midi_012(self):
"""
Tests the conversion of a vmf file with dynamics to a midi file.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/dynamics.vmf')
expected_score = converter.parse('./fixtures/dynamics.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements, actual.flat.notesAndRests.elements):
if type(expected_element) is Chord:
assert len(expected_element.pitches) == len(actual_element.pitches)
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.volume.velocity == actual_element.volume.velocity
for actual_pitch, expected_pitch in zip(expected_element.pitches, actual_element.pitches):
assert expected_pitch.pitchClass == actual_pitch.pitchClass
assert expected_pitch.octave == actual_pitch.octave
elif type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
assert expected_element.volume.velocity == actual_element.volume.velocity
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_convert_vmf_to_midi_013(self):
"""
Tests a key signature change.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/keyChange.vmf')
expected_score = converter.parse('./fixtures/keyChange.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements,
actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
# Check that the key signatures are encoded.
expected_key_signatures = expected_score.flat.getElementsByClass(KeySignature)
actual_key_signatures = actual_score.flat.getElementsByClass(KeySignature)
# Ensure we have the right number of key signatures.
assert len(expected_key_signatures) == len(actual_key_signatures)
for expected, actual in zip(expected_key_signatures, actual_key_signatures):
assert expected.sharps == actual.sharps
assert expected.offset == actual.offset
def test_convert_vmf_to_midi_014(self):
"""
Tests a tempo change.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/tempoChange.vmf')
expected_score = converter.parse('./fixtures/tempoChange.mid')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements,
actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
# Check that the tempos are encoded.
expected_tempos = expected_score.flat.getElementsByClass(MetronomeMark)
actual_tempos = actual_score.flat.getElementsByClass(MetronomeMark)
# Ensure we have the right number of tempos.
assert len(expected_tempos) == len(actual_tempos)
for expected, actual in zip(expected_tempos, actual_tempos):
assert expected.number == actual.number
assert expected.offset == actual.offset
def test_read_vmf_string_001(self):
"""
Tests reading a VMF file with articulations
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/articulation.vmf')
expected_score = converter.parse('./fixtures/articulation.xml')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements,
actual.flat.notesAndRests.elements):
if type(expected_element) is Chord:
assert len(expected_element.pitches) == len(actual_element.pitches)
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.volume.velocity == actual_element.volume.velocity
for actual_pitch, expected_pitch in zip(expected_element.pitches, actual_element.pitches):
assert expected_pitch.pitchClass == actual_pitch.pitchClass
assert expected_pitch.octave == actual_pitch.octave
elif type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
# Equality on articulations is not well implemented in music21.
for a, b in zip(expected_element.articulations, actual_element.articulations):
assert type(a) == type(b)
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_read_vmf_string_002(self):
"""
Tests reading a VMF file with a pickup measure.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/anacrusis.vmf')
expected_score = converter.parse('./fixtures/anacrusis.xml')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements,
actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_read_vmf_string_003(self):
"""
Tests reading a VMF file with a pickup and a measure of rests.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/anacrusisAndRests.vmf')
expected_score = converter.parse('./fixtures/anacrusisAndRests.xml')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements,
actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_read_vmf_string_004(self):
"""
Tests reading a VMF file with a quintuplet.
"""
actual_score = vmf_converter_core.read_vmf_file('./expected/quintuplets.vmf')
expected_score = converter.parse('./fixtures/quintuplets.xml')
# Assert that the file has the right number of parts.
assert len(expected_score.parts) == len(actual_score.parts)
# Assert that the notes and rests match
for expected, actual in zip(expected_score.parts, actual_score.parts):
for expected_element, actual_element in zip(expected.flat.notesAndRests.elements,
actual.flat.notesAndRests.elements):
if type(expected_element) is Note:
assert expected_element.quarterLength == actual_element.quarterLength
assert expected_element.pitch.pitchClass == actual_element.pitch.pitchClass
assert expected_element.pitch.octave == actual_element.pitch.octave
elif type(expected_element) is Rest:
assert expected_element.quarterLength == actual_element.quarterLength
def test_find_number_of_notes_in_tick_001(self):
"""
Tests finding the number of notes in a tick
"""
tick = [1,-1,0,0,4,-1,-1,-1,-1,0]
number_of_notes = vmf_converter_core.find_number_of_notes_in_tick(tick)
assert number_of_notes == 1
def test_find_number_of_notes_in_tick_002(self):
"""
Tests finding the number of notes in a tick
"""
tick = [1,-1,0,0,4,0,0,-1,-1,0]
number_of_notes = vmf_converter_core.find_number_of_notes_in_tick(tick)
assert number_of_notes == 2 | mit | 8,606,255,815,061,616,000 | 47.605948 | 130 | 0.642158 | false |
cjgibson/mechkbot | bot.py | 1 | 61053 | # -*- coding: utf-8 -*-
###
# AUTHORS: CHRISTIAN GIBSON,
# PROJECT: /r/MechMarket Bot
# UPDATED: SEPTEMBER 11, 2015
# USAGE: python bot.py [-h / --help] [-is / --interactive-shell]
# EXPECTS: python 3.4.0
# beautifulsoup4 4.4.0
# praw 3.2.1
# regex 2015.06.24
###
import argparse
import bs4
import cmd
import collections
import configparser
import copy
import errno
import inspect
import logging
import math
import multiprocessing
import os
import platform
import praw
import random
import regex
import shelve
import shutil
import threading
import time
import traceback
import urllib
import uuid
__AUTHORS__ = ['/u/NotMelNoGuitars']
__VERSION__ = 0.1
__CMD_STR__ = '>>> '
__INFO__ = 'MechKB0t-v%s on "%s" with << %s v%s >> at %s %s' % (
__VERSION__,
platform.platform(),
platform.python_implementation(),
platform.python_version(),
time.ctime(),
time.localtime().tm_zone)
def coerce_reddit_handles(handles=__AUTHORS__):
clean = regex.compile(r'[^A-Z0-9_/-]', regex.UNICODE + regex.IGNORECASE)
authors = []
for author in handles:
author = clean.sub('', str(author))
if ((author.startswith('/u/') or author.startswith('/r/'))
and len(author.split('/')) == 3):
authors.append(author)
else:
authors.append('/u/' + max(author.split('/'), key=len))
return authors
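# Illustrative example of the coercion above:
#   coerce_reddit_handles(['NotMelNoGuitars', '/r/MechMarket'])
#     -> ['/u/NotMelNoGuitars', '/r/MechMarket']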
class config_generator():
# c = bot.config_generator()(bot.bot.CONFIG_DEFAULTS)() ; print(c.func_code)
_FUNC_CODE_ = """class config_handler(configparser.RawConfigParser):
def __init__(self, conf_file=None):
super(self.__class__, self).__init__()
self.true_values = frozenset(['true', 't', '1', 'y', 'yes', 'aye', 'on',
'use', 'active', 'activate'])
self.heatware_regex = None
if conf_file:
self.conf_file = os.path.abspath(conf_file)
else:
try:
self.conf_file = (os.path.dirname(os.path.abspath(
inspect.getsourcefile(lambda: None))) + os.sep + 'config.cfg')
except:
self.conf_file = None
if self.conf_file:
try:
self.read(self.conf_file)
if not self.sections():
self.generate_defaults()
self.status = errno.ENOENT
else:
self.status = 0
except:
traceback.print_exc()
self.status = errno.EIO
else:
self.status = errno.EBADF
def store(self):
with open(self.conf_file, 'w') as conf_handle:
self.write(conf_handle)
def protected_pull(self, section, option, cast=None, default=None):
if self.status:
raise EnvironmentError(self.status,
('Current status #%d <%s> "%s".' %
(self.status,
errno.errorcode[self.status],
os.strerror(self.status))),
self.conf_file)
try:
if cast:
return cast(self.get(section, option))
else:
return self.get(section, option)
except:
if default:
return default
else:
raise
def protected_pullboolean(self, section, option):
boolean = self.protected_pull(section, option).lower()
if boolean in self.true_values:
return True
return False
def protected_push(self, section, option, value):
if self.status:
raise EnvironmentError(self.status,
('Current status #%d <%s> "%s".' %
(self.status,
errno.errorcode[self.status],
os.strerror(self.status))),
self.conf_file)
try:
self.set(section, option, value)
self.store()
return True
except:
return False
def protected_pushboolean(self, section, option, value):
if value is True or value in self.true_values:
return self.protected_push(section, option, 'true')
return self.protected_push(section, option, 'false')
"""
def __init__(self):
pass
def __call__(self, sections, ignore_description=False):
if all(all('desc' in detail for _, detail in options.items())
for _, options in sections.items()) or ignore_description:
pass
else:
raise TypeError('Provided configuration does not provide a "desc" '
'field for each section option. As such, the %s '
'cannot create an interactive_initialization() '
'method. To create the constructor without the '
'interactive_initialization() method, set '
'"ignore_description" to True when calling %s.'
% (self.__class__, self.__class__))
added_methods = {attr_or_func: None
for attr_or_func in dir(configparser.RawConfigParser)}
added_methods['conf_file'] = None
added_methods['func_code'] = None
added_methods['heatware_regex'] = None
added_methods['protected_pull'] = None
added_methods['protected_pullboolean'] = None
added_methods['protected_push'] = None
added_methods['protected_pushboolean'] = None
added_methods['status'] = None
added_methods['store'] = None
added_methods['true_values'] = None
        added_methods['generate_defaults'] = None
        if not ignore_description:
            added_methods['interactive_initialization'] = None
init_initials = [" def interactive_initialization(self):",
" to_initialize = ["]
init_defaults = [" def generate_defaults(self):"]
for section, options in sections.items():
init_defaults.append(" self.add_section('%s')" % section)
for option, detail in options.items():
if 'boolean' in detail:
pulltype = 'protected_pullboolean'
pushtype = 'protected_pushboolean'
else:
pulltype = 'protected_pull'
pushtype = 'protected_push'
if 'get' in detail:
if detail['get']:
get_method = detail['get']
else:
get_method = None
else:
get_method = 'get_%s_%s' % (section, option)
if get_method in added_methods:
raise SyntaxError('Attempted to add get method %s to new '
'config_handler object, but it was '
'already defined.' % get_method)
if get_method:
added_methods[get_method] = (
" def %s(self):\n"
" return self.%s('%s', '%s')\n"
% (get_method, pulltype, section, option))
if 'set' in detail:
if detail['set']:
set_method = detail['set']
else:
set_method = None
else:
set_method = 'set_%s_%s' % (section, option)
if set_method in added_methods:
raise SyntaxError('Attempted to add set method %s to new '
'config_handler object, but it was '
'already defined.' % set_method)
if set_method:
added_methods[set_method] = (
" def %s(self, value):\n"
" return self.%s('%s', '%s', value)\n"
% (set_method, pushtype, section, option))
if 'def' in detail:
init_defaults.append(
" self.set('%s', '%s', '%s')" %
(section, option, detail['def']))
else:
init_defaults.append(
" self.set('%s', '%s', '%s')" %
(section, option, ""))
if not ignore_description:
if 'def' in detail:
init_initials.append(
" ('%s', '%s', '%s', '%s', '%s')," %
(self.sanify(detail['desc']),
self.sanify(detail['def']),
pushtype, section, option))
else:
init_initials.append(
" ('%s', None, '%s', '%s', '%s')," %
(self.sanify(detail['desc']),
pushtype, section, option))
added_methods['generate_defaults'] = ('\n'.join(init_defaults) + '\n' +
' self.store()\n')
if not ignore_description:
init_initials.extend([
" ]",
"",
" for desc, def_, fxn, sec, opt in to_initialize:",
" value_set = False",
" while not value_set:",
" try:",
" print('Now setting [%s].[%s]:' % (sec, opt))",
" print('Description: %s' % desc)",
" if def_:",
" print('Leave blank to use default '",
" 'value \"%s\".' % def_)",
" val = input('Set [%s].[%s]: ' % (sec, opt))",
" if val:",
" getattr(self, fxn)(sec, opt, val)",
" value_set = True",
" elif def_:",
" getattr(self, fxn)(sec, opt, def_)",
" value_set = True",
" else:",
" print('(!!!) Invalid value provided, '",
" 'or no value provided with no '",
" 'default available.\\n')",
" if value_set:",
" rec = self.get(sec, opt)",
" print('Value set as \"%s\".' % rec,",
" end=' ')",
" chk = input('Is that correct? (y/n) ')",
" if chk.lower().strip().startswith('y'):",
" print('Input accepted and stored.'",
" '\\f\\n\\r')",
" else:",
" print('Interpreted response as '",
" '\"no\". Will recapture '",
" 'input.\\n')",
" value_set = False",
" except KeyboardInterrupt:",
" raise",
" except:",
" print('(!!!) Error encountered when '",
" 'attempting to set value.\\n')",
" self.store()"
])
added_methods['interactive_initialization'] = (
'\n'.join(init_initials) + '\n')
_func_code_ = (self._FUNC_CODE_ +
'\n'.join(filter(lambda x: isinstance(x, str),
added_methods.values())))
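        # exec() below defines config_handler in this call's local namespace;
        # eval('config_handler') then pulls the freshly built class object back
        # out so it can be returned to the caller.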
exec(compile(_func_code_, '<string>', 'exec'))
config = eval('config_handler')
config.func_code = _func_code_
return config
def sanify(self, text):
return text.encode('unicode-escape').decode().replace("'", "\\'")
_BS4_PARSER = 'html.parser'
_GET_CONFIG = config_generator()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
class bot_prompt(cmd.Cmd):
# errno.ENOTTY
def __init__(self):
super(self.__class__, self).__init__()
self.prompt = __CMD_STR__
self.size = shutil.get_terminal_size()
self.height, self.width = self.size.lines, self.size.columns
class bot(praw.Reddit, threading.Thread):
CONFIG_DEFAULTS = collections.OrderedDict([
('crawl', collections.OrderedDict([
('file', {'def': 'data.record',
'desc': ('This is the name of the flatfile that will be '
'used to store all collected data on a user-by-'
'user basis.')}),
('hold', {'def': '10',
'desc': ('This is the number of seconds the bot will '
'spend in each state as a minimum.\nAs an '
'example, the bot has three states by default:\n'
' 1. Crawl /new of the target subreddit.\n'
' 2. Respond to user PMs.\n'
' 3. Crawl the trade thread of the target '
'subreddit.')}),
('sleep', {'def': '100',
'desc': ('This is the number of seconds the bot will '
'spend doing nothing after completing each set '
'of states.')})
])),
('reddit', collections.OrderedDict([
('user_agent', {'def': ('%s-%s:%s:MechKB0t-v%s (by %s)' %
(platform.system(), platform.processor(),
uuid.uuid5(uuid.NAMESPACE_OID, __INFO__),
__VERSION__,
', '.join(coerce_reddit_handles()))),
'desc': ('This is the plaintext string that will '
'be used by the admins at reddit to '
'identify this bot. It is recommended '
'that bots follow the format:\n'
' <platform>:<app ID>:<version string> '
'(by /u/<reddit username>)\n'
'Full rules and restrictions can be '
'found here: http://github.com/reddit/'
'reddit/wiki/API.')}),
('client_id', {'desc': ('This is the OAuth2 client_id created '
'for your reddit app instance. More '
'information can be found here: http://'
'github.com/reddit/reddit/wiki/OAuth2.')}),
('client_secret', {'desc': ('This is the OAuth2 client_secret '
'created for your reddit app instance. '
'More information can be found here: '
'http://github.com/reddit/reddit/wiki'
'/OAuth2.')}),
('redirect_url', {'desc': ('This is the OAuth2 redirect_url created '
'for your reddit app instance. More '
'information can be found here: http://'
'github.com/reddit/reddit/wiki/OAuth2.')}),
('subreddit', {'desc': 'The subreddit targeted by this bot.'}),
('multiprocess', {'def': 'false',
'get': 'is_multiprocessed',
'set': None,
'desc': 'Currently not implemented. Ignore.',
'boolean': True}),
('verbose', {'def': 'true',
'get': 'is_verbose',
'set': 'set_verbose',
'desc': ('Sets whether the bot will display its '
'actions during runtime, or simply log them.'),
'boolean': True})
])),
('monitor', collections.OrderedDict([
('log', {'def': 'event.log',
'desc': ('This is the flatfile that will be used to log '
'all actions taken by the bot.')}),
('posts', {'def': 'true',
'desc': ('Whether or not the bot will log basic '
'information concerning all posts observed '
'during its runtime.'),
'boolean': True}),
('users', {'def': 'true',
'desc': ('Whether or not the bot will record basic '
                             'information concerning all users observed '
'during its runtime.'),
'boolean': True}),
('format', {'def': '%(created)f -- %(levelname)s -> %(message)s',
'desc': ('This is the format string that will be used '
'in creating each entry in the log file. '
'Formatting options include:\n'
' %(asctime)s: Human-readable time when a '
'logged event was created.\n'
' %(created)f: Seconds since epoch when a '
'logged event was created.\n'
' %(filename)s: Source file that created a '
'logged event.\n'
' %(funcName)s: Function used that created a '
'logged event.\n'
' %(levelname)s: Severity of logged event as '
'an English string.\n'
' %(levelno)s: Severity of logged event as a '
'numeric value.\n'
' %(lineno)d: Line number of the source file '
'where a logged event was created.\n'
' %(module)s: Module that created a logged '
'event.\n'
' %(msecs)d: Millisecond portion of system '
'time when an event was logged.\n'
' %(message)s: Message provided when an event '
'was logged.\n'
' %(name)s: Name of the logger used to create '
'the logged event.\n'
' %(pathname)s: Full pathname of the source '
'file that created the logged event.\n'
' %(process)d: Process ID that created the '
'logged event.\n'
' %(processName)s: Process name that created '
'the logged event.\n'
' %(relativeCreated)d: Milliseconds after the '
'logging module was initially loaded that an '
'event was logged.\n'
' %(thread)d: Thread ID that created the '
'logged event.\n'
' %(threadName)s: Thread name that created '
'the logged event.\n'
'Further information can be found at: '
'http://docs.python.org/3.4/library/logging.'
                              'html#logging.LogRecord')}),
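        # Illustrative output (not part of the configuration): with the default
        # format string above, a logged event renders along the lines of
        #   1441996800.000000 -- INFO -> <message text>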
('respond', {'def': 'true',
'desc': ('Whether or not the bot should make a post '
'on each new trade thread.'),
'boolean': True}),
('response', {'desc': ('The text template used when commenting on '
'a new trade thread. Formatting options '
                               'include:\n')}),
])),
('sidebar', collections.OrderedDict([
('add_button', {'def': 'false',
'get': 'should_add_button',
'desc': ('Whether the bot should add a button for '
'the current trade thread on the target '
'subreddit\'s sidebar.'),
'boolean': True}),
('button_text', {'desc': 'The text used for the created button.'}),
('button_start', {'desc': ('A specialized tag, included in the '
'sidebar\'s text, which determines '
'where the button starts.')}),
('button_end', {'desc': ('A specialized tag, included in the '
'sidebar\'s text, which determines where '
'the button ends.')})
])),
('class', collections.OrderedDict([
('use', {'def': 'true',
'desc': 'If the bot should monitor and update user flair.',
'boolean': True}),
('start', {'desc': 'Flair given to users never seen before.'}),
('limit', {'desc': ('Maximum integer indicating how many times '
'a user\'s flair can be incremented.')}),
('ignore', {'desc': ('A whitespace-separated list of flairs which '
'should be ignored if encountered by the bot.')}),
('pattern', {'desc': ('The pattern used to generate new user '
'flair following an increment. %i is used '
'to indicate where the integer value of the '
'flair should go. As a example, a flair '
'pattern of "u-%i" would take on the values '
'"u-1" for a user with a flair value of 1, '
'"u-2" for a user with a flair value of 2, '
'"u-3" for a user with a flair value of 3, '
'etc.')}),
('increment', {'def': '1',
'desc': ('The integer value that a user\'s flair '
'value will be incremented by with each '
'flair increment. Given a default value '
'of "1", a user with a flair value of 3 '
'would advance to a flair value of 4 after '
'completing a trade.')})
])),
('trade', collections.OrderedDict([
('method', {'def': 'post',
'desc': ('The method used by the bot to confirm user '
'trades. Three options are available, "pm", '
'"post", or "both". If "pm" is specified, '
'trades will be confirmed via private '
'message; with the sender in a trade sending '
'a private message to the bot containing the '
'reddit handle of the recipient. The bot then '
'contacts the other party, who confirms the '
'trade. If "post" is specified, a public '
'thread is used. Within the thread, the '
'sender creates a top-level comment, which '
'the recipient replies to with a comment '
'containing the phrase "confirmed". In the '
'case that "both" is specified, either option '
'can be used to confirm a trade.')}),
('post_id', {'desc': ('The id used by the trading thread within '
'the target subreddit. If left blank, the '
'bot will create its own trading thread. In '
'the case that "pm" is used as a method, '
'this value is ignored.')}),
('post_text', {'desc': ('The text template used when creating a '
'new trade thread. Supports formatting '
'arguments as found in Python\'s strftime '
'command. For more information, see: '
'https://docs.python.org/2/library/time.html'
'#time.strftime.')}),
('post_rate', {'def': 'monthly',
'desc': ('The rate at which the bot will create '
'new trading posts on the target subreddit.'
' Provided options include "daily", '
'"weekly", "monthly", "yearly", and "never"'
'. If "never" is selected, the post_id will'
' have to be updated manually by the user.')}),
('post_title', {'desc': ('The title template used when creating a '
'new trade thread\'s title. Supports '
                                     'formatting arguments as found in Python\'s '
'strftime command. For more information, '
'see: https://docs.python.org/2/library/'
'time.html#time.strftime.')}),
('post_sticky', {'def': 'false',
'desc': ('If the bot makes the trade thread sticky'
' or not.')}),
('post_response', {'desc': ('The text template used when replying '
'to a confirmed trade comment on a '
'trade post. Supports formatting '
'arguments as found in Python\'s '
'strftime command. For more information'
', see: https://docs.python.org/2/'
'library/time.html#time.strftime.')}),
('message_text', {'desc': ('The text template used when sending a '
'private message to both users following'
' a confirmed trade. Supports formatting'
' arguments as found in Python\'s '
'strftime command. For more information,'
' see: https://docs.python.org/2/library'
'/time.html#time.strftime.')}),
('message_title', {'desc': ('The title template used when sending a '
'private message to both users '
'following a confirmed trade. Supports '
'formatting arguments as found in '
'Python\'s strftime command. For more '
'information, see: https://docs.python.'
'org/2/library/time.html#time.strftime.')}),
('respond', {'def': 'true',
'desc': ('If the bot should respond following a '
'confirmed trade or not.'),
'boolean': True}),
('age_msg', {'desc': ('Message used to reply when a user attempts '
'to confirm a trade when their account is '
'younger than the provided age limit.')}),
('age_type', {'def': 'days',
'desc': ('Units used in determining if a user\'s '
'account is too young to confirm a trade. '
'Options are "seconds", "minutes", "hours", '
'"days", "months".')}),
('age_limit', {'def': '30',
'desc': ('Numerical measurement used in determining '
'if a user\'s account is too young to '
'confirm a trade.')}),
('same_msg', {'desc': ('Message used to reply when a user attempts '
'to confirm a trade with themselves.')}),
('karma_msg', {'desc': ('Message used to reply when a user attempts'
' to confirm a trade when their account\'s '
'karma is below the provided karma limit.')}),
('karma_type', {'def': 'both',
'desc': ('Units used in determining if a user\'s '
'account has sufficient karma to confirm '
'a trade. Options are "comment", "link", '
'or "both".')}),
('karma_limit', {'def': '100',
'desc': ('Numerical measurement used in '
'determining if a user\'s account has '
'sufficient karma to confirm a trade.')})
])),
('heatware', collections.OrderedDict([
('method', {'def': 'pm',
'desc': ('The method by which the bot will collect a '
'user\'s heatware URL. Three options are '
'available, "pm", "post", and "both". If "pm" '
'is specified, users can submit heatware URLs '
'by means of private message to the bot. If '
'"post" is specified, users can submit their '
'heatware URLs by means of commenting in a '
'specified post. If "both" is specified, '
'either method can be used.')}),
('post_id', {'desc': ('The id used by the heatware thread in the '
'target subreddit.')}),
('post_text', {'desc': ('The text template used when creating a '
'new heatware thread. Supports formatting '
'arguments as found in Python\'s strftime '
'command. For more information, see: '
'https://docs.python.org/2/library/time.html'
'#time.strftime.')}),
('post_rate', {'def': 'yearly',
'desc': ('The rate at which the bot will create '
'new heatware posts on the target subreddit.'
' Provided options include "daily", '
'"weekly", "monthly", "yearly", and "never"'
'. If "never" is selected, the post_id will'
' have to be updated manually by the user.')}),
('post_title', {'desc': ('The title template used when creating a '
'new heatware thread\'s title. Supports '
                                 'formatting arguments as found in Python\'s '
'strftime command. For more information, '
'see: https://docs.python.org/2/library/'
'time.html#time.strftime.')}),
('post_sticky', {'desc': ('If the bot makes the heatware thread '
'sticky or not.')}),
('post_response', {'desc': ('The text template used when replying '
'to an accepted heatware comment on a '
'heatware post. Supports formatting '
'arguments as found in Python\'s '
'strftime command. For more information'
', see: https://docs.python.org/2/'
'library/time.html#time.strftime.')}),
('message_text', {'desc': ('The text template used when sending a '
'private message to a user following'
' an accepted heatware profile. Supports '
'formatting arguments as found in Python\'s'
' strftime command. For more information,'
' see: https://docs.python.org/2/library'
'/time.html#time.strftime.')}),
('message_title', {'desc': ('The title template used when sending a '
'private message to a user following '
'an accepted heatware profile. Supports '
'formatting arguments as found in '
'Python\'s strftime command. For more '
'information, see: https://docs.python.'
'org/2/library/time.html#time.strftime.')}),
('regex', {'def': '(?:.*)(http(?:s?)://www\.heatware\.com/eval\.php\?id=[0-9]+)(?:.*)',
'set': None,
                   'desc': ('The regular expression used to extract '
'heatware URLs from plaintext comments.')}),
('group', {'def': '1',
'set': None,
'desc': ('The group within the regular expression that '
'actually contained the captured heatware URL. '
'If left blank, the parser will accept the '
'entire match resulting from the regular '
'expression.')}),
('respond', {'def': 'true',
'desc': ('If a bot should respond to an accepted '
'heatware profile URL or not.'),
'boolean': True})
]))
])
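    # Illustrative sketch (not part of the original bot): how the heatware
    # 'regex' and 'group' settings documented above could be applied to a
    # plaintext comment body. The helper name and default values here are
    # assumptions added for clarity only.
    @staticmethod
    def _example_extract_heatware_url(comment_body,
                                      pattern=(r'(?:.*)(http(?:s?)://'
                                               r'www\.heatware\.com/eval\.php'
                                               r'\?id=[0-9]+)(?:.*)'),
                                      group=1):
        """Return the captured heatware URL from ``comment_body``, or None."""
        match = regex.match(pattern, comment_body)
        return match.group(group) if match else None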
def __init__(self, conf_file='config.cfg'):
config_constructor = _GET_CONFIG(self.CONFIG_DEFAULTS)
self.config_handler = config_constructor(conf_file)
if self.config_handler.status:
raise EnvironmentError(self.config_handler.status,
('Current status #%d <%s> "%s".' %
(self.config_handler.status,
errno.errorcode[
self.config_handler.status],
os.strerror(self.config_handler.status))),
conf_file)
log = logging.StreamHandler(self.config_handler.get_monitor_log())
fmt = logging.Formatter(self.config_handler.get_monitor_format())
log.setLevel(logging.DEBUG)
log.setFormatter(fmt)
logger.addHandler(log)
self.data_store = database_handler(
self.config_handler.get_crawl_file())
self.heat_parse = heatware_crawler()
        # dir(super()) is evaluated once here, outside the comprehension, so
        # the zero-argument form of super() runs in the method's own scope
        parent_attrs = dir(super())
        self.run_states = {
            state[6:].lstrip('_'): getattr(self, state)
            for state in set(dir(self)).difference(parent_attrs)
            if (state.startswith('_state')
                and hasattr(getattr(self, state), '__call__'))}
super().__init__(self.config_handler.get_reddit_user_agent())
self.set_oauth_app_info(self.config_handler.get_reddit_client_id(),
self.config_handler.get_reddit_client_secret(),
self.config_handler.get_reddit_redirect_url())
threading.Thread.__init__(self, daemon=True)
def run(self):
while True:
state_time = {state: max(1, self.config_handler.get_crawl_hold())
for state in self.run_states}
while any(t > 0 for t in state_time.values()):
for state, function in self.run_states.items():
if state_time[state] > 0:
self.state = state
state_start = time.time()
try:
function()
except:
pass
state_elaps = time.time() - state_start
if state_elaps > 0:
state_time[state] -= state_elaps
else:
state_time[state] = 0
time.sleep(self.config_handler.get_crawl_sleep())
self.shutdown()
def _state_trade(self):
"""
Performs processing necessary for the verification and updating
of user's css class following a successful trade.
Will need to call the following methods from self.config_handler:
get_trade_method()
if get_trade_method() in ['post', 'both']:
get_trade_post_id()
get_trade_post_text()
get_trade_post_rate()
get_trade_post_title()
get_trade_post_sticky()
get_trade_post_response()
should_add_button()
get_sidebar_button_text()
get_sidebar_button_start()
get_sidebar_button_end()
if get_trade_method() in ['pm', 'both']:
get_trade_message_text()
get_trade_message_title()
get_trade_respond()
get_trade_age_msg()
get_trade_age_type() -> ['seconds', 'minutes', 'hours', 'days', 'months']
get_trade_same_msg()
get_trade_karma_msg()
get_trade_karma_type() -> ['comment', 'link', 'both']
get_trade_karma_limit()
get_class_use()
get_class_start()
get_class_limit()
get_class_ignore()
get_class_pattern()
get_class_increment()
In addition, will need to log results to logger, and store updated
user information in self.data_store if get_monitor_users() is True.
"""
if self.config_handler.get_trade_method() in ['pm', 'both']:
pass
if self.config_handler.get_trade_method() in ['post', 'both']:
pass
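    # Illustrative sketch (assumption, not original code): one way the
    # age_type/age_limit settings described above could be turned into a
    # seconds threshold for an account-age check; 'months' is approximated
    # as 30 days.
    @staticmethod
    def _example_age_threshold_seconds(age_limit, age_type):
        unit_seconds = {'seconds': 1, 'minutes': 60, 'hours': 3600,
                        'days': 86400, 'months': 2592000}
        return int(age_limit) * unit_seconds.get(age_type, 86400)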
def _state_posts(self):
"""
Monitors and replies to previously unseen posts on the target
subreddit's /new page.
Will need to call the following methods from self.config_handler:
get_monitor_posts()
get_monitor_users()
get_monitor_format()
get_monitor_respond()
get_monitor_response()
"""
pass
def _state_flair(self):
"""
Responsible for verifying and setting user flair with regards to their
accounts on http://www.HeatWare.com.
Will need to call the following methods from self.config_handler:
get_heatware_method()
if get_heatware_method() in ['post', 'both']:
get_heatware_post_id()
get_heatware_post_text()
get_heatware_post_rate()
get_heatware_post_title()
get_heatware_post_sticky()
get_heatware_post_response()
if get_heatware_method() in ['pm', 'both']:
get_heatware_message_text()
get_heatware_message_title()
get_heatware_regex()
get_heatware_group()
get_heatware_respond()
Recall:
>>> import time, pprint
>>> self.heat_parse.parse('2')
>>> while len(self.heat_parse) < 1: time.sleep(1)
>>> results = {id_: info for id_, info in self.heat_parse}
>>> pprint.pprint(results['2'])
{'aliases': {'amdmb': {'heat23': None},
'anandtech bbs': {'heat23': 'http://forum.anandtech.com'},
'arstechnica': {'heat23': None},
'geekhack': {'heatware': None},
'techpowerup!': {'heatware': None},
'webhostingtalk': {'heat23': None}},
'evaluations': {334221: {'comments': 'Great transaction, he sent money '
'via paypal and I shipped upon '
'payment.',
'date': '06-30-2005',
'forum': 'anandtech bbs',
'user': 'floben'},
344973: {'comments': 'What can I say about the owner of '
'heatware besides the fact that it '
'was an awesome transaction. I had '
'no worries about shipping first, '
'and his great communication '
'throughout the transaction put me '
'at ease.',
'date': '08-17-2005',
'forum': 'anandtech bbs',
'user': 'jackson18249'},
345198: {'comments': 'Quick payment & good communication. '
'You cannot ask for a smoother '
'transaction!',
'date': '08-23-2005',
'forum': 'anandtech bbs',
'user': 'hkklife'},
356225: {'comments': 'Super-fast payment, prompt response '
'to PMs. There was a delivery delay '
'(because of Katrina) but buyer was '
'very patient and kept in touch. '
'Thanks!',
'date': '09-27-2005',
'forum': 'anandtech bbs',
'user': 'fornax'},
423266: {'comments': 'This was simply one of the best '
'transactions I have experienced on '
'Anandtech. I sent Heat23 a paypal '
'e-check (expecting for funds to '
'clear first) but he crosshipped '
'minutes later on a Saturday. Got '
'the package Monday morning in the '
'office. Awesome.',
'date': '08-14-2006',
'forum': 'anandtech bbs',
'user': 'jloor'},
425040: {'comments': 'Fast payment, smooth transaction... '
'Good guy to deal with! Thanks!',
'date': '08-23-2006',
'forum': 'anandtech bbs',
'user': 'Doctor Feelgood'},
425650: {'comments': 'Heat23 threw in a couple of '
'freebies and shipped everything out '
'lightspeed. Thanks Man!',
'date': '08-26-2006',
'forum': 'anandtech bbs',
'user': 'ScottyWH'},
425699: {'comments': 'This was a very smooth transaction. '
'Heat sent me payment and I sent him '
'the camera. I would gladly sell to '
'him again. Thanks!',
'date': '08-20-2006',
'forum': 'anandtech bbs',
'user': 'dak125'},
426236: {'comments': 'The transaction went great, seller '
'was the easy to deal with and the '
'shipping was fast. (Freebie '
'included)...Love to deal again in '
'the future...',
'date': '08-29-2006',
'forum': 'anandtech bbs',
'user': 'mackle'},
487916: {'comments': 'Good communication, paid via '
"Paypal, smooth deal. If you can\\'t "
'trust heat23, who can you trust?;)',
'date': '08-23-2007',
'forum': 'anandtech bbs',
'user': 'Tates'},
496656: {'comments': 'Nice guy to work with. His '
'contribution to the trading '
'community is definitely '
'appreicated!!! Thanks again heat. :)',
'date': '11-08-2007',
'forum': 'anandtech bbs',
'user': 'ELopes580'},
527657: {'comments': 'Though took a bit to get the deal '
'done, he was courteous, kept in '
'touch, and made the whole '
'experience awesome! Thanks for the '
"phone, it\\'s awesome!",
'date': '08-04-2008',
'forum': 'anandtech bbs',
'user': 'proxops-pete'},
621980: {'comments': 'Donation acknowledgement and thanks '
'received. Thanks for spending your '
'time building something to do good.',
'date': '07-11-2011',
'forum': 'heatware',
'user': 'AmboBartok'},
690634: {'comments': 'Got payment quickly, great '
'comunication. Would deal with again '
'anytime. A++++',
'date': '07-23-2014',
'forum': 'anandtech bbs',
'user': 'Sniper82'},
699942: {'comments': 'Receiver was packed very well, in '
'what appeared to be the original '
'box. This receiver was shipped from '
'CA to NY and was in beautiful '
'condition when it arrived. Heat23 '
'even included a couple HDMI cables. '
'The item was as described, shipped '
'promptly, packed very well, and is '
'working well as I type this. This '
'transaction could not have gone '
"better, and I\\'d definitely deal "
'with Heat23 again.',
'date': '03-03-2015',
'forum': 'anandtech bbs',
'user': 'NicePants42'}},
'location': 'Austin, TX',
'rating': {'negative': 0, 'neutral': 0, 'positive': 188}}
"""
if self.config_handler.get_heatware_method() in ['pm', 'both']:
pass
if self.config_handler.get_heatware_method() in ['post', 'both']:
pass
def shutdown(self):
self.heat_parse.kill()
def __repr__(self):
# This section is a carbon copy of the vanilla codebase.
# ( See: threading.Thread.__repr__ )
thread_status = 'initial'
if self._started.is_set():
thread_status = 'started'
self.is_alive()
if self._is_stopped:
thread_status = 'stopped'
if self._daemonic:
thread_status += ' daemon'
if self._ident is not None:
thread_status += ' %s' % self._ident
reddit_status = 'logged'
if self.is_logged_in():
reddit_status += '-in'
else:
reddit_status += '-out'
if self.is_oauth_session():
reddit_status += ' oauth2'
return "<%s.%s {'thread': (%s, %s), 'reddit': (%s, %s)} at %s>" % (
self.__class__.__module__, self.__class__.__name__,
self.name, thread_status, self.user, reddit_status, hex(id(self)))
class database_handler(shelve.DbfilenameShelf):
def __init__(self, data_file):
super(self.__class__, self).__init__(filename=data_file)
def get(self, key):
try:
return self[key.lower()]
except:
return {}
def set(self, key, val):
try:
assert(isinstance(val, dict))
cur = self.get(key.lower())
val = self.update(val, cur)
self[key.lower()] = val
return True
except:
return False
def remove(self, key):
try:
del self[key.lower()]
return True
except:
return False
def update(self, new_, orig):
for key, val in orig.items():
if isinstance(val, dict):
new_[key] = self.update(new_.get(key, {}), val)
else:
new_[key] = orig[key]
return new_
def terminate(self):
self.sync()
self.close()
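# Illustrative usage sketch (assumption, not part of the original module):
# database_handler lower-cases keys and deep-merges nested dicts, so repeated
# set() calls accumulate per-user data; on conflicting keys the value already
# stored wins. The file name below is a placeholder.
def _example_database_handler_usage(path='example_users.db'):
    store = database_handler(path)
    store.set('SomeUser', {'trades': {'confirmed': 1}})
    store.set('someuser', {'trades': {'pending': 2}})
    merged = store.get('SomeUser')  # {'trades': {'confirmed': 1, 'pending': 2}}
    store.terminate()
    return merged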
class heatware_crawler(multiprocessing.Process):
def __init__(self, page_wait=0, deep_parse=False, rand_wait=False):
# TODO: See if heat is okay with maximum one request per sixty seconds.
# STATUS: Reached out to heat as of Aug 29; no response as of yet.
# The site's robots.txt (http://heatware.com/robots.txt) seems
# to allow any sort of automated crawling, but I won't
# implement the ability to perform a 'deep_parse' until I
# get confirmation from the man himself.
self._state = multiprocessing.Value('c', 0)
self.page_wait = max(60, page_wait)
self.sqrt_wait = math.sqrt(self.page_wait)
# TODO: See if heat is okay with deep crawling of his site.
self.deep_parse = False # deep_parse
if rand_wait:
self.rand_wait = lambda: random.uniform(self.sqrt_wait / 2.0,
self.sqrt_wait * 2.0)
else:
self.rand_wait = lambda: 0
self.next_time = time.time()
self.get_next_time = lambda: (
time.time() + self.page_wait + self.rand_wait())
self.get_page = urllib.request.urlopen
self.root_page = 'http://www.heatware.com/eval.php?id='
self.page_ext = '&pagenum=%i'
self.eval_ext = '&num_days=%i'
self.info_dict = {
# 'deep_parse': self.deep_parse,
'rating': {'positive': 0,
'neutral': 0,
'negative': 0},
'aliases': {},
'location': None,
'evaluations': []
}
self.subhead_map = {
'Evaluation Summary': {'function': self._summary,
'key': 'rating'},
'User Information': {'function': self._information,
'key': 'location'},
'Aliases': {'function': self._aliases,
'key': 'aliases'},
'Evaluations': {'function': self._evaluations,
'key': 'evaluations'}
}
self.text_clean = regex.compile(r'\s+', regex.UNICODE)
self.date_clean = regex.compile(r'\d{2}-\d{2}-\d{4}', regex.UNICODE)
self.info_queue = multiprocessing.Queue()
self.user_queue = multiprocessing.JoinableQueue()
super().__init__()
self.daemon = True
self.start()
def run(self):
while True:
self._state.value = b'i'
heatware_id = self.user_queue.get()
if heatware_id is Ellipsis:
break
else:
self._state.value = b'b'
information = self._parse(heatware_id)
self.info_queue.put((heatware_id, information))
self.user_queue.task_done()
self._state.value = b'd'
def parse(self, id_):
self.user_queue.put(id_)
def kill(self):
self.user_queue.put(Ellipsis)
def state(self):
if self._state.value == b'i':
return 'idle'
if self._state.value == b'b':
return 'busy'
if self._state.value == b'd':
return 'dead'
return 'none'
def is_idle(self):
return self._state.value == b'i'
def is_busy(self):
return self._state.value == b'b'
def is_dead(self):
return self._state.value == b'd'
def remaining_jobs(self):
# Not exact.
return self.user_queue.qsize()
def __nonzero__(self):
# Not reliable.
return self.info_queue.empty()
def __len__(self):
# Not exact.
return self.info_queue.qsize()
def __iter__(self):
try:
while True:
yield self.info_queue.get_nowait()
        except:
            # PEP 479: ending the generator with return is equivalent to the
            # old `raise StopIteration` and keeps working on Python 3.7+
            return
def _parse(self, id_):
return self._extract(self.root_page + str(id_))
def _extract(self, url):
time.sleep(max(0, self.next_time - time.time()))
info = copy.deepcopy(self.info_dict)
page = self.get_page(url)
html = str(page.read())
self.next_time = self.get_next_time()
soup = bs4.BeautifulSoup(html, _BS4_PARSER)
for subhead in soup.find_all(class_='subhead'):
if subhead.text in self.subhead_map:
try:
info[self.subhead_map[subhead.text]['key']] = (
self.subhead_map[subhead.text]['function'](subhead,
soup))
except:
info[self.subhead_map[subhead.text]['key']] = (copy.deepcopy(
self.info_dict[self.subhead_map[subhead.text]['key']]))
return info
def _summary(self, spoonful, soup):
root = spoonful.parent
scores = root.find_all(class_='num')
summary = {}
for idx, item in enumerate(['positive', 'neutral', 'negative']):
try:
summary[item] = int(scores[idx].text)
except:
summary[item] = None
return summary
def _information(self, spoonful, soup):
root = spoonful.parent
info = root.find_all('div')
for idx in range(len(info) - 1):
prior, label = info[idx], info[idx + 1]
if label.text == 'Location':
return prior.text
return None
def _aliases(self, spoonful, soup):
root = spoonful.parent
links = {}
for alias in root.find_all('div'):
link = alias.find('a', href=True)
try:
alias, site = alias.text.split(' on ', 1)
alias = alias.lower()
if link:
links.setdefault(link.text.lower(), {}
).setdefault(alias, link.get('href'))
else:
links.setdefault(site.lower(), {}).setdefault(alias, None)
except:
pass
return links
def _evaluations(self, spoonful, soup):
root = spoonful.parent
evals = {}
for evalu in root.find_all(id=regex.compile(r'rp_[0-9]+')):
id_ = int(evalu.get('id').strip('rp_'))
info = {}
try:
info['user'] = self._clean(evalu.find('td').text)
except:
info['user'] = None
try:
info_string = soup.find(id=('row_%i' % id_)).text
date_match = self.date_clean.search(info_string)
info['date'] = self._clean(date_match.group(0))
date_span = date_match.span(0)
except:
info['date'] = None
date_span = None
if date_span:
try:
info['forum'] = self._clean(info_string[date_span[1]:]
).lower()
except:
info['forum'] = None
else:
info['forum'] = None
try:
for inner in evalu.find_all('strong'):
if 'Comments' in inner.text:
info['comments'] = self._clean(
inner.parent.text.split(None, 1)[1])
except:
info['comments'] = None
evals[id_] = info
return evals
def _clean(self, text):
_text = text.replace('\\t', '\t'
).replace('\\r', '\r'
).replace('\\n', '\n')
return self.text_clean.sub(' ', _text)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=('Automates flair monitoring '
"for reddit's trading "
'subreddits.'),
epilog=('Currently maintained by ' +
', '.join(coerce_reddit_handles()) +
'.'))
parser.add_argument('-is', '--interactive-shell', action='store_true',
help='run the bot with an interactive shell')
args = parser.parse_args()
if args.interactive_shell:
bot_prompt().cmdloop()
else:
bot()
| gpl-2.0 | 7,135,524,112,241,807,000 | 48.879902 | 99 | 0.415147 | false |
quaquel/EMAworkbench | ema_workbench/examples/cart_flu_example.py | 1 | 1172 | '''
Created on May 26, 2015
@author: jhkwakkel
'''
import matplotlib.pyplot as plt
import ema_workbench.analysis.cart as cart
from ema_workbench import ema_logging, load_results
ema_logging.log_to_stderr(level=ema_logging.INFO)
def classify(data):
# get the output for deceased population
result = data['deceased population region 1']
    # if deceased population is higher than 1.000.000 people,
# classify as 1
classes = result[:, -1] > 1000000
return classes
# load data
fn = './data/1000 flu cases with policies.tar.gz'
results = load_results(fn)
experiments, outcomes = results
# extract results for 1 policy
logical = experiments['policy'] == 'no policy'
new_experiments = experiments[logical]
new_outcomes = {}
for key, value in outcomes.items():
new_outcomes[key] = value[logical]
results = (new_experiments, new_outcomes)
# perform cart on modified results tuple
cart_alg = cart.setup_cart(results, classify, mass_min=0.05)
cart_alg.build_tree()
# print cart to std_out
print(cart_alg.stats_to_dataframe())
print(cart_alg.boxes_to_dataframe())
# visualize
cart_alg.show_boxes(together=False)
cart_alg.show_tree()
plt.show()
| bsd-3-clause | -5,965,908,289,611,535,000 | 21.980392 | 61 | 0.726109 | false |
kdart/pycopia3 | net/pycopia/http/jsonrpc1.py | 1 | 3487 | #!/usr/bin/python3.4
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides a simple, general purpose JSON RPC v1.0 over HTTP.
TODO: Asynchronous client handling multiple connections at once.
"""
import json
from pycopia import urls
from pycopia.inet import httputils
from pycopia.http.client import (HTTPRequest, RequestResponseError)
class JSONError(Exception):
pass
class JSONRequestError(JSONError):
pass
class JSONResponseError(JSONError):
pass
def Counter():
i = 0
while True:
yield i
i += 1
class JSON1Method:
COUNTER = Counter() # class singleton
def __init__(self, name, params):
self.method = name
self.params = params
self.id = next(self.COUNTER)
def to_json(self):
return json.dumps({"method": self.method, "params": self.params, "id": self.id})
class SimpleJSONRPCClient:
def __init__(self, url, logfile=None):
self._baseurl = urls.UniversalResourceLocator(url)
self._cookiejar = httputils.CookieJar()
self._logfile = logfile
def call(self, path, query, method, args):
"""Call the remote method, return result.
"""
data = JSON1Method(method, args)
resp = self.post(path, data, query)
res = json.loads(resp.body.decode("utf-8"))
if res["id"] != data.id:
raise JSONRequestError("mismatched id")
err = res.get("error")
if err:
raise JSONResponseError((err["code"], err["message"]))
return res["result"]
def get(self, path, query=None):
url = self._baseurl.copy()
url.path = self._baseurl.path + path
headers = [httputils.Referer(self._baseurl), httputils.Connection("keep-alive")]
request = HTTPRequest(url, method="GET", query=query, cookiejar=self._cookiejar, extraheaders=headers)
resp = request.perform(self._logfile)
if resp.status.code != 200:
raise RequestResponseError(str(resp.status))
self._cookiejar.parse_mozilla_lines(resp.cookielist)
return resp
def post(self, path, data, query=None):
url = self._baseurl.copy()
url.path = self._baseurl.path + path
if query:
url.query = query
request = HTTPRequest(url, data, method="POST", cookiejar=self._cookiejar,
accept="application/json")
resp = request.perform(self._logfile)
if resp.status.code != 200:
raise RequestResponseError(str(resp.status))
self._cookiejar.parse_mozilla_lines(resp.cookielist)
return resp
@property
def cookies(self):
return self._cookiejar.get_setcookies()
def clear_cookies(self):
return self._cookiejar.clear()
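# Illustrative usage sketch (assumption, not part of the original module): the
# endpoint URL, path and remote method name below are placeholders.
def _example_call(url="http://localhost:8080/api"):
    client = SimpleJSONRPCClient(url)
    # call(path, query, method, args) returns the decoded "result" member
    return client.call("/rpc", None, "echo", ("hello",))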
if __name__ == "__main__":
m = JSON1Method("callme", ("maybe", 1))
print(m.to_json())
m = JSON1Method("callme", ("again", 2))
print(m.to_json())
| apache-2.0 | -5,706,128,941,765,622,000 | 29.060345 | 110 | 0.641526 | false |
ty-tal/python | tools/user_audit.py | 1 | 2470 | #!/usr/bin/python2.7 -Wd
'''
file:user_audit.py
use: audits users table and compares current date to last password update
if last update exceeds threshold send a notice to users
author: [email protected]
date: 20131002
connector: mysqldb because the SQL only changes per table layout
'''
# define imported libraries
import datetime,MySQLdb,smtplib
# import mimetext if you want to include a copy of the password update policy
from email.mime.text import MIMEText
# turn mail function off and on
# 0 is off 1 is on
mail_on=0
# define today and lookback day
lookback_days=90
today_day=datetime.datetime.today()
today_holdout=datetime.date.today()
day_format="%Y-%m-%d"
hour_format="%H:%M:%S"
this_day=today_day.strftime(day_format)
this_hour=today_day.strftime(hour_format)
today="%s %s" % (this_day,this_hour)
lookback=datetime.timedelta(days=lookback_days)
holdout=today_holdout-lookback
threshhold_d="%s %s" % (holdout,this_hour)
threshhold=str(threshhold_d)
# build the email message (MIMEText) to be sent to listed users
audit_file="/path/to/audit_message.txt"
ap=open(audit_file, 'rb')
msg=MIMEText(ap.read())
ap.close()
me='[email protected]'
application_name='Your_Application'
#connect to mysql database
audit_db = MySQLdb.connect(host="localhost",user="some_user",passwd="some_password",db="some_db_schema")
# create cursor object
cursor = audit_db.cursor()
# query user table
cursor.execute("select name,full_name,password_created,email from users where password_created < '%s' order by name asc" % threshhold)
print "Auditing users starting %s and looking back %s days to %s.\n" % (today,lookback_days,threshhold)
print "The following users have not updated their passwords in the last %s days.\n " % lookback_days
# echo results if running in a scheduler, i.e. Control-M, Open-Scheduler, etc. so they will appear in sysout
# format the data so it is in readable columns
for row in cursor.fetchall():
    pw_format = str(row[2])
    if mail_on == 1:
        # send an email to the users displayed
        msg['Subject'] = 'Password update reminder from %s' % application_name
        msg['From'] = me
        msg['To'] = row[3]
        sendme = smtplib.SMTP('mail_server')
        sendme.sendmail(me, [row[3]], msg.as_string())
        sendme.quit()
    else:
        print row[0].ljust(30), " ", row[1].ljust(30), " ", pw_format.ljust(30), " ", row[3].ljust(30)
# close the database connection
audit_db.close()
# print the done message
print "\nFinished auditing user table.\n"
| gpl-3.0 | 1,133,495,115,821,487,400 | 35.880597 | 134 | 0.738462 | false |
sargentfrancesca/coming-up-roses | client_site/models.py | 1 | 1300 | from __future__ import unicode_literals
from django.db import models
# Create your models here.
# copy nano lib/python2.7/site-packages/pinax_theme_bootstrap folder
class Category(models.Model):
category_name = models.CharField(max_length=100)
category_description = models.TextField()
def __str__(self):
return "%s (%s)" % (self.category_name, self.category_description)
class Treatment(models.Model):
treatment_name = models.CharField(max_length=100)
treatment_price = models.DecimalField(max_digits=5, decimal_places=2)
treatment_descripton = models.TextField(null=True)
category = models.ForeignKey(Category, on_delete=models.CASCADE)
def __str__(self):
return "%s: %s" % (self.treatment_name, self.treatment_price)
class MailingList(models.Model):
user_name = models.CharField(max_length=100)
user_email = models.EmailField(max_length=254)
def __str__(self):
return "%s: %s" % (self.user_name, self.user_email)
class Image(models.Model):
image_filename = models.CharField(max_length=100)
image_title = models.CharField(max_length=64)
image_description = models.TextField(null=True)
image_folder = models.CharField(max_length=100, default="photo_shoot")
def __str__(self):
return "%s: %s [%s]" % (self.image_filename, self.image_title, self.image_description)
| mit | -7,982,091,275,539,703,000 | 31.525 | 88 | 0.736154 | false |
mvcsantos/QGIS | python/plugins/processing/algs/qgis/RasterLayerHistogram.py | 1 | 3219 | # -*- coding: utf-8 -*-
"""
***************************************************************************
RasterLayerHistogram.py
---------------------
Date : January 2013
Copyright : (C) 2013 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'January 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import matplotlib.pyplot as plt
import matplotlib.pylab as lab
from PyQt4.QtCore import QVariant
from qgis.core import QgsField
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterNumber
from processing.core.parameters import ParameterRaster
from processing.core.outputs import OutputTable
from processing.core.outputs import OutputHTML
from processing.tools import dataobjects
from processing.tools import raster
class RasterLayerHistogram(GeoAlgorithm):
INPUT = 'INPUT'
PLOT = 'PLOT'
TABLE = 'TABLE'
BINS = 'BINS'
def defineCharacteristics(self):
self.name = 'Raster layer histogram'
self.group = 'Graphics'
self.addParameter(ParameterRaster(self.INPUT,
self.tr('Input layer')))
self.addParameter(ParameterNumber(self.BINS,
self.tr('Number of bins'), 2, None, 10))
self.addOutput(OutputHTML(self.PLOT, self.tr('Histogram')))
self.addOutput(OutputTable(self.TABLE, self.tr('Table')))
def processAlgorithm(self):
layer = dataobjects.getObjectFromUri(
self.getParameterValue(self.INPUT))
nbins = self.getParameterValue(self.BINS)
outputplot = self.getOutputValue(self.PLOT)
outputtable = self.getOutputFromName(self.TABLE)
values = raster.scanraster(layer)
# ALERT: this is potentially blocking if the layer is too big
plt.close()
valueslist = []
for v in values:
if v is not None:
valueslist.append(v)
(n, bins, values) = plt.hist(valueslist, nbins)
fields = [QgsField('CENTER_VALUE', QVariant.Double),
QgsField('NUM_ELEM', QVariant.Double)]
writer = outputtable.getTableWriter(fields)
for i in xrange(len(values)):
writer.addRecord([str(bins[i]) + '-' + str(bins[i + 1]), n[i]])
plotFilename = outputplot + '.png'
lab.savefig(plotFilename)
f = open(outputplot, 'w')
f.write('<html><img src="' + plotFilename + '"/></html>')
f.close()
| gpl-2.0 | -4,066,943,660,711,654,000 | 34.373626 | 75 | 0.558559 | false |
Kelfast/mamba-framework | mamba/web/page.py | 1 | 10483 | # -*- test-case-name: mamba.test.test_web -*-
# Copyright (c) 2012 - 2013 Oscar Campos <[email protected]>
# See LICENSE for more details
"""
.. module: page
:platform: Unix, Windows
:synopsis: The Page object is the main web application entry point
.. moduleauthor:: Oscar Campos <[email protected]>
"""
from singledispatch import singledispatch
from twisted.web import static, server
from twisted.python import filepath
from twisted.python import log as twisted_log
from twisted.python.logfile import DailyLogFile
from mamba.utils.less import LessResource
from mamba.utils import log
from mamba.core import templating, resource
os = filepath.os
class Page(resource.Resource):
"""
This represents a full web page in mamba applications. It's usually
the root page of your web site/application.
    The controllers for the routing system are registered here. We first
register any package shared controller because we want to overwrite
them if our application defines the same routes.
:param app: The Mamba Application that implements this page
:type app: :class:`~mamba.application.app.Application`
:param template_paths: additional template paths for resources
:param cache_size: the cache size for Jinja2 Templating system
:param loader: Jinja2 custom templating loader
"""
def __init__(self, app, template_paths=None, cache_size=50, loader=None):
resource.Resource.__init__(self)
# register log file if any
if (app.development is False and
app.already_logging is False and app.log_file is not None):
twisted_log.startLogging(DailyLogFile.fromFullPath(app.log_file))
self._assets = resource.Assets([os.getcwd() + '/static'])
self.template_paths = [
'application/view/templates',
'{}/templates/jinja'.format(
os.path.dirname(__file__).rsplit(os.sep, 1)[0]
)
]
        # set managers
        self._controllers_manager = app.managers.get('controller')
        self._shared_controllers_manager = app.managers.get('packages')
        # insert_stylesheets()/insert_scripts() below rely on these two
        # managers as well; the 'style' and 'script' keys are assumed here
        self._styles_manager = app.managers.get('style')
        self._scripts_manager = app.managers.get('script')
# register controllers
self.register_shared_controllers()
self.register_controllers()
# containers
self.containers = {
'styles': static.Data('', 'text/css'),
'scripts': static.Data('', 'text/javascript')
}
# register containers
self.putChild('styles', self.containers['styles'])
self.putChild('scripts', self.containers['scripts'])
# insert stylesheets and scripts
self.insert_stylesheets()
self.insert_scripts()
# register service ponger
self.putChild('_mamba_pong', static.Data('PONG', 'text/plain'))
# static accessible data (scripts, css, images, and others)
self.putChild('assets', self._assets)
# other initializations
self.generate_dispatches()
self.initialize_templating_system(template_paths, cache_size, loader)
def getChild(self, path, request):
"""
If path is an empty string or index, render_GET should be called,
if not, we just look at the templates loaded from the view templates
directory. If we find a template with the same name than the path
then we render that template.
.. caution::
If there is a controller with the same path than the path
parameter then it will be hidden and the template in templates
path should be rendered instead
:param path: the path
:type path: str
:param request: the Twisted request object
"""
if path == '' or path is None or path == 'index':
return self
for template in self.environment.list_templates():
if path == template.rsplit('.', 1)[0]:
return self
return resource.Resource.getChild(self, path, request)
def render_GET(self, request):
"""Renders the index page or other templates of templates directory
"""
if not request.prepath[0].endswith('.html'):
request.prepath[0] += '.html'
try:
template = templating.Template(
self.environment, template=request.prepath[0]
)
return template.render(**self.render_keys).encode('utf-8')
except templating.TemplateNotFound:
try:
template = templating.Template(
self.environment, template='index.html'
)
return template.render(**self.render_keys).encode('utf-8')
except templating.TemplateNotFound:
pass
template = templating.Template(
self.environment,
template='root_page.html'
)
return template.render(**self.render_keys).encode('utf-8')
def generate_dispatches(self):
"""Generate singledispatches
"""
self.add_template_paths = singledispatch(self.add_template_paths)
self.add_template_paths.register(str, self._add_template_paths_str)
self.add_template_paths.register(list, self._add_template_paths_list)
self.add_template_paths.register(tuple, self._add_template_paths_tuple)
def add_script(self, script):
"""Adds a script to the page
"""
self.putChild(script.prefix, static.File(script.path))
def register_controllers(self):
"""Add a child for each controller in the ControllerManager
"""
for controller in self._controllers_manager.get_controllers().values():
self._register_controller_module(controller)
self._build_controllers_tree()
def register_shared_controllers(self):
"""
Add a child for each shared package controller. If the package
includes a static files directory we add an asset for it
.. versionadded:: 0.3.6
"""
if self._shared_controllers_manager is None:
return
for package in self._shared_controllers_manager.packages.values():
static_data = filepath.FilePath(
'{}/static'.format(os.path.normpath(package['path']))
)
if static_data.exists():
self._assets.add_paths([static_data.path])
real_manager = package.get('controller')
if real_manager is None:
continue
for controller in real_manager.get_controllers().values():
self._register_controller_module(controller, True)
real_manager.build_controller_tree()
def initialize_templating_system(self, template_paths, cache_size, loader):
"""Initialize the Jinja2 templating system for static HTML resources
"""
if self._shared_controllers_manager is not None:
for package in self._shared_controllers_manager.packages.values():
self.add_template_paths('{}/view/templates'.format(
package.get('path'))
)
if template_paths is not None:
self.add_template_paths(template_paths)
if loader is None:
loader = templating.FileSystemLoader
self.environment = templating.Environment(
autoescape=lambda name: (
name.rsplit('.', 1)[1] == 'html' if name is not None else False
),
cache_size=cache_size,
loader=loader(self.template_paths)
)
def insert_stylesheets(self):
"""Insert stylesheets into the HTML
"""
for name, style in self._styles_manager.get_styles().iteritems():
if style.less:
self.containers['styles'].putChild(
name, LessResource(style.path)
)
continue
self.containers['styles'].putChild(name, static.File(style.path))
def insert_scripts(self):
"""Insert scripts to the HTML
"""
for name, script in self._scripts_manager.get_scripts().iteritems():
self.containers['scripts'].putChild(name, static.File(script.path))
def run(self, port=8080):
"""
Method to run the application within Twisted reactor
This method exists for testing purposes only and fast
controller test-development-test workflow. In production you
should use twistd
:param port: the port to listen
:type port: number
"""
from twisted.internet import reactor
factory = server.Site(self)
reactor.listenTCP(port, factory)
reactor.run()
def add_template_paths(self, paths):
"""Add template paths to the underlying Jinja2 templating system
"""
raise RuntimeError(
'{} type for paths can not be handled'.format(type(paths)))
def _add_template_paths_str(self, paths):
"""Append template paths for single str template path given
"""
self.template_paths.append(paths)
def _add_template_paths_list(self, paths):
"""Adds the given template paths list
"""
self.template_paths += paths
def _add_template_paths_tuple(self, paths):
"""Adds the given template paths tuple
"""
self.template_paths += list(paths)
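    # Illustrative note (assumption, not original code): generate_dispatches()
    # registers the three handlers above with singledispatch, so callers can
    # pass a single path or a collection interchangeably, e.g.:
    #   page.add_template_paths('application/view/extra')
    #   page.add_template_paths(['application/view/extra'])
    #   page.add_template_paths(('application/view/extra',))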
def _register_controller_module(self, controller, shared=False):
"""Efectively register the controller in the routing system
:param controller: the controller to be registered
:type controller: :class:`mamba.application.controller.Controller`
        :param shared: is this a shared controller?
:type shared: bool
"""
log.info(
'Registering {} controller {} with route {} {}({})'.format(
'shared' if shared else '',
controller.get('object').name,
controller.get('object').get_register_path(),
controller.get('object').name,
controller.get('module')
)
)
if controller.get('object').__parent__ is None:
self.putChild(
controller.get('object').get_register_path(),
controller.get('object')
)
def _build_controllers_tree(self):
"""Build the full controllers tree
"""
self._controllers_manager.build_controller_tree()
__all__ = ['Page']
| gpl-3.0 | -775,358,922,251,888,300 | 32.707395 | 79 | 0.610417 | false |
geonition/base_page | base_page/admin.py | 1 | 1467 | """
Admin classes for base_page related models
"""
from django.contrib.gis import admin
from django.core.urlresolvers import reverse_lazy
from base_page.forms import OrganizationSettingForm
from base_page.models import Feedback
from base_page.models import OrganizationSetting
from django.conf import settings
from modeltranslation.admin import TranslationAdmin
class OrganizationSettingAdmin(TranslationAdmin, admin.OSMGeoAdmin):
"""
The OrganizationSettingAdmin handles the organization specific settings
for the site.
"""
list_display = ('organization_name',
'title',
'blurb',
'provider',)
default_lon = getattr(settings,
'ORGANIZATION_ADMIN_DEFAULT_MAP_SETTINGS',
{'default_lon': 0})['default_lon']
default_lat = getattr(settings,
'ORGANIZATION_ADMIN_DEFAULT_MAP_SETTINGS',
{'default_lat': 0})['default_lat']
default_zoom = getattr(settings,
'ORGANIZATION_ADMIN_DEFAULT_MAP_SETTINGS',
{'default_zoom': 4})['default_zoom']
form = OrganizationSettingForm
openlayers_url = '%s%s' % (getattr(settings, 'STATIC_URL', '/'), 'js/libs/OpenLayers.js')
extra_js = (reverse_lazy('osmextra'),)
admin.site.register(Feedback)
admin.site.register(OrganizationSetting, OrganizationSettingAdmin)
| mit | 6,753,025,144,731,848,000 | 37.605263 | 93 | 0.632584 | false |
finikorg/zephyr | scripts/filter-known-issues.py | 1 | 9183 | #! /usr/bin/env python3
"""
Filters a file, classifying output in errors, warnings and discarding
the rest.
Given a set of regular expressions read from files named *.conf in the
given configuration path(s), of the format:
#
# Comments for multiline regex 1...
#
MULTILINEPYTHONREGEX
MULTILINEPYTHONREGEX
MULTILINEPYTHONREGEX
#
# Comments for multiline regex 2...
#
#WARNING
MULTILINEPYTHONREGEX2
MULTILINEPYTHONREGEX2
Anything matched by MULTILINEPYTHONREGEX will be considered something
to be filtered out and not printed.
Anything matched by MULTILINEPYTHONREGEX2 with a #WARNING tag in the
comment (the tag is optional) describes something that is considered
to be a warning. Print it to stderr.
Anything leftover is considered to be errors, printed to stdout.
"""
import argparse
import logging
import mmap
import os
import re
import sre_constants
import sys
import traceback
exclude_regexs = []
# first is a list of one or more comment lines
# followed by a list of non-comments which describe a multiline regex
config_regex = \
b"(?P<comment>(^\\s*#.*\n)+)" \
b"(?P<regex>(^[^#].*\n)+)"
def config_import_file(filename):
"""
Imports regular expression from any file *.conf in the given path,
format as given in the main doc
"""
try:
with open(filename, "rb") as f:
mm = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
# That regex basically selects any block of
# lines that is not a comment block. The
# finditer() finds all the blocks and selects
# the bits of mmapped-file that comprises
# each--we compile it into a regex and append.
for m in re.finditer(config_regex, mm, re.MULTILINE):
origin = "%s:%s-%s" % (filename, m.start(), m.end())
gd = m.groupdict()
comment = gd['comment']
regex = gd['regex']
try:
r = re.compile(regex, re.MULTILINE)
except sre_constants.error as e:
logging.error("%s: bytes %d-%d: bad regex: %s",
filename, m.start(), m.end(), e)
raise
logging.debug("%s: found regex at bytes %d-%d: %s",
filename, m.start(), m.end(), regex)
if b'#WARNING' in comment:
exclude_regexs.append((r, origin, ('warning',)))
else:
exclude_regexs.append((r, origin, ()))
logging.debug("%s: loaded", filename)
except Exception as e:
logging.error("E: %s: can't load config file: %s" % (filename, e))
raise
def config_import_path(path):
"""
Imports regular expression from any file *.conf in the given path
"""
file_regex = re.compile(r".*\.conf$")
try:
for dirpath, _, filenames in os.walk(path):
for _filename in sorted(filenames):
filename = os.path.join(dirpath, _filename)
if not file_regex.search(_filename):
logging.debug("%s: ignored", filename)
continue
config_import_file(filename)
except Exception as e:
raise Exception(
"E: %s: can't load config files: %s %s" %
(path, e, traceback.format_exc()))
def config_import(paths):
"""
Imports regular expression from any file *.conf in the list of paths.
If a path is "" or None, the list of paths until then is flushed
and only the new ones are considered.
"""
_paths = []
# Go over the list, flush it if the user gave an empty path ("")
for path in paths:
if path == "" or path is None:
logging.debug("flushing current config list: %s", _paths)
_paths = []
else:
_paths.append(path)
logging.debug("config list: %s", _paths)
for path in _paths:
config_import_path(path)
arg_parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
arg_parser.add_argument("-v", "--verbosity", action="count", default=0,
help="increase verbosity")
arg_parser.add_argument("-q", "--quiet", action="count", default=0,
help="decrease verbosity")
arg_parser.add_argument("-e", "--errors", action="store", default=None,
help="file where to store errors")
arg_parser.add_argument("-w", "--warnings", action="store", default=None,
help="file where to store warnings")
arg_parser.add_argument("-c", "--config-dir", action="append", nargs="?",
default=[".known-issues/"],
help="configuration directory (multiple can be "
"given; if none given, clears the current list) "
"%(default)s")
arg_parser.add_argument("FILENAMEs", nargs="+",
help="files to filter")
args = arg_parser.parse_args()
logging.basicConfig(level=40 - 10 * (args.verbosity - args.quiet),
format="%(levelname)s: %(message)s")
path = ".known-issues/"
logging.debug("Reading configuration from directory `%s`", path)
config_import(args.config_dir)
exclude_ranges = []
if args.warnings:
warnings = open(args.warnings, "w")
else:
warnings = None
if args.errors:
errors = open(args.errors, "w")
else:
errors = None
def report_error(data):
sys.stdout.write(data.decode('utf-8'))
if errors:
errors.write(data.decode('utf-8'))
def report_warning(data):
sys.stderr.write(data.decode('utf-8'))
if warnings:
warnings.write(data.decode('utf-8'))
for filename in args.FILENAMEs:
if os.stat(filename).st_size == 0:
continue # skip empty log files
try:
with open(filename, "r+b") as f:
logging.info("%s: filtering", filename)
# Yeah, this should be more protected in case of exception
# and such, but this is a short running program...
mm = mmap.mmap(f.fileno(), 0)
for ex, origin, flags in exclude_regexs:
logging.info("%s: searching from %s: %s",
filename, origin, ex.pattern)
for m in re.finditer(ex.pattern, mm, re.MULTILINE):
logging.info("%s: %s-%s: match from from %s %s",
filename, m.start(), m.end(), origin, flags)
if 'warning' in flags:
exclude_ranges.append((m.start(), m.end(), True))
else:
exclude_ranges.append((m.start(), m.end(), False))
exclude_ranges = sorted(exclude_ranges, key=lambda r: r[0])
logging.warning(
"%s: ranges excluded: %s",
filename,
exclude_ranges)
# Decide what to do with what has been filtered; warnings
# go to stderr and warnings file, errors to stdout, what
# is ignored is just dumped.
offset = 0
for b, e, warning in exclude_ranges:
mm.seek(offset)
if b > offset:
# We have something not caught by a filter, an error
logging.info("%s: error range (%d, %d), from %d %dB",
filename, offset, b, offset, b - offset)
report_error(mm.read(b - offset))
mm.seek(b)
if warning: # A warning, print it
mm.seek(b)
logging.info("%s: warning range (%d, %d), from %d %dB",
filename, b, e, offset, e - b)
report_warning(mm.read(e - b))
else: # Exclude, ignore it
d = b - offset
logging.info("%s: exclude range (%d, %d), from %d %dB",
filename, b, e, offset, d)
offset = e
mm.seek(offset)
if len(mm) != offset:
logging.info("%s: error final range from %d %dB",
filename, offset, len(mm))
report_error(mm.read(len(mm) - offset - 1))
del mm
except Exception as e:
logging.error("%s: cannot load: %s", filename, e)
raise
if warnings or errors:
if warnings:
warnings.flush()
if errors:
errors.flush()
    if ((args.warnings and os.path.isfile(args.warnings)
         and os.path.getsize(args.warnings) > 0) or
            (args.errors and os.path.isfile(args.errors)
             and os.path.getsize(args.errors) > 0)):
print('''\n\n ---- New errors/warnings not tracked as .known-issues/, \
please fix them ----\n''')
if args.warnings:
print(open(args.warnings, "r").read())
if args.errors and (args.errors != args.warnings):
print(open(args.errors, "r").read())
else:
print("\n\nNo new errors/warnings.\n")
print('''\nTo see *all* new error/warnings you must make/ninja clean and \
rebuild from scratch.''')
| apache-2.0 | -1,054,487,401,185,095,400 | 35.879518 | 81 | 0.551563 | false |
aguijarro/DataSciencePython | DataWrangling/CaseStudy/project/audit.py | 1 | 2399 | import xml.etree.cElementTree as ET
from collections import defaultdict
import re
street_type_re = re.compile(r'\S+\.?$', re.IGNORECASE)
city_type_re = re.compile(r'\S+\.?$', re.IGNORECASE)
expected = ["Street", "Avenue", "Boulevard", "Drive",
"Court", "Place", "Square", "Lane", "Road",
"Trail", "Parkway", "Commons"]
def print_sorted_dict(d):
keys = d.keys()
keys = sorted(keys, key=lambda s: s.lower())
for k in keys:
v = d[k]
print "%s: %d" % (k, v)
def audit_street_type(street_types, street_types_count, street_name):
m = street_type_re.search(street_name)
if m:
street_type = m.group()
street_types_count[street_type] += 1
if street_type not in expected:
street_types[street_type].add(street_name)
def is_street_name(elem):
return (elem.tag == "tag") and (elem.attrib['k'] == "addr:street")
def audit(input_file):
osm_file = open(input_file, "r")
street_types = defaultdict(set)
street_types_count = defaultdict(int)
for event, elem in ET.iterparse(osm_file, events=("start",)):
if elem.tag == "node" or elem.tag == "way":
for tag in elem.iter("tag"):
if is_street_name(tag):
audit_street_type(street_types,
street_types_count,
tag.attrib['v'])
osm_file.close()
return street_types, street_types_count
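# Illustrative usage sketch (assumption, not part of the original module);
# 'sample.osm' is a placeholder file name:
#   street_types, street_counts = audit('sample.osm')
#   print_sorted_dict(street_counts)  # counts per street-type suffix
#   # street_types maps each unexpected suffix to a set of example names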
def audit_city_type(city_types, city_types_count, city_name):
m = city_type_re.search(city_name)
if m:
city_type = m.group()
city_types_count[city_type] += 1
if city_type not in expected:
city_types[city_type].add(city_name)
def is_city_name(elem):
return (elem.tag == "tag") and (elem.attrib['k'] == "addr:city")
def audit_city(input_file):
osm_file = open(input_file, "r")
city_types = defaultdict(set)
city_types_count = defaultdict(int)
for event, elem in ET.iterparse(osm_file, events=("start",)):
if elem.tag == "node" or elem.tag == "way":
for tag in elem.iter("tag"):
if is_city_name(tag):
audit_city_type(city_types,
city_types_count,
tag.attrib['v'])
osm_file.close()
    return city_types, city_types_count
| mit | -636,464,583,109,244,500 | 31 | 70 | 0.557316 | false |
scrapinghub/dateparser | dateparser/data/date_translation_data/ky.py | 1 | 4964 | info = {
"name": "ky",
"date_order": "DMY",
"january": [
"янв",
"январь"
],
"february": [
"фев",
"февраль"
],
"march": [
"мар",
"март"
],
"april": [
"апр",
"апрель"
],
"may": [
"май"
],
"june": [
"июн",
"июнь"
],
"july": [
"июл",
"июль"
],
"august": [
"авг",
"август"
],
"september": [
"сен",
"сентябрь"
],
"october": [
"окт",
"октябрь"
],
"november": [
"ноя",
"ноябрь"
],
"december": [
"дек",
"декабрь"
],
"monday": [
"дүй",
"дүйшөмбү"
],
"tuesday": [
"шейш",
"шейшемби"
],
"wednesday": [
"шарш",
"шаршемби"
],
"thursday": [
"бейш",
"бейшемби"
],
"friday": [
"жума"
],
"saturday": [
"ишемби",
"ишм"
],
"sunday": [
"жек",
"жекшемби"
],
"am": [
"таңкы",
"тң"
],
"pm": [
"тк",
"түштөн кийинки"
],
"year": [
"ж",
"жыл"
],
"month": [
"ай"
],
"week": [
"апт",
"апта"
],
"day": [
"күн"
],
"hour": [
"саат",
"ст"
],
"minute": [
"м",
"мүн",
"мүнөт"
],
"second": [
"сек",
"секунд"
],
"relative-type": {
"0 day ago": [
"бүгүн"
],
"0 hour ago": [
"ушул саатта"
],
"0 minute ago": [
"ушул мүнөттө"
],
"0 month ago": [
"бул айда"
],
"0 second ago": [
"азыр"
],
"0 week ago": [
"ушул апт",
"ушул аптада"
],
"0 year ago": [
"быйыл"
],
"1 day ago": [
"кечээ"
],
"1 month ago": [
"өткөн айда"
],
"1 week ago": [
"өткөн апт",
"өткөн аптада"
],
"1 year ago": [
"былтыр"
],
"in 1 day": [
"эртеӊ"
],
"in 1 month": [
"эмдиги айда"
],
"in 1 week": [
"келерки апт",
"келерки аптада"
],
"in 1 year": [
"эмдиги жылы"
]
},
"relative-type-regex": {
"\\1 day ago": [
"(\\d+) күн мурун"
],
"\\1 hour ago": [
"(\\d+) с мурн",
"(\\d+) саат мурун"
],
"\\1 minute ago": [
"(\\d+) мүн мурн",
"(\\d+) мүн мурун",
"(\\d+) мүнөт мурун"
],
"\\1 month ago": [
"(\\d+) ай мурн",
"(\\d+) ай мурун"
],
"\\1 second ago": [
"(\\d+) сек мурн",
"(\\d+) сек мурун",
"(\\d+) секунд мурун"
],
"\\1 week ago": [
"(\\d+) апт мурун",
"(\\d+) апта мурун"
],
"\\1 year ago": [
"(\\d+) жыл мурун"
],
"in \\1 day": [
"(\\d+) күн кийин",
"(\\d+) күндөн кийин"
],
"in \\1 hour": [
"(\\d+) с кийн",
"(\\d+) саат кийин",
"(\\d+) сааттан кийин"
],
"in \\1 minute": [
"(\\d+) мүн кийин",
"(\\d+) мүн кийн",
"(\\d+) мүнөттөн кийин"
],
"in \\1 month": [
"(\\d+) айд кийин",
"(\\d+) айд кийн",
"(\\d+) айдан кийин"
],
"in \\1 second": [
"(\\d+) сек кийин",
"(\\d+) сек кийн",
"(\\d+) секунддан кийин"
],
"in \\1 week": [
"(\\d+) апт кийин",
"(\\d+) аптадан кийин"
],
"in \\1 year": [
"(\\d+) жыл кийин",
"(\\d+) жылдан кийин"
]
},
"locale_specific": {},
"skip": [
" ",
"'",
",",
"-",
".",
"/",
";",
"@",
"[",
"]",
"|",
","
]
}
| bsd-3-clause | 5,801,358,384,835,052,000 | 16.9375 | 36 | 0.267364 | false |
ppppn/bib-generator | gen_bib.py | 1 | 1967 | #! /bin/sh
""":"
exec python3 "$0" ${1+"$@"}
"""
import argparse
import csv
import re
from datetime import datetime
from html_format import HTML_FORMAT
def readStyles(format_csv_fname):
formats = {}
f = open(format_csv_fname, encoding='sjis')
reader = csv.reader(f)
category_header = next(reader)[0]
for format_name, format in reader:
format = re.sub('{', '{0[', format)
formats[format_name] = re.sub('}', ']}', format)
return category_header, formats
def readBibList(biblist_csv_fname):
f = open(biblist_csv_fname, encoding='sjis')
reader = csv.reader(f)
properties = next(reader)
bib_list = []
for bib in reader:
current_bib = {}
i = 0
for i in range(len(properties)):
current_bib[properties[i]] = bib[i]
bib_list.append(current_bib)
return bib_list
def applyStyleToBib(format, bib):
line = format.format(bib)
line = re.sub('///(.*)///', '<I>\\1</I>', line)
return line
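# Illustrative example (assumption, not part of the original script): after
# readStyles() rewrites "{author}, ///{title}///." to
# "{0[author]}, ///{0[title]}///.", applyStyleToBib() fills the fields and
# renders the ///...///-delimited span in italics:
#   applyStyleToBib('{0[author]}, ///{0[title]}///.',
#                   {'author': 'Doe', 'title': 'X'})
#   -> 'Doe, <I>X</I>.'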
def generateHTML(formatfname, biblistfname):
category_header, formats = readStyles(formatfname)
biblist = readBibList(biblistfname)
body = ''
for current_bib in biblist:
selected_format = formats[current_bib.pop(category_header)]
body += applyStyleToBib(selected_format, current_bib) + '<BR/>\n'
outputfile = open('result.html', 'w', encoding='utf-8')
outputfile.write(HTML_FORMAT.format(bib_body=body,
time_stamp=datetime.now()))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('formats_csv_fname', action='store', nargs='?',
const=None, default='formats.csv', type=str)
parser.add_argument('biblist_csv_fname', action='store', nargs='?',
const=None, default='biblist.csv', type=str)
args = parser.parse_args()
generateHTML(args.formats_csv_fname, args.biblist_csv_fname)
if __name__ == '__main__':
main()
| mit | -3,812,726,738,535,261,000 | 31.245902 | 73 | 0.613625 | false |
daizhengy/RDS | trove/guestagent/datastore/mysql/service.py | 1 | 43726 | # Copyright 2013 OpenStack Foundation
# Copyright 2013 Rackspace Hosting
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import os
import re
import uuid
from collections import defaultdict
import sqlalchemy
from sqlalchemy import exc
from sqlalchemy import interfaces
from sqlalchemy.sql.expression import text
from trove.common import cfg
from trove.common import utils as utils
from trove.common import exception
from trove.common import instance as rd_instance
from trove.common.exception import PollTimeOut
from trove.guestagent.common import operating_system
from trove.guestagent.common import sql_query
from trove.guestagent.db import models
from trove.guestagent import pkg
from trove.guestagent.datastore import service
from trove.openstack.common import log as logging
from trove.common.i18n import _
ADMIN_USER_NAME = "os_admin"
LOG = logging.getLogger(__name__)
FLUSH = text(sql_query.FLUSH)
ENGINE = None
PREPARING = False
UUID = False
TMP_MYCNF = "/tmp/my.cnf.tmp"
MYSQL_BASE_DIR = "/var/lib/mysql"
CONF = cfg.CONF
MANAGER = CONF.datastore_manager if CONF.datastore_manager else 'mysql'
INCLUDE_MARKER_OPERATORS = {
True: ">=",
False: ">"
}
OS_NAME = operating_system.get_os()
MYSQL_CONFIG = {operating_system.REDHAT: "/etc/my.cnf",
operating_system.DEBIAN: "/etc/mysql/my.cnf",
operating_system.SUSE: "/etc/my.cnf"}[OS_NAME]
MYSQL_SERVICE_CANDIDATES = ["mysql", "mysqld", "mysql-server"]
MYSQL_BIN_CANDIDATES = ["/usr/sbin/mysqld", "/usr/libexec/mysqld"]
MYCNF_OVERRIDES = "/etc/mysql/conf.d/overrides.cnf"
MYCNF_OVERRIDES_TMP = "/tmp/overrides.cnf.tmp"
MYCNF_REPLMASTER = "/etc/mysql/conf.d/0replmaster.cnf"
MYCNF_REPLSLAVE = "/etc/mysql/conf.d/1replslave.cnf"
MYCNF_REPLCONFIG_TMP = "/tmp/replication.cnf.tmp"
# Create a package impl
packager = pkg.Package()
def clear_expired_password():
"""
    Some MySQL installations generate a random root password
    and save it in /root/.mysql_secret; this password is
    expired and should be changed by a client that supports
    expired passwords.
LOG.debug("Removing expired password.")
secret_file = "/root/.mysql_secret"
try:
out, err = utils.execute("cat", secret_file,
run_as_root=True, root_helper="sudo")
except exception.ProcessExecutionError:
LOG.exception(_("/root/.mysql_secret does not exist."))
return
m = re.match('# The random password set for the root user at .*: (.*)',
out)
if m:
try:
out, err = utils.execute("mysqladmin", "-p%s" % m.group(1),
"password", "", run_as_root=True,
root_helper="sudo")
except exception.ProcessExecutionError:
LOG.exception(_("Cannot change mysql password."))
return
utils.execute("rm", "-f", secret_file, run_as_root=True,
root_helper="sudo")
LOG.debug("Expired password removed.")
def get_auth_password():
pwd, err = utils.execute_with_timeout(
"sudo",
"awk",
"/password\\t=/{print $3; exit}",
MYSQL_CONFIG)
if err:
LOG.error(err)
raise RuntimeError("Problem reading my.cnf! : %s" % err)
return pwd.strip()
def get_engine():
"""Create the default engine with the updated admin user."""
#TODO(rnirmal):Based on permissions issues being resolved we may revert
#url = URL(drivername='mysql', host='localhost',
# query={'read_default_file': '/etc/mysql/my.cnf'})
global ENGINE
if ENGINE:
return ENGINE
pwd = get_auth_password()
ENGINE = sqlalchemy.create_engine("mysql://%s:%s@localhost:3306" %
(ADMIN_USER_NAME, pwd.strip()),
pool_recycle=7200,
echo=CONF.sql_query_logging,
listeners=[KeepAliveConnection()])
return ENGINE
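# Parses `mysqld --print-defaults` output into a mapping of option name to a
# list of values, e.g. (illustrative values only):
# {'user': ['mysql'], 'pid_file': ['/var/run/mysqld/mysqld.pid'],
#  'skip-external-locking': [None]}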
def load_mysqld_options():
#find mysqld bin
for bin in MYSQL_BIN_CANDIDATES:
if os.path.isfile(bin):
mysqld_bin = bin
break
else:
return {}
try:
out, err = utils.execute(mysqld_bin, "--print-defaults",
run_as_root=True, root_helper="sudo")
arglist = re.split("\n", out)[1].split()
args = defaultdict(list)
for item in arglist:
if "=" in item:
key, value = item.split("=", 1)
args[key.lstrip("--")].append(value)
else:
args[item.lstrip("--")].append(None)
return args
except exception.ProcessExecutionError:
return {}
class MySqlAppStatus(service.BaseDbStatus):
@classmethod
def get(cls):
if not cls._instance:
cls._instance = MySqlAppStatus()
return cls._instance
def _get_actual_db_status(self):
try:
out, err = utils.execute_with_timeout(
"/usr/bin/mysqladmin",
"ping", run_as_root=True, root_helper="sudo",
log_output_on_error=True)
LOG.info(_("MySQL Service Status is RUNNING."))
return rd_instance.ServiceStatuses.RUNNING
except exception.ProcessExecutionError:
LOG.exception(_("Failed to get database status."))
try:
out, err = utils.execute_with_timeout("/bin/ps", "-C",
"mysqld", "h")
pid = out.split()[0]
# TODO(rnirmal): Need to create new statuses for instances
# where the mysql service is up, but unresponsive
LOG.info(_('MySQL Service Status %(pid)s is BLOCKED.') %
{'pid': pid})
return rd_instance.ServiceStatuses.BLOCKED
except exception.ProcessExecutionError:
LOG.exception(_("Process execution failed."))
mysql_args = load_mysqld_options()
pid_file = mysql_args.get('pid_file',
['/var/run/mysqld/mysqld.pid'])[0]
if os.path.exists(pid_file):
LOG.info(_("MySQL Service Status is CRASHED."))
return rd_instance.ServiceStatuses.CRASHED
else:
LOG.info(_("MySQL Service Status is SHUTDOWN."))
return rd_instance.ServiceStatuses.SHUTDOWN
class LocalSqlClient(object):
"""A sqlalchemy wrapper to manage transactions."""
def __init__(self, engine, use_flush=True):
self.engine = engine
self.use_flush = use_flush
def __enter__(self):
self.conn = self.engine.connect()
self.trans = self.conn.begin()
return self.conn
def __exit__(self, type, value, traceback):
if self.trans:
if type is not None: # An error occurred
self.trans.rollback()
else:
if self.use_flush:
self.conn.execute(FLUSH)
self.trans.commit()
self.conn.close()
def execute(self, t, **kwargs):
try:
return self.conn.execute(t, kwargs)
except Exception:
self.trans.rollback()
self.trans = None
raise
class MySqlAdmin(object):
"""Handles administrative tasks on the MySQL database."""
def _associate_dbs(self, user):
"""Internal. Given a MySQLUser, populate its databases attribute."""
LOG.debug("Associating dbs to user %s at %s." %
(user.name, user.host))
with LocalSqlClient(get_engine()) as client:
q = sql_query.Query()
q.columns = ["grantee", "table_schema"]
q.tables = ["information_schema.SCHEMA_PRIVILEGES"]
q.group = ["grantee", "table_schema"]
q.where = ["privilege_type != 'USAGE'"]
t = text(str(q))
db_result = client.execute(t)
for db in db_result:
LOG.debug("\t db: %s." % db)
if db['grantee'] == "'%s'@'%s'" % (user.name, user.host):
mysql_db = models.MySQLDatabase()
mysql_db.name = db['table_schema']
user.databases.append(mysql_db.serialize())
def change_passwords(self, users):
"""Change the passwords of one or more existing users."""
LOG.debug("Changing the password of some users.")
with LocalSqlClient(get_engine()) as client:
for item in users:
LOG.debug("Changing password for user %s." % item)
user_dict = {'_name': item['name'],
'_host': item['host'],
'_password': item['password']}
user = models.MySQLUser()
user.deserialize(user_dict)
LOG.debug("\tDeserialized: %s." % user.__dict__)
uu = sql_query.UpdateUser(user.name, host=user.host,
clear=user.password)
t = text(str(uu))
client.execute(t)
def update_attributes(self, username, hostname, user_attrs):
"""Change the attributes of an existing user."""
LOG.debug("Changing user attributes for user %s." % username)
user = self._get_user(username, hostname)
db_access = set()
grantee = set()
with LocalSqlClient(get_engine()) as client:
q = sql_query.Query()
q.columns = ["grantee", "table_schema"]
q.tables = ["information_schema.SCHEMA_PRIVILEGES"]
q.group = ["grantee", "table_schema"]
q.where = ["privilege_type != 'USAGE'"]
t = text(str(q))
db_result = client.execute(t)
for db in db_result:
grantee.add(db['grantee'])
if db['grantee'] == "'%s'@'%s'" % (user.name, user.host):
db_name = db['table_schema']
db_access.add(db_name)
with LocalSqlClient(get_engine()) as client:
uu = sql_query.UpdateUser(user.name, host=user.host,
clear=user_attrs.get('password'),
new_user=user_attrs.get('name'),
new_host=user_attrs.get('host'))
t = text(str(uu))
client.execute(t)
uname = user_attrs.get('name') or username
host = user_attrs.get('host') or hostname
find_user = "'%s'@'%s'" % (uname, host)
if find_user not in grantee:
self.grant_access(uname, host, db_access)
def create_database(self, databases):
"""Create the list of specified databases."""
with LocalSqlClient(get_engine()) as client:
for item in databases:
mydb = models.ValidatedMySQLDatabase()
mydb.deserialize(item)
cd = sql_query.CreateDatabase(mydb.name,
mydb.character_set,
mydb.collate)
t = text(str(cd))
client.execute(t)
def create_user(self, users):
"""Create users and grant them privileges for the
specified databases.
"""
with LocalSqlClient(get_engine()) as client:
for item in users:
user = models.MySQLUser()
user.deserialize(item)
# TODO(cp16net):Should users be allowed to create users
# 'os_admin' or 'debian-sys-maint'
g = sql_query.Grant(user=user.name, host=user.host,
clear=user.password)
t = text(str(g))
client.execute(t)
for database in user.databases:
mydb = models.ValidatedMySQLDatabase()
mydb.deserialize(database)
g = sql_query.Grant(permissions='ALL', database=mydb.name,
user=user.name, host=user.host,
clear=user.password)
t = text(str(g))
client.execute(t)
def delete_database(self, database):
"""Delete the specified database."""
with LocalSqlClient(get_engine()) as client:
mydb = models.ValidatedMySQLDatabase()
mydb.deserialize(database)
dd = sql_query.DropDatabase(mydb.name)
t = text(str(dd))
client.execute(t)
def delete_user(self, user):
"""Delete the specified user."""
mysql_user = models.MySQLUser()
mysql_user.deserialize(user)
self.delete_user_by_name(mysql_user.name, mysql_user.host)
def delete_user_by_name(self, name, host='%'):
with LocalSqlClient(get_engine()) as client:
du = sql_query.DropUser(name, host=host)
t = text(str(du))
LOG.debug("delete_user_by_name: %s", t)
client.execute(t)
def get_user(self, username, hostname):
user = self._get_user(username, hostname)
if not user:
return None
return user.serialize()
def _get_user(self, username, hostname):
"""Return a single user matching the criteria."""
user = models.MySQLUser()
try:
user.name = username # Could possibly throw a BadRequest here.
except exception.ValueError as ve:
LOG.exception(_("Error Getting user information"))
raise exception.BadRequest(_("Username %(user)s is not valid"
": %(reason)s") %
{'user': username, 'reason': ve.message}
)
with LocalSqlClient(get_engine()) as client:
q = sql_query.Query()
q.columns = ['User', 'Host', 'Password']
q.tables = ['mysql.user']
q.where = ["Host != 'localhost'",
"User = '%s'" % username,
"Host = '%s'" % hostname]
q.order = ['User', 'Host']
t = text(str(q))
result = client.execute(t).fetchall()
LOG.debug("Getting user information %s." % result)
if len(result) != 1:
return None
found_user = result[0]
user.password = found_user['Password']
user.host = found_user['Host']
self._associate_dbs(user)
return user
def grant_access(self, username, hostname, databases):
"""Grant a user permission to use a given database."""
user = self._get_user(username, hostname)
mydb = models.ValidatedMySQLDatabase()
with LocalSqlClient(get_engine()) as client:
for database in databases:
try:
mydb.name = database
except ValueError:
LOG.exception(_("Error granting access"))
raise exception.BadRequest(_(
"Grant access to %s is not allowed") % database)
g = sql_query.Grant(permissions='ALL', database=mydb.name,
user=user.name, host=user.host,
hashed=user.password)
t = text(str(g))
client.execute(t)
def is_root_enabled(self):
"""Return True if root access is enabled; False otherwise."""
return MySqlRootAccess.is_root_enabled()
def enable_root(self, root_password=None):
"""Enable the root user global access and/or
reset the root password.
"""
return MySqlRootAccess.enable_root(root_password)
def list_databases(self, limit=None, marker=None, include_marker=False):
"""List databases the user created on this mysql instance."""
LOG.debug("---Listing Databases---")
ignored_database_names = "'%s'" % "', '".join(CONF.ignore_dbs)
LOG.debug("The following database names are on ignore list and will "
"be omitted from the listing: %s" % ignored_database_names)
databases = []
with LocalSqlClient(get_engine()) as client:
# If you have an external volume mounted at /var/lib/mysql
# the lost+found directory will show up in mysql as a database
# which will create errors if you try to do any database ops
# on it. So we remove it here if it exists.
q = sql_query.Query()
q.columns = [
'schema_name as name',
'default_character_set_name as charset',
'default_collation_name as collation',
]
q.tables = ['information_schema.schemata']
q.where = ["schema_name NOT IN (" + ignored_database_names + ")"]
q.order = ['schema_name ASC']
if limit:
q.limit = limit + 1
if marker:
q.where.append("schema_name %s '%s'" %
(INCLUDE_MARKER_OPERATORS[include_marker],
marker))
t = text(str(q))
database_names = client.execute(t)
next_marker = None
LOG.debug("database_names = %r." % database_names)
for count, database in enumerate(database_names):
if count >= limit:
break
LOG.debug("database = %s." % str(database))
mysql_db = models.MySQLDatabase()
mysql_db.name = database[0]
next_marker = mysql_db.name
mysql_db.character_set = database[1]
mysql_db.collate = database[2]
databases.append(mysql_db.serialize())
LOG.debug("databases = " + str(databases))
if database_names.rowcount <= limit:
next_marker = None
return databases, next_marker
def list_users(self, limit=None, marker=None, include_marker=False):
"""List users that have access to the database."""
'''
SELECT
User,
Host,
Marker
FROM
(SELECT
User,
Host,
CONCAT(User, '@', Host) as Marker
FROM mysql.user
ORDER BY 1, 2) as innerquery
WHERE
Marker > :marker
ORDER BY
Marker
LIMIT :limit;
'''
LOG.debug("---Listing Users---")
users = []
with LocalSqlClient(get_engine()) as client:
mysql_user = models.MySQLUser()
iq = sql_query.Query() # Inner query.
iq.columns = ['User', 'Host', "CONCAT(User, '@', Host) as Marker"]
iq.tables = ['mysql.user']
iq.order = ['User', 'Host']
innerquery = str(iq).rstrip(';')
oq = sql_query.Query() # Outer query.
oq.columns = ['User', 'Host', 'Marker']
oq.tables = ['(%s) as innerquery' % innerquery]
oq.where = ["Host != 'localhost'"]
oq.order = ['Marker']
if marker:
oq.where.append("Marker %s '%s'" %
(INCLUDE_MARKER_OPERATORS[include_marker],
marker))
if limit:
oq.limit = limit + 1
t = text(str(oq))
result = client.execute(t)
next_marker = None
LOG.debug("result = " + str(result))
for count, row in enumerate(result):
if count >= limit:
break
LOG.debug("user = " + str(row))
mysql_user = models.MySQLUser()
mysql_user.name = row['User']
mysql_user.host = row['Host']
self._associate_dbs(mysql_user)
next_marker = row['Marker']
users.append(mysql_user.serialize())
if result.rowcount <= limit:
next_marker = None
LOG.debug("users = " + str(users))
return users, next_marker
def revoke_access(self, username, hostname, database):
"""Revoke a user's permission to use a given database."""
user = self._get_user(username, hostname)
with LocalSqlClient(get_engine()) as client:
r = sql_query.Revoke(database=database,
user=user.name,
host=user.host)
t = text(str(r))
client.execute(t)
def list_access(self, username, hostname):
"""Show all the databases to which the user has more than
USAGE granted.
"""
user = self._get_user(username, hostname)
return user.databases
class KeepAliveConnection(interfaces.PoolListener):
"""
A connection pool listener that ensures live connections are returned
from the connection pool at checkout. This alleviates the problem of
MySQL connections timing out.
"""
def checkout(self, dbapi_con, con_record, con_proxy):
"""Event triggered when a connection is checked out from the pool."""
try:
try:
dbapi_con.ping(False)
except TypeError:
dbapi_con.ping()
except dbapi_con.OperationalError as ex:
if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
raise exc.DisconnectionError()
else:
raise
class MySqlApp(object):
"""Prepares DBaaS on a Guest container."""
TIME_OUT = 1000
def __init__(self, status):
"""By default login with root no password for initial setup."""
self.state_change_wait_time = CONF.state_change_wait_time
self.status = status
def _create_admin_user(self, client, password):
"""
        Create an os_admin user with a random password
with all privileges similar to the root user.
"""
localhost = "localhost"
g = sql_query.Grant(permissions='ALL', user=ADMIN_USER_NAME,
host=localhost, grant_option=True, clear=password)
t = text(str(g))
client.execute(t)
@staticmethod
def _generate_root_password(client):
"""Generate and set a random root password and forget about it."""
localhost = "localhost"
uu = sql_query.UpdateUser("root", host=localhost,
clear=utils.generate_random_password())
t = text(str(uu))
client.execute(t)
def install_if_needed(self, packages):
"""Prepare the guest machine with a secure
mysql server installation.
"""
LOG.info(_("Preparing Guest as MySQL Server."))
if not packager.pkg_is_installed(packages):
LOG.debug("Installing MySQL server.")
self._clear_mysql_config()
# set blank password on pkg configuration stage
pkg_opts = {'root_password': '',
'root_password_again': ''}
packager.pkg_install(packages, pkg_opts, self.TIME_OUT)
self._create_mysql_confd_dir()
LOG.info(_("Finished installing MySQL server."))
self.start_mysql()
def complete_install_or_restart(self):
self.status.end_install_or_restart()
def secure(self, config_contents, overrides):
LOG.info(_("Generating admin password."))
admin_password = utils.generate_random_password()
clear_expired_password()
engine = sqlalchemy.create_engine("mysql://root:@localhost:3306",
echo=True)
with LocalSqlClient(engine) as client:
self._remove_anonymous_user(client)
self._create_admin_user(client, admin_password)
self.stop_db()
self._write_mycnf(admin_password, config_contents, overrides)
self.start_mysql()
LOG.debug("MySQL secure complete.")
def secure_root(self, secure_remote_root=True):
with LocalSqlClient(get_engine()) as client:
LOG.info(_("Preserving root access from restore."))
self._generate_root_password(client)
if secure_remote_root:
self._remove_remote_root_access(client)
def _clear_mysql_config(self):
"""Clear old configs, which can be incompatible with new version."""
LOG.debug("Clearing old MySQL config.")
random_uuid = str(uuid.uuid4())
configs = ["/etc/my.cnf", "/etc/mysql/conf.d", "/etc/mysql/my.cnf"]
for config in configs:
command = "mv %s %s_%s" % (config, config, random_uuid)
try:
utils.execute_with_timeout(command, shell=True,
root_helper="sudo")
LOG.debug("%s saved to %s_%s." %
(config, config, random_uuid))
except exception.ProcessExecutionError:
pass
def _create_mysql_confd_dir(self):
conf_dir = "/etc/mysql/conf.d"
LOG.debug("Creating %s." % conf_dir)
command = "sudo mkdir -p %s" % conf_dir
utils.execute_with_timeout(command, shell=True)
def _enable_mysql_on_boot(self):
LOG.debug("Enabling MySQL on boot.")
try:
mysql_service = operating_system.service_discovery(
MYSQL_SERVICE_CANDIDATES)
utils.execute_with_timeout(mysql_service['cmd_enable'], shell=True)
except KeyError:
LOG.exception(_("Error enabling MySQL start on boot."))
raise RuntimeError("Service is not discovered.")
def _disable_mysql_on_boot(self):
try:
mysql_service = operating_system.service_discovery(
MYSQL_SERVICE_CANDIDATES)
utils.execute_with_timeout(mysql_service['cmd_disable'],
shell=True)
except KeyError:
LOG.exception(_("Error disabling MySQL start on boot."))
raise RuntimeError("Service is not discovered.")
def stop_db(self, update_db=False, do_not_start_on_reboot=False):
LOG.info(_("Stopping MySQL."))
if do_not_start_on_reboot:
self._disable_mysql_on_boot()
try:
mysql_service = operating_system.service_discovery(
MYSQL_SERVICE_CANDIDATES)
utils.execute_with_timeout(mysql_service['cmd_stop'], shell=True)
except KeyError:
LOG.exception(_("Error stopping MySQL."))
raise RuntimeError("Service is not discovered.")
if not self.status.wait_for_real_status_to_change_to(
rd_instance.ServiceStatuses.SHUTDOWN,
self.state_change_wait_time, update_db):
LOG.error(_("Could not stop MySQL."))
self.status.end_install_or_restart()
raise RuntimeError("Could not stop MySQL!")
def _remove_anonymous_user(self, client):
t = text(sql_query.REMOVE_ANON)
client.execute(t)
def _remove_remote_root_access(self, client):
t = text(sql_query.REMOVE_ROOT)
client.execute(t)
def restart(self):
try:
self.status.begin_restart()
self.stop_db()
self.start_mysql()
finally:
self.status.end_install_or_restart()
def update_overrides(self, override_values):
"""
This function will update the MySQL overrides.cnf file
if there is content to write.
:param override_values:
:return:
"""
if override_values:
LOG.debug("Writing new overrides.cnf config file.")
self._write_config_overrides(override_values)
def apply_overrides(self, overrides):
LOG.debug("Applying overrides to MySQL.")
with LocalSqlClient(get_engine()) as client:
LOG.debug("Updating override values in running MySQL.")
for k, v in overrides.iteritems():
q = sql_query.SetServerVariable(key=k, value=v)
t = text(str(q))
try:
client.execute(t)
except exc.OperationalError:
output = {'key': k, 'value': v}
LOG.exception(_("Unable to set %(key)s with value "
"%(value)s.") % output)
def make_read_only(self, read_only):
with LocalSqlClient(get_engine()) as client:
q = "set global read_only = %s" % read_only
client.execute(text(str(q)))
def _write_temp_mycnf_with_admin_account(self, original_file_path,
temp_file_path, password):
mycnf_file = open(original_file_path, 'r')
tmp_file = open(temp_file_path, 'w')
for line in mycnf_file:
tmp_file.write(line)
if "[client]" in line:
tmp_file.write("user\t\t= %s\n" % ADMIN_USER_NAME)
tmp_file.write("password\t= %s\n" % password)
mycnf_file.close()
tmp_file.close()
def wipe_ib_logfiles(self):
"""Destroys the iblogfiles.
If for some reason the selected log size in the conf changes from the
current size of the files MySQL will fail to start, so we delete the
files to be safe.
"""
LOG.info(_("Wiping ib_logfiles."))
for index in range(2):
try:
# On restarts, sometimes these are wiped. So it can be a race
# to have MySQL start up before it's restarted and these have
                # to be deleted. That's why it's ok if they aren't found and
# that is why we use the "-f" option to "rm".
(utils.
execute_with_timeout("sudo", "rm", "-f", "%s/ib_logfile%d"
% (MYSQL_BASE_DIR, index)))
except exception.ProcessExecutionError:
LOG.exception("Could not delete logfile.")
raise
def _write_mycnf(self, admin_password, config_contents, overrides=None):
"""
Install the set of mysql my.cnf templates.
Update the os_admin user and password to the my.cnf
file for direct login from localhost.
"""
LOG.info(_("Writing my.cnf templates."))
if admin_password is None:
admin_password = get_auth_password()
try:
with open(TMP_MYCNF, 'w') as t:
t.write(config_contents)
utils.execute_with_timeout("sudo", "mv", TMP_MYCNF,
MYSQL_CONFIG)
self._write_temp_mycnf_with_admin_account(MYSQL_CONFIG,
TMP_MYCNF,
admin_password)
utils.execute_with_timeout("sudo", "mv", TMP_MYCNF,
MYSQL_CONFIG)
except Exception:
os.unlink(TMP_MYCNF)
raise
self.wipe_ib_logfiles()
# write configuration file overrides
if overrides:
self._write_config_overrides(overrides)
def _write_config_overrides(self, overrideValues):
LOG.info(_("Writing new temp overrides.cnf file."))
with open(MYCNF_OVERRIDES_TMP, 'w') as overrides:
overrides.write(overrideValues)
LOG.info(_("Moving overrides.cnf into correct location."))
utils.execute_with_timeout("sudo", "mv", MYCNF_OVERRIDES_TMP,
MYCNF_OVERRIDES)
LOG.info(_("Setting permissions on overrides.cnf."))
utils.execute_with_timeout("sudo", "chmod", "0644",
MYCNF_OVERRIDES)
def remove_overrides(self):
LOG.info(_("Removing overrides configuration file."))
if os.path.exists(MYCNF_OVERRIDES):
utils.execute_with_timeout("sudo", "rm", MYCNF_OVERRIDES)
def _write_replication_overrides(self, overrideValues, cnf_file):
LOG.info(_("Writing replication.cnf file."))
with open(MYCNF_REPLCONFIG_TMP, 'w') as overrides:
overrides.write(overrideValues)
LOG.debug("Moving temp replication.cnf into correct location.")
utils.execute_with_timeout("sudo", "mv", MYCNF_REPLCONFIG_TMP,
cnf_file)
LOG.debug("Setting permissions on replication.cnf.")
utils.execute_with_timeout("sudo", "chmod", "0644", cnf_file)
def _remove_replication_overrides(self, cnf_file):
LOG.info(_("Removing replication configuration file."))
if os.path.exists(cnf_file):
utils.execute_with_timeout("sudo", "rm", cnf_file)
def exists_replication_source_overrides(self):
return os.path.exists(MYCNF_REPLMASTER)
def write_replication_source_overrides(self, overrideValues):
self._write_replication_overrides(overrideValues, MYCNF_REPLMASTER)
def write_replication_replica_overrides(self, overrideValues):
self._write_replication_overrides(overrideValues, MYCNF_REPLSLAVE)
def remove_replication_source_overrides(self):
self._remove_replication_overrides(MYCNF_REPLMASTER)
def remove_replication_replica_overrides(self):
self._remove_replication_overrides(MYCNF_REPLSLAVE)
def grant_replication_privilege(self, replication_user):
LOG.info(_("Granting Replication Slave privilege."))
LOG.debug("grant_replication_privilege: %s" % replication_user)
with LocalSqlClient(get_engine()) as client:
g = sql_query.Grant(permissions=['REPLICATION SLAVE'],
user=replication_user['name'],
clear=replication_user['password'])
t = text(str(g))
client.execute(t)
def get_port(self):
with LocalSqlClient(get_engine()) as client:
result = client.execute('SELECT @@port').first()
return result[0]
def get_binlog_position(self):
with LocalSqlClient(get_engine()) as client:
result = client.execute('SHOW MASTER STATUS').first()
binlog_position = {
'log_file': result['File'],
'position': result['Position']
}
return binlog_position
def execute_on_client(self, sql_statement):
LOG.debug("Executing SQL: %s" % sql_statement)
with LocalSqlClient(get_engine()) as client:
return client.execute(sql_statement)
def start_slave(self):
LOG.info(_("Starting slave replication."))
with LocalSqlClient(get_engine()) as client:
client.execute('START SLAVE')
self._wait_for_slave_status("ON", client, 60)
def stop_slave(self, for_failover):
replication_user = None
LOG.info(_("Stopping slave replication."))
with LocalSqlClient(get_engine()) as client:
result = client.execute('SHOW SLAVE STATUS')
replication_user = result.first()['Master_User']
client.execute('STOP SLAVE')
client.execute('RESET SLAVE ALL')
self._wait_for_slave_status("OFF", client, 30)
if not for_failover:
client.execute('DROP USER ' + replication_user)
return {
'replication_user': replication_user
}
def stop_master(self):
LOG.info(_("Stopping replication master."))
with LocalSqlClient(get_engine()) as client:
client.execute('RESET MASTER')
def _wait_for_slave_status(self, status, client, max_time):
def verify_slave_status():
actual_status = client.execute(
"SHOW GLOBAL STATUS like 'slave_running'").first()[1]
return actual_status.upper() == status.upper()
LOG.debug("Waiting for SLAVE_RUNNING to change to %s.", status)
try:
utils.poll_until(verify_slave_status, sleep_time=3,
time_out=max_time)
LOG.info(_("Replication is now %s.") % status.lower())
except PollTimeOut:
raise RuntimeError(
_("Replication is not %(status)s after %(max)d seconds.") % {
'status': status.lower(), 'max': max_time})
def start_mysql(self, update_db=False):
LOG.info(_("Starting MySQL."))
# This is the site of all the trouble in the restart tests.
# Essentially what happens is that mysql start fails, but does not
        # die. It is then impossible to kill the original, so if startup fails
        # we fall back to killing any stalled mysql process by hand below.
self._enable_mysql_on_boot()
try:
mysql_service = operating_system.service_discovery(
MYSQL_SERVICE_CANDIDATES)
utils.execute_with_timeout(mysql_service['cmd_start'], shell=True)
except KeyError:
raise RuntimeError("Service is not discovered.")
except exception.ProcessExecutionError:
# it seems mysql (percona, at least) might come back with [Fail]
# but actually come up ok. we're looking into the timing issue on
# parallel, but for now, we'd like to give it one more chance to
# come up. so regardless of the execute_with_timeout() response,
# we'll assume mysql comes up and check it's status for a while.
pass
if not self.status.wait_for_real_status_to_change_to(
rd_instance.ServiceStatuses.RUNNING,
self.state_change_wait_time, update_db):
LOG.error(_("Start up of MySQL failed."))
# If it won't start, but won't die either, kill it by hand so we
            # don't let a rogue process wander around.
try:
utils.execute_with_timeout("sudo", "pkill", "-9", "mysql")
except exception.ProcessExecutionError:
LOG.exception(_("Error killing stalled MySQL start command."))
# There's nothing more we can do...
self.status.end_install_or_restart()
raise RuntimeError("Could not start MySQL!")
def start_db_with_conf_changes(self, config_contents):
LOG.info(_("Starting MySQL with conf changes."))
LOG.debug("Inside the guest - Status is_running = (%s)."
% self.status.is_running)
if self.status.is_running:
LOG.error(_("Cannot execute start_db_with_conf_changes because "
"MySQL state == %s.") % self.status)
raise RuntimeError("MySQL not stopped.")
LOG.info(_("Resetting configuration."))
self._write_mycnf(None, config_contents)
self.start_mysql(True)
def reset_configuration(self, configuration):
config_contents = configuration['config_contents']
LOG.info(_("Resetting configuration."))
self._write_mycnf(None, config_contents)
    # DEPRECATED: Maintain for API Compatibility
def get_txn_count(self):
LOG.info(_("Retrieving latest txn id."))
txn_count = 0
with LocalSqlClient(get_engine()) as client:
result = client.execute('SELECT @@global.gtid_executed').first()
for uuid_set in result[0].split(','):
for interval in uuid_set.split(':')[1:]:
if '-' in interval:
iparts = interval.split('-')
txn_count += int(iparts[1]) - int(iparts[0])
else:
txn_count += 1
return txn_count
def _get_slave_status(self):
with LocalSqlClient(get_engine()) as client:
return client.execute('SHOW SLAVE STATUS').first()
def _get_master_UUID(self):
slave_status = self._get_slave_status()
return slave_status and slave_status['Master_UUID'] or None
def _get_gtid_executed(self):
with LocalSqlClient(get_engine()) as client:
return client.execute('SELECT @@global.gtid_executed').first()[0]
def get_last_txn(self):
master_UUID = self._get_master_UUID()
last_txn_id = '0'
gtid_executed = self._get_gtid_executed()
for gtid_set in gtid_executed.split(','):
uuid_set = gtid_set.split(':')
if uuid_set[0] == master_UUID:
last_txn_id = uuid_set[-1].split('-')[-1]
break
return master_UUID, int(last_txn_id)
def get_latest_txn_id(self):
LOG.info(_("Retrieving latest txn id."))
return self._get_gtid_executed()
def wait_for_txn(self, txn):
LOG.info(_("Waiting on txn '%s'.") % txn)
with LocalSqlClient(get_engine()) as client:
client.execute("SELECT WAIT_UNTIL_SQL_THREAD_AFTER_GTIDS('%s')"
% txn)
class MySqlRootAccess(object):
@classmethod
def is_root_enabled(cls):
"""Return True if root access is enabled; False otherwise."""
with LocalSqlClient(get_engine()) as client:
t = text(sql_query.ROOT_ENABLED)
result = client.execute(t)
LOG.debug("Found %s with remote root access." % result.rowcount)
return result.rowcount != 0
@classmethod
def enable_root(cls, root_password=None):
"""Enable the root user global access and/or
reset the root password.
"""
user = models.RootUser()
user.name = "root"
user.host = "%"
user.password = root_password or utils.generate_random_password()
with LocalSqlClient(get_engine()) as client:
print(client)
try:
cu = sql_query.CreateUser(user.name, host=user.host)
t = text(str(cu))
client.execute(t, **cu.keyArgs)
except exc.OperationalError as err:
# Ignore, user is already created, just reset the password
# TODO(rnirmal): More fine grained error checking later on
LOG.debug(err)
with LocalSqlClient(get_engine()) as client:
print(client)
uu = sql_query.UpdateUser(user.name, host=user.host,
clear=user.password)
t = text(str(uu))
client.execute(t)
LOG.debug("CONF.root_grant: %s CONF.root_grant_option: %s." %
(CONF.root_grant, CONF.root_grant_option))
g = sql_query.Grant(permissions=CONF.root_grant,
user=user.name,
host=user.host,
grant_option=CONF.root_grant_option,
clear=user.password)
t = text(str(g))
client.execute(t)
return user.serialize()
| apache-2.0 | 5,361,696,649,788,965,000 | 39.263352 | 79 | 0.55196 | false |
sigma-random/pwndbg | pwndbg/commands/nearpc.py | 1 | 2744 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from capstone import *
import pwndbg.arguments
import pwndbg.color
import pwndbg.disasm
import pwndbg.disasm.color
import pwndbg.functions
import pwndbg.ida
import pwndbg.regs
import pwndbg.strings
import pwndbg.symbol
import pwndbg.ui
import pwndbg.vmmap
@pwndbg.commands.ParsedCommand
@pwndbg.commands.OnlyWhenRunning
def nearpc(pc=None, lines=None, to_string=False):
"""
Disassemble near a specified address.
"""
# Fix the case where we only have one argument, and
# it's a small value.
if lines is None and (pc is None or int(pc) < 0x100):
lines = pc
pc = None
if pc is None:
pc = pwndbg.regs.pc
if lines is None:
lines = 5
pc = int(pc)
lines = int(lines)
result = []
instructions = pwndbg.disasm.near(pc, lines)
# In case $pc is in a new map we don't know about,
# this will trigger an exploratory search.
pwndbg.vmmap.find(pc)
# Find all of the symbols for the addresses
symbols = []
for i in instructions:
symbol = pwndbg.symbol.get(i.address)
if symbol:
symbol = '<%s> ' % symbol
symbols.append(symbol)
    # Find the longest symbol name so we can adjust the column width
if symbols:
longest_sym = max(map(len, symbols))
else:
longest_sym = ''
# Pad them all out
for i,s in enumerate(symbols):
symbols[i] = s.ljust(longest_sym)
prev = None
# Print out each instruction
for i,s in zip(instructions, symbols):
asm = pwndbg.disasm.color.instruction(i)
prefix = ' =>' if i.address == pc else ' '
pre = pwndbg.ida.Anterior(i.address)
if pre:
result.append(pwndbg.color.bold(pre))
line = ' '.join((prefix, "%#x" % i.address, s or '', asm))
# If there was a branch before this instruction which was not
# contiguous, put in some ellipses.
if prev and prev.address + prev.size != i.address:
result.append('...')
# Otherwise if it's a branch and it *is* contiguous, just put
        # an empty line.
elif prev and any(g in prev.groups for g in (CS_GRP_CALL, CS_GRP_JUMP, CS_GRP_RET)):
result.append('')
result.append(line)
# For call instructions, attempt to resolve the target and
# determine the number of arguments.
for arg, value in pwndbg.arguments.get(i):
code = False if arg.type == 'char' else True
pretty = pwndbg.chain.format(value, code=code)
result.append('%8s%-10s %s' % ('',arg.name+':', pretty))
prev = i
if not to_string:
print('\n'.join(result))
return result
| mit | 3,652,252,772,521,421,000 | 25.901961 | 92 | 0.603863 | false |
eLRuLL/scrapy | tests/mockserver.py | 1 | 7989 | import json
import os
import random
import sys
from subprocess import Popen, PIPE
from urllib.parse import urlencode
from OpenSSL import SSL
from twisted.web.server import Site, NOT_DONE_YET
from twisted.web.resource import Resource
from twisted.web.static import File
from twisted.web.test.test_webclient import PayloadResource
from twisted.web.server import GzipEncoderFactory
from twisted.web.resource import EncodingResourceWrapper
from twisted.web.util import redirectTo
from twisted.internet import reactor, ssl
from twisted.internet.task import deferLater
from scrapy.utils.python import to_bytes, to_unicode
from scrapy.utils.ssl import SSL_OP_NO_TLSv1_3
def getarg(request, name, default=None, type=None):
if name in request.args:
value = request.args[name][0]
if type is not None:
value = type(value)
return value
else:
return default
class LeafResource(Resource):
isLeaf = True
def deferRequest(self, request, delay, f, *a, **kw):
def _cancelrequest(_):
# silence CancelledError
d.addErrback(lambda _: None)
d.cancel()
d = deferLater(reactor, delay, f, *a, **kw)
request.notifyFinish().addErrback(_cancelrequest)
return d
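# Resource behind /follow: renders a page of links back to /follow so crawls can
# fan out. Query args read via getarg() below: total, show, order (desc|rand),
# maxlatency, n.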
class Follow(LeafResource):
def render(self, request):
total = getarg(request, b"total", 100, type=int)
show = getarg(request, b"show", 1, type=int)
order = getarg(request, b"order", b"desc")
maxlatency = getarg(request, b"maxlatency", 0, type=float)
n = getarg(request, b"n", total, type=int)
if order == b"rand":
nlist = [random.randint(1, total) for _ in range(show)]
else: # order == "desc"
nlist = range(n, max(n - show, 0), -1)
lag = random.random() * maxlatency
self.deferRequest(request, lag, self.renderRequest, request, nlist)
return NOT_DONE_YET
def renderRequest(self, request, nlist):
s = """<html> <head></head> <body>"""
args = request.args.copy()
for nl in nlist:
args[b"n"] = [to_bytes(str(nl))]
argstr = urlencode(args, doseq=True)
s += "<a href='/follow?%s'>follow %d</a><br>" % (argstr, nl)
s += """</body>"""
request.write(to_bytes(s))
request.finish()
class Delay(LeafResource):
def render_GET(self, request):
n = getarg(request, b"n", 1, type=float)
b = getarg(request, b"b", 1, type=int)
if b:
# send headers now and delay body
            request.write(b'')
self.deferRequest(request, n, self._delayedRender, request, n)
return NOT_DONE_YET
def _delayedRender(self, request, n):
request.write(to_bytes("Response delayed for %0.3f seconds\n" % n))
request.finish()
class Status(LeafResource):
def render_GET(self, request):
n = getarg(request, b"n", 200, type=int)
request.setResponseCode(n)
return b""
class Raw(LeafResource):
def render_GET(self, request):
request.startedWriting = 1
self.deferRequest(request, 0, self._delayedRender, request)
return NOT_DONE_YET
render_POST = render_GET
def _delayedRender(self, request):
raw = getarg(request, b'raw', b'HTTP 1.1 200 OK\n')
request.startedWriting = 1
request.write(raw)
request.channel.transport.loseConnection()
request.finish()
class Echo(LeafResource):
def render_GET(self, request):
output = {
'headers': dict(
(to_unicode(k), [to_unicode(v) for v in vs])
for k, vs in request.requestHeaders.getAllRawHeaders()),
'body': to_unicode(request.content.read()),
}
return to_bytes(json.dumps(output))
render_POST = render_GET
class RedirectTo(LeafResource):
def render(self, request):
goto = getarg(request, b'goto', b'/')
# we force the body content, otherwise Twisted redirectTo()
# returns HTML with <meta http-equiv="refresh"
redirectTo(goto, request)
return b'redirecting...'
class Partial(LeafResource):
def render_GET(self, request):
request.setHeader(b"Content-Length", b"1024")
self.deferRequest(request, 0, self._delayedRender, request)
return NOT_DONE_YET
def _delayedRender(self, request):
request.write(b"partial content\n")
request.finish()
class Drop(Partial):
def _delayedRender(self, request):
abort = getarg(request, b"abort", 0, type=int)
request.write(b"this connection will be dropped\n")
tr = request.channel.transport
try:
if abort and hasattr(tr, 'abortConnection'):
tr.abortConnection()
else:
tr.loseConnection()
finally:
request.finish()
class ArbitraryLengthPayloadResource(LeafResource):
def render(self, request):
return request.content.read()
class Root(Resource):
def __init__(self):
Resource.__init__(self)
self.putChild(b"status", Status())
self.putChild(b"follow", Follow())
self.putChild(b"delay", Delay())
self.putChild(b"partial", Partial())
self.putChild(b"drop", Drop())
self.putChild(b"raw", Raw())
self.putChild(b"echo", Echo())
self.putChild(b"payload", PayloadResource())
self.putChild(b"xpayload", EncodingResourceWrapper(PayloadResource(), [GzipEncoderFactory()]))
self.putChild(b"alpayload", ArbitraryLengthPayloadResource())
try:
from tests import tests_datadir
self.putChild(b"files", File(os.path.join(tests_datadir, 'test_site/files/')))
except Exception:
pass
self.putChild(b"redirect-to", RedirectTo())
def getChild(self, name, request):
return self
def render(self, request):
return b'Scrapy mock HTTP server\n'
class MockServer():
def __enter__(self):
from scrapy.utils.test import get_testenv
self.proc = Popen([sys.executable, '-u', '-m', 'tests.mockserver'],
stdout=PIPE, env=get_testenv())
http_address = self.proc.stdout.readline().strip().decode('ascii')
https_address = self.proc.stdout.readline().strip().decode('ascii')
self.http_address = http_address
self.https_address = https_address
return self
def __exit__(self, exc_type, exc_value, traceback):
self.proc.kill()
self.proc.communicate()
def url(self, path, is_secure=False):
host = self.http_address.replace('0.0.0.0', '127.0.0.1')
if is_secure:
host = self.https_address
return host + path
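# Minimal usage sketch (hypothetical test code, not part of this module):
#
#     with MockServer() as mockserver:
#         url = mockserver.url("/status?n=503")
#         # ...point an HTTP client or spider at `url`...
#
# __enter__ launches this module as a subprocess and reads the printed HTTP and
# HTTPS addresses back from its stdout.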
def ssl_context_factory(keyfile='keys/localhost.key', certfile='keys/localhost.crt', cipher_string=None):
factory = ssl.DefaultOpenSSLContextFactory(
os.path.join(os.path.dirname(__file__), keyfile),
os.path.join(os.path.dirname(__file__), certfile),
)
if cipher_string:
ctx = factory.getContext()
# disabling TLS1.2+ because it unconditionally enables some strong ciphers
ctx.set_options(SSL.OP_CIPHER_SERVER_PREFERENCE | SSL.OP_NO_TLSv1_2 | SSL_OP_NO_TLSv1_3)
ctx.set_cipher_list(to_bytes(cipher_string))
return factory
if __name__ == "__main__":
root = Root()
factory = Site(root)
httpPort = reactor.listenTCP(0, factory)
contextFactory = ssl_context_factory()
httpsPort = reactor.listenSSL(0, factory, contextFactory)
def print_listening():
httpHost = httpPort.getHost()
httpsHost = httpsPort.getHost()
httpAddress = 'http://%s:%d' % (httpHost.host, httpHost.port)
httpsAddress = 'https://%s:%d' % (httpsHost.host, httpsHost.port)
print(httpAddress)
print(httpsAddress)
reactor.callWhenRunning(print_listening)
reactor.run()
| bsd-3-clause | 8,276,427,149,703,918,000 | 30.207031 | 105 | 0.619477 | false |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/PyKDE4/kdeui/KWidgetItemDelegate.py | 1 | 1056 | # encoding: utf-8
# module PyKDE4.kdeui
# from /usr/lib/python2.7/dist-packages/PyKDE4/kdeui.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdecore as __PyKDE4_kdecore
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
import PyQt4.QtSvg as __PyQt4_QtSvg
class KWidgetItemDelegate(__PyQt4_QtGui.QAbstractItemDelegate):
# no doc
def blockedEventTypes(self, *args, **kwargs): # real signature unknown
pass
def createItemWidgets(self, *args, **kwargs): # real signature unknown
pass
def focusedIndex(self, *args, **kwargs): # real signature unknown
pass
def itemView(self, *args, **kwargs): # real signature unknown
pass
def paintWidgets(self, *args, **kwargs): # real signature unknown
pass
def setBlockedEventTypes(self, *args, **kwargs): # real signature unknown
pass
def updateItemWidgets(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
| gpl-2.0 | -2,606,785,844,807,714,000 | 25.4 | 77 | 0.676136 | false |
dstufft/fastly-py | tests/test_core.py | 1 | 1153 | # Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from fastly.auth import KeyAuth, SessionAuth
from fastly.core import Fastly
def test_fastly_key():
api = Fastly("1234")
assert isinstance(api._session.auth, KeyAuth)
assert api._session.auth.key == "1234"
def test_fastly_session():
api = Fastly("[email protected]", "password")
assert isinstance(api._session.auth, SessionAuth)
assert api._session.auth.user == "[email protected]"
assert api._session.auth.password == "password"
assert api._session.auth.session is api._session
| apache-2.0 | -7,275,991,855,456,668,000 | 33.939394 | 74 | 0.740676 | false |
zuun77/givemegoogletshirts | codejam/2019/1B/q1.py | 1 | 1278 | import collections
def solve(case, P, Q, people):
ver, hor = {}, {}
s, n, e, w = 0, 0, 0, 0
for p in people:
x, y, d = p
if d == 'S':
if y in ver: ver[y] = (ver[y][0]+1, ver[y][1])
else: ver[y] = (1, 0)
s += 1
elif d == 'N':
if y in ver: ver[y] = (ver[y][0], ver[y][1]+1)
else: ver[y] = (0, 1)
n += 1
elif d == 'W':
if x in hor: hor[x] = (hor[x][0]+1, hor[x][1])
else: hor[x] = (1, 0)
e += 1
else:
if x in hor: hor[x] = (hor[x][0], hor[x][1]+1)
else: hor[x] = (0, 1)
w += 1
X, Y = getMaxCord(w, hor), getMaxCord(s, ver)
print("Case #{}: {} {}".format(case, X, Y))
def getMaxCord(n, dic):
X, maxV = 0, n
wcnt, ecnt = n, 0
for i in range(100001):
if i in dic: wcnt -= dic[i][0]
cnt = wcnt + ecnt
if cnt > maxV:
X = i
maxV = cnt
if i in dic: ecnt += dic[i][1]
return X
for case in range(1, eval(input()) + 1):
P, Q = map(int, input().split())
people = []
for i in range(P):
p = input().split()
people.append((int(p[0]), int(p[1]), p[2]))
solve(case, P, Q, people)
| apache-2.0 | -4,067,940,558,325,939,000 | 25.625 | 58 | 0.400626 | false |
digital-anvil/djangocms-owl | runtests.py | 1 | 1180 | import sys
try:
from django.conf import settings
settings.configure(
DEBUG=True,
USE_TZ=True,
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
ROOT_URLCONF="djangocms_owl.urls",
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"djangocms_owl",
],
SITE_ID=1,
NOSE_ARGS=['-s'],
MIDDLEWARE_CLASSES=(),
)
try:
import django
setup = django.setup
except AttributeError:
pass
else:
setup()
from django_nose import NoseTestSuiteRunner
except ImportError:
import traceback
traceback.print_exc()
raise ImportError("To fix this error, run: pip install -r requirements-test.txt")
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
failures = test_runner.run_tests(test_args)
if failures:
sys.exit(failures)
if __name__ == '__main__':
run_tests(*sys.argv[1:])
| bsd-3-clause | -1,932,248,877,800,464,400 | 20.454545 | 85 | 0.55678 | false |
jimbotonic/df_nlp | step1/prepare_data2.py | 1 | 2591 | #
# This file is part of DF.
#
# DF is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation;
# either version 3 of the License, or (at your option) any
# later version.
#
# DF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public
# License along with DF; see the file COPYING. If not
# see <http://www.gnu.org/licenses/>.
#
# Copyright (C) 2014-2019 Jimmy Dubuisson <[email protected]>
#
from __future__ import division
from utils import *
from lemmatizer import *
from igraph import Graph
from numpy import dot
import cPickle as pickle
if __name__ == '__main__':
rgx = '\w+'
#punct = '\',.!?'
min_length = 3
# min number of occurences
min_occ = 3
# max frequency (between 0 and 1)
max_freq = 1
# min number of tokens
min_size = 100
# max number of tokens
max_size = 1000
# folder path
data_dir = sys.argv[1]
pickle_dir1 = sys.argv[2]
pickle_dir2 = sys.argv[3]
# collocation metrics instance to be used
#cmetrics = CollocationMetrics(CollocationMetrics.decreasing_exp,(1,1),CollocationMetrics.do_nothing,())
cmetrics = CollocationMetrics(CollocationMetrics.decreasing_exp,(1,1),CollocationMetrics.information,())
# batch replace arrays
vold = ['</?blog>','</?Blog>','</?post>','<date>.*</date>','nbsp','urlLink']
vnew = ['','','','','','']
fnames = FileUtils.get_files_list(data_dir)
counter = 1
max_count = 2000
success_count = 0
for p in fnames:
if success_count == max_count:
break
print counter, '- Tokenizing: ', p
counter += 1
txt = FileUtils.read_text_file(data_dir + '/' + p)
txt = FileUtils.batch_replace(txt,vold,vnew)
doc = DocStats(txt, rgx, min_length, min_occ, max_freq, cmetrics)
print '# tokens: ', len(doc.token_set)
if len(doc.token_set) >= min_size and len(doc.token_set) <= max_size:
mat = doc.get_collocation_mat()
print '# rows: ', mat.dim
print '# nnz entries: ', mat.vmat.nnz
if mat:
success_count += 1
pickle.dump(doc.token_stats, open(pickle_dir1 + '/' + p.replace('.xml','') + ".p", "wb"), pickle.HIGHEST_PROTOCOL)
pickle.dump(mat, open(pickle_dir2 + '/' + p.replace('.xml','') + ".p", "wb"), pickle.HIGHEST_PROTOCOL)
print '---'
| gpl-2.0 | 8,706,524,835,819,565,000 | 33.092105 | 118 | 0.650714 | false |
renesugar/arrow | python/pyarrow/pandas_compat.py | 1 | 37193 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import ast
import json
import operator
import re
import warnings
import numpy as np
import six
import pyarrow as pa
from pyarrow.lib import _pandas_api
from pyarrow.compat import (builtin_pickle, # noqa
PY2, zip_longest, Sequence, u_utf8)
_logical_type_map = {}
def get_logical_type_map():
global _logical_type_map
if not _logical_type_map:
_logical_type_map.update({
pa.lib.Type_NA: 'empty',
pa.lib.Type_BOOL: 'bool',
pa.lib.Type_INT8: 'int8',
pa.lib.Type_INT16: 'int16',
pa.lib.Type_INT32: 'int32',
pa.lib.Type_INT64: 'int64',
pa.lib.Type_UINT8: 'uint8',
pa.lib.Type_UINT16: 'uint16',
pa.lib.Type_UINT32: 'uint32',
pa.lib.Type_UINT64: 'uint64',
pa.lib.Type_HALF_FLOAT: 'float16',
pa.lib.Type_FLOAT: 'float32',
pa.lib.Type_DOUBLE: 'float64',
pa.lib.Type_DATE32: 'date',
pa.lib.Type_DATE64: 'date',
pa.lib.Type_TIME32: 'time',
pa.lib.Type_TIME64: 'time',
pa.lib.Type_BINARY: 'bytes',
pa.lib.Type_FIXED_SIZE_BINARY: 'bytes',
pa.lib.Type_STRING: 'unicode',
})
return _logical_type_map
def get_logical_type(arrow_type):
logical_type_map = get_logical_type_map()
try:
return logical_type_map[arrow_type.id]
except KeyError:
if isinstance(arrow_type, pa.lib.DictionaryType):
return 'categorical'
elif isinstance(arrow_type, pa.lib.ListType):
return 'list[{}]'.format(get_logical_type(arrow_type.value_type))
elif isinstance(arrow_type, pa.lib.TimestampType):
return 'datetimetz' if arrow_type.tz is not None else 'datetime'
elif isinstance(arrow_type, pa.lib.Decimal128Type):
return 'decimal'
return 'object'
_numpy_logical_type_map = {
np.bool_: 'bool',
np.int8: 'int8',
np.int16: 'int16',
np.int32: 'int32',
np.int64: 'int64',
np.uint8: 'uint8',
np.uint16: 'uint16',
np.uint32: 'uint32',
np.uint64: 'uint64',
np.float32: 'float32',
np.float64: 'float64',
'datetime64[D]': 'date',
np.unicode_: 'string' if not PY2 else 'unicode',
np.bytes_: 'bytes' if not PY2 else 'string',
}
def get_logical_type_from_numpy(pandas_collection):
try:
return _numpy_logical_type_map[pandas_collection.dtype.type]
except KeyError:
if hasattr(pandas_collection.dtype, 'tz'):
return 'datetimetz'
# See https://github.com/pandas-dev/pandas/issues/24739
if str(pandas_collection.dtype) == 'datetime64[ns]':
return 'datetime64[ns]'
result = _pandas_api.infer_dtype(pandas_collection)
if result == 'string':
return 'bytes' if PY2 else 'unicode'
return result
def get_extension_dtype_info(column):
dtype = column.dtype
if str(dtype) == 'category':
cats = getattr(column, 'cat', column)
assert cats is not None
metadata = {
'num_categories': len(cats.categories),
'ordered': cats.ordered,
}
physical_dtype = str(cats.codes.dtype)
elif hasattr(dtype, 'tz'):
metadata = {'timezone': pa.lib.tzinfo_to_string(dtype.tz)}
physical_dtype = 'datetime64[ns]'
else:
metadata = None
physical_dtype = str(dtype)
return physical_dtype, metadata
def get_column_metadata(column, name, arrow_type, field_name):
"""Construct the metadata for a given column
Parameters
----------
column : pandas.Series or pandas.Index
name : str
arrow_type : pyarrow.DataType
field_name : str
Equivalent to `name` when `column` is a `Series`, otherwise if `column`
is a pandas Index then `field_name` will not be the same as `name`.
This is the name of the field in the arrow Table's schema.
Returns
-------
dict
"""
logical_type = get_logical_type(arrow_type)
string_dtype, extra_metadata = get_extension_dtype_info(column)
if logical_type == 'decimal':
extra_metadata = {
'precision': arrow_type.precision,
'scale': arrow_type.scale,
}
string_dtype = 'object'
if name is not None and not isinstance(name, six.string_types):
raise TypeError(
'Column name must be a string. Got column {} of type {}'.format(
name, type(name).__name__
)
)
assert field_name is None or isinstance(field_name, six.string_types), \
str(type(field_name))
return {
'name': name,
'field_name': 'None' if field_name is None else field_name,
'pandas_type': logical_type,
'numpy_type': string_dtype,
'metadata': extra_metadata,
}
def construct_metadata(df, column_names, index_levels, index_descriptors,
preserve_index, types):
"""Returns a dictionary containing enough metadata to reconstruct a pandas
DataFrame as an Arrow Table, including index columns.
Parameters
----------
    df : pandas.DataFrame
    column_names : List[str]
index_levels : List[pd.Index]
index_descriptors : List[Dict]
preserve_index : bool
types : List[pyarrow.DataType]
Returns
-------
dict
"""
num_serialized_index_levels = len([descr for descr in index_descriptors
if not isinstance(descr, dict)])
# Use ntypes instead of Python shorthand notation [:-len(x)] as [:-0]
# behaves differently to what we want.
ntypes = len(types)
df_types = types[:ntypes - num_serialized_index_levels]
index_types = types[ntypes - num_serialized_index_levels:]
column_metadata = []
for col_name, sanitized_name, arrow_type in zip(df.columns, column_names,
df_types):
metadata = get_column_metadata(df[col_name], name=sanitized_name,
arrow_type=arrow_type,
field_name=sanitized_name)
column_metadata.append(metadata)
index_column_metadata = []
if preserve_index is not False:
for level, arrow_type, descriptor in zip(index_levels, index_types,
index_descriptors):
if isinstance(descriptor, dict):
# The index is represented in a non-serialized fashion,
# e.g. RangeIndex
continue
metadata = get_column_metadata(level, name=level.name,
arrow_type=arrow_type,
field_name=descriptor)
index_column_metadata.append(metadata)
column_indexes = []
levels = getattr(df.columns, 'levels', [df.columns])
names = getattr(df.columns, 'names', [df.columns.name])
for level, name in zip(levels, names):
metadata = _get_simple_index_descriptor(level, name)
column_indexes.append(metadata)
else:
index_descriptors = index_column_metadata = column_indexes = []
return {
b'pandas': json.dumps({
'index_columns': index_descriptors,
'column_indexes': column_indexes,
'columns': column_metadata + index_column_metadata,
'creator': {
'library': 'pyarrow',
'version': pa.__version__
},
'pandas_version': _pandas_api.version
}).encode('utf8')
}
def _get_simple_index_descriptor(level, name):
string_dtype, extra_metadata = get_extension_dtype_info(level)
pandas_type = get_logical_type_from_numpy(level)
if 'mixed' in pandas_type:
warnings.warn(
"The DataFrame has column names of mixed type. They will be "
"converted to strings and not roundtrip correctly.",
UserWarning, stacklevel=4)
if pandas_type == 'unicode':
assert not extra_metadata
extra_metadata = {'encoding': 'UTF-8'}
return {
'name': name,
'field_name': name,
'pandas_type': pandas_type,
'numpy_type': string_dtype,
'metadata': extra_metadata,
}
def _column_name_to_strings(name):
"""Convert a column name (or level) to either a string or a recursive
collection of strings.
Parameters
----------
name : str or tuple
Returns
-------
value : str or tuple
Examples
--------
>>> name = 'foo'
>>> _column_name_to_strings(name)
'foo'
>>> name = ('foo', 'bar')
>>> _column_name_to_strings(name)
('foo', 'bar')
>>> import pandas as pd
>>> name = (1, pd.Timestamp('2017-02-01 00:00:00'))
>>> _column_name_to_strings(name)
('1', '2017-02-01 00:00:00')
"""
if isinstance(name, six.string_types):
return name
elif isinstance(name, six.binary_type):
# XXX: should we assume that bytes in Python 3 are UTF-8?
return name.decode('utf8')
elif isinstance(name, tuple):
return str(tuple(map(_column_name_to_strings, name)))
elif isinstance(name, Sequence):
raise TypeError("Unsupported type for MultiIndex level")
elif name is None:
return None
return str(name)
def _index_level_name(index, i, column_names):
"""Return the name of an index level or a default name if `index.name` is
None or is already a column name.
Parameters
----------
index : pandas.Index
i : int
Returns
-------
name : str
"""
if index.name is not None and index.name not in column_names:
return index.name
else:
return '__index_level_{:d}__'.format(i)
def _get_columns_to_convert(df, schema, preserve_index, columns):
columns = _resolve_columns_of_interest(df, schema, columns)
if not df.columns.is_unique:
raise ValueError(
'Duplicate column names found: {}'.format(list(df.columns))
)
if schema is not None:
return _get_columns_to_convert_given_schema(df, schema, preserve_index)
column_names = []
index_levels = (
_get_index_level_values(df.index) if preserve_index is not False
else []
)
columns_to_convert = []
convert_fields = []
for name in columns:
col = df[name]
name = _column_name_to_strings(name)
if _pandas_api.is_sparse(col):
raise TypeError(
"Sparse pandas data (column {}) not supported.".format(name))
columns_to_convert.append(col)
convert_fields.append(None)
column_names.append(name)
index_descriptors = []
index_column_names = []
for i, index_level in enumerate(index_levels):
name = _index_level_name(index_level, i, column_names)
if (isinstance(index_level, _pandas_api.pd.RangeIndex)
and preserve_index is None):
descr = _get_range_index_descriptor(index_level)
else:
columns_to_convert.append(index_level)
convert_fields.append(None)
descr = name
index_column_names.append(name)
index_descriptors.append(descr)
all_names = column_names + index_column_names
# all_names : all of the columns in the resulting table including the data
# columns and serialized index columns
# column_names : the names of the data columns
# index_column_names : the names of the serialized index columns
# index_descriptors : descriptions of each index to be used for
# reconstruction
# index_levels : the extracted index level values
# columns_to_convert : assembled raw data (both data columns and indexes)
# to be converted to Arrow format
# columns_fields : specified column to use for coercion / casting
# during serialization, if a Schema was provided
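    # Illustration (hypothetical inputs, not from this module): for
    #   df = pd.DataFrame({'a': [1, 2]}, index=pd.Index(['x', 'y'], name='idx'))
    # and preserve_index=True, this returns roughly
    #   all_names=['a', 'idx'], column_names=['a'], index_column_names=['idx'],
    #   index_descriptors=['idx'], index_levels=[df.index],
    #   columns_to_convert=[df['a'], df.index], convert_fields=[None, None].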
return (all_names, column_names, index_column_names, index_descriptors,
index_levels, columns_to_convert, convert_fields)
def _get_columns_to_convert_given_schema(df, schema, preserve_index):
"""
Specialized version of _get_columns_to_convert in case a Schema is
specified.
In that case, the Schema is used as the single point of truth for the
table structure (types, which columns are included, order of columns, ...).
"""
column_names = []
columns_to_convert = []
convert_fields = []
index_descriptors = []
index_column_names = []
index_levels = []
for name in schema.names:
try:
col = df[name]
is_index = False
except KeyError:
if preserve_index is not False and name in df.index.names:
col = df.index.get_level_values(name)
if (preserve_index is None and
isinstance(col, _pandas_api.pd.RangeIndex)):
raise ValueError(
"name '{}' is present in the schema, but it is a "
"RangeIndex which will not be converted as a column "
"in the Table, but saved as metadata-only not in "
"columns. Specify 'preserve_index=True' to force it "
"being added as a column, or remove it from the "
"specified schema".format(name))
is_index = True
else:
raise KeyError(
"name '{}' present in the specified schema is not found "
"in the columns or index".format(name))
name = _column_name_to_strings(name)
if _pandas_api.is_sparse(col):
raise TypeError(
"Sparse pandas data (column {}) not supported.".format(name))
field = schema.field(name)
columns_to_convert.append(col)
convert_fields.append(field)
column_names.append(name)
if is_index:
index_column_names.append(name)
index_descriptors.append(name)
index_levels.append(col)
all_names = column_names + index_column_names
return (all_names, column_names, index_column_names, index_descriptors,
index_levels, columns_to_convert, convert_fields)
def _get_range_index_descriptor(level):
# public start/stop/step attributes added in pandas 0.25.0
return {
'kind': 'range',
'name': level.name,
'start': _pandas_api.get_rangeindex_attribute(level, 'start'),
'stop': _pandas_api.get_rangeindex_attribute(level, 'stop'),
'step': _pandas_api.get_rangeindex_attribute(level, 'step')
}
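# For example (hypothetical values): pd.RangeIndex(start=0, stop=10, step=2, name='idx')
# is described as {'kind': 'range', 'name': 'idx', 'start': 0, 'stop': 10, 'step': 2}
# and stored in the pandas metadata instead of being materialized as a column.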
def _get_index_level_values(index):
n = len(getattr(index, 'levels', [index]))
return [index.get_level_values(i) for i in range(n)]
def _resolve_columns_of_interest(df, schema, columns):
if schema is not None and columns is not None:
raise ValueError('Schema and columns arguments are mutually '
'exclusive, pass only one of them')
elif schema is not None:
columns = schema.names
elif columns is not None:
columns = [c for c in columns if c in df.columns]
else:
columns = df.columns
return columns
def dataframe_to_types(df, preserve_index, columns=None):
(all_names,
column_names,
_,
index_descriptors,
index_columns,
columns_to_convert,
_) = _get_columns_to_convert(df, None, preserve_index, columns)
types = []
# If pandas knows type, skip conversion
for c in columns_to_convert:
values = c.values
if _pandas_api.is_categorical(values):
type_ = pa.array(c, from_pandas=True).type
else:
values, type_ = get_datetimetz_type(values, c.dtype, None)
type_ = pa.lib._ndarray_to_arrow_type(values, type_)
if type_ is None:
type_ = pa.array(c, from_pandas=True).type
types.append(type_)
metadata = construct_metadata(df, column_names, index_columns,
index_descriptors, preserve_index, types)
return all_names, types, metadata
def dataframe_to_arrays(df, schema, preserve_index, nthreads=1, columns=None,
safe=True):
(all_names,
column_names,
index_column_names,
index_descriptors,
index_columns,
columns_to_convert,
convert_fields) = _get_columns_to_convert(df, schema, preserve_index,
columns)
# NOTE(wesm): If nthreads=None, then we use a heuristic to decide whether
# using a thread pool is worth it. Currently the heuristic is whether the
# nrows > 100 * ncols.
if nthreads is None:
nrows, ncols = len(df), len(df.columns)
if nrows > ncols * 100:
nthreads = pa.cpu_count()
else:
nthreads = 1
def convert_column(col, field):
if field is None:
field_nullable = True
type_ = None
else:
field_nullable = field.nullable
type_ = field.type
try:
result = pa.array(col, type=type_, from_pandas=True, safe=safe)
except (pa.ArrowInvalid,
pa.ArrowNotImplementedError,
pa.ArrowTypeError) as e:
e.args += ("Conversion failed for column {0!s} with type {1!s}"
.format(col.name, col.dtype),)
raise e
if not field_nullable and result.null_count > 0:
raise ValueError("Field {} was non-nullable but pandas column "
"had {} null values".format(str(field),
result.null_count))
return result
if nthreads == 1:
arrays = [convert_column(c, f)
for c, f in zip(columns_to_convert, convert_fields)]
else:
from concurrent import futures
with futures.ThreadPoolExecutor(nthreads) as executor:
arrays = list(executor.map(convert_column, columns_to_convert,
convert_fields))
types = [x.type for x in arrays]
if schema is None:
fields = []
for name, type_ in zip(all_names, types):
name = name if name is not None else 'None'
fields.append(pa.field(name, type_))
schema = pa.schema(fields)
metadata = construct_metadata(df, column_names, index_columns,
index_descriptors, preserve_index,
types)
schema = schema.with_metadata(metadata)
return arrays, schema
def get_datetimetz_type(values, dtype, type_):
if values.dtype.type != np.datetime64:
return values, type_
if _pandas_api.is_datetimetz(dtype) and type_ is None:
# If no user type passed, construct a tz-aware timestamp type
tz = dtype.tz
unit = dtype.unit
type_ = pa.timestamp(unit, tz)
elif type_ is None:
# Trust the NumPy dtype
type_ = pa.from_numpy_dtype(values.dtype)
return values, type_
# ----------------------------------------------------------------------
# Converting pandas.DataFrame to a dict containing only NumPy arrays or other
# objects friendly to pyarrow.serialize
def dataframe_to_serialized_dict(frame):
import pandas.core.internals as _int
block_manager = frame._data
blocks = []
axes = [ax for ax in block_manager.axes]
for block in block_manager.blocks:
values = block.values
block_data = {}
if isinstance(block, _int.DatetimeTZBlock):
block_data['timezone'] = pa.lib.tzinfo_to_string(values.tz)
if hasattr(values, 'values'):
values = values.values
elif isinstance(block, _int.CategoricalBlock):
block_data.update(dictionary=values.categories,
ordered=values.ordered)
values = values.codes
block_data.update(
placement=block.mgr_locs.as_array,
block=values
)
# If we are dealing with an object array, pickle it instead. Note that
# we do not use isinstance here because _int.CategoricalBlock is a
# subclass of _int.ObjectBlock.
if type(block) == _int.ObjectBlock:
block_data['object'] = None
block_data['block'] = builtin_pickle.dumps(
values, protocol=builtin_pickle.HIGHEST_PROTOCOL)
blocks.append(block_data)
return {
'blocks': blocks,
'axes': axes
}
def serialized_dict_to_dataframe(data):
import pandas.core.internals as _int
reconstructed_blocks = [_reconstruct_block(block)
for block in data['blocks']]
block_mgr = _int.BlockManager(reconstructed_blocks, data['axes'])
return _pandas_api.data_frame(block_mgr)
def _reconstruct_block(item):
import pandas.core.internals as _int
# Construct the individual blocks converting dictionary types to pandas
# categorical types and Timestamps-with-timezones types to the proper
# pandas Blocks
block_arr = item.get('block', None)
placement = item['placement']
if 'dictionary' in item:
cat = _pandas_api.categorical_type.from_codes(
block_arr, categories=item['dictionary'],
ordered=item['ordered'])
block = _int.make_block(cat, placement=placement,
klass=_int.CategoricalBlock)
elif 'timezone' in item:
dtype = make_datetimetz(item['timezone'])
block = _int.make_block(block_arr, placement=placement,
klass=_int.DatetimeTZBlock,
dtype=dtype)
elif 'object' in item:
block = _int.make_block(builtin_pickle.loads(block_arr),
placement=placement, klass=_int.ObjectBlock)
elif 'py_array' in item:
arr = item['py_array']
# TODO have mechanism to know a method to create a
# pandas ExtensionArray given the pyarrow type
# Now hardcode here to create a pandas IntegerArray for the example
arr = arr.chunk(0)
buflist = arr.buffers()
data = np.frombuffer(buflist[-1], dtype=arr.type.to_pandas_dtype())[
arr.offset:arr.offset + len(arr)]
bitmask = buflist[0]
if bitmask is not None:
mask = pa.BooleanArray.from_buffers(
pa.bool_(), len(arr), [None, bitmask])
mask = np.asarray(mask)
else:
mask = np.ones(len(arr), dtype=bool)
block_arr = _pandas_api.pd.arrays.IntegerArray(
data.copy(), ~mask, copy=False)
# create ExtensionBlock
block = _int.make_block(block_arr, placement=placement,
klass=_int.ExtensionBlock)
else:
block = _int.make_block(block_arr, placement=placement)
return block
def make_datetimetz(tz):
tz = pa.lib.string_to_tzinfo(tz)
return _pandas_api.datetimetz_type('ns', tz=tz)
# ----------------------------------------------------------------------
# Converting pyarrow.Table efficiently to pandas.DataFrame
def table_to_blockmanager(options, table, categories=None,
extension_columns=None, ignore_metadata=False):
from pandas.core.internals import BlockManager
all_columns = []
column_indexes = []
pandas_metadata = table.schema.pandas_metadata
if not ignore_metadata and pandas_metadata is not None:
all_columns = pandas_metadata['columns']
column_indexes = pandas_metadata.get('column_indexes', [])
index_descriptors = pandas_metadata['index_columns']
table = _add_any_metadata(table, pandas_metadata)
table, index = _reconstruct_index(table, index_descriptors,
all_columns)
else:
index = _pandas_api.pd.RangeIndex(table.num_rows)
_check_data_column_metadata_consistency(all_columns)
blocks = _table_to_blocks(options, table, categories, extension_columns)
columns = _deserialize_column_index(table, all_columns, column_indexes)
axes = [columns, index]
return BlockManager(blocks, axes)
def _check_data_column_metadata_consistency(all_columns):
# It can never be the case in a released version of pyarrow that
# c['name'] is None *and* 'field_name' is not a key in the column metadata,
# because the change to allow c['name'] to be None and the change to add
# 'field_name' are in the same release (0.8.0)
assert all(
(c['name'] is None and 'field_name' in c) or c['name'] is not None
for c in all_columns
)
def _deserialize_column_index(block_table, all_columns, column_indexes):
column_strings = [u_utf8(x) for x in block_table.column_names]
if all_columns:
columns_name_dict = {
c.get('field_name', _column_name_to_strings(c['name'])): c['name']
for c in all_columns
}
columns_values = [
columns_name_dict.get(name, name) for name in column_strings
]
else:
columns_values = column_strings
# If we're passed multiple column indexes then evaluate with
# ast.literal_eval, since the column index values show up as a list of
# tuples
to_pair = ast.literal_eval if len(column_indexes) > 1 else lambda x: (x,)
# Create the column index
# Construct the base index
if not columns_values:
columns = _pandas_api.pd.Index(columns_values)
else:
columns = _pandas_api.pd.MultiIndex.from_tuples(
list(map(to_pair, columns_values)),
names=[col_index['name'] for col_index in column_indexes] or None,
)
# if we're reconstructing the index
if len(column_indexes) > 0:
columns = _reconstruct_columns_from_metadata(columns, column_indexes)
# ARROW-1751: flatten a single level column MultiIndex for pandas 0.21.0
columns = _flatten_single_level_multiindex(columns)
return columns
def _reconstruct_index(table, index_descriptors, all_columns):
# 0. 'field_name' is the name of the column in the arrow Table
# 1. 'name' is the user-facing name of the column, that is, it came from
# pandas
# 2. 'field_name' and 'name' differ for index columns
# 3. We fall back on c['name'] for backwards compatibility
field_name_to_metadata = {
c.get('field_name', c['name']): c
for c in all_columns
}
# Build up a list of index columns and names while removing those columns
# from the original table
index_arrays = []
index_names = []
result_table = table
for descr in index_descriptors:
if isinstance(descr, six.string_types):
result_table, index_level, index_name = _extract_index_level(
table, result_table, descr, field_name_to_metadata)
if index_level is None:
# ARROW-1883: the serialized index column was not found
continue
elif descr['kind'] == 'range':
index_name = descr['name']
index_level = _pandas_api.pd.RangeIndex(descr['start'],
descr['stop'],
step=descr['step'],
name=index_name)
if len(index_level) != len(table):
# Possibly the result of munged metadata
continue
else:
raise ValueError("Unrecognized index kind: {0}"
.format(descr['kind']))
index_arrays.append(index_level)
index_names.append(index_name)
pd = _pandas_api.pd
# Reconstruct the row index
if len(index_arrays) > 1:
index = pd.MultiIndex.from_arrays(index_arrays, names=index_names)
elif len(index_arrays) == 1:
index = index_arrays[0]
if not isinstance(index, pd.Index):
# Box anything that wasn't boxed above
index = pd.Index(index, name=index_names[0])
else:
index = pd.RangeIndex(table.num_rows)
return result_table, index
def _extract_index_level(table, result_table, field_name,
field_name_to_metadata):
logical_name = field_name_to_metadata[field_name]['name']
index_name = _backwards_compatible_index_name(field_name, logical_name)
i = table.schema.get_field_index(field_name)
if i == -1:
# The serialized index column was removed by the user
return table, None, None
pd = _pandas_api.pd
col = table.column(i)
values = col.to_pandas().values
if hasattr(values, 'flags') and not values.flags.writeable:
# ARROW-1054: in pandas 0.19.2, factorize will reject
# non-writeable arrays when calling MultiIndex.from_arrays
values = values.copy()
if isinstance(col.type, pa.lib.TimestampType):
index_level = (pd.Series(values).dt.tz_localize('utc')
.dt.tz_convert(col.type.tz))
else:
index_level = pd.Series(values, dtype=values.dtype)
result_table = result_table.remove_column(
result_table.schema.get_field_index(field_name)
)
return result_table, index_level, index_name
def _backwards_compatible_index_name(raw_name, logical_name):
"""Compute the name of an index column that is compatible with older
versions of :mod:`pyarrow`.
Parameters
----------
raw_name : str
logical_name : str
Returns
-------
result : str
Notes
-----
* Part of :func:`~pyarrow.pandas_compat.table_to_blockmanager`
"""
# Part of table_to_blockmanager
if raw_name == logical_name and _is_generated_index_name(raw_name):
return None
else:
return logical_name
def _is_generated_index_name(name):
pattern = r'^__index_level_\d+__$'
return re.match(pattern, name) is not None
_pandas_logical_type_map = {
'date': 'datetime64[D]',
'datetime': 'datetime64[ns]',
'unicode': np.unicode_,
'bytes': np.bytes_,
'string': np.str_,
'empty': np.object_,
}
def _pandas_type_to_numpy_type(pandas_type):
"""Get the numpy dtype that corresponds to a pandas type.
Parameters
----------
pandas_type : str
The result of a call to pandas.lib.infer_dtype.
Returns
-------
dtype : np.dtype
The dtype that corresponds to `pandas_type`.
"""
try:
return _pandas_logical_type_map[pandas_type]
except KeyError:
if 'mixed' in pandas_type:
# catching 'mixed', 'mixed-integer' and 'mixed-integer-float'
return np.object_
return np.dtype(pandas_type)
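# Example mappings (illustrative only):
#   _pandas_type_to_numpy_type('unicode')        -> np.unicode_
#   _pandas_type_to_numpy_type('mixed-integer')  -> np.object_
#   _pandas_type_to_numpy_type('int64')          -> np.dtype('int64')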
def _get_multiindex_codes(mi):
# compat for pandas < 0.24 (MI labels renamed to codes).
if isinstance(mi, _pandas_api.pd.MultiIndex):
return mi.codes if hasattr(mi, 'codes') else mi.labels
else:
return None
def _reconstruct_columns_from_metadata(columns, column_indexes):
"""Construct a pandas MultiIndex from `columns` and column index metadata
in `column_indexes`.
Parameters
----------
columns : List[pd.Index]
The columns coming from a pyarrow.Table
column_indexes : List[Dict[str, str]]
The column index metadata deserialized from the JSON schema metadata
in a :class:`~pyarrow.Table`.
Returns
-------
result : MultiIndex
The index reconstructed using `column_indexes` metadata with levels of
the correct type.
Notes
-----
* Part of :func:`~pyarrow.pandas_compat.table_to_blockmanager`
"""
pd = _pandas_api.pd
# Get levels and labels, and provide sane defaults if the index has a
# single level to avoid if/else spaghetti.
levels = getattr(columns, 'levels', None) or [columns]
labels = _get_multiindex_codes(columns) or [
pd.RangeIndex(len(level)) for level in levels
]
# Convert each level to the dtype provided in the metadata
levels_dtypes = [
(level, col_index.get('pandas_type', str(level.dtype)))
for level, col_index in zip_longest(
levels, column_indexes, fillvalue={}
)
]
new_levels = []
encoder = operator.methodcaller('encode', 'UTF-8')
for level, pandas_dtype in levels_dtypes:
dtype = _pandas_type_to_numpy_type(pandas_dtype)
# Since our metadata is UTF-8 encoded, Python turns things that were
# bytes into unicode strings when json.loads-ing them. We need to
# convert them back to bytes to preserve metadata.
if dtype == np.bytes_:
level = level.map(encoder)
elif level.dtype != dtype:
level = level.astype(dtype)
new_levels.append(level)
return pd.MultiIndex(new_levels, labels, names=columns.names)
def _table_to_blocks(options, block_table, categories, extension_columns):
# Part of table_to_blockmanager
# Convert an arrow table to Block from the internal pandas API
result = pa.lib.table_to_blocks(options, block_table, categories,
extension_columns)
# Defined above
return [_reconstruct_block(item) for item in result]
def _flatten_single_level_multiindex(index):
pd = _pandas_api.pd
if isinstance(index, pd.MultiIndex) and index.nlevels == 1:
levels, = index.levels
labels, = _get_multiindex_codes(index)
# Cheaply check that we do not somehow have duplicate column names
if not index.is_unique:
raise ValueError('Found non-unique column index')
return pd.Index([levels[_label] if _label != -1 else None
for _label in labels],
name=index.names[0])
return index
def _add_any_metadata(table, pandas_metadata):
modified_columns = {}
modified_fields = {}
schema = table.schema
index_columns = pandas_metadata['index_columns']
# only take index columns into account if they are an actual table column
index_columns = [idx_col for idx_col in index_columns
if isinstance(idx_col, six.string_types)]
n_index_levels = len(index_columns)
n_columns = len(pandas_metadata['columns']) - n_index_levels
# Add time zones
for i, col_meta in enumerate(pandas_metadata['columns']):
raw_name = col_meta.get('field_name')
if not raw_name:
# deal with metadata written with arrow < 0.8 or fastparquet
raw_name = col_meta['name']
if i >= n_columns:
# index columns
raw_name = index_columns[i - n_columns]
if raw_name is None:
raw_name = 'None'
idx = schema.get_field_index(raw_name)
if idx != -1:
if col_meta['pandas_type'] == 'datetimetz':
col = table[idx]
converted = col.to_pandas()
tz = col_meta['metadata']['timezone']
tz_aware_type = pa.timestamp('ns', tz=tz)
with_metadata = pa.Array.from_pandas(converted,
type=tz_aware_type)
modified_fields[idx] = pa.field(schema[idx].name,
tz_aware_type)
modified_columns[idx] = with_metadata
if len(modified_columns) > 0:
columns = []
fields = []
for i in range(len(table.schema)):
if i in modified_columns:
columns.append(modified_columns[i])
fields.append(modified_fields[i])
else:
columns.append(table[i])
fields.append(table.schema[i])
return pa.Table.from_arrays(columns, schema=pa.schema(fields))
else:
return table
# ----------------------------------------------------------------------
# Helper functions used in lib
def make_tz_aware(series, tz):
"""
Make a datetime64 Series timezone-aware for the given tz
"""
tz = pa.lib.string_to_tzinfo(tz)
series = (series.dt.tz_localize('utc')
.dt.tz_convert(tz))
return series
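# Minimal usage sketch (comment added, not in the original file):
#   s = pd.Series(pd.date_range('2017-01-01', periods=3))
#   make_tz_aware(s, 'Europe/Paris')   # localize to UTC, then convert to the tz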
| apache-2.0 | -6,731,303,660,057,483,000 | 33.247698 | 79 | 0.591912 | false |
nymoral/euler | p18.py | 1 | 2156 | """
By starting at the top of the triangle below and moving to adjacent numbers on the row below, the maximum total from top to bottom is 23.
3 ->
7 4 ->
2 4 6
8 5 9 3
That is, 3 + 7 + 4 + 9 = 23.
Find the maximum total from top to bottom of the triangle below:
75
95 64
17 47 82
18 35 87 10
20 04 82 47 65
19 01 23 75 03 34
88 02 77 73 07 63 67
99 65 04 28 06 16 70 92
41 41 26 56 83 40 80 70 33
41 48 72 33 47 32 37 16 94 29
53 71 44 65 25 43 91 52 97 51 14
70 11 33 28 77 73 17 78 39 68 17 57
91 71 52 38 17 14 91 43 58 50 27 29 48
63 66 04 68 89 53 67 30 73 16 69 87 40 31
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23
NOTE: As there are only 16384 routes, it is possible to solve this problem by trying every route.
However, Problem 67, is the same challenge with a triangle containing one-hundred rows; it cannot be solved by brute force, and requires a clever method! ;o)
"""
def biggest_sum(triangle):
triangle = triangle.split("\n")
triangle = [t for t in triangle if t != ""]
triangle = [[int(x) for x in t.split()] for t in triangle]
# Flip the triangle upside down and expand each node thus:
# node in lowest level (0 in upside-down one) becomes (node)
# node (j) in others levels (i) becomes (node + max(level[i + 1][j], level[i + 1][j+1])), where we index the original triangle.
# The biggest path sum will be at the top of the original triangle (bottom of the upside-down one)
triangle = triangle[::-1]
for rid, row in enumerate(triangle):
if rid != 0:
for nid, node in enumerate(row):
row[nid] = node + max(triangle[rid - 1][nid], triangle[rid - 1][nid + 1])
#print(row)
return triangle[-1][0]
if __name__ == "__main__":
triangle = """
75
95 64
17 47 82
18 35 87 10
20 04 82 47 65
19 01 23 75 03 34
88 02 77 73 07 63 67
99 65 04 28 06 16 70 92
41 41 26 56 83 40 80 70 33
41 48 72 33 47 32 37 16 94 29
53 71 44 65 25 43 91 52 97 51 14
70 11 33 28 77 73 17 78 39 68 17 57
91 71 52 38 17 14 91 43 58 50 27 29 48
63 66 04 68 89 53 67 30 73 16 69 87 40 31
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23
"""
print(biggest_sum(triangle))
| mit | 1,246,611,544,375,990,300 | 31.179104 | 157 | 0.650742 | false |
MikeLaptev/sandbox_python | mera/practice_with_function/sort_of_list.py | 1 | 1144 | '''
Created on Aug 3, 2015
@author: Mikhail
@summary: Sort list of strings by latest letter
'''
def sort_by_latest_letter(list_of_strings):
"""
>>> sort_by_latest_letter(["abc", "cab", "bca"])
['bca', 'cab', 'abc']
"""
return [sorted_element[::-1] for sorted_element in sorted([element[::-1] for element in list_of_strings])]
def sort_by_latest_letter_with_map(list_of_strings):
"""
>>> sort_by_latest_letter_with_map(["abc", "cab", "bca"])
['bca', 'cab', 'abc']
"""
return map(lambda x: x[::-1], *[sorted(map(lambda a: a[::-1], list_of_strings))])
if __name__ == "__main__":
# acceptance testing
import doctest
print doctest.testmod()
# performance testing
import timeit
first_solution = timeit.timeit(stmt='sort_by_latest_letter(["abc", "cab", "bca"])', setup="from __main__ import sort_by_latest_letter")
second_solution = timeit.timeit(stmt='sort_by_latest_letter_with_map(["abc", "cab", "bca"])', setup="from __main__ import sort_by_latest_letter_with_map")
print "First solution took {}, but second solution took {}".format(first_solution, second_solution) | apache-2.0 | 6,715,663,797,360,264,000 | 34.78125 | 158 | 0.624126 | false |
BV-DR/foamBazar | pythonScripts/gmshScript/geo.py | 1 | 14381 | import numpy as np
import copy
from .misc import *
from .point import Point
from .line import Line
from .surface import Surface
from .volume import Volume
"""
class to handle gmsh geo-file(s)
"""
class extdb(dict):
'''
Extrude database, this is for conveniently accessing dict-keys by calling as attribute
'''
def __getattr__(self, attr):
return self[attr]
class geo(object):
def __init__(self):
'''
GMSH requires users to provide unique ID(s) for point(s), line(s), etc.
and we need to keep track of these ID(s) manually
'''
self.__dict__[Point._ID_NAME] = 0
self.__dict__[Line._ID_NAME] = 0
self.__dict__[Surface._ID_NAME] = 0
self.__dict__[Volume._ID_NAME] = 0
self.__dict__[Point._DB_NAME] = dict()
self.__dict__[Line._DB_NAME] = dict()
self.__dict__[Surface._DB_NAME] = dict()
self.__dict__[Volume._DB_NAME] = dict()
self._EXTRUDE_ID = 0
self._PHYS_IDS = [] # array of physical group id(s)
self._CODE = [
'/* This script was generated using fsMesher.gmshScript */',
'Geometry.OldNewReg=0;'
]
return
# for printing to terminal
def __repr__(self):
quickinfo = "geo(p:"+str(len(getDB(self,Point)))
if self.hasDB(Line):
quickinfo += ",l:" + str(len(getDB(self,Line)))
if self.hasDB(Surface):
quickinfo += ",s:" + str(len(getDB(self,Surface)))
return quickinfo + ")"
def printDB(self):
if not self.hasDB(Point):
print 'no data'
return
self._print_db(getDB(self,Point), prefix='p')
print 'next p:', getIDX(self,Point) + 1
self._print_db(getDB(self,Line), prefix='l')
print 'next l:', getIDX(self,Line) + 1
self._print_db(getDB(self,Surface), prefix='s')
print 'next s:', getIDX(self,Surface) + 1
self._print_db(getDB(self,Volume), prefix='v')
print 'next v:', getIDX(self,Volume) + 1
print
self.printScript()
return
def _print_db(self, db, prefix=''):
idx = sorted(db, key=db.get)
for i in idx:
print prefix + str(db[i]), ':', i
return
def printScript(self):
tmp = self._CODE
for i in tmp:
print i
return
def add(self, obj):
'''
Add a geometrical object to the code ... the actual code is generated in
obj.code(self) where the arg. self is needed for a proper check of id(s)
'''
obj_code = obj.code(self)
if obj_code:
self._CODE.append(obj_code)
self._db_insert(obj)
return
def addPoint(self, x, y, z, lc=None):
p = Point(x,y,z,lc)
self.add(p)
return p
def addLine(self, p0, p1):
l = Line(self,p0,p1)
self.add(l)
return l
def extrude(self, obj, dx, dy, dz, layers=1, opts=None):
'''
Extrude "point|line|surface" along translation axis
'''
# we need the object in a list format
objList = obj if isinstance(obj, list) else [obj]
if len(objList) == 0 or objList[0] is None: return
assert isinstance(dx, (int,long,float))
assert isinstance(dy, (int,long,float))
assert isinstance(dz, (int,long,float))
assert isinstance(layers, (str,int,list,np.ndarray))
        # The layers are defined using two arrays, i.e. Layers {{nElem[]},{nCut[]}}
        # The first array nElem[]={1,1,1,(n elements),1,1,1} defines the number of elements created between each cut.
        # The second array nCut[]={0.1,0.2,(n cuts),...,1} defines the cut locations (normalized), where the last cut must be at 100% i.e. 1
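        # Illustrative example (not from the original script): layers=[1, 2, 4] is
        # normalized by its maximum (4), giving cuts at 0.25, 0.5 and 1.0, i.e.
        # Layers{{1,1,1},{0.25,0.5,1}} - one element between each cut; the exact
        # float formatting depends on Point._FLOAT_TO_STR.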
layers_str='1'
if isinstance(layers, (int, long)):
layers_str=str(layers)
elif isinstance(layers, str):
# user(s) need to provide a valid format here
# e.g: '#n' or '{n,n,n,n}, {float,float,float,1}'
layers_str=layers
elif isinstance(layers, (np.ndarray,list)):
layerList = copy.deepcopy(layers)
layerList.sort()
maxVal = max(layerList) # for normalization
# assume each cut has 1 element, and use only cut locations to control the extrude
nElem_str = ','.join(str(1) for i in layerList)
cut_str = ','.join(Point._FLOAT_TO_STR.format(float(i)/maxVal) for i in layerList)
layers_str = '{' + nElem_str + '},{' + cut_str + '}'
#
# Scan the object list and determine the type
# All element must be of the same type i.e. either Point|Line|Surface
objtype = objList[0].__class__
for i in objList:
if not isinstance(i, objtype):
raise RuntimeError("extrude: all extruded obj must be of the same type")
#
if isinstance(objList[0], Point):
return self._extrude_points(objList, [dx,dy,dz], layers_str, opts=opts)
elif isinstance(objList[0], Line):
return self._extrude_lines(objList, [dx,dy,dz], layers_str, opts=opts)
elif isinstance(objList[0], Surface):
return self._extrude_surfaces(objList, [dx,dy,dz], layers_str, opts=opts)
else:
raise RuntimeError('The object to be extruded must be of type Point|Line|Surface')
return
def hasDB(self,obj):
return bool(getDB(self,obj))
def incIDX(self,obj,n):
self.__dict__[obj._ID_NAME] += n
return
def get(self, obj, idx):
db=getDB(self,obj)
allIdx=db.values()
if not abs(idx) in allIdx: return None
return obj.fromkey(db.keys()[allIdx.index(abs(idx))])
def _create_idx_str(self, objList):
idx = []
for obj in objList:
if not obj.key() in getDB(self,obj):
raise RuntimeError('id not found: ' + str(obj))
idx.append(getDB(self,obj)[obj.key()])
return ','.join(str(i) for i in idx)
def _db_insert(self, obj):
found,idx = exist(self,obj)
self.incIDX(obj,1) # gmsh always keeps incrementing the id by 1 !!!
if not found:
getDB(self,obj)[obj.key()] = getIDX(self,obj)
return True # insert successful
else:
return False # no need to insert, the obj already exists
def _extrude_points(self, pointList, axis, layers, opts=None):
'''
line[] = Extrude{dx, dy, dz} { Point{#ID}; Layers{{1,..(nElem)..,1},{0.1,..(nCut)..,1}}; };
For each point extruded, 1 new point and 1 new line are created
'''
out = extdb({
'newPoints': [],
'newLines': []
})
ok_to_extrude=False
for i in pointList:
newpoint = Point(np.asarray(axis) + i.pos)
if self._db_insert(newpoint): ok_to_extrude=True
newline = Line(self,i,newpoint)
if self._db_insert(newline): ok_to_extrude=True
out['newPoints'].append(newpoint)
out['newLines'].append(newline)
if ok_to_extrude:
idx_str = self._create_idx_str(pointList)
axis_str = ','.join(Point._FLOAT_TO_STR.format(i) for i in axis)
self._EXTRUDE_ID += 1
self._CODE.append(
'ex%d[] = Extrude {%s} { Point{%s}; Layers{%s}; };' %
(self._EXTRUDE_ID, axis_str, idx_str, layers)
)
return out
def _extrude_lines(self, lineList, axis, layers, opts=None):
'''
surface[] = Extrude{dx, dy, dz} { Line{#ID}; Layers{{1,..(nElem)..,1},{0.1,..(nCut)..,1}}; };
For each line extruded, 2 new points, 3 new lines and 1 surface are created
'''
out = extdb({
'newPoints': [],
'newLines': [],
'newSurfaces': []
})
axis_as_nparray = np.asarray(axis)
ok_to_extrude=False
for i in lineList:
# 2 old point(s),
oldPoint0 = self.get(Point, i.pid[0])
oldPoint1 = self.get(Point, i.pid[1])
# 2 new points
newpoint0 = Point(axis_as_nparray + oldPoint0.pos)
newpoint1 = Point(axis_as_nparray + oldPoint1.pos)
# create 3 new lines
if self._db_insert(newpoint0): ok_to_extrude=True
if self._db_insert(newpoint1): ok_to_extrude=True
newline1 = Line(self,newpoint0,newpoint1)
if self._db_insert(newline1): ok_to_extrude=True
#
self.incIDX(Point,2) # stupid gmsh
newline2 = Line(self,oldPoint0,newpoint0)
if self._db_insert(newline2): ok_to_extrude=True
#
self.incIDX(Point,2) # stupid gmsh
newline3 = Line(self,oldPoint1,newpoint1)
if self._db_insert(newline3): ok_to_extrude=True
# create 1 new surface
newsurf = Surface(self,[i,newline3,newline1,newline2])
if self._db_insert(newsurf): ok_to_extrude=True
out['newPoints'].append(newpoint0)
out['newPoints'].append(newpoint1)
out['newLines'].append(newline1)
out['newLines'].append(newline2)
out['newLines'].append(newline3)
out['newSurfaces'].append(newsurf)
if ok_to_extrude:
idx_str = self._create_idx_str(lineList)
axis_str = ','.join(Point._FLOAT_TO_STR.format(i) for i in axis)
opts_str = opts if opts is not None else 'Recombine;'
self._EXTRUDE_ID += 1
self._CODE.append(
'ex%d[] = Extrude {%s} { Line{%s}; Layers{%s}; %s};' %
(self._EXTRUDE_ID, axis_str, idx_str, layers, opts_str)
)
return out
def _extrude_surfaces(self, surfList, axis, layers, opts=None):
'''
volume[] = Extrude{dx, dy, dz} { Surface{#ID}; Layers{{1,..(nElem)..,1},{0.1,..(nCut)..,1}}; };
If the surface has n lines, we will create
n new points,
2*n new lines,
n+1 new surfaces,
and 1 volume
'''
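        # Concrete illustration (comment added, not in the original code): extruding
        # a single quadrilateral surface (n=4 bounding lines) creates 4 new points,
        # 8 new lines, 5 new surfaces (4 sides plus the top) and 1 new volume.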
out = extdb({
'newPoints': [],
'newLines': [],
'newSurfaces': [],
'newVolumes': [],
})
axis_as_nparray = np.asarray(axis)
ok_to_extrude=False
newp=out['newPoints']
newl=out['newLines']
news=out['newSurfaces']
newv=out['newVolumes']
for s in surfList:
# extract ordered surface points
sp=[]
for i in s.lid:
l=self.get(Line, i)
if (i<0):
sp.append(self.get(Point,l.pid[1]))
else:
sp.append(self.get(Point, l.pid[0]))
n = len(sp) # the total number of point(s) on this surface
# create line(s) parallel to old lines
# treat 1st line (stupid gmsh), 2 newp, 1 newl
newp.append(Point(axis_as_nparray + sp[0].pos))
if self._db_insert(newp[-1]): ok_to_extrude=True
newp.append(Point(axis_as_nparray + sp[1].pos))
if self._db_insert(newp[-1]): ok_to_extrude=True
newl.append(Line(self,newp[-2],newp[-1]))
if self._db_insert(newl[-1]): ok_to_extrude=True
# treat internal line(s), 1 newp, 1 newl for each internal line
for i in sp[2:]:
newp.append(Point(axis_as_nparray + i.pos))
self.incIDX(Point,3) # stupid gmsh
if self._db_insert(newp[-1]): ok_to_extrude=True
newl.append(Line(self,newp[-2],newp[-1]))
if self._db_insert(newl[-1]): ok_to_extrude=True
#
# Important Note to myself:
# Do not change self.incIDX(Point,???) before this line
#
# treat last line, no newp, 1 newl
self.incIDX(Point,18) # stupid gmsh
newl.append(Line(self,newp[-1],newp[-n]))
if self._db_insert(newl[-1]): ok_to_extrude=True
# create lines in the extruded direction, n newl
# the first two lines are treated differently (stupid gmsh)
self.incIDX(Line,1) # stupid gmsh
newl.append(Line(self, sp[0], newp[-n]))
if self._db_insert(newl[-1]): ok_to_extrude=True
newl.append(Line(self, sp[1], newp[-n+1]))
if self._db_insert(newl[-1]): ok_to_extrude=True
for i in range(2,n):
self.incIDX(Point,6) # stupid gmsh
self.incIDX(Line,2) # stupid gmsh
newl.append(Line(self, sp[i], newp[-n+i]))
if self._db_insert(newl[-1]): ok_to_extrude=True
#
# Important Note to myself:
# Do not change self.incIDX(Line,???) before this line
#
# create n+1 new surfaces
self.incIDX(Line,3) # stupid gmsh
self.incIDX(Surface,1) # stupid gmsh
for i in range(0,n-1):
news.append(Surface(self,[s.lid[i],newl[-2*n+i],newl[-n+i],newl[-n+i+1]]))
if self._db_insert(news[-1]): ok_to_extrude=True
news.append(Surface(self,[s.lid[-1],newl[-n-1],newl[-n],newl[-1]]))
if self._db_insert(news[-1]): ok_to_extrude=True
lList=[] # last surface
for i in range(0,n): lList.append(newl[-2*n+i])
news.append(Surface(self,lList))
if self._db_insert(news[-1]): ok_to_extrude=True
# create 1 volume
newv.append(Volume(self, [s,news[-1]] + news[-n-1:-1]))
if self._db_insert(newv[-1]): ok_to_extrude=True
if ok_to_extrude:
idx_str = self._create_idx_str(surfList)
axis_str = ','.join(Point._FLOAT_TO_STR.format(i) for i in axis)
opts_str = opts if opts is not None else 'Recombine;'
self._EXTRUDE_ID += 1
self._CODE.append(
'ex%d[] = Extrude {%s} { Surface{%s}; Layers{%s}; %s};' %
(self._EXTRUDE_ID, axis_str, idx_str, layers, opts_str)
)
return out
| gpl-3.0 | -6,198,291,772,516,877,000 | 39.509859 | 137 | 0.527849 | false |
AutorestCI/azure-sdk-for-python | azure-cognitiveservices-vision-contentmoderator/azure/cognitiveservices/vision/contentmoderator/models/classification.py | 1 | 1545 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Classification(Model):
"""The classification details of the text.
:param adult_score: The adult score.
:type adult_score: float
:param racy_score: The racy score.
:type racy_score: float
:param offensive_score: The offensive score.
:type offensive_score: float
:param review_recommended: The review recommended flag.
:type review_recommended: bool
"""
_attribute_map = {
'adult_score': {'key': 'AdultScore', 'type': 'float'},
'racy_score': {'key': 'RacyScore', 'type': 'float'},
'offensive_score': {'key': 'OffensiveScore', 'type': 'float'},
'review_recommended': {'key': 'ReviewRecommended', 'type': 'bool'},
}
def __init__(self, adult_score=None, racy_score=None, offensive_score=None, review_recommended=None):
super(Classification, self).__init__()
self.adult_score = adult_score
self.racy_score = racy_score
self.offensive_score = offensive_score
self.review_recommended = review_recommended
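# Minimal usage sketch (illustrative, not part of the generated SDK file):
#   c = Classification(adult_score=0.01, racy_score=0.02,
#                      offensive_score=0.0, review_recommended=False)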
| mit | -4,682,587,792,800,070,000 | 37.625 | 105 | 0.606472 | false |
Asparagirl/ArchiveBot | pipeline/archivebot/seesaw/wpullargs_test.py | 1 | 2884 | from os import environ as env
import unittest
from .wpull import WpullArgs
from seesaw.item import Item
# taken from pipeline/pipeline.py
if 'WARC_MAX_SIZE' in env:
WARC_MAX_SIZE = env['WARC_MAX_SIZE']
else:
WARC_MAX_SIZE = '5368709120'
def joined(args):
return str.join(' ', args)
class TestWpullArgs(unittest.TestCase):
def setUp(self):
self.item = {
'cookie_jar': '/foobar/cookies.txt',
'ident': 'abc123',
'item_dir': '/foobar',
'url': 'http://www.example.com',
'warc_file_base': '/foobar/warc'
}
self.args = WpullArgs(default_user_agent='Default/1',
wpull_exe='/bin/wpull',
youtube_dl_exe='/usr/bin/youtube-dl',
phantomjs_exe='/usr/bin/phantomjs',
finished_warcs_dir='/lost+found/',
warc_max_size=WARC_MAX_SIZE
)
def test_user_agent_can_be_set(self):
self.item['user_agent'] = 'Frobinator/20.1'
self.assertIn('-U Frobinator/20.1', joined(self.args.realize(self.item)))
def test_youtube_dl_activation(self):
self.item['youtube_dl'] = True
self.assertIn('--youtube-dl', joined(self.args.realize(self.item)))
def test_uses_default_user_agent(self):
self.assertIn('-U Default/1', joined(self.args.realize(self.item)))
def test_recursive_fetch_settings(self):
self.item['recursive'] = True
self.item['depth'] = 'inf'
cmdline = joined(self.args.realize(self.item))
self.assertIn('--recursive', cmdline)
self.assertIn('--level inf', cmdline)
def test_nonrecursive_fetch_settings(self):
self.item['recursive'] = False
cmdline = joined(self.args.realize(self.item))
self.assertNotIn('--recursive', cmdline)
self.assertNotIn('--level inf', cmdline)
def test_recursive_fetch_enables_linked_pages_and_requisites(self):
self.item['recursive'] = True
self.item['depth'] = 'inf'
cmdline = joined(self.args.realize(self.item))
self.assertIn('--span-hosts-allow page-requisites,linked-pages',
cmdline)
def test_recursive_fetch_with_no_offsite_links_enables_requisites(self):
self.item['recursive'] = True
self.item['depth'] = 'inf'
self.item['no_offsite_links'] = True
cmdline = joined(self.args.realize(self.item))
self.assertIn('--span-hosts-allow page-requisites', cmdline)
self.assertNotIn('linked-pages', cmdline)
def test_nonrecursive_fetch_enables_requisites(self):
self.item['recursive'] = False
cmdline = joined(self.args.realize(self.item))
self.assertIn('--span-hosts-allow page-requisites', cmdline)
self.assertNotIn('linked-pages', cmdline)
# vim:ts=4:sw=4:et:tw=78
| mit | -364,133,565,755,845,800 | 30.692308 | 81 | 0.606449 | false |
pfig/CmdrKeen | setup.py | 1 | 1221 | from setuptools import setup, find_packages
def long_description_from_readme():
with open('README.rst') as readme:
return readme.read()
setup(
name="CommanderKeen",
version="0.1",
packages=find_packages(),
scripts=['scripts/keen.py'],
author="Pedro Figueiredo",
author_email="[email protected]",
description="Commander Keen is a Slack bot with long term memory",
long_description=long_description_from_readme(),
license="MIT",
keywords="slack bot chat",
url="https://pfig.github.io/CmdrKeen/",
data_files=[('config', ['cfg/keen.json'])],
setup_requires=['pytest-runner'],
install_requires=[
'slackclient>=0.16',
'websocket-client>=0.35',
'requests>=2.9.1',
'python-daemon>=2.1.1'
],
tests_require=['pytest'],
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Topic :: Communications :: Chat'
]
)
| mit | 3,875,417,983,791,774,000 | 30.307692 | 70 | 0.610975 | false |
Haikson/virtenviro | virtenviro/content/templatetags/page_tags.py | 1 | 4472 | # ~*~ coding: utf-8 ~*~
__author__ = 'Kamo Petrosyan'
from django import template
from django.db.models import Q
from virtenviro.content.models import Snippet, Page, AdditionalField, Menu
from django.template import loader, Context
from virtenviro.utils import *
register = template.Library()
@register.assignment_tag
def additional_field(page, field_name):
try:
additional_field = AdditionalField.objects.get(name=field_name)
field = page.fieldvalue_set.filter(additional_field=additional_field)
if field.count() > 0:
return field[0]
except AdditionalField.DoesNotExist:
return None
@register.simple_tag(takes_context=True)
def render_snippet(context, snippet_name):
try:
snippet = Snippet.objects.get(name=snippet_name)
except Snippet.DoesNotExist:
snippet = None
if snippet.render:
t = loader.get_template_from_string(snippet.code)
res = t.render(Context(context))
return res
return snippet.code
@register.simple_tag(takes_context=True)
def render_content(context, content):
t = loader.get_template_from_string(content)
return t.render(Context(context))
@register.simple_tag(takes_context=True)
def render_field(context, page, field_name):
try:
additional_field = AdditionalField.objects.get(name=field_name)
except AdditionalField.DoesNotExist:
return ''
field = page.fieldvalue_set.filter(additional_field=additional_field)
if additional_field.render:
t = loader.get_template_from_string(field.value)
return t.render(Context(context))
else:
return field.value
@register.assignment_tag(takes_context=True)
def get_pages(context, *args, **kwargs):
parent_id = kwargs.get('parent', 0)
if parent_id == 0:
queryset = Page.objects.filter(parent__isnull=True)
else:
if isinstance(parent_id, int):
try:
parent_node = Page.objects.get(id=parent_id)
except Page.DoesNotExist:
return None
elif isinstance(parent_id, str) or isinstance(parent_id, unicode):
try:
parent_node = Page.objects.get(slug=parent_id)
except Page.DoesNotExist:
return None
level = kwargs.get('level', 1) + 1
queryset = Page.objects.filter(
level__lte=level,
tree_id=parent_node.tree_id,
lft__gte=parent_node.lft,
rght__lte=parent_node.rght)
if not kwargs.get('include_parents', False):
queryset = queryset.exclude(level__lte=parent_node.level)
if kwargs.get('author', False):
queryset = queryset.filter(author=kwargs['author'])
queryset = queryset.order_by(kwargs.get('order', 'id'))
if context['request'].GET.has_key('page'):
rpage = context['request'].GET['page']
else:
rpage = 1
if kwargs.get('limit', False):
queryset = paginate(queryset, rpage, int(kwargs['limit']))
return queryset
@register.assignment_tag(takes_context=True)
def get_content_ml(context, page, lang):
content = page.get_content(language=lang)
return content
@register.assignment_tag
def leaf_pages(root=None, root_id=None, count=0, rnd=False):
if root is None:
if root_id is None:
return []
else:
try:
root = Page.objects.get(pk=root_id)
except Page.DoesNotExist:
return []
nodes = []
m_nodes = root.get_descendants(include_self=False).order_by('-pub_datetime', '-pk')
if rnd:
m_nodes = m_nodes.order_by('?')
if count == 0:
count = m_nodes.count()
for m_node in m_nodes:
if m_node.is_leaf_node():
nodes.append(m_node)
count -= 1
if count == 0:
break
return nodes
@register.assignment_tag
def page_breadcrumb(page):
breadcrumb = [page]
while page.parent:
page = page.parent
breadcrumb.append(page)
breadcrumb.reverse()
return breadcrumb
@register.assignment_tag
def get_page_by_id(page_id):
try:
return Page.objects.get(pk=page_id)
except Page.DoesNotExist:
return None
@register.assignment_tag
def get_menu(sys_name):
try:
menu = Menu.objects.get(sys_name=sys_name)
except Menu.DoesNotExist:
return None
return menu.pagemenurelationship_set.all().order_by('ordering')
| apache-2.0 | -318,434,964,313,289,300 | 28.813333 | 87 | 0.632156 | false |
openstack/zaqar | zaqar/tests/unit/transport/wsgi/v2_0/test_claims.py | 1 | 12577 | # Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
from unittest import mock
import ddt
import falcon
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
from testtools import matchers
from zaqar import tests as testing
from zaqar.tests.unit.transport.wsgi import base
@ddt.ddt
class TestClaimsMongoDB(base.V2Base):
config_file = 'wsgi_mongodb.conf'
@testing.requires_mongodb
def setUp(self):
super(TestClaimsMongoDB, self).setUp()
self.default_claim_ttl = self.boot.transport._defaults.claim_ttl
self.project_id = '737_abc8332832'
self.headers = {
'Client-ID': uuidutils.generate_uuid(),
'X-Project-ID': self.project_id
}
self.queue_path = self.url_prefix + '/queues/fizbit'
self.claims_path = self.queue_path + '/claims'
self.messages_path = self.queue_path + '/messages'
doc = json.dumps({"_ttl": 60})
self.simulate_put(self.queue_path, body=doc, headers=self.headers)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
doc = json.dumps({'messages': [{'body': 239, 'ttl': 300}] * 10})
self.simulate_post(self.queue_path + '/messages',
body=doc, headers=self.headers)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
def tearDown(self):
storage = self.boot.storage._storage
control = self.boot.control
connection = storage.connection
connection.drop_database(control.queues_database)
for db in storage.message_databases:
connection.drop_database(db)
self.simulate_delete(self.queue_path, headers=self.headers)
super(TestClaimsMongoDB, self).tearDown()
@ddt.data('[', '[]', '.', '"fail"')
def test_bad_claim(self, doc):
self.simulate_post(self.claims_path, body=doc, headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
href = self._get_a_claim()
self.simulate_patch(href, body=doc, headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_exceeded_claim(self):
self.simulate_post(self.claims_path,
body='{"ttl": 100, "grace": 60}',
query_string='limit=21', headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data((-1, -1), (59, 60), (60, 59), (60, 43201), (43201, 60))
def test_unacceptable_ttl_or_grace(self, ttl_grace):
ttl, grace = ttl_grace
self.simulate_post(self.claims_path,
body=json.dumps({'ttl': ttl, 'grace': grace}),
headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@ddt.data(-1, 59, 43201)
def test_unacceptable_new_ttl(self, ttl):
href = self._get_a_claim()
self.simulate_patch(href,
body=json.dumps({'ttl': ttl}),
headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_default_ttl_and_grace(self):
self.simulate_post(self.claims_path,
body='{}', headers=self.headers)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
body = self.simulate_get(self.srmock.headers_dict['location'],
headers=self.headers)
claim = jsonutils.loads(body[0])
self.assertEqual(self.default_claim_ttl, claim['ttl'])
def _get_a_claim(self):
doc = '{"ttl": 100, "grace": 60}'
self.simulate_post(self.claims_path, body=doc, headers=self.headers)
return self.srmock.headers_dict['Location']
def test_lifecycle(self):
doc = '{"ttl": 100, "grace": 60}'
# First, claim some messages
body = self.simulate_post(self.claims_path, body=doc,
headers=self.headers)
self.assertEqual(falcon.HTTP_201, self.srmock.status)
claimed = jsonutils.loads(body[0])['messages']
claim_href = self.srmock.headers_dict['Location']
message_href, params = claimed[0]['href'].split('?')
# No more messages to claim
self.simulate_post(self.claims_path, body=doc,
query_string='limit=3', headers=self.headers)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Listing messages, by default, won't include claimed, will echo
body = self.simulate_get(self.messages_path,
headers=self.headers,
query_string="echo=true")
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self._empty_message_list(body)
# Listing messages, by default, won't include claimed, won't echo
body = self.simulate_get(self.messages_path,
headers=self.headers,
query_string="echo=false")
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self._empty_message_list(body)
# List messages, include_claimed, but don't echo
body = self.simulate_get(self.messages_path,
query_string='include_claimed=true'
'&echo=false',
headers=self.headers)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self._empty_message_list(body)
# List messages with a different client-id and echo=false.
# Should return some messages
headers = self.headers.copy()
headers["Client-ID"] = uuidutils.generate_uuid()
body = self.simulate_get(self.messages_path,
query_string='include_claimed=true'
'&echo=false',
headers=headers)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
# Include claimed messages this time, and echo
body = self.simulate_get(self.messages_path,
query_string='include_claimed=true'
'&echo=true',
headers=self.headers)
listed = jsonutils.loads(body[0])
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self.assertEqual(len(claimed), len(listed['messages']))
now = timeutils.utcnow() + datetime.timedelta(seconds=10)
timeutils_utcnow = 'oslo_utils.timeutils.utcnow'
with mock.patch(timeutils_utcnow) as mock_utcnow:
mock_utcnow.return_value = now
body = self.simulate_get(claim_href, headers=self.headers)
claim = jsonutils.loads(body[0])
self.assertEqual(falcon.HTTP_200, self.srmock.status)
self.assertEqual(100, claim['ttl'])
# NOTE(cpp-cabrera): verify that claim age is non-negative
self.assertThat(claim['age'], matchers.GreaterThan(-1))
# Try to delete the message without submitting a claim_id
self.simulate_delete(message_href, headers=self.headers)
self.assertEqual(falcon.HTTP_403, self.srmock.status)
# Delete the message and its associated claim
self.simulate_delete(message_href,
query_string=params, headers=self.headers)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Try to get it from the wrong project
headers = {
'Client-ID': uuidutils.generate_uuid(),
'X-Project-ID': 'bogusproject'
}
self.simulate_get(message_href, query_string=params, headers=headers)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
# Get the message
self.simulate_get(message_href, query_string=params,
headers=self.headers)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
# Update the claim
new_claim_ttl = '{"ttl": 60, "grace": 60}'
creation = timeutils.utcnow()
self.simulate_patch(claim_href, body=new_claim_ttl,
headers=self.headers)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Get the claimed messages (again)
body = self.simulate_get(claim_href, headers=self.headers)
query = timeutils.utcnow()
claim = jsonutils.loads(body[0])
message_href, params = claim['messages'][0]['href'].split('?')
self.assertEqual(60, claim['ttl'])
estimated_age = timeutils.delta_seconds(creation, query)
self.assertGreater(estimated_age, claim['age'])
# Delete the claim
self.simulate_delete(claim['href'], headers=self.headers)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
# Try to delete a message with an invalid claim ID
self.simulate_delete(message_href,
query_string=params, headers=self.headers)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
# Make sure it wasn't deleted!
self.simulate_get(message_href, query_string=params,
headers=self.headers)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
# Try to get a claim that doesn't exist
self.simulate_get(claim['href'], headers=self.headers)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
# Try to update a claim that doesn't exist
self.simulate_patch(claim['href'], body=doc,
headers=self.headers)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
def test_post_claim_nonexistent_queue(self):
path = self.url_prefix + '/queues/nonexistent/claims'
self.simulate_post(path,
body='{"ttl": 100, "grace": 60}',
headers=self.headers)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_get_claim_nonexistent_queue(self):
path = self.url_prefix + '/queues/nonexistent/claims/aaabbbba'
self.simulate_get(path, headers=self.headers)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
# NOTE(cpp-cabrera): regression test against bug #1203842
def test_get_nonexistent_claim_404s(self):
self.simulate_get(self.claims_path + '/a', headers=self.headers)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
def test_delete_nonexistent_claim_204s(self):
self.simulate_delete(self.claims_path + '/a',
headers=self.headers)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
def test_patch_nonexistent_claim_404s(self):
patch_data = json.dumps({'ttl': 100})
self.simulate_patch(self.claims_path + '/a', body=patch_data,
headers=self.headers)
self.assertEqual(falcon.HTTP_404, self.srmock.status)
class TestClaimsFaultyDriver(base.V2BaseFaulty):
config_file = 'wsgi_faulty.conf'
def test_simple(self):
self.project_id = '480924abc_'
self.headers = {
'Client-ID': uuidutils.generate_uuid(),
'X-Project-ID': self.project_id
}
claims_path = self.url_prefix + '/queues/fizbit/claims'
doc = '{"ttl": 100, "grace": 60}'
self.simulate_post(claims_path, body=doc, headers=self.headers)
self.assertEqual(falcon.HTTP_503, self.srmock.status)
self.simulate_get(claims_path + '/nichts', headers=self.headers)
self.assertEqual(falcon.HTTP_503, self.srmock.status)
self.simulate_patch(claims_path + '/nichts', body=doc,
headers=self.headers)
self.assertEqual(falcon.HTTP_503, self.srmock.status)
self.simulate_delete(claims_path + '/foo', headers=self.headers)
self.assertEqual(falcon.HTTP_503, self.srmock.status)
| apache-2.0 | -5,287,691,008,987,485,000 | 38.800633 | 77 | 0.608333 | false |
MehdiSfr/tensor-flow | tensorflow/python/ops/candidate_sampling_ops.py | 1 | 18205 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Wrappers for primitive Neural Net (NN) Operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_candidate_sampling_ops
from tensorflow.python.ops import math_ops
def uniform_candidate_sampler(true_classes, num_true, num_sampled, unique,
range_max, seed=None, name=None):
"""Samples a set of classes using a uniform base distribution.
This operation randomly samples a tensor of sampled classes
(`sampled_candidates`) from the range of integers `[0, range_max]`.
The elements of `sampled_candidates` are drawn without replacement
(if `unique=True`) or with replacement (if `unique=False`) from
the base distribution.
The base distribution for this operation is the uniform distribution
over the range of integers `[0, range_max]`.
In addition, this operation returns tensors `true_expected_count`
and `sampled_expected_count` representing the number of times each
of the target classes (`true_classes`) and the sampled
classes (`sampled_candidates`) is expected to occur in an average
tensor of sampled classes. These values correspond to `Q(y|x)`
defined in [this
document](http://www.tensorflow.org/extras/candidate_sampling.pdf).
If `unique=True`, then these are post-rejection probabilities and we
compute them approximately.
Args:
true_classes: A `Tensor` of type `int64` and shape `[batch_size,
num_true]`. The target classes.
num_true: An `int`. The number of target classes per training example.
num_sampled: An `int`. The number of classes to randomly sample per batch.
unique: A `bool`. Determines whether all sampled classes in a batch are
unique.
range_max: An `int`. The number of possible classes.
seed: An `int`. An operation-specific seed. Default is 0.
name: A name for the operation (optional).
Returns:
sampled_candidates: A tensor of type `int64` and shape `[num_sampled]`.
The sampled classes.
true_expected_count: A tensor of type `float`. Same shape as
`true_classes`. The expected counts under the sampling distribution
of each of `true_classes`.
sampled_expected_count: A tensor of type `float`. Same shape as
`sampled_candidates`. The expected counts under the sampling distribution
of each of `sampled_candidates`.
"""
seed1, seed2 = random_seed.get_seed(seed)
return gen_candidate_sampling_ops._uniform_candidate_sampler(
true_classes, num_true, num_sampled, unique, range_max, seed=seed1,
seed2=seed2, name=name)
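# Illustrative usage sketch (comment added; not part of the original module).
# Assuming the public API is imported as `tf`, one might draw 4 negatives per
# batch from 10 possible classes, without replacement:
#   true_classes = tf.constant([[1, 3]], dtype=tf.int64)
#   sampled, true_expected, sampled_expected = tf.nn.uniform_candidate_sampler(
#       true_classes, num_true=2, num_sampled=4, unique=True, range_max=10)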
def log_uniform_candidate_sampler(true_classes, num_true, num_sampled, unique,
range_max, seed=None, name=None):
"""Samples a set of classes using a log-uniform (Zipfian) base distribution.
This operation randomly samples a tensor of sampled classes
(`sampled_candidates`) from the range of integers `[0, range_max]`.
The elements of `sampled_candidates` are drawn without replacement
(if `unique=True`) or with replacement (if `unique=False`) from
the base distribution.
The base distribution for this operation is an approximately log-uniform
or Zipfian distribution:
`P(class) = (log(class + 2) - log(class + 1)) / log(range_max + 1)`
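  For instance (illustrative numbers), with `range_max = 10` class 0 is sampled
  with probability `(log(2) - log(1)) / log(11) ~= 0.289`, while class 9 only
  gets `(log(11) - log(10)) / log(11) ~= 0.040`.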
This sampler is useful when the target classes approximately follow such
a distribution - for example, if the classes represent words in a lexicon
sorted in decreasing order of frequency. If your classes are not ordered by
decreasing frequency, do not use this op.
In addition, this operation returns tensors `true_expected_count`
and `sampled_expected_count` representing the number of times each
of the target classes (`true_classes`) and the sampled
classes (`sampled_candidates`) is expected to occur in an average
tensor of sampled classes. These values correspond to `Q(y|x)`
defined in [this
document](http://www.tensorflow.org/extras/candidate_sampling.pdf).
If `unique=True`, then these are post-rejection probabilities and we
compute them approximately.
Args:
true_classes: A `Tensor` of type `int64` and shape `[batch_size,
num_true]`. The target classes.
num_true: An `int`. The number of target classes per training example.
num_sampled: An `int`. The number of classes to randomly sample per batch.
unique: A `bool`. Determines whether all sampled classes in a batch are
unique.
range_max: An `int`. The number of possible classes.
seed: An `int`. An operation-specific seed. Default is 0.
name: A name for the operation (optional).
Returns:
sampled_candidates: A tensor of type `int64` and shape `[num_sampled]`.
The sampled classes.
true_expected_count: A tensor of type `float`. Same shape as
`true_classes`. The expected counts under the sampling distribution
of each of `true_classes`.
sampled_expected_count: A tensor of type `float`. Same shape as
`sampled_candidates`. The expected counts under the sampling distribution
of each of `sampled_candidates`.
"""
seed1, seed2 = random_seed.get_seed(seed)
return gen_candidate_sampling_ops._log_uniform_candidate_sampler(
true_classes, num_true, num_sampled, unique, range_max, seed=seed1,
seed2=seed2, name=name)
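# Worked example of the base distribution above (added for clarity; the numbers
# are illustrative): with range_max=5, P(0) = (log(2) - log(1)) / log(6) ~ 0.39
# and P(4) = (log(6) - log(5)) / log(6) ~ 0.10, so lower class ids (the more
# frequent words in a frequency-sorted vocabulary) are sampled more often.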
def learned_unigram_candidate_sampler(true_classes, num_true, num_sampled,
unique, range_max, seed=None, name=None):
"""Samples a set of classes from a distribution learned during training.
This operation randomly samples a tensor of sampled classes
(`sampled_candidates`) from the range of integers `[0, range_max]`.
The elements of `sampled_candidates` are drawn without replacement
(if `unique=True`) or with replacement (if `unique=False`) from
the base distribution.
The base distribution for this operation is constructed on the fly
during training. It is a unigram distribution over the target
classes seen so far during training. Every integer in `[0, range_max]`
begins with a weight of 1, and is incremented by 1 each time it is
seen as a target class. The base distribution is not saved to checkpoints,
so it is reset when the model is reloaded.
In addition, this operation returns tensors `true_expected_count`
and `sampled_expected_count` representing the number of times each
of the target classes (`true_classes`) and the sampled
classes (`sampled_candidates`) is expected to occur in an average
tensor of sampled classes. These values correspond to `Q(y|x)`
defined in [this
document](http://www.tensorflow.org/extras/candidate_sampling.pdf).
If `unique=True`, then these are post-rejection probabilities and we
compute them approximately.
Args:
true_classes: A `Tensor` of type `int64` and shape `[batch_size,
num_true]`. The target classes.
num_true: An `int`. The number of target classes per training example.
num_sampled: An `int`. The number of classes to randomly sample per batch.
unique: A `bool`. Determines whether all sampled classes in a batch are
unique.
range_max: An `int`. The number of possible classes.
seed: An `int`. An operation-specific seed. Default is 0.
name: A name for the operation (optional).
Returns:
sampled_candidates: A tensor of type `int64` and shape `[num_sampled]`.
The sampled classes.
true_expected_count: A tensor of type `float`. Same shape as
`true_classes`. The expected counts under the sampling distribution
of each of `true_classes`.
sampled_expected_count: A tensor of type `float`. Same shape as
`sampled_candidates`. The expected counts under the sampling distribution
of each of `sampled_candidates`.
"""
seed1, seed2 = random_seed.get_seed(seed)
return gen_candidate_sampling_ops._learned_unigram_candidate_sampler(
true_classes, num_true, num_sampled, unique, range_max, seed=seed1,
seed2=seed2, name=name)
def fixed_unigram_candidate_sampler(true_classes, num_true, num_sampled, unique,
range_max, vocab_file='', distortion=0.0,
num_reserved_ids=0, num_shards=1, shard=0,
unigrams=[], seed=None, name=None):
"""Samples a set of classes using the provided (fixed) base distribution.
This operation randomly samples a tensor of sampled classes
(`sampled_candidates`) from the range of integers `[0, range_max]`.
The elements of `sampled_candidates` are drawn without replacement
(if `unique=True`) or with replacement (if `unique=False`) from
the base distribution.
The base distribution is read from a file or passed in as an
in-memory array. There is also an option to skew the distribution by
applying a distortion power to the weights.
In addition, this operation returns tensors `true_expected_count`
and `sampled_expected_count` representing the number of times each
of the target classes (`true_classes`) and the sampled
classes (`sampled_candidates`) is expected to occur in an average
tensor of sampled classes. These values correspond to `Q(y|x)`
defined in [this
document](http://www.tensorflow.org/extras/candidate_sampling.pdf).
If `unique=True`, then these are post-rejection probabilities and we
compute them approximately.
Args:
true_classes: A `Tensor` of type `int64` and shape `[batch_size,
num_true]`. The target classes.
num_true: An `int`. The number of target classes per training example.
num_sampled: An `int`. The number of classes to randomly sample per batch.
unique: A `bool`. Determines whether all sampled classes in a batch are
unique.
range_max: An `int`. The number of possible classes.
vocab_file: Each valid line in this file (which should have a CSV-like
format) corresponds to a valid word ID. IDs are in sequential order,
starting from num_reserved_ids. The last entry in each line is expected
to be a value corresponding to the count or relative probability. Exactly
one of `vocab_file` and `unigrams` needs to be passed to this operation.
distortion: The distortion is used to skew the unigram probability
distribution. Each weight is first raised to the distortion's power
before adding to the internal unigram distribution. As a result,
`distortion = 1.0` gives regular unigram sampling (as defined by the vocab
file), and `distortion = 0.0` gives a uniform distribution.
num_reserved_ids: Optionally some reserved IDs can be added in the range
`[0, num_reserved_ids]` by the users. One use case is that a special
unknown word token is used as ID 0. These IDs will have a sampling
probability of 0.
num_shards: A sampler can be used to sample from a subset of the original
range in order to speed up the whole computation through parallelism. This
parameter (together with `shard`) indicates the number of partitions that
are being used in the overall computation.
shard: A sampler can be used to sample from a subset of the original range
in order to speed up the whole computation through parallelism. This
parameter (together with `num_shards`) indicates the particular partition
number of the operation, when partitioning is being used.
unigrams: A list of unigram counts or probabilities, one per ID in
sequential order. Exactly one of `vocab_file` and `unigrams` should be
passed to this operation.
seed: An `int`. An operation-specific seed. Default is 0.
name: A name for the operation (optional).
Returns:
sampled_candidates: A tensor of type `int64` and shape `[num_sampled]`.
The sampled classes.
true_expected_count: A tensor of type `float`. Same shape as
`true_classes`. The expected counts under the sampling distribution
of each of `true_classes`.
sampled_expected_count: A tensor of type `float`. Same shape as
`sampled_candidates`. The expected counts under the sampling distribution
of each of `sampled_candidates`.
"""
seed1, seed2 = random_seed.get_seed(seed)
return gen_candidate_sampling_ops._fixed_unigram_candidate_sampler(
true_classes, num_true, num_sampled, unique, range_max,
vocab_file=vocab_file, distortion=distortion,
num_reserved_ids=num_reserved_ids, num_shards=num_shards, shard=shard,
unigrams=unigrams, seed=seed1, seed2=seed2, name=name)
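# Illustrative note on `distortion` (added for clarity; the weights are
# hypothetical): with unigrams=[90, 9, 1], distortion=1.0 keeps the raw
# proportions (0.90, 0.09, 0.01), distortion=0.5 renormalizes the square roots
# to roughly (0.70, 0.22, 0.07), and distortion=0.0 gives the uniform
# (1/3, 1/3, 1/3).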
def all_candidate_sampler(true_classes, num_true, num_sampled, unique,
seed=None, name=None):
"""Generate the set of all classes.
Deterministically generates and returns the set of all possible classes.
For testing purposes. There is no need to use this, since you might as
well use full softmax or full logistic regression.
Args:
true_classes: A `Tensor` of type `int64` and shape `[batch_size,
num_true]`. The target classes.
num_true: An `int`. The number of target classes per training example.
num_sampled: An `int`. The number of possible classes.
    unique: A `bool`. Ignored.
seed: An `int`. An operation-specific seed. Default is 0.
name: A name for the operation (optional).
Returns:
sampled_candidates: A tensor of type `int64` and shape `[num_sampled]`.
This operation deterministically returns the entire range
`[0, num_sampled]`.
true_expected_count: A tensor of type `float`. Same shape as
`true_classes`. The expected counts under the sampling distribution
of each of `true_classes`. All returned values are 1.0.
sampled_expected_count: A tensor of type `float`. Same shape as
`sampled_candidates`. The expected counts under the sampling distribution
of each of `sampled_candidates`. All returned values are 1.0.
"""
seed1, seed2 = random_seed.get_seed(seed)
return gen_candidate_sampling_ops._all_candidate_sampler(
true_classes, num_true, num_sampled, unique, seed=seed1, seed2=seed2,
name=name)
def compute_accidental_hits(true_classes, sampled_candidates, num_true,
seed=None, name=None):
"""Compute the position ids in `sampled_candidates` matching `true_classes`.
In Candidate Sampling, this operation facilitates virtually removing
sampled classes which happen to match target classes. This is done
in Sampled Softmax and Sampled Logistic.
See our [Candidate Sampling Algorithms
Reference](http://www.tensorflow.org/extras/candidate_sampling.pdf).
We presuppose that the `sampled_candidates` are unique.
We call it an 'accidental hit' when one of the target classes
matches one of the sampled classes. This operation reports
accidental hits as triples `(index, id, weight)`, where `index`
represents the row number in `true_classes`, `id` represents the
position in `sampled_candidates`, and weight is `-FLOAT_MAX`.
The result of this op should be passed through a `sparse_to_dense`
operation, then added to the logits of the sampled classes. This
removes the contradictory effect of accidentally sampling the true
target classes as noise classes for the same example.
Args:
true_classes: A `Tensor` of type `int64` and shape `[batch_size,
num_true]`. The target classes.
sampled_candidates: A tensor of type `int64` and shape `[num_sampled]`.
The sampled_candidates output of CandidateSampler.
num_true: An `int`. The number of target classes per training example.
seed: An `int`. An operation-specific seed. Default is 0.
name: A name for the operation (optional).
Returns:
indices: A `Tensor` of type `int32` and shape `[num_accidental_hits]`.
Values indicate rows in `true_classes`.
ids: A `Tensor` of type `int64` and shape `[num_accidental_hits]`.
Values indicate positions in `sampled_candidates`.
weights: A `Tensor` of type `float` and shape `[num_accidental_hits]`.
Each value is `-FLOAT_MAX`.
"""
seed1, seed2 = random_seed.get_seed(seed)
return gen_candidate_sampling_ops._compute_accidental_hits(
true_classes, sampled_candidates, num_true, seed=seed1, seed2=seed2,
name=name)
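# Illustrative follow-up sketch (not part of the original module; the tensor
# names are hypothetical): the triples returned above are typically scattered
# into the sampled logits before the loss is computed, e.g.
#
#   indices, ids, weights = compute_accidental_hits(labels, sampled, num_true=1)
#   # Scatter `weights` (-FLOAT_MAX) into a [batch_size, num_sampled] tensor at
#   # positions (indices, ids) via a sparse-to-dense step, then add it to the
#   # sampled logits so accidental true-class hits cannot win the softmax.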
@ops.RegisterShape("AllCandidateSampler")
@ops.RegisterShape("FixedUnigramCandidateSampler")
@ops.RegisterShape("LearnedUnigramCandidateSampler")
@ops.RegisterShape("LogUniformCandidateSampler")
@ops.RegisterShape("ThreadUnsafeUnigramCandidateSampler")
@ops.RegisterShape("UniformCandidateSampler")
def _CandidateSamplerShape(op):
true_classes_shape = op.inputs[0].get_shape().with_rank(2)
batch_size = true_classes_shape[0]
num_sampled = op.get_attr("num_sampled")
num_true = op.get_attr("num_true")
return [tensor_shape.vector(num_sampled),
tensor_shape.matrix(batch_size, num_true),
tensor_shape.vector(num_sampled)]
@ops.RegisterShape("ComputeAccidentalHits")
def _ComputeAccidentalHitsShape(op):
num_true = op.get_attr("num_true")
# Validate that the input shape matches the attrs, even though it
# does not influence the shape of the output.
true_candidates_shape = op.inputs[0].get_shape().merge_with(
tensor_shape.matrix(None, num_true))
output_shape = tensor_shape.vector(None)
return [output_shape] * 3
| apache-2.0 | -4,878,222,035,196,797,000 | 46.408854 | 80 | 0.715737 | false |
xzhang2016/tfagent | setup.py | 1 | 1318 | from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup
def main():
setup(name='tfta',
version='0.0.1',
description='TF Agent',
long_description='TF Agent',
author='Xue Zhang',
author_email='[email protected]',
url='https://github.com/xzhang2016/tfagent',
packages=['tfta','enrichment'],
install_requires=['pysb', 'indra', 'pykqml', 'objectpath', 'rdflib',
'functools32', 'requests', 'lxml',
'pandas', 'suds'],
include_package_data=True,
keywords=['systems', 'biology', 'model', 'pathway', 'assembler',
'nlp', 'mechanism', 'biochemistry'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Chemistry',
'Topic :: Scientific/Engineering :: Mathematics',
],
)
if __name__ == '__main__':
main()
| bsd-2-clause | 5,773,306,868,928,383,000 | 38.939394 | 78 | 0.525797 | false |
mendhak/Kindle-Time-and-Weather | server/weather-script.py | 1 | 6448 | #!/usr/bin/python
# Kindle Weather Display
# Matthew Petroff (http://www.mpetroff.net/)
# September 2012
#
# Owen Bullock - UK Weather - MetOffice - Aug 2013
# Apr 2014 - amended for Wind option
#
# Mendhak - redone for WeatherUnderground API
import json
import urllib2
from xml.dom import minidom
import datetime
import codecs
import os.path
import time
import sys
import os
#
# Weather Underground API Key - unique to me.
#
wuapikey= os.environ.get('WUNDERGROUND_API_KEY') or "2f1126aef047991e"
template = 'weather-script-preprocess_temps.svg'
#
# Legacy MetOffice weather-code table (codes 0-30), kept from the earlier UK
# MetOffice version of this script and no longer referenced; the Weather
# Underground lookups below use `icon_dict` instead.
# ( Wunderground icon reference: https://www.wunderground.com/weather/api/d/docs?d=resources/icon-sets&MR=1 )
#
mapping = [
[0 , 'skc '], # Clear night skc.svg
[1 , 'skc '], # Sunny day skc.svg
[2 , 'sct '], # Partly cloudy (night) sct.svg
[3 , 'sct '], # Partly cloudy (day) sct.svg
[4 , ' '], # Not used -
[5 , 'fg '], # Mist fg.svg
[6 , 'fg '], # Fog fg.svg
[7 , 'bkn '], # Cloudy bkn.svg
[8 , 'ovc '], # Overcast ovc.svg
[9 , 'hi_shwrs'], # Light rain shower (night) hi_shwrs.svg
[10, 'hi_shwrs'], # Light rain shower (day) hi_shwrs.svg
[11, 'hi_shwrs'], # Drizzle hi_shwrs.svg
[12, 'ra1 '], # Light rain ra1.svg
[13, 'ra '], # Heavy rain shower (night) ra.svg
[14, 'ra '], # Heavy rain shower (day) ra.svg
[15, 'ra '], # Heavy rain ra.svg
[16, 'rasn '], # Sleet shower (night) rasn.svg
[17, 'rasn '], # Sleet shower (day) rasn.svg
[18, 'rasn '], # Sleet rasn.svg
[19, 'ip '], # Hail shower (night) ip.svg
[20, 'ip '], # Hail shower (day) ip.svg
[21, 'ip '], # Hail ip.svg
[22, 'sn '], # Light snow shower (night) sn.svg
[23, 'sn '], # Light snow shower (day) sn.svg
[24, 'sn '], # Light snow sn.svg
[25, 'sn '], # Heavy snow shower (night) sn.xvg
[26, 'sn '], # Heavy snow shower (day) sn.svg
[27, 'sn '], # Heavy snow sn.svg
[28, 'tsra '], # Thunder shower (night) tsra.svg
[29, 'tsra '], # Thunder shower (day) tsra.svg
[30, 'tsra '], # Thunder tsra.svg
]
icon_dict={
	'chanceflurries':'sn',
	'chancerain':'hi_shwrs',
	'chancesleet':'rasn',
	'chancesnow':'sn',
	'chancetstorms':'tsra',
	'clear':'skc',
	'cloudy':'bkn',
	'flurries':'sn',
	'fog':'fg',
	'hazy':'fg',
	'mostlycloudy':'ovc',
	'mostlysunny':'skc',
	'partlycloudy':'bkn',
	'partlysunny':'skc',
	'sleet':'rasn',
	'rain':'ra',
	'snow':'sn',
	'sunny':'skc',
	'tstorms':'tsra',
	}
#
# Download and parse weather data - Weather Underground forecast for Reigate, UK
#
weather_json=''
stale=True
if(os.path.isfile(os.getcwd() + "/wunderground.json")):
#Read the contents anyway
with open(os.getcwd() + "/wunderground.json", 'r') as content_file:
weather_json = content_file.read()
stale=time.time() - os.path.getmtime(os.getcwd() + "/wunderground.json") > (12*60*60)
#If old file or file doesn't exist, time to download it
if(stale):
try:
print "Old file, attempting re-download"
url='http://api.wunderground.com/api/' + wuapikey + '/forecast/q/UK/Reigate.json'
weather_json = urllib2.urlopen(url).read()
with open(os.getcwd() + "/wunderground.json", "w") as text_file:
text_file.write(weather_json)
except:
print "FAILED. using previous read"
with open(os.getcwd() + "/wunderground.json", 'r') as content_file:
weather_json = content_file.read()
weatherData = json.loads(weather_json)
icon_one = weatherData['forecast']['simpleforecast']['forecastday'][0]['icon']
high_one = weatherData['forecast']['simpleforecast']['forecastday'][0]['high']['celsius']
low_one = weatherData['forecast']['simpleforecast']['forecastday'][0]['low']['celsius']
day_one = weatherData['forecast']['simpleforecast']['forecastday'][0]['date']['weekday']
icon_two = weatherData['forecast']['simpleforecast']['forecastday'][1]['icon']
high_two = weatherData['forecast']['simpleforecast']['forecastday'][1]['high']['celsius']
low_two = weatherData['forecast']['simpleforecast']['forecastday'][1]['low']['celsius']
day_two = weatherData['forecast']['simpleforecast']['forecastday'][1]['date']['weekday']
icon_three = weatherData['forecast']['simpleforecast']['forecastday'][2]['icon']
high_three = weatherData['forecast']['simpleforecast']['forecastday'][2]['high']['celsius']
low_three = weatherData['forecast']['simpleforecast']['forecastday'][2]['low']['celsius']
day_three = weatherData['forecast']['simpleforecast']['forecastday'][2]['date']['weekday']
print icon_one,low_one,high_one,day_one
print icon_two,low_two,high_two,day_two
print icon_three,low_three,high_three,day_three
dtnow=datetime.datetime.now().strftime("%d-%b %H:%M")
print "NOW:",dtnow
#
# Preprocess SVG
#
# Open SVG to process
output = codecs.open(template , 'r', encoding='utf-8').read()
# Insert weather icons and temperatures
output = output.replace('ICON_ONE',icon_dict[icon_one])
output = output.replace('ICON_TWO',icon_dict[icon_two])
output = output.replace('ICON_THREE',icon_dict[icon_three])
output = output.replace('TIME_NOW',datetime.datetime.now().strftime("%H:%M"))
output = output.replace('HIGH_ONE',high_one)
output = output.replace('HIGH_TWO',high_two)
output = output.replace('HIGH_THREE',high_three)
output = output.replace('LOW_ONE',low_one)
output = output.replace('LOW_TWO',low_two)
output = output.replace('LOW_THREE',low_three)
# Insert current time
# (thanks Jennifer http://www.shatteredhaven.com/2012/11/1347365-kindle-weather-display.html)
output = output.replace('DATE_VALPLACE',str(dtnow))
readableDate = datetime.datetime.now().strftime("%A %B %d")
output = output.replace('TODAY_DATE', str(readableDate))
output = output.replace('DAY_TWO',day_two)
output = output.replace('DAY_THREE',day_three)
# Write output
codecs.open('weather-script-output.svg', 'w', encoding='utf-8').write(output)
| mit | 3,446,430,669,195,974,700 | 34.234973 | 93 | 0.596309 | false |
brittanystoroz/kitsune | kitsune/customercare/tests/test_templates.py | 1 | 4222 | import json
from django.conf import settings
from django.core.cache import cache
from nose.tools import eq_
from pyquery import PyQuery as pq
from kitsune.customercare.replies import REPLIES_DOCUMENT_SLUG
from kitsune.sumo.tests import TestCase
from kitsune.sumo.urlresolvers import reverse
from kitsune.wiki.tests import DocumentFactory, RevisionFactory
CANNED_RESPONSES_WIKI = """
Any initial text above the first H1 should be ignored.
=Category 1=
==Reply 1==
Reply goes here http://example.com/kb-article
==Reply 2==
Another reply here
=Category 2=
==Reply 3==
And another reply
"""
MESSED_UP_CANNED_RESPONSES_WIKI = """
Lal al ala la alaa lala la
==Bogus Reply will be ignored==
==Another bogus one==
Any initial text above the first H1 should be ignored.
=Category 1=
==Reply 1==
Reply goes here http://example.com/kb-article
==Reply 2==
Another reply here [[Bad link]]
==A reply without text==
=Category 2=
==Another reply without text==
==Reply 3==
And another reply
==Another Reply without text==
"""
class CannedResponsesTestCase(TestCase):
"""Canned responses tests."""
def _create_doc(self, content):
# Create the canned responses article.
doc = DocumentFactory(slug=REPLIES_DOCUMENT_SLUG)
rev = RevisionFactory(document=doc, content=content, is_approved=True)
doc.current_revision = rev
doc.save()
def test_list_canned_responses(self):
"""Listing canned responses works as expected."""
# Create the canned responses article.
self._create_doc(CANNED_RESPONSES_WIKI)
r = self.client.get(reverse('customercare.landing'), follow=True)
eq_(200, r.status_code)
doc = pq(r.content)
responses_plain = doc('#accordion').text()
# Verify categories and replies
assert 'Category 1' in responses_plain
assert 'Reply 1' in responses_plain
assert 'Reply goes here' in responses_plain
assert 'Category 2' in responses_plain
assert 'Reply 3' in responses_plain
assert 'And another reply' in responses_plain
# Listing all responses
eq_(3, len(doc('#accordion a.reply-topic')))
def test_list_canned_responses_nondefault_locale(self):
"""Listing canned responses gives all snippets regardless of locale.
"""
# Create the canned responses article.
self._create_doc(CANNED_RESPONSES_WIKI)
r = self.client.get(reverse('customercare.landing', locale='es'),
follow=True)
eq_(200, r.status_code)
doc = pq(r.content)
# Listing all responses, l10n-agnostic (English if not in Verbatim).
eq_(3, len(doc('#accordion a.reply-topic')))
def test_messed_up_canned_responses(self):
"""Make sure we don't blow up if the article is malformed."""
# Create the canned responses article.
self._create_doc(MESSED_UP_CANNED_RESPONSES_WIKI)
r = self.client.get(reverse('customercare.landing'), follow=True)
eq_(200, r.status_code)
doc = pq(r.content)
responses_plain = doc('#accordion').text()
assert 'Category 1' in responses_plain
assert 'Category 2' in responses_plain
class TweetListTestCase(TestCase):
"""Tests for the list of tweets."""
def test_fallback_message(self):
"""Fallback message when there are no tweets."""
r = self.client.get(reverse('customercare.landing'), follow=True)
eq_(200, r.status_code)
doc = pq(r.content)
assert doc('#tweets-wrap .warning-box'), (
'Fallback message is not showing up.')
class StatsTests(TestCase):
"""Tests for the activity and contributors stats."""
def test_contributors(self):
"""Only contributors stats are set."""
with open('kitsune/customercare/tests/stats.json') as f:
json_data = json.load(f)
cache.set(settings.CC_TOP_CONTRIB_CACHE_KEY,
json_data['contributors'],
settings.CC_STATS_CACHE_TIMEOUT)
r = self.client.get(reverse('customercare.landing'), follow=True)
eq_(200, r.status_code)
cache.delete(settings.CC_TOP_CONTRIB_CACHE_KEY)
| bsd-3-clause | -8,958,314,547,248,741,000 | 29.157143 | 78 | 0.657035 | false |
xswxm/MyIoT | devices/system.py | 1 | 1715 | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import threading, os
class CPUTemp:
_lock = threading.RLock()
def __init__(self, id, title, feasible = True):
self.id = id
self.title = title
self.feasible = feasible
self.category = 'Value'
def description(self):
message = {}
message['id'] = self.id
message['title'] = self.title
message['category'] = self.category
message['value'] = self.getValue()
message['feasible'] = self.feasible
return message
def getValue(self):
try:
with CPUTemp._lock:
res = os.popen('vcgencmd measure_temp').readline()
return res.replace("temp=", "").replace("'C\n", "") + " °C"
except Exception as e:
return str(e)
class MemUse:
_lock = threading.RLock()
def __init__(self, id, title, feasible = True):
self.id = id
self.title = title
self.feasible = feasible
self.category = 'Value'
def description(self):
message = {}
message['id'] = self.id
message['title'] = self.title
message['category'] = self.category
message['value'] = self.getValue()
message['feasible'] = self.feasible
return message
def getValue(self):
try:
            with MemUse._lock:
mem = os.popen("cat /proc/meminfo | awk '/Mem/ {print $2}'")
memTotal = int(mem.readline()) / 1000
memFree = int(mem.readline()) / 1000
memUsed = memTotal - memFree
return '{0:d}MB/{1:d}MB'.format(memUsed, memTotal)
except Exception as e:
return str(e)
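# Minimal usage sketch (illustrative only; the id/title values are invented):
#
#   cpu = CPUTemp(id='cpu0', title='CPU Temperature')
#   mem = MemUse(id='mem0', title='Memory Usage')
#   print(cpu.description())  # e.g. {'id': 'cpu0', ..., 'value': '48.3 °C'}
#   print(mem.description())  # e.g. {'id': 'mem0', ..., 'value': '312MB/926MB'}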
| gpl-3.0 | -1,412,676,988,932,031,500 | 31.339623 | 76 | 0.533839 | false |
bfirsh/django-mptt | setup.py | 1 | 2361 | """
Based entirely on Django's own ``setup.py``.
"""
import os
from distutils.command.install import INSTALL_SCHEMES
from distutils.core import setup
def fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join) in a
platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
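# For example (illustrative): on a POSIX system,
# fullsplit('mptt/templatetags/mptt_tags.py') returns
# ['mptt', 'templatetags', 'mptt_tags.py'].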
# Tell distutils to put the data_files in platform-specific installation
# locations. See here for an explanation:
# http://groups.google.com/group/comp.lang.python/browse_thread/thread/35ec7b2fed36eaec/2105ee4d9e8042cb
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
mptt_dir = os.path.join(root_dir, 'mptt')
pieces = fullsplit(root_dir)
if pieces[-1] == '':
len_root_dir = len(pieces) - 1
else:
len_root_dir = len(pieces)
for dirpath, dirnames, filenames in os.walk(mptt_dir):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'): del dirnames[i]
if '__init__.py' in filenames:
packages.append('.'.join(fullsplit(dirpath)[len_root_dir:]))
elif filenames:
data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames]])
setup(
name = 'django-mptt',
description = 'Utilities for implementing Modified Preorder Tree Traversal with your Django Models and working with trees of Model instances',
version = '0.3_pre',
author = 'Jonathan Buchanan',
author_email = '[email protected]',
url = 'http://code.google.com/p/django-mptt/',
packages = packages,
data_files = data_files,
classifiers = ['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
| mit | -2,161,140,952,425,725,200 | 35.323077 | 146 | 0.639136 | false |
Tanmay28/coala | bears/tests/natural_language/AlexBearTest.py | 1 | 1313 | import os
import subprocess
import sys
from queue import Queue
sys.path.insert(0, ".")
import unittest
from bears.tests.LocalBearTestHelper import LocalBearTestHelper
from bears.natural_language.AlexBear import AlexBear
from coalib.settings.Section import Section
class AlexBearTest(LocalBearTestHelper):
def setUp(self):
self.section = Section("test section")
self.uut = AlexBear(self.section, Queue())
self.test_file1 = os.path.join(os.path.dirname(__file__),
"test_files",
"alex_test1.md")
self.test_file2 = os.path.join(os.path.dirname(__file__),
"test_files",
"alex_test2.md")
def test_run(self):
# Test a file with no issues
self.assertLinesValid(self.uut, [], self.test_file1)
# Test a file with issues
self.assertLinesInvalid(self.uut, [], self.test_file2)
def skip_test():
try:
subprocess.Popen(['alex', '--version'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
return False
except OSError:
return "Alex is not installed."
if __name__ == '__main__':
unittest.main(verbosity=2)
| agpl-3.0 | 2,532,052,658,769,836,500 | 29.534884 | 65 | 0.568926 | false |
hpcloud/CloudAgents | agents/file_exists.py | 1 | 3605 | #!/usr/bin/env python
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Only required for more convenient local development.
import sys, os
sys.path.append(os.path.dirname(os.path.realpath(__file__))+'/lib')
from cloudagents import CloudAgent
from keystoneclient.v2_0 import client
import novaclient
import swiftclient
from time import mktime
import datetime
import parsedatetime.parsedatetime as pdt
ca = CloudAgent()
ca.required_config = {
"name": "File Existence Checker",
"version": "0.2.0",
"author": "Jeff Kramer",
"url": "http://www.hpcloud.com/",
"help": """This script checks to see if a file exists inside of a swift container. It also has functions to allow for searching for files named on relative dates, daily backups for instance.""",
"config":
[{
"name": "region",
"regexp": "^.{1,50}$",
"title": "Region",
"description": "Short name for the object storage endpoint region to search. IE: region-a.geo-1",
"type": "string",
"required": True,
"resource": "openstack.object-store.endpoints.region"
},{
"name": "container",
"regexp": "^.{1,50}$",
"title": "Container",
"description": "Name of the container to search for the file.",
"type": "string",
"required": True,
"resource": "openstack.object-store.[region].containers"
},{
"name": "date",
"regexp": "^.{1,250}$",
"title": "Date Adjustment",
"description": "Date adjustment. Enables time substitution in object name. IE: 'yesterday'. Dates are compared in UTC.",
"type": "string",
"required": False,
	},{
	"name": "name",
	"regexp": "^.{1,250}$",
	"title": "Name",
	"description": "Object name to check for in the container. If a date adjustment is set, python datetime time substitution is enabled. IE: 'backups/%Y-%m-%d.zip'",
"type": "string",
"required": True
},
]
}
def agent():
ca.log("Starting!")
keystone = client.Client(token=ca.creds['token'], tenant_id=ca.creds['tenantId'],
auth_url=ca.creds['identity_url'])
object_store_catalog = keystone.service_catalog.get_endpoints()['object-store']
region_endpoints = None
for endpoints in object_store_catalog:
if endpoints['region'] == ca.conf['region']:
region_endpoints = endpoints
if not region_endpoints:
ca.log_fail("Failing, region not found in endpoint list.")
exit()
if ca.conf.get('date'):
p = pdt.Calendar()
result = p.parse(ca.conf['date'])
dt = datetime.datetime.fromtimestamp(mktime(result[0]))
path = dt.strftime(ca.conf['name'])
else:
path = ca.conf['name']
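	# Illustrative example (dates are hypothetical): with date='yesterday' and
	# name='backups/%Y-%m-%d.zip', a run on 2013-08-15 would look for the
	# object 'backups/2013-08-14.zip' in the configured container.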
try:
headers = swiftclient.head_object(region_endpoints['publicURL'],ca.creds['token'],
ca.conf['container'],path)
		if int(headers['content-length']) >= 0:
ca.log("File exists!")
except swiftclient.client.ClientException, e:
ca.log("File doesn't exist!")
ca.email("File missing: "+ca.conf['container']+"/"+path,'''
The container '%s' appears to be missing the file '%s'.
''' % (ca.conf['container'], path))
ca.run(agent)
| apache-2.0 | 6,946,789,395,821,561,000 | 30.347826 | 196 | 0.671567 | false |
pwittchen/learn-python-the-hard-way | exercises/exercise35.py | 1 | 1993 | # Exercise 35: Branches and Functions
from sys import exit
def gold_room():
print "This room is full of gold. How much do you take?"
choice = raw_input("> ")
if "0" in choice or "1" in choice:
how_much = int(choice)
else:
dead("Man, learn to type a number.")
if how_much < 50:
print "Nice, you're not greedy, you win!"
exit(0)
else:
dead("You greedy bastard!")
def bear_room():
print "There is a bear here."
print "The bear has a bunch of honey."
print "The fat bear is in front of another door."
print "How are you going to move the bear?"
bear_moved = False
while True:
choice = raw_input("> ")
if choice == "take honey":
dead("The bear looks at you then slaps your face off.")
elif choice == "taunt bear" and not bear_moved:
print "The bear has moved from the door. You can go through it now."
bear_moved = True
elif choice == "taunt bear" and bear_moved:
dead("The bear gets pissed off and chews your leg off.")
elif choice == "open door" and bear_moved:
gold_room()
else:
print "I got no idea what that means."
def cthulhu_room():
print "Here you see the great evil Cthulhu."
print "He, it, whatever stares at you and you go insane."
print "Do you flee for your life or eat your head?"
choice = raw_input("> ")
if "flee" in choice:
start()
elif "head" in choice:
dead("Well that was tasty!")
else:
cthulhu_room()
def dead(why):
print why, "Good job!"
exit(0)
def start():
print "You are in a dark room."
print "There is a door to your right and left."
print "Which one do you take?"
choice = raw_input("> ")
if choice == "left":
bear_room()
elif choice == "right":
cthulhu_room()
else:
dead("You stumble around the room until you starve.")
start()
| mit | 956,250,791,782,358,400 | 24.551282 | 80 | 0.578525 | false |
willjp/pyqconcurrency | qconcurrency/models.py | 1 | 37871 | #!/usr/bin/env python
"""
Name : qconcurrency/models.py
Created : Apr 14, 2017
Author : Will Pittman
Contact : [email protected]
________________________________________________________________________________
Description : Generic models, and interfaces for models to be used
in various Qt `View` widgets (ex: QTableView, QListView, QTableView, QComboBox, ...)
________________________________________________________________________________
"""
#builtin
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import Iterable, MutableMapping
#external
from Qt import QtGui, QtCore
import six
#internal
__all__ = [
'DictModel',
'DictModelRow',
]
#!TODO: implement the other QStandardItemModel methods (insertRow, ...)
#! taking special care to handle self._data
#! (to keep IPython happy)
#!TODO: test using delattr unecessary, (and potentially harmful)
#! QStandardItemModel methods (like appendRow, setItem, ...)
#!TODO: validation based on `hierarchy`, preventing nesting below defined
#!TODO: validation of column-names when setting columnvals
class DictModel( QtGui.QStandardItemModel ):
"""
Customized python interface for :py:obj:`QtGui.QStandardItemModel` so that it's
values, and nested tables can be accessed like a python dictionary.
Example:
Simple Example:
.. code-block:: bash
| _id | firstname | lastname | username |
|========================================|
| 101 | luke | skywalker | lukes |
| 102 | leia | skywalker | leias |
|========================================|
.. code-block:: python
model = DictModel( columns=('firstname','lastname','username') )
model.add_row( 101, columnvals = {
'firstname':'luke' ,
'lastname' :'skywalker' ,
'username' :'lukes' ,
}
)
userId = 101
                print( model[userId].columnval('firstname') )
>>> 'luke'
print( model[userId].columnvals() )
>>> {'_id':101, 'firstname':'luke', 'lastname':'skywalker', 'username':'lukes'}
Nested-Table Example:
.. code-block:: bash
|=============|
| _id | class | # level: 'jedi_class'
|=============|
| 101 | sith |
| |===========================================|
| | _id | firstname | lastname | username | # level: 'user'
| |===========================================|
| | 56 | Darth | Vader | anakins |
| | 57 | Darth | Maul | darthm |
| |===========================================|
| |
| 102 | jedi |
| |===========================================|
| | _id | firstname | lastname | username | # level: 'user'
| |===========================================|
| | 58 | Mace | Windu | macew |
| | 59 | Ben | Kenobi | benk |
| |===========================================|
| |
|=============|
.. code-block:: python
model = DictModel(
hierarchy = ('jedi_class','user'),
columns = {
                        'jedi_class': ('class',),
'user': ('firstname','lastname','username')
},
)
sith_row = model.add_row( 101, {'class':'sith'} )
jedi_row = model.add_row( 102, {'class':'jedi'} )
sith_row.add_child( 56, {'firstname':'Darth', 'lastname':'Vader', 'username':'anakins'} )
sith_row.add_child( 57, {'firstname':'Darth', 'lastname':'Maul', 'username':'darthm'} )
jediclassId = 101
userId = 56
                print( model[jediclassId][userId].columnval('username') )
>>> 'anakins'
print( model[jediclassId].level() )
>>> 'jedi_class'
print( model[jediclassId][userId].level() )
>>> 'user'
:py:obj:`qconcurrency.models.DictModel` column datatypes
.. code-block:: bash
|===============================================|
| _id | columnA | columnB |
|===============================================|
| DictModelRow | QStandardItem | QStandardItem |
| |===============================================|
| | _id | columnA | columnB |
| |===============================================|
| | DictModelRow | QStandardItem | QStandardItem |
| |===============================================|
| |
|===============================================|
"""
def __init__(self, columns, hierarchy=None ):
"""
Args:
columns (list, dict):
                Defines the available columns for the table/tree this :py:obj:`QtGui.QStandardItemModel` represents
(the `key`, generally referring to the databaseId, is always the first column)
If `hierarchy` argument is set, you have two options:
* This can be a list of column-names, that will be
created in all levels of nested table.
* This can be a dictionary in the form of ``{'level_name':(column,column,column,...), ...}``
that indicates specific-columns for each level of table-nesting.
If `hierarchy` is not set, this must be a list of column-names,
and they will be applicable to any level of table-nesting.
.. code-block:: python
{
'jedi_class': ('class',),
'user': ('firstname','lastname'),
}
            hierarchy (list or tuple, optional): ``(ex: ('department_type','department') )``
A list that labels what type of data is stored at each
level of table-nesting in this :py:obj:`qconcurrency.models.DictModel`. Each item
indicates another level of nesting.
.. code-block:: python
hierarchy = ('jedi_class','user'),
"""
QtGui.QStandardItemModel.__init__(self)
# Attributes
self._defaultcolumnvals = {} # all columns for new rows are initialized as ``None``
self._columns = None # either a list of columns, or a dict of hierarchy-keys and their columns
self._hierarchy = None # either ``None``, or a list, indicating the level of each
self._data = {} # unfortunately, if an item has a dict interface
# and has assignments within a context-manager,
# IPython tries to save/restore the values when it
# is destroyed.
#
# This means we need a real-dict (or something
# to fake it) in order to cleanly use
# within IPython. So we are now keeping
# 2x references to the data.
# Validation
# ==========
# If imposing hierarchy restrictions
if hierarchy:
self._hierarchy = hierarchy
if isinstance( columns, MutableMapping ):
if not set(hierarchy).issubset( set(columns.keys()) ):
raise RuntimeError((
'`columns` argument is missing keys represented in`hierarchy` \n'
'columns: %s \n'
'hierarchy: %s \n'
) % (repr(columns.keys()), repr(hierarchy))
)
# so that hierarchy can always be handled the same,
# create `columns` as a dict, if a list was passed
            elif isinstance( columns, Iterable ):
new_columns = {}
for level in hierarchy:
new_columns[ level ] = columns[:]
columns = new_columns
else:
raise RuntimeError(
'When `hierarchy` argument is set, `columns` must be either: \n'
' * a list of columns (applicable to all hierarchy levels) \n'
' * a dict of hierarchy-keys, and the columns associated with them \n'
)
for level in hierarchy:
self._defaultcolumnvals[ level ] = {}
for key in columns[level]:
self._defaultcolumnvals[ level ][ key ] = None
# If not imposing hierarchy restrictions
else:
if isinstance( columns, MutableMapping ):
raise RuntimeError(
'When `hierarchy` argument is *not* set, `columns` should always \n'
'be a list of column-names. This set of columns will be reused by all \n'
'levels of nested tables. '
)
for key in columns:
self._defaultcolumnvals[ key ] = None
self._columns = columns
self._hierarchy = hierarchy
def add_row(self, key, columnvals=None ):
"""
Adds a new (toplevel) row to this DictModel, henceforth referred to by the key `key`.
Args:
key (obj):
Key is the id you will use to refer to this object.
Generally it will be a databaseId. This object must be
hashable.
columnvals (dict, optional):
Optionally, you may provide a dictionary of column-val assignments
(appropriate to this item's table-level). All columns, not
assigned in `columnvals` will be initialized with a value of ''.
Returns:
:py:obj:`qconcurrency.models.DictModelRow`
"""
set_columnvals = self._defaultcolumnvals.copy()
if self._hierarchy:
set_columnvals = set_columnvals[ self._hierarchy[0] ]
if columnvals:
set_columnvals.update( columnvals )
item = DictModelRow( parent=self, key=key, columnvals=set_columnvals)
# NOTE: this step should not be necessary,
# but it seems to be...
self.setItem( self.rowCount()-1, 0, item )
self._data[ str(key) ] = item
return item
def columns(self, level=None ):
"""
Returns the columns for a particular level of nested-table
within this :py:obj:`qconcurrency.models.DictModel`.
Args:
level (obj): ``( ex: 'jedi_class', 0 )``
If a `hierarchy` was assigned to this :py:obj:`qconcurrency.models.DictModel`,
this can be a label from it, or an integer indicating the level-of-nesting.
Otherwise, this will be an integer indicating the level-of-nesting
(and it will be ignored).
Returns:
.. code-block:: python
('id','firstname','lastname','username', ...)
"""
if self._hierarchy:
if level == None:
raise RuntimeError(
'This `qconcurrency.models.DictModel` was created with different columns at '
'different levels. You\'ll need to provide the `level` you are '
'interested in to get the column-list '
)
if level in self._columns:
columns = list(self._columns[ level ][:])
columns.insert( 0, 'id' )
return columns
elif isinstance( level, int ) and isinstance( self._hierarchy, Iterable):
if level <= len(self._hierarchy):
i = 0
for key in self._hierarchy:
if i == level:
columns = list(self._columns[ key ][:])
columns.insert( 0, 'id' )
return columns
i +=1
raise KeyError('unknown level: %s' % level )
else:
columns = list(self._columns[:])
columns.insert( 0, 'id' )
return columns
def column_index(self, level=None, column=None ):
"""
Returns the column-index for a specific columnname
at a specific level.
Args:
level (obj): ``( ex: 'jedi_class', 0 )``
If a `hierarchy` was assigned to this :py:obj:`qconcurrency.models.DictModel`,
this can be a label from it, or an integer indicating the level-of-nesting.
Otherwise, this will be an integer indicating the level-of-nesting
(and it will be ignored).
Returns:
.. code-block:: python
3 # a column-index
"""
if self._hierarchy:
if level == None:
raise RuntimeError(
'This `qconcurrency.models.DictModel` was created with different columns at '
'different levels. You\'ll need to provide the `level` you are '
'interested in to get the column-list '
)
if level in self._columns:
return self._columns[ level ].index( column ) +1
elif isinstance( level, int ) and isinstance( self._hierarchy, Iterable ):
if level <= len(self._hierarchy):
i = 0
for key in self._hierarchy:
if i == level:
return self._columns[ key ].index( column ) +1
i +=1
raise KeyError('unknown level: %s' % level )
else:
return self._columns.index( column ) +1
def default_columnvals(self, level=None ):
"""
Returns the default-columnvals for a particular level of nested-table.
See :py:meth:`qconcurrency.models.DictModelRow.level`
Args:
level (obj):
If a `hierarchy` was assigned to this :py:obj:`qconcurrency.models.DictModel`,
this will be a label from it. Otherwise, this will be an integer
indicating the level-of-nesting (and it will be ignored).
Returns:
.. code-block:: python
{
'firstname': None,
'lastname': None,
...
}
"""
if self._hierarchy:
if level in self._defaultcolumnvals:
return self._defaultcolumnvals[ level ]
elif isinstance( level, int ):
if level <= len(self._defaultcolumnvals):
i = 0
for key in self._defaultcolumnvals:
if i == level:
return self._defaultcolumnvals[ key ]
i +=1
raise KeyError('unknown level: %s' % level )
else:
return self._defaultcolumnvals
def hierarchy(self):
"""
Returns the model's hierarchy tuple
(if one has been assigned in :py:obj:`qconcurrency.models.DictModel.__init__`)
Returns:
.. code-block:: python
('jedi_class', 'user') # if assigned a hierarchy
None # if no hierarchy is assigned
"""
return self._hierarchy
def _get_rowitem(self, key):
"""
Returns the item in the first column of this :py:obj:`QtGui.QStandardItemModel`
for the row with the key indicated by `key`.
Args:
key (obj):
A key assigned to a row within this Model. Generally,
this would be a database-Id.
Returns:
QtGui.QStandardItem
"""
for i in range(self.rowCount()):
if self.item(i,0).text() == str(key):
return self.item(i,0)
raise KeyError(
'no row has the key "%s"' % key
)
def _get_colindex(self, level, column):
"""
Returns the column-index for a column within this :py:obj:`QtGui.QStandardItemModel`
by it's name.
Args:
column (str): ``(ex: 'name' )``
Any item from the :py:meth:`__init__` argument `columns`.
Returns:
.. code-block:: python
4 # integer, representing the 0-based index of this column
# in the table
Raises:
KeyError: if column does not exist in table
"""
if self._hierarchy:
if level == None:
raise RuntimeError(
'This `qconcurrency.models.DictModel` was created with different columns at '
'different levels. You\'ll need to provide the `level` you are '
'interested in to get the column-list '
)
if level in self._columns:
return self._columns[ level ].index( column ) +1
elif isinstance( level, int ) and isinstance( self._hierarchy, Iterable ):
if level <= len(self._hierarchy):
i = 0
for key in self._hierarchy:
if i == level:
return self._columns[ key ].index( column ) +1
i +=1
raise KeyError('unknown level: %s' % level )
else:
return self._columns.index(column) +1
raise KeyError(
'Column "%s" does not exist in this `qconcurrency.models.DictModel` columns: %s' % (
column, str(self._columns)
)
)
def removeRow(self, key):
self._data.pop( str(key) )
# row is gone. that is all we care about
try:
modelitem = self._get_rowitem( key )
return QtGui.QStandardItemModel.removeRow( self, modelitem.row() )
except( KeyError ):
return
def takeRow(self, key):
return self.removeRow( str(key) )
def __getitem__(self, key):
"""
Returns a :py:obj:`qconcurrency.models.DictModelRow` object representing
a row from this :py:obj:`qconcurrency.models.DictModel`.
"""
return self._data[str(key)]
def __delitem__(self, key):
"""
Wraps :py:meth:`removeRow`
"""
self.removeRow( key )
def __contains__(self, item):
"""
Returns True/False if a row with `key` exists in
:py:obj:`QtWidgets.QStandardItemModel`
"""
return str(item) in self._data
def __len__(self):
"""
Wraps `self._data.__len__`
"""
return len(self._data)
def __iter__(self):
"""
Allows iteration over Ids in DictModel.
"""
return iter(self._data)
def has_key(self, k):
"""
Wraps `self._data.has_key`
"""
return self._data.has_key(k)
def keys(self):
"""
Lists `key` value for every row in the
:py:obj:`QtWidgets.QStandardItemModel`
"""
return self._data.keys()
def values(self):
"""
Lists :py:obj:`DictModelRow` objects for every row in the
:py:obj:`QtWidgets.QStandardItemModel`
"""
return self._data.values()
def items(self):
"""
Lists a tuple with the `key` and :py:obj:`DictModelRow`
objects for every row in the :py:obj:`QtWidgets.QStandardItemModel`
"""
return self._data.items()
def clear(self):
"""
Removes all items from :py:obj:`QtGui.QStandardItemModel`
"""
self._data = {}
QtGui.QStandardItemModel.clear(self)
class DictModelRow( QtGui.QStandardItem ):
"""
A DictModelRow is a :py:obj:`QtGui.QStandardItem` that holds
an item's key (usually database-Id) within a :py:obj:`qconcurrency.models.DictModel`.
It is always added to a :py:obj:`qconcurrency.models.DictModel` at the column-index ``0``.
When setting columnvals, they are added to the same parent :py:obj:`qconcurrency.models.DictModel`
or :py:obj:`qconcurrency.models.DictModelRow`, but at different column-indexes.
Example:
.. code-block:: bash
===== ========|
DictModelRow _id | class | # level: 'jedi_class'
| ===== ========|
+---------> 101 | sith |
| |============================================|
| | _id | firstname | lastname | username | # level: 'user'
| |============================================|
+-------------> 56 | Darth | Vader | anakins |
+-------------> 57 | Darth | Maul | darthm |
|============================================|
/\\ /\\ /\\
| | |
QtGui.QStandardItem ----+------------+-------------+
"""
def __init__(self, parent, key, columnvals=None ):
"""
Args:
parent (QtGui.QStandardItem, QtGui.QStandardItemModel ):
Another QStandardItem that has already
been added to the model, or a model itself.
It will be used to access the model's info,
and this widget will be added to it.
key (obj):
A hashable python object that will be
used to represent this object's databaseId.
columnvals (dict, optional):
A dictionary of columns, and assignments to store
in the view.
"""
QtGui.QStandardItem.__init__(self, str(key))
if not isinstance( parent, QtGui.QStandardItemModel ):
if not parent.model():
raise RuntimeError(
'`parent` %s QStandardItem must have already been added to a QStandardItemModel' % repr(parent)
)
self._key = key
self._level = None # if `hierarchy` argument was set in `DictModel`, this will be
# a label indicating the type of information this
# table represents.
#
# otherwise, this will be an incremented integer
# (starting from 0)
# append this item to the parent's list of children
if isinstance( parent, QtGui.QStandardItemModel ):
if parent.hierarchy():
self._level = parent.hierarchy()[0]
else:
self._level = 0
parent.setItem( parent.rowCount(), 0, self )
else:
hierarchy = parent.model().hierarchy()
if hierarchy:
index = hierarchy.index( parent.level() )+1
self._level = hierarchy[ index ]
else:
self._level = parent.level() +1
parent.setChild( parent.rowCount(), 0, self )
self.setText( str(key) )
default_columnvals = self.model().default_columnvals(self._level)
self.set_columnvals( default_columnvals )
if columnvals:
self.set_columnvals( columnvals )
def __getitem__(self, key):
return self._get_child_row(key)
def add_child(self, key, columnvals=None ):
"""
Adds a new row to this DictModel, at a new level of nesting
henceforth referred to by the key `key`.
Example:
.. code-block:: bash
|==============|
| _id | column |
|==============|
| 100 | 'A' | # add_child( 102, {'column':'A1'} )
| |==============|
| | _id | column | # added child: model[100][102]
| |==============|
| | 102 | 'A1' |
| |==============|
| |
| 101 | 'B' |
|==============|
Args:
key (obj):
Key is the id you will use to refer to this object.
Generally it will be a databaseId. This object must be
hashable.
columnvals (dict, optional):
Optionally, you may provide a dictionary of column-val assignments
(appropriate to this item's table-level) as determined by the `columns`
argument to :py:meth:`qconcurrency.models.DictModel.__init__`
Returns:
:py:obj:`qconcurrency.models.DictModelRow`
See Also:
* :py:meth:`qconcurrency.models.DictModelRow.add_row`
* :py:meth:`qconcurrency.models.DictModel.add_row`
"""
item = DictModelRow( parent=self, key=key, columnvals=columnvals )
return item
def add_row(self, key, columnvals=None ):
"""
Adds a new row to this DictModel, at the same level of nesting
henceforth referred to by the key `key`.
Example:
.. code-block:: bash
|==============|
| _id | column |
|==============|
| 100 | 'A' | # add_row( 102, {'column':'C'} )
| 101 | 'B' |
| 102 | 'C' | # added row: model[102]
|==============|
Args:
key (obj):
Key is the id you will use to refer to this object.
Generally it will be a databaseId. This object must be
hashable.
columnvals (dict, optional):
Optionally, you may provide a dictionary of column-val assignments
(appropriate to this item's table-level) as determined by the `columns`
argument to :py:meth:`qconcurrency.models.DictModel.__init__`
Returns:
:py:obj:`qconcurrency.models.DictModelRow`
See Also:
* :py:meth:`qconcurrency.models.DictModelRow.add_row`
* :py:meth:`qconcurrency.models.DictModel.add_row`
"""
if self.parent():
item = DictModelRow( parent=self.parent(), key=key, columnvals=columnvals )
else:
item = DictModelRow( parent=self.model(), key=key, columnvals=columnvals )
return item
def set_columnvals(self, columnvals ):
"""
Set columnvals on a key of this :py:obj:`qconcurrency.models.DictModel`
"""
# validation
if self.model() is None:
raise RuntimeError('Cannot set columnvals until item has been added to a model')
columns = self.model().columns( self._level )
# set columnvals
for i in range(len(columns)):
column = columns[i]
if column in columnvals:
if columnvals[column] == None:
columnvals[column] = ''
if self.parent() is not None:
self.parent().setChild(
self.index().row(), # row
i, # column
QtGui.QStandardItem( str(columnvals[column]) ) # item
)
else:
self.model().setItem(
self.index().row(), # row
i, # column
QtGui.QStandardItem( str(columnvals[column]) ) # item
)
def columnvals(self):
"""
Returns a dictionary of this item's columnvals from the Model.
A column `_id` will be added to the list of columns, which will
be the `key` value of this row.
"""
columnvals = {}
columns = self.model().columns(self._level)
for i in range(len(columns)):
column = columns[i]
# nested-modelitem
if self.parent() is not None:
modelitem = self.parent().child( self.row(), i )
if modelitem is not None:
columnvals[ column ] = modelitem.text()
else:
raise RuntimeError(
                        'item at level "%s" in column "%s" (%s,%s) is None. Expected QtGui.QStandardItem' % (
self._level, column, self.row(), i)
)
# root-modelitems
else:
modelitem = self.model().item( self.row(), i )
if modelitem is not None:
columnvals[ column ] = modelitem.text()
else:
raise RuntimeError(
                        'item at level "%s" in column "%s" (%s,%s) is None. Expected QtGui.QStandardItem' % (
self._level, column, self.row(), i)
)
columnvals['_id'] = self._key
return columnvals
def columnval(self, name):
"""
Retrieve a single column-value only.
"""
if name == '_id':
if self.parent() is not None:
return self.parent().child( self.row(), 0 ).text()
else:
return self.model().item( self.row(), 0 ).text()
columns = self.model().columns(self._level)
for i in range(len(columns)):
column = columns[i]
if column == name:
if self.parent() is not None:
modelitem = self.parent().child( self.row(), i )
if modelitem:
return modelitem.text()
else:
modelitem = self.model().item( self.row(), i )
if modelitem:
return modelitem.text()
raise KeyError(
'Unable to find a column named: "%s" in %s' % (name, repr(columns))
)
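    # Example (illustrative, reusing the nested model from the class docstring):
    #   model[101][56].columnval('username')  ->  'anakins'
    #   model[101][56].columnval('_id')       ->  '56' (the row key, as text)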
def columnitem(self, name):
"""
Returns the sibling-widget representing one of the columnvals
"""
sibling_index = self.index().sibling( self.index().row(), self._get_colindex( name ) )
return self.model().itemFromIndex( sibling_index )
def level(self):
"""
Returns either a label (if :py:meth:`qconcurrency.models.DictModel.__init__` was passed a
`hierarchy` argument), or an integer representing the nesting-depth.
Either way, level is used to indicate the level-of-nesting of the table
that this item is in.
"""
return self._level
def delete(self):
"""
Removes this *row* from the model.
"""
if self.parent() is not None:
self.parent().removeRow( self.id() )
else:
self.model().removeRow( self.id() )
def _get_sibling_row(self, key):
"""
Returns a sibling with a different key at the same level.
Example:
.. code-block:: bash
----------------------------------------------------
key | name | path |
----------------------------------------------------
100 | 'mnt' | '/mnt' |
100.1 | 'mntusb' | '/mnt/usb' |
100.1.a | 'mntusbbackup' | '/mnt/usb/backup' |
100.2 | 'mntcd' | '/mnt/cd' |
| | |
200 | 'home' | '/home' |
200.1 | 'will' | '/home/will' |
In the above diagram representing the :py:obj:`QStandardItemModel`,
from `100.1` you would be able retrieve `100.2`.
"""
if self.parent() == None:
for i in range(self.model().rowCount()):
if self.model().item(i,0).text() == str(key):
return self.model().item(i,0)
else:
for i in range(self.parent().rowCount()):
if self.parent().child(i,0).text() == str(key):
return self.parent().child(i,0)
raise KeyError(
'Unable to find key %s in table containing %s' % (key, repr(self))
)
def _get_child_row(self, key):
"""
Returns a child with a particular key.
Example:
.. code-block:: bash
----------------------------------------------------
key | name | path |
----------------------------------------------------
100 | 'mnt' | '/mnt' |
100.1 | 'mntusb' | '/mnt/usb' |
100.1.a | 'mntusbbackup' | '/mnt/usb/backup' |
100.2 | 'mntcd' | '/mnt/cd' |
| | |
200 | 'home' | '/home' |
200.1 | 'will' | '/home/will' |
In the above diagram representing the :py:obj:`QStandardItemModel`,
from `100.1` you would be able retrieve `100.1.a`.
"""
if not self.rowCount():
raise RuntimeError(
'%s has no children. Cannot retrieve child at key %s' % (repr(self), key)
)
for i in range(self.rowCount()):
if self.child(i,0).text() == str(key):
return self.child(i,0)
raise KeyError(
'Cannot find child identified by key "%s" in %s' % (key,repr(self))
)
def _get_colindex(self, column):
return self.model()._get_colindex( self.level(), column )
def keys(self):
"""
        Returns a list containing the keys of every
        child-row that has been added to this :py:obj:`qconcurrency.models.DictModelRow`
"""
keys = []
for i in range(self.rowCount()):
keys.append( self.child(i,0).text() )
return keys
def id(self):
"""
Returns the `key` this row represents.
        (Its value depends on the value passed to :py:meth:`qconcurrency.models.DictModelRow.add_row`
or :py:meth:`qconcurrency.models.DictModelRow.add_child` ).
"""
return self._key
if __name__ == '__main__':
from qconcurrency import QApplication
from Qt import QtWidgets
import sys
def test_simple():
with QApplication():
model = DictModel( columns=('a','b','c') )
# add toplevel rows
model.add_row( 100, columnvals={'a':'AAA', 'b':'BBB'} )
model.add_row( 200, columnvals={'a':'ZZZ', 'b':'XXX'} )
print( model[100].columnvals() )
print( model[200].columnvals() )
# add child-rows (and nested children)
model[100].add_child( 10, columnvals={'c':'CCC'} )
model[100][10].add_row( 11 )
model[100][10].add_row( 12 )
model[100][10].add_child( 1 , columnvals={'c':'DDD'} )
print( model[100][10].columnvals() )
print( model[100][10][1].columnvals() )
# add model to tree (so it is visible)
tree = QtWidgets.QTreeView()
tree.setModel( model )
tree.show()
def test_hierarchy_fixedcols():
with QApplication():
        model = DictModel(
hierarchy = ('jedi_class','user'),
columns = {'jedi_class':['class'], 'user':('username','firstname','lastname')}
)
model.add_row(10, columnvals={'class':'sith'} )
model.add_row(11, columnvals={'class':'jedi'} )
model[10].add_child( 101, columnvals={'username':'anakins', 'firstname':'anakin', 'lastname':'skywalker'} )
model[10].add_child( 102, columnvals={'username':'epalpatine'} )
model[10].add_row( 12, columnvals={'class':'other'} )
jediclassId = 10
userId = 101
print( model[jediclassId][userId].columnvals() )
# add model to tree (so it is visible)
tree = QtWidgets.QTreeView()
tree.setModel( model )
tree.show()
def runtests():
#test_simple()
test_hierarchy_fixedcols()
runtests()
| bsd-3-clause | -3,545,572,550,942,199,300 | 34.660075 | 119 | 0.464577 | false |
aerickson/jenkinsapi | jenkinsapi_tests/unittests/test_jenkins.py | 1 | 14593 | import mock
# To run unittests on python 2.6 please use unittest2 library
try:
import unittest2 as unittest
except ImportError:
import unittest
from jenkinsapi.plugins import Plugins
from jenkinsapi.utils.requester import Requester
from jenkinsapi.jenkins import Jenkins, JenkinsBase, Job
from jenkinsapi.custom_exceptions import JenkinsAPIException, UnknownJob, BadURL
class TestJenkins(unittest.TestCase):
DATA = {}
@mock.patch.object(Jenkins, '_poll')
def setUp(self, _poll):
_poll.return_value = self.DATA
self.J = Jenkins('http://localhost:8080',
username='foouser', password='foopassword')
@mock.patch.object(Jenkins, '_poll')
def test_clone(self, _poll):
_poll.return_value = self.DATA
JJ = self.J._clone()
self.assertNotEquals(id(JJ), id(self.J))
self.assertEquals(JJ, self.J)
def test_stored_passwords(self):
self.assertEquals(self.J.requester.password, 'foopassword')
self.assertEquals(self.J.requester.username, 'foouser')
@mock.patch.object(Jenkins, '_poll')
def test_reload(self, _poll):
mock_requester = Requester(username='foouser', password='foopassword')
mock_requester.get_url = mock.MagicMock(return_value='')
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword',
requester=mock_requester)
J.poll()
@mock.patch.object(JenkinsBase, '_poll')
@mock.patch.object(Jenkins, '_poll')
@mock.patch.object(Job, '_poll')
def test_lazy_loading(self, _base_poll, _poll, _job_poll):
_poll.return_value = {
'jobs': [
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
{'name': 'job_two',
'url': 'http://localhost:8080/job_two',
'color': 'blue'},
]
}
_base_poll.return_value = _poll.return_value
_job_poll.return_value = {}
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword', lazy=True)
self.assertEquals(J._data, None)
for idx, (job_name, job) in enumerate(J.get_jobs()):
self.assertEquals(
job_name,
_poll.return_value['jobs'][idx]['name'])
self.assertTrue(isinstance(job, Job))
self.assertEquals(
job.name,
_poll.return_value['jobs'][idx]['name'])
self.assertEquals(
job.baseurl,
_poll.return_value['jobs'][idx]['url'])
@mock.patch.object(JenkinsBase, '_poll')
@mock.patch.object(Jenkins, '_poll')
@mock.patch.object(Job, '_poll')
def test_get_jobs_info(self, _base_poll, _poll, _job_poll):
_poll.return_value = {
'jobs': [
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
{'name': 'job_two',
'url': 'http://localhost:8080/job_two',
'color': 'blue'},
]
}
_base_poll.return_value = _poll.return_value
_job_poll.return_value = {}
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword')
for idx, (url, job_name) in enumerate(J.get_jobs_info()):
self.assertEquals(
job_name,
_poll.return_value['jobs'][idx]['name'])
self.assertEquals(url, _poll.return_value['jobs'][idx]['url'])
@mock.patch.object(JenkinsBase, '_poll')
@mock.patch.object(Jenkins, '_poll')
@mock.patch.object(Job, '_poll')
def test_get_jobs_list(self, _base_poll, _poll, _job_poll):
_poll.return_value = {
'jobs': [
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
{'name': 'job_two',
'url': 'http://localhost:8080/job_two',
'color': 'blue'},
]
}
_base_poll.return_value = _poll.return_value
_job_poll.return_value = {}
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword')
for idx, job_name in enumerate(J.get_jobs_list()):
self.assertEquals(
job_name,
_poll.return_value['jobs'][idx]['name'])
@mock.patch.object(JenkinsBase, '_poll')
@mock.patch.object(Jenkins, '_poll')
@mock.patch.object(Job, '_poll')
def test_create_dup_job(self, _base_poll, _poll, _job_poll):
_poll.return_value = {
'jobs': [
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
{'name': 'job_two',
'url': 'http://localhost:8080/job_two',
'color': 'blue'},
]
}
_base_poll.return_value = _poll.return_value
_job_poll.return_value = {}
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword')
job = J.create_job('job_one', None)
self.assertTrue(isinstance(job, Job))
self.assertTrue(job.baseurl == 'http://localhost:8080/job_one')
self.assertTrue(job.name == 'job_one')
    # Here we're going to test a function that modifies Jenkins' internal data.
    # It polls once to check whether the job already exists, then polls again
    # to see whether the job has been created. So we need a mock function that
    # returns a different value on each call.
# Define what we will return
create_job_returns = [
# This will be returned when job is not yet created
{
'jobs': [
{'name': 'job_one',
'url': 'http://localhost:8081/job_one',
'color': 'blue'},
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
]
},
# This to simulate that the job has been created
{
'jobs': [
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
{'name': 'job_two',
'url': 'http://localhost:8080/job_two',
'color': 'blue'},
{'name': 'job_new',
'url': 'http://localhost:8080/job_new',
'color': 'blue'},
]
}
]
# Mock function
def second_call_poll(tree=None):
return TestJenkins.create_job_returns.pop(0)
def job_second_call_poll(tree=None):
return {}
# Patch Jenkins with mock function
@mock.patch.object(Jenkins, '_poll', side_effect=second_call_poll)
@mock.patch.object(Job, '_poll', side_effect=job_second_call_poll)
def test_create_new_job(self, _poll, _job_poll):
_job_poll.return_value = {}
mock_requester = Requester(username='foouser', password='foopassword')
mock_requester.post_xml_and_confirm_status = mock.MagicMock(
return_value='')
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword',
requester=mock_requester)
job = J.create_job('job_new', None)
self.assertTrue(isinstance(job, Job))
self.assertTrue(job.baseurl == 'http://localhost:8080/job_new')
self.assertTrue(job.name == 'job_new')
@mock.patch.object(JenkinsBase, '_poll')
@mock.patch.object(Jenkins, '_poll')
@mock.patch.object(Job, '_poll')
def test_create_new_job_fail(self, _base_poll, _poll, _job_poll):
_job_poll.return_value = {}
_poll.return_value = {
'jobs': [
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
]
}
_base_poll.return_value = _poll.return_value
mock_requester = Requester(username='foouser', password='foopassword')
mock_requester.post_xml_and_confirm_status = mock.MagicMock(
return_value='')
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword',
requester=mock_requester)
with self.assertRaises(JenkinsAPIException) as ar:
J.create_job('job_new', None)
self.assertEquals(str(ar.exception), 'Cannot create job job_new')
@mock.patch.object(JenkinsBase, '_poll')
@mock.patch.object(Jenkins, '_poll')
@mock.patch.object(Job, '_poll')
def test_get_jenkins_obj_from_url(self, _base_poll, _poll, _job_poll):
_job_poll.return_value = {}
_poll.return_value = {
'jobs': [
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
]
}
_base_poll.return_value = _poll.return_value
mock_requester = Requester(username='foouser', password='foopassword')
mock_requester.post_xml_and_confirm_status = mock.MagicMock(
return_value='')
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword',
requester=mock_requester)
new_jenkins = J.get_jenkins_obj_from_url('http://localhost:8080/')
self.assertEquals(new_jenkins, J)
new_jenkins = J.get_jenkins_obj_from_url('http://localhost:8080/foo')
self.assertNotEquals(new_jenkins, J)
@mock.patch.object(JenkinsBase, '_poll')
@mock.patch.object(Jenkins, '_poll')
@mock.patch.object(Job, '_poll')
def test_get_jenkins_obj(self, _base_poll, _poll, _job_poll):
_job_poll.return_value = {}
_poll.return_value = {
'jobs': [
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
{'name': 'job_one',
'url': 'http://localhost:8080/job_one',
'color': 'blue'},
]
}
_base_poll.return_value = _poll.return_value
mock_requester = Requester(username='foouser', password='foopassword')
mock_requester.post_xml_and_confirm_status = mock.MagicMock(
return_value='')
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword',
requester=mock_requester)
new_jenkins = J.get_jenkins_obj()
self.assertEquals(new_jenkins, J)
@mock.patch.object(JenkinsBase, '_poll')
@mock.patch.object(Jenkins, '_poll')
def test_get_version(self, _base_poll, _poll):
class MockResponse(object):
def __init__(self):
self.headers = {}
self.headers['X-Jenkins'] = '1.542'
mock_requester = Requester(username='foouser', password='foopassword')
mock_requester.get_and_confirm_status = mock.MagicMock(
return_value=MockResponse())
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword',
requester=mock_requester)
self.assertEquals('1.542', J.version)
@mock.patch.object(JenkinsBase, '_poll')
@mock.patch.object(Jenkins, '_poll')
def test_get_version_nonexistent(self, _base_poll, _poll):
class MockResponse(object):
def __init__(self):
self.headers = {}
base_url = 'http://localhost:8080'
mock_requester = Requester(username='foouser', password='foopassword')
mock_requester.get_and_confirm_status = mock.MagicMock(
return_value=MockResponse())
J = Jenkins(base_url,
username='foouser', password='foopassword',
requester=mock_requester)
self.assertEquals('0.0', J.version)
@mock.patch.object(JenkinsBase, 'get_data')
def test_get_master_data(self, _base_poll):
base_url = 'http://localhost:808'
_base_poll.return_value = {
"busyExecutors": 59,
"totalExecutors": 75
}
j = Jenkins(base_url,
username='foouser', password='foopassword')
data = j.get_master_data()
self.assertEquals(data['busyExecutors'], 59)
self.assertEquals(data['totalExecutors'], 75)
class TestJenkinsURLs(unittest.TestCase):
@mock.patch.object(Jenkins, '_poll')
def testNoSlash(self, _poll):
_poll.return_value = {}
J = Jenkins('http://localhost:8080',
username='foouser', password='foopassword')
self.assertEquals(
J.get_create_url(),
'http://localhost:8080/createItem')
@mock.patch.object(Jenkins, '_poll')
def testWithSlash(self, _poll):
_poll.return_value = {}
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword')
self.assertEquals(
J.get_create_url(),
'http://localhost:8080/createItem')
@mock.patch.object(Jenkins, '_poll')
@mock.patch.object(Plugins, '_poll')
def test_has_plugin(self, _p_poll, _poll):
_poll.return_value = {}
_p_poll.return_value = {
'plugins': [
{
'deleted': False, 'hasUpdate': True, 'downgradable': False,
'dependencies': [{}, {}, {}, {}],
'longName': 'Jenkins Subversion Plug-in', 'active': True,
'shortName': 'subversion', 'backupVersion': None,
'url': 'http://wiki.jenkins-ci.org/display/JENKINS/Subversion+Plugin',
'enabled': True, 'pinned': False, 'version': '1.45',
'supportsDynamicLoad': 'MAYBE', 'bundled': True
}
]
}
J = Jenkins('http://localhost:8080/',
username='foouser', password='foopassword')
self.assertTrue(J.has_plugin('subversion'))
if __name__ == '__main__':
unittest.main()
| mit | 1,974,490,247,969,425,200 | 36.805699 | 90 | 0.533681 | false |
wjohnston888/pynet_test- | my_devices.py | 1 | 1517 | """
pynet-rtr1 (Cisco IOS) 184.105.247.70
pynet-rtr2 (Cisco IOS) 184.105.247.71
pynet-sw1 (Arista EOS) 184.105.247.72
pynet-sw2 (Arista EOS) 184.105.247.73
pynet-sw3 (Arista EOS) 184.105.247.74
pynet-sw4 (Arista EOS) 184.105.247.75
juniper-srx 184.105.247.76
"""
from getpass import getpass
password = getpass("Enter standard password: ")
cisco_rtr1 = dict(
hostname='184.105.247.70',
device_type='ios',
username='pyclass',
password=password,
optional_args = {}
)
cisco_rtr2 = dict(
hostname='184.105.247.71',
device_type='ios',
username='pyclass',
password=password,
optional_args = {}
)
arista_sw1 = dict(
hostname='184.105.247.72',
device_type='eos',
username='pyclass',
password=password,
optional_args = {}
)
arista_sw2 = dict(
hostname='184.105.247.73',
device_type='eos',
username='pyclass',
password=password,
optional_args = {}
)
juniper_srx = dict(
hostname='184.105.247.76',
device_type='junos',
username='pyclass',
password=password,
optional_args = {}
)
juniper1 = dict(
hostname='juniper1.twb-tech.com',
device_type='junos',
username='pyclass',
password=password,
optional_args = {}
)
juniper2 = dict(
hostname='juniper2.twb-tech.com',
device_type='junos',
username='pyclass',
password=password,
optional_args = {}
)
device_list = [
cisco_rtr1,
cisco_rtr2,
arista_sw1,
arista_sw2,
juniper_srx,
]
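
# --- Illustrative usage sketch (not part of the original file) ---------------
# Assumes these dicts are meant to feed NAPALM's get_network_driver(): the
# 'device_type' key selects the driver and the remaining keys are passed as
# keyword arguments. Treat this as a hedged example, not the canonical usage.
if __name__ == '__main__':
    from napalm import get_network_driver

    for dev in device_list:
        params = dict(dev)                      # copy so the module-level dicts stay intact
        driver = get_network_driver(params.pop('device_type'))
        conn = driver(**params)
        conn.open()
        print(conn.get_facts())                 # basic per-device sanity check
        conn.close()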
| apache-2.0 | -8,651,061,472,039,919,000 | 18.960526 | 47 | 0.619644 | false |
nightstrike/blender_nif_plugin | testframework/integration/textures/normal/test_normal.py | 1 | 7305 | """Export and import textured meshes."""
# ***** BEGIN LICENSE BLOCK *****
#
# Copyright © 2005-2015, NIF File Format Library and Tools contributors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the NIF File Format Library and Tools
# project nor the names of its contributors may be used to endorse
# or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# ***** END LICENSE BLOCK *****
import bpy
import nose.tools
import os.path
from pyffi.formats.nif import NifFormat
from integration import SingleNif
from integration.data import gen_data
from integration.geometry.trishape import b_gen_geometry
from integration.geometry.trishape import n_gen_geometry
from integration.geometry.uv import b_gen_uv
from integration.geometry.uv import n_gen_uv
from integration.property.material import b_gen_material
from integration.property.material import n_gen_material
from integration.textures import b_gen_texture
from integration.textures import n_gen_texture
from integration.textures.diffuse import b_gen_diffusemap
from integration.textures.diffuse import n_gen_diffusemap
from integration.textures.normal import b_gen_normalmap
from integration.textures.normal import n_gen_normalmap
from nose.tools import nottest
'''
Normal map is technically a special case:
handle the case where the user supplies a normal map instead of a bump map, and vice versa.
Extra_shader_data -> NormalMapIndex (Civ VI, Sid Meier, Morrowind)
BSShaderPPLightingProperty (FO3 & NV)
BSLightingShaderProperty (Skyrim)
'''
#TODO
@nottest
class TestTexturePropertyNormalMap(SingleNif):
"""Test import/export of meshes with NiTexturingProperty based diffuse texture"""
n_name = "textures/normal/test_normal"
b_name = 'Cube'
# Paths
root_dir = os.getcwd()
nif_dir = os.path.join(root_dir, 'nif')
diffuse_texture_path = os.path.join(nif_dir, 'textures', 'diffuse', 'diffuse.dds')
normalmap_texture_path = os.path.join(nif_dir, 'textures', 'normal', 'normal.dds')
def b_create_data(self):
b_obj = b_gen_geometry.b_create_cube(self.b_name)
b_gen_uv.b_uv_object()
b_gen_geometry.b_transform_cube(b_obj)
b_mat = b_gen_material.b_create_material_block(b_obj)
b_gen_material.b_create_set_default_material_property(b_mat)
# diffuse
b_mat_texslot = b_gen_texture.b_create_textureslot(b_mat, 'Diffuse')
b_gen_texture.b_create_load_texture(b_mat_texslot, self.diffuse_texture_path)
b_gen_diffusemap.b_create_diffuse_texture_properties(b_mat_texslot)
# normal
b_mat_texslot = b_gen_texture.b_create_textureslot(b_mat, 'Normal')
b_gen_texture.b_create_load_texture(b_mat_texslot, self.normalmap_texture_path)
b_gen_normalmap.b_create_normal_texture_properties(b_mat_texslot)
def b_check_data(self):
b_obj = bpy.data.objects[self.b_name]
# TODO - probably should stick in some UV tests at some point.
b_mat = b_gen_material.b_check_material_block(b_obj) # check we have a material
b_gen_material.b_check_material_property(b_mat) # check its values
# diffuse
        nose.tools.assert_equal(b_mat.texture_slots[0] is not None, True) # check slot exists
b_texslot_diffuse = b_mat.texture_slots[0]
b_gen_texture.b_check_texture_slot(b_texslot_diffuse)
b_gen_texture.b_check_image_texture_property(b_texslot_diffuse, self.diffuse_texture_path)
b_gen_diffusemap.b_check_diffuse_texture_settings(b_texslot_diffuse)
# normal
        nose.tools.assert_equal(b_mat.texture_slots[1] is not None, True) # check slot exists
b_texslot_normal = b_mat.texture_slots[1]
b_gen_texture.b_check_texture_slot(b_texslot_normal)
b_gen_texture.b_check_image_texture_property(b_texslot_normal, self.normalmap_texture_path)
b_gen_normalmap.b_check_normal_texture_settings(b_texslot_normal)
def n_create_data(self):
gen_data.n_create_header_oblivion(self.n_data)
n_gen_texture.n_create_blocks(self.n_data)
n_nitrishape = self.n_data.roots[0].children[0]
n_gen_material.n_attach_material_prop(n_nitrishape) # add nimaterialprop
n_gen_texture.n_create_store_normal_data(n_nitrishape) #store normal data as NiBinaryExtraData
n_gen_texture.n_create_texture_property(n_nitrishape) # add nitexturingprop
n_textureprop = n_nitrishape.properties[0]
n_gen_diffusemap.n_create_diffuse_map(n_textureprop) #add nitexturesource diffuse
n_gen_normalmap.n_create_normal_map(n_textureprop) #add nitexturesource normalmap
return self.n_data
def n_check_data(self):
        # TODO - probably should stick in some UV tests at some point.
n_geom = self.n_data.roots[0].children[0]
nose.tools.assert_equal(n_geom.num_properties, 2) # mat & texture
n_gen_material.n_check_material_property(n_geom.properties[1])
n_tex_prop = n_geom.properties[0]
n_gen_texture.n_check_texturing_property(n_tex_prop) #check generic props
n_gen_diffusemap.n_check_diffuse_property(n_tex_prop) #check diffuse settings
n_gen_normalmap.n_check_normal_property(n_tex_prop) #check normal settings
# diffuse
n_texdesc_diffuse = n_tex_prop.base_texture
n_gen_texture.n_check_texdesc(n_texdesc_diffuse) # check generic props
n_gen_diffusemap.n_check_diffuse_source_texture(n_texdesc_diffuse.source, self.diffuse_texture_path) #check diffuse image
# normal
n_texdesc_normalmap = n_tex_prop.normal_texture
n_gen_texture.n_check_texdesc(n_texdesc_normalmap) # check generic props
n_gen_normalmap.n_check_normal_map_source_texture(n_texdesc_normalmap.source, self.normalmap_texture_path) #check diffuse image
| bsd-3-clause | 1,311,427,785,334,063,600 | 42.736527 | 135 | 0.702766 | false |
LittleBun/Personal | ics632/tutorial_sim_grid/topic2/generate_xml_bintree_and_hostfile.py | 1 | 5081 | #!/usr/bin/env python2.7
import sys
import os
import math
# Link parameters
link_latency = "10us"
link_bandwidth = 10
link_bandwidth_unit = "Gbps"
# Convenient math wrappers
def floor(x):
return int(math.floor(x))
def ceil(x):
return int(math.ceil(x))
def pow2(x):
return int(math.pow(2,x))
# XML generation functions
def issueHead():
head = ("<?xml version='1.0'?>\n"
"<!DOCTYPE platform SYSTEM \"http://simgrid.gforge.inria.fr/simgrid/simgrid.dtd\">\n"
"<platform version=\"4\">\n\n")
config_clause = ("<!-- WARNING: This <config></config> clause below\n"
"makes it so that NO COMPUTATION TIME is simulated. This is because\n"
"in this module, for pedagogic purposes, we don't want to muddy the\n"
"(simulation) waters with computational times. As a results, this\n"
"XML platform file may not be suitable for running other\n"
"simulations, unless you remove the <config></config> clause.\n"
"-->\n"
"<config>\n"
"<prop id=\"smpi/simulate-computation\" value=\"0\"></prop>\n"
"<prop id=\"smpi/running-power\" value=\"200000000000\"></prop>\n"
"</config>\n\n")
AS_head = "<AS id=\"AS0\" routing=\"Full\">\n"
return head + config_clause + AS_head
def issueTail():
return "</AS>\n</platform>\n"
def issueLink1(x):
return " <link id=\"link-"+str(x)+"\" latency=\""+str(link_latency)+"\" bandwidth=\""+str(link_bandwidth)+link_bandwidth_unit+"\"/>\n"
def issueLink2(x,y):
return " <link id=\"link-"+str(x)+"-"+str(y)+"\" latency=\""+str(link_latency)+"\" bandwidth=\""+str(link_bandwidth)+link_bandwidth_unit+"\"/>\n"
def issueLink3(x,y,bw):
return " <link id=\"link-"+str(x)+"-"+str(y)+"\" latency=\""+str(link_latency)+"\" bandwidth=\""+str(bw)+link_bandwidth_unit+"\"/>\n"
def issueHost(index):
return " <host id=\"host-"+str(index)+".hawaii.edu\" speed=\"200Gf\"/>\n"
def issueRouteHead(index1, index2):
return " <route src=\"host-"+str(index1)+".hawaii.edu\" dst=\"host-"+str(index2)+".hawaii.edu\">\n"
def issueRouteTail():
return " </route>\n"
def issueRouteLink1(x):
return "\t<link_ctn id=\"link-"+str(x)+"\"/>\n"
def issueRouteLink2(x,y):
return "\t<link_ctn id=\"link-"+str(x)+"-"+str(y)+"\"/>\n"
######################################################################
# Parse command-line arguments
if (len(sys.argv) != 2):
print >> sys.stderr, "Usage:a"+sys.argv[0]+" <num hosts>\n"
print >> sys.stderr, " Will generate a bintree_<num hosts>.xml and hostfile_<num hosts>.txt file\n"
exit(1)
num_hosts = int(sys.argv[1])
###############################################################
# Generate Binary Tree XML file
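# Note (added for clarity): hosts are numbered as in an implicit binary heap,
# i.e. host i's children are 2*i+1 and 2*i+2, so its parent is floor((i-1)/2).
# The route generation below walks both endpoints of each route up this tree
# until they meet at their lowest common ancestor.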
filename = "./bintree_"+str(num_hosts)+".xml"
fh = open(filename, 'w')
fh.write(issueHead())
# Create all hosts and links
for i in range(0,num_hosts):
fh.write(issueHost(i))
if (i*2+1 < num_hosts):
fh.write(issueLink2(i,i*2+1))
if (i*2+2 < num_hosts):
fh.write(issueLink2(i,i*2+2))
# Create all routes
for i in range(0,num_hosts):
level_i = floor(math.log(1+i,2))
for j in range(i+1,num_hosts):
fh.write(issueRouteHead(j,i))
# Host j is at the same of lower level than host i
level_j = floor(math.log(1+j,2))
current_host_path_j = j
# Go up to the same level of that of host i
for l in range(level_j,level_i,-1):
parent_host = floor(float(current_host_path_j-1)/2)
fh.write(issueRouteLink2(min(current_host_path_j,parent_host),max(current_host_path_j,parent_host)))
current_host_path_j = parent_host
# Find the common ancestor
current_host_path_i = i
while (current_host_path_j != current_host_path_i):
fh.write(issueRouteLink2(min(current_host_path_j,floor(float(current_host_path_j-1)/2)), max(current_host_path_j,floor(float(current_host_path_j-1)/2))))
current_host_path_i = floor(float(current_host_path_i-1)/2)
current_host_path_j = floor(float(current_host_path_j-1)/2)
common_ancestor = current_host_path_j
# Go back from i to the common ancestor
current_host_path_i = i
sequence = []
sequence.append(current_host_path_i)
while (current_host_path_i != common_ancestor):
parent_host = floor(float(current_host_path_i-1)/2)
sequence.append(parent_host)
current_host_path_i = parent_host
# Issue links in the common ancestor -> i order
sequence = sequence[::-1]
for k in range(0,len(sequence)-1):
fh.write(issueRouteLink2(min(sequence[k],sequence[k+1]),max(sequence[k],sequence[k+1])))
fh.write(issueRouteTail())
fh.write(issueTail())
fh.close()
print >> sys.stderr, "BinTree XML platform description file created: "+filename
###############################################################
## Generate host file
filename = "./hostfile_"+str(num_hosts)+".txt"
fh = open(filename, 'w')
for i in range(0,num_hosts):
fh.write("host-"+str(i)+".hawaii.edu\n")
fh.close()
print >> sys.stderr, "Hostfile created: "+filename
| unlicense | -6,806,824,665,409,877,000 | 34.78169 | 156 | 0.604212 | false |
shubhamchaudhary/biggboss | biggboss-checker.py | 1 | 6274 | #!/usr/bin/env python3
#
# Copyright (c) 2014 Shubham Chaudhary <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import os.path
import platform
import random
import re
import sys
import time
if sys.version_info >= (3,):
import urllib.request as urllib2
import urllib.parse as urlparse
import urllib.error as urlerror
else:
import urllib2
import urlparse
def atoi(text):
return int(text) if text.isdigit() else text
def natural_keys(text):
'''
alist.sort(key=natural_keys) sorts in human order
http://nedbatchelder.com/blog/200712/human_sorting.html
(See Toothy's implementation in the comments)
'''
return [atoi(c) for c in re.split('(\d+)', text)]
def notify_user(message=None):
''' Notify the user about a particular event with given message
'''
if not message:
message = 'Notification!!!'
#print('-'*len(message))
print('-'*int((len(message)-6)/2), 'NOTIFICATION', '-'*int((len(message)-6)/2))
print(message)
def get_page_data():
''' Get page data as string from server
'''
print('Sending request to servers of Colors . . .')
full_url = 'http://colors.in.com/in/biggboss'
full_url = 'http://colors.in.com/in/biggboss/videos/episodes'
# Send request
try:
#res.geturl(), .url=str, .status=200, .info=200, .msg=OK,
response = urllib2.urlopen(full_url)
except urlerror.HTTPError as exep:
print('The server couldn\'t fulfill the request.',
'Error code: ', exep.code)
except urlerror.URLError as exep:
print('We failed to reach a server.')
print('Reason: ', exep.reason)
else:
# everything is fine
#if verbose:
print('Data received, Decoding . . .')
web_page = str(response.read()) # More pythonic than .decode('utf-8')
return web_page
def get_link(web_page):
''' Get Latest episode link
'''
month = time.strftime('%B')
new_link_pattern = r'http://colors.in.com/in/biggboss/videos/bigg-boss-8-full-episode\d\d-' + month.lower() + r'-\d+\w\w-2014.*?.html'
#print('Checking: ', new_link_pattern)
link_reg = re.findall(new_link_pattern, web_page)
if link_reg:
#print(link_reg.group())
success_set = sorted(set(link_reg), key=natural_keys)
return success_set[-1]
def get_episode_list(web_page, new_episode_pattern=None):
''' Get latest episode list from webpage
'''
if not new_episode_pattern:
### PATTERN used by colors
#<li><a title="Bigg Boss 8, Full Episode-8, 29th September, 2014"
#href="http://colors.in.com/in/biggboss/videos/bigg-boss-8-full-episode8-29th-october-2014-69087-2.html#nav">
#http://colors.in.com/in/biggboss/videos/bigg-boss-8-full-episode23-november-14th-2014-10101036-2.html#nav
#Bigg Boss 8, Full Episode-8, 29th September, 2014</a></li>
#Bigg Boss 8, Full Episode-10, October 1st, 2014</a></li>
new_episode_pattern = time.strftime(r'%B-\d+\w\w').lower()
month = time.strftime('%B')
new_episode_pattern = r'Bigg Boss \d+, Full Episode-\d+, ' + month + r' \d+\w\w, 2014';
#new_episode_pattern = r'Bigg Boss \d+, Full Episode-\d+'
print('Checking for new episode with pattern:', new_episode_pattern)
success = re.findall(new_episode_pattern, web_page)
success_set = sorted(set(success), key=natural_keys)
return success_set
def check_biggboss_episode(new_episode_pattern=None, verbose=False):
''' Check for the latest bigg boss episode
'''
web_page = get_page_data()
if verbose:
print('Page Received:\n', web_page)
success_set = get_episode_list(web_page, new_episode_pattern)
# Parse for success or failure
print('Found:')
for item in success_set:
print('\t', item)
current_date = int(time.strftime('%d'))
current_hour = int(time.strftime('%H'))
current_month = time.strftime('%B')
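    # The episode is considered "new" if it is dated today and it is already
    # 20:00 or later, or if it is dated yesterday and it is still before 20:00
    # (i.e. the episode aired the previous evening).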
if (current_month.lower() in success_set[-1].lower() and (
(str(current_date) in success_set[-1] and
(current_hour >= 20)) or
(str(current_date-1) in success_set[-1] and
(current_hour >= 0 and current_hour < 20))
)
):
msg = 'Found new episode online'
notify_user(msg)
latest_link = get_link(web_page)
if latest_link:
print('Here\'s the link: ', latest_link)
else:
print('No new episode right now')
def main():
''' Main function - Parse command line arguments
'''
from argparse import ArgumentParser
parser = ArgumentParser(prog='BiggBoss-checker')
parser.add_argument("-p", "--pattern", type=str, dest="pattern",
help="Search for this pattern instead of default")
parser.add_argument("-v", "--verbose", dest="verbosity",
action='store_true', default=False, help='Show verbose output')
args = parser.parse_args()
# Check input
try:
check_biggboss_episode(args.pattern, verbose=args.verbosity)
except:
raise
return 0
if __name__ == '__main__':
try:
main()
if os.name == 'nt' or platform.system() == 'Windows':
input('Press Enter or Close the window to exit !')
except KeyboardInterrupt:
print('\nClosing garacefully :)', sys.exc_info()[1])
except urlerror.HTTPError:
print('HTTP Error:', sys.exc_info()[1])
except SystemExit:
pass
except:
print('Unexpected Error:', sys.exc_info()[0])
print('Details:', sys.exc_info()[1])
raise
| gpl-3.0 | 3,544,284,820,136,747,500 | 35.057471 | 138 | 0.626076 | false |
robertnishihara/ray | python/ray/tests/test_gcs_fault_tolerance.py | 1 | 3783 | import sys
import ray
import pytest
from ray.test_utils import (
generate_system_config_map,
wait_for_condition,
wait_for_pid_to_exit,
)
@ray.remote
class Increase:
def method(self, x):
return x + 2
@ray.remote
def increase(x):
return x + 1
@pytest.mark.parametrize(
"ray_start_regular", [
generate_system_config_map(
num_heartbeats_timeout=20, ping_gcs_rpc_server_max_retries=60)
],
indirect=True)
def test_gcs_server_restart(ray_start_regular):
actor1 = Increase.remote()
result = ray.get(actor1.method.remote(1))
assert result == 3
ray.worker._global_node.kill_gcs_server()
ray.worker._global_node.start_gcs_server()
result = ray.get(actor1.method.remote(7))
assert result == 9
actor2 = Increase.remote()
result = ray.get(actor2.method.remote(2))
assert result == 4
result = ray.get(increase.remote(1))
assert result == 2
@pytest.mark.parametrize(
"ray_start_regular", [
generate_system_config_map(
num_heartbeats_timeout=20, ping_gcs_rpc_server_max_retries=60)
],
indirect=True)
def test_gcs_server_restart_during_actor_creation(ray_start_regular):
ids = []
for i in range(0, 100):
actor = Increase.remote()
ids.append(actor.method.remote(1))
ray.worker._global_node.kill_gcs_server()
ray.worker._global_node.start_gcs_server()
ready, unready = ray.wait(ids, num_returns=100, timeout=240)
print("Ready objects is {}.".format(ready))
print("Unready objects is {}.".format(unready))
assert len(unready) == 0
@pytest.mark.parametrize(
"ray_start_cluster_head", [
generate_system_config_map(
num_heartbeats_timeout=20, ping_gcs_rpc_server_max_retries=60)
],
indirect=True)
def test_node_failure_detector_when_gcs_server_restart(ray_start_cluster_head):
"""Checks that the node failure detector is correct when gcs server restart.
We set the cluster to timeout nodes after 2 seconds of heartbeats. We then
kill gcs server and remove the worker node and restart gcs server again to
check that the removed node will die finally.
"""
cluster = ray_start_cluster_head
worker = cluster.add_node()
cluster.wait_for_nodes()
# Make sure both head and worker node are alive.
nodes = ray.nodes()
assert len(nodes) == 2
assert nodes[0]["alive"] and nodes[1]["alive"]
to_be_removed_node = None
for node in nodes:
if node["RayletSocketName"] == worker.raylet_socket_name:
to_be_removed_node = node
assert to_be_removed_node is not None
head_node = cluster.head_node
gcs_server_process = head_node.all_processes["gcs_server"][0].process
gcs_server_pid = gcs_server_process.pid
# Kill gcs server.
cluster.head_node.kill_gcs_server()
# Wait to prevent the gcs server process becoming zombie.
gcs_server_process.wait()
wait_for_pid_to_exit(gcs_server_pid, 1000)
raylet_process = worker.all_processes["raylet"][0].process
raylet_pid = raylet_process.pid
# Remove worker node.
cluster.remove_node(worker, allow_graceful=False)
# Wait to prevent the raylet process becoming zombie.
raylet_process.wait()
wait_for_pid_to_exit(raylet_pid)
# Restart gcs server process.
cluster.head_node.start_gcs_server()
def condition():
nodes = ray.nodes()
assert len(nodes) == 2
for node in nodes:
if node["NodeID"] == to_be_removed_node["NodeID"]:
return not node["alive"]
return False
# Wait for the removed node dead.
wait_for_condition(condition, timeout=10)
if __name__ == "__main__":
import pytest
sys.exit(pytest.main(["-v", __file__]))
| apache-2.0 | 5,112,299,766,017,386,000 | 27.877863 | 80 | 0.653978 | false |
jankim/deepnl | bin/dl-sentiwords.py | 1 | 7318 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Learn sentiment-specific word embeddings from tweets.
Author: Giuseppe Attardi
"""
import logging
import numpy as np
import argparse
from ConfigParser import ConfigParser
from itertools import chain
# allow executing from anywhere without installing the package
import sys
import os
import distutils.util
builddir = os.path.dirname(os.path.realpath(__file__)) + '/../build/lib.'
libdir = builddir + distutils.util.get_platform() + '-' + '.'.join(map(str, sys.version_info[:2]))
sys.path.append(libdir)
# local
from deepnl import *
from deepnl.extractors import *
from deepnl.reader import TweetReader
from deepnl.network import Network
from deepnl.sentiwords import SentimentTrainer
# ----------------------------------------------------------------------
# Auxiliary functions
def create_trainer(args, converter):
"""
Creates or loads a neural network according to the specified args.
"""
logger = logging.getLogger("Logger")
if args.load:
logger.info("Loading provided network...")
trainer = SentimentTrainer.load(args.load)
trainer.learning_rate = args.learning_rate
else:
logger.info('Creating new network...')
trainer = SentimentTrainer(converter, args.learning_rate,
args.window/2, args.window/2,
args.hidden, args.ngrams, args.alpha)
trainer.saver = saver(args.output, args.vectors)
logger.info("... with the following parameters:")
logger.info(trainer.nn.description())
return trainer
def saver(model_file, vectors_file):
"""Function for saving model periodically"""
def save(trainer):
# save embeddings also separately
if vectors_file:
trainer.save_vectors(vectors_file)
if model_file:
trainer.save(model_file)
return save
# ----------------------------------------------------------------------
if __name__ == '__main__':
# set the seed for replicability
np.random.seed(42)
defaults = {}
parser = argparse.ArgumentParser(description="Learn word embeddings.")
parser.add_argument('-c', '--config', dest='config_file',
help='Specify config file', metavar='FILE')
# args, remaining_argv = parser.parse_known_args()
# if args.config_file:
# config = ConfigParser.SafeConfigParser()
# config.read([args.config_file])
# defaults = dict(config.items('Defaults'))
# parser.set_defaults(**defaults)
parser.add_argument('-w', '--window', type=int, default=5,
help='Size of the word window (default 5)',
dest='window')
parser.add_argument('-s', '--embeddings-size', type=int, default=50,
help='Number of features per word (default 50)',
dest='embeddings_size')
parser.add_argument('-e', '--epochs', type=int, default=100,
help='Number of training epochs (default 100)',
dest='iterations')
parser.add_argument('-l', '--learning-rate', type=float, default=0.001,
help='Learning rate for network weights (default 0.001)',
dest='learning_rate')
parser.add_argument('-n', '--hidden', type=int, default=200,
help='Number of hidden neurons (default 200)')
parser.add_argument('--ngrams', type=int, default=2,
help='Length of ngrams (default 2)')
parser.add_argument('--alpha', type=float, default=0.5,
help='Relative weight of normal wrt sentiment score (default 0.5)')
parser.add_argument('train', type=str,
help='File with text corpus for training.')
parser.add_argument('-o', '--output', type=str, default=None,
help='File where to save the model')
parser.add_argument('--vocab', type=str, required=True,
help='Vocabulary file, either read or created')
parser.add_argument('--vectors', type=str, required=True,
help='Embeddings file, either read and updated or created')
parser.add_argument('--load', type=str, default=None,
help='Load previously saved model')
parser.add_argument('--threads', type=int, default=1,
help='Number of threads (default 1)')
parser.add_argument('--variant', type=str, default=None,
help='Either "senna" (default), "polyglot" or "word2vec".')
parser.add_argument('-v', '--verbose', help='Verbose mode',
action='store_true')
args = parser.parse_args()
log_format = '%(message)s'
log_level = logging.DEBUG if args.verbose else logging.INFO
logging.basicConfig(format=log_format, level=log_level)
logger = logging.getLogger("Logger")
config = ConfigParser()
if args.config_file:
config.read(args.config_file)
# merge args with config
reader = TweetReader(args.ngrams)
reader.read(args.train)
vocab, bigrams, trigrams = reader.create_vocabulary(reader.sentences,
min_occurrences=2)
if os.path.exists(args.vocab):
# start with the given vocabulary
base_vocab = reader.load_vocabulary(args.vocab)
if os.path.exists(args.vectors):
embeddings = Embeddings(vectors=args.vectors, vocab=base_vocab,
variant=args.variant)
else:
embeddings = Embeddings(args.embeddings_size, vocab=base_vocab,
variant=args.variant)
# add the ngrams from the corpus
embeddings.merge(vocab)
logger.info("Overriding vocabulary in %s" % args.vocab)
embeddings.save_vocabulary(args.vocab)
elif args.variant == 'word2vec' and os.path.exists(args.vectors):
embeddings = Embeddings(vectors=args.vectors,
variant=args.variant)
embeddings.merge(vocab)
else:
embeddings = Embeddings(args.embeddings_size, vocab=vocab,
variant=args.variant)
    # Assume bigrams are prefixes of trigrams; otherwise we would need to put a
    # terminator marker on the trie
trie = {}
for b in chain(bigrams, trigrams):
tmp = trie
for w in b:
tmp = tmp.setdefault(embeddings.dict[w], {})
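    # Resulting shape (illustrative): nested dicts keyed by embedding ids, e.g.
    # ('new', 'york') and ('new', 'york', 'city') both pass through
    # trie[id('new')][id('york')], the trigram adding a further {id('city'): {}} level.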
converter = Converter()
converter.add(embeddings)
trainer = create_trainer(args, converter)
report_intervals = max(args.iterations / 200, 1)
report_intervals = 10000 # DEBUG
logger.info("Starting training")
# a generator expression (can be iterated several times)
# It caches converted sentences, avoiding repeated conversions
converted_sentences = converter.generator(reader.sentences, cache=True)
trainer.train(converted_sentences, reader.polarities, trie,
args.iterations, report_intervals)
logger.info("Overriding vectors to %s" % args.vectors)
embeddings.save_vectors(args.vectors)
if args.output:
logger.info("Saving trained model to %s" % args.output)
trainer.save(args.output)
| gpl-3.0 | -4,546,892,508,208,062,000 | 37.114583 | 98 | 0.599344 | false |
cachance7/BattleQuip | battlequip/strategy.py | 1 | 4120 | from util import *
import random
class StrategyStateException(Exception):
def __init__(self, message):
super(StrategyStateException, self).__init__()
self.message = message
class Strategy(object):
"""Class to encapsulate logic of analyzing the state of a board and returning
a valid coordinate to next attack."""
    def get_coord(self, board):
"""Analyzes the state of the board and returns a Coord"""
pass
class HuntTarget(Strategy):
"""This class implements the Hunt/Target strategy outlined by Nick Berry:
http://www.datagenetics.com/blog/december32011/index.html
In this strategy, we don't care about sunk ships, only hits and misses.
"""
def __init__(self, board):
"""Initialize with empty stack -> Hunt mode. If stack is nonempty,
then object is in Target mode."""
self.stack = []
self.height = board.height
self.width = board.width
# This board will reflect 4 different states:
# 0 - untested
# 1 - to be tested
# 2 - miss
# 3 - hit
# Initialize with 0s at start
        self.board = [[0] * self.width for _ in xrange(self.height)]
def get_coord(self, last_attack=None):
if last_attack:
new_coords = self._mark_hit_or_miss(*last_attack)
# Mark new coords to be inspected and push onto stack
while len(new_coords) > 0:
c = new_coords.pop()
self._mark_inspect(c)
self.stack.append(c)
if len(self.stack) > 0:
# target mode
return self.stack.pop()
# hunt mode
return self._next_untested_random()
def _mark_inspect(self, coord):
self.board[coord.row][coord.col] = 1
def _mark_hit_or_miss(self, coord, hit, sunk):
"""Marks the coord on the board as either hit or miss and returns new
set of coords if appropriate."""
self.board[coord.row][coord.col] = 3 if hit else 2
if hit:
# N,S,W,E
new_coords = [make_coord(coord.row-1,coord.col),
make_coord(coord.row+1, coord.col),
make_coord(coord.row, coord.col-1),
make_coord(coord.row, coord.col+1)]
# Now filter based on criteria
# Within bounds
new_coords = filter(lambda x: x[0] >= 0 and x[1] >= 0, new_coords)
new_coords = filter(lambda x: x[0] < self.height and x[1] < self.width, new_coords)
# Only untested
new_coords = filter(lambda x: self.board[x.row][x.col] == 0, new_coords)
return new_coords
else:
return []
def _next_untested(self):
"""Grabs the next untested coord in W->E, N->S order."""
for r in xrange(0, self.height):
for c in xrange(0, self.width):
if self.board[r][c] == 0:
return make_coord(r,c)
raise StrategyStateException('Game is over')
def _next_untested_random(self):
"""Grabs the next untested coord randomly."""
choices = []
for r in xrange(0, self.height):
for c in xrange(0, self.width):
if self.board[r][c] == 0:
choices.append((r,c))
if len(choices) == 0:
raise StrategyStateException('Game is over')
return make_coord(choices[random.randint(0, len(choices)-1)])
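
# --- Illustrative driver loop (not part of the original module) --------------
# Shows how HuntTarget is meant to be used: the caller feeds the result of the
# previous shot back in as (coord, hit, sunk) so the strategy can switch
# between Hunt and Target mode. The `game` object and its `over()`/`fire()`
# methods are hypothetical stand-ins for the real game engine.
def _example_play_loop(game, board):
    strategy = HuntTarget(board)
    last_attack = None
    while not game.over():                      # hypothetical end-of-game check
        coord = strategy.get_coord(last_attack)
        hit, sunk = game.fire(coord)            # hypothetical engine call
        last_attack = (coord, hit, sunk)
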
class HumanStrategy(Strategy):
    def get_coord(self, board):
"""User analyzes the state of the board and inputs a letter & number
move. The move is verified before returning.
Returns:
A Coord cooresponding to the desired attack.
Raises:
InputAbortException: if user aborts
"""
user_move_str = raw_input("Enter a move: ")
try:
move = Move(user_move_str)
self.validate_move(board, move)
return move
except InvalidMoveException as ex:
print ex.message
return 'err'
| mit | -9,149,077,923,772,832,000 | 32.770492 | 95 | 0.563592 | false |
lhilt/scipy | runtests.py | 1 | 17145 | #!/usr/bin/env python
"""
runtests.py [OPTIONS] [-- ARGS]
Run tests, building the project first.
Examples::
$ python runtests.py
$ python runtests.py -s {SAMPLE_SUBMODULE}
$ python runtests.py -t {SAMPLE_TEST}
$ python runtests.py --ipython
$ python runtests.py --python somescript.py
$ python runtests.py --bench
Run a debugger:
$ gdb --args python runtests.py [...other args...]
Generate C code coverage listing under build/lcov/:
(requires http://ltp.sourceforge.net/coverage/lcov.php)
$ python runtests.py --gcov [...other args...]
$ python runtests.py --lcov-html
"""
#
# This is a generic test runner script for projects using NumPy's test
# framework. Change the following values to adapt to your project:
#
PROJECT_MODULE = "scipy"
PROJECT_ROOT_FILES = ['scipy', 'LICENSE.txt', 'setup.py']
SAMPLE_TEST = "scipy.fftpack.tests.test_real_transforms::TestIDSTIIIInt"
SAMPLE_SUBMODULE = "optimize"
EXTRA_PATH = ['/usr/lib/ccache', '/usr/lib/f90cache',
'/usr/local/lib/ccache', '/usr/local/lib/f90cache']
# ---------------------------------------------------------------------
if __doc__ is None:
__doc__ = "Run without -OO if you want usage info"
else:
__doc__ = __doc__.format(**globals())
import sys
import os
# In case we are run from the source directory, we don't want to import the
# project from there:
sys.path.pop(0)
from argparse import ArgumentParser, REMAINDER
import shutil
import subprocess
import time
import datetime
try:
from types import ModuleType as new_module
except ImportError: # old Python
from imp import new_module
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__)))
def main(argv):
parser = ArgumentParser(usage=__doc__.lstrip())
parser.add_argument("--verbose", "-v", action="count", default=1,
help="more verbosity")
parser.add_argument("--no-build", "-n", action="store_true", default=False,
help="do not build the project (use system installed version)")
parser.add_argument("--build-only", "-b", action="store_true", default=False,
help="just build, do not run any tests")
parser.add_argument("--doctests", action="store_true", default=False,
help="Run doctests in module")
parser.add_argument("--refguide-check", action="store_true", default=False,
help="Run refguide check (do not run regular tests.)")
parser.add_argument("--coverage", action="store_true", default=False,
help=("report coverage of project code. HTML output"
" goes under build/coverage"))
parser.add_argument("--gcov", action="store_true", default=False,
help=("enable C code coverage via gcov (requires GCC)."
" gcov output goes to build/**/*.gc*"))
parser.add_argument("--lcov-html", action="store_true", default=False,
help=("produce HTML for C code coverage information "
"from a previous run with --gcov. "
"HTML output goes to build/lcov/"))
parser.add_argument("--mode", "-m", default="fast",
help="'fast', 'full', or something that could be "
"passed to nosetests -A [default: fast]")
parser.add_argument("--submodule", "-s", default=None,
help="Submodule whose tests to run (cluster,"
" constants, ...)")
parser.add_argument("--pythonpath", "-p", default=None,
help="Paths to prepend to PYTHONPATH")
parser.add_argument("--tests", "-t", action='append',
help="Specify tests to run")
parser.add_argument("--python", action="store_true",
help="Start a Python shell with PYTHONPATH set")
parser.add_argument("--ipython", "-i", action="store_true",
help="Start IPython shell with PYTHONPATH set")
parser.add_argument("--shell", action="store_true",
help="Start Unix shell with PYTHONPATH set")
parser.add_argument("--debug", "-g", action="store_true",
help="Debug build")
parser.add_argument("--parallel", "-j", type=int, default=1,
help="Number of parallel jobs during build (requires "
"NumPy 1.10 or greater).")
parser.add_argument("--show-build-log", action="store_true",
help="Show build output rather than using a log file")
parser.add_argument("--bench", action="store_true",
help="Run benchmark suite instead of test suite")
parser.add_argument("--bench-compare", action="append", metavar="BEFORE",
help=("Compare benchmark results of current HEAD to"
" BEFORE. Use an additional "
"--bench-compare=COMMIT to override HEAD with"
" COMMIT. Note that you need to commit your "
"changes first!"
))
parser.add_argument("args", metavar="ARGS", default=[], nargs=REMAINDER,
help="Arguments to pass to Nose, Python or shell")
parser.add_argument("--pep8", action="store_true", default=False,
help="Perform pep8 check with pycodestyle.")
args = parser.parse_args(argv)
if args.pep8:
# os.system("flake8 scipy --ignore=F403,F841,F401,F811,F405,E121,E122,"
# "E123,E125,E126,E127,E128,E226,E231,E251,E265,E266,E302,"
# "E402,E501,E712,E721,E731,E741,W291,W293,W391,W503,W504"
# "--exclude=scipy/_lib/six.py")
os.system("pycodestyle scipy benchmarks/benchmarks")
sys.exit(0)
if args.bench_compare:
args.bench = True
args.no_build = True # ASV does the building
if args.lcov_html:
# generate C code coverage output
lcov_generate()
sys.exit(0)
if args.pythonpath:
for p in reversed(args.pythonpath.split(os.pathsep)):
sys.path.insert(0, p)
if args.gcov:
gcov_reset_counters()
if args.debug and args.bench:
print("*** Benchmarks should not be run against debug version; "
"remove -g flag ***")
if not args.no_build:
site_dir = build_project(args)
sys.path.insert(0, site_dir)
os.environ['PYTHONPATH'] = site_dir
extra_argv = args.args[:]
if extra_argv and extra_argv[0] == '--':
extra_argv = extra_argv[1:]
if args.python:
if extra_argv:
# Don't use subprocess, since we don't want to include the
# current path in PYTHONPATH.
sys.argv = extra_argv
with open(extra_argv[0], 'r') as f:
script = f.read()
sys.modules['__main__'] = new_module('__main__')
ns = dict(__name__='__main__',
__file__=extra_argv[0])
exec_(script, ns)
sys.exit(0)
else:
import code
code.interact()
sys.exit(0)
if args.ipython:
import IPython
IPython.embed(user_ns={})
sys.exit(0)
if args.shell:
shell = os.environ.get('SHELL', 'sh')
print("Spawning a Unix shell...")
os.execv(shell, [shell] + extra_argv)
sys.exit(1)
if args.coverage:
dst_dir = os.path.join(ROOT_DIR, 'build', 'coverage')
fn = os.path.join(dst_dir, 'coverage_html.js')
if os.path.isdir(dst_dir) and os.path.isfile(fn):
shutil.rmtree(dst_dir)
extra_argv += ['--cov-report=html:' + dst_dir]
if args.refguide_check:
cmd = [os.path.join(ROOT_DIR, 'tools', 'refguide_check.py'),
'--doctests']
if args.submodule:
cmd += [args.submodule]
os.execv(sys.executable, [sys.executable] + cmd)
sys.exit(0)
if args.bench:
# Run ASV
items = extra_argv
if args.tests:
items += args.tests
if args.submodule:
items += [args.submodule]
bench_args = []
for a in items:
bench_args.extend(['--bench', a])
if not args.bench_compare:
cmd = [os.path.join(ROOT_DIR, 'benchmarks', 'run.py'),
'run', '-n', '-e', '--python=same'] + bench_args
os.execv(sys.executable, [sys.executable] + cmd)
sys.exit(1)
else:
if len(args.bench_compare) == 1:
commit_a = args.bench_compare[0]
commit_b = 'HEAD'
elif len(args.bench_compare) == 2:
commit_a, commit_b = args.bench_compare
else:
p.error("Too many commits to compare benchmarks for")
# Check for uncommitted files
if commit_b == 'HEAD':
r1 = subprocess.call(['git', 'diff-index', '--quiet',
'--cached', 'HEAD'])
r2 = subprocess.call(['git', 'diff-files', '--quiet'])
if r1 != 0 or r2 != 0:
print("*"*80)
print("WARNING: you have uncommitted changes --- "
"these will NOT be benchmarked!")
print("*"*80)
# Fix commit ids (HEAD is local to current repo)
p = subprocess.Popen(['git', 'rev-parse', commit_b],
stdout=subprocess.PIPE)
out, err = p.communicate()
commit_b = out.strip()
p = subprocess.Popen(['git', 'rev-parse', commit_a],
stdout=subprocess.PIPE)
out, err = p.communicate()
commit_a = out.strip()
cmd = [os.path.join(ROOT_DIR, 'benchmarks', 'run.py'),
'continuous', '-e', '-f', '1.05',
commit_a, commit_b] + bench_args
os.execv(sys.executable, [sys.executable] + cmd)
sys.exit(1)
if args.build_only:
sys.exit(0)
else:
__import__(PROJECT_MODULE)
test = sys.modules[PROJECT_MODULE].test
if args.submodule:
tests = [PROJECT_MODULE + "." + args.submodule]
elif args.tests:
tests = args.tests
else:
tests = None
# Run the tests
if not args.no_build:
test_dir = site_dir
else:
test_dir = os.path.join(ROOT_DIR, 'build', 'test')
if not os.path.isdir(test_dir):
os.makedirs(test_dir)
shutil.copyfile(os.path.join(ROOT_DIR, '.coveragerc'),
os.path.join(test_dir, '.coveragerc'))
cwd = os.getcwd()
try:
os.chdir(test_dir)
result = test(args.mode,
verbose=args.verbose,
extra_argv=extra_argv,
doctests=args.doctests,
coverage=args.coverage,
tests=tests,
parallel=args.parallel)
finally:
os.chdir(cwd)
if isinstance(result, bool):
sys.exit(0 if result else 1)
elif result.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
def build_project(args):
"""
Build a dev version of the project.
Returns
-------
site_dir
site-packages directory where it was installed
"""
root_ok = [os.path.exists(os.path.join(ROOT_DIR, fn))
for fn in PROJECT_ROOT_FILES]
if not all(root_ok):
print("To build the project, run runtests.py in "
"git checkout or unpacked source")
sys.exit(1)
dst_dir = os.path.join(ROOT_DIR, 'build', 'testenv')
env = dict(os.environ)
cmd = [sys.executable, 'setup.py']
# Always use ccache, if installed
env['PATH'] = os.pathsep.join(EXTRA_PATH +
env.get('PATH', '').split(os.pathsep))
if args.debug or args.gcov:
# assume everyone uses gcc/gfortran
env['OPT'] = '-O0 -ggdb'
env['FOPT'] = '-O0 -ggdb'
if args.gcov:
import distutils.sysconfig
cvars = distutils.sysconfig.get_config_vars()
env['OPT'] = '-O0 -ggdb'
env['FOPT'] = '-O0 -ggdb'
env['CC'] = cvars['CC'] + ' --coverage'
env['CXX'] = cvars['CXX'] + ' --coverage'
env['F77'] = 'gfortran --coverage '
env['F90'] = 'gfortran --coverage '
env['LDSHARED'] = cvars['LDSHARED'] + ' --coverage'
env['LDFLAGS'] = " ".join(cvars['LDSHARED'].split()[1:]) +\
' --coverage'
cmd += ['build']
if args.parallel > 1:
cmd += ['-j', str(args.parallel)]
# Install; avoid producing eggs so scipy can be imported from dst_dir.
cmd += ['install', '--prefix=' + dst_dir,
'--single-version-externally-managed',
'--record=' + dst_dir + 'tmp_install_log.txt']
from distutils.sysconfig import get_python_lib
site_dir = get_python_lib(prefix=dst_dir, plat_specific=True)
# easy_install won't install to a path that Python by default cannot see
# and isn't on the PYTHONPATH. Plus, it has to exist.
if not os.path.exists(site_dir):
os.makedirs(site_dir)
env['PYTHONPATH'] = site_dir
log_filename = os.path.join(ROOT_DIR, 'build.log')
start_time = datetime.datetime.now()
if args.show_build_log:
ret = subprocess.call(cmd, env=env, cwd=ROOT_DIR)
else:
log_filename = os.path.join(ROOT_DIR, 'build.log')
print("Building, see build.log...")
with open(log_filename, 'w') as log:
p = subprocess.Popen(cmd, env=env, stdout=log, stderr=log,
cwd=ROOT_DIR)
try:
# Wait for it to finish, and print something to indicate the
# process is alive, but only if the log file has grown (to
                # allow continuous integration environments to kill a hanging
                # process if it stops producing output)
last_blip = time.time()
last_log_size = os.stat(log_filename).st_size
while p.poll() is None:
time.sleep(0.5)
if time.time() - last_blip > 60:
log_size = os.stat(log_filename).st_size
if log_size > last_log_size:
elapsed = datetime.datetime.now() - start_time
print(" ... build in progress ({0} "
"elapsed)".format(elapsed))
last_blip = time.time()
last_log_size = log_size
ret = p.wait()
except: # noqa: E722
p.terminate()
raise
elapsed = datetime.datetime.now() - start_time
if ret == 0:
print("Build OK ({0} elapsed)".format(elapsed))
else:
if not args.show_build_log:
with open(log_filename, 'r') as f:
print(f.read())
print("Build failed! ({0} elapsed)".format(elapsed))
sys.exit(1)
return site_dir
#
# GCOV support
#
def gcov_reset_counters():
print("Removing previous GCOV .gcda files...")
build_dir = os.path.join(ROOT_DIR, 'build')
for dirpath, dirnames, filenames in os.walk(build_dir):
for fn in filenames:
if fn.endswith('.gcda') or fn.endswith('.da'):
pth = os.path.join(dirpath, fn)
os.unlink(pth)
#
# LCOV support
#
LCOV_OUTPUT_FILE = os.path.join(ROOT_DIR, 'build', 'lcov.out')
LCOV_HTML_DIR = os.path.join(ROOT_DIR, 'build', 'lcov')
def lcov_generate():
try:
os.unlink(LCOV_OUTPUT_FILE)
except OSError:
pass
try:
shutil.rmtree(LCOV_HTML_DIR)
except OSError:
pass
print("Capturing lcov info...")
subprocess.call(['lcov', '-q', '-c',
'-d', os.path.join(ROOT_DIR, 'build'),
'-b', ROOT_DIR,
'--output-file', LCOV_OUTPUT_FILE])
print("Generating lcov HTML output...")
ret = subprocess.call(['genhtml', '-q', LCOV_OUTPUT_FILE,
'--output-directory', LCOV_HTML_DIR,
'--legend', '--highlight'])
if ret != 0:
print("genhtml failed!")
else:
print("HTML output generated under build/lcov/")
#
# Python 3 support
#
if sys.version_info[0] >= 3:
import builtins
exec_ = getattr(builtins, "exec")
else:
def exec_(code, globs=None, locs=None):
"""Execute code in a namespace."""
if globs is None:
frame = sys._getframe(1)
globs = frame.f_globals
if locs is None:
locs = frame.f_locals
del frame
elif locs is None:
locs = globs
exec("""exec code in globs, locs""")
if __name__ == "__main__":
main(argv=sys.argv[1:])
| bsd-3-clause | -2,131,450,785,985,568,300 | 34.205339 | 87 | 0.533508 | false |
mahajrod/MACE | scripts/old/test_clustering_thresholds.py | 1 | 2709 | #!/usr/bin/env python
__author__ = 'Sergei F. Kliver'
import os
import sys
import argparse
from MACE.Parsers.VCF import CollectionVCF
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input_file", action="store", dest="input", required=True,
help="Input vcf file with mutations.")
parser.add_argument("-s", "--sample_name", action="store", dest="sample_name", default="unknown_sample",
help="Name of sample")
parser.add_argument("-y", "--testing_directory", action="store", dest="test_dir", default="threshold_test",
help="Directory where to output results of threshold test")
parser.add_argument("-e", "--extracting_method", action="store", dest="extracting_method", required=True,
help="Method used to extract clusters")
parser.add_argument("-p", "--scaffold_prefix", action="store", dest="scaffold_prefix", default="",
help="Prefix to write in picture before names of region/scaffold/chromosome. "
"Default: no prefix")
parser.add_argument("-d", "--distance_type", action="store", dest="distance_type", default="average",
help="Method used to calculate distance between clusters. Default: average")
parser.add_argument("-c", "--count_singletons", action="store_true", dest="count_singletons",
help="Draw plot of number of all clusters including singletons. Don't use it for samples "
"with low density of mutations")
parser.add_argument("-n", "--min_threshold", action="store", dest="min_threshold", required=True, type=int,
help="Minimun threshold for extracting of clusters.")
parser.add_argument("-x", "--max_threshold", action="store", dest="max_threshold", required=True, type=int,
help="Maximum threshold for extracting of clusters.")
parser.add_argument("-u", "--number_of_tests", action="store", dest="number_of_tests", required=True, type=int,
help="Number of tests")
args = parser.parse_args()
mutations = CollectionVCF(in_file=args.input, from_file=True)
mutations.test_thresholds(extracting_method=args.extracting_method,
threshold=(args.min_threshold, args.max_threshold, args.number_of_tests),
cluster_distance=args.distance_type,
dendrogramm_max_y=2000,
sample_name=args.sample_name,
save_clustering=False,
testing_dir=args.test_dir,
count_singletons=args.count_singletons,
scaffold_prefix=args.scaffold_prefix)
| apache-2.0 | 4,077,634,745,579,346,400 | 56.638298 | 111 | 0.619786 | false |
YourCyborg/Sun-RPI | docs/sphinx/src2rest/src2rest.py | 1 | 2118 | #! /usr/bin/python
#
# Auto-generate reST documentation for Sphinx from Evennia source
# code.
#
# Uses etinenned's sphinx autopackage script. Install it to folder
# "autogen" in this same directory:
#
# hg clone https://bitbucket.org/etienned/sphinx-autopackage-script autogen
#
# Create a directory tree "code/" containing one directory for every
# package in the PACKAGE dictionary below. Make sure EVENNIA_DIR
# points to an Evennia root dir. Then just run this script. A new
# folder sphinx/source/code will be created with the reST sources.
#
# Note - this is not working very well at the moment: not all sources
# seem to be properly detected and you get lots of errors when
# compiling. To nevertheless make a link to the code from the doc
# front page, edit docs/sphinx/sources/index.rst to reference
# code/modules.
#
import os, subprocess, shutil
EVENNIA_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
SPHINX_DIR = os.path.join(os.path.join(EVENNIA_DIR, "docs"), "sphinx")
SPHINX_SRC_DIR = os.path.join(SPHINX_DIR, "source")
SPHINX_CODE_DIR = os.path.join(SPHINX_SRC_DIR, "code")
CONVERT_DIR = os.path.join(SPHINX_DIR, 'src2rest')
AUTOGEN_EXE = os.path.join(CONVERT_DIR, os.path.join("autogen", "generate_modules.py"))
def src2rest():
"""
    Convert the Evennia source tree into reST documentation sources.
"""
try:
shutil.rmtree(SPHINX_CODE_DIR)
print "Emptied old %s." % SPHINX_CODE_DIR
except OSError:
pass
os.mkdir(SPHINX_CODE_DIR)
inpath = EVENNIA_DIR
outpath = SPHINX_CODE_DIR
excludes = [r".*/migrations/.*", r"evennia\.py$", r"manage\.py$",
r"runner\.py$", r"server.py$", r"portal.py$"]
subprocess.call(["python", AUTOGEN_EXE,
"-n", "Evennia",
"-d", outpath,
"-s", "rst",
"-f",
inpath] + excludes)
if __name__ == '__main__':
try:
src2rest()
except Exception, e:
print e
print "Make sure to read the header of this file so that it's properly set up."
| bsd-3-clause | 6,890,642,337,181,281,000 | 32.09375 | 107 | 0.635505 | false |
BartGo/bottle-stack | nonpip-dl.py | 1 | 1774 | """ Download external (non-Python) components needed by the project
"""
import os
import shutil
import sys
import requests
import zipfile
sys.path.append("./lib")
# http://stackoverflow.com/questions/16694907/how-to-download-large-file-in-python-with-requests-py
def download_file(url):
"""Download a file"""
local_filename = url.split('/')[-1]
# NOTE the stream=True parameter
r = requests.get(url, stream=True)
with open(local_filename, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
return local_filename
# http://stackoverflow.com/questions/9431918/extracting-zip-file-contents-to-specific-directory-in-python-2-7
def unzip_file(file_in, file_out):
"""Unzip a file"""
with open(file_in, 'rb') as fh:
z = zipfile.ZipFile(fh)
for name in z.namelist():
outpath = file_out
z.extract(name, outpath)
print "Collecting assets (jquery, skeleton-css)"
if True:
shutil.rmtree("app/static/assets/jquery", True)
shutil.os.mkdir("app/static/assets/jquery")
shutil.os.mkdir("app/static/assets/jquery/js")
download_file("http://code.jquery.com/jquery-1.11.3.min.js")
shutil.move("jquery-1.11.3.min.js", "app/static/assets/jquery/js/jquery-1.11.3.min.js")
if True:
shutil.rmtree("app/static/assets/bootstrap", True)
download_file("https://github.com/twbs/bootstrap/releases/download/v3.3.4/bootstrap-3.3.4-dist.zip")
unzip_file("bootstrap-3.3.4-dist.zip", ".")
os.remove("bootstrap-3.3.4-dist.zip")
shutil.move("bootstrap-3.3.4-dist", "app/static/assets/bootstrap")
print "Completed"
| mit | 1,648,368,742,869,832,000 | 31.851852 | 109 | 0.669109 | false |
wondie/batch_gps_importer | batch_gps_importer.py | 1 | 3604 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Batch GPS Importer
A QGIS plugin
Initializer of the plugin.
-------------------
begin : 2017-03-18
copyright : (C) 2017 by Wondimagegn Tesfaye Beshah
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 3 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt5.QtCore import QSettings, QFileInfo, QTranslator, qVersion, \
QCoreApplication
from PyQt5.QtWidgets import QAction
from PyQt5.QtGui import QIcon
from .ui.gps_importer_starter import GpsImporter
from . import PLUGIN_DIR
class BatchGpsImporter(object):
"""
BatchGpsImport initializes the whole plugin and adds the plugin on toolbar
and Vector menu of GGIS.
"""
def __init__(self, iface):
"""
Initializes iface and importer object.
        :param iface: QGIS interface instance passed in by QGIS
        :type iface: QgisInterface
"""
self.iface = iface
self.importer = None
# Setup locale
locale_path = ''
locale = QSettings().value("locale/userLocale")[0:2]
if QFileInfo(PLUGIN_DIR).exists():
# Replace forward slash with backslash
# PLUGIN_DIR = PLUGIN_DIR.replace("\\", "/")
locale_path = PLUGIN_DIR + "/i18n/batch_gps_importer_%s.qm" % (locale,)
if QFileInfo(locale_path).exists():
self.translator = QTranslator()
self.translator.load(locale_path)
if qVersion() > '4.3.3':
QCoreApplication.installTranslator(self.translator)
def initGui(self):
"""
Initializes the plugin GUI.
"""
self.action = QAction(
QIcon('{}/images/batch.png'.format(PLUGIN_DIR)),
'Batch GPS Importer', self.iface.mainWindow()
)
self.action.setObjectName('gps_importer_action')
self.action.setWhatsThis('Configuration for Batch GPS Importer')
self.action.setStatusTip('Batch import GPX files')
self.action.triggered.connect(self.run)
# add toolbar button and menu item
self.iface.addToolBarIcon(self.action)
self.iface.addPluginToVectorMenu('&Batch GPS Importer', self.action)
def unload(self):
"""
Removes the plugin properly.
"""
# remove the plugin menu item and icon
self.iface.removePluginMenu('&Batch GPS Importer', self.action)
self.iface.removeToolBarIcon(self.action)
# disconnect form signal of the canvas
self.action.triggered.disconnect(self.run)
def run(self):
"""
Starts the plugin GUI.
"""
if self.importer is None:
self.importer = GpsImporter(self.iface)
self.importer.show()
else:
self.importer.show()
self.importer.activateWindow()
| gpl-3.0 | 602,411,071,137,016,400 | 36.936842 | 83 | 0.508879 | false |
pyfa-org/eos | tests/integration/customization/customization/test_ancillary_armor_repairer.py | 1 | 13565 | # ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos import Charge
from eos import ModuleLow
from eos import State
from eos.const.eos import ModAffecteeFilter
from eos.const.eos import ModDomain
from eos.const.eos import ModOperator
from eos.const.eve import AttrId
from eos.const.eve import EffectCategoryId
from eos.const.eve import EffectId
from eos.const.eve import TypeId
from tests.integration.customization.testcase import CustomizationTestCase
class TestAncillaryArmorRepairer(CustomizationTestCase):
def setUp(self):
CustomizationTestCase.setUp(self)
self.mkattr(attr_id=AttrId.charged_armor_dmg_mult)
self.mkattr(attr_id=AttrId.armor_dmg_amount, stackable=False)
def test_local_aar(self):
effect = self.mkeffect(
effect_id=EffectId.fueled_armor_repair,
category_id=EffectCategoryId.active)
aar = ModuleLow(
self.mktype(
attrs={
AttrId.armor_dmg_amount: 50,
AttrId.charged_armor_dmg_mult: 3},
effects=[effect],
default_effect=effect).id,
state=State.active)
aar.charge = Charge(self.mktype(type_id=TypeId.nanite_repair_paste).id)
self.fit.modules.low.append(aar)
# Verification
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 150)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_remote_aar(self):
effect = self.mkeffect(
effect_id=EffectId.ship_module_ancillary_remote_armor_repairer,
category_id=EffectCategoryId.target)
raar = ModuleLow(
self.mktype(
attrs={
AttrId.armor_dmg_amount: 50,
AttrId.charged_armor_dmg_mult: 3},
effects=[effect],
default_effect=effect).id,
state=State.active)
raar.charge = Charge(self.mktype(type_id=TypeId.nanite_repair_paste).id)
self.fit.modules.low.append(raar)
# Verification
self.assertAlmostEqual(raar.attrs[AttrId.armor_dmg_amount], 150)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_rep_amount_state(self):
# Multiplier should be applied even when module is offline
effect = self.mkeffect(
effect_id=EffectId.fueled_armor_repair,
category_id=EffectCategoryId.active)
aar = ModuleLow(
self.mktype(
attrs={
AttrId.armor_dmg_amount: 50,
AttrId.charged_armor_dmg_mult: 3},
effects=[effect],
default_effect=effect).id,
state=State.offline)
aar.charge = Charge(self.mktype(type_id=TypeId.nanite_repair_paste).id)
self.fit.modules.low.append(aar)
# Verification
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 150)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_rep_amount_aar_attr_mult_absent(self):
effect = self.mkeffect(
effect_id=EffectId.fueled_armor_repair,
category_id=EffectCategoryId.active)
aar = ModuleLow(
self.mktype(
attrs={AttrId.armor_dmg_amount: 50},
effects=[effect],
default_effect=effect).id,
state=State.active)
aar.charge = Charge(self.mktype(type_id=TypeId.nanite_repair_paste).id)
self.fit.modules.low.append(aar)
# Verification
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 50)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_rep_amount_aar_attr_rep_amount_absent(self):
effect = self.mkeffect(
effect_id=EffectId.fueled_armor_repair,
category_id=EffectCategoryId.active)
aar = ModuleLow(
self.mktype(
attrs={AttrId.charged_armor_dmg_mult: 3},
effects=[effect],
default_effect=effect).id,
state=State.active)
aar.charge = Charge(self.mktype(type_id=TypeId.nanite_repair_paste).id)
self.fit.modules.low.append(aar)
# Verification
with self.assertRaises(KeyError):
aar.attrs[AttrId.armor_dmg_amount]
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_rep_amount_charge_absent(self):
effect = self.mkeffect(
effect_id=EffectId.fueled_armor_repair,
category_id=EffectCategoryId.active)
aar = ModuleLow(
self.mktype(
attrs={
AttrId.armor_dmg_amount: 50,
AttrId.charged_armor_dmg_mult: 3},
effects=[effect],
default_effect=effect).id,
state=State.active)
self.fit.modules.low.append(aar)
# Verification
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 50)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_rep_amount_charge_not_loaded(self):
effect = self.mkeffect(
effect_id=EffectId.fueled_armor_repair,
category_id=EffectCategoryId.active)
aar = ModuleLow(
self.mktype(
attrs={
AttrId.armor_dmg_amount: 50,
AttrId.charged_armor_dmg_mult: 3},
effects=[effect],
default_effect=effect).id,
state=State.active)
aar.charge = Charge(TypeId.nanite_repair_paste)
self.fit.modules.low.append(aar)
# Verification
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 150)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_rep_amount_recalc_aar_attr_mult_changed(self):
multmod_src_attr = self.mkattr()
multmod_mod = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=AttrId.charged_armor_dmg_mult,
operator=ModOperator.post_percent,
affector_attr_id=multmod_src_attr.id)
multmod_effect = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[multmod_mod])
multmod = ModuleLow(self.mktype(
attrs={multmod_src_attr.id: 50},
effects=[multmod_effect]).id)
effect = self.mkeffect(
effect_id=EffectId.fueled_armor_repair,
category_id=EffectCategoryId.active)
aar = ModuleLow(
self.mktype(
attrs={
AttrId.armor_dmg_amount: 50,
AttrId.charged_armor_dmg_mult: 3},
effects=[effect],
default_effect=effect).id,
state=State.active)
aar.charge = Charge(self.mktype(type_id=TypeId.nanite_repair_paste).id)
self.fit.modules.low.append(aar)
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 150)
# Action
self.fit.modules.low.append(multmod)
# Verification
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 225)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_rep_amount_recalc_charge_to_charge_absent(self):
effect = self.mkeffect(
effect_id=EffectId.fueled_armor_repair,
category_id=EffectCategoryId.active)
aar = ModuleLow(
self.mktype(
attrs={
AttrId.armor_dmg_amount: 50,
AttrId.charged_armor_dmg_mult: 3},
effects=[effect],
default_effect=effect).id,
state=State.active)
aar.charge = Charge(self.mktype(type_id=TypeId.nanite_repair_paste).id)
self.fit.modules.low.append(aar)
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 150)
# Action
aar.charge = None
# Verification
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 50)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_rep_amount_recalc_charge_absent_to_charge(self):
effect = self.mkeffect(
effect_id=EffectId.fueled_armor_repair,
category_id=EffectCategoryId.active)
aar = ModuleLow(
self.mktype(
attrs={
AttrId.armor_dmg_amount: 50,
AttrId.charged_armor_dmg_mult: 3},
effects=[effect],
default_effect=effect).id,
state=State.active)
self.fit.modules.low.append(aar)
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 50)
# Action
aar.charge = Charge(self.mktype(type_id=TypeId.nanite_repair_paste).id)
# Verification
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 150)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_rep_amount_stacking(self):
# Multiplier shouldn't be stacking penalized against any other
# multiplicative modifications
multmod_src_attr_mul = self.mkattr()
multmod_src_attr_div = self.mkattr()
multmod_src_attr_perc = self.mkattr()
multmod_mod_premul = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=AttrId.armor_dmg_amount,
operator=ModOperator.pre_mul,
affector_attr_id=multmod_src_attr_mul.id)
multmod_mod_prediv = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=AttrId.armor_dmg_amount,
operator=ModOperator.pre_div,
affector_attr_id=multmod_src_attr_div.id)
multmod_mod_postmul = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=AttrId.armor_dmg_amount,
operator=ModOperator.post_mul,
affector_attr_id=multmod_src_attr_mul.id)
multmod_mod_postdiv = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=AttrId.armor_dmg_amount,
operator=ModOperator.post_div,
affector_attr_id=multmod_src_attr_div.id)
multmod_mod_postperc = self.mkmod(
affectee_filter=ModAffecteeFilter.domain,
affectee_domain=ModDomain.ship,
affectee_attr_id=AttrId.armor_dmg_amount,
operator=ModOperator.post_percent,
affector_attr_id=multmod_src_attr_perc.id)
multmod_effect = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[
multmod_mod_premul,
multmod_mod_prediv,
multmod_mod_postmul,
multmod_mod_postdiv,
multmod_mod_postperc])
multmod = ModuleLow(self.mktype(
attrs={
multmod_src_attr_mul.id: 2,
multmod_src_attr_div.id: 0.5,
multmod_src_attr_perc.id: 100},
effects=[multmod_effect]).id)
effect = self.mkeffect(
effect_id=EffectId.fueled_armor_repair,
category_id=EffectCategoryId.active)
aar = ModuleLow(
self.mktype(
attrs={
AttrId.armor_dmg_amount: 50,
AttrId.charged_armor_dmg_mult: 3},
effects=[effect],
default_effect=effect).id,
state=State.active)
aar.charge = Charge(self.mktype(type_id=TypeId.nanite_repair_paste).id)
self.fit.modules.low.append(aar)
self.fit.modules.low.append(multmod)
# Verification
# If paste multiplier is not stacking penalized against any mods, final
# result will be 50 * 3 * 2 ^ 5
self.assertAlmostEqual(aar.attrs[AttrId.armor_dmg_amount], 4800)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
| lgpl-3.0 | 4,200,538,623,229,928,000 | 40.106061 | 80 | 0.6 | false |
ggood/adsbTheremin | aircraft_map.py | 1 | 6296 | # aircraft_map: maintains a list of aircraft "seen" by an ADSB
# receiver.
import math
import time
DEFAULT_PURGE_TIME = 120 # Forget planes not heard from in this many seconds
DEFAULT_PURGE_INTERVAL = 1 # How often to purge stale aircraft
EARTH_RADIUS = 6371000 # Earth's radius in meters
class Aircraft(object):
"""Represents a single aircraft"""
def __init__(self, id):
self._id = id
self._altitude = 0
self._latitude = 0.0
self._longitude = 0.0
self._update = 0.0
@property
def id(self):
return self._id
@property
def altitude(self):
return self._altitude
@property
def latitude(self):
return self._latitude
@property
def longitude(self):
return self._longitude
def __str__(self):
return "%s: alt %d lat %f lon %f" % (
self.id, self.altitude, self.latitude, self.longitude)
def __repr__(self):
return self.__str__()
def update(self, altitude, latitude, longitude):
"""Update an aircraft's altitude, latitude, and longitude"""
self._altitude = altitude
self._latitude = latitude
self._longitude = longitude
self._update = time.time()
def distance_to(self, lat, lon):
"""
Compute the distance from the aircraft to the point given by
lat and lon. This does not consider the aircraft's altitude. In
other words, this computes the distance to the projection
of the aircraft on the ground.
"""
d_lat = math.radians(lat - self._latitude)
d_lon = math.radians(lon - self._longitude)
lat1_rad = math.radians(self._latitude)
lat2_rad = math.radians(lat)
a = (math.sin(d_lat/2) * math.sin(d_lat/2) +
math.sin(d_lon/2) * math.sin(d_lon/2) *
math.cos(lat1_rad) * math.cos(lat2_rad))
        c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
        d = EARTH_RADIUS * c
return d
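    # Sanity check for the haversine computation above (illustrative): two
    # points one degree of latitude apart at the same longitude should give
    # a distance of roughly 111 km, i.e. about 111000 meters.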
def bearing_from(self, lat, lon):
"""
Compute the bearing, in degrees, of the aircraft as seen from
the position given by lat and lon.
"""
lat1_rad = math.radians(self._latitude)
long1_rad = math.radians(self._longitude)
lat2_rad = math.radians(lat)
long2_rad = math.radians(lon)
d_lon = long2_rad - long1_rad
d_phi = math.log(
math.tan(
lat2_rad/2.0+math.pi/4.0)/math.tan(lat1_rad/2.0+math.pi/4.0))
        if abs(d_lon) > math.pi:
            # Wrap the longitude difference into the (-pi, pi) range
            if d_lon > 0.0:
                d_lon = -(2.0 * math.pi - d_lon)
            else:
                d_lon = (2.0 * math.pi + d_lon)
        bearing = (math.degrees(math.atan2(d_lon, d_phi)) + 360.0) % 360.0
return bearing
class AircraftMap(object):
"""
This class keeps track of aircraft heard by an ADSB receiver.
You can feed all lines returned by the ADSB receiver into this
code, and it will consume all airborne position messages and update
the list of aircraft.
Aircraft not heard from in purge_age seconds will be discarded.
"""
def __init__(self, latitude, longitude, purge_age=DEFAULT_PURGE_TIME):
"""
Arguments:
latitude: the latitude, in fractional degrees, of the observer.
longitude: the longitude, in fractional degrees, of the observer.
purge_age: the time, in seconds, after which aircraft will be
discarded if no position updates have been seen.
"""
self._aircraft = {} # ADSB ID -> aircraft
self._latitude = latitude
self._longitude = longitude
self._purge_age = purge_age
self._last_purge = time.time()
def update(self, line):
self._purge()
parts = line.split(",")
if parts and (parts[0] == "MSG"):
if parts[1] == "3":
# Airborne position message
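                # Illustrative BaseStation/SBS-1 line for the field layout
                # assumed below (values are made up):
                #   MSG,3,1,1,4CA1B2,1,2016/01/01,12:00:00.000,2016/01/01,
                #   12:00:00.000,,11000,,,52.2960,4.7720,,,,,,0
                # parts[4] is the aircraft's hex ID, parts[11] the altitude,
                # and parts[14]/parts[15] the latitude/longitude.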
try:
aircraft_id = parts[4]
try:
altitude = int(parts[11])
lat = float(parts[14])
lon = float(parts[15])
aircraft = self._aircraft.get(aircraft_id)
if aircraft is None:
aircraft = Aircraft(aircraft_id)
self._aircraft[aircraft_id] = aircraft
aircraft.update(altitude, lat, lon)
except ValueError:
# Some position messages omit the lat/lon. Ignore.
return
except:
print("big oops: %s" % line)
raise
def _purge(self):
if time.time() - self._last_purge < DEFAULT_PURGE_INTERVAL:
return
n = 0
        # Iterate over a copy so entries can be deleted during the loop
        for id, aircraft in list(self._aircraft.items()):
if aircraft._update < time.time() - self._purge_age:
del self._aircraft[id]
n += 1
#print("purged %d aircraft, %d remaining" % (n, len(self._aircraft)))
self._last_purge = time.time()
def print_summary(self):
print("%d aircraft" % len(self._aircraft))
def closest(self, count, min_altitude=0, max_altitude=100000):
"""
Return the closest [count] aircraft. If min_altitude or
max_altitude is provided, limit the retured results to
aircraft in that range. May return fewer than <count>
aircraft.
"""
        # Build a distance -> aircraft mapping, then walk it in order of
        # increasing distance.
ret = []
dist_map = {} # distance -> aircraft
for id, aircraft in self._aircraft.items():
dist = aircraft.distance_to(self._latitude, self._longitude)
dist_map[dist] = aircraft
closest = sorted(dist_map.keys())
for d in closest:
aircraft = dist_map[d]
if (aircraft.altitude <= max_altitude and
aircraft.altitude >= min_altitude):
ret.append(aircraft)
if len(ret) >= count:
return ret
return ret
def count(self):
"""
Return the count of aircraft in the map.
"""
return len(self._aircraft)
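# Minimal usage sketch (illustrative only; the coordinates are made up and
# the receiver loop is whatever yields SBS-1 text lines, e.g. a socket
# connected to dump1090's port 30003):
#
#     amap = AircraftMap(37.7749, -122.4194)
#     for line in receiver_lines:
#         amap.update(line)
#     for aircraft in amap.closest(5, max_altitude=10000):
#         print(aircraft, aircraft.bearing_from(37.7749, -122.4194))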
| unlicense | 2,571,144,089,346,902,500 | 33.032432 | 77 | 0.546537 | false |
cmdunkers/DeeperMind | PythonEnv/lib/python2.7/site-packages/theano/sandbox/gpuarray/opt.py | 1 | 35148 | import copy
import numpy
import logging
from six.moves import xrange
import theano
from theano import tensor, scalar, gof
from theano.compile import optdb
from theano.compile.ops import shape_i
from theano.gof import (local_optimizer, EquilibriumDB,
SequenceDB, Optimizer, toolbox)
from theano.gof.optdb import LocalGroupDB
from theano.scalar.basic import Scalar, Pow, Cast
from theano.scan_module import scan_utils, scan_op, scan_opt
from theano.tensor.nnet.conv import ConvOp
from theano.tests.breakpoint import PdbBreakpoint
from .type import (GpuArrayType, GpuArrayConstant, get_context,
ContextNotDefined)
from .basic_ops import (as_gpuarray_variable, infer_context_name,
host_from_gpu, GpuToGpu,
HostFromGpu, GpuFromHost,
GpuSplit, GpuContiguous,
GpuAlloc, GpuAllocEmpty, GpuReshape,
GpuEye, gpu_join, GpuJoin)
from .blas import (gpu_dot22, GpuGemv, GpuGemm, GpuGer,
gpugemm_no_inplace)
from .conv import GpuConv
from .nnet import (GpuCrossentropySoftmaxArgmax1HotWithBias,
GpuCrossentropySoftmax1HotWithBiasDx,
GpuSoftmaxWithBias, GpuSoftmax)
from .elemwise import (GpuElemwise, GpuDimShuffle, GpuCAReduceCuda,
GpuCAReduceCPY)
from .subtensor import (GpuIncSubtensor, GpuSubtensor,
GpuAdvancedSubtensor1,
GpuAdvancedIncSubtensor1,
GpuAdvancedIncSubtensor1_dev20)
from .opt_util import alpha_merge, output_merge
_logger = logging.getLogger("theano.sandbox.gpuarray.opt")
gpu_optimizer = EquilibriumDB()
gpu_cut_copies = EquilibriumDB()
gpu_seqopt = SequenceDB()
# Don't register this right now
conv_groupopt = LocalGroupDB()
conv_groupopt.__name__ = "gpua_conv_opts"
gpu_seqopt.register('gpuarray_local_optimiziations', gpu_optimizer, 1,
'fast_compile', 'fast_run', 'inplace', 'gpuarray')
gpu_seqopt.register('gpuarray_cut_transfers', gpu_cut_copies, 2,
'fast_compile', 'fast_run', 'gpuarray')
# do not add 'fast_run' to these two as this would always enable gpuarray mode
optdb.register('gpuarray_opt', gpu_seqopt,
optdb.__position__.get('add_destroy_handler', 49.5) - 1,
'gpuarray')
def register_opt(*tags, **kwargs):
def f(local_opt):
name = (kwargs and kwargs.pop('name')) or local_opt.__name__
gpu_optimizer.register(name, local_opt, 'fast_run', 'gpuarray', *tags)
return local_opt
return f
register_opt('fast_compile')(theano.tensor.opt.local_track_shape_i)
gpu_optimizer.register('local_remove_all_assert',
theano.tensor.opt.local_remove_all_assert,
'unsafe')
def safe_to_gpu(x, ctx_name):
if isinstance(x.type, tensor.TensorType):
return GpuFromHost(ctx_name)(x)
else:
return x
def safe_to_cpu(x):
if isinstance(x.type, GpuArrayType):
return host_from_gpu(x)
else:
return x
def op_lifter(OP, cuda_only=False):
"""
OP(..., host_from_gpu(), ...) -> host_from_gpu(GpuOP(...))
gpu_from_host(OP(inp0, ...)) -> GpuOP(inp0, ...)
"""
def f(maker):
def local_opt(node):
if type(node.op) in OP:
# Either one of our inputs is on the gpu or
# all of our clients are on the gpu
replace = False
# TODO: Maybe set context_name with infer_context_name()?
context_name = None
# We replace if any input is a host_from_gpu
for i in node.inputs:
if i.owner and i.owner.op == host_from_gpu:
context_name = i.owner.inputs[0].type.context_name
replace = True
break
if not replace:
# We replace if *all* clients are on the GPU
clients = [c for o in node.outputs for c in o.clients]
replace = len(clients) != 0
for c, idx in clients:
if (c == 'output' or
not isinstance(c.op, GpuFromHost)):
replace = False
# TODO: check that the clients want the same context?
if replace:
# All clients are GpuFromHost and we have at least one
context_name = clients[0][0].op.context_name
# Check if we should replace
if (not replace or
(cuda_only and
get_context(context_name).kind != 'cuda')):
return False
new_op = maker(node, context_name)
# This is needed as sometimes new_op inherits from OP.
if new_op and new_op != node.op:
if isinstance(new_op, theano.Op):
# tag the inputs with the context in case
# the context was derived from the outputs
def tag(i, ctx):
i.tag.context_name = ctx
return i
inputs = [tag(i, context_name) for i in node.inputs]
return [safe_to_cpu(o) for o in
new_op(*inputs, return_list=True)]
elif isinstance(new_op, (tuple, list)):
return [safe_to_cpu(o) for o in new_op]
else: # suppose it is a variable on the GPU
return [host_from_gpu(new_op)]
return False
local_opt.__name__ = maker.__name__
return local_optimizer(OP)(local_opt)
return f
class InputToGpuOptimizer(Optimizer):
"""
Transfer the input to the gpu to start the rolling wave.
"""
def add_requirements(self, fgraph):
fgraph.attach_feature(toolbox.ReplaceValidate())
def apply(self, fgraph):
for input in fgraph.inputs:
if isinstance(input.type, GpuArrayType):
continue
# If all clients are outputs or transfers don't do anything.
if (all(cl[0] == 'output' or isinstance(cl[0].op, GpuFromHost)
for cl in input.clients)):
continue
ctx_name = getattr(input.tag, 'context_name', None)
try:
new_input = host_from_gpu(GpuFromHost(ctx_name)(input))
fgraph.replace_validate(input, new_input,
"InputToGpuOptimizer")
except TypeError:
# This could fail if the inputs are not TensorTypes
pass
except ContextNotDefined:
if hasattr(input.tag, 'context_name'):
raise
# If there is no context tag and no default context
# then it stays on the CPU
pass
gpu_seqopt.register('InputToGpuArrayOptimizer', InputToGpuOptimizer(),
0, 'fast_run', 'fast_compile', 'merge')
@local_optimizer([GpuFromHost, GpuToGpu, host_from_gpu])
def local_cut_gpu_transfers(node):
# gpu[ab] -> host -> gpub
if (isinstance(node.op, GpuFromHost) and
node.inputs[0].owner and
isinstance(node.inputs[0].owner.op, HostFromGpu)):
other = node.inputs[0].owner.inputs[0]
if node.op.context_name == other.type.context_name:
return [other]
else:
return [GpuToGpu(node.op.context_name)(other)]
# ? -> gpua -> host
elif (isinstance(node.op, HostFromGpu) and
node.inputs[0].owner):
n2 = node.inputs[0].owner
# host ->
if isinstance(n2.op, GpuFromHost):
return [n2.inputs[0]]
# gpub ->
if isinstance(n2.op, GpuToGpu):
return [host_from_gpu(n2.inputs[0])]
# ? -> gpua -> gpub
elif isinstance(node.op, GpuToGpu):
# Transfer within same context
if node.inputs[0].type.context_name == node.op.context_name:
return [node.inputs[0]]
if node.inputs[0].owner:
n2 = node.inputs[0].owner
# host ->
if isinstance(n2.op, GpuFromHost):
return [GpuFromHost(node.op.context_name)(n2.inputs[0])]
# gpuc ->
if isinstance(n2.op, GpuToGpu):
if node.op.context_name == n2.inputs[0].type.context_name:
return [n2.inputs[0]]
else:
return [node.op(n2.inputs[0])]
gpu_cut_copies.register('cut_gpua_host_transfers', local_cut_gpu_transfers,
'fast_compile', 'fast_run', 'inplace', 'gpuarray')
gpu_cut_copies.register('cut_gpua_constant_transfers',
tensor.opt.constant_folding,
'fast_compile', 'fast_run', 'gpuarray')
optdb['canonicalize'].register('local_cut_gpua_host_gpua',
local_cut_gpu_transfers,
'fast_compile', 'fast_run', 'gpuarray')
@register_opt('fast_compile')
@local_optimizer([tensor.Alloc])
def local_gpuaalloc2(node):
"""
Join(axis, {Alloc or HostFromGPU}, ...) -> Join(axis, GpuAlloc, Alloc, ...)
Moves an alloc that is an input to join to the gpu.
"""
try:
get_context(None)
except ContextNotDefined:
# If there is no default context then we do not perform the move here.
return
if (isinstance(node.op, tensor.Alloc) and
all(c != 'output' and
c.op == tensor.join and
all(i.owner and
i.owner.op in [host_from_gpu, tensor.alloc]
for i in c.inputs[1:])
for c, idx in node.outputs[0].clients)):
return [host_from_gpu(GpuAlloc(None)(*node.inputs))]
@register_opt('fast_compile')
@op_lifter([tensor.Alloc])
def local_gpuaalloc(node, context_name):
return GpuAlloc(context_name)(*node.inputs)
@register_opt()
@local_optimizer([GpuAlloc])
def local_gpualloc_memset_0(node):
if isinstance(node.op, GpuAlloc) and not node.op.memset_0:
inp = node.inputs[0]
if (isinstance(inp, GpuArrayConstant) and
inp.data.size == 1 and
(numpy.asarray(inp.data) == 0).all()):
new_op = GpuAlloc(node.op.context_name, memset_0=True)
return [new_op(*node.inputs)]
@register_opt()
@local_optimizer([GpuContiguous])
def local_gpu_contiguous_gpu_contiguous(node):
"""
gpu_contiguous(gpu_contiguous(x)) -> gpu_contiguous(x)
"""
if isinstance(node.op, GpuContiguous):
inp = node.inputs[0]
if inp.owner and isinstance(inp.owner.op, GpuContiguous):
return [inp]
@register_opt('fast_compile')
@op_lifter([tensor.Reshape])
def local_gpureshape(node, context_name):
op = node.op
name = op.name
if name:
name = 'Gpu' + name
res = GpuReshape(op.ndim, op.name)
return res
@register_opt('fast_compile')
@op_lifter([tensor.Rebroadcast])
def local_gpu_rebroadcast(node, context_name):
if isinstance(node.inputs[0].owner.op, HostFromGpu):
return node.op(node.inputs[0].owner.inputs[0])
@register_opt('fast_compile')
@op_lifter([tensor.Flatten])
def local_gpuflatten(node, context_name):
op = node.op
shp = []
if op.outdim != 1:
shp = [node.inputs[0].shape[i] for i in range(op.outdim - 1)]
shp += [-1]
res = GpuReshape(op.outdim, None)
o = res(node.inputs[0], theano.tensor.as_tensor_variable(shp))
return o
@register_opt('fast_compile')
@op_lifter([tensor.Elemwise])
def local_gpu_elemwise(node, context_name):
op = node.op
scal_op = op.scalar_op
name = op.name
if name:
name = 'Gpu' + name
if len(node.outputs) > 1:
return
res = GpuElemwise(scal_op, name=name,
inplace_pattern=copy.copy(op.inplace_pattern),
nfunc_spec=op.nfunc_spec)
# If the elemwise operation is a pow, casts might be required on the
# inputs and or outputs because only the (float, float)->float and
# (double, double)->double cases are implemented at the moment.
if isinstance(op.scalar_op, Pow):
# Only transfer the computation on the gpu if the output dtype is
# floating point. Else, give up on the transfer to the gpu.
out_dtype = node.outputs[0].dtype
if out_dtype not in ['float16', 'float32', 'float64']:
return
# Transfer the inputs on the GPU and cast them to the right dtype.
new_inputs = []
for inp in node.inputs:
if inp.dtype != out_dtype:
gpu_cast_op = GpuElemwise(Cast(Scalar(out_dtype)))
new_inputs.append(gpu_cast_op(as_gpuarray_variable(inp)))
else:
new_inputs.append(as_gpuarray_variable(inp))
# Perform the exponent on the gpu and transfer the output back to the
# cpu.
gpu_output = res(*new_inputs)
cpu_output = host_from_gpu(gpu_output)
return [cpu_output]
else:
return res
def max_inputs_to_GpuElemwise(node):
ptr_size = 8
int_size = 4
# we take the limit from CUDA for now
argument_limit = 232
ndim = node.inputs[0].type.ndim
# number of elements and shape
size_param_mandatory = (int_size * (ndim + 1)) + \
(ptr_size + int_size * ndim) * len(node.outputs)
nb_bytes_avail = argument_limit - size_param_mandatory
nb_bytes_per_input = ptr_size + ndim * int_size
max_nb_inputs = nb_bytes_avail // nb_bytes_per_input
return max_nb_inputs
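# Worked example of the sizing above (illustrative, not from the original
# source): for ndim == 4 and a single output, size_param_mandatory is
# 4 * (4 + 1) + (8 + 4 * 4) * 1 = 44 bytes, leaving 232 - 44 = 188 bytes for
# inputs at 8 + 4 * 4 = 24 bytes each, so at most 188 // 24 = 7 inputs are
# allowed per fused GpuElemwise node.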
gpu_local_elemwise_fusion = tensor.opt.local_elemwise_fusion_op(
GpuElemwise,
max_inputs_to_GpuElemwise)
optdb.register('gpua_elemwise_fusion',
tensor.opt.FusionOptimizer(gpu_local_elemwise_fusion), 71.00,
'fast_run', 'fusion', 'local_elemwise_fusion', 'gpuarray')
inplace_gpu_elemwise_opt = tensor.opt.inplace_elemwise_optimizer_op(
GpuElemwise)
optdb.register('gpua_inplace_opt', inplace_gpu_elemwise_opt, 75,
'inplace_elemwise_optimizer', 'fast_run', 'inplace', 'gpuarray')
@register_opt('fast_compile')
@op_lifter([tensor.DimShuffle])
def local_gpua_dimshuffle(node, context_name):
return GpuDimShuffle(node.op.input_broadcastable,
node.op.new_order)
@register_opt('fast_compile')
@op_lifter([tensor.SpecifyShape])
def local_gpua_specifyShape(node, context_name):
if isinstance(node.inputs[0].type, GpuArrayType):
return
inp = [GpuFromHost(context_name)(node.inputs[0])] + node.inputs[1:]
return tensor.specify_shape(*inp)
@register_opt('fast_compile')
@op_lifter([theano.compile.ops.Shape])
def local_gpua_shape(node, context_name):
# op_lifter will call this opt too frequently as the output is
# always on the CPU.
if isinstance(node.inputs[0].type, GpuArrayType):
return
return [GpuFromHost(context_name)(node.inputs[0]).shape]
def gpu_print_wrapper(op, cnda):
op.old_op.global_fn(op.old_op, numpy.asarray(cnda))
@register_opt('fast_compile')
@op_lifter([tensor.printing.Print])
def local_gpu_print_op(node, context_name):
x, = node.inputs
gpu_x, = x.owner.inputs
new_op = node.op.__class__(global_fn=gpu_print_wrapper)
new_op.old_op = node.op
return new_op(gpu_x)
@register_opt('fast_compile')
@local_optimizer([PdbBreakpoint])
def local_gpu_pdbbreakpoint_op(node):
if isinstance(node.op, PdbBreakpoint):
old_inputs = node.inputs
old_outputs = node.outputs
new_inputs = node.inputs[:1]
input_transfered = []
        # Go through the monitored variables, transferring to the GPU only
        # those for which the input comes from the GPU or whose output will
        # be transferred to the GPU.
nb_monitored_vars = len(node.outputs)
for i in range(nb_monitored_vars):
inp = old_inputs[i + 1]
out = old_outputs[i]
input_is_from_gpu = (inp.owner and
isinstance(inp.owner.op, HostFromGpu))
output_goes_to_gpu = False
for c in out.clients:
if c == 'output':
continue
if isinstance(c[0].op, GpuFromHost):
output_goes_to_gpu = True
context_name = c[0].op.context_name
break
if input_is_from_gpu:
# The op should be applied on the GPU version of the input
new_inputs.append(inp.owner.inputs[0])
input_transfered.append(True)
elif output_goes_to_gpu:
                # The input should be transferred to the gpu
new_inputs.append(GpuFromHost(context_name)(inp))
input_transfered.append(True)
else:
# No transfer is required.
new_inputs.append(inp)
input_transfered.append(False)
# Only continue the optimization if at least one input has been
        # transferred to the gpu
if not any(input_transfered):
return False
# Apply the op on the new inputs
new_op_outputs = node.op(*new_inputs, return_list=True)
# Propagate the transfer to the gpu through the outputs that require
# it
new_outputs = []
for i in range(len(new_op_outputs)):
if input_transfered[i]:
new_outputs.append(host_from_gpu(new_op_outputs[i]))
else:
new_outputs.append(new_op_outputs[i])
return new_outputs
return False
@register_opt('fast_compile')
@op_lifter([tensor.Join])
def local_gpua_join(node, context_name):
return gpu_join
@register_opt('fast_compile')
@local_optimizer([GpuJoin])
def local_gpuajoin_1(node):
# join of a single element
if (isinstance(node.op, GpuJoin) and
len(node.inputs) == 2):
return [node.inputs[1]]
@register_opt('fast_compile')
@op_lifter([tensor.Split])
def local_gpua_split(node, context_name):
return GpuSplit(node.op.len_splits)
@register_opt('fast_compile')
@op_lifter([tensor.Subtensor])
def local_gpua_subtensor(node, context_name):
x = node.inputs[0]
if (x.owner and isinstance(x.owner.op, HostFromGpu)):
gpu_x = x.owner.inputs[0]
if (gpu_x.owner and
isinstance(gpu_x.owner.op, GpuFromHost) and
# And it is a shared var or an input of the graph.
not gpu_x.owner.inputs[0].owner):
if len(x.clients) == 1:
if any([n == 'output' or any([isinstance(v.type, GpuArrayType)
for v in n.inputs + n.outputs])
for n, _ in node.outputs[0].clients]):
return
else:
return [host_from_gpu(gpu_x.owner.op(node.outputs[0]))]
return GpuSubtensor(node.op.idx_list)
@register_opt('fast_compile')
@op_lifter([tensor.IncSubtensor])
def local_gpua_incsubtensor(node, context_name):
return GpuIncSubtensor(node.op.idx_list, node.op.inplace,
node.op.set_instead_of_inc,
node.op.destroyhandler_tolerate_aliased)
@register_opt('fast_compile')
@op_lifter([tensor.AdvancedSubtensor1])
def local_gpua_advanced_subtensor(node, context_name):
return GpuAdvancedSubtensor1()
@register_opt('fast_compile')
@op_lifter([tensor.AdvancedIncSubtensor1])
def local_gpua_advanced_incsubtensor(node, context_name):
# This is disabled on non-cuda contexts
if get_context(context_name).kind != 'cuda':
return None
x, y, ilist = node.inputs
# Gpu Ops needs both inputs to have the same dtype
if (x.type.dtype != y.type.dtype):
dtype = scalar.upcast(x.type.dtype, y.type.dtype)
if x.type.dtype != dtype:
x = tensor.cast(x, dtype)
if y.type.dtype != dtype:
y = tensor.cast(y, dtype)
set_instead_of_inc = node.op.set_instead_of_inc
active_device_no = theano.sandbox.cuda.active_device_number()
device_properties = theano.sandbox.cuda.device_properties
compute_capability = device_properties(active_device_no)['major']
if (compute_capability < 2 or x.ndim != 2 or y.ndim != 2):
return [GpuAdvancedIncSubtensor1(
set_instead_of_inc=set_instead_of_inc)(x, y, ilist)]
else:
return [GpuAdvancedIncSubtensor1_dev20(
set_instead_of_inc=set_instead_of_inc)(x, y, ilist)]
@register_opt('fast_compile')
@op_lifter([tensor.CAReduce, tensor.Sum, tensor.elemwise.Prod])
def local_gpua_careduce(node, context_name):
if isinstance(node.op.scalar_op, (scalar.Add, scalar.Mul,
scalar.Maximum, scalar.Minimum)):
ctx = get_context(context_name)
if ctx.kind == 'opencl':
op = GpuCAReduceCPY
if node.op.scalar_op not in [scalar.add, scalar.mul]:
                # We don't yet support all reductions with the cpy code.
return
elif ctx.kind == 'cuda':
op = GpuCAReduceCuda
else:
return False
x, = node.inputs
greduce = op(
node.op.scalar_op, axis=node.op.axis,
dtype=getattr(node.op, 'dtype', None),
acc_dtype=getattr(node.op, 'acc_dtype', None))
x.tag.context_name = context_name
gvar = greduce(x)
# We need to have the make node called, otherwise the mask can
# be None
if (op is GpuCAReduceCPY or
gvar.owner.op.supports_c_code([GpuFromHost(context_name)(x)])):
return greduce
else:
# Try to make a simpler pattern based on reshaping
# The principle is that if two adjacent dimensions have
# the same value in the reduce_mask, then we can reshape
# to make them a single dimension, do the reduction, and
# then reshape to get them back.
if node.op.axis is None:
reduce_mask = [1] * x.type.ndim
else:
reduce_mask = [0] * x.type.ndim
for a in node.op.axis:
assert reduce_mask[a] == 0
reduce_mask[a] = 1
shape_of = node.fgraph.shape_feature.shape_of
x_shape = shape_of[x]
new_in_shp = [x_shape[0]]
new_mask = [reduce_mask[0]]
for i in xrange(1, x.type.ndim):
if reduce_mask[i] == reduce_mask[i - 1]:
new_in_shp[-1] *= x_shape[i]
else:
new_mask.append(reduce_mask[i])
new_in_shp.append(x_shape[i])
new_axis = []
for idx, m in enumerate(new_mask):
if m == 1:
new_axis.append(idx)
greduce = op(
node.op.scalar_op,
axis=new_axis, reduce_mask=new_mask,
dtype=getattr(node.op, 'dtype', None),
acc_dtype=getattr(node.op, 'acc_dtype', None))
reshaped_x = x.reshape(tensor.stack(new_in_shp))
gpu_reshaped_x = GpuFromHost(context_name)(reshaped_x)
gvar = greduce(gpu_reshaped_x)
# We need to have the make node called, otherwise the mask can
# be None
reshaped_gpu_inputs = [gpu_reshaped_x]
if greduce.supports_c_code(reshaped_gpu_inputs):
reduce_reshaped_x = host_from_gpu(
greduce(gpu_reshaped_x))
if reduce_reshaped_x.ndim != node.outputs[0].ndim:
unreshaped_reduce = reduce_reshaped_x.reshape(
tensor.stack(shape_of[node.outputs[0]]))
else:
unreshaped_reduce = reduce_reshaped_x
return [unreshaped_reduce]
@register_opt('fast_compile')
@op_lifter([tensor.blas.Gemv, tensor.blas_c.CGemv])
def local_gpua_gemv(node, context_name):
return GpuGemv(inplace=node.op.inplace)
@register_opt('fast_compile')
@op_lifter([tensor.blas.Gemm])
def local_gpua_gemm(node, context_name):
return GpuGemm(inplace=node.op.inplace)
@register_opt('fast_compile')
@op_lifter([tensor.basic.Dot])
def local_gpua_hgemm(node, context_name):
from theano.sandbox.cuda import nvcc_compiler
if nvcc_compiler.nvcc_version < '7.5':
_logger.warning("Not performing dot of float16 on the GPU since "
"cuda 7.5 is not available. Updating could speed up "
"your code.")
return
A = node.inputs[0]
B = node.inputs[1]
if (A.ndim == 2 and B.ndim == 2 and
A.dtype == 'float16' and B.dtype == 'float16'):
fgraph = node.inputs[0].fgraph
C = GpuAllocEmpty(dtype='float16', context_name=context_name)(
shape_i(A, 0, fgraph),
shape_i(B, 1, fgraph))
return gpugemm_no_inplace(C, 1.0, A, B, 0.0)
@register_opt()
@alpha_merge(GpuGemm, alpha_in=1, beta_in=4)
def local_gpuagemm_alpha_merge(node, *inputs):
return [gpugemm_no_inplace(*inputs)]
@register_opt()
@output_merge(GpuGemm, alpha_in=1, beta_in=4, out_in=0)
def local_gpuagemm_output_merge(node, *inputs):
return [gpugemm_no_inplace(*inputs)]
@register_opt('fast_compile')
@op_lifter([tensor.blas.Ger, tensor.blas_c.CGer, tensor.blas_scipy.ScipyGer])
def local_gpua_ger(node, context_name):
return GpuGer(inplace=node.op.destructive)
@register_opt('fast_compile')
@op_lifter([tensor.blas.Dot22])
def local_gpua_dot22(node, context_name):
return gpu_dot22
@register_opt('fast_compile')
@op_lifter([tensor.basic.Eye])
def local_gpua_eye(node, context_name):
return GpuEye(dtype=node.op.dtype, context_name=context_name)
@register_opt('fast_compile')
@op_lifter([tensor.nnet.CrossentropySoftmaxArgmax1HotWithBias], cuda_only=True)
def local_gpua_crossentropysoftmaxargmax1hotwithbias(node, context_name):
return GpuCrossentropySoftmaxArgmax1HotWithBias()
@register_opt('fast_compile')
@op_lifter([tensor.nnet.CrossentropySoftmax1HotWithBiasDx], cuda_only=True)
def local_gpua_crossentropysoftmax1hotwithbiasdx(node, context_name):
return GpuCrossentropySoftmax1HotWithBiasDx()
@register_opt('fast_compile')
@op_lifter([tensor.nnet.Softmax], cuda_only=True)
def local_gpua_softmax(node, context_name):
return GpuSoftmax()
@register_opt('fast_compile')
@op_lifter([tensor.nnet.SoftmaxWithBias], cuda_only=True)
def local_gpua_softmaxwithbias(node, context_name):
return GpuSoftmaxWithBias()
@register_opt('fast_compile')
@op_lifter([theano.tensor.opt.Assert])
def local_assert(node, context_name):
if (node.inputs[0].owner and
isinstance(node.inputs[0].owner.op, HostFromGpu)):
return [host_from_gpu(node.op(node.inputs[0].owner.inputs[0],
*node.inputs[1:]))]
@register_opt('fast_compile')
@op_lifter([ConvOp])
def local_gpu_conv(node, context_name):
def GpuConvOp_from_ConvOp(op):
logical_img_hw = None
if op.kshp_logical is not None and op.kshp_logical != op.kshp:
return None
ret = GpuConv(border_mode=op.out_mode,
subsample=(op.dx, op.dy),
logical_img_hw=logical_img_hw,
logical_kern_hw=op.kshp_logical,
logical_kern_align_top=op.kshp_logical_top_aligned,
kshp=op.kshp,
version=op.version,
direction_hint=op.direction_hint,
verbose=op.verbose,
imshp=op.imshp,
nkern=op.nkern,
bsize=op.bsize,
fft_opt=op.fft_opt)
if op.imshp_logical is not None:
logical_img_hw = op.imshp_logical[1:3]
if logical_img_hw != op.imshp[1:3]:
rstride = int(numpy.ceil(op.imshp_logical[1] /
float(op.imshp[1])))
cstride = int(numpy.ceil(op.imshp_logical[2] /
float(op.imshp[2])))
def make_graph(img, kern):
buf = tensor.alloc(numpy.asarray(0, dtype=img.dtype),
img.shape[0], *op.imshp_logical)
img = tensor.set_subtensor(buf[:, :, ::rstride, ::cstride],
img)
img = GpuFromHost(context_name)(img)
return ret(img, kern)
return make_graph
return ret
def values_eq_approx(a, b):
"""
        This function is needed so that DebugMode does not raise spurious
        errors caused by rounding.
        Because we reduce over the last two dimensions, the absolute error
        can grow when the number of elements reduced over is significant.
"""
assert a.ndim == 4
atol = None
if a.shape[-1] * a.shape[-2] > 100:
# For float32 the default atol is 1e-5
atol = 3e-5
return GpuArrayType.values_eq_approx(a, b, atol=atol)
img, kern = node.inputs
gpu_conv = GpuConvOp_from_ConvOp(node.op)
if gpu_conv is None:
return
out = gpu_conv(GpuFromHost(context_name)(img),
GpuFromHost(context_name)(kern))
assert isinstance(out.type, GpuArrayType)
# Make sure to keep the broadcastable pattern of the original
# convolution even if we might gain or lose some due to different
# information at the node level.
out = tensor.patternbroadcast(out, node.outputs[0].broadcastable)
out.values_eq_approx = values_eq_approx
return [out]
# Register this here so that it goes after 'local_gpu_conv'
register_opt()(conv_groupopt)
@register_opt("low_memory")
@local_optimizer([GpuCAReduceCuda])
def local_gpu_elemwise_careduce(node):
"""
Merge some GpuCAReduceCuda and GPUElemwise.
"""
if (isinstance(node.op, GpuCAReduceCuda) and
node.op.pre_scalar_op is None and
node.inputs[0].owner and
isinstance(node.inputs[0].owner.op, GpuElemwise) and
            # The Op supports any scalar op with a single input. We don't
            # automatically add more cases, as some (e.g. trigonometric
            # operations combined with certain reduction patterns) would
            # probably result in a slowdown.
isinstance(node.inputs[0].owner.op.scalar_op, scalar.basic.Sqr)):
op = node.op
inp = node.inputs[0].owner.inputs[0]
return [GpuCAReduceCuda(scalar_op=op.scalar_op,
reduce_mask=op.reduce_mask,
pre_scalar_op=scalar.basic.sqr)(inp)]
def tensor_to_gpu(x, context_name):
if isinstance(x.type, tensor.TensorType):
y = GpuArrayType(broadcastable=x.type.broadcastable,
context_name=context_name,
dtype=x.type.dtype)()
if x.name:
y.name = x.name + '[Gpua]'
return y
else:
return x
def gpu_safe_new(x, tag=''):
"""
Internal function that constructs a new variable from x with the same
type, but with a different name (old name + tag). This function is used
by gradient, or the R-op to construct new variables for the inputs of
the inner graph such that there is no interference between the original
graph and the newly constructed graph.
"""
if hasattr(x, 'name') and x.name is not None:
nw_name = x.name + tag
else:
nw_name = None
if isinstance(x, theano.Constant):
return x.clone()
nw_x = x.type()
nw_x.name = nw_name
return nw_x
def gpu_reconstruct_graph(inputs, outputs, tag=None):
"""
Different interface to clone, that allows you to pass inputs.
Compared to clone, this method always replaces the inputs with
new variables of the same type, and returns those (in the same
order as the original inputs).
"""
if tag is None:
tag = ''
nw_inputs = [gpu_safe_new(x, tag) for x in inputs]
givens = {}
for nw_x, x in zip(nw_inputs, inputs):
givens[x] = nw_x
nw_outputs = scan_utils.clone(outputs, replace=givens)
return (nw_inputs, nw_outputs)
@register_opt('scan', 'fast_compile')
@op_lifter([scan_op.Scan])
def local_scan_to_gpua(node, context_name):
info = copy.deepcopy(node.op.info)
if info.get('gpua', False):
return
info['gpua'] = True
nw_ins = [node.inputs[0]]
e = (1 +
node.op.n_seqs +
node.op.n_mit_mot +
node.op.n_mit_sot +
node.op.n_sit_sot +
node.op.n_shared_outs)
nw_ins += [safe_to_gpu(x, context_name) for x in node.inputs[1:e]]
b = e
e = e + node.op.n_nit_sot
nw_ins += node.inputs[b:e]
nw_ins += [safe_to_gpu(x, context_name) for x in node.inputs[e:]]
scan_ins = [tensor_to_gpu(x, context_name) for x in node.op.inputs]
# The inner output corresponding to the looping condition should not be
# moved to the gpu
if node.op.info['as_while']:
scan_outs = [safe_to_gpu(x, context_name) for x in node.op.outputs[:-1]]
scan_outs += [node.op.outputs[-1]]
else:
scan_outs = [safe_to_gpu(x, context_name) for x in node.op.outputs]
scan_outs = scan_utils.clone(
scan_outs,
replace=list(zip(node.op.inputs,
(safe_to_cpu(x) for x in scan_ins))))
# We need to construct the hash here, because scan
# __init__ does not know about the gpu and can not
# handle graphs with inputs being on the gpu
tmp_in, tmp_out = gpu_reconstruct_graph(scan_ins, scan_outs)
local_fgraph = gof.FunctionGraph(tmp_in, tmp_out, clone=True)
_cmodule_key = gof.CLinker().cmodule_key_(local_fgraph, [])
info['gpu_hash'] = hash(_cmodule_key)
def typebuild(dtype, broadcastable, context_name=context_name):
return GpuArrayType(dtype=dtype, broadcastable=broadcastable,
context_name=context_name)
nw_op = scan_op.Scan(scan_ins, scan_outs, info,
typeConstructor=typebuild).make_node(*nw_ins)
return nw_op.outputs
def _scan_type_infer(node):
context_name = infer_context_name(*node.inputs)
def typebuild(dtype, broadcastable, context_name=context_name):
return GpuArrayType(dtype=dtype, broadcastable=broadcastable,
context_name=context_name)
return typebuild
optdb.register('gpua_scanOp_make_inplace',
scan_opt.ScanInplaceOptimizer(typeInfer=_scan_type_infer,
gpua_flag=True),
75,
'gpuarray',
'fast_run',
'inplace',
'scan')
| bsd-3-clause | -188,701,961,609,684,220 | 34.253761 | 80 | 0.584898 | false |
astroswego/data-plots | src/data_plots/stats.py | 1 | 3782 | import numpy
import matplotlib
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
from matplotlib.ticker import NullFormatter
from matplotlib import rcParams
from scipy.optimize import curve_fit
from data_plots.utils import labeler, titler
rcParams['text.usetex'] = True
def scatter_hist(x, y, *args,
bins=10,
linestyle='r--', scatterstyle='k+',
histtype='stepfilled', facecolor='#FFFFFF', hatch='/',
show_mean=True, show_std=True,
**kwargs):
# no labels
nullfmt = NullFormatter()
# definitions for axes
left, width = 0.1, 0.65
bottom, height = 0.1, 0.65
bottom_h = left_h = left+width+0.02
rect_scatter = [left, bottom, width, height]
rect_histx = [left, bottom_h, width, 0.2]
rect_histy = [left_h, bottom, 0.2, height]
# start with a rectangular Figure
fig = plt.figure(1, figsize=(8, 8))
axScatter = fig.add_axes(rect_scatter)
axHistx = fig.add_axes(rect_histx)
axHisty = fig.add_axes(rect_histy)
# no labels on some axes
axHistx.xaxis.set_major_formatter(nullfmt)
axHisty.yaxis.set_major_formatter(nullfmt)
# the scatter plot:
axScatter.plot(x, y, scatterstyle)
# determine limits
xmin, ymin = numpy.min(x), numpy.min(y)
xmax, ymax = numpy.max(x), numpy.max(y)
x_mean, y_mean = x.mean(), y.mean()
x_std, y_std = x.std(), y.std()
# xlims = ((numpy.array([-xmin, xmax]) // binwidth) + 1) * binwidth
# ylims = ((numpy.array([-ymin, ymax]) // binwidth) + 1) * binwidth
xbins = numpy.linspace(xmin, xmax, bins)
ybins = numpy.linspace(ymin, ymax, bins)
# xbins = numpy.arange(-xlims[0], xlims[1]+binwidth, binwidth)
# ybins = numpy.arange(-ylims[0], ylims[1]+binwidth, binwidth)
n, xbins, xpatches = axHistx.hist(x, bins=xbins, normed=1,
histtype=histtype, facecolor=facecolor,
hatch=hatch)
n, ybins, ypatches = axHisty.hist(y, bins=ybins, normed=1,
histtype=histtype, facecolor=facecolor,
hatch=hatch,
orientation='horizontal')
mean_formatter = r'$\mu = {0:.5f}$'.format
std_formatter = r'$\sigma = {0:.5f}$'.format
xhandles, yhandles = [], []
xlabels, ylabels = [], []
if show_mean:
p = plt.Rectangle((0, 0), 1, 1, fc="r")
xlabels.append(mean_formatter(x_mean))
ylabels.append(mean_formatter(y_mean))
xhandles.append(p)
yhandles.append(p)
if show_std:
p = plt.Rectangle((0, 0), 1, 1, fc="b")
xlabels.append(std_formatter(x_std))
ylabels.append(std_formatter(y_std))
xhandles.append(p)
yhandles.append(p)
if show_mean or show_std:
axHistx.legend(xhandles, xlabels,
fontsize='small', loc='upper right')
        # use the y-axis handles/labels for the y histogram legend
        axHisty.legend(yhandles, ylabels,
                       fontsize='small', loc='upper right')
xpdf = mlab.normpdf(xbins, x_mean, x_std)
ypdf = mlab.normpdf(ybins, y_mean, y_std)
axHistx.plot(xbins, xpdf, linestyle)
axHisty.plot(ypdf, ybins, linestyle)
axHistx.set_xlim(axScatter.get_xlim())
axHisty.set_ylim(axScatter.get_ylim())
axHistx.locator_params(tight=False, nbins=3)
axHisty.locator_params(tight=False, nbins=3)
axHistx = titler(axHistx, **kwargs)
axScatter = labeler(axScatter, **kwargs)
return fig
def scatter_hist_from_file(input, *args, usecols=range(2), **kwargs):
x, y = numpy.loadtxt(input, usecols=usecols, unpack=True)
return scatter_hist(x, y, *args, **kwargs)
def _gauss(x, *p):
A, mu, sigma = p
return A*numpy.exp(-(x-mu)**2/(2.*sigma**2))
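# ---------------------------------------------------------------------------
# Usage sketch (added for illustration; not part of the original module).
# The file name below is hypothetical, and any title/label keywords accepted
# by titler()/labeler() are simply forwarded through **kwargs.
#
#     import numpy
#     from data_plots.stats import scatter_hist, scatter_hist_from_file
#
#     x = numpy.random.normal(0.0, 1.0, 500)
#     y = 0.5 * x + numpy.random.normal(0.0, 0.5, 500)
#     fig = scatter_hist(x, y, bins=20)           # scatter + marginal histograms
#     fig.savefig("scatter_hist_demo.png")
#
#     fig2 = scatter_hist_from_file("data.txt")   # hypothetical two-column file
# ---------------------------------------------------------------------------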
| mit | 7,716,508,680,442,090,000 | 31.886957 | 77 | 0.59651 | false |
HLP-R/hlpr_kinesthetic_teaching | hlpr_kinesthetic_interaction/src/hlpr_kinesthetic_interaction/jaco_7dof_arm.py | 1 | 2145 | #!/usr/bin/env python
import rospy
from hlpr_manipulation_utils.manipulator import Gripper
from kinova_msgs.srv import Start, Stop
from hlpr_kinesthetic_interaction.kinesthetic_interaction import KinestheticInteraction
"""
jaco_7dof_arm.py
Simple wrapper that abstracts out the arm class so that other arms
can use kinesthetic_interaction
"""
class Arm():
ENABLE_7DOF_GRAVITY_COMP_SERVICE = "/j2s7s300_driver/in/start_gravity_comp"
DISABLE_7DOF_GRAVITY_COMP_SERVICE = "/j2s7s300_driver/in/stop_gravity_comp"
ENABLE_7DOF_FORCE_SERVICE = "/j2s7s300_driver/in/start_force_control"
DISABLE_7DOF_FORCE_SERVICE = "/j2s7s300_driver/in/stop_force_control"
def __init__(self):
# Setup gravity compensation
rospy.logwarn("Waiting for gravity compensation service")
rospy.wait_for_service(Arm.ENABLE_7DOF_GRAVITY_COMP_SERVICE)
rospy.wait_for_service(Arm.DISABLE_7DOF_GRAVITY_COMP_SERVICE)
rospy.wait_for_service(Arm.ENABLE_7DOF_FORCE_SERVICE)
rospy.wait_for_service(Arm.DISABLE_7DOF_FORCE_SERVICE)
# Store the services
self.enable_grav_comp = rospy.ServiceProxy(Arm.ENABLE_7DOF_GRAVITY_COMP_SERVICE, Start)
self.disable_grav_comp = rospy.ServiceProxy(Arm.DISABLE_7DOF_GRAVITY_COMP_SERVICE, Stop)
self.enable_force = rospy.ServiceProxy(Arm.ENABLE_7DOF_FORCE_SERVICE, Start)
self.disable_force = rospy.ServiceProxy(Arm.DISABLE_7DOF_FORCE_SERVICE, Stop)
rospy.logwarn("Gravity compenstation service loaded")
# Initialize the gripper
self.gripper = Gripper()
def gravity_comp(self, toggle, ft_mode):
if ft_mode == KinestheticInteraction.TORQUE_MODE:
if toggle:
return self.enable_grav_comp()
else:
return self.disable_grav_comp()
elif ft_mode == KinestheticInteraction.FORCE_MODE:
if toggle:
return self.enable_force()
else:
return self.disable_force()
else:
rospy.logerr("Passed in unsupported ft mode: %s. Nothing will happen" % ft_mode)
return False
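# -----------------------------------------------------------------------------
# Usage sketch (illustrative only; not part of the original node).  It assumes
# a running ROS master and the Kinova j2s7s300 driver, so the services above
# are actually available; the module import path is inferred from the package
# layout.
#
#     import rospy
#     from hlpr_kinesthetic_interaction.jaco_7dof_arm import Arm
#     from hlpr_kinesthetic_interaction.kinesthetic_interaction import \
#         KinestheticInteraction
#
#     rospy.init_node("kinesthetic_demo")
#     arm = Arm()
#     arm.gravity_comp(True, KinestheticInteraction.TORQUE_MODE)   # enable
#     arm.gravity_comp(False, KinestheticInteraction.TORQUE_MODE)  # disable
# -----------------------------------------------------------------------------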
| bsd-3-clause | 3,589,335,116,544,430,000 | 38.722222 | 96 | 0.682051 | false |
openstack/heat | heat/hacking/checks.py | 1 | 2038 | # Copyright (c) 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from hacking import core
"""
Guidelines for writing new hacking checks
- Use only for Heat specific tests. OpenStack general tests
should be submitted to the common 'hacking' module.
- Pick numbers in the range H3xx. Find the current test with
the highest allocated number and then pick the next value.
- Keep the test method code in the source file ordered based
on the Heat3xx value.
- List the new rule in the top level HACKING.rst file
- Add test cases for each new rule to heat/tests/test_hacking.py
"""
@core.flake8ext
def no_log_warn(logical_line):
"""Disallow 'LOG.warn('
https://bugs.launchpad.net/tempest/+bug/1508442
Heat301
"""
if logical_line.startswith('LOG.warn('):
yield(0, 'Heat301 Use LOG.warning() rather than LOG.warn()')
@core.flake8ext
def check_python3_no_iteritems(logical_line):
msg = ("Heat302: Use dict.items() instead of dict.iteritems().")
if re.search(r".*\.iteritems\(\)", logical_line):
yield(0, msg)
@core.flake8ext
def check_python3_no_iterkeys(logical_line):
msg = ("Heat303: Use dict.keys() instead of dict.iterkeys().")
if re.search(r".*\.iterkeys\(\)", logical_line):
yield(0, msg)
@core.flake8ext
def check_python3_no_itervalues(logical_line):
msg = ("Heat304: Use dict.values() instead of dict.itervalues().")
if re.search(r".*\.itervalues\(\)", logical_line):
yield(0, msg)
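# Illustrative examples (not part of the original module): lines like the
# following would be flagged by the checks above.
#
#     LOG.warn('use LOG.warning instead')     # Heat301
#     for k, v in data.iteritems(): pass      # Heat302
#     for k in data.iterkeys(): pass          # Heat303
#     for v in data.itervalues(): pass        # Heat304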
| apache-2.0 | 7,629,870,061,720,237,000 | 28.536232 | 75 | 0.707556 | false |
ggf84/tupan | tupan/particles/body.py | 1 | 18588 | # -*- coding: utf-8 -*-
#
"""
TODO.
"""
from __future__ import print_function
import sys
import copy
import numpy as np
from ..lib import extensions
from ..lib.utils.timing import decallmethods, timings
__all__ = ["Bodies"]
class NbodyMethods(object):
"""This class holds common methods for particles in n-body systems.
"""
include_pn_corrections = False
attrs = [ # name, sctype, doc
("id", 'uint', "index"),
("mass", 'real', "mass"),
("eps2", 'real', "squared softening"),
("rx", 'real', "x-position"),
("ry", 'real', "y-position"),
("rz", 'real', "z-position"),
("vx", 'real', "x-velocity"),
("vy", 'real', "y-velocity"),
("vz", 'real', "z-velocity"),
("time", 'real', "current time"),
("nstep", 'uint', "step number"),
("tstep", 'real', "time step"),
]
special_attrs = [ # name, sctype, doc
]
@property # TODO: @classproperty ???
def dtype(self):
from ..lib.utils.ctype import ctypedict
return [(name, ctypedict[sctype])
for name, sctype, _ in self.attrs]
@property # TODO: @classproperty ???
def special_dtype(self):
from ..lib.utils.ctype import ctypedict
return [(name, ctypedict[sctype])
for name, sctype, _ in self.special_attrs]
@property
def pos(self): # XXX: deprecate?
return np.concatenate((self.rx, self.ry, self.rz,)).reshape(3, -1).T
@property
def vel(self): # XXX: deprecate?
return np.concatenate((self.vx, self.vy, self.vz,)).reshape(3, -1).T
@property
def px(self):
return self.mass * self.vx
@property
def py(self):
return self.mass * self.vy
@property
def pz(self):
return self.mass * self.vz
### total mass and center-of-mass methods
@property
def total_mass(self):
"""Total mass of the system.
"""
return float(self.mass.sum())
@property
def com_r(self):
"""Center-of-Mass position of the system.
.. note::
Post-Newtonian corrections, if enabled, are included.
"""
mrx = self.mass * self.rx
mry = self.mass * self.ry
mrz = self.mass * self.rz
if self.include_pn_corrections:
if not "pn_mrx" in self.__dict__:
self.register_auxiliary_attribute("pn_mrx", "real")
if not "pn_mry" in self.__dict__:
self.register_auxiliary_attribute("pn_mry", "real")
if not "pn_mrz" in self.__dict__:
self.register_auxiliary_attribute("pn_mrz", "real")
mrx += self.pn_mrx
mry += self.pn_mry
mrz += self.pn_mrz
mr = np.array([mrx, mry, mrz]).T
return mr.sum(0) / self.total_mass
@property
def com_v(self):
"""Center-of-Mass velocity of the system.
.. note::
Post-Newtonian corrections, if enabled, are included.
"""
mvx, mvy, mvz = self.px, self.py, self.pz
if self.include_pn_corrections:
if not "pn_mvx" in self.__dict__:
self.register_auxiliary_attribute("pn_mvx", "real")
if not "pn_mvy" in self.__dict__:
self.register_auxiliary_attribute("pn_mvy", "real")
if not "pn_mvz" in self.__dict__:
self.register_auxiliary_attribute("pn_mvz", "real")
mvx += self.pn_mvx
mvy += self.pn_mvy
mvz += self.pn_mvz
mv = np.array([mvx, mvy, mvz]).T
return mv.sum(0) / self.total_mass
@property
def com_linear_momentum(self):
"""Center-of-Mass linear momentum of the system.
"""
mtot = self.total_mass
com_v = self.com_v
return mtot * com_v
@property
def com_angular_momentum(self):
"""Center-of-Mass angular momentum of the system.
"""
mtot = self.total_mass
com_r = self.com_r
com_v = self.com_v
return mtot * np.cross(com_r, com_v)
@property
def com_kinetic_energy(self):
"""Center-of-Mass kinetic energy of the system.
"""
mtot = self.total_mass
com_v = self.com_v
return 0.5 * mtot * (com_v**2).sum()
def com_move_to(self, com_r, com_v):
"""Moves the center-of-mass to the given coordinates.
"""
self.rx += com_r[0]
self.ry += com_r[1]
self.rz += com_r[2]
self.vx += com_v[0]
self.vy += com_v[1]
self.vz += com_v[2]
def com_to_origin(self):
"""Moves the center-of-mass to the origin of coordinates.
"""
self.com_move_to(-self.com_r, -self.com_v)
### linear momentum
@property
def lm(self):
"""Individual linear momentum.
.. note::
Post-Newtonian corrections, if enabled, are included.
"""
lmx, lmy, lmz = self.px, self.py, self.pz
if self.include_pn_corrections:
if not "pn_mvx" in self.__dict__:
self.register_auxiliary_attribute("pn_mvx", "real")
if not "pn_mvy" in self.__dict__:
self.register_auxiliary_attribute("pn_mvy", "real")
if not "pn_mvz" in self.__dict__:
self.register_auxiliary_attribute("pn_mvz", "real")
lmx += self.pn_mvx
lmy += self.pn_mvy
lmz += self.pn_mvz
return np.array([lmx, lmy, lmz]).T
@property
def linear_momentum(self):
"""Total linear momentum of the system.
.. note::
This quantity possibly includes the linear momentum of the
center-of-mass w.r.t. the origin of coordinates.
.. note::
Post-Newtonian corrections, if enabled, are included.
"""
return self.lm.sum(0)
### angular momentum
@property
def am(self):
"""Individual angular momentum.
.. note::
Post-Newtonian corrections, if enabled, are included.
"""
px, py, pz = self.px, self.py, self.pz
amx = (self.ry * pz) - (self.rz * py)
amy = (self.rz * px) - (self.rx * pz)
amz = (self.rx * py) - (self.ry * px)
if self.include_pn_corrections:
if not "pn_amx" in self.__dict__:
self.register_auxiliary_attribute("pn_amx", "real")
if not "pn_amy" in self.__dict__:
self.register_auxiliary_attribute("pn_amy", "real")
if not "pn_amz" in self.__dict__:
self.register_auxiliary_attribute("pn_amz", "real")
amx += self.pn_amx
amy += self.pn_amy
amz += self.pn_amz
return np.array([amx, amy, amz]).T
@property
def angular_momentum(self):
"""Total angular momentum of the system.
.. note::
This quantity possibly includes the angular momentum of the
center-of-mass w.r.t. the origin of coordinates.
.. note::
Post-Newtonian corrections, if enabled, are included.
"""
return self.am.sum(0)
### kinetic energy
@property
def ke(self):
"""Individual kinetic energy.
.. note::
Post-Newtonian corrections, if enabled, are included.
"""
ke = 0.5 * self.mass * (self.vx**2 + self.vy**2 + self.vz**2)
if self.include_pn_corrections:
if not "pn_ke" in self.__dict__:
self.register_auxiliary_attribute("pn_ke", "real")
ke += self.pn_ke
return ke
@property
def kinetic_energy(self):
"""Total kinetic energy of the system.
.. note::
This quantity possibly includes the kinetic energy of the
center-of-mass w.r.t. the origin of coordinates.
.. note::
Post-Newtonian corrections, if enabled, are included.
"""
return float(self.ke.sum())
### potential energy
@property
def pe(self):
"""Individual potential energy.
"""
self.set_phi(self)
return self.mass * self.phi
@property
def potential_energy(self):
"""Total potential energy.
"""
return 0.5 * float(self.pe.sum())
### virial energy
@property
def ve(self):
"""Individual virial energy.
"""
return 2 * self.ke + self.pe
@property
def virial_energy(self):
"""Total virial energy.
"""
return 2 * self.kinetic_energy + self.potential_energy
### gravity
def set_tstep(self, ps, eta):
"""Set individual time-steps due to other particles.
"""
extensions.tstep.calc(self, ps, eta)
def set_phi(self, ps):
"""Set individual gravitational potential due to other particles.
"""
extensions.phi.calc(self, ps)
def set_acc(self, ps):
"""Set individual gravitational acceleration due to other particles.
"""
extensions.acc.calc(self, ps)
def set_pnacc(self, ps):
"""Set individual post-Newtonian gravitational acceleration due to
other particles.
"""
extensions.pnacc.calc(self, ps)
def set_acc_jerk(self, ps):
"""Set individual gravitational acceleration and jerk due to other
particles.
"""
extensions.acc_jerk.calc(self, ps)
def set_snap_crackle(self, ps):
"""Set individual gravitational snap and crackle due to other
particles.
"""
extensions.snap_crackle.calc(self, ps)
### miscellaneous methods
def min_tstep(self):
"""Minimum absolute value of tstep.
"""
return abs(self.tstep).min()
def max_tstep(self):
"""Maximum absolute value of tstep.
"""
return abs(self.tstep).max()
    ### length scales
@property
def virial_radius(self):
"""Virial radius of the system.
"""
mtot = self.total_mass
pe = self.potential_energy
return (mtot**2) / (-2*pe)
@property
def radial_size(self):
"""Radial size of the system (a.k.a. radius of gyration).
.. note::
This quantity is calculated w.r.t. the center-of-mass of the
system.
"""
com_r = self.com_r
rx = self.rx - com_r[0]
ry = self.ry - com_r[1]
rz = self.rz - com_r[2]
I = (self.mass * (rx**2 + ry**2 + rz**2)).sum()
s = (I / self.total_mass)**0.5
return s
### rescaling methods
def dynrescale_total_mass(self, total_mass):
"""Rescales the total mass of the system while maintaining its
dynamics unchanged.
"""
m_ratio = total_mass / self.total_mass
self.mass *= m_ratio
self.rx *= m_ratio
self.ry *= m_ratio
self.rz *= m_ratio
def dynrescale_radial_size(self, size):
"""Rescales the radial size of the system while maintaining its
dynamics unchanged.
"""
r_scale = size / self.radial_size
v_scale = 1 / r_scale**0.5
self.rx *= r_scale
self.ry *= r_scale
self.rz *= r_scale
self.vx *= v_scale
self.vy *= v_scale
self.vz *= v_scale
def dynrescale_virial_radius(self, rvir):
"""Rescales the virial radius of the system while maintaining its
dynamics unchanged.
"""
r_scale = rvir / self.virial_radius
v_scale = 1 / r_scale**0.5
self.rx *= r_scale
self.ry *= r_scale
self.rz *= r_scale
self.vx *= v_scale
self.vy *= v_scale
self.vz *= v_scale
def scale_to_virial(self):
"""Rescale system to virial equilibrium (2K + U = 0).
"""
ke = self.kinetic_energy
pe = self.potential_energy
v_scale = ((-0.5 * pe) / ke)**0.5
self.vx *= v_scale
self.vy *= v_scale
self.vz *= v_scale
def to_nbody_units(self):
"""Rescales system to nbody units while maintaining its dynamics
unchanged.
"""
self.dynrescale_total_mass(1.0)
self.dynrescale_virial_radius(1.0)
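# Illustrative sketch (added comment, not original code): the rescaling
# helpers above are typically chained on a concrete particle container such
# as the Bodies class defined further below; the particle count and the way
# the arrays are filled are hypothetical.
#
#     ps = Bodies(128)
#     # ... fill ps.mass, ps.rx/ry/rz, ps.vx/vy/vz ...
#     ps.com_to_origin()        # move to the centre-of-mass frame
#     ps.to_nbody_units()       # total mass = 1, virial radius = 1
#     ps.scale_to_virial()      # enforce 2K + U = 0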
class PNbodyMethods(NbodyMethods):
"""This class holds some post-Newtonian methods.
"""
### PN stuff
### TODO: move these methods to a more appropriate place...
def pn_kick_ke(self, tau):
"""Kicks kinetic energy due to post-Newtonian terms.
"""
if not "pn_ke" in self.__dict__:
self.register_auxiliary_attribute("pn_ke", "real")
pnfx = self.mass * self.pnax
pnfy = self.mass * self.pnay
pnfz = self.mass * self.pnaz
self.pn_ke -= (self.vx * pnfx + self.vy * pnfy + self.vz * pnfz) * tau
def pn_drift_com_r(self, tau):
"""Drifts center of mass position due to post-Newtonian terms.
"""
if not "pn_mrx" in self.__dict__:
self.register_auxiliary_attribute("pn_mrx", "real")
if not "pn_mry" in self.__dict__:
self.register_auxiliary_attribute("pn_mry", "real")
if not "pn_mrz" in self.__dict__:
self.register_auxiliary_attribute("pn_mrz", "real")
self.pn_mrx += self.pn_mvx * tau
self.pn_mry += self.pn_mvy * tau
self.pn_mrz += self.pn_mvz * tau
def pn_kick_lmom(self, tau):
"""Kicks linear momentum due to post-Newtonian terms.
"""
if not "pn_mvx" in self.__dict__:
self.register_auxiliary_attribute("pn_mvx", "real")
if not "pn_mvy" in self.__dict__:
self.register_auxiliary_attribute("pn_mvy", "real")
if not "pn_mvz" in self.__dict__:
self.register_auxiliary_attribute("pn_mvz", "real")
pnfx = self.mass * self.pnax
pnfy = self.mass * self.pnay
pnfz = self.mass * self.pnaz
self.pn_mvx -= pnfx * tau
self.pn_mvy -= pnfy * tau
self.pn_mvz -= pnfz * tau
def pn_kick_amom(self, tau):
"""Kicks angular momentum due to post-Newtonian terms.
"""
if not "pn_amx" in self.__dict__:
self.register_auxiliary_attribute("pn_amx", "real")
if not "pn_amy" in self.__dict__:
self.register_auxiliary_attribute("pn_amy", "real")
if not "pn_amz" in self.__dict__:
self.register_auxiliary_attribute("pn_amz", "real")
pnfx = self.mass * self.pnax
pnfy = self.mass * self.pnay
pnfz = self.mass * self.pnaz
self.pn_amx -= (self.ry * pnfz - self.rz * pnfy) * tau
self.pn_amy -= (self.rz * pnfx - self.rx * pnfz) * tau
self.pn_amz -= (self.rx * pnfy - self.ry * pnfx) * tau
AbstractNbodyMethods = NbodyMethods
if "--pn_order" in sys.argv:
AbstractNbodyMethods = PNbodyMethods
#@decallmethods(timings)
#@make_attrs
# class Body(AbstractNbodyMethods):
# """
# The most basic particle type.
# """
# attrs = AbstractNbodyMethods.attrs + AbstractNbodyMethods.special_attrs
# names = AbstractNbodyMethods.names + AbstractNbodyMethods.special_names
# dtype = [(_[0], _[1]) for _ in attrs]
# data0 = np.zeros(0, dtype)
#
# def __init__(self, n=0, data=None):
# """
# Initializer
# """
# if data is None:
# if n: data = np.zeros(n, self.dtype)
# else: data = self.data0
# self.data = data
# self.n = len(self)
#
# #
# # miscellaneous methods
# #
#
#
# def append(self, obj):
# if obj.n:
# self.data = np.concatenate((self.data, obj.data))
# self.n = len(self)
#
#
# def remove(self, id):
# slc = np.where(self.id == id)
# self.data = np.delete(self.data, slc, 0)
# self.n = len(self)
#
#
# def insert(self, id, obj):
# index = np.where(self.id == id)[0]
# v = obj.data
# self.data = np.insert(self.data, index*np.ones(len(v)), v, 0)
# self.n = len(self)
#
#
# def pop(self, id=None):
# if id is None:
# index = -1
# id = self.id[-1]
# else:
# index = np.where(self.id == id)[0]
# obj = self[index]
# self.remove(id)
# return obj
#
#
# def get_state(self):
# return self.data
#
#
# def set_state(self, array):
# self.data[...] = array
# self.n = len(self)
###############################################################################
@decallmethods(timings)
class Bodies(AbstractNbodyMethods):
"""
"""
def __init__(self, n=0, items=None):
if items is None:
for (name, dtype) in self.dtype[:1]:
self.__dict__[name] = np.arange(n, dtype=dtype)
for (name, dtype) in self.dtype[1:]+self.special_dtype:
self.__dict__[name] = np.zeros(n, dtype=dtype)
else:
self.__dict__.update(items)
def __repr__(self):
return repr(self.__dict__)
def __str__(self):
fmt = type(self).__name__+"(["
if self.n:
for (k, v) in self.__dict__.items():
fmt += "\n\t{0}: {1},".format(k, v)
fmt += "\n"
fmt += "])"
return fmt
def __contains__(self, id):
return id in self.id
def __len__(self):
return len(self.id)
@property
def n(self):
return len(self)
def copy(self):
return copy.deepcopy(self)
def append(self, obj):
if obj.n:
items = {k: np.concatenate((getattr(self, k), v))
for (k, v) in obj.__dict__.items()}
self.__dict__.update(items)
def __getitem__(self, slc):
if isinstance(slc, int):
slc = [slc]
items = {k: v[slc] for (k, v) in self.__dict__.items()}
return type(self)(items=items)
def __setitem__(self, slc, values):
for (k, v) in self.__dict__.items():
v[slc] = getattr(values, k)
def astype(self, cls):
newobj = cls()
tmp = cls(self.n)
tmp.set_state(self.get_state())
newobj.append(tmp)
return newobj
def get_state(self):
array = np.zeros(self.n, dtype=self.dtype)
for name in array.dtype.names:
array[name] = getattr(self, name)
return array
def set_state(self, array):
for name in array.dtype.names:
if name in self.__dict__:
self.__dict__[name][...] = array[name]
########## end of file ##########
| mit | 647,833,605,706,238,800 | 26.497041 | 79 | 0.528298 | false |
ValorNaram/isl | inputchangers/001.py | 1 | 15830 | from __future__ import print_function
import os
globe = {}
generateIndex = False
lang = "en"
brackets = ["[&]", "{&}", "(&)", "<&>"]
class compiler():
def __init__(self):
self = self
def languageSupport(self, f):
if "langBuffer" in globe:
langBuffer = globe["langBuffer"]
else:
langBuffer = {}
if not f in langBuffer:
if os.path.exists(os.path.join(os.getcwd(), "lang", lang, f)):
sfile = open(os.path.join(os.getcwd(), "lang", lang, f), "r")
langBuffer[f] = sfile.read()
sfile.close()
else:
return False
globe["langBuffer"] = langBuffer
return langBuffer[f]
def searchBracketMatch(self, text, startPos=0):
global brackets
count = 0
toMatch = ""
for bracket in brackets:
first, second = bracket.split("&")
if text[startPos] == first:
toMatch = bracket
break
if toMatch == "":
return [startPos, -1]
first, second = bracket.split("&")
bStart = startPos
while True:
bEnd = text.find(second, bStart)
tmp = text[bStart:bEnd+1]
if tmp.find(first) > -1:
if text[bEnd-1] == "\\":
count -= 1
pass
count += 1
bStart = bEnd
else:
count -= 1
if count == 0:
break
return [startPos, bEnd]
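	# Illustrative behaviour (added comment, not original code): for
	# text = "a [bc] d" and startPos = 2 (the "["), searchBracketMatch
	# returns [2, 5], the indices of the opening and closing bracket;
	# if the character at startPos is not an opening bracket it returns
	# [startPos, -1].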
def canApplyCode(self, src, curPosition):
bStart = -1
bEnd = curPosition
while True:
if src.find("<", bStart+1, bEnd) > curPosition or src.find("<", bStart+1, bEnd) == -1:
break
else:
bStart = src.find("<", bStart+1, bEnd)
bStart, bEnd = self.searchBracketMatch(src, bStart)
if curPosition > bStart and bEnd > curPosition:
			return False #Position is in between HTML code, so no treatment as ISL code
		else:
			return True #Position isn't in between HTML code, so treatment as ISL code
def linkOptionTrigger(self, data):
text, url = data
if url.find(";") > -1:
target, url = url.split(";", 1)
target = target.strip()
url = url.strip()
if url.find("@") > -1:
url.replace("mailto:", "", 1)
url = "mailto:" + url
return "<a href='" + url + "' tabindex=%tabindex gridindex='%gridindex' onfocus='getIndex(this)' target='_" + target.replace("_", "", 1) + "'>" + text + "</a>"
if url.find("@") > -1:
url.replace("mailto:", "", 1)
url = "mailto:" + url
return "<a href='" + url + "' tabindex=%tabindex gridindex='%gridindex' onfocus='getIndex(this)'>" + text + "</a>"
def imageOptionTrigger(self, data):
symbol, alt, url = data
return "<img tabindex=%tabindex gridindex='%gridindex' onfocus='getIndex(this)' src='" + url + "' alt='" + alt + "' title='" + alt + "' />"
def colourOptionTrigger(self, data):
text, colour = data
if colour.count(";") > 0:
return "<span style='" + colour + "'>" + text + "</span>"
else:
return "<span style='color:" + colour + ";'>" + text + "</span>"
def abbrOptionTrigger(self, data):
abbr, text = data
if text.find(";") > -1:
ariaText, text = text.split(";", 1)
ariaText = ariaText.strip()
text = text.strip()
if ariaText.lower() == abbr.lower():
message = "Text explicit for assistive technology users specified."
result = self.languageSupport("abbr1")
if not result == False:
message = result
print(" \033[0;33m" + message + "\033[0;m")
return "<a aria-label='" + str(ariaText) + "' title='" + text + "'>" + abbr + "</a>"
else:
message = "Incorrect use of abbreviation cheat prevented"
result = self.languageSupport("abbr0")
if not result == False:
message = result
print(" \033[0;33m" + message + "\033[0;m")
return "<abbr title='" + text + "'>" + abbr + "</abbr>"
def videoORaudio(self, data):
tag, title, url = data
result = self.languageSupport("no" + tag + "support")
if not result:
result = "No " + tag + " support"
return "<" + tag + " controls tabindex=%tabindex gridindex='%gridindex' aria-label='" + title + "' src='" + url + "'>" + result + "</" + tag + ">"
def formOptionTrigger(self, data):
output = "<form enctype='text/plain' method='POST' action='" + data[2] + "'>"
fieldKeys = ["title", "type", "name", "placeholder"]
fields = data[1].split(";")
keys = {"postal code": {"title" : "Postal code" ,"type" : "text", "name" : "postalcode"},
"housenumber" : {"title" : "Housenumber", "type" : "text", "name" : "housenumber", "placeholder" : "e.g. 42"},
"street": {"title" : "Street" ,"type" : "text", "name" : "street", "placeholder" : "e.g. Wallstreet"},
"country": {"title" : "Country" ,"type" : "text", "name" : "country", "placeholder" : "e.g. USA"},
"city": {"title" : "City" ,"type" : "text", "name" : "city", "placeholder" : "e.g. New York"},
"firstname": {"title" : "First name" ,"type" : "text", "name" : "firstname"},
"lastname": {"title" : "Last name" ,"type" : "text", "name" : "lastname"},
"email": {"title" : "Mail Address" ,"type" : "email", "name" : "email", "placeholder" : "e.g. [email protected]"},
"mobile": {"title" : "Mobile" ,"type" : "text", "name" : "mobile"},
"telephone": {"title" : "Telephone" ,"type" : "text", "name" : "telephone"},
"password": {"title" : "Password" ,"type" : "password", "name" : "password"},
"search" : {"type" : "text", "name" : "search", "placeholder" : "Search"}}
for field in fields:
if field in keys:
attr = keys[field]
title = ("title" in attr) and attr["title"] or ""
_type = ("type" in attr) and attr["type"] or ""
name = ("name" in attr) and attr["name"] or ""
placeholder = ("placeholder" in attr) and attr["placeholder"] or ""
output += "<label>" + title + "<input type='" + _type + "' name='" + name + "' placeholder='" + placeholder + "' tabindex='%tabindex' gridindex='%gridindex' /></label><br/>"
output += "<input type='submit' /></form>"
return output
def searchFunctions(self, src):
#search brackets
triggers = {"(" : self.linkOptionTrigger, "!" : self.imageOptionTrigger, "{" : self.colourOptionTrigger, "[" : self.abbrOptionTrigger, "video" : self.videoORaudio, "audio" : self.videoORaudio, "form" : self.formOptionTrigger}
while src.find("[") > -1:
data = []
original = ""
new = ""
function = ""
start, end = self.searchBracketMatch(src, src.find("["))
if end > 0:
if start > 0:
bStart = start
bEnd = start
#going backwards for a whitespace, beginning of variable 'src', for a newline ('\n') character
while True:
#check, if index exists in variable 'src'
if bStart-1 > -1:
#ANSWER: Yes
#It is a whitespace?
if src[bStart] == " ":
#ANSWER: Yes, stop searching, take the results you have
break
#Check for the possibility to go one step backward
elif bStart-1 > -1:
#ANSWER: Yes. Check for the newline character
if src[bStart-1:bStart] == "\r":
#ANSWER: Yes, stop searching, take the results you have
break
bStart -= 1
else:
#ANSWER: Yes, stop searching, take the results you have
break
if src[bStart+1:bEnd] in triggers:
#A cheat was found (prefix)
function = triggers[src[bStart+1:bEnd]]
original += src[bStart+1:bEnd]
data.append(src[bStart+1:bEnd])
if src[end+1] in triggers and function == "":
#A cheat was found
function = triggers[src[end+1]]
data.append(src[start+1:end])
original += src[start:end+1]
start, end = self.searchBracketMatch(src, end+1)
if end > 0 and not function == "":
data.append(src[start+1:end])
original += src[start:end+1]
new = function(data)
src = src.replace(original, new)
src = src.replace(original, new)
return src
def buffering_intern(self, buffer, path, langFile, errortext):
global globe
if os.path.exists(path):
sfile = open(path, "r")
filebuffer = sfile.read()
sfile.close()
globe[buffer] = filebuffer
return filebuffer
else:
result = self.languageSupport("emojirefmissing")
if not result == False:
print(" \033[0;31m" + result + "\033[0;m")
else:
print(" \033[0;31mFile" + errortext + "\033[0;m")
return ""
def code(self, src, char):
global globe
unicodeBuffer = ""
bStart = 0
bEnd = 0
if "unicodeBuffer" in globe:
unicodeBuffer = globe["unicodeBuffer"]
if unicodeBuffer == "":
unicodeBuffer = self.buffering_intern("unicodeBuffer", os.path.join("unicode", "code"), "coderefmissing", "Code cheat not available due to missing file 'unicode/code'")
if unicodeBuffer == "":
return src
while src.find(char, 0, src.find(char) +len(char)) > -1:
bStart = src.find(char, 0, src.find(char) +len(char)) + len(char)
bEnd = src.find(char, bStart)
if bEnd == -1:
break
text = src[bStart:bEnd]
text_new = text
filebuffer = ""
for item in unicodeBuffer.split("\n"):
if item.find(" | ") > -1:
code, unicode = item.split(" | ", 1)
text_new = text_new.replace(code.strip(), unicode.strip())
src = src.replace(str(char) + text + str(char), "<code>" + text_new + "</code>")
return src
def headings(self, src):
global generateIndex
index = ""
headings = {"#" : True, "##" : True, "###" : True, "####" : True, "#####" : True, "######" : True}
if src.find("%index") > -1:
generateIndex = True
for entry in src.split("\r"):
if entry.find(" ") > -1:
heading, name = entry.split(" ", 1)
if heading in headings:
if generateIndex:
if index == "":
index = "[" + name + "](#" + name.lower().replace(" ", "_") + ")"
else:
index += "\r[" + name + "](#" + name.lower().replace(" ", "_") + ")"
replaceWith = "<h" + str(len(heading)) + " id='" + name.lower().replace(" ", "_") + "' tabindex=%tabindex gridindex='%gridindex' onfocus='getIndex(this)'>" + name + "</h" + str(len(heading)) + ">"
src = src.replace(entry, replaceWith, 1)
else:
src = src.replace(entry, "<h" + str(len(heading)) + " tabindex=%tabindex gridindex='%gridindex' onfocus='getIndex(this)'>" + name + "</h" + str(len(heading)) + ">", 1)
if generateIndex:
			index = self.searchFunctions(index)  # method lives on the class, so call via self
src = src.replace("%index", index, 1)
return src
def emojis(self, src):
global globe
emojiBuffer = ""
if "emojiBuffer" in globe:
emojiBuffer = globe["emojiBuffer"]
if emojiBuffer == "":
emojiBuffer = self.buffering_intern("emojiBuffer", os.path.join("unicode", "emojis"), "emojirefmissing", "Emoji cheat not available due to missing file 'unicode/emojis'")
if emojiBuffer == "":
return src
for item in emojiBuffer.split("\n"):
if item.find(" | ") > -1:
code, emoji = item.split(" | ", 1)
src = src.replace(code.strip(), emoji.strip())
return src
def table_intern(self, item, formatting, tag):
# ToDo
index = 0
output = "<tr>"
item = item.split("|")
for entry in formatting.split("|"):
entry = entry.strip()
if entry.startswith(":") and entry.endswith(":"):
output += "<" + tag + " style='text-align:center;'>"
elif entry.startswith(":"):
output += "<" + tag + " style='text-align:left;'>"
elif entry.endswith(":"):
output += "<" + tag + " style='text-align:right;'>"
elif entry.endswith("---"):
output += "<" + tag + " style='text-align:justify;'>"
else:
output += "<" + tag + ">"
output += item[index].strip() + "</" + tag + ">"
index += 1
output += "</tr>"
return output
def table(self, src):
# ToDo
# It is just designed to parse one table per file.
if src.find("|") == -1:
return src
tables = []
oldTables = []
data = []
dataOld = []
for item in src.split("\r"):
if item == "" and len(data) > 2 or item.find("|") == -1 and len(data) > 2:
tables.append(data)
oldTables.append(dataOld)
data = []
dataOld = []
if item.count("|") > 0:
itemOld = item
item = list(item.strip())
if item[0] == "|":
del item[0]
if item[len(item)-1] == "|":
del item[len(item)-1]
data.append("".join(item).strip())
dataOld.append("".join(itemOld))
		#Preparing
dataIndex = 0
for data in tables:
output = "<table>"
oldData = data
heading = data[0]
formatting = data[1]
del data[0], data[0]
output += self.table_intern(heading.strip(), formatting, "th")
#Table Content
for item in data:
item = item.strip()
output += self.table_intern(item, formatting, "td")
output += "</table>"
data = "\r".join(data)
src = src.replace("\r".join(oldTables[dataIndex]), output)
dataIndex += 1
return src
def translate_intern(self, src, startTag, endTag, xml):
while src.find(startTag, 0, src.find(endTag, src.find(startTag)+len(startTag))) > -1:
bStart = src.find(startTag, 0, src.find(endTag, src.find(startTag)+len(startTag))) + len(startTag)
bEnd = src.find(endTag, bStart)
if bEnd == -1:
break
text = src[bStart:bEnd]
if self.canApplyCode(src, bStart) and self.canApplyCode(src, bEnd):
src = src.replace(startTag + text + endTag, xml.replace("%s", text, 1))
else:
src = src.replace(startTag + text + endTag, "\\mox1" + text + endTag)
src = src.replace("\\mox1", startTag)
return src
def translate(self, src, startTag, endTag, xml):
src = self.translate_intern(src, " " + startTag, endTag, " " + xml)
#src = self.translate_intern(src, ">" + startTag, endTag, ">" + xml) deprecated
src = self.translate_intern(src, "\r" + startTag, endTag, "\r" + xml)
src = self.translate_intern(src, startTag, endTag, xml)
return src
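# Illustrative behaviour (added comment, not original code):
#
#     c = compiler()
#     c.translate("some **bold** text", "**", "**", "<b>%s</b>")
#     # -> "some <b>bold</b> text"
#
# i.e. translate() swaps a start/end marker pair for the given XML template,
# leaving occurrences that sit inside existing HTML tags untouched.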
def main(islinput, inputfile, pluginData, globalData):
global lang, globe
compile = compiler()
globe = globalData
currentIndex = 0
if "doclang" in pluginData:
lang = pluginData["doclang"]
for item in islinput:
if item.startswith(": ") and item.endswith(" :") or item == "::" or item == "":
islinput[currentIndex] = item
currentIndex += 1
continue
else:
key = item.split("\r")
if key[0].startswith("- ") and key[0].endswith(" -") or key[0] == "--":
key, src = item.split("\r", 1)
key = key + "\r"
else:
key = ""
src = item
src = "\r" + src #Entry start needs an extra marker
src = compile.code(src, "`") #Markdown code, change font
src = compile.table(src) #Added Markdown tables
src = src.replace("\r---\r", "\r<hr>\r").replace("\r---", "\r<hr>\r") #Markdown splitline
src = compile.headings(src) #Partial support for Markdown headings
src = compile.searchFunctions(src) #Partial Markdown link- and image implementation. My own implementation for form, abbreviation, coloured text, audio and video.
src = compile.translate(src, "bbb'", "'","<big><big><big>%s</big></big></big>") #My own specification for biggest text
src = compile.translate(src, "bb'", "'", "<big><big>%s</big></big>") #My own specification for bigger text
src = compile.translate(src, "b'", "'", "<big>%s</big>") #My own specification for big text
src = compile.translate(src, "s'", "'", "<small>%s</small>") #My own specification for small text
src = compile.translate(src, "**", "**", "<b>%s</b>") #Markdown bold
src = compile.translate(src, "*", "*", "<b>%s</b>") #WhatsApp code bold
src = compile.translate(src, "_", "_", "<i>%s</i>") #Markdown italic, WhatsApp code italic
src = compile.translate(src, "~", "~", "<del>%s</del>") #unofficial Markdown strikethrough; official WhatsApp strikethrough
src = compile.translate(src, "°", "°", "<mark>%s</mark>") #My own specification for highlighted text
src = compile.translate(src, "^", "^", "<sup>%s</sup>") #Markdown superscript
src = compile.translate(src, "\r> ", "\r", "\r<blockquote>\r%s</blockquote>\r") #Markdown citation, E-Mail style citation
src = src.replace("</blockquote>\r<blockquote>\r", "\r")
src = compile.emojis(src) #Following emoji- (Unicode Consortium) and its code specifications
if src.startswith("\r"):
src = src.replace("\r", "", 1) #Remove extra marker from entry start
islinput[currentIndex] = key + src
currentIndex += 1
return islinput, pluginData, globe
| mit | -5,764,654,230,822,029,000 | 39.480818 | 227 | 0.601655 | false |
graingert/maluroam | maluroam/eduroam_snort/models.py | 1 | 3623 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# models.py
#
# Copyright 2012 Thomas Grainger <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation; version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
from django.db import models
class Event(models.Model):
id = models.BigIntegerField(db_column= "event_id", primary_key=True)
username = models.CharField(max_length=765)
radius_account_id = models.CharField(max_length=765)
radius_session_id = models.CharField(max_length=765)
radius_info = models.TextField()
ip_src = models.CharField(max_length=765)
ip_dst = models.CharField(max_length=765)
start = models.DateTimeField()
finish = models.DateTimeField()
alerts = models.BigIntegerField()
blacklist = models.ForeignKey("Blacklist", db_column = "blacklist")
rule = models.ForeignKey("Rule", db_column = "rule")
rule_class = models.CharField(max_length=93)
def __unicode__(self):
return "{username}@{ip_src} accessed {ip_dst} from {start} till {finish}. Rule class: {rule_class}".format(
username = self.username,
ip_src = self.ip_src,
ip_dst = self.ip_dst,
start = self.start,
finish = self.finish,
rule_class = self.rule_class
)
class Meta:
db_table = u'event'
unique_together = ("username", "ip_src", "ip_dst", "start", "finish")
class Rule(models.Model):
id = models.BigIntegerField(primary_key=True, db_column="rule_id", editable=False)
name = models.CharField(max_length=765, db_column = "rule_name")
hide = models.BooleanField()
@models.permalink
def get_absolute_url(self):
return ('rule', (), {"pk":str(self.pk)});
def __unicode__(self):
return "{name}[{pk}]".format(name=self.name, pk=self.pk)
class Meta:
db_table = u'rules'
class Blacklist(models.Model):
id = models.BigIntegerField(primary_key=True, db_column="bl_id", editable=False)
name = models.CharField(max_length=765, editable=False)
url = models.CharField(max_length=765, editable=False)
serialized = models.TextField(editable=False)
updated = models.DateTimeField(editable=False)
hide = models.BooleanField()
@models.permalink
def get_absolute_url(self):
return ('blacklist', (), {"pk":str(self.pk)});
def __unicode__(self):
return self.name
class Meta:
db_table = u'blacklists'
class Script(models.Model):
id = models.AutoField(primary_key=True, db_column = "script_id", editable=False)
name = models.CharField(max_length=765)
updated = models.DateTimeField(db_column="lastupdated", editable=False)
@models.permalink
def get_absolute_url(self):
return ('script', (), {"pk":str(self.pk)});
def __unicode__(self):
return "{name}[{pk}]".format(
name=self.name,
pk=self.pk
)
class Meta:
db_table = u'scripts'
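# -----------------------------------------------------------------------------
# Usage sketch (illustrative only; not part of the original models module).
# Field and model names match the definitions above; the import path and the
# filter values are hypothetical.
#
#     from maluroam.eduroam_snort.models import Event, Rule, Blacklist
#
#     noisy = (Event.objects
#              .filter(rule__hide=False, alerts__gte=10)
#              .order_by("-start"))
#     for event in noisy[:20]:
#         print(event)
# -----------------------------------------------------------------------------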
| agpl-3.0 | -4,906,065,184,165,069,000 | 33.504762 | 115 | 0.642009 | false |
Jozhogg/iris | docs/iris/example_tests/test_COP_1d_plot.py | 1 | 1263 | # (C) British Crown Copyright 2010 - 2014, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
# Import Iris tests first so that some things can be initialised before importing anything else.
import iris.tests as tests
from . import extest_util
with extest_util.add_examples_to_path():
import COP_1d_plot
class TestCOP1DPlot(tests.GraphicsTest):
"""Test the COP_1d_plot example code."""
def test_COP_1d_plot(self):
with extest_util.show_replaced_by_check_graphic(self):
COP_1d_plot.main()
if __name__ == '__main__':
tests.main()
| lgpl-3.0 | 4,704,153,623,236,729,000 | 33.135135 | 96 | 0.726841 | false |
rail-berkeley/d4rl | d4rl/locomotion/maze_env.py | 1 | 14289 | # Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Adapted from efficient-hrl maze_env.py."""
import os
import tempfile
import xml.etree.ElementTree as ET
import math
import numpy as np
import gym
from copy import deepcopy
RESET = R = 'r' # Reset position.
GOAL = G = 'g'
# Maze specifications for dataset generation
U_MAZE = [[1, 1, 1, 1, 1],
[1, R, 0, 0, 1],
[1, 1, 1, 0, 1],
[1, G, 0, 0, 1],
[1, 1, 1, 1, 1]]
BIG_MAZE = [[1, 1, 1, 1, 1, 1, 1, 1],
[1, R, 0, 1, 1, 0, 0, 1],
[1, 0, 0, 1, 0, 0, G, 1],
[1, 1, 0, 0, 0, 1, 1, 1],
[1, 0, 0, 1, 0, 0, 0, 1],
[1, G, 1, 0, 0, 1, 0, 1],
[1, 0, 0, 0, 1, G, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1]]
HARDEST_MAZE = [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, R, 0, 0, 0, 1, G, 0, 0, 0, 0, 1],
[1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1],
[1, 0, 0, 0, 0, G, 0, 1, 0, 0, G, 1],
[1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1],
[1, 0, G, 1, 0, 1, 0, 0, 0, 0, 0, 1],
[1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1],
[1, 0, 0, 1, G, 0, G, 1, 0, G, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
# Maze specifications with a single target goal
U_MAZE_TEST = [[1, 1, 1, 1, 1],
[1, R, 0, 0, 1],
[1, 1, 1, 0, 1],
[1, G, 0, 0, 1],
[1, 1, 1, 1, 1]]
BIG_MAZE_TEST = [[1, 1, 1, 1, 1, 1, 1, 1],
[1, R, 0, 1, 1, 0, 0, 1],
[1, 0, 0, 1, 0, 0, 0, 1],
[1, 1, 0, 0, 0, 1, 1, 1],
[1, 0, 0, 1, 0, 0, 0, 1],
[1, 0, 1, 0, 0, 1, 0, 1],
[1, 0, 0, 0, 1, 0, G, 1],
[1, 1, 1, 1, 1, 1, 1, 1]]
HARDEST_MAZE_TEST = [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, R, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1],
[1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1],
[1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1],
[1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1],
[1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1],
[1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1],
[1, 0, 0, 1, 0, 0, 0, 1, 0, G, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
# Maze specifications for evaluation
U_MAZE_EVAL = [[1, 1, 1, 1, 1],
[1, 0, 0, R, 1],
[1, 0, 1, 1, 1],
[1, 0, 0, G, 1],
[1, 1, 1, 1, 1]]
BIG_MAZE_EVAL = [[1, 1, 1, 1, 1, 1, 1, 1],
[1, R, 0, 0, 0, 0, G, 1],
[1, 0, 1, 0, 1, 1, 0, 1],
[1, 0, 0, 0, 0, 1, 0, 1],
[1, 1, 1, 0, 0, 1, 1, 1],
[1, G, 0, 0, 0, 0, 0, 1],
[1, 0, 0, 1, 1, G, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1]]
HARDEST_MAZE_EVAL = [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, R, 0, 1, G, 0, 0, 1, 0, G, 0, 1],
[1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1],
[1, 0, 0, 1, 0, 1, G, 0, 0, 0, 0, 1],
[1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1],
[1, G, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1],
[1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1],
[1, 0, 0, 0, G, 1, G, 0, 0, 0, G, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
U_MAZE_EVAL_TEST = [[1, 1, 1, 1, 1],
[1, 0, 0, R, 1],
[1, 0, 1, 1, 1],
[1, 0, 0, G, 1],
[1, 1, 1, 1, 1]]
BIG_MAZE_EVAL_TEST = [[1, 1, 1, 1, 1, 1, 1, 1],
[1, R, 0, 0, 0, 0, G, 1],
[1, 0, 1, 0, 1, 1, 0, 1],
[1, 0, 0, 0, 0, 1, 0, 1],
[1, 1, 1, 0, 0, 1, 1, 1],
[1, 0, 0, 0, 0, 0, 0, 1],
[1, 0, 0, 1, 1, 0, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1]]
HARDEST_MAZE_EVAL_TEST = [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, R, 0, 1, 0, 0, 0, 1, 0, G, 0, 1],
[1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1],
[1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1],
[1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1],
[1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1],
[1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1],
[1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
class MazeEnv(gym.Env):
LOCOMOTION_ENV = None # Must be specified by child class.
def __init__(
self,
maze_map,
maze_size_scaling,
maze_height=0.5,
manual_collision=False,
non_zero_reset=False,
reward_type='dense',
*args,
**kwargs):
if self.LOCOMOTION_ENV is None:
raise ValueError('LOCOMOTION_ENV is unspecified.')
xml_path = self.LOCOMOTION_ENV.FILE
tree = ET.parse(xml_path)
worldbody = tree.find(".//worldbody")
self._maze_map = maze_map
self._maze_height = maze_height
self._maze_size_scaling = maze_size_scaling
self._manual_collision = manual_collision
self._maze_map = maze_map
# Obtain a numpy array form for a maze map in case we want to reset
# to multiple starting states
temp_maze_map = deepcopy(self._maze_map)
for i in range(len(maze_map)):
for j in range(len(maze_map[0])):
if temp_maze_map[i][j] in [RESET,]:
temp_maze_map[i][j] = 0
elif temp_maze_map[i][j] in [GOAL,]:
temp_maze_map[i][j] = 1
self._np_maze_map = np.array(temp_maze_map)
torso_x, torso_y = self._find_robot()
self._init_torso_x = torso_x
self._init_torso_y = torso_y
for i in range(len(self._maze_map)):
for j in range(len(self._maze_map[0])):
struct = self._maze_map[i][j]
if struct == 1: # Unmovable block.
# Offset all coordinates so that robot starts at the origin.
ET.SubElement(
worldbody, "geom",
name="block_%d_%d" % (i, j),
pos="%f %f %f" % (j * self._maze_size_scaling - torso_x,
i * self._maze_size_scaling - torso_y,
self._maze_height / 2 * self._maze_size_scaling),
size="%f %f %f" % (0.5 * self._maze_size_scaling,
0.5 * self._maze_size_scaling,
self._maze_height / 2 * self._maze_size_scaling),
type="box",
material="",
contype="1",
conaffinity="1",
rgba="0.7 0.5 0.3 1.0",
)
torso = tree.find(".//body[@name='torso']")
geoms = torso.findall(".//geom")
_, file_path = tempfile.mkstemp(text=True, suffix='.xml')
tree.write(file_path)
self.LOCOMOTION_ENV.__init__(self, *args, file_path=file_path, non_zero_reset=non_zero_reset, reward_type=reward_type, **kwargs)
self.target_goal = None
def _xy_to_rowcol(self, xy):
size_scaling = self._maze_size_scaling
xy = (max(xy[0], 1e-4), max(xy[1], 1e-4))
return (int(1 + (xy[1]) / size_scaling),
int(1 + (xy[0]) / size_scaling))
def _get_reset_location(self,):
prob = (1.0 - self._np_maze_map) / np.sum(1.0 - self._np_maze_map)
prob_row = np.sum(prob, 1)
row_sample = np.random.choice(np.arange(self._np_maze_map.shape[0]), p=prob_row)
col_sample = np.random.choice(np.arange(self._np_maze_map.shape[1]), p=prob[row_sample] * 1.0 / prob_row[row_sample])
reset_location = self._rowcol_to_xy((row_sample, col_sample))
# Add some random noise
random_x = np.random.uniform(low=0, high=0.5) * 0.5 * self._maze_size_scaling
random_y = np.random.uniform(low=0, high=0.5) * 0.5 * self._maze_size_scaling
return (max(reset_location[0] + random_x, 0), max(reset_location[1] + random_y, 0))
def _rowcol_to_xy(self, rowcol, add_random_noise=False):
row, col = rowcol
x = col * self._maze_size_scaling - self._init_torso_x
y = row * self._maze_size_scaling - self._init_torso_y
if add_random_noise:
x = x + np.random.uniform(low=0, high=self._maze_size_scaling * 0.25)
y = y + np.random.uniform(low=0, high=self._maze_size_scaling * 0.25)
return (x, y)
def goal_sampler(self, np_random, only_free_cells=True, interpolate=True):
valid_cells = []
goal_cells = []
for i in range(len(self._maze_map)):
for j in range(len(self._maze_map[0])):
if self._maze_map[i][j] in [0, RESET, GOAL] or not only_free_cells:
valid_cells.append((i, j))
if self._maze_map[i][j] == GOAL:
goal_cells.append((i, j))
# If there is a 'goal' designated, use that. Otherwise, any valid cell can
# be a goal.
sample_choices = goal_cells if goal_cells else valid_cells
cell = sample_choices[np_random.choice(len(sample_choices))]
xy = self._rowcol_to_xy(cell, add_random_noise=True)
random_x = np.random.uniform(low=0, high=0.5) * 0.25 * self._maze_size_scaling
random_y = np.random.uniform(low=0, high=0.5) * 0.25 * self._maze_size_scaling
xy = (max(xy[0] + random_x, 0), max(xy[1] + random_y, 0))
return xy
def set_target_goal(self, goal_input=None):
if goal_input is None:
self.target_goal = self.goal_sampler(np.random)
else:
self.target_goal = goal_input
print ('Target Goal: ', self.target_goal)
## Make sure that the goal used in self._goal is also reset:
self._goal = self.target_goal
def _find_robot(self):
structure = self._maze_map
size_scaling = self._maze_size_scaling
for i in range(len(structure)):
for j in range(len(structure[0])):
if structure[i][j] == RESET:
return j * size_scaling, i * size_scaling
raise ValueError('No robot in maze specification.')
def _is_in_collision(self, pos):
x, y = pos
structure = self._maze_map
size_scaling = self._maze_size_scaling
for i in range(len(structure)):
for j in range(len(structure[0])):
if structure[i][j] == 1:
minx = j * size_scaling - size_scaling * 0.5 - self._init_torso_x
maxx = j * size_scaling + size_scaling * 0.5 - self._init_torso_x
miny = i * size_scaling - size_scaling * 0.5 - self._init_torso_y
maxy = i * size_scaling + size_scaling * 0.5 - self._init_torso_y
if minx <= x <= maxx and miny <= y <= maxy:
return True
return False
def step(self, action):
if self._manual_collision:
old_pos = self.get_xy()
inner_next_obs, inner_reward, done, info = self.LOCOMOTION_ENV.step(self, action)
new_pos = self.get_xy()
if self._is_in_collision(new_pos):
self.set_xy(old_pos)
else:
inner_next_obs, inner_reward, done, info = self.LOCOMOTION_ENV.step(self, action)
next_obs = self._get_obs()
return next_obs, inner_reward, done, info
def _get_best_next_rowcol(self, current_rowcol, target_rowcol):
"""Runs BFS to find shortest path to target and returns best next rowcol.
Add obstacle avoidance"""
current_rowcol = tuple(current_rowcol)
target_rowcol = tuple(target_rowcol)
if target_rowcol == current_rowcol:
return target_rowcol
visited = {}
to_visit = [target_rowcol]
while to_visit:
next_visit = []
for rowcol in to_visit:
visited[rowcol] = True
row, col = rowcol
left = (row, col - 1)
right = (row, col + 1)
down = (row + 1, col)
up = (row - 1, col)
for next_rowcol in [left, right, down, up]:
if next_rowcol == current_rowcol: # Found a shortest path.
return rowcol
next_row, next_col = next_rowcol
if next_row < 0 or next_row >= len(self._maze_map):
continue
if next_col < 0 or next_col >= len(self._maze_map[0]):
continue
if self._maze_map[next_row][next_col] not in [0, RESET, GOAL]:
continue
if next_rowcol in visited:
continue
next_visit.append(next_rowcol)
to_visit = next_visit
raise ValueError('No path found to target.')
def create_navigation_policy(self,
goal_reaching_policy_fn,
obs_to_robot=lambda obs: obs[:2],
obs_to_target=lambda obs: obs[-2:],
relative=False):
"""Creates a navigation policy by guiding a sub-policy to waypoints."""
def policy_fn(obs):
# import ipdb; ipdb.set_trace()
robot_x, robot_y = obs_to_robot(obs)
robot_row, robot_col = self._xy_to_rowcol([robot_x, robot_y])
target_x, target_y = self.target_goal
if relative:
target_x += robot_x # Target is given in relative coordinates.
target_y += robot_y
target_row, target_col = self._xy_to_rowcol([target_x, target_y])
print ('Target: ', target_row, target_col, target_x, target_y)
print ('Robot: ', robot_row, robot_col, robot_x, robot_y)
waypoint_row, waypoint_col = self._get_best_next_rowcol(
[robot_row, robot_col], [target_row, target_col])
if waypoint_row == target_row and waypoint_col == target_col:
waypoint_x = target_x
waypoint_y = target_y
else:
waypoint_x, waypoint_y = self._rowcol_to_xy([waypoint_row, waypoint_col], add_random_noise=True)
goal_x = waypoint_x - robot_x
goal_y = waypoint_y - robot_y
print ('Waypoint: ', waypoint_row, waypoint_col, waypoint_x, waypoint_y)
return goal_reaching_policy_fn(obs, (goal_x, goal_y))
return policy_fn
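# -----------------------------------------------------------------------------
# Usage sketch (illustrative only; not part of the original module).  A
# concrete subclass supplies LOCOMOTION_ENV; the class name and the
# goal-reaching sub-policy below are hypothetical stand-ins.
#
#     env = SomeMazeEnvSubclass(maze_map=U_MAZE, maze_size_scaling=4.0)
#     env.set_target_goal()                    # sample a goal cell
#
#     def goal_policy(obs, goal_xy):           # pretrained waypoint policy
#         ...
#
#     policy = env.create_navigation_policy(goal_policy)
#     obs = env.reset()
#     action = policy(obs)
# -----------------------------------------------------------------------------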
| apache-2.0 | 5,405,217,711,916,502,000 | 36.901857 | 132 | 0.492477 | false |
harlequin/sickbeard | sickbeard/metadata/tivo.py | 1 | 13263 | # Author: Nic Wolfe <[email protected]>
# Author: Gordon Turner <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import datetime
import os
import sickbeard
#from sickbeard.common import *
from sickbeard import logger, exceptions, helpers
from sickbeard.metadata import generic
from sickbeard import encodingKludge as ek
from lib.tvdb_api import tvdb_api, tvdb_exceptions
class TIVOMetadata(generic.GenericMetadata):
"""
Metadata generation class for TIVO
The following file structure is used:
show_root/Season 01/show - 1x01 - episode.avi.txt (* existing episode)
show_root/Season 01/.meta/show - 1x01 - episode.avi.txt (episode metadata)
    This class only generates episode specific metadata files, it does NOT generate a default.txt file.
"""
def __init__(self,
show_metadata=False,
episode_metadata=False,
poster=False,
fanart=False,
episode_thumbnails=False,
season_thumbnails=False):
generic.GenericMetadata.__init__(self,
show_metadata,
episode_metadata,
poster,
fanart,
episode_thumbnails,
season_thumbnails)
self._ep_nfo_extension = "txt"
self.generate_ep_metadata = True
self.name = 'TIVO'
self.eg_show_metadata = "<i>not supported</i>"
self.eg_episode_metadata = "Season##\\.meta\\<i>filename</i>.txt"
self.eg_fanart = "<i>not supported</i>"
self.eg_poster = "<i>not supported</i>"
self.eg_episode_thumbnails = "<i>not supported</i>"
self.eg_season_thumbnails = "<i>not supported</i>"
# Override with empty methods for unsupported features.
def create_show_metadata(self, show_obj):
pass
def create_fanart(self, show_obj):
pass
def get_episode_thumb_path(self, ep_obj):
pass
def get_season_thumb_path(self, show_obj, season):
pass
def retrieveShowMetadata(self, dir):
return (None, None)
# Override and implement features for Tivo.
def get_episode_file_path(self, ep_obj):
"""
Returns a full show dir/.meta/episode.txt path for Tivo
episode metadata files.
        Note that pyTivo requires the metadata filename to include the original extension.
        i.e. if the episode name is foo.avi, the metadata name is foo.avi.txt
ep_obj: a TVEpisode object to get the path for
"""
if ek.ek(os.path.isfile, ep_obj.location):
metadata_file_name = ek.ek(os.path.basename, ep_obj.location) + "." + self._ep_nfo_extension
metadata_dir_name = ek.ek(os.path.join, ek.ek(os.path.dirname, ep_obj.location), '.meta')
metadata_file_path = ek.ek(os.path.join, metadata_dir_name, metadata_file_name)
else:
logger.log(u"Episode location doesn't exist: "+str(ep_obj.location), logger.DEBUG)
return ''
return metadata_file_path
def _ep_data(self, ep_obj):
"""
Creates a key value structure for a Tivo episode metadata file and
returns the resulting data object.
ep_obj: a TVEpisode instance to create the metadata file for.
Lookup the show in http://thetvdb.com/ using the python library:
https://github.com/dbr/tvdb_api/
The results are saved in the object myShow.
The key values for the tivo metadata file are from:
http://pytivo.sourceforge.net/wiki/index.php/Metadata
"""
data = "";
eps_to_write = [ep_obj] + ep_obj.relatedEps
tvdb_lang = ep_obj.show.lang
try:
# There's gotta be a better way of doing this but we don't wanna
# change the language value elsewhere
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
if tvdb_lang and not tvdb_lang == 'en':
ltvdb_api_parms['language'] = tvdb_lang
t = tvdb_api.Tvdb(actors=True, **ltvdb_api_parms)
myShow = t[ep_obj.show.tvdbid]
except tvdb_exceptions.tvdb_shownotfound, e:
raise exceptions.ShowNotFoundException(str(e))
except tvdb_exceptions.tvdb_error, e:
logger.log("Unable to connect to TVDB while creating meta files - skipping - "+str(e), logger.ERROR)
return False
for curEpToWrite in eps_to_write:
try:
myEp = myShow[curEpToWrite.season][curEpToWrite.episode]
except (tvdb_exceptions.tvdb_episodenotfound, tvdb_exceptions.tvdb_seasonnotfound):
logger.log("Unable to find episode " + str(curEpToWrite.season) + "x" + str(curEpToWrite.episode) + " on tvdb... has it been removed? Should I delete from db?")
return None
if myEp["firstaired"] == None and ep_obj.season == 0:
myEp["firstaired"] = str(datetime.date.fromordinal(1))
if myEp["episodename"] == None or myEp["firstaired"] == None:
return None
if myShow["seriesname"] != None:
# Title of the series (The Simpsons, Seinfeld, etc.) or title of the movie (The Mummy, Spiderman, etc).
data += ("title : " + myShow["seriesname"] + "\n")
# Name of series (The Simpsons, Seinfeld, etc.). This should be included if the show is episodic.
# For movies, you may repeat the name of the movie (The Mummy, Spiderman, etc), leave blank, or omit.
data += ("seriesTitle : " + myShow["seriesname"] + "\n")
# Title of the episode (Pilot, Homer's Night Out, Episode 02, etc.) Should be included for episodic shows.
# Leave blank or omit for movies.
data += ("episodeTitle : " + curEpToWrite.name + "\n")
# This should be entered for episodic shows and omitted for movies. The standard tivo format is to enter
# the season number followed by the episode number for that season. For example, enter 201 for season 2
# episode 01.
# This only shows up if you go into the Details from the Program screen.
# This seems to disappear once the video is transferred to TiVo.
# NOTE: May not be correct format, missing season, but based on description from wiki leaving as is.
data += ("episodeNumber : " + str(curEpToWrite.episode) + "\n")
# Must be entered as true or false. If true, the year from originalAirDate will be shown in parentheses
# after the episode's title and before the description on the Program screen.
# FIXME: Hardcode isEpisode to true for now, not sure how to handle movies
data += ("isEpisode : true\n")
# Write the synopsis of the video here.
            # Microsoft Word's smartquotes can die in a fire.
sanitizedDescription = curEpToWrite.description
# Replace double curly quotes
sanitizedDescription = sanitizedDescription.replace(u"\u201c", "\"").replace(u"\u201d", "\"")
# Replace single curly quotes
sanitizedDescription = sanitizedDescription.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u02BC", "'")
data += ("description : " + sanitizedDescription + "\n")
# Usually starts with "SH" and followed by 6-8 digits.
            # Tivo uses zap2it for their data, so the series id is the zap2it_id.
            if myShow["zap2it_id"] is not None:
data += ("seriesId : " + myShow["zap2it_id"] + "\n")
# This is the call sign of the channel the episode was recorded from.
            if myShow["network"] is not None:
data += ("callsign : " + myShow["network"] + "\n")
# This must be entered as yyyy-mm-ddThh:mm:ssZ (the t is capitalized and never changes, the Z is also
# capitalized and never changes). This is the original air date of the episode.
# NOTE: Hard coded the time to T00:00:00Z as we really don't know when during the day the first run happened.
if curEpToWrite.airdate != datetime.date.fromordinal(1):
data += ("originalAirDate : " + str(curEpToWrite.airdate) + "T00:00:00Z\n")
# This shows up at the beginning of the description on the Program screen and on the Details screen.
if myShow["actors"]:
for actor in myShow["actors"].split('|'):
if actor:
data += ("vActor : " + actor + "\n")
# This is shown on both the Program screen and the Details screen. It uses a single digit to determine the
# number of stars: 1 for 1 star, 7 for 4 stars
            if myShow["rating"] is not None:
try:
rating = float(myShow['rating'])
except ValueError:
rating = 0.0
rating = rating / 10 * 4
data += ("starRating : " + str(rating) + "\n")
# This is shown on both the Program screen and the Details screen.
# It uses the standard TV rating system of: TV-Y7, TV-Y, TV-G, TV-PG, TV-14, TV-MA and TV-NR.
if myShow["contentrating"]:
data += ("tvRating : " + str(myShow["contentrating"]) + "\n")
# This field can be repeated as many times as necessary or omitted completely.
if ep_obj.show.genre:
for genre in ep_obj.show.genre.split('|'):
if genre:
data += ("vProgramGenre : " + str(genre) + "\n")
# NOTE: The following are metadata keywords are not used
# displayMajorNumber
# showingBits
# displayMinorNumber
# colorCode
# vSeriesGenre
# vGuestStar, vDirector, vExecProducer, vProducer, vWriter, vHost, vChoreographer
# partCount
# partIndex
return data
def write_ep_file(self, ep_obj):
"""
Generates and writes ep_obj's metadata under the given path with the
given filename root. Uses the episode's name with the extension in
_ep_nfo_extension.
        ep_obj: TVEpisode object for which to create the metadata file. The
            destination path is derived from get_episode_file_path() and the
            extension comes from _ep_nfo_extension.
"""
data = self._ep_data(ep_obj)
if not data:
return False
nfo_file_path = self.get_episode_file_path(ep_obj)
nfo_file_dir = ek.ek(os.path.dirname, nfo_file_path)
try:
if not ek.ek(os.path.isdir, nfo_file_dir):
logger.log("Metadata dir didn't exist, creating it at "+nfo_file_dir, logger.DEBUG)
ek.ek(os.makedirs, nfo_file_dir)
helpers.chmodAsParent(nfo_file_dir)
logger.log(u"Writing episode nfo file to "+nfo_file_path)
nfo_file = ek.ek(open, nfo_file_path, 'w')
# Calling encode directly, b/c often descriptions have wonky characters.
nfo_file.write( data.encode( "utf-8" ) )
nfo_file.close()
helpers.chmodAsParent(nfo_file_path)
except IOError, e:
logger.log(u"Unable to write file to "+nfo_file_path+" - are you sure the folder is writable? "+str(e).decode('utf-8'), logger.ERROR)
return False
return True
# present a standard "interface"
metadata_class = TIVOMetadata
| gpl-3.0 | 8,133,758,677,926,244,000 | 40.839117 | 176 | 0.562618 | false |
ktan2020/legacy-automation | win/Lib/site-packages/wx-3.0-msw/wx/lib/agw/toasterbox.py | 1 | 46688 | # --------------------------------------------------------------------------- #
# TOASTERBOX wxPython IMPLEMENTATION
# Ported And Enhanced From wxWidgets Contribution (Aj Bommarito) By:
#
# Andrea Gavana, @ 16 September 2005
# Latest Revision: 14 Mar 2012, 21.00 GMT
#
#
# TODO/Caveats List
#
# 1. Any Idea?
#
#
# For All Kind Of Problems, Requests Of Enhancements And Bug Reports, Please
# Write To Me At:
#
# [email protected]
# [email protected]
#
# Or, Obviously, To The wxPython Mailing List!!!
#
#
# End Of Comments
# --------------------------------------------------------------------------- #
"""
ToasterBox is a cross-platform widget to make the creation of MSN style "toaster"
popups easier.
Description
===========
ToasterBox is a cross-platform widget to make the creation of MSN style "toaster"
popups easier. The syntax is really easy especially if you are familiar with the
syntax of wxPython.
It has 2 main styles:
- ``TB_SIMPLE``: using this style, you will be able to specify a background image for
ToasterBox, text properties as text colour, font and label;
- ``TB_COMPLEX``: this style will allow you to put almost any control inside a
ToasterBox. You can add a panel in which you can put all the controls you like.
Both styles support the setting of ToasterBox position (on screen coordinates),
size, the time after which the ToasterBox is destroyed (linger), and the scroll
speed of ToasterBox.
Usage
=====
Usage example::
import wx
import wx.lib.agw.toasterbox as TB
class MyFrame(wx.Frame):
def __init__(self, parent):
wx.Frame.__init__(self, parent, -1, "ToasterBox Demo")
toaster = TB.ToasterBox(self, tbstyle=TB.TB_COMPLEX)
toaster.SetPopupPauseTime(3000)
tbpanel = toaster.GetToasterBoxWindow()
panel = wx.Panel(tbpanel, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
button = wx.Button(panel, wx.ID_ANY, "Simple button")
sizer.Add(button, 0, wx.EXPAND)
panel.SetSizer(sizer)
toaster.AddPanel(panel)
wx.CallLater(1000, toaster.Play)
# our normal wxApp-derived class, as usual
app = wx.App(0)
frame = MyFrame(None)
app.SetTopWindow(frame)
frame.Show()
app.MainLoop()
Supported Platforms
===================
ToasterBox has been tested on the following platforms:
- Windows (verified on Windows XP, 2000)
- Linux
- Mac
Window Styles
=============
This class supports the following window styles:
==================== =========== ==================================================
Window Styles Hex Value Description
==================== =========== ==================================================
``TB_SIMPLE`` 0x1 A simple `ToasterBox`, with background image and text customization can be created.
``TB_ONTIME`` 0x1 `ToasterBox` will close after a specified amount of time.
``TB_COMPLEX``       0x2         ToasterBoxes with different degrees of complexity can be created. You can add as many controls as you want, provided that you call the :meth:`~ToasterBox.AddPanel` method and pass to it a dummy frame and a :class:`Panel`. See the demo for details.
``TB_ONCLICK`` 0x2 `ToasterBox` can be closed by clicking anywhere on the `ToasterBox` frame.
``TB_DEFAULT_STYLE`` 0x2008002 Default window style for `ToasterBox`, with no caption nor close box.
``TB_CAPTION`` 0x22009806 `ToasterBox` will have a caption, with the possibility to set a title for the `ToasterBox` frame, and a close box.
==================== =========== ==================================================
Events Processing
=================
`No custom events are available for this class.`
License And Version
===================
ToasterBox is distributed under the wxPython license.
Latest revision: Andrea Gavana @ 14 Mar 2012, 21.00 GMT
Version 0.3
"""
import textwrap
import wx
# Define Window List, We Use It Globally
winlist = []
""" Globally defined window list. """
TB_SIMPLE = 1
""" A simple ToasterBox, with background image and text customization can be created. """
TB_COMPLEX = 2
""" ToasterBoxes with different degree of complexity can be created. You can add as many controls as you want, provided that you call the AddPanel() method and pass to it a dummy frame and a wx.Panel. See the demo for details. """
TB_DEFAULT_STYLE = wx.SIMPLE_BORDER | wx.STAY_ON_TOP | wx.FRAME_NO_TASKBAR
""" Default window style for `ToasterBox`, with no caption nor close box. """
TB_CAPTION = TB_DEFAULT_STYLE | wx.CAPTION | wx.SYSTEM_MENU | wx.CLOSE_BOX | wx.FRAME_NO_TASKBAR
""" `ToasterBox` will have a caption, with the possibility to set a title for the `ToasterBox` frame, and a close box. """
TB_ONTIME = 1
""" `ToasterBox` will close after a specified amount of time. """
TB_ONCLICK = 2
""" `ToasterBox` can be closed by clicking anywhere on the `ToasterBox` frame. """
# scroll from up to down
TB_SCR_TYPE_UD = 1
""" Scroll from up to down. """
# scroll from down to up
TB_SCR_TYPE_DU = 2
""" Scroll from down to up. """
# fade in/out
TB_SCR_TYPE_FADE = 4
""" Fade in and out. """
# ------------------------------------------------------------------------------ #
# Class ToasterBox
# Main Class Implementation. It Is Basically A wx.Timer. It Creates And
# Displays Popups And Handles The "Stacking".
# ------------------------------------------------------------------------------ #
class ToasterBox(wx.Timer):
"""
ToasterBox is a cross-platform widget to make the creation of MSN style "toaster"
popups easier.
"""
def __init__(self, parent, tbstyle=TB_SIMPLE, windowstyle=TB_DEFAULT_STYLE,
closingstyle=TB_ONTIME, scrollType=TB_SCR_TYPE_DU):
"""
Default class constructor.
:param `parent`: the window parent;
:param `tbstyle`: the :class:`ToasterBox` main style. Can be one of the following
bits:
====================== ======= ================================
`ToasterBox` Style Value Description
====================== ======= ================================
``TB_SIMPLE`` 0x1 A simple :class:`ToasterBox`, with background image and text customization can be created
``TB_COMPLEX`` 0x2 `ToasterBoxes` with different degree of complexity can be created. You can add as many controls as you want, provided that you call the :meth:`~ToasterBox.AddPanel` method and pass to it a dummy frame and a :class:`Panel`.
====================== ======= ================================
:param `windowstyle`: this parameter influences the visual appearance of
:class:`ToasterBox`, and can be one of the following styles:
====================== ========== ================================
Window Style Hex Value Description
====================== ========== ================================
``TB_DEFAULT_STYLE`` 0x2008002 Default window style for :class:`ToasterBox`, with no caption nor close box.
``TB_CAPTION`` 0x22009806 :class:`ToasterBox` will have a caption, with the possibility to set a title for the :class:`ToasterBox` frame, and a close box.
====================== ========== ================================
:param `closingstyle`: the closing style for :class:`ToasterBox`. Can be one of the
following bits:
==================== =========== ==================================================
Closing Styles Hex Value Description
==================== =========== ==================================================
``TB_ONTIME`` 0x1 :class:`ToasterBox` will close after a specified amount of time.
``TB_ONCLICK`` 0x2 :class:`ToasterBox` can be closed by clicking anywhere on the :class:`ToasterBox` frame.
==================== =========== ==================================================
:param `scrollType`: the scrolling direction for :class:`ToasterBox`. Can be one of the
following bits:
==================== =========== ==================================================
Scroll Styles Hex Value Description
==================== =========== ==================================================
``TB_SCR_TYPE_UD`` 0x1 :class:`ToasterBox` will scroll from up to down
``TB_SCR_TYPE_DU`` 0x2 :class:`ToasterBox` will scroll from down to up
``TB_SCR_TYPE_FADE`` 0x4 :class:`ToasterBox` will fade in/out (without scrolling).
==================== =========== ==================================================
"""
self._parent = parent
self._sleeptime = 10
self._pausetime = 1700
self._popuptext = "default"
self._popupposition = wx.Point(100,100)
self._popuptop = wx.Point(0,0)
self._popupsize = wx.Size(150, 170)
self._usefocus = True
self._originalfocus = wx.Window.FindFocus()
self._backgroundcolour = wx.WHITE
self._foregroundcolour = wx.BLACK
self._textfont = wx.Font(8, wx.SWISS, wx.NORMAL, wx.NORMAL, False, "Verdana")
self._bitmap = None
self._tbstyle = tbstyle
self._windowstyle = windowstyle
self._closingstyle = closingstyle
self._scrollType = scrollType
self._panel = None
self._bottomright = wx.Point(wx.GetDisplaySize().GetWidth(),
wx.GetDisplaySize().GetHeight())
if parent is not None:
parent.Bind(wx.EVT_ICONIZE, lambda evt: [w.Hide() for w in winlist])
self._moveTimer = wx.Timer(parent, -1)
parent.Bind(wx.EVT_TIMER, self.OnMoveTimer, self._moveTimer)
self._tb = ToasterBoxWindow(self._parent, self, self._tbstyle, self._windowstyle,
self._closingstyle, scrollType=self._scrollType)
def SetPopupPosition(self, pos):
"""
Sets the :class:`ToasterBox` position on screen.
:param `pos`: the widget position, an instance of :class:`Point`.
"""
self._popupposition = pos
def SetPopupPositionByInt(self, pos):
"""
Sets the :class:`ToasterBox` position on screen, at one of the screen corners.
:param `pos`: an integer specifying the screen corner, namely:
============= ========================================
Corner Number Position
============= ========================================
0 Top left screen corner
1 Top right screen corner
2 Bottom left screen corner
3 Bottom right screen corner
============= ========================================
"""
w, h = wx.GetDisplaySize()
self._bottomright = wx.Point(w, h)
# top left
if pos == 0:
popupposition = wx.Point(0,0)
# top right
elif pos == 1:
popupposition = wx.Point(w - self._popupsize[0], 0)
# bottom left
elif pos == 2:
popupposition = wx.Point(0, h - self._popupsize[1])
# bottom right
elif pos == 3:
popupposition = wx.Point(self._bottomright.x - self._popupsize[0],
self._bottomright.y - self._popupsize[1])
self._bottomright = wx.Point(popupposition.x + self._popupsize[0],
popupposition.y + self._popupsize[1])
self._popupposition = popupposition
def CenterOnParent(self, direction=wx.BOTH):
"""
Centres the window on its parent (if any). If the :class:`ToasterBox` parent is ``None``,
it calls :meth:`~ToasterBox.CenterOnScreen`.
:param `direction`: specifies the direction for the centering. May be ``wx.HORIZONTAL``,
``wx.VERTICAL`` or ``wx.BOTH``.
        :note: This method provides a way to center :class:`ToasterBox` over its parent instead of the
entire screen. If there is no parent, then behaviour is the same as :meth:`~ToasterBox.CenterOnScreen`.
:see: :meth:`~ToasterBox.CenterOnScreen`.
"""
if not self._parent:
self.CenterOnScreen(direction)
return
parent = self._parent
screenrect = parent.GetScreenRect()
toast_width, toast_height = self._popupsize
x, y = screenrect.GetX(), screenrect.GetY()
width, height = screenrect.GetWidth(), screenrect.GetHeight()
if direction == wx.VERTICAL:
pos = wx.Point(x, (y + (height/2) - (toast_height/2)))
elif direction == wx.HORIZONTAL:
pos = wx.Point((x + (width/2) - (toast_width/2)), y)
else:
pos = wx.Point((x + (width/2) - (toast_width/2)), (y + (height/2) - (toast_height/2)))
        self.SetPopupPosition(pos)
CentreOnParent = CenterOnParent
def CenterOnScreen(self, direction=wx.BOTH):
"""
Centres the :class:`ToasterBox` on screen.
:param `direction`: specifies the direction for the centering. May be ``wx.HORIZONTAL``,
``wx.VERTICAL`` or ``wx.BOTH``.
:see: :meth:`~ToasterBox.CenterOnParent`.
"""
screenSize = wx.GetDisplaySize()
toast_width, toast_height = self._popupsize
width, height = screenSize.GetWidth(), screenSize.GetHeight()
if direction == wx.VERTICAL:
pos = wx.Point(0, (height/2) - (toast_height/2))
elif direction == wx.HORIZONTAL:
pos = wx.Point((width/2) - (toast_width/2), 0)
else:
pos = wx.Point((width/2) - (toast_width/2), (height/2) - (toast_height/2))
        self.SetPopupPosition(pos)
CentreOnScreen = CenterOnScreen
def SetPopupBackgroundColour(self, colour=None):
"""
Sets the :class:`ToasterBox` background colour.
:param `colour`: a valid :class:`Colour` object. If defaulted to ``None``, then
the background colour will be white.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
if colour is None:
colour = wx.WHITE
if isinstance(colour, basestring):
colour = wx.NamedColour(colour)
self._backgroundcolour = colour
self._tb.SetPopupBackgroundColour(self._backgroundcolour)
def SetPopupTextColour(self, colour=None):
"""
Sets the :class:`ToasterBox` foreground colour.
:param `colour`: a valid :class:`Colour` object. If defaulted to ``None``, then
the background colour will be black.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
if colour is None:
colour = wx.BLACK
if isinstance(colour, basestring):
colour = wx.NamedColour(colour)
self._foregroundcolour = colour
def SetPopupTextFont(self, font=None):
"""
Sets the :class:`ToasterBox` text font.
:param `colour`: a valid :class:`Colour` object. If defaulted to ``None``, then
a simple generic font will be generated.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
if font is None:
font = wx.Font(8, wx.SWISS, wx.NORMAL, wx.NORMAL, False)
self._textfont = font
def SetPopupSize(self, size):
"""
Sets the :class:`ToasterBox` size.
:param `size`: the new control size, an instance of :class:`Size`.
"""
self._popupsize = size
def SetPopupPauseTime(self, pausetime):
"""
Sets the time after which the :class:`ToasterBox` is destroyed (linger).
        :param `pausetime`: the delay after which the control is destroyed, in milliseconds.
"""
self._pausetime = pausetime
def SetPopupBitmap(self, bitmap=None):
"""
Sets the :class:`ToasterBox` background image.
:param `bitmap`: a valid :class:`Bitmap` object or filename. If defaulted
to ``None``, then no background bitmap is used.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
if bitmap is not None:
if isinstance(bitmap, basestring):
bitmap = wx.Bitmap(bitmap)
self._bitmap = bitmap
def SetPopupScrollSpeed(self, speed):
"""
Sets the :class:`ToasterBox` scroll speed.
:param `speed`: it is the pause time (in milliseconds) for every step in the
`ScrollUp` method.
"""
self._sleeptime = speed
def SetPopupText(self, text):
"""
Sets the :class:`ToasterBox` text label.
:param `text`: the widget label.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
self._popuptext = text
def AddPanel(self, panel):
"""
Adds a panel to the :class:`ToasterBox`.
:param `panel`: an instance of :class:`Window`.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_COMPLEX`` style.
"""
if not self._tbstyle & TB_COMPLEX:
raise Exception("\nERROR: Panel Can Not Be Added When Using TB_SIMPLE ToasterBox Style")
self._panel = panel
def Play(self):
""" Creates the :class:`ToasterBoxWindow`, that does all the job. """
# create new window
self._tb.SetPopupSize((self._popupsize[0], self._popupsize[1]))
self._tb.SetPopupPosition((self._popupposition[0], self._popupposition[1]))
self._tb.SetPopupPauseTime(self._pausetime)
self._tb.SetPopupScrollSpeed(self._sleeptime)
self._tb.SetUseFocus(self._usefocus, self._originalfocus)
if self._tbstyle == TB_SIMPLE:
self._tb.SetPopupTextColour(self._foregroundcolour)
self._tb.SetPopupBackgroundColour(self._backgroundcolour)
self._tb.SetPopupTextFont(self._textfont)
if self._bitmap is not None:
self._tb.SetPopupBitmap(self._bitmap)
self._tb.SetPopupText(self._popuptext)
if self._tbstyle == TB_COMPLEX:
if self._panel is not None:
self._tb.AddPanel(self._panel)
# clean up the list
self.CleanList()
# check to see if there is already a window displayed
# by looking at the linked list
if len(winlist) > 0:
# there ARE other windows displayed already
            # recalc where it should display
self.MoveAbove(self._tb)
# shift new window on to the list
winlist.append(self._tb)
if not self._tb.Play():
# if we didn't show the window properly, remove it from the list
winlist.remove(winlist[-1])
# delete the object too
self._tb.Destroy()
return
def MoveAbove(self, tb):
"""
If a :class:`ToasterBox` already exists, move the new one above the existing one.
:param `tb`: another instance of :class:`ToasterBox`.
"""
# recalc where to place this popup
self._tb.SetPopupPosition((self._popupposition[0], self._popupposition[1] -
self._popupsize[1]*len(winlist)))
def GetToasterBoxWindow(self):
""" Returns the :class:`ToasterBox` frame. """
return self._tb
def SetTitle(self, title):
"""
Sets the :class:`ToasterBox` title if it was created with ``TB_CAPTION`` window style.
:param `title`: the :class:`ToasterBox` caption title.
"""
self._tb.SetTitle(title)
def SetUseFocus(self, focus):
"""
        If `focus` is ``True``, instructs :class:`ToasterBox` to steal the focus from the
parent application, otherwise it returns the focus to the original owner.
:param `focus`: ``True`` to set the focus on :class:`ToasterBox`, ``False`` to
return it to the original owner.
"""
self._usefocus = focus
def GetUseFocus(self):
""" Returns whether :class:`ToasterBox` will steal the focus from the parent application. """
return self._usefocus
def Notify(self):
""" It's time to hide a :class:`ToasterBox`. """
if len(winlist) == 0:
return
# clean the window list
self.CleanList()
# figure out how many blanks we have
try:
node = winlist[0]
except:
return
if not node:
return
self._startPos = node.GetPosition()[1]
self._moveTimer.Start(self._sleeptime)
def OnMoveTimer(self, event):
"""
Handles the ``wx.EVT_TIMER`` event for :class:`ToasterBox`, moving the new window
on top of the last one created.
:param `event`: a :class:`TimerEvent` event to be processed.
"""
current = self._startPos
if current >= self._popupposition[1]:
self._moveTimer.Stop()
# move windows to fill in blank space
if current > self._popupposition[1]:
current = self._popupposition[1]
# loop through all the windows
for j in xrange(0, len(winlist)):
ourNewHeight = current - (j*self._popupsize[1] - 8)
tmpTb = winlist[j]
            # reset where the object THINKS it's supposed to be
tmpTb.SetPopupPosition((self._popupposition[0], ourNewHeight))
# actually move it
tmpTb.SetDimensions(self._popupposition[0], ourNewHeight, tmpTb.GetSize().GetWidth(),
tmpTb.GetSize().GetHeight())
self._startPos += 4
def CleanList(self):
""" Cleans the window list, erasing the stack of :class:`ToasterBox` objects. """
if len(winlist) == 0:
return
node = winlist[0]
while node:
if not node.IsShown():
winlist.remove(node)
node.Close()
try:
node = winlist[0]
except:
node = 0
else:
indx = winlist.index(node)
try:
node = winlist[indx+1]
except:
node = 0
# ------------------------------------------------------------------------------ #
# Class ToasterBoxWindow
# This Class Does All The Job, By Handling Background Images, Text Properties
# And Panel Adding. Depending On The Style You Choose, ToasterBoxWindow Will
# Behave Differently In Order To Handle Widgets Inside It.
# ------------------------------------------------------------------------------ #
class ToasterBoxWindow(wx.Frame):
"""
This class does all the job, by handling background images, text properties
and panel adding. Depending on the style you choose, :class:`ToasterBoxWindow` will
behave differently in order to handle widgets inside it.
"""
def __init__(self, parent, parent2, tbstyle, windowstyle, closingstyle,
scrollType=TB_SCR_TYPE_DU):
"""
Default class constructor.
Used internally. Do not call directly this class in your application!
:param `parent`: the window parent;
:param `parent2`: the :class:`ToasterBox` calling this window;
:param `tbstyle`: the :class:`ToasterBoxWindow` main style. Can be one of the following
bits:
====================== ======= ================================
`ToasterBox` Style Value Description
====================== ======= ================================
``TB_SIMPLE`` 0x1 A simple :class:`ToasterBox`, with background image and text customization can be created
``TB_COMPLEX`` 0x2 `ToasterBoxes` with different degree of complexity can be created. You can add as many controls as you want, provided that you call the :meth:`~ToasterBoxWindow.AddPanel` method and pass to it a dummy frame and a :class:`Panel`.
====================== ======= ================================
:param `windowstyle`: this parameter influences the visual appearance of
:class:`ToasterBoxWindow`, and can be one of the following styles:
====================== ========== ================================
Window Style Hex Value Description
====================== ========== ================================
``TB_DEFAULT_STYLE`` 0x2008002 Default window style for :class:`ToasterBox`, with no caption nor close box.
``TB_CAPTION`` 0x22009806 :class:`ToasterBox` will have a caption, with the possibility to set a title for the :class:`ToasterBox` frame, and a close box.
====================== ========== ================================
:param `closingstyle`: the closing style for :class:`ToasterBoxWindow`. Can be one of the
following bits:
==================== =========== ==================================================
Closing Styles Hex Value Description
==================== =========== ==================================================
``TB_ONTIME`` 0x1 :class:`ToasterBox` will close after a specified amount of time.
``TB_ONCLICK`` 0x2 :class:`ToasterBox` can be closed by clicking anywhere on the :class:`ToasterBox` frame.
==================== =========== ==================================================
:param `scrollType`: the scrolling direction for :class:`ToasterBoxWindow`. Can be one of the
following bits:
==================== =========== ==================================================
Scroll Styles Hex Value Description
==================== =========== ==================================================
``TB_SCR_TYPE_UD`` 0x1 :class:`ToasterBox` will scroll from up to down
``TB_SCR_TYPE_DU`` 0x2 :class:`ToasterBox` will scroll from down to up
``TB_SCR_TYPE_FADE`` 0x4 :class:`ToasterBox` will fade in/out (without scrolling).
==================== =========== ==================================================
"""
wx.Frame.__init__(self, parent, wx.ID_ANY, "window", wx.DefaultPosition,
wx.DefaultSize, style=windowstyle | wx.CLIP_CHILDREN)
self._starttime = wx.GetLocalTime()
self._parent2 = parent2
self._parent = parent
self._sleeptime = 10
self._step = 4
self._pausetime = 1700
self._textcolour = wx.BLACK
self._popuptext = "Change Me!"
# the size we want the dialog to be
framesize = wx.Size(150, 170)
self._count = 1
self._tbstyle = tbstyle
self._windowstyle = windowstyle
self._closingstyle = closingstyle
self._backgroundcolour = wx.WHITE
if tbstyle == TB_COMPLEX:
self.sizer = wx.BoxSizer(wx.VERTICAL)
else:
self._staticbitmap = None
if self._windowstyle == TB_CAPTION:
self.Bind(wx.EVT_CLOSE, self.OnClose)
self.SetTitle("")
if scrollType == TB_SCR_TYPE_FADE and not self.CanSetTransparent():
import warnings
warnings.warn("The style ``TB_SCR_TYPE_FADE`` is not supported on this platform.")
scrollType = TB_SCR_TYPE_DU
self._scrollType = scrollType
if self._closingstyle & TB_ONCLICK and self._windowstyle != TB_CAPTION:
self.Bind(wx.EVT_LEFT_DOWN, self.OnMouseDown)
self._bottomright = wx.Point(wx.GetDisplaySize().GetWidth(),
wx.GetDisplaySize().GetHeight())
self.SetDimensions(self._bottomright.x, self._bottomright.y,
framesize.GetWidth(), framesize.GetHeight())
self._scrollTimer = wx.Timer(self, -1)
self._alphaTimer = wx.Timer(self, -1)
self.Bind(wx.EVT_TIMER, self.OnScrollTimer, self._scrollTimer)
self.Bind(wx.EVT_TIMER, self.AlphaCycle, self._alphaTimer)
if not self._tbstyle & TB_COMPLEX:
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
def OnClose(self, event):
"""
Handles the ``wx.EVT_CLOSE`` event for :class:`ToasterBoxWindow`.
:param `event`: a :class:`CloseEvent` event to be processed.
"""
self.NotifyTimer(None)
event.Skip()
def OnMouseDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` event for :class:`ToasterBoxWindow`.
:param `event`: a :class:`MouseEvent` event to be processed.
"""
self.NotifyTimer(None)
event.Skip()
def SetPopupBitmap(self, bitmap=None):
"""
Sets the :class:`ToasterBox` background image.
:param `bitmap`: a valid :class:`Bitmap` object. If defaulted to ``None``, then
no background bitmap is used.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
if bitmap is None:
self._staticbitmap = None
else:
bitmap = bitmap.ConvertToImage()
xsize, ysize = self.GetSize()
bitmap = bitmap.Scale(xsize, ysize)
self._staticbitmap = bitmap.ConvertToBitmap()
def SetPopupSize(self, size):
"""
Sets the :class:`ToasterBox` size.
:param `size`: the new control size, an instance of :class:`Size`.
"""
self.SetDimensions(self._bottomright.x, self._bottomright.y, size[0], size[1])
def SetPopupPosition(self, pos):
"""
Sets the :class:`ToasterBox` position on screen.
:param `pos`: the widget position, an instance of :class:`Point`.
"""
self._bottomright = wx.Point(pos[0] + self.GetSize().GetWidth(),
pos[1] + self.GetSize().GetHeight())
self._dialogtop = pos
def SetPopupPositionByInt(self, pos):
"""
Sets the :class:`ToasterBox` position on screen, at one of the screen corners.
:param `pos`: an integer specifying the screen corner, namely:
============= ========================================
Corner Number Position
============= ========================================
0 Top left screen corner
1 Top right screen corner
2 Bottom left screen corner
3 Bottom right screen corner
============= ========================================
"""
w, h = wx.GetDisplaySize()
self._bottomright = wx.Point(w, h)
# top left
if pos == 0:
popupposition = wx.Point(0, 0)
# top right
elif pos == 1:
popupposition = wx.Point(w - self._popupsize[0], 0)
# bottom left
elif pos == 2:
popupposition = wx.Point(0, h - self._popupsize[1])
# bottom right
elif pos == 3:
popupposition = wx.Point(self._bottomright.x - self._popupsize[0],
self._bottomright.y - self._popupsize[1])
self._bottomright = wx.Point(popupposition.x + self._popupsize[0],
popupposition.y + self._popupsize[1])
self._dialogtop = popupposition
def SetPopupPauseTime(self, pausetime):
"""
Sets the time after which the :class:`ToasterBox` is destroyed (linger).
        :param `pausetime`: the delay after which the control is destroyed, in milliseconds.
"""
self._pausetime = pausetime
def SetPopupScrollSpeed(self, speed):
"""
Sets the :class:`ToasterBox` scroll speed.
:param `speed`: it is the pause time (in milliseconds) for every step in the
:meth:`~ToasterBoxWindow.ScrollUp` method.
"""
self._sleeptime = speed
def AddPanel(self, panel):
"""
Adds a panel to the :class:`ToasterBox`.
:param `panel`: an instance of :class:`Window`.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_COMPLEX`` style.
"""
if not self._tbstyle & TB_COMPLEX:
raise Exception("\nERROR: Panel Can Not Be Added When Using TB_SIMPLE ToasterBox Style")
self.sizer.Add(panel, 1, wx.EXPAND)
self.SetSizer(self.sizer)
self.Layout()
if self._closingstyle & TB_ONCLICK and self._windowstyle != TB_CAPTION:
panel.Bind(wx.EVT_LEFT_DOWN, self.OnMouseDown)
def SetPopupText(self, text):
"""
Sets the :class:`ToasterBox` text label.
:param `text`: the widget label.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
self._popuptext = text
def SetPopupTextFont(self, font):
"""
Sets the :class:`ToasterBox` text font.
:param `colour`: a valid :class:`Colour` object. If defaulted to ``None``, then
a simple generic font will be generated.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
self._textfont = font
def GetPopupText(self):
"""
Returns the :class:`ToasterBox` text.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
return self._popuptext
def Play(self):
""" Creates the :class:`ToasterBoxWindow`, that does all the job. """
# do some checks to make sure this window is valid
if self._bottomright.x < 1 or self._bottomright.y < 1:
return False
        if self.GetSize().GetWidth() < 50 or self.GetSize().GetHeight() < 50:
            # toasterbox launches into an endless loop for some reason
# when you try to make the window too small.
return False
self._direction = wx.UP
self.SetupPositions()
self.ScrollUp()
timerid = wx.NewId()
self.showtime = wx.Timer(self, timerid)
self.showtime.Start(self._pausetime)
self.Bind(wx.EVT_TIMER, self.NotifyTimer, id=timerid)
return True
def NotifyTimer(self, event):
""" Hides gradually the :class:`ToasterBoxWindow`. """
if self._scrollType != TB_SCR_TYPE_FADE:
self.showtime.Stop()
del self.showtime
self._direction = wx.DOWN
self.SetupPositions()
self.ScrollDown()
def SetPopupBackgroundColour(self, colour):
"""
Sets the :class:`ToasterBox` background colour.
:param `colour`: a valid :class:`Colour` object. If defaulted to ``None``, then
the background colour will be white.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
self.SetBackgroundColour(colour)
self._backgroundcolour = colour
def SetPopupTextColour(self, colour):
"""
Sets the :class:`ToasterBox` foreground colour.
:param `colour`: a valid :class:`Colour` object. If defaulted to ``None``, then
the background colour will be black.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
self._textcolour = colour
def SetUseFocus(self, focus, originalfocus):
"""
        If `focus` is ``True``, instructs :class:`ToasterBoxWindow` to steal the focus from the
parent application, otherwise it returns the focus to the original owner.
:param `focus`: ``True`` to set the focus on :class:`ToasterBoxWindow`, ``False`` to
return it to the original owner;
:param `originalfocus`: an instance of :class:`Window`, representing a pointer to
the window which originally had the focus
"""
self._usefocus = focus
self._originalfocus = originalfocus
def OnScrollTimer(self, event):
"""
Handles the ``wx.EVT_TIMER`` event for :class:`ToasterBoxWindow` scrolling up/down.
:param `event`: a :class:`TimerEvent` event to be processed.
"""
if self._direction == wx.UP:
self.TearUp()
else:
self.TearDown()
def TearUp(self):
""" Scrolls the :class:`ToasterBox` up, which means gradually showing it. """
self._windowsize = self._windowsize + self._step
step = self._currentStep
if step < self._dialogtop[1]:
step = self._dialogtop[1]
# checking the type of the scroll (from up to down or from down to up)
if self._scrollType == TB_SCR_TYPE_UD:
dimY = self._dialogtop[1]
elif self._scrollType == TB_SCR_TYPE_DU:
dimY = step
self.SetDimensions(self._dialogtop[0], dimY, self.GetSize().GetWidth(), self._windowsize)
self.Refresh(False)
self._currentStep += self._scrollStep
if self._currentStep not in range(self._start, self._stop, self._scrollStep):
self._scrollTimer.Stop()
self.Update()
if self._tbstyle == TB_SIMPLE:
self.DrawText()
if self._usefocus:
self.SetFocus()
else:
self._originalfocus.SetFocus()
def TearDown(self):
""" Scrolls the :class:`ToasterBox` down, which means gradually hiding it. """
self._windowsize = self._windowsize - self._step
step = self._currentStep
if step > self._bottomright.y:
step = self._bottomright.y
if self._windowsize > 0:
# checking the type of the scroll (from up to down or from down to up)
if self._scrollType == TB_SCR_TYPE_UD:
dimY = self._dialogtop[1]
elif self._scrollType == TB_SCR_TYPE_DU:
dimY = step
self.SetDimensions(self._dialogtop[0], dimY,
self.GetSize().GetWidth(), self._windowsize)
self.Update()
self.Refresh()
self._currentStep += self._scrollStep
else:
self._scrollTimer.Stop()
self.Hide()
if self._parent2:
self._parent2.Notify()
def SetupPositions(self):
""" Sets up the position, size and scrolling step for :class:`ToasterBoxWindow`. """
if self._scrollType == TB_SCR_TYPE_FADE:
self.SetPosition(wx.Point(*self._dialogtop))
return
if self._direction == wx.UP:
# walk the Y value up in a raise motion
self._xpos = self.GetPosition().x
self._ypos = self._bottomright[1]
self._windowsize = 0
# checking the type of the scroll (from up to down or from down to up)
if self._scrollType == TB_SCR_TYPE_UD:
self._start = self._dialogtop[1]
self._stop = self._ypos
self._scrollStep = self._step
elif self._scrollType == TB_SCR_TYPE_DU:
self._start = self._ypos
self._stop = self._dialogtop[1]
self._scrollStep = -self._step
else:
# walk down the Y value
self._windowsize = self.GetSize().GetHeight()
# checking the type of the scroll (from up to down or from down to up)
if self._scrollType == TB_SCR_TYPE_UD:
self._start = self._bottomright.y
self._stop = self._dialogtop[1]
self._scrollStep = -self._step
elif self._scrollType == TB_SCR_TYPE_DU:
self._start = self._dialogtop[1]
self._stop = self._bottomright.y
self._scrollStep = self._step
self._currentStep = self._start
def ScrollUp(self):
""" Scrolls the :class:`ToasterBox` up, which means gradually showing it. """
if self._scrollType == TB_SCR_TYPE_FADE:
self._amount = 0
self._delta = 5
self.SetSize(self.GetSize())
self._alphaTimer.Start(self._sleeptime)
else:
self.Show(True)
self._scrollTimer.Start(self._sleeptime)
def ScrollDown(self):
""" Scrolls the :class:`ToasterBox` down, which means gradually hiding it. """
if self._scrollType == TB_SCR_TYPE_FADE:
self._amount = 255
self._delta = -5
self._alphaTimer.Start(self._sleeptime)
else:
self._scrollTimer.Start(self._sleeptime)
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` event for :class:`ToasterBoxWindow`.
:param `event`: a :class:`PaintEvent` event to be processed.
:note: This event is handled and processed only if the style ``TB_SIMPLE`` is
given to :class:`ToasterBox`.
"""
dc = wx.AutoBufferedPaintDC(self)
self.DrawText(dc)
def DrawText(self, dc=None):
"""
Draws the text label for a :class:`ToasterBox` with ``TB_SIMPLE`` style set.
:param `dc`: an instance of :class:`DC`. If defaulted to ``None``, a :class:`ClientDC`
will be created on the fly.
"""
if dc is None:
dc = wx.ClientDC(self)
dc.SetBackground(wx.Brush(self._backgroundcolour))
dc.Clear()
if self._staticbitmap:
dc.DrawBitmap(self._staticbitmap, 0, 0)
dc.SetFont(self._textfont)
dc.SetTextForeground(self._textcolour)
if not hasattr(self, "text_coords"):
self._getTextCoords(dc)
dc.DrawTextList(*self.text_coords)
def AlphaCycle(self, event):
"""
Handles the ``wx.EVT_TIMER`` event for :class:`ToasterBoxWindow`.
:param `event`: a :class:`TimerEvent` event to be processed.
"""
# Increase (or decrease) the alpha channel
self._amount += self._delta
if self._tbstyle == TB_SIMPLE:
self.Refresh(False)
if self._amount > 255 or self._amount < 0:
# We're done, stop the timer
self._alphaTimer.Stop()
if self._amount < 0:
self.Hide()
if self._parent2:
self._parent2.Notify()
elif self._amount > 255:
if self._usefocus:
self.SetFocus()
else:
self._originalfocus.SetFocus()
return
# Make the ToasterBoxWindow more or less transparent
self.MakeWindowTransparent(self._amount)
if not self.IsShown():
self.Show()
def MakeWindowTransparent(self, amount):
"""
Makes the :class:`ToasterBoxWindow` window transparent.
:param `amount`: the alpha channel value.
"""
if not self.CanSetTransparent():
return
self.SetTransparent(amount)
def _getTextCoords(self, dc):
"""
        Compute the draw coordinates for the user-specified text.
:param `dc`: an instance of :class:`DC`.
:note: Use this method only for a :class:`ToasterBox` created with the ``TB_SIMPLE`` style.
"""
# border from sides and top to text (in pixels)
border = 7
# how much space between text lines
textPadding = 2
pText = self.GetPopupText()
max_len = len(pText)
tw, th = self._parent2._popupsize
if self._windowstyle == TB_CAPTION:
th = th - 20
while 1:
lines = textwrap.wrap(pText, max_len)
for line in lines:
w, h = dc.GetTextExtent(line)
if w > tw - border * 2:
max_len -= 1
break
else:
break
fh = 0
for line in lines:
w, h = dc.GetTextExtent(line)
fh += h + textPadding
        y = (th - fh) / 2
        coords = []
for line in lines:
w, h = dc.GetTextExtent(line)
x = (tw - w) / 2
coords.append((x, y))
y += h + textPadding
self.text_coords = (lines, coords)
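# A minimal TB_SIMPLE configuration sketch (the colour, text, size and timing
# values below are arbitrary placeholders), complementing the TB_COMPLEX
# example shown in the module docstring:
#
#     toaster = ToasterBox(frame, tbstyle=TB_SIMPLE, closingstyle=TB_ONCLICK)
#     toaster.SetPopupText("Download finished")
#     toaster.SetPopupTextColour(wx.WHITE)
#     toaster.SetPopupBackgroundColour(wx.Colour(40, 40, 40))
#     toaster.SetPopupSize((250, 120))
#     toaster.SetPopupPositionByInt(3)   # bottom-right corner of the display
#     toaster.SetPopupPauseTime(4000)    # milliseconds
#     toaster.Play()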
| mit | -4,192,503,519,830,455,000 | 33.077385 | 279 | 0.525103 | false |
funkyfuture/deck-chores | deck_chores/indexes.py | 1 | 2095 | from functools import lru_cache
from types import MappingProxyType
from typing import Dict, Tuple
from deck_chores.config import cfg, CONTAINER_CACHE_SIZE
from deck_chores.utils import log
####
@lru_cache(maxsize=CONTAINER_CACHE_SIZE)
def container_name(container_id: str) -> str:
return cfg.client.containers.get(container_id).name
####
_service_locks_by_container_id: Dict[str, Tuple[str, ...]] = {}
service_locks_by_container_id = MappingProxyType(_service_locks_by_container_id)
_service_locks_by_service_id: Dict[Tuple[str, ...], str] = {}
service_locks_by_service_id = MappingProxyType(_service_locks_by_service_id)
def lock_service(service_id: Tuple[str, ...], container_id: str):
assert service_id not in service_locks_by_service_id
_service_locks_by_service_id[service_id] = container_id
assert container_id not in service_locks_by_container_id
_service_locks_by_container_id[container_id] = service_id
log.debug(f"Added lock for service {service_id} on container {container_id}.")
def reassign_service_lock(old_container_id: str, new_container_id: str):
service_id = _service_locks_by_container_id.pop(old_container_id)
assert old_container_id not in service_locks_by_container_id
assert new_container_id not in service_locks_by_container_id
_service_locks_by_container_id[new_container_id] = service_id
assert service_id in service_locks_by_service_id
_service_locks_by_service_id[service_id] = new_container_id
log.debug(
f"Reassigned lock for service {service_id} from container {old_container_id} "
f"to {new_container_id}."
)
def unlock_service(container_id: str):
service_id = _service_locks_by_container_id.pop(container_id, None)
if service_id is None:
return
_service_locks_by_service_id.pop(service_id)
log.debug(f"Removed lock for service {service_id} on container {container_id}.")
__all__ = (
"service_locks_by_container_id",
"service_locks_by_service_id",
lock_service.__name__,
reassign_service_lock.__name__,
unlock_service.__name__,
)
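if __name__ == "__main__":  # pragma: no cover
    # Minimal illustrative sketch of the lock bookkeeping above; the service
    # and container ids are made-up placeholders, not real Docker objects.
    lock_service(("example_stack", "web"), "container-a")
    assert service_locks_by_container_id["container-a"] == ("example_stack", "web")
    reassign_service_lock("container-a", "container-b")
    assert service_locks_by_service_id[("example_stack", "web")] == "container-b"
    unlock_service("container-b")
    assert not service_locks_by_container_id and not service_locks_by_service_id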
| isc | 137,992,603,129,664,260 | 33.344262 | 86 | 0.71074 | false |
Tim-Erwin/sanic | sanic/router.py | 1 | 13870 | import re
from collections import defaultdict, namedtuple
from collections.abc import Iterable
from functools import lru_cache
from sanic.exceptions import NotFound, InvalidUsage
from sanic.views import CompositionView
Route = namedtuple(
'Route',
['handler', 'methods', 'pattern', 'parameters', 'name', 'uri'])
Parameter = namedtuple('Parameter', ['name', 'cast'])
REGEX_TYPES = {
'string': (str, r'[^/]+'),
'int': (int, r'\d+'),
'number': (float, r'[0-9\\.]+'),
'alpha': (str, r'[A-Za-z]+'),
'path': (str, r'[^/].*?'),
}
ROUTER_CACHE_SIZE = 1024
def url_hash(url):
return url.count('/')
class RouteExists(Exception):
pass
class RouteDoesNotExist(Exception):
pass
class Router:
"""Router supports basic routing with parameters and method checks
Usage:
.. code-block:: python
@sanic.route('/my/url/<my_param>', methods=['GET', 'POST', ...])
def my_route(request, my_param):
do stuff...
or
.. code-block:: python
        @sanic.route('/my/url/<my_param:my_type>', methods=['GET', 'POST', ...])
def my_route_with_type(request, my_param: my_type):
do stuff...
Parameters will be passed as keyword arguments to the request handling
function. Provided parameters can also have a type by appending :type to
the <parameter>. Given parameter must be able to be type-casted to this.
If no type is provided, a string is expected. A regular expression can
also be passed in as the type. The argument given to the function will
always be a string, independent of the type.
"""
routes_static = None
routes_dynamic = None
routes_always_check = None
parameter_pattern = re.compile(r'<(.+?)>')
def __init__(self):
self.routes_all = {}
self.routes_names = {}
self.routes_static = {}
self.routes_dynamic = defaultdict(list)
self.routes_always_check = []
self.hosts = set()
@classmethod
def parse_parameter_string(cls, parameter_string):
"""Parse a parameter string into its constituent name, type, and
pattern
For example::
            parse_parameter_string('<param_one:[A-z]>') ->
('param_one', str, '[A-z]')
:param parameter_string: String to parse
:return: tuple containing
(parameter_name, parameter_type, parameter_pattern)
"""
# We could receive NAME or NAME:PATTERN
name = parameter_string
pattern = 'string'
if ':' in parameter_string:
name, pattern = parameter_string.split(':', 1)
default = (str, pattern)
# Pull from pre-configured types
_type, pattern = REGEX_TYPES.get(pattern, default)
return name, _type, pattern
def add(self, uri, methods, handler, host=None, strict_slashes=False,
version=None, name=None):
"""Add a handler to the route list
:param uri: path to match
:param methods: sequence of accepted method names. If none are
provided, any method is allowed
:param handler: request handler function.
When executed, it should provide a response object.
:param strict_slashes: strict to trailing slash
:param version: current version of the route or blueprint. See
docs for further details.
:return: Nothing
"""
if version is not None:
if uri.startswith('/'):
uri = "/".join(["/v{}".format(str(version)), uri[1:]])
else:
uri = "/".join(["/v{}".format(str(version)), uri])
# add regular version
self._add(uri, methods, handler, host, name)
if strict_slashes:
return
# Add versions with and without trailing /
slash_is_missing = (
not uri[-1] == '/' and not self.routes_all.get(uri + '/', False)
)
without_slash_is_missing = (
uri[-1] == '/' and not
self.routes_all.get(uri[:-1], False) and not
uri == '/'
)
# add version with trailing slash
if slash_is_missing:
self._add(uri + '/', methods, handler, host, name)
# add version without trailing slash
elif without_slash_is_missing:
self._add(uri[:-1], methods, handler, host, name)
def _add(self, uri, methods, handler, host=None, name=None):
"""Add a handler to the route list
:param uri: path to match
:param methods: sequence of accepted method names. If none are
provided, any method is allowed
:param handler: request handler function.
When executed, it should provide a response object.
:return: Nothing
"""
if host is not None:
if isinstance(host, str):
uri = host + uri
self.hosts.add(host)
else:
if not isinstance(host, Iterable):
raise ValueError("Expected either string or Iterable of "
"host strings, not {!r}".format(host))
for host_ in host:
self.add(uri, methods, handler, host_, name)
return
# Dict for faster lookups of if method allowed
if methods:
methods = frozenset(methods)
parameters = []
properties = {"unhashable": None}
def add_parameter(match):
name = match.group(1)
name, _type, pattern = self.parse_parameter_string(name)
parameter = Parameter(
name=name, cast=_type)
parameters.append(parameter)
# Mark the whole route as unhashable if it has the hash key in it
if re.search(r'(^|[^^]){1}/', pattern):
properties['unhashable'] = True
# Mark the route as unhashable if it matches the hash key
elif re.search(r'/', pattern):
properties['unhashable'] = True
return '({})'.format(pattern)
pattern_string = re.sub(self.parameter_pattern, add_parameter, uri)
pattern = re.compile(r'^{}$'.format(pattern_string))
def merge_route(route, methods, handler):
# merge to the existing route when possible.
if not route.methods or not methods:
# method-unspecified routes are not mergeable.
raise RouteExists(
"Route already registered: {}".format(uri))
elif route.methods.intersection(methods):
# already existing method is not overloadable.
duplicated = methods.intersection(route.methods)
raise RouteExists(
"Route already registered: {} [{}]".format(
uri, ','.join(list(duplicated))))
if isinstance(route.handler, CompositionView):
view = route.handler
else:
view = CompositionView()
view.add(route.methods, route.handler)
view.add(methods, handler)
route = route._replace(
handler=view, methods=methods.union(route.methods))
return route
if parameters:
# TODO: This is too complex, we need to reduce the complexity
if properties['unhashable']:
routes_to_check = self.routes_always_check
ndx, route = self.check_dynamic_route_exists(
pattern, routes_to_check)
else:
routes_to_check = self.routes_dynamic[url_hash(uri)]
ndx, route = self.check_dynamic_route_exists(
pattern, routes_to_check)
if ndx != -1:
# Pop the ndx of the route, no dups of the same route
routes_to_check.pop(ndx)
else:
route = self.routes_all.get(uri)
# prefix the handler name with the blueprint name
# if available
if hasattr(handler, '__blueprintname__'):
handler_name = '{}.{}'.format(
handler.__blueprintname__, name or handler.__name__)
else:
handler_name = name or getattr(handler, '__name__', None)
if route:
route = merge_route(route, methods, handler)
else:
route = Route(
handler=handler, methods=methods, pattern=pattern,
parameters=parameters, name=handler_name, uri=uri)
self.routes_all[uri] = route
pairs = self.routes_names.get(handler_name)
if not (pairs and (pairs[0] + '/' == uri or uri + '/' == pairs[0])):
self.routes_names[handler_name] = (uri, route)
if properties['unhashable']:
self.routes_always_check.append(route)
elif parameters:
self.routes_dynamic[url_hash(uri)].append(route)
else:
self.routes_static[uri] = route
@staticmethod
def check_dynamic_route_exists(pattern, routes_to_check):
for ndx, route in enumerate(routes_to_check):
if route.pattern == pattern:
return ndx, route
else:
return -1, None
def remove(self, uri, clean_cache=True, host=None):
if host is not None:
uri = host + uri
try:
route = self.routes_all.pop(uri)
for handler_name, pairs in self.routes_names.items():
if pairs[0] == uri:
self.routes_names.pop(handler_name)
break
except KeyError:
raise RouteDoesNotExist("Route was not registered: {}".format(uri))
if route in self.routes_always_check:
self.routes_always_check.remove(route)
elif url_hash(uri) in self.routes_dynamic \
and route in self.routes_dynamic[url_hash(uri)]:
self.routes_dynamic[url_hash(uri)].remove(route)
else:
self.routes_static.pop(uri)
if clean_cache:
self._get.cache_clear()
@lru_cache(maxsize=ROUTER_CACHE_SIZE)
def find_route_by_view_name(self, view_name):
"""Find a route in the router based on the specified view name.
:param view_name: string of view name to search by
:return: tuple containing (uri, Route)
"""
if not view_name:
return (None, None)
return self.routes_names.get(view_name, (None, None))
def get(self, request):
"""Get a request handler based on the URL of the request, or raises an
error
:param request: Request object
:return: handler, arguments, keyword arguments
"""
# No virtual hosts specified; default behavior
if not self.hosts:
return self._get(request.path, request.method, '')
# virtual hosts specified; try to match route to the host header
try:
return self._get(request.path, request.method,
request.headers.get("Host", ''))
# try default hosts
except NotFound:
return self._get(request.path, request.method, '')
@lru_cache(maxsize=ROUTER_CACHE_SIZE)
def _get(self, url, method, host):
"""Get a request handler based on the URL of the request, or raises an
error. Internal method for caching.
:param url: request URL
:param method: request method
:return: handler, arguments, keyword arguments
"""
url = host + url
# Check against known static routes
route = self.routes_static.get(url)
method_not_supported = InvalidUsage(
'Method {} not allowed for URL {}'.format(
method, url), status_code=405)
if route:
if route.methods and method not in route.methods:
raise method_not_supported
match = route.pattern.match(url)
else:
route_found = False
# Move on to testing all regex routes
for route in self.routes_dynamic[url_hash(url)]:
match = route.pattern.match(url)
route_found |= match is not None
# Do early method checking
if match and method in route.methods:
break
else:
# Lastly, check against all regex routes that cannot be hashed
for route in self.routes_always_check:
match = route.pattern.match(url)
route_found |= match is not None
# Do early method checking
if match and method in route.methods:
break
else:
# Route was found but the methods didn't match
if route_found:
raise method_not_supported
raise NotFound('Requested URL {} not found'.format(url))
kwargs = {p.name: p.cast(value)
for value, p
in zip(match.groups(1), route.parameters)}
route_handler = route.handler
if hasattr(route_handler, 'handlers'):
route_handler = route_handler.handlers[method]
return route_handler, [], kwargs, route.uri
def is_stream_handler(self, request):
""" Handler for request is stream or not.
:param request: Request object
:return: bool
"""
try:
handler = self.get(request)[0]
except (NotFound, InvalidUsage):
return False
if (hasattr(handler, 'view_class') and
hasattr(handler.view_class, request.method.lower())):
handler = getattr(handler.view_class, request.method.lower())
return hasattr(handler, 'is_stream')
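if __name__ == "__main__":  # pragma: no cover
    # Illustrative sketch only: register a versioned dynamic route and resolve
    # it through the internal lookup. The handler and URL below are made-up
    # placeholders, not part of Sanic itself.
    def _demo_handler(request, tag):
        return tag

    _router = Router()
    _router.add('/posts/<tag:alpha>', frozenset({'GET'}), _demo_handler,
                version=1)
    _handler, _args, _kwargs, _uri = _router._get('/v1/posts/news', 'GET', '')
    assert _handler is _demo_handler
    assert _kwargs == {'tag': 'news'}
    assert _uri == '/v1/posts/<tag:alpha>'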
| mit | 1,279,275,129,031,850,000 | 35.214099 | 79 | 0.559697 | false |
szwieback/nlscaling | testScenarios/testEstimationSkewNormal.py | 1 | 5774 | '''
Provides scenarios for testing the estimation of the parameters (error variances and mean map parameters, which correspond to generalized calibration constants)
The simulated data are drawn from SkewNormal distributions, whereas normal distributions are assumed in the estimation.
The results are described in testEstimation
'''
import GaussianBeliefNetwork as gbn
import GaussianRandomVariable as grv
import MeanMap as mm
import testEstimation
def scenarioEstimationStudentLinear(skew,skewchildren=False,verbose=False,seed0=1,size=350):
# test a linear PGBN
# Input parameters:
# - skew, float: skew of distribution
# - skewchildren, boolean (optional): also let conditional distributions of children be skewed
# - verbose, boolean (optional): print summary
# - seed0, integer (optional): seed to which the random number generator is set at the beginning
# - size, integer (optional): length of the simulated time series
print 'Test estimation with SkewNormal distribution with a skew of %f' %skew
if skewchildren:
print 'The same skew is also applied to the children'
print 'Scenario: Linear; three children; all mean maps are linear'
anom=grv.GaussianRandomVariable('t',[],[],1)
meas1=grv.GaussianRandomVariable('x',[anom],[mm.LinearMeanMap([-0.1,0.7])],3e-2)
meas2=grv.GaussianRandomVariable('y',[anom],[mm.LinearMeanMap([0.8,1.2])],1e-1)
meas3=grv.GaussianRandomVariable('z',[anom],[mm.LinearMeanMap([0.2,1.4])],6e-2)
inputPGBN=gbn.PyramidGaussianBeliefNetwork(anom,[meas1,meas2,meas3])
return testEstimation.testEstimation(inputPGBN,seed0=seed0,size=size,rvtype='SkewNormal',verbose=verbose,skew=skew,skewchildren=skewchildren)
def scenarioEstimationStudentQuadratic1(skew,skewchildren=False,verbose=False,seed0=1,size=350):
# test a quadratic (one linear) PGBN
# Input parameters:
# - skew, float: skew of distribution
# - skewchildren, boolean (optional): also let conditional distributions of children be skewed
# - verbose, boolean (optional): print summary
# - seed0, integer (optional): seed to which the random number generator is set at the beginning
# - size, integer (optional): length of the simulated time series
print 'Test estimation with SkewNormal distribution with a skew of %f' %skew
if skewchildren:
print 'The same skew is also applied to the children'
print 'Scenario: Quadratic 1; three children; one mean map is linear, the remaining two quadratic'
anom=grv.GaussianRandomVariable('t',[],[],1)
meas1=grv.GaussianRandomVariable('x',[anom],[mm.LinearMeanMap([-0.1,0.8])],3e-2)
meas2=grv.GaussianRandomVariable('y',[anom],[mm.QuadraticMeanMap([0.1,1.2,-0.15])],3e-2)
meas3=grv.GaussianRandomVariable('z',[anom],[mm.QuadraticMeanMap([0.2,1.1,0.1])],6e-2)
inputPGBN=gbn.PyramidGaussianBeliefNetwork(anom,[meas1,meas2,meas3])
return testEstimation.testEstimation(inputPGBN,seed0=seed0,size=size,rvtype='SkewNormal',verbose=verbose,skew=skew,skewchildren=skewchildren)
def scenarioEstimationStudentQuadratic2(skew,skewchildren=False,verbose=False,seed0=1,size=350):
# test a quadratic (one linear) PGBN
# Input parameters:
# - skew, float: skew of distribution
# - skewchildren, boolean (optional): also let conditional distributions of children be skewed
# - verbose, boolean (optional): print summary
# - seed0, integer (optional): seed to which the random number generator is set at the beginning
# - size, integer (optional): length of the simulated time series
print 'Test estimation with SkewNormal distribution with a skew of %f' %skew
if skewchildren:
print 'The same skew is also applied to the children'
print 'Scenario: Quadratic 2; three children; one mean map is linear, the remaining two quadratic'
anom=grv.GaussianRandomVariable('t',[],[],1)
meas1=grv.GaussianRandomVariable('x',[anom],[mm.LinearMeanMap([0.3,0.1])],(0.04)**2)
meas2=grv.GaussianRandomVariable('y',[anom],[mm.QuadraticMeanMap([0.2,0.05,0.005])],(0.08)**2)
meas3=grv.GaussianRandomVariable('z',[anom],[mm.QuadraticMeanMap([0.25,0.06,-0.008])],(0.02)**2)
inputPGBN=gbn.PyramidGaussianBeliefNetwork(anom,[meas1,meas2,meas3])
return testEstimation.testEstimation(inputPGBN,seed0=seed0,size=size,rvtype='SkewNormal',verbose=verbose,skew=skew,skewchildren=skewchildren)
def scenarioEstimationStudentQuadratic3(skew,skewchildren=False,verbose=False,seed0=1,size=350):
# test a quadratic (two linear) PGBN
# Input parameters:
# - skew, float: skew of distribution
# - skewchildren, boolean (optional): also let conditional distributions of children be skewed
# - verbose, boolean (optional): print summary
# - seed0, integer (optional): seed to which the random number generator is set at the beginning
# - size, integer (optional): length of the simulated time series
print 'Test estimation with SkewNormal distribution with a skew of %f' %skew
if skewchildren:
print 'The same skew is also applied to the children'
print 'Scenario: Quadratic 3; three children; two mean maps are linear, the remaining one quadratic'
anom=grv.GaussianRandomVariable('t',[],[],1)
meas1=grv.GaussianRandomVariable('x',[anom],[mm.LinearMeanMap([-0.1,0.8])],1e-1)
    meas2=grv.GaussianRandomVariable('y',[anom],[mm.LinearMeanMap([0.1,1.2])],9e-2)
meas3=grv.GaussianRandomVariable('z',[anom],[mm.QuadraticMeanMap([0.2,1.1,-0.25])],3e-2)
inputPGBN=gbn.PyramidGaussianBeliefNetwork(anom,[meas1,meas2,meas3])
return testEstimation.testEstimation(inputPGBN,seed0=seed0,size=size,rvtype='SkewNormal',verbose=verbose,skew=skew,skewchildren=skewchildren)
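
# Hypothetical driver (not part of the original scenarios; added as a sketch):
# runs the four scenario functions above for a few skew values, assuming the
# GaussianBeliefNetwork/GaussianRandomVariable/MeanMap/testEstimation modules
# imported at the top are available on the path.
if __name__ == '__main__':
    for skew in [0.5, 2.0, 5.0]:
        scenarioEstimationStudentLinear(skew, verbose=True)
        scenarioEstimationStudentQuadratic1(skew, verbose=True)
        scenarioEstimationStudentQuadratic2(skew, verbose=True)
        scenarioEstimationStudentQuadratic3(skew, verbose=True)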
| gpl-3.0 | 6,476,651,784,709,622,000 | 69.414634 | 160 | 0.733634 | false |
AlexandreOuellet/halite-bot | input_data.py | 1 | 6237 | #!/usr/bin/env python
"""Functions for downloading and reading MNIST data."""
import gzip
import os
from six.moves import xrange  # makes the xrange calls below work on Python 3
from six.moves.urllib.request import urlretrieve
import numpy
SOURCE_URL = 'http://yann.lecun.com/exdb/mnist/'
def maybe_download(filename, work_directory):
"""Download the data from Yann's website, unless it's already here."""
if not os.path.exists(work_directory):
os.mkdir(work_directory)
filepath = os.path.join(work_directory, filename)
if not os.path.exists(filepath):
filepath, _ = urlretrieve(SOURCE_URL + filename, filepath)
statinfo = os.stat(filepath)
    print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
return filepath
def _read32(bytestream):
dt = numpy.dtype(numpy.uint32).newbyteorder('>')
return numpy.frombuffer(bytestream.read(4), dtype=dt)
def extract_images(filename):
"""Extract the images into a 4D uint8 numpy array [index, y, x, depth]."""
print('Extracting', filename)
with gzip.open(filename) as bytestream:
magic = _read32(bytestream)
if magic != 2051:
raise ValueError(
'Invalid magic number %d in MNIST image file: %s' %
(magic, filename))
num_images = _read32(bytestream)[0]
rows = _read32(bytestream)[0]
cols = _read32(bytestream)[0]
buf = bytestream.read(rows * cols * num_images)
data = numpy.frombuffer(buf, dtype=numpy.uint8)
data = data.reshape(num_images, rows, cols, 1)
return data
def dense_to_one_hot(labels_dense, num_classes=10):
"""Convert class labels from scalars to one-hot vectors."""
num_labels = labels_dense.shape[0]
index_offset = numpy.arange(num_labels) * num_classes
labels_one_hot = numpy.zeros((num_labels, num_classes))
labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1
return labels_one_hot
def extract_labels(filename, one_hot=False):
"""Extract the labels into a 1D uint8 numpy array [index]."""
print('Extracting', filename)
with gzip.open(filename) as bytestream:
magic = _read32(bytestream)
if magic != 2049:
raise ValueError(
'Invalid magic number %d in MNIST label file: %s' %
(magic, filename))
num_items = _read32(bytestream)[0]
buf = bytestream.read(num_items)
labels = numpy.frombuffer(buf, dtype=numpy.uint8)
if one_hot:
return dense_to_one_hot(labels)
return labels
class DataSet(object):
def __init__(self, images, labels, fake_data=False):
if fake_data:
self._num_examples = 10000
else:
assert images.shape[0] == labels.shape[0], (
"images.shape: %s labels.shape: %s" % (images.shape,
labels.shape))
self._num_examples = images.shape[0]
# Convert shape from [num examples, rows, columns, depth]
# to [num examples, rows*columns] (assuming depth == 1)
assert images.shape[3] == 1
images = images.reshape(images.shape[0],
images.shape[1] * images.shape[2])
# Convert from [0, 255] -> [0.0, 1.0].
images = images.astype(numpy.float32)
images = numpy.multiply(images, 1.0 / 255.0)
self._images = images
self._labels = labels
self._epochs_completed = 0
self._index_in_epoch = 0
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
@property
def num_examples(self):
return self._num_examples
@property
def epochs_completed(self):
return self._epochs_completed
def next_batch(self, batch_size, fake_data=False):
"""Return the next `batch_size` examples from this data set."""
if fake_data:
fake_image = [1.0 for _ in xrange(784)]
fake_label = 0
return [fake_image for _ in xrange(batch_size)], [
fake_label for _ in xrange(batch_size)]
start = self._index_in_epoch
self._index_in_epoch += batch_size
if self._index_in_epoch > self._num_examples:
# Finished epoch
self._epochs_completed += 1
# Shuffle the data
perm = numpy.arange(self._num_examples)
numpy.random.shuffle(perm)
self._images = self._images[perm]
self._labels = self._labels[perm]
# Start next epoch
start = 0
self._index_in_epoch = batch_size
assert batch_size <= self._num_examples
end = self._index_in_epoch
return self._images[start:end], self._labels[start:end]
def read_data_sets(train_dir, fake_data=False, one_hot=False):
class DataSets(object):
pass
data_sets = DataSets()
if fake_data:
data_sets.train = DataSet([], [], fake_data=True)
data_sets.validation = DataSet([], [], fake_data=True)
data_sets.test = DataSet([], [], fake_data=True)
return data_sets
TRAIN_IMAGES = 'train-images-idx3-ubyte.gz'
TRAIN_LABELS = 'train-labels-idx1-ubyte.gz'
TEST_IMAGES = 't10k-images-idx3-ubyte.gz'
TEST_LABELS = 't10k-labels-idx1-ubyte.gz'
VALIDATION_SIZE = 5000
local_file = maybe_download(TRAIN_IMAGES, train_dir)
train_images = extract_images(local_file)
local_file = maybe_download(TRAIN_LABELS, train_dir)
train_labels = extract_labels(local_file, one_hot=one_hot)
local_file = maybe_download(TEST_IMAGES, train_dir)
test_images = extract_images(local_file)
local_file = maybe_download(TEST_LABELS, train_dir)
test_labels = extract_labels(local_file, one_hot=one_hot)
validation_images = train_images[:VALIDATION_SIZE]
validation_labels = train_labels[:VALIDATION_SIZE]
train_images = train_images[VALIDATION_SIZE:]
train_labels = train_labels[VALIDATION_SIZE:]
data_sets.train = DataSet(train_images, train_labels)
data_sets.validation = DataSet(validation_images, validation_labels)
data_sets.test = DataSet(test_images, test_labels)
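  # (Added note, hypothetical usage) data_sets.train/.validation/.test are now
  # populated DataSet objects; callers typically draw mini-batches with e.g.
  #   batch_xs, batch_ys = data_sets.train.next_batch(100)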
  return data_sets
| mit | 8,355,011,669,084,884,000 | 37.036585 | 78 | 0.612474 | false |
googleapis/python-datacatalog | google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py | 1 | 12048 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources
import google.auth # type: ignore
import google.api_core # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.datacatalog_v1.types import policytagmanager
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
try:
# google.auth.__version__ was added in 1.26.0
_GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
try: # try pkg_resources if it is available
_GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
except pkg_resources.DistributionNotFound: # pragma: NO COVER
_GOOGLE_AUTH_VERSION = None
class PolicyTagManagerTransport(abc.ABC):
"""Abstract transport class for PolicyTagManager."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
DEFAULT_HOST: str = "datacatalog.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials is service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
    # TODO(busunkim): This method is in the base transport
    # to avoid duplicating code across the transport classes. These functions
    # should be deleted once the minimum required version of google-auth is increased.
    # TODO: Remove this function once google-auth >= 1.25.0 is required.
@classmethod
def _get_scopes_kwargs(
cls, host: str, scopes: Optional[Sequence[str]]
) -> Dict[str, Optional[Sequence[str]]]:
"""Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
scopes_kwargs = {}
if _GOOGLE_AUTH_VERSION and (
packaging.version.parse(_GOOGLE_AUTH_VERSION)
>= packaging.version.parse("1.25.0")
):
scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
else:
scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
return scopes_kwargs
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.create_taxonomy: gapic_v1.method.wrap_method(
self.create_taxonomy, default_timeout=None, client_info=client_info,
),
self.delete_taxonomy: gapic_v1.method.wrap_method(
self.delete_taxonomy, default_timeout=None, client_info=client_info,
),
self.update_taxonomy: gapic_v1.method.wrap_method(
self.update_taxonomy, default_timeout=None, client_info=client_info,
),
self.list_taxonomies: gapic_v1.method.wrap_method(
self.list_taxonomies, default_timeout=None, client_info=client_info,
),
self.get_taxonomy: gapic_v1.method.wrap_method(
self.get_taxonomy, default_timeout=None, client_info=client_info,
),
self.create_policy_tag: gapic_v1.method.wrap_method(
self.create_policy_tag, default_timeout=None, client_info=client_info,
),
self.delete_policy_tag: gapic_v1.method.wrap_method(
self.delete_policy_tag, default_timeout=None, client_info=client_info,
),
self.update_policy_tag: gapic_v1.method.wrap_method(
self.update_policy_tag, default_timeout=None, client_info=client_info,
),
self.list_policy_tags: gapic_v1.method.wrap_method(
self.list_policy_tags, default_timeout=None, client_info=client_info,
),
self.get_policy_tag: gapic_v1.method.wrap_method(
self.get_policy_tag, default_timeout=None, client_info=client_info,
),
self.get_iam_policy: gapic_v1.method.wrap_method(
self.get_iam_policy, default_timeout=None, client_info=client_info,
),
self.set_iam_policy: gapic_v1.method.wrap_method(
self.set_iam_policy, default_timeout=None, client_info=client_info,
),
self.test_iam_permissions: gapic_v1.method.wrap_method(
self.test_iam_permissions,
default_timeout=None,
client_info=client_info,
),
}
@property
def create_taxonomy(
self,
) -> Callable[
[policytagmanager.CreateTaxonomyRequest],
Union[policytagmanager.Taxonomy, Awaitable[policytagmanager.Taxonomy]],
]:
raise NotImplementedError()
@property
def delete_taxonomy(
self,
) -> Callable[
[policytagmanager.DeleteTaxonomyRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def update_taxonomy(
self,
) -> Callable[
[policytagmanager.UpdateTaxonomyRequest],
Union[policytagmanager.Taxonomy, Awaitable[policytagmanager.Taxonomy]],
]:
raise NotImplementedError()
@property
def list_taxonomies(
self,
) -> Callable[
[policytagmanager.ListTaxonomiesRequest],
Union[
policytagmanager.ListTaxonomiesResponse,
Awaitable[policytagmanager.ListTaxonomiesResponse],
],
]:
raise NotImplementedError()
@property
def get_taxonomy(
self,
) -> Callable[
[policytagmanager.GetTaxonomyRequest],
Union[policytagmanager.Taxonomy, Awaitable[policytagmanager.Taxonomy]],
]:
raise NotImplementedError()
@property
def create_policy_tag(
self,
) -> Callable[
[policytagmanager.CreatePolicyTagRequest],
Union[policytagmanager.PolicyTag, Awaitable[policytagmanager.PolicyTag]],
]:
raise NotImplementedError()
@property
def delete_policy_tag(
self,
) -> Callable[
[policytagmanager.DeletePolicyTagRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def update_policy_tag(
self,
) -> Callable[
[policytagmanager.UpdatePolicyTagRequest],
Union[policytagmanager.PolicyTag, Awaitable[policytagmanager.PolicyTag]],
]:
raise NotImplementedError()
@property
def list_policy_tags(
self,
) -> Callable[
[policytagmanager.ListPolicyTagsRequest],
Union[
policytagmanager.ListPolicyTagsResponse,
Awaitable[policytagmanager.ListPolicyTagsResponse],
],
]:
raise NotImplementedError()
@property
def get_policy_tag(
self,
) -> Callable[
[policytagmanager.GetPolicyTagRequest],
Union[policytagmanager.PolicyTag, Awaitable[policytagmanager.PolicyTag]],
]:
raise NotImplementedError()
@property
def get_iam_policy(
self,
) -> Callable[
[iam_policy_pb2.GetIamPolicyRequest],
Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
]:
raise NotImplementedError()
@property
def set_iam_policy(
self,
) -> Callable[
[iam_policy_pb2.SetIamPolicyRequest],
Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
]:
raise NotImplementedError()
@property
def test_iam_permissions(
self,
) -> Callable[
[iam_policy_pb2.TestIamPermissionsRequest],
Union[
iam_policy_pb2.TestIamPermissionsResponse,
Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
],
]:
raise NotImplementedError()
__all__ = ("PolicyTagManagerTransport",)
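
# Hypothetical usage sketch (not part of the generated module): concrete gRPC /
# gRPC-asyncio subclasses implement the abstract properties above; client code
# would normally obtain a transport indirectly via the PolicyTagManager client
# layer rather than instantiating one directly. The names below are placeholders.
#
#   transport = SomeConcretePolicyTagManagerTransport(
#       host="datacatalog.googleapis.com",
#       credentials=service_account.Credentials.from_service_account_file("key.json"),
#   )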
| apache-2.0 | -6,601,856,494,022,337,000 | 35.509091 | 103 | 0.632885 | false |