# === mjschultz/django-tastefulpy :: tests/basic/tests/resources.py (license: bsd-3-clause) ===
from django.contrib.auth.models import User
from django.http import HttpRequest
from tastefulpy.bundle import Bundle
from tastefulpy.fields import ToOneField, ToManyField
from tastefulpy.resources import ModelResource
from basic.api.resources import SlugBasedNoteResource
from basic.models import Note, AnnotatedNote, SlugBasedNote
from testcases import TestCaseWithFixture
class InvalidLazyUserResource(ModelResource):
notes = ToManyField('basic.api.resources.FooResource', 'notes')
class Meta:
queryset = User.objects.all()
class NoPathLazyUserResource(ModelResource):
notes = ToManyField('FooResource', 'notes')
class Meta:
queryset = User.objects.all()
class LazyUserResource(ModelResource):
notes = ToManyField('basic.tests.resources.NoteResource', 'notes')
class Meta:
queryset = User.objects.all()
api_name = 'foo'
class NoteResource(ModelResource):
class Meta:
queryset = Note.objects.all()
class AnnotatedNoteResource(ModelResource):
class Meta:
queryset = AnnotatedNote.objects.all()
class NoteWithAnnotationsResource(ModelResource):
annotated = ToOneField(AnnotatedNoteResource, 'annotated', null=True)
class Meta:
queryset = Note.objects.all()
class NoteModelResourceTestCase(TestCaseWithFixture):
def test_init(self):
resource_1 = NoteResource()
self.assertEqual(len(resource_1.fields), 8)
self.assertNotEqual(resource_1._meta.queryset, None)
self.assertEqual(resource_1._meta.resource_name, 'note')
# TextFields should have ``default=''`` to match Django's behavior,
# even though that's not what is on the field proper.
self.assertEqual(resource_1.fields['content'].default, '')
def test_lazy_relations(self):
ilur = InvalidLazyUserResource()
nplur = NoPathLazyUserResource()
lur = LazyUserResource()
self.assertEqual(ilur.notes.to, 'basic.api.resources.FooResource')
self.assertEqual(nplur.notes.to, 'FooResource')
self.assertEqual(lur.notes.to, 'basic.tests.resources.NoteResource')
try:
ilur.notes.to_class()
self.fail("to_class on InvalidLazyUserResource should fail!")
except ImportError:
pass
try:
nplur.notes.to_class()
self.fail("to_class on NoPathLazyUserResource should fail!")
except ImportError:
pass
to_class = lur.notes.to_class()
self.assertTrue(isinstance(to_class, NoteResource))
# This is important, as without passing on the ``api_name``, URL
# reversals will fail. Fakes the instance as ``None``, since for
# testing purposes, we don't care.
related = lur.notes.get_related_resource(None)
self.assertEqual(related._meta.api_name, 'foo')
class AnnotatedNoteModelResourceTestCase(TestCaseWithFixture):
def test_one_to_one_regression(self):
# Make sure bits don't completely blow up if the related model
# is gone.
n1 = Note.objects.get(pk=1)
resource_1 = NoteWithAnnotationsResource()
n1_bundle = resource_1.build_bundle(obj=n1)
dehydrated = resource_1.full_dehydrate(n1_bundle)
class DetailURIKwargsResourceTestCase(TestCaseWithFixture):
def test_correct_detail_uri_model(self):
n1 = Note.objects.get(pk=1)
resource = NoteWithAnnotationsResource()
self.assertEqual(resource.detail_uri_kwargs(n1), {
'pk': 1,
})
def test_correct_detail_uri_bundle(self):
n1 = Note.objects.get(pk=1)
resource = NoteWithAnnotationsResource()
n1_bundle = resource.build_bundle(obj=n1)
self.assertEqual(resource.detail_uri_kwargs(n1_bundle), {
'pk': 1,
})
def test_correct_slug_detail_uri_model(self):
n1 = SlugBasedNote.objects.get(pk='first-post')
resource = SlugBasedNoteResource()
self.assertEqual(resource.detail_uri_kwargs(n1), {
'slug': 'first-post',
})
def test_correct_slug_detail_uri_bundle(self):
n1 = SlugBasedNote.objects.get(pk='first-post')
resource = SlugBasedNoteResource()
n1_bundle = resource.build_bundle(obj=n1)
self.assertEqual(resource.detail_uri_kwargs(n1_bundle), {
'slug': 'first-post',
})
class SlugBasedResourceTestCase(TestCaseWithFixture):
def setUp(self):
super(SlugBasedResourceTestCase, self).setUp()
self.n1 = SlugBasedNote.objects.get(pk='first-post')
self.request = HttpRequest()
self.request.method = 'PUT'
self.resource = SlugBasedNoteResource()
self.n1_bundle = self.resource.build_bundle(obj=self.n1)
def test_bundle_unique_field(self):
self.assertEqual(self.resource.get_bundle_detail_data(self.n1_bundle), u'first-post')
def test_obj_update(self):
bundle = self.resource.build_bundle(obj=self.n1, data={
'title': 'Foo!',
})
updated_bundle = self.resource.obj_update(bundle, slug='first-post')
self.assertEqual(updated_bundle.obj.slug, 'first-post')
self.assertEqual(updated_bundle.obj.title, 'Foo!')
# Again, without the PK this time.
self.n1.slug = None
bundle = self.resource.build_bundle(obj=self.n1, data={
'title': 'Bar!',
})
updated_bundle_2 = self.resource.obj_update(bundle, slug='first-post')
self.assertEqual(updated_bundle_2.obj.slug, 'first-post')
self.assertEqual(updated_bundle_2.obj.title, 'Bar!')
def test_update_in_place(self):
new_data = {
'slug': u'foo',
'title': u'Foo!',
}
new_bundle = self.resource.update_in_place(self.request, self.n1_bundle, new_data)
# Check for updated data.
self.assertEqual(new_bundle.obj.title, u'Foo!')
self.assertEqual(new_bundle.obj.slug, u'foo')
# Make sure it looked up the right instance, even though we didn't
# hand it a PK...
self.assertEqual(new_bundle.obj.pk, self.n1_bundle.obj.pk)
def test_rollback(self):
bundles = [
self.n1_bundle
]
self.resource.rollback(bundles)
# Make sure it's gone.
self.assertRaises(SlugBasedNote.DoesNotExist, SlugBasedNote.objects.get, pk='first-post')
# === battlehorse/rhizosphere :: appengine/src/py/handlers/doc.py (license: apache-2.0) ===
#!/usr/bin/env python
#
# Copyright 2010 The Rhizosphere Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import os.path
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
from py import rhizoglobals
class DocHandler(webapp.RequestHandler):
def get(self):
docfile = self.request.path.replace('/doc', '')
if not docfile or docfile == '/':
docfile = '/index.html'
template_values = rhizoglobals.DefaultTemplate(self.request)
template_values.update({
'docfile': docfile,
})
path = os.path.join(os.path.dirname(__file__),
'../../templates/doc%s' % docfile)
self.response.out.write(template.render(path, template_values))
application = webapp.WSGIApplication(
[('/doc/.*', DocHandler),
('/doc', DocHandler),],
debug=rhizoglobals.appenginedebug)
def main():
run_wsgi_app(application)
if __name__ == '__main__':
main()
# === pombredanne/binaryornot :: setup.py (license: bsd-3-clause) ===
#!/usr/bin/env python
import os
import sys
import binaryornot
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Python 2.6 does not have expectedFailure; unittest2 is a backport
tests_require = ['hypothesis']
try:
from unittest.case import expectedFailure
except ImportError:
tests_require.append('unittest2')
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
name='binaryornot',
version='0.4.0',
description='Ultra-lightweight pure Python package to check if a file is binary or text.',
long_description=readme + '\n\n' + history,
author='Audrey Roy',
author_email='[email protected]',
url='https://github.com/audreyr/binaryornot',
packages=[
'binaryornot',
],
package_dir={'binaryornot': 'binaryornot'},
include_package_data=True,
install_requires=[
'chardet>=2.0.0',
],
tests_require = tests_require,
license="BSD",
zip_safe=False,
keywords='binaryornot',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
],
test_suite='tests',
)
# === spacepajamas/DiaTop :: Mapping/Mapping_3_get_TOP_word_distribution.py (license: gpl-3.0) ===
# coding: utf-8
import numpy as np
import pandas as pd
import os
import json
import sys
import re
from collections import Counter
####
import datetime
mylist = []
today = datetime.date.today()
mylist.append(today)
date = str(mylist[0]) # print the date object, not the container ;-)
####
topic_dist_dfname = sys.argv[1] ## M1_topic_distr_df_<DATE>.csv
lda_model_name = sys.argv[2]
num_topwords = sys.argv[3]
######
topics = int(lda_model_name.split('_')[4])
####
if not os.path.exists('M3_data'):
os.makedirs('M3_data')
topic_dist_df = pd.read_csv(topic_dist_dfname)
print topic_dist_df
for i in range(1,topics):
giant_array = []
print i, '**********************'
top1 = topic_dist_df.loc[topic_dist_df[str(i)] > 0]
topn = pd.DataFrame(top1)
topinc_n_filepaths =list( pd.DataFrame(topn)['new_filepaths'])
print topinc_n_filepaths
for relevant_file in topinc_n_filepaths:
top_file = json.load(open(relevant_file, 'r') )
words = [k for k,v in Counter(top_file).items() ]
giant_array.extend(words)
popularwords = [popword[0] for popword in list(Counter(giant_array).most_common(int(num_topwords)))]
outfilename = 'M3_data/M3_topic-'+str(i)+'_top_words.json'
print outfilename
with open(outfilename, 'w') as outfile:
json.dump(popularwords, outfile)
giant_array = []
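# Build a binary document-by-word matrix for this topic: one row per file
# assigned to the topic, one column per popular word, with a 1 wherever the
# file's word list contains that word.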
for k in range(len(topinc_n_filepaths)):
#print i
w_array = [0]*len(popularwords)
for w in popularwords:
with open(topinc_n_filepaths[k]) as json_data:
filedata = json.load(json_data)
#print filedata
#print type(filedata)
if w in filedata:
w_array[popularwords.index(w)] = 1
#print w_array
giant_array.append(np.array(w_array))
a = np.matrix(giant_array)
topic_word_dist = pd.DataFrame(a)
topic_word_dist.columns = popularwords
# print topic_word_dist
# print pd.DataFrame(top1)
topic_word_dist['new_filepaths'] = pd.Series(topinc_n_filepaths)
#print topic_word_dist
#print topn
mid_df = pd.merge(topn,topic_word_dist, on='new_filepaths', how='outer')
out_df = mid_df[['year']+list(mid_df.columns[4+topics:])]
outdfname = 'M3_data/M3_topic-'+str(i)+'_top_words.csv'
out_df.to_csv(outdfname, sep=',', encoding='utf-8', index = False)
# === naifrec/cnn-dropout :: code/filter_visualization.py (license: mit) ===
import os
import sys
import timeit
import pylab
from PIL import Image
import numpy
import theano
import theano.tensor as T
from theano.tensor.signal import downsample
from theano.tensor.nnet import conv
from logistic_sgd import LogisticRegression, load_data
from cnn_tools import HiddenLayer, _dropout_from_layer, DropoutHiddenLayer, LeNetConvPoolLayer, DropoutLeNetConvPoolLayer
def display_filter(W, n_cols = 5):
"""
:type W: numpy_nd_array
:param W: parameter W of a convolutional + max pooling layer
:type n_cols: int
:param n_cols: number of columns in the grid used to display the filters
"""
W_shape = W.shape
n_filters = W_shape[0]
#param filter_shape: (number of filters, num input feature maps, filter height, filter width)
filter_height = W_shape[2]
filter_width = W_shape[3]
n_lines = int(numpy.ceil(n_filters / float(n_cols)))  # float division so the row count is not truncated under Python 2
for n in range(n_filters):
Wn = W[n,0,:,:]
Wn = Wn / Wn.max() # Scaling W to get 0-1 gray scale
pylab.subplot(n_lines, n_cols, n + 1); pylab.axis('off'); pylab.imshow(Wn, cmap=pylab.gray())
pylab.show()
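# Illustrative usage of display_filter (not part of the original script; the
# random tensor below is only a stand-in for a trained parameter array):
#
#   W_demo = numpy.random.randn(20, 1, 5, 5)   # (n_filters, n_channels, height, width)
#   display_filter(W_demo, n_cols=5)            # shows a 4 x 5 grid of 5x5 filters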
def evaluate_lenet5(initial_learning_rate=0.1, learning_rate_decay = 1,
dropout_rates = [0.2, 0.2, 0.2, 0.5],
n_epochs=50,
dataset='mnist.pkl.gz',
display_filters = True,
nkerns=[20, 50], batch_size=500):
""" Demonstrates lenet on MNIST dataset
:type learning_rate: float
:param learning_rate: learning rate used (factor for the stochastic
gradient)
:type learning_rate_decay: float
:param learning_rate_decay: learning rate decay used (1 means learning rate decay is deactivated)
:type dropout_rates: list of float
:param dropout_rates: dropout rate used for each layer (input layer, 1st filtered layer, 2nd filtered layer, fully connected layer)
:type n_epochs: int
:param n_epochs: maximal number of epochs to run the optimizer
:type dataset: string
:param dataset: path to the dataset used for training /testing (MNIST here)
:type nkerns: list of ints
:param nkerns: number of kernels on each layer
"""
rng = numpy.random.RandomState(23455)
datasets = load_data(dataset)
train_set_x, train_set_y = datasets[0]
valid_set_x, valid_set_y = datasets[1]
test_set_x, test_set_y = datasets[2]
# compute number of minibatches for training, validation and testing
n_train_batches = train_set_x.get_value(borrow=True).shape[0]
n_valid_batches = valid_set_x.get_value(borrow=True).shape[0]
n_test_batches = test_set_x.get_value(borrow=True).shape[0]
n_train_batches /= batch_size
n_valid_batches /= batch_size
n_test_batches /= batch_size
# allocate symbolic variables for the data
index = T.lscalar() # index to a [mini]batch
epoch = T.scalar()
x = T.matrix('x') # the data is presented as rasterized images
y = T.ivector('y') # the labels are presented as 1D vector of
# [int] labels
learning_rate = theano.shared(numpy.asarray(initial_learning_rate,
dtype=theano.config.floatX))
######################
# BUILD ACTUAL MODEL #
######################
print '... building the model'
# Reshape matrix of rasterized images of shape (batch_size, 28 * 28)
# to a 4D tensor, compatible with our LeNetConvPoolLayer
# (28, 28) is the size of MNIST images.
layer0_input = x.reshape((batch_size, 1, 28, 28))
layer0_input_dropout = _dropout_from_layer(rng, layer0_input, dropout_rates[0])
# Construct the first convolutional pooling layer:
# filtering reduces the image size to (28-5+1 , 28-5+1) = (24, 24)
# maxpooling reduces this further to (24/2, 24/2) = (12, 12)
# 4D output tensor is thus of shape (batch_size, nkerns[0], 12, 12)
layer0_dropout = DropoutLeNetConvPoolLayer(
rng,
input=layer0_input_dropout,
image_shape=(batch_size, 1, 28, 28),
filter_shape=(nkerns[0], 1, 5, 5),
poolsize=(2, 2),
dropout_rate= dropout_rates[1]
)
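# The "clean" layer below shares the dropout layer's weights, rescaled by the
# retain probability of its input (1 - dropout_rates[0]); this is the usual
# test-time weight-scaling approximation for dropout. Only the dropout path
# receives gradient updates (see the params list further down).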
layer0 = LeNetConvPoolLayer(
rng,
input=layer0_input,
image_shape=(batch_size, 1, 28, 28),
filter_shape=(nkerns[0], 1, 5, 5),
poolsize=(2, 2),
W=layer0_dropout.W * (1 - dropout_rates[0]),
b=layer0_dropout.b
)
# Construct the second convolutional pooling layer
# filtering reduces the image size to (12-5+1, 12-5+1) = (8, 8)
# maxpooling reduces this further to (8/2, 8/2) = (4, 4)
# 4D output tensor is thus of shape (batch_size, nkerns[1], 4, 4)
layer1_dropout = DropoutLeNetConvPoolLayer(
rng,
input=layer0_dropout.output,
image_shape=(batch_size, nkerns[0], 12, 12),
filter_shape=(nkerns[1], nkerns[0], 5, 5),
poolsize=(2, 2),
dropout_rate = dropout_rates[2]
)
layer1 = LeNetConvPoolLayer(
rng,
input=layer0.output,
image_shape=(batch_size, nkerns[0], 12, 12),
filter_shape=(nkerns[1], nkerns[0], 5, 5),
poolsize=(2, 2),
W=layer1_dropout.W * (1 - dropout_rates[1]),
b=layer1_dropout.b
)
# the HiddenLayer being fully-connected, it operates on 2D matrices of
# shape (batch_size, num_pixels) (i.e matrix of rasterized images).
# This will generate a matrix of shape (batch_size, nkerns[1] * 4 * 4),
# or (500, 50 * 4 * 4) = (500, 800) with the default values.
layer2_dropout_input = layer1_dropout.output.flatten(2)
layer2_input = layer1.output.flatten(2)
# construct a fully-connected sigmoidal layer
layer2_dropout = DropoutHiddenLayer(
rng,
input=layer2_dropout_input,
n_in=nkerns[1] * 4 * 4,
n_out=500,
activation=T.tanh,
dropout_rate = dropout_rates[3]
)
layer2 = HiddenLayer(
rng,
input=layer2_input,
n_in=nkerns[1] * 4 * 4,
n_out=500,
activation=T.tanh,
W=layer2_dropout.W * (1 - dropout_rates[2]),
b=layer2_dropout.b
)
# classify the values of the fully-connected sigmoidal layer
layer3_dropout = LogisticRegression(
input = layer2_dropout.output,
n_in = 500, n_out = 10)
layer3 = LogisticRegression(
input=layer2.output,
n_in=500, n_out=10,
W=layer3_dropout.W * (1 - dropout_rates[-1]),
b=layer3_dropout.b
)
# the cost we minimize during training is the NLL of the model
cost = layer3.negative_log_likelihood(y)
dropout_cost = layer3_dropout.negative_log_likelihood(y)
# create a function to compute the mistakes that are made by the model
test_model = theano.function(
[index],
layer3.errors(y),
givens={
x: test_set_x[index * batch_size: (index + 1) * batch_size],
y: test_set_y[index * batch_size: (index + 1) * batch_size]
}
)
validate_model = theano.function(
[index],
layer3.errors(y),
givens={
x: valid_set_x[index * batch_size: (index + 1) * batch_size],
y: valid_set_y[index * batch_size: (index + 1) * batch_size]
}
)
# create a list of all model parameters to be fit by gradient descent
params = layer3_dropout.params + layer2_dropout.params + layer1_dropout.params + layer0_dropout.params
# create a list of gradients for all model parameters
grads = T.grad(dropout_cost, params)
# train_model is a function that updates the model parameters by SGD
updates = [
(param_i, param_i - learning_rate * grad_i)
for param_i, grad_i in zip(params, grads)
]
train_model = theano.function(
[index],
dropout_cost,
updates=updates,
givens={
x: train_set_x[index * batch_size: (index + 1) * batch_size],
y: train_set_y[index * batch_size: (index + 1) * batch_size]
}
)
# Theano function to decay the learning rate
decay_learning_rate = theano.function(inputs=[], outputs=learning_rate,
updates={learning_rate: learning_rate * learning_rate_decay})
###############
# TRAIN MODEL #
###############
print '... training'
# early-stopping parameters
patience = 10000 # look as this many examples regardless
patience_increase = 2 # wait this much longer when a new best is
# found
improvement_threshold = 0.995 # a relative improvement of this much is
# considered significant
validation_frequency = min(n_train_batches, patience / 2)
# go through this many
# minibatche before checking the network
# on the validation set; in this case we
# check every epoch
best_validation_loss = numpy.inf
best_iter = 0
test_score = 0.
start_time = timeit.default_timer()
epoch = 0
done_looping = False
while (epoch < n_epochs) and (not done_looping):
epoch = epoch + 1
for minibatch_index in xrange(n_train_batches):
iter = (epoch - 1) * n_train_batches + minibatch_index
if iter % 100 == 0:
print 'training @ iter = ', iter
cost_ij = train_model(minibatch_index)
if (iter + 1) % validation_frequency == 0:
# compute zero-one loss on validation set
validation_losses = [validate_model(i) for i
in xrange(n_valid_batches)]
this_validation_loss = numpy.mean(validation_losses)
print('epoch %i, minibatch %i/%i, validation error %f %%' %
(epoch, minibatch_index + 1, n_train_batches,
this_validation_loss * 100.))
# if we got the best validation score until now
if this_validation_loss < best_validation_loss:
#improve patience if loss improvement is good enough
if this_validation_loss < best_validation_loss * \
improvement_threshold:
patience = max(patience, iter * patience_increase)
# save best validation score and iteration number
best_validation_loss = this_validation_loss
best_iter = iter
# test it on the test set
test_losses = [
test_model(i)
for i in xrange(n_test_batches)
]
test_score = numpy.mean(test_losses)
print((' epoch %i, minibatch %i/%i, test error of '
'best model %f %%') %
(epoch, minibatch_index + 1, n_train_batches,
test_score * 100.))
if patience <= iter:
done_looping = True
break
new_learning_rate = decay_learning_rate()
end_time = timeit.default_timer()
print('Optimization complete.')
print('Best validation score of %f %% obtained at iteration %i, '
'with test performance %f %%' %
(best_validation_loss * 100., best_iter + 1, test_score * 100.))
print >> sys.stderr, ('The code for file ' +
os.path.split(__file__)[1] +
' ran for %.2fm' % ((end_time - start_time) / 60.))
if display_filters:
# Retrieving the filters from first and second layer
first_convlayer_params = theano.function([], layer0_dropout.params)
second_convlayer_params = theano.function([], layer1_dropout.params)
W0 = first_convlayer_params[0]
W1 = second_convlayer_params[0]
# Display filters from first layer (20 filters)
display_filter(W0)
# Display filters from second layer (50 filters)
display_filter(W1)
if __name__ == '__main__':
evaluate_lenet5()
# === plazas/wfirst-detectors-vnl :: code/bias_nonlinearity_vs_beta_version2.py (license: mit) ===
#!/usr/bin/python
import numpy as np
import os
import sys
import math
import matplotlib
matplotlib.use('Pdf')
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.font_manager as fm
## 7-20-15
## Simple code to explore the non-linearity (NL) bias as a function of beta, using the interleaving method
import galsim
import galsim.wfirst as wfirst
filters = wfirst.getBandpasses (AB_zeropoint=True)
import logging
logging.basicConfig(format="%(message)s", level=logging.INFO, stream=sys.stdout)
logger = logging.getLogger("tests_hsm_interleaving")
f=lambda x,beta : x - beta*x*x
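# Quick sanity check of the quadratic model above (illustrative numbers, not
# part of the original analysis): with the nominal beta used later (3.566e-7),
# a pixel holding 1e5 counts is suppressed by beta*x, roughly 3.6%:
#   >>> x = 1.0e5
#   >>> 1.0 - f(x, 3.566e-7) / x
#   0.03566 (approximately)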
from galsim.cdmodel import *
from sim2 import * ## where all the BF stuff is
from scipy import optimize
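# measurement_function_NL: measures adaptive moments of `profile` once without
# the effect (reference image) and once on an n x n interleaved rendering with
# the effect ('nl' non-linearity or 'bf' brighter-fatter) applied to each
# offset copy, then appends the e1/e2 shifts and the fractional size change to
# the *_inter_vec lists passed in.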
def measurement_function_NL (profile, e1_inter_vec=[], e2_inter_vec=[], size_inter_vec=[], noise=None, beta=3.566e-7, string='', type='nl'):
print " "
print "n: ", n
logger.info (string)
print "beta: ", beta
#### Calculate moments without effect
image=profile.drawImage(image=galsim.Image(base_size, base_size), scale=pixel_scale/n, method='no_pixel')
#print np.amax(image.array), np.sum(image.array)
if not noise == None:
read_noise = galsim.GaussianNoise(sigma=noise/(n**2))
image.addNoise(read_noise)
results=image.FindAdaptiveMom(hsmparams=new_params)
ref_e1=results.observed_shape.e1
ref_e2=results.observed_shape.e2
ref_s=results.moments_sigma
print "ref_e1, ref_e2, ref_s", ref_e1, ref_e2, ref_s
## Interleave the profiles
im_list=[]
offsets_list=[]
#create list of images to interleave-no effect
for j in xrange(n):
for i in xrange(n):
im=galsim.Image(base_size, base_size)
offset=galsim.PositionD(-(i+0.5)/n+0.5, -(j+0.5)/n+0.5)
offsets_list.append(offset)
#print "Offset: ", offset
profile.drawImage(image=im, scale=pixel_scale, offset=offset, method='no_pixel')
if type == 'bf':
#cd = PowerLawCD(5, 5.e-7, 5.e-7, 1.5e-7, 1.5e-7, 2.5e-7, 2.5e-7, 1.3)
cd = BaseCDModel (aL,aR,aB,aT)
im=cd.applyForward(im)
elif type == 'nl':
im.applyNonlinearity(f,beta)
else:
print "wrong type: 'bf' or 'nl' "
sys.exit(1)
im_list.append(im)
image=galsim.utilities.interleaveImages(im_list=im_list, N=(n,n), offsets=offsets_list)
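# interleaveImages stitches the n*n sub-pixel-shifted copies into one image
# sampled n times finer than pixel_scale, so the per-copy effect propagates
# into the oversampled moment measurement below.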
print "Image shape, after interleave: ", image.array.shape
if not noise == None:
read_noise = galsim.GaussianNoise(sigma=noise)
image.addNoise(read_noise)
results=image.FindAdaptiveMom(hsmparams=new_params)
e1_inter_vec.append (results.observed_shape.e1 - ref_e1)
e2_inter_vec.append (results.observed_shape.e2 - ref_e2)
size_inter_vec.append ( (results.moments_sigma - ref_s) / ref_s)
print "results.observed_shape.e1, results.observed_shape.e2, results.moments_sigma ", results.observed_shape.e1, results.observed_shape.e2, results.moments_sigma
### Parameters
k=1000
base_size=1*k ## ??
n=3
m_zero=20 # 24
#m_gal=20
#gal_flux=6e4*2.521**(m_zero-m_gal)
gal_sigma=0.1
print "gal_sigma", gal_sigma
pixel_scale=0.11
noise=20
#e=0.0
type='nl'
if type == 'bf':
(aL,aR,aB,aT) = readmeanmatrices()
#lam = 1380. # NB: don't use lambda - that's a reserved word.
tel_diam = 2.4
obscuration_optical=0.3
#lam_over_diam = lam * 1.e-9 / tel_diam * galsim.radians
#lam_over_diam = lam_over_diam / galsim.arcsec
#print "lam_over_diam", lam_over_diam
#Define wavelengths, ellipticities, and magnitudes
wavelength_dict={'Z087':0.869,'Y106':1.060, 'J129':1.293, 'W149':1.485, 'H158':1.577, 'F184':1.842} # in microns
#wavelength_dict={'H158':1.577, 'Y106':1.060}
flux_dict={'Z087':8.57192e4,'Y106':8.68883e4, 'J129':8.76046e4, 'W149':2.68738e4, 'H158':8.81631e4, 'F184':6.08258e4}
#e_vec=[ (0., 0.), (0.05, 0.), (0., 0.05), (0.05, 0.05) ]#, (0., 0.075), (0.075, 0.), (0.075, 0.075)] #, 0.05, 0.06, 0.07, 0.08]
#e_vec= [(-0.05, 0.), (-0.025, 0.), (0.0, 0.0), (0.05, 0.), (0.025, 0.), (0.0, -0.05), (0.0, -0.025), (0.0, 0.025), (0.0, 0.05)]
e_vec= [ (-0.05, 0.), (-0.025, 0.), (0.0, 0.0), (0.025, 0.), (0.05, 0.) ]
e_theta=[0, 45, 90, 135 ]
new_params = galsim.hsm.HSMParams(max_amoment=60000000, max_mom2_iter=10000000, max_moment_nsig2=10000)
big_fft_params = galsim.GSParams(maximum_fft_size=4096)
m_gal_vec= [20] #, 20, 21, 22,23,24]
beta0=3.566e-7
beta_vec=[ 0., 0.1*beta0, 0.5*beta0, beta0, 1.5*beta0,2*beta0]
#print beta_vec
#sys.exit()
#vectors that will hold the output to plot
gauss_no_noise={}
optical_no_noise={}
gauss_noise={}
optical_noise={}
gauss_no_noise={}
optical_no_noise={}
gauss_noise={}
optical_noise={}
for lam in wavelength_dict:
gauss_no_noise[lam]={} #\Delta e1, \Delta e2, \Delta R/R
optical_no_noise[lam]={}
gauss_noise[lam]={}
optical_noise[lam]={}
for e in e_vec:
gauss_no_noise[lam][e]=[[],[],[]] #\Delta e1, \Delta e2, \Delta R/R
optical_no_noise[lam][e]=[[],[],[]]
gauss_noise[lam][e]=[[],[],[]]
optical_noise[lam][e]=[[],[],[]]
#for m_gal in m_gal_vec:
# gauss_no_noise[lam][m_gal]=[[],[],[]] #\Delta e1, \Delta e2, \Delta R/R
# optical_no_noise[lam][m_gal]=[[],[],[]]
# gauss_noise[lam][m_gal]=[[],[],[]]
# optical_noise[lam][m_gal]=[[],[],[]]
#for e in e_vec:
# gauss_no_noise[e]=[[],[],[]]
# optical_no_noise[e]=[[],[],[]]
# gauss_noise[e]=[[],[],[]]
# optical_noise[e]=[[],[],[]]
#for m_gal in m_gal_vec:
# gauss_no_noise[m_gal]=[[],[],[]]
# optical_no_noise[m_gal]=[[],[],[]]
# gauss_noise[m_gal]=[[],[],[]]
# optical_noise[m_gal]=[[],[],[]]
#for e in [e1_true]: ### Just one value of e1=0.01. Not really a nested loop.
for lam in wavelength_dict:
lam_over_diam = wavelength_dict[lam] * 1.e-6 / tel_diam * galsim.radians
lam_over_diam = lam_over_diam / galsim.arcsec
for e in e_vec:
for beta in beta_vec:
for m_gal in m_gal_vec:
logger.info(" ")
logger.info("ellipticity: (%g, %g)", e[0], e[1] )
logger.info("lambda: %s microns", wavelength_dict[lam])
logger.info("beta: %g", beta)
logger.info("magnitude: %g", m_gal)
# Gaussian
# no noise
#logger.info("First loop: gaussian, no noise")
gal_flux=flux_dict[lam]*2.512**(m_zero-m_gal)
#gal= galsim.Convolve (galsim.Gaussian(flux=gal_flux, sigma=gal_sigma).shear(galsim.Shear(e1=e[0],e2=e[1])) , galsim.Pixel(pixel_scale), gsparams=big_fft_params )
#measurement_function_NL (gal, e1_inter_vec=gauss_no_noise[lam][e][0], e2_inter_vec=gauss_no_noise[lam][e][1], size_inter_vec=gauss_no_noise[lam][e][2], noise=None, beta=beta, string='Gausian, no noise')
###### noise
#measurement_function_NL (gal, e1_inter_vec=gauss_noise[m_gal][0], e2_inter_vec=gauss_noise[m_gal][1], size_inter_vec=gauss_noise[m_gal][2], noise=noise, beta=beta, string='Gaussian, noise')
#######################Optical
logger.info("Third loop: Optical, no noise")
gal=galsim.Convolve (galsim.OpticalPSF(lam_over_diam, obscuration=obscuration_optical, flux=gal_flux).shear(galsim.Shear(e1=e[0],e2=e[1])), galsim.Pixel(pixel_scale), gsparams=big_fft_params )
measurement_function_NL (gal, e1_inter_vec=optical_no_noise[lam][e][0], e2_inter_vec=optical_no_noise[lam][e][1], size_inter_vec=optical_no_noise[lam][e][2], noise=None, beta=beta, string='Optical, no noise')
###### noise
#measurement_function_NL (gal, e1_inter_vec=optical_noise[m_gal][0], e2_inter_vec=optical_noise[m_gal][1], size_inter_vec=optical_noise[m_gal][2], noise=noise, beta=beta, string='Optical, noise')
#########################WFIRST
#gal=wfirst.getPSF(SCAs=7,approximate_struts=True, wavelength=filters['W149'])[7].shear(galsim.Shear(e1=e, e2=e))
#measurement_function_NL (gal, true_e1=e, true_e2=e, true_s=0., e1_vec=w_e1, e2_vec=w_e2, size_vec=w_s, e1_inter_vec=wi_e1, e2_inter_vec=wi_e2, size_inter_vec=wi_s, noise=None, string='WFIRST, no noise')
# noise
#measurement_function_NL (gal, true_e1=e, true_e2=e, true_s=0., e1_vec=nw_e1, e2_vec=nw_e2, size_vec=nw_s, e1_inter_vec=nwi_e1, e2_inter_vec=nwi_e2, size_inter_vec=nwi_s, noise=noise, string='WFIRST, noise')
#factor_vec=xrange(1,11)
#for e in [e_vec[1]]:
# for factor in factor_vec:
pp=PdfPages("test_bias_NL_vs_beta.pdf")
print "Name of output PDF file: test_bias_NL_vs_beta.pdf"
#### PLOTS
#### Do the plotting here
plt.minorticks_on()
#plt.tight_layout()
### We do not have matplotlib 1.1, with the 'style' package. Modify the matplotlibrc file parameters instead
import matplotlib as mpl
mpl.rc('lines', linewidth=1, color='black', linestyle='-')
mpl.rc('font', family='serif',weight='normal', size=10.0 )
mpl.rc('text', color='black', usetex=False)
mpl.rc('axes', edgecolor='black', linewidth=1, grid=False, titlesize=9, labelsize=10, labelweight='normal',labelcolor='black')
mpl.rc('axes.formatter', limits=[-4,4])
mpl.rcParams['xtick.major.size']=7
mpl.rcParams['xtick.minor.size']=4
mpl.rcParams['xtick.major.pad']=8
mpl.rcParams['xtick.minor.pad']=8
mpl.rcParams['xtick.labelsize']= '11'
mpl.rcParams['xtick.minor.width']= 1.0
mpl.rcParams['xtick.major.width']= 1.0
mpl.rcParams['ytick.major.size']=7
mpl.rcParams['ytick.minor.size']=4
mpl.rcParams['ytick.major.pad']=8
mpl.rcParams['ytick.minor.pad']=8
mpl.rcParams['ytick.labelsize']= '11'
mpl.rcParams['ytick.minor.width']= 1.0
mpl.rcParams['ytick.major.width']= 1.0
mpl.rc ('legend', numpoints=1, fontsize='10', shadow=False, frameon=False)
## Plot parameters
plt.subplots_adjust(hspace=0.01, wspace=0.01)
prop = fm.FontProperties(size=9)
marker_size=7
loc_label = "upper left"
visible_x, visible_y = True, True
grid=False
ymin, ymax = -0.0001, 0.0001
m_req=1e-3
c_req=1e-4
color_vec=['r', 'y', 'g', 'c', 'b', 'm', 'k']
#color_dict={0.0:'r', 0.025:'k', 0.05:'b', 0.075:'m', 0.08:'c', 0.1:'g'}
color_dict_e={}
for i,e in enumerate(e_vec):
color_dict_e[e]=color_vec[i%len(color_vec)]
color_dict_m={}
for i,m_gal in enumerate(m_gal_vec):
color_dict_m[m_gal]=color_vec[i%len(color_vec)]
color_vec_lam=['m','b', 'c', 'g', 'y', 'r']
color_dict_lam={}
for i,lam in enumerate(wavelength_dict):
color_dict_lam[lam]=color_vec_lam[i%len(color_vec_lam)]
alpha=0.6
plot_positions_six={'Z087':321,'Y106':322, 'J129':323, 'W149':324, 'H158':325, 'F184':326}
## Theory for Delta R / R
#def theory_size_gauss (sigma, beta, flux_vec):
# sigma/=(pixel_scale) ### Convert to pixels?
# return ( (8*math.pi - beta*flux_vec/(sigma**2) ) / (8*math.pi - 2*beta*flux_vec/(sigma**2) ) ) - 1
#flux_vec=flux_dict['H158']*2.512**( m_zero - np.array(mag_gal_vec) )
#ratio_vec= theory_size_gauss (gal_sigma, beta_vec, flux_vec )
def add_subplot_axes(ax,rect,axisbg='w'):
fig = plt.gcf()
box = ax.get_position()
width = box.width
height = box.height
inax_position = ax.transAxes.transform(rect[0:2])
transFigure = fig.transFigure.inverted()
infig_position = transFigure.transform(inax_position)
x = infig_position[0]
y = infig_position[1]
width *= rect[2]
height *= rect[3] # <= Typo was here
subax = fig.add_axes([x,y,width,height],axisbg=axisbg)
x_labelsize = subax.get_xticklabels()[0].get_size()
y_labelsize = subax.get_yticklabels()[0].get_size()
x_labelsize *= rect[2]**0.5
y_labelsize *= rect[3]**0.5
subax.xaxis.set_tick_params(labelsize=x_labelsize)
subax.yaxis.set_tick_params(labelsize=y_labelsize)
return subax
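# add_subplot_axes returns an inset Axes placed at rect = [x, y, width, height],
# expressed as fractions of the parent Axes; plot_function_r below uses it for
# the zoomed-in insets.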
def plot_function_e_and_r (fig, x1_vec, y1_vec, x2_vec, y2_vec, xlabel1='', xlabel2='', ylabel1=r"$\Delta$e", ylabel2=r"$\Delta$R/R", lam_key='', e_key=(0.0, 0.0), m_key='', label_bool=False):
color_fmt=color_dict_lam[lam_key]
#plot_pos=plot_positions_six[lam_key]
#label='e=(%g, %g)' %(e_key[0], e_key[1])
label=lam_key
#print "x1_vec, y1_vec, x2_vec, y2_vec", x1_vec, y1_vec, x2_vec, y2_vec
#fig = plt.figure()
ax = fig.add_subplot (211)
ax.errorbar( x1_vec, y1_vec, yerr=None, ecolor = color_fmt, label=label, fmt=color_fmt+'s-', markersize=marker_size, alpha=alpha)
#ax.errorbar( x_vec, y2_vec, yerr=None, ecolor = color_fmt, label='e2=%g'%e_key[1], fmt=color_fmt+'x-', markersize=marker_size, alpha=alpha)
plt.axhline(y=0.,color='k',ls='solid')
#plt.axhspan(-m_req, m_req, facecolor='0.5', alpha=0.3)
ax.set_xticklabels([int(x) for x in ax.get_xticks()], visible=visible_x)
lx=ax.set_xlabel(xlabel1, visible=visible_x)
#lx.set_fontsize(font_size)
ax.set_xscale('linear')
ax.set_yticklabels(ax.get_yticks(), visible= visible_y)
ly=ax.set_ylabel(ylabel1, visible=visible_y)
#ly.set_fontsize(font_size)
ax.set_yscale('linear')
#plt.ylim ([ymin, ymax])
xmin, xmax=plt.xlim()
delta=(xmax-xmin)
plt.xlim ([xmin - 0.01*delta, xmax + 0.01*delta])
#plt.title(lam_key+" (%g $\mu$m)"%wavelength_dict[lam], fontsize=11)
#if plot_pos== 321:
if label_bool:
ax.legend(loc=loc_label , fancybox=True, ncol=2, numpoints=1, prop = prop)
#plt.grid(grid, which='both', ls='-', alpha=0.5)
plt.grid(grid)
ax = fig.add_subplot (212)
ax.errorbar( x2_vec, y2_vec, yerr=None, ecolor = color_fmt, label=label, fmt=color_fmt+'o-', markersize=marker_size, alpha=alpha)
#ax.errorbar( x_vec, theory_delta_r_gauss, yerr=None, ecolor = 'k', label='theory Gauss', fmt='r-', markersize=marker_size, alpha=1.)
plt.axhline(y=0.,color='k',ls='solid')
#if label_bool:
#plt.axhline(y=1e-4, color='r',ls='-', label='1e-4') # requirement
#ax.errorbar(x_vec, ratio_vec, yerr=None, ecolor = 'b', label='Theory', fmt='bo-', markersize=marker_size, alpha=alpha)
#plt.axhspan(-m_req, m_req, facecolor='0.5', alpha=0.3)
ax.set_xticklabels([int(x) for x in ax.get_xticks()], visible=visible_x)
lx=ax.set_xlabel(xlabel2, visible=visible_x)
#lx.set_fontsize(font_size)
ax.set_xscale('linear')
ax.set_yticklabels(ax.get_yticks(), visible= visible_y)
ly=ax.set_ylabel(ylabel2, visible=visible_y)
#ly.set_fontsize(font_size)
ax.set_yscale('linear')
#plt.ylim ([ymin, ymax])
xmin, xmax=plt.xlim()
delta=(xmax-xmin)
plt.xlim ([xmin - 0.01*delta, xmax + 0.01*delta])
#if type=='o':
#plt.ylim ([0., 0.026])
#plt.ylim([0., 0.18e-4])
#plt.title(lam_key+" (%g $\mu$m)"%wavelength_dict[lam], fontsize=11)
#if plot_pos== 324:
if label_bool:
ax.legend(loc=loc_label , fancybox=True, ncol=2, numpoints=1, prop = prop)
#Inset with zoom
#subpos = [0.35, 0.30, 0.475, 0.35]
#subax1 = add_subplot_axes(ax,subpos)
#subax1.plot (x_vec, y3_vec, color_fmt+'o-', markersize=marker_size, alpha=alpha)
#subax1.plot (x_vec, ratio_vec,'bo-', markersize=marker_size, alpha=alpha)
#subax1.axhline(y=1e-4, color='r',ls='--')
#plt.ylim([-1e-4, 3e-4])
#if type == 'o':
# plt.xlim ([22, 24.5])
#else:
# plt.xlim ([21.8, 24.2])
# subax1.set_yticklabels(subax1.get_yticks(), size=5, visible=True)
# subax1.set_xticklabels(subax1.get_xticks(), size=5, visible=True)
def plot_function_e (fig, x_vec, y1_vec, y2_vec, string='', xlabel='', y1label=r"$\Delta$e", label_string='', lam_key='', e_key=(0.0,0.0), m_key=''):
color_fmt = color_dict_m[m_key]
plot_pos=plot_positions_six[lam_key]
label='e1=%g, m=%g'%(e_key,m_key)
#label='e1=%g'%e_key
label2='e2=%g, m=%g'%(e_key,m_key)
#label2='e2=%g'%e_key
ax = fig.add_subplot (plot_pos)
ax.errorbar( x_vec, y1_vec, yerr=None, ecolor = color_fmt, label=label, fmt=color_fmt+'s-', markersize=marker_size, alpha=alpha)
ax.errorbar( x_vec, y2_vec, yerr=None, ecolor = color_fmt, label=label2, fmt=color_fmt+'x-', markersize=marker_size, alpha=alpha)
plt.axhline(y=0.,color='k',ls='solid')
ax.set_xticklabels([int(x) for x in ax.get_xticks()], visible=visible_x)
lx=ax.set_xlabel(xlabel, visible=visible_x)
#lx.set_fontsize(font_size)
ax.set_xscale('linear')
ax.set_yticklabels(ax.get_yticks(), visible= visible_y)
ly=ax.set_ylabel(y1label, visible=visible_y)
#ly.set_fontsize(font_size)
ax.set_yscale('linear')
#plt.ylim ([ymin, ymax])
xmin, xmax=plt.xlim()
delta=(xmax-xmin)
plt.xlim ([xmin-0.03*delta, xmax + 0.03*delta])
plt.title(lam_key+" (%g $\mu$m)"%wavelength_dict[lam], fontsize=10)
if plot_pos== 321:
ax.legend(loc=loc_label , fancybox=True, ncol=2, numpoints=1, prop = prop)
#plt.grid(grid, which='both', ls='-', alpha=0.5)
plt.grid(grid)
def plot_function_r (fig, x_vec, y3_vec, xlabel='', y2label=r"$\Delta$R/R", lam_key='',m_key='', e_key=0.0, type='o'):
color_fmt = color_dict_m[m_key]
plot_pos=plot_positions_six[lam_key]
ax = fig.add_subplot (plot_pos)
label='m=%g'%(m_key)
#label='e1=e2=%g'%(e_key)
ax.errorbar( x_vec, y3_vec, yerr=None, ecolor = color_fmt, label=label, fmt=color_fmt+'o-', markersize=marker_size, alpha=alpha)
plt.axhline(y=0.,color='k',ls='solid')
plt.axhline(y=1e-4, color='r',ls='--') # requirement
plt.axvline(x=beta0, color='b',ls='--') # nominal beta
#plt.axhspan(-m_req, m_req, facecolor='0.5', alpha=0.3)
ax.set_xticklabels([int(x) for x in ax.get_xticks()], visible=visible_x)
lx=ax.set_xlabel(xlabel, visible=visible_x)
#lx.set_fontsize(font_size)
ax.set_xscale('linear')
ax.set_yticklabels(ax.get_yticks(), visible= visible_y)
ly=ax.set_ylabel(y2label, visible=visible_y)
#ly.set_fontsize(font_size)
ax.set_yscale('linear')
#plt.ylim ([ymin, ymax])
xmin, xmax=plt.xlim()
delta=(xmax-xmin)
plt.xlim ([xmin-0.03*delta, xmax + 0.06*delta])
if type=='o':
plt.ylim ([0., 0.009])
plt.title(lam_key+" (%g $\mu$m)"%wavelength_dict[lam], fontsize=10)
if plot_pos== 324:
ax.legend(loc=loc_label , fancybox=True, ncol=1, numpoints=1, prop = prop)
#plt.grid(grid, which='both', ls='-', alpha=0.5)
#Inset with zoom
subpos = [1-0.275, 0.15, 0.29, 0.375]
if type == 'o':
subpos = [1-0.275, 0.25, 0.29, 0.375]
subax1 = add_subplot_axes(ax,subpos)
subax1.plot ( x_vec, y3_vec, color_fmt+'o-', label=label, markersize=marker_size, alpha=alpha)
subax1.axhline(y=1e-4, color='r',ls='--')
plt.ylim([0., 3e-4])
if type == 'o':
plt.xlim ([7e-9, 3e-8])
else:
plt.xlim ([1e-8, 6e-8])
subax1.set_yticklabels(subax1.get_yticks(), size=3.9, visible=True)
subax1.set_xticklabels(subax1.get_xticks(), size=3.9, visible=True)
if plot_pos in [322,324,326]:
subax1.yaxis.set_label_position("right")
subax1.yaxis.tick_right()
subax1.set_yticklabels(subax1.get_yticks(), size=4, visible=True)
subax1.set_xticklabels(subax1.get_xticks(), size=3.9, visible=True)
#pp.savefig()
plot_positions_six={'Z087':321,'Y106':322, 'J129':323, 'W149':324, 'H158':325, 'F184':326}
if type == 'bf':
string_g= "BF: BaseCDModel" + "\n" + "Gaussian ($\sigma$=%g'')* Pixel (0.11 arcsec/pix), no noise." %(gal_sigma)
string_o= "BF: BaseCDModel" + "\n" + "Optical (tel_diam=%g m, obscuration=%g) * Pixel (0.11 arcsec/pix), no noise. "%(tel_diam, obscuration_optical)
elif type == 'nl':
string_g= r"Non-linearity: $f=x-\beta x^{2}$ " + "\n" + "Gaussian ($\sigma$=%g'') * Pixel (0.11 arcsec/pix), no noise." %(gal_sigma)
string_o= r"Non-linearity: $f=x-\beta x^{2}$ " + "\n" + "Optical (tel_diam=%g m, obscuration=%g) * Pixel (0.11 arcsec/pix), no noise. "%(tel_diam, obscuration_optical)
else:
print "invalid type (nor 'bf' nor 'nl')"
sys.exit(1)
# + r"($\beta$=%g)" %(beta0)
def get_slope (x, y):
fitfunc = lambda p, x: p[0]*x
errfunc = lambda p, x, y: fitfunc(p, x) - y
p0 = [1.]
p1, success = optimize.leastsq(errfunc, p0[:], args=(x,y))
print 'slope:', p1[0]
return p1[0]
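# get_slope fits y = p*x (a line through the origin) by least squares, e.g.
# get_slope(np.array([1., 2., 3.]), np.array([2., 4., 6.])) returns ~2.0; it is
# used below to express the ellipticity and size biases per unit beta.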
dic_list=[optical_no_noise]
e_vec_temp=[]
for var in e_vec:
e_vec_temp.append(var[0])
for dictionary in dic_list:
beta_vec=np.array(beta_vec)
slope_dict={}
for lam in wavelength_dict:
slope_dict[lam] =[[],[]] #slope_e1, slope_r
## Gaussian no noise, Delta_e, one filter
fig = plt.figure()
for lam in wavelength_dict:
for e in e_vec:
slope_e1=get_slope (beta_vec, dictionary[lam][e][0]) #delta_e1
slope_dict[lam][0].append(slope_e1)
slope_r=get_slope (beta_vec, dictionary[lam][e][2]) #delta_r
slope_dict[lam][1].append(slope_r)
for lam in wavelength_dict:
print "lam", lam
plot_function_e_and_r (fig, e_vec_temp, slope_dict[lam][0] , e_vec_temp, slope_dict[lam][1], xlabel1='$e_1$', xlabel2='$e_2$', ylabel1=r"$\Delta e$/$\beta$", ylabel2=r"$\Delta R/R/\beta$", lam_key=lam, e_key=e, label_bool=True)
plt.suptitle(string_o, fontsize=11)
fig.tight_layout()
plt.subplots_adjust(top=0.85)
pp.savefig(fig)
plt.close()
"""
## Gaussian no noise, Delta_e, all filters
fig = plt.figure()
for lam in wavelength_dict:
for e in e_vec: # One single value
for m_gal in m_gal_vec:
plot_function_e (fig, beta_vec , gauss_no_noise[lam][m_gal][0],gauss_no_noise[lam][m_gal][1], xlabel=r"$\beta$", lam_key=lam, e_key=e, m_key=m_gal)
string="Gaussian($\sigma$=%g'')*Pixel, no noise. " %(gal_sigma) +r"$f=x-\beta x^{2}$"+"\n Object flux: gal_flux=6e4*2.521**(%g-%g)" %( m_zero, m_gal)
plt.suptitle(string, fontsize=11)
fig.tight_layout()
plt.subplots_adjust(top=0.85)
pp.savefig(fig)
## Gaussian no noise, Delta_R/R, all filters
fig = plt.figure()
for lam in wavelength_dict:
for e in e_vec: # One single value
for m_gal in m_gal_vec:
plot_function_r (fig, beta_vec , gauss_no_noise[lam][m_gal][2], xlabel=r"$\beta$", lam_key=lam, e_key=e, m_key=m_gal)
string="Gaussian($\sigma$=%g'')*Pixel, no noise. " %(gal_sigma) +r"$f=x-\beta x^{2}$"+"\n Object flux: gal_flux=6e4*2.521**(%g-%g)" %( m_zero, m_gal)
plt.suptitle(string, fontsize=11)
fig.tight_layout()
plt.subplots_adjust(top=0.85)
pp.savefig(fig)
## Optical no noise, Delta_e, all filters
fig = plt.figure()
for lam in wavelength_dict:
for e in e_vec: # One single value
for m_gal in m_gal_vec:
plot_function_e (fig, beta_vec, optical_no_noise[lam][m_gal][0], optical_no_noise[lam][m_gal][1], xlabel=r"$\beta$", lam_key=lam, e_key=e, m_key=m_gal)
string="Optical(tel_diam=%g m)*Pixel, no noise. "%(tel_diam) + r"$f=x-\beta x^{2}$," +"\n Object flux: gal_flux=6e4*2.521**(%g-%g)" %(m_zero, m_gal)
plt.suptitle(string, fontsize=11)
fig.tight_layout()
plt.subplots_adjust(top=0.85)
pp.savefig(fig)
## Optical no noise, Delta_R/R, all filters
fig = plt.figure()
for lam in wavelength_dict:
for e in e_vec: # One single value
for m_gal in m_gal_vec:
plot_function_r (fig, beta_vec , optical_no_noise[lam][m_gal][2], xlabel=r"$\beta$", lam_key=lam, e_key=e, m_key=m_gal, type='o')
string="Optical(tel_diam=%g m)*Pixel, no noise. "%(tel_diam) + r"$f=x-\beta x^{2}$," + "\n Object flux: gal_flux=6e4*2.521**(%g-%g)" %(m_zero, m_gal)
plt.suptitle(string, fontsize=11)
fig.tight_layout()
plt.subplots_adjust(top=0.85)
pp.savefig(fig)
"""
"""
fig=plt.figure()
for e in e_vec:
for m_gal in m_gal_vec:
plot_function (fig,beta_vec, gauss_noise[m_gal][0],gauss_noise[m_gal][1],gauss_noise[m_gal][2], string="Gaussian*Pixel, noise. " +r"$f=x-\beta x^{2}$", xlabel=r"$\beta$", e_key=e, m_key=m_gal)
pp.savefig(fig)
fig=plt.figure()
for e in e_vec:
for m_gal in m_gal_vec:
plot_function (fig, beta_vec, optical_no_noise[m_gal][0], optical_no_noise[m_gal][1], optical_no_noise[m_gal][2], string="Optical($\lambda$=%g nm, tel_diam=%g m)*Pixel, no noise. "%(lam,tel_diam) +r"$f=x-\beta x^{2}$" , xlabel=r"$\beta$", e_key=e, m_key=m_gal)
pp.savefig(fig)
fig=plt.figure()
for e in e_vec:
for m_gal in m_gal_vec:
plot_function (fig, beta_vec, optical_noise[m_gal][0],optical_noise[m_gal][1],optical_noise[m_gal][2], string="Optical*Pixel, noise. " +r"$f=x-\beta x^{2}$" , xlabel=r"$\beta$", e_key=e, m_key=m_gal)
pp.savefig(fig)
"""
pp.close()
# === googleapis/artman :: noxfile.py (license: apache-2.0) ===
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import nox
import os
@nox.session(python=['3.4', '3.5', '3.6', '3.7'])
def unit_tests(session):
"""Run the unit test suite."""
# Install all test dependencies, then install this package in-place.
session.install('mock', 'pytest', 'pytest-cov', 'pyfakefs',
'restructuredtext_lint')
session.install('-e', '.')
# Run py.test against the unit tests.
session.run('py.test', '-rxs', '--cov', '--cov-append', '--cov-report=')
@nox.session(python='3.6')
def lint(session):
"""Run the linter."""
session.install('flake8')
session.run('flake8', '--max-complexity=8', 'artman',
'--exclude=test/output', 'test')
@nox.session(python='3.6')
def coverage(session):
"""Provide a coverage report."""
session.install('coverage')
try:
# session.run('coverage', 'report')
session.run('coverage', 'html')
finally:
session.run('coverage', 'erase')
@nox.session(python='3.6')
def docs(session):
"""Build the docs."""
# Install Sphinx and also all of the google-cloud-* packages.
session.chdir(os.path.realpath(os.path.dirname(__file__)))
session.install('setuptools >= 36.4.0', 'sphinx >= 1.6.3', '.')
# Build the docs!
session.run('rm', '-rf', 'docs/_build/')
session.run('sphinx-build', '-W', '-b', 'html', '-d',
'docs/_build/doctrees', 'docs/', 'docs/_build/html/')
# === IDNoise/NoiseIDE :: NoiseIDEPython/idn_mainframe.py ===
import ide_migrations
from idn_erlang_constats import SINGLE_APP_PROJECT, MULTIPLE_APP_PROJECT, CONFIG_PROJECT_TYPE
__author__ = 'Yaroslav Nikityshev aka IDNoise'
import sys
import urllib2
from wx.lib.dialogs import MultiMessageDialog
import yaml
from idn_erlang_dialogs import ErlangOptionsDialog
from idn_erlang_project import ErlangProject, LoadProject
from idn_erlang_project_form import ErlangProjectFrom
import idn_installer
from idn_utils import Menu, GetImage, readFile, writeBinaryFile, Timer, CreateButton, CreateLabel
from idn_shortcut_window import ShortcutWindow
import os
import wx
from wx.lib.agw import aui
from idn_colorschema import ColorSchema
from idn_winmanager import Manager
from idn_notebook import Notebook, EditorNotebook
from idn_config import Config, ConfigEditForm
import core
from idn_project import Project
import traceback
import asyncore
from threading import Thread, Event
#lists:flatten(edoc:read("d:/projects/noiseide/noiseidepython/data/erlang/modules/noiseide/src/test_cache_module.erl")).
installNewVersion = False
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
class AsyncoreThread(Thread):
def __init__(self):
Thread.__init__(self)
self.active = Event()
def Start(self):
self.active.set()
self.start()
def Stop(self):
self.active.clear()
def run(self):
while self.active.is_set():
asyncore.poll(0.1)
class NoiseIDE(wx.Frame):
def __init__(self, *args, **kwargs):
wx.Frame.__init__(self, None, wx.ID_ANY, 'Noise IDE', size = (1680, 900), pos = (10, 10))
self.asyncthread = AsyncoreThread()
self.asyncthread.Start()
self.cwd = os.getcwd()
core.MainFrame = self
wx.ToolTip.SetMaxWidth(600)
self.Maximize()
Config.load()
ColorSchema.load(Config.ColorSchema())
icon = wx.Icon('data/images/icon.png', wx.BITMAP_TYPE_PNG, 16, 16)
self.SetIcon(icon)
self.explorer = None
self.project = None
agwFlags = aui.AUI_MGR_DEFAULT | aui.AUI_MGR_AUTONB_NO_CAPTION | aui.AUI_MGR_LIVE_RESIZE
self.WinMgr = Manager(self, agwFlags = agwFlags )
self.SetupSimpleMenu()
self.CreateToolBar()
Project.TYPE_PROJECT_DICT["erlang"] = ErlangProject
self.TabMgr = EditorNotebook(self)
self.TabMgr.SetArtProvider(aui.VC71TabArt())
self.TabMgrPaneInfo = aui.AuiPaneInfo().Center()\
.MaximizeButton().MinimizeButton().CaptionVisible(False)\
.CloseButton(False).Floatable(False).MinSize(100, 100)
self.WinMgr.AddPane1(self.TabMgr, self.TabMgrPaneInfo )
self.ToolMgr = Notebook(self)
self.ToolMgrPaneInfo = aui.AuiPaneInfo().Bottom()\
.MaximizeButton().MinimizeButton().CloseButton(False).Floatable(False)\
.BestSize(400, 300).MinSize(100, 100).Name("Tools").Caption("Tools").CaptionVisible(True)\
.MinimizeMode(aui.AUI_MINIMIZE_POS_BOTTOM | aui.AUI_MINIMIZE_CAPT_SMART)
self.WinMgr.AddPane1(self.ToolMgr, self.ToolMgrPaneInfo)
core.TabMgr = self.TabMgr
core.ToolMgr = self.ToolMgr
core.WinMgr = self.WinMgr
self.WinMgr.Update()
self.Bind(wx.EVT_CLOSE, self.OnClose)
self.WinMgr.Update()
def newVersionChecker():
self.OnHelpCheckForUpdates(None, True)
self.autoCheckTimer = Timer(600, newVersionChecker)
self.autoCheckTimer.Start()
args = sys.argv[1:]
wx.CallAfter(self.OnAfterLoad, args)
def async(self):
while True:
asyncore.loop()
def OnAfterLoad(self, args):
newVersion = Config.GetCurrentVersion()
current = Config.GetProp("current_version", 0)
if current < newVersion:
self.apply_migration(current)
Config.SetProp("current_version", newVersion)
if args:
path = args[0]
if os.path.isfile(path) and path.endswith(".noiseide"):
self.OpenProject(path)
return
self.TryLoadLastProject()
def apply_migration(self, version):
for v, fun in ide_migrations.MIGRATIONS.items():
if v > version:
fun()
def SetupSimpleMenu(self):
self.menubar = wx.MenuBar()
self.fileMenu = Menu()
self.fileMenu.AppendMenuItem('New project', self, self.OnNewErlangProject)
self.fileMenu.AppendMenuItem('Open Project', self, self.OnOpenProject)
if Config.LastProjects():
lastProjects = Menu()
self.fileMenu.AppendMenu(wx.ID_ANY, "Recent projects", lastProjects)
def handler(p):
return lambda e: self.OpenProject(p)
for p in reversed(Config.LastProjects()):
projectData = yaml.load(file(p, 'r'))
lastProjects.AppendMenuItem(projectData[Project.CONFIG_PROJECT_NAME], self, handler(p))
self.fileMenu.AppendSeparator()
self.fileMenu.AppendMenuItem('User Settings', self, self.OnEditOptions)
self.fileMenu.AppendSeparator()
self.fileMenu.AppendMenuItem('Quit', self, self.OnQuit)
self.menubar.Append(self.fileMenu, '&File')
languagesMenu = Menu()
self.menubar.Append(languagesMenu, "&Languages")
erlangMenu = Menu()
languagesMenu.AppendMenu(wx.ID_ANY, 'Erlang', erlangMenu)
erlangMenu.AppendMenuItem("Options", self, lambda e: self.SetupRuntimes())
helpMenu = Menu()
helpMenu.AppendMenuItem("Shorcuts", self, self.OnHelpShortcuts)
helpMenu.AppendMenuItem("Check for updates", self, self.OnHelpCheckForUpdates)
helpMenu.AppendMenuItem("About", self, self.OnHelpAbout)
self.menubar.Append(helpMenu, '&Help')
self.SetMenuBar(self.menubar)
def SetupProjectMenu(self):
self.menubar = wx.MenuBar()
self.fileMenu = Menu()
projectsMenu = Menu()
projectsMenu.AppendMenuItem('Erlang', self, self.OnNewErlangProject)
self.fileMenu.AppendMenu(wx.ID_ANY, "New project", projectsMenu)
self.fileMenu.AppendMenuItem('Open Project', self, self.OnOpenProject)
if Config.LastProjects():
lastProjects = Menu()
self.fileMenu.AppendMenu(wx.ID_ANY, "Recent projects", lastProjects)
def handler(p):
return lambda e: self.OpenProject(p)
for p in Config.LastProjects():
projectData = yaml.load(file(p, 'r'))
lastProjects.AppendMenuItem(projectData[Project.CONFIG_PROJECT_NAME], self, handler(p))
self.fileMenu.AppendSeparator()
self.fileMenu.AppendMenuItem('User Settings', self, self.OnEditOptions)
self.fileMenu.AppendSeparator()
self.fileMenu.AppendMenuItem('Quit', self, self.OnQuit)
self.menubar.Append(self.fileMenu, '&File')
self.editorMenu = Menu()
self.menubar.Append(self.editorMenu, '&Edit')
self.projectMenu = Menu()
self.menubar.Append(self.projectMenu, '&Project')
languagesMenu = Menu()
self.menubar.Append(languagesMenu, "&Languages")
erlangMenu = Menu()
languagesMenu.AppendMenu(wx.ID_ANY, 'Erlang', erlangMenu)
erlangMenu.AppendMenuItem("Options", self, lambda e: self.SetupRuntimes())
erlangMenu.AppendSeparator()
self.erlangMenu = erlangMenu
self.viewMenu = Menu()
self.viewMenu.AppendCheckMenuItem('Show white space', self, self.OnMenuShowWhiteSpace, Config.GetProp("show_white_space", False))
self.viewMenu.AppendCheckMenuItem('Show EOL', self, self.OnMenuShowEOL, Config.GetProp("show_eol", False))
self.viewMenu.AppendSeparator()
self.menubar.Append(self.viewMenu, "&View")
helpMenu = Menu()
helpMenu.AppendMenuItem("Shorcuts", self, self.OnHelpShortcuts)
helpMenu.AppendMenuItem("Check for updates", self, self.OnHelpCheckForUpdates)
helpMenu.AppendMenuItem("About", self, self.OnHelpAbout)
self.menubar.Append(helpMenu, '&Help')
self.SetMenuBar(self.menubar)
def OnHelpShortcuts(self, event):
ShortcutWindow(self).Show()
def OnHelpCheckForUpdates(self, event, auto = False):
try:
if not auto:
self.SetCursor(wx.StockCursor(wx.CURSOR_WAIT))
version = Config.GetCurrentVersion()
revfile = urllib2.urlopen("https://dl.dropbox.com/s/8hg4b53tugmgbe0/rev.cfg")
newData = revfile.read()
newVersion = float(newData.split("\n")[0].split(":")[1].strip())
self.autoCheckTimer.Stop()
if newVersion > version:
dial = MultiMessageDialog(self,
'There is new version {} available. Current version is {}. Do you want to install?'.format(newVersion, version),
msg2 = 'Changelog:\n\n' + newData,
caption = 'New version {} available'.format(newVersion),
style = wx.YES_NO | wx.ICON_QUESTION)
if dial.ShowModal() == wx.ID_YES:
progressDialog = wx.ProgressDialog("Autoupdater", "Downloading installer...", parent = self, style = wx.PD_APP_MODAL | wx.PD_ELAPSED_TIME | wx.PD_AUTO_HIDE)
progressDialog.Show()
installDir = os.path.join(core.TempDir(), "installer")
if not os.path.isdir(installDir):
os.mkdir(installDir)
installerFileName = os.path.join(installDir, "NoiseIDE.msi")
installerFile = open(installerFileName, 'wb')
dataFile = urllib2.urlopen("https://dl.dropbox.com/s/u1esqq4h68qufcz/NoiseIDE.msi")
meta = dataFile.info()
fileSize = int(meta.getheaders("Content-Length")[0])
fileSizeDl = 0
block_sz = 8192
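# Stream the installer in 8 KB chunks, updating the modal progress dialog with
# the fraction of Content-Length downloaded so far.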
while True:
buf = dataFile.read(block_sz)
if not buf:
break
fileSizeDl += len(buf)
installerFile.write(buf)
newValue = int(float(fileSizeDl) / float(fileSize) * 100)
progressDialog.Update(newValue)
installerFile.close()
writeBinaryFile(os.path.join(installDir, "rev.cfg"), newData)
#idn_installer.Decompress(installerFileName)
global installNewVersion
installNewVersion = True
self.Close()
elif not auto:
wx.MessageBox("You have last version", "Check new version result")
except Exception, e:
core.Log("Update error", e)
if not auto:
wx.MessageBox("Update check error. Check log for info", "Check new version result")
if not auto:
self.SetCursor(wx.StockCursor(wx.CURSOR_DEFAULT))
def TryLoadLastProject(self):
dialog = HelloDialog(self)
dialog.Show()
#lastProject = Config.GetProp("last_project")
#if lastProject and os.path.isfile(lastProject):
#dial = wx.MessageDialog(None,
#'Do you want to open last project {}?'.format(os.path.basename(lastProject)),
#'Last project',
#wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION)
#if dial.ShowModal() == wx.ID_YES:
#self.OpenProject(lastProject)
def CreateToolBar(self):
self.toolbar = wx.Frame.CreateToolBar(self)
self.navBackT = self.toolbar.AddLabelTool(wx.ID_ANY, 'Navigate Back', GetImage('navigateBack.png'), shortHelp = 'Navigate Back')
self.navForwardT = self.toolbar.AddLabelTool(wx.ID_ANY, 'Navigate Forward', GetImage('navigateForward.png'), shortHelp = 'Navigate Forward')
self.Bind(wx.EVT_TOOL, lambda e: self.TabMgr.NavigateBack(), self.navBackT)
self.Bind(wx.EVT_TOOL, lambda e: self.TabMgr.NavigateForward(), self.navForwardT)
self.toolbar.Realize()
def OnMenuShowWhiteSpace(self, event):
newValue = not Config.GetProp("show_white_space", False)
Config.SetProp("show_white_space", newValue)
for editor in self.TabMgr.Pages():
editor.UpdateOptions()
def OnMenuShowEOL(self, event):
newValue = not Config.GetProp("show_eol", False)
Config.SetProp("show_eol", newValue)
for editor in self.TabMgr.Pages():
editor.UpdateOptions()
def OnEditOptions(self, event):
form = ConfigEditForm()
form.ShowModal()
def OnHelpAbout(self, event):
wx.MessageBox("IDE for Erlang programming language.\nMade by Yaroslav 'IDNoise' Nikityshev.", "Noise IDE v {}".format(Config.GetCurrentVersion()))
def MenuBar(self):
return self.menubar
def OnOpenProject(self, event):
dialog = wx.FileDialog(
self,
message = "Select project",
wildcard = "*.noiseide",
style = wx.FD_OPEN | wx.FD_FILE_MUST_EXIST
)
if dialog.ShowModal() == wx.ID_OK:
filePath = dialog.GetPath()
self.OpenProject(filePath)
dialog.Destroy()
def OpenProject(self, projectFile):
if self.project:
self.project.Close()
projectFile = os.path.normpath(projectFile)
Config.SetProp("last_project", projectFile)
projects = Config.LastProjects()
if projectFile in projects:
projects.remove(projectFile)
projects.append(projectFile)
Config.SetLastProjects(projects)
self.SetupProjectMenu()
self.project = LoadProject(projectFile)
self.SetTitle(self.project.ProjectName() + " - " + "Noise IDE")
def OnNewErlangProject(self, event):
self.CheckRuntimes()
dlg = wx.SingleChoiceDialog(self, "Select project type", "Project type", [SINGLE_APP_PROJECT, MULTIPLE_APP_PROJECT])
if dlg.ShowModal() == wx.ID_CANCEL:
return
ErlangProjectFrom(None, dlg.GetStringSelection()).ShowModal()
def OnClose(self, event):
self.asyncthread.Stop()
if self.project:
self.project.Close()
Config.save()
self.autoCheckTimer.Stop()
for wnd in wx.GetTopLevelWindows():
if wnd != self:
wnd.Close(True)
event.Skip()
def OnQuit(self, event):
self.Close(True)
def CheckRuntimes(self):
#Config.SetProp(Config.RUNTIMES, availableRuntimes)
while not Config.AvailableRuntimes():
if not Config.Runtimes():
wx.MessageBox("Add at least one erlang runtime!", "Error")
else:
wx.MessageBox("Specify at least one proper path to erl executable.", "Error")
self.SetupRuntimes(True)
def SetupRuntimes(self, atLeastOneRequired = False):
dlg = ErlangOptionsDialog(self, atLeastOneRequired)
dlg.ShowModal()
class HelloDialog(wx.Dialog):
def __init__(self, parent):
wx.Dialog.__init__(self, parent, title = "Hello, " + Config.GetProp(Config.USER_NAME),
style = wx.DEFAULT_DIALOG_STYLE)
self.recentLB = wx.ListCtrl(self, -1, style = wx.LC_REPORT | wx.LC_NO_HEADER | wx.LC_ALIGN_LEFT)
self.recentLB.SetMinSize((200, 200))
self.recentLB.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnClickRecent)
self.recentLB.InsertColumn(0, "data")
self.recentLB.SetColumnWidth(0, 200)
i = 0
self.navigation = {}
for p in reversed(Config.LastProjects()):
projectData = yaml.load(file(p, 'r'))
self.recentLB.InsertStringItem(i, projectData[Project.CONFIG_PROJECT_NAME])
self.navigation[i] = p
i += 1
self.createNewB = CreateButton(self, "New Project", self.OnCreateNew)
self.createNewB.MinSize = (100, 30)
self.openOtherB = CreateButton(self, "Open Project", self.OnOpenOther)
self.openOtherB.MinSize = (100, 30)
gSizer = wx.GridBagSizer(2, 2)
gSizer.Add(CreateLabel(self, "Open recent:"), (0, 0), flag = wx.ALL | wx.ALIGN_CENTER, border = 4)
gSizer.Add(self.recentLB, (1, 0), (4, 1), flag = wx.ALL | wx.ALIGN_CENTER | wx.EXPAND, border = 4)
gSizer.Add(CreateLabel(self, "Create new:"), (0, 1), flag = wx.ALL | wx.ALIGN_CENTER, border = 4)
gSizer.Add(self.createNewB, (1, 1), flag = wx.ALL | wx.ALIGN_TOP, border = 4)
gSizer.Add(CreateLabel(self, "Open other:"), (2, 1), flag = wx.ALL | wx.ALIGN_CENTER, border = 4)
gSizer.Add(self.openOtherB, (3, 1), flag = wx.ALL | wx.ALIGN_TOP, border = 4)
self.SetSizer(gSizer)
self.Layout()
gSizer.SetSizeHints(self)
def OnCreateNew(self, event):
self.Close()
self.Parent.OnNewErlangProject(None)
def OnClickRecent(self, event):
project = self.navigation[event.GetIndex()]
self.Parent.OpenProject(project)
self.Close()
def OnOpenOther(self, event):
self.Close()
self.Parent.OnOpenProject(None)
class App(wx.App):
def __init__(self):
wx.App.__init__(self, redirect=False)
self.SetAppName("NoiseIDE")
core.App = self
wx.Log.SetLogLevel(0)
frame = NoiseIDE()
frame.Show()
if __name__ == '__main__':
def excepthook(type, value, tb):
message = 'Uncaught exception:\n'
message += ''.join(traceback.format_exception(type, value, tb))
core.Log(message)
sys.excepthook = excepthook
def main():
app = App()
app.MainLoop()
try:
main()
if installNewVersion:
core.Log("start install")
import pythoncom
import win32com
import win32com.client
command = os.path.join(core.TempDir(), "installer", "NoiseIDE.msi")
if sys.platform == "win32":
pythoncom.CoInitialize()
shell = win32com.client.Dispatch('WScript.Shell')
shell.Run(command, 1, False)
pythoncom.CoUninitialize()
except Exception, e:
core.Log("app error" + str(e))
| gpl-2.0 | 1,234,255,979,040,456,200 | 36.896694 | 176 | 0.615364 | false |
stevecrozz/optionshouse-api-client | tests/response_fixture.py | 1 | 14015 | AUTH_LOGIN_RESPONSE = {
"EZMessage":{
"data":{
"lastName":"Doe",
"funded":"true",
"nasdaq":{
"professional":"false",
"agree":"true"
},
"authToken":"b7b426ef-8e20-46f1-bad6-1244f0b6de02",
"nyse":{
"professional":"false",
"agree":"true"
},
"opera":{
"professional":"false",
"agree":"true"
},
"delayedQuotes":"true",
"firstName":"John",
"access":"granted",
"requiresAccountCreation":"false",
"professional":"false",
},
"action":"auth.login"
}
}
KEEPALIVE_RESPONSE = {}
AUTH_LOGOUT_RESPONSE = {
"EZMessage":{
"action":"auth.logout",
"data":{
"authToken":"",
"logout":"complete",
}
}
}
ACCOUNT_INFO_RESPONSE = {
"EZMessage":{
"data":{
"account":[
{
"canChangeCommissionSchedule":"true",
"accountId":"1711105",
"nextCommissionSchedule":"COMMISSION_I",
"isVirtual":"false",
"riskMaxDollarsPerOrder":"1000000",
"riskMaxSharesPerOrder":"50000.0",
"accountDesc":"Roth IRA",
"accountName":"Peak6, Corporate",
"yearAccountOpened":"2007",
"riskMaxContractsPerOrder":"4001.0",
"optionsWarning":"false",
"partnerCode":"OH",
"accountType":"ROTH_IRA",
"account":"77959070",
"canAccountACH":"false",
"accountTypeId":"5",
"currentCommissionSchedule":"COMMISSION_I"
},
{
"canChangeCommissionSchedule":"true",
"accountId":"1761020",
"nextCommissionSchedule":"COMMISSION_II",
"isVirtual":"true",
"riskMaxDollarsPerOrder":"1000000",
"riskMaxSharesPerOrder":"50000.0",
"accountDesc":"Individual",
"accountName":"V_R1796",
"yearAccountOpened":"2007",
"riskMaxContractsPerOrder":"4001.0",
"optionsWarning":"false",
"accountType":"INDIVIDUAL",
"account":"V_R1796",
"accountTypeId":"0",
"canAccountACH":"false",
"currentCommissionSchedule":"COMMISSION_II"
}
],
"login":{
"lastName":"Doe",
"accountMode":"single",
"rfqWarning":"false",
"toolsWarning":"true",
"loginCount":"151",
"firstName":"John",
"toolsWarningVersion":"4",
"defaultSymbol":"IBM",
"uiMode":"retail"
},
"inactivityTimeout":"60",
"requiresAccountCreation":"false"
},
"action":"account.info"
}
}
ACCOUNT_CASH_RESPONSE = {
"EZMessage":{
"data":{
"marginEquity":"4241.02",
"accountValueYearToDate":"-7233.10",
"availableToWithdraw":"0.00",
"accountValueMonthToDate":"-7233.10",
"pendingOrders":"0.00",
"accountValueDailyChange":"77.33",
"dayTradingBuyPower":"0.00",
"accountValue":"6212.02",
"optionBuyingPower":"555.37",
"stockBuyingPower":"1110.74",
"cashBalance":"-1713.33",
"availableToTrade":"-1713.33",
"portfolioValue":"7925.35"
},
"action":"account.cash"
}
}
VIEW_QUOTE_LIST_RESPONSE = {
"EZMessage":{
"action":"view.quote.list",
"data":{
"session":"open",
"quote":[
{
"key":"MARKETSTATUS",
"marketClosed":False
},
{
"key":"IBM:::S",
"symbol":"IBM",
"bid":163.11000061035156,
"ask":163.41000366210938,
"isExchangeDelayed":False,
"volume":3901100,
"mark":163.260009765625,
"dailyChange":-0.2599945068359375,
"stockLast":163.3000030517578,
"low":162.61000061035156,
"high":163.60000610351562,
"open":163.39999389648438,
"extClose":"163.30",
"extChangeAmount":"0.0",
"extChangePercent":"0.0",
"extChangeTime":"03:23 PM CST Feb 02 2011",
"prevClose":163.55999755859375,
"last":"163.30",
"change":"-0.2599945",
"changePercent":"-0.15895972",
"bidSize":3,
"askSize":1,
"exchange":"NYSE",
"hasDividends":True,
"divConfirm":True,
"divAmount":0.6499999761581421,
"exDate":"Tue Feb 08 00:00:00 CST 2011",
"hasEarnings":False,
"earningsConfirm":False
},
{
"key":"QQQQ:::S",
"symbol":"QQQQ",
"bid":56.939998626708984,
"ask":56.959999084472656,
"isExchangeDelayed":False,
"volume":48649000,
"mark":56.94999694824219,
"dailyChange":-0.07999801635742188,
"stockLast":56.970001220703125,
"low":56.84000015258789,
"high":57.16999816894531,
"open":56.91999816894531,
"extClose":"56.97",
"extChangeAmount":"0.0",
"extChangePercent":"0.0",
"extChangeTime":"05:05 PM CST Feb 02 2011",
"prevClose":57.04999923706055,
"last":"56.97",
"change":"-0.07999802",
"changePercent":"-0.1402244",
"bidSize":39,
"askSize":145,
"exchange":"NASDAQ",
"hasDividends":False,
"divConfirm":False,
"hasEarnings":False,
"earningsConfirm":False
"avg10dayVolume":"3816541",
"week52high":148.86,
"peRatio":”13.5”,
"dividendYield":"1.758",
"dividendDate":"2010/11/08"
},
{
"key":"EVEP:::S",
"symbol":"EVEP",
"isExchangeDelayed":False,
"volume":320400,
"mark":44.150001525878906,
"dailyChange":-0.20000076293945312,
"stockLast":44.130001068115234,
"low":44.09000015258789,
"high":44.9900016784668,
"open":44.75,
"last":"44.13",
"change":"-0.20000076",
"changePercent":"-0.45116344",
"hasDividends":True,
"divConfirm":True,
"divAmount":0.7599999904632568,
"exDate":"Thu Feb 03 00:00:00 CST 2011",
"hasEarnings":False,
"earningsConfirm":False
}
]
}
}
}
VIEW_SERIES_RESPONSE = {
"EZMessage":{
"action":"view.series",
"data":{
"s":[
{
"e":"Jul 11",
"k":[
"A:20110716:300000:C",
"A:20110716:300000:P",
"A:20110716:330000:C",
"A:20110716:330000:P",
"A:20110716:350000:C",
"A:20110716:350000:P",
"A:20110716:380000:C",
"A:20110716:380000:P",
"A:20110716:400000:C",
"A:20110716:400000:P",
"A:20110716:430000:C",
"A:20110716:430000:P",
"A:20110716:450000:C",
"A:20110716:450000:P",
"A:20110716:460000:C",
"A:20110716:460000:P",
"A:20110716:470000:C",
"A:20110716:470000:P",
"A:20110716:480000:C",
"A:20110716:480000:P",
"A:20110716:490000:C",
"A:20110716:490000:P",
"A:20110716:500000:C",
"A:20110716:500000:P",
"A:20110716:525000:C",
"A:20110716:525000:P",
"A:20110716:550000:C",
"A:20110716:550000:P",
"A:20110716:575000:C",
"A:20110716:575000:P",
"A:20110716:600000:C",
"A:20110716:600000:P",
"A:20110716:650000:C",
"A:20110716:650000:P",
"A:20110716:700000:C",
"A:20110716:700000:P",
"A:20110716:750000:C",
"A:20110716:750000:P"
]
}
],
"q":"51.01"
}
}
}
ACCOUNT_MARGIN_JSON_RESPONSE = {
"EZMessage":{
"action":"account.margin.json",
"data":{
"dayTradesActualPrior":"0",
"patternDayTrader":"false",
"noDatabaseRecord":"true",
"dayTradesActualAfterOrder":"0",
"marginChange":"-204.32",
"orgSma":"555.37",
"fudgeFactor":"0.00",
"stockBuyingPower":"322.14",
"optionBuyingPower":"161.07",
"commission":[
{
"m_order_id":"1",
"commission":"2.95",
"fee":{
"CBOE":"0.00",
"PENNY_STOCK":"0.00",
"INDEX":"0.00",
"SEC":"0.02",
"OCC":"0.00",
"TAF":"0.01",
"DIRECTED_ORDER":"0.00"
}
}
]
}
}
}
ORDER_CREATE_JSON_RESPONSE = {
"EZMessage":{
"action":"order.create.json",
"data":{
"created":"true",
"id":"152325619"
}
}
}
ORDER_MODIFY_JSON_RESPONSE = {
"EZMessage":{
"action":"order.modify.json",
"data":{
"modified":"true",
"newId":"152325620"
}
}
}
ORDER_CANCEL_JSON_RESPONSE = {
"EZMessage":{
"action":"order.cancel.json",
"data":{
"canceled":"true",
"id":"152325620"
}
}
}
MASTER_ACCOUNT_ORDERS_RESPONSE = {
"EZMessage":{
"action":"master.account.orders",
"data":{
"timestamp":"2011-02-01 12:00:32.460107",
"response_type":"json",
"master_account_orders":{
"page":0,
"page_size":30,
"total_records":23,
"records":[
{
"order_id":29638848,
"message":"Click for details",
"time_in_force":"day",
"quantity":5,
"fill_quantity":"",
"transaction":"Buy To Open",
"short_description":"IBM Stock",
"long_description":"IBM Stock",
"status":"Canceled",
"date_created_ms":1296537096000,
"last_updated_ms":1296537106000,
"date_created":"2011-01-31 23:11:36",
"last_updated":"2011-01-31 23:11:46",
"master_order_id":152325506,
"order_type":"regular",
"price_type":"limit",
"price":161,
"trigger_or der":False,
"trailing_stop_order":False,
"complex_order":False,
"modifiable":False,
"root_order_id":152325506,
"is_spread_order":False,
"is_mutual_fund_order":False,
"underlying_stock_symbol":"IBM",
"timestamp":"2011-0 2-01 12:00:32",
"has_expired_keys":False,
"security_keys":"IBM:::S"
}
]
}
}
}
}
ACCOUNT_POSITIONS_RESPONSE = {
"EZMessage":{
"data":{
"unified":[
{
"accountId":"1761020",
"shareCostBasis":"3.6",
"isCustomCostBasis":"false ",
"expString":"Jul 11",
"stock":"47.98",
"description":"BP Jul 11 46.00 Call",
"defaultCostBasis":"720",
"isExchangeDelayed":"false",
"underlying":"BP",
"spc":"100",
"bid":"4.0",
"securityKey":"BP:20110716:460000:C",
"qty":"2",
"dailyChange":"0.03",
"multiplier":"100.0",
"gain":"90",
"sortHint":[
"BP201107160000460000C",
"BP201107160000460000C"
],
"mktVal":"810",
"posValChange":"5",
"price":"4.05",
"st rikeString":"46.00",
"canExercise":"true",
"costBasis":"720",
"positionNewToday":"false",
"ask":"4.1"
}
],
"timeStamp":"1296583232029"
},
"action":"account.positions"
}
}
ACCOUNT_ACTIVITY_RESPONSE = {
"EZMessage":{
"data":{
"total":"2",
"timeStamp":"1296583401513",
"activity":[
{
"activityDateStr":"2011/01/31 ",
"price":"162.0",
"accountId":"1761020",
"symbol":"IBM",
"transaction":"BTO",
"description":"IBM Stock",
"qty":"5.0",
"netAmount":"-810.00"
},
{
"activityDateStr":"2011/01/10",
"com":"8.8",
"price":"3.6",
"acco untId":"1761020",
"symbol":"BP",
"transaction":"BTO",
"description":"BP Jul 11 46.00 Call",
"qty":"2.0",
"netAmount":"-728.80"
}
]
},
"action":"account.activity"
}
}
| gpl-3.0 | 8,688,621,776,684,135,000 | 29.525054 | 60 | 0.423025 | false |
mfitzp/smrtr | core/middleware.py | 1 | 1187 | from django.conf import settings
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.core.exceptions import PermissionDenied
from django.contrib.auth.views import login
from core.views import *
class RequireLoginMiddleware(object):
def __init__( self ):
# Root path for registration/accounts (/accounts)
self.loginpath = getattr( settings, 'LOGIN_PATH' )
# Login url (/accounts/login)
self.loginurl = getattr( settings, 'LOGIN_URL' )
def process_request( self, request ):
# Not logged in, and target path is not in login/registration path
if request.user.is_anonymous():
            if not (request.path.startswith(self.loginpath) or request.path == '/'):
if request.POST:
return login( request )
else:
return HttpResponseRedirect('%s?next=%s' % (self.loginurl, request.path))
def process_view(self, request, view_func, view_args, view_kwargs):
if request.user.is_anonymous() and view_func == home:
return intro( request )
| bsd-3-clause | 5,610,095,557,820,775,000 | 38.566667 | 93 | 0.622578 | false |
effigies/PySurfer | surfer/viz.py | 1 | 110278 | import os
from os.path import join as pjoin
from tempfile import mkdtemp
from warnings import warn
import numpy as np
from scipy import stats, ndimage, misc
from scipy.interpolate import interp1d
from matplotlib.colors import colorConverter
import nibabel as nib
from mayavi import mlab
from mayavi.tools.mlab_scene_model import MlabSceneModel
from mayavi.core import lut_manager
from mayavi.core.ui.api import SceneEditor
from mayavi.core.ui.mayavi_scene import MayaviScene
from traits.api import (HasTraits, Range, Int, Float,
Bool, Enum, on_trait_change, Instance)
from . import utils, io
from .utils import (Surface, verbose, create_color_lut, _get_subjects_dir,
string_types, assert_ffmpeg_is_available, ffmpeg)
import logging
logger = logging.getLogger('surfer')
lh_viewdict = {'lateral': {'v': (180., 90.), 'r': 90.},
'medial': {'v': (0., 90.), 'r': -90.},
'rostral': {'v': (90., 90.), 'r': -180.},
'caudal': {'v': (270., 90.), 'r': 0.},
'dorsal': {'v': (180., 0.), 'r': 90.},
'ventral': {'v': (180., 180.), 'r': 90.},
'frontal': {'v': (120., 80.), 'r': 106.739},
'parietal': {'v': (-120., 60.), 'r': 49.106}}
rh_viewdict = {'lateral': {'v': (180., -90.), 'r': -90.},
'medial': {'v': (0., -90.), 'r': 90.},
'rostral': {'v': (-90., -90.), 'r': 180.},
'caudal': {'v': (90., -90.), 'r': 0.},
'dorsal': {'v': (180., 0.), 'r': 90.},
'ventral': {'v': (180., 180.), 'r': 90.},
'frontal': {'v': (60., 80.), 'r': -106.739},
'parietal': {'v': (-60., 60.), 'r': -49.106}}
viewdicts = dict(lh=lh_viewdict, rh=rh_viewdict)
def make_montage(filename, fnames, orientation='h', colorbar=None,
border_size=15):
"""Save montage of current figure
Parameters
----------
filename : str
The name of the file, e.g, 'montage.png'. If None, the image
will not be saved.
fnames : list of str | list of array
The images to make the montage of. Can be a list of filenames
or a list of image data arrays.
orientation : 'h' | 'v' | list
The orientation of the montage: horizontal, vertical, or a nested
list of int (indexes into fnames).
colorbar : None | list of int
If None remove colorbars, else keep the ones whose index
is present.
border_size : int
The size of the border to keep.
Returns
-------
out : array
The montage image data array.
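    Examples
    --------
    A minimal usage sketch; the image filenames are placeholders for
    screenshots saved earlier from PySurfer figures:
    >>> out = make_montage('montage.png', ['lat.png', 'med.png'],
    ...                    orientation='h')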
"""
    try:
        from PIL import Image
    except ImportError:  # older standalone PIL installs expose Image directly
        import Image
# This line is only necessary to overcome a PIL bug, see:
# http://stackoverflow.com/questions/10854903/what-is-causing-
# dimension-dependent-attributeerror-in-pil-fromarray-function
fnames = [f if isinstance(f, string_types) else f.copy() for f in fnames]
if isinstance(fnames[0], string_types):
        images = list(map(Image.open, fnames))
    else:
        images = list(map(Image.fromarray, fnames))
# get bounding box for cropping
boxes = []
for ix, im in enumerate(images):
# sum the RGB dimension so we do not miss G or B-only pieces
gray = np.sum(np.array(im), axis=-1)
gray[gray == gray[0, 0]] = 0 # hack for find_objects that wants 0
if np.all(gray == 0):
raise ValueError("Empty image (all pixels have the same color).")
labels, n_labels = ndimage.label(gray.astype(np.float))
slices = ndimage.find_objects(labels, n_labels) # slice roi
if colorbar is not None and ix in colorbar:
# we need all pieces so let's compose them into single min/max
slices_a = np.array([[[xy.start, xy.stop] for xy in s]
for s in slices])
# TODO: ideally gaps could be deduced and cut out with
# consideration of border_size
# so we need mins on 0th and maxs on 1th of 1-nd dimension
mins = np.min(slices_a[:, :, 0], axis=0)
maxs = np.max(slices_a[:, :, 1], axis=0)
s = (slice(mins[0], maxs[0]), slice(mins[1], maxs[1]))
else:
# we need just the first piece
s = slices[0]
        # box = (left, top, right, bottom), as expected by PIL's crop
boxes.append([s[1].start - border_size, s[0].start - border_size,
s[1].stop + border_size, s[0].stop + border_size])
# convert orientation to nested list of int
if orientation == 'h':
orientation = [range(len(images))]
elif orientation == 'v':
orientation = [[i] for i in range(len(images))]
# find bounding box
n_rows = len(orientation)
n_cols = max(len(row) for row in orientation)
if n_rows > 1:
min_left = min(box[0] for box in boxes)
max_width = max(box[2] for box in boxes)
for box in boxes:
box[0] = min_left
box[2] = max_width
if n_cols > 1:
min_top = min(box[1] for box in boxes)
max_height = max(box[3] for box in boxes)
for box in boxes:
box[1] = min_top
box[3] = max_height
# crop images
cropped_images = []
for im, box in zip(images, boxes):
cropped_images.append(im.crop(box))
images = cropped_images
# Get full image size
row_w = [sum(images[i].size[0] for i in row) for row in orientation]
row_h = [max(images[i].size[1] for i in row) for row in orientation]
out_w = max(row_w)
out_h = sum(row_h)
# compose image
new = Image.new("RGBA", (out_w, out_h))
y = 0
for row, h in zip(orientation, row_h):
x = 0
for i in row:
im = images[i]
pos = (x, y)
new.paste(im, pos)
x += im.size[0]
y += h
if filename is not None:
try:
new.save(filename)
except Exception:
print("Error saving %s" % filename)
return np.array(new)
def _prepare_data(data):
"""Ensure data is float64 and has proper endianness.
Note: this is largely aimed at working around a Mayavi bug.
"""
data = data.copy()
data = data.astype(np.float64)
if data.dtype.byteorder == '>':
data.byteswap(True)
return data
def _force_render(figures, backend):
"""Ensure plots are updated before properties are used"""
if not isinstance(figures, list):
figures = [[figures]]
for ff in figures:
for f in ff:
f.render()
mlab.draw(figure=f)
if backend == 'TraitsUI':
from pyface.api import GUI
_gui = GUI()
orig_val = _gui.busy
_gui.set_busy(busy=True)
_gui.process_events()
_gui.set_busy(busy=orig_val)
_gui.process_events()
def _make_viewer(figure, n_row, n_col, title, scene_size, offscreen):
"""Triage viewer creation
If n_row == n_col == 1, then we can use a Mayavi figure, which
generally guarantees that things will be drawn before control
is returned to the command line. With the multi-view, TraitsUI
unfortunately has no such support, so we only use it if needed.
"""
if figure is None:
# spawn scenes
h, w = scene_size
if offscreen is True:
orig_val = mlab.options.offscreen
mlab.options.offscreen = True
figures = [[mlab.figure(size=(h / n_row, w / n_col))
for _ in range(n_col)] for __ in range(n_row)]
mlab.options.offscreen = orig_val
_v = None
else:
# Triage: don't make TraitsUI if we don't have to
if n_row == 1 and n_col == 1:
figure = mlab.figure(title, size=(w, h))
mlab.clf(figure)
figures = [[figure]]
_v = None
else:
window = _MlabGenerator(n_row, n_col, w, h, title)
figures, _v = window._get_figs_view()
else:
if not isinstance(figure, (list, tuple)):
figure = [figure]
if not len(figure) == n_row * n_col:
raise ValueError('For the requested view, figure must be a '
'list or tuple with exactly %i elements, '
'not %i' % (n_row * n_col, len(figure)))
_v = None
figures = [figure[slice(ri * n_col, (ri + 1) * n_col)]
for ri in range(n_row)]
return figures, _v
class _MlabGenerator(HasTraits):
"""TraitsUI mlab figure generator"""
from traitsui.api import View
view = Instance(View)
def __init__(self, n_row, n_col, width, height, title, **traits):
HasTraits.__init__(self, **traits)
self.mlab_names = []
self.n_row = n_row
self.n_col = n_col
self.width = width
self.height = height
for fi in range(n_row * n_col):
name = 'mlab_view%03g' % fi
self.mlab_names.append(name)
self.add_trait(name, Instance(MlabSceneModel, ()))
self.view = self._get_gen_view()
self._v = self.edit_traits(view=self.view)
self._v.title = title
def _get_figs_view(self):
figures = []
ind = 0
for ri in range(self.n_row):
rfigs = []
for ci in range(self.n_col):
x = getattr(self, self.mlab_names[ind])
rfigs.append(x.mayavi_scene)
ind += 1
figures.append(rfigs)
return figures, self._v
def _get_gen_view(self):
from traitsui.api import (View, Item, VGroup, HGroup)
ind = 0
va = []
for ri in range(self.n_row):
ha = []
for ci in range(self.n_col):
ha += [Item(name=self.mlab_names[ind], style='custom',
resizable=True, show_label=False,
editor=SceneEditor(scene_class=MayaviScene))]
ind += 1
va += [HGroup(*ha)]
view = View(VGroup(*va), resizable=True,
height=self.height, width=self.width)
return view
class Brain(object):
"""Class for visualizing a brain using multiple views in mlab
Parameters
----------
subject_id : str
subject name in Freesurfer subjects dir
hemi : str
        hemisphere id (i.e. 'lh', 'rh', 'both', or 'split'). In the case
of 'both', both hemispheres are shown in the same window.
In the case of 'split' hemispheres are displayed side-by-side
in different viewing panes.
surf : geometry name
        freesurfer surface mesh name (i.e. 'white', 'inflated', etc.)
curv : boolean
if true, loads curv file and displays binary curvature
(default: True)
title : str
title for the window
cortex : str or tuple
specifies how binarized curvature values are rendered.
either the name of a preset PySurfer cortex colorscheme (one of
'classic', 'bone', 'low_contrast', or 'high_contrast'), or the
name of mayavi colormap, or a tuple with values (colormap, min,
max, reverse) to fully specify the curvature colors.
size : float or pair of floats
        the size of the window, in pixels. Can be one number to specify
a square window, or the (width, height) of a rectangular window.
background, foreground : matplotlib colors
color of the background and foreground of the display window
figure : list of instances of mayavi.core.scene.Scene | None
If None, a new window will be created with the appropriate
views.
subjects_dir : str | None
If not None, this directory will be used as the subjects directory
instead of the value set using the SUBJECTS_DIR environment
variable.
views : list | str
views to use
show_toolbar : bool
If True, toolbars will be shown for each view.
offscreen : bool
If True, rendering will be done offscreen (not shown). Useful
mostly for generating images or screenshots, but can be buggy.
Use at your own risk.
Attributes
----------
brains : list
List of the underlying brain instances.
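    Examples
    --------
    A minimal usage sketch; "fsaverage" and the subjects directory path are
    placeholders for whatever FreeSurfer subject is available locally:
    >>> brain = Brain("fsaverage", "lh", "inflated",
    ...               subjects_dir="/path/to/subjects")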
"""
def __init__(self, subject_id, hemi, surf, curv=True, title=None,
cortex="classic", size=800, background="black",
foreground="white", figure=None, subjects_dir=None,
views=['lat'], show_toolbar=False, offscreen=False,
config_opts=None):
# Keep backwards compatability
if config_opts is not None:
msg = ("The `config_opts` dict has been deprecated and will "
"be removed in future versions. You should update your "
"code and pass these options directly to the `Brain` "
"constructor.")
warn(msg)
cortex = config_opts.get("cortex", cortex)
background = config_opts.get("background", background)
foreground = config_opts.get("foreground", foreground)
size = config_opts.get("size", size)
width = config_opts.get("width", size)
height = config_opts.get("height", size)
size = (width, height)
        col_dict = dict(lh=1, rh=1, both=1, split=2)
        if hemi not in col_dict:
            raise ValueError('hemi must be one of [%s], not %s'
                             % (', '.join(col_dict.keys()), hemi))
        n_col = col_dict[hemi]
# Get the subjects directory from parameter or env. var
subjects_dir = _get_subjects_dir(subjects_dir=subjects_dir)
self._hemi = hemi
if title is None:
title = subject_id
self.subject_id = subject_id
if not isinstance(views, list):
views = [views]
n_row = len(views)
# load geometry for one or both hemispheres as necessary
offset = None if hemi != 'both' else 0.0
self.geo = dict()
if hemi in ['split', 'both']:
geo_hemis = ['lh', 'rh']
elif hemi == 'lh':
geo_hemis = ['lh']
elif hemi == 'rh':
geo_hemis = ['rh']
else:
raise ValueError('bad hemi value')
for h in geo_hemis:
# Initialize a Surface object as the geometry
geo = Surface(subject_id, h, surf, subjects_dir, offset)
# Load in the geometry and (maybe) curvature
geo.load_geometry()
if curv:
geo.load_curvature()
self.geo[h] = geo
# deal with making figures
self._set_window_properties(size, background, foreground)
figures, _v = _make_viewer(figure, n_row, n_col, title,
self._scene_size, offscreen)
self._figures = figures
self._v = _v
self._window_backend = 'Mayavi' if self._v is None else 'TraitsUI'
for ff in self._figures:
for f in ff:
if f.scene is not None:
f.scene.background = self._bg_color
f.scene.foreground = self._fg_color
# force rendering so scene.lights exists
_force_render(self._figures, self._window_backend)
self.toggle_toolbars(show_toolbar)
_force_render(self._figures, self._window_backend)
self._toggle_render(False)
# fill figures with brains
kwargs = dict(surf=surf, curv=curv, title=None,
cortex=cortex, subjects_dir=subjects_dir,
bg_color=self._bg_color, offset=offset)
brains = []
brain_matrix = []
for ri, view in enumerate(views):
brain_row = []
for hi, h in enumerate(['lh', 'rh']):
if not (hemi in ['lh', 'rh'] and h != hemi):
ci = hi if hemi == 'split' else 0
kwargs['hemi'] = h
kwargs['geo'] = self.geo[h]
kwargs['figure'] = figures[ri][ci]
kwargs['backend'] = self._window_backend
brain = _Hemisphere(subject_id, **kwargs)
brain.show_view(view)
brains += [dict(row=ri, col=ci, brain=brain, hemi=h)]
brain_row += [brain]
brain_matrix += [brain_row]
self._toggle_render(True)
self._original_views = views
self._brain_list = brains
for brain in self._brain_list:
brain['brain']._orient_lights()
self.brains = [b['brain'] for b in brains]
self.brain_matrix = np.array(brain_matrix)
self.subjects_dir = subjects_dir
# Initialize the overlay and label dictionaries
self.foci_dict = dict()
self.labels_dict = dict()
self.overlays_dict = dict()
self.contour_list = []
self.morphometry_list = []
self.annot_list = []
self.data_dict = dict(lh=None, rh=None)
# note that texts gets treated differently
self.texts_dict = dict()
self.n_times = None
###########################################################################
# HELPERS
def _toggle_render(self, state, views=None):
"""Turn rendering on (True) or off (False)"""
figs = []
[figs.extend(f) for f in self._figures]
if views is None:
views = [None] * len(figs)
for vi, (_f, view) in enumerate(zip(figs, views)):
if state is False and view is None:
views[vi] = mlab.view(figure=_f)
# Testing backend doesn't have this option
if mlab.options.backend != 'test':
_f.scene.disable_render = not state
if state is True and view is not None:
mlab.draw(figure=_f)
mlab.view(*view, figure=_f)
# let's do the ugly force draw
if state is True:
_force_render(self._figures, self._window_backend)
return views
def _set_window_properties(self, size, background, foreground):
"""Set window properties that are used elsewhere."""
# old option "size" sets both width and height
try:
width, height = size
except TypeError:
width, height = size, size
self._scene_size = height, width
bg_color_rgb = colorConverter.to_rgb(background)
self._bg_color = bg_color_rgb
fg_color_rgb = colorConverter.to_rgb(foreground)
self._fg_color = fg_color_rgb
def get_data_properties(self):
""" Get properties of the data shown
Returns
-------
props : dict
Dictionary with data properties
props["fmin"] : minimum colormap
props["fmid"] : midpoint colormap
props["fmax"] : maximum colormap
props["transparent"] : lower part of colormap transparent?
props["time"] : time points
props["time_idx"] : current time index
props["smoothing_steps"] : number of smoothing steps
"""
props = dict()
keys = ['fmin', 'fmid', 'fmax', 'transparent', 'time', 'time_idx',
'smoothing_steps']
try:
if self.data_dict['lh'] is not None:
hemi = 'lh'
else:
hemi = 'rh'
for key in keys:
props[key] = self.data_dict[hemi][key]
except KeyError:
# The user has not added any data
for key in keys:
props[key] = 0
return props
def toggle_toolbars(self, show=None):
"""Toggle toolbar display
Parameters
----------
show : bool | None
If None, the state is toggled. If True, the toolbar will
be shown, if False, hidden.
"""
# don't do anything if testing is on
if self._figures[0][0].scene is not None:
# this may not work if QT is not the backend (?), or in testing
if hasattr(self._figures[0][0].scene, 'scene_editor'):
# Within TraitsUI
bars = [f.scene.scene_editor._tool_bar
for ff in self._figures for f in ff]
else:
# Mayavi figure
bars = [f.scene._tool_bar for ff in self._figures for f in ff]
if show is None:
if hasattr(bars[0], 'isVisible'):
# QT4
show = not bars[0].isVisible()
elif hasattr(bars[0], 'Shown'):
# WX
show = not bars[0].Shown()
for bar in bars:
if hasattr(bar, 'setVisible'):
bar.setVisible(show)
elif hasattr(bar, 'Show'):
bar.Show(show)
def _get_one_brain(self, d, name):
"""Helper for various properties"""
if len(self.brains) > 1:
raise ValueError('Cannot access brain.%s when more than '
'one view is plotted. Use brain.brain_matrix '
'or brain.brains.' % name)
if isinstance(d, dict):
out = dict()
            for key, value in d.items():
out[key] = value[0]
else:
out = d[0]
return out
@property
def overlays(self):
"""Wrap to overlays"""
return self._get_one_brain(self.overlays_dict, 'overlays')
@property
def foci(self):
"""Wrap to foci"""
return self._get_one_brain(self.foci_dict, 'foci')
@property
def labels(self):
"""Wrap to labels"""
return self._get_one_brain(self.labels_dict, 'labels')
@property
def contour(self):
"""Wrap to contour"""
return self._get_one_brain(self.contour_list, 'contour')
@property
def annot(self):
"""Wrap to annot"""
        return self._get_one_brain(self.annot_list, 'annot')
@property
def texts(self):
"""Wrap to texts"""
self._get_one_brain([[]], 'texts')
out = dict()
        for key, val in self.texts_dict.items():
out[key] = val['text']
return out
@property
def _geo(self):
"""Wrap to _geo"""
self._get_one_brain([[]], '_geo')
        if 'lh' in self.geo and self.geo['lh'] is not None:
return self.geo['lh']
else:
return self.geo['rh']
@property
def data(self):
"""Wrap to data"""
self._get_one_brain([[]], 'data')
if self.data_dict['lh'] is not None:
data = self.data_dict['lh'].copy()
else:
data = self.data_dict['rh'].copy()
if 'colorbars' in data:
data['colorbar'] = data['colorbars'][0]
return data
def _check_hemi(self, hemi):
"""Check for safe single-hemi input, returns str"""
if hemi is None:
if self._hemi not in ['lh', 'rh']:
raise ValueError('hemi must not be None when both '
'hemispheres are displayed')
else:
hemi = self._hemi
elif hemi not in ['lh', 'rh']:
extra = ' or None' if self._hemi in ['lh', 'rh'] else ''
raise ValueError('hemi must be either "lh" or "rh"' + extra)
return hemi
def _check_hemis(self, hemi):
"""Check for safe dual or single-hemi input, returns list"""
if hemi is None:
if self._hemi not in ['lh', 'rh']:
hemi = ['lh', 'rh']
else:
hemi = [self._hemi]
elif hemi not in ['lh', 'rh']:
extra = ' or None' if self._hemi in ['lh', 'rh'] else ''
raise ValueError('hemi must be either "lh" or "rh"' + extra)
else:
hemi = [hemi]
return hemi
def _read_scalar_data(self, source, hemi, name=None, cast=True):
"""Load in scalar data from an image stored in a file or an array
Parameters
----------
source : str or numpy array
path to scalar data file or a numpy array
name : str or None, optional
name for the overlay in the internal dictionary
cast : bool, optional
either to cast float data into 64bit datatype as a
workaround. cast=True can fix a rendering problem with
certain versions of Mayavi
Returns
-------
scalar_data : numpy array
flat numpy array of scalar data
name : str
if no name was provided, deduces the name if filename was given
as a source
"""
# If source is a string, try to load a file
if isinstance(source, string_types):
if name is None:
basename = os.path.basename(source)
if basename.endswith(".gz"):
basename = basename[:-3]
if basename.startswith("%s." % hemi):
basename = basename[3:]
name = os.path.splitext(basename)[0]
scalar_data = io.read_scalar_data(source)
else:
# Can't think of a good way to check that this will work nicely
scalar_data = source
if cast:
if (scalar_data.dtype.char == 'f' and
scalar_data.dtype.itemsize < 8):
scalar_data = scalar_data.astype(np.float)
return scalar_data, name
def _get_display_range(self, scalar_data, min, max, sign):
if scalar_data.min() >= 0:
sign = "pos"
elif scalar_data.max() <= 0:
sign = "neg"
# Get data with a range that will make sense for automatic thresholding
if sign == "neg":
range_data = np.abs(scalar_data[np.where(scalar_data < 0)])
elif sign == "pos":
range_data = scalar_data[np.where(scalar_data > 0)]
else:
range_data = np.abs(scalar_data)
# Get a numeric value for the scalar minimum
if min is None:
min = "robust_min"
if min == "robust_min":
min = stats.scoreatpercentile(range_data, 2)
elif min == "actual_min":
min = range_data.min()
# Get a numeric value for the scalar maximum
if max is None:
max = "robust_max"
if max == "robust_max":
max = stats.scoreatpercentile(scalar_data, 98)
elif max == "actual_max":
max = range_data.max()
return min, max
###########################################################################
# ADDING DATA PLOTS
def add_overlay(self, source, min=2, max="robust_max", sign="abs",
name=None, hemi=None):
"""Add an overlay to the overlay dict from a file or array.
Parameters
----------
source : str or numpy array
path to the overlay file or numpy array with data
min : float
threshold for overlay display
max : float
saturation point for overlay display
sign : {'abs' | 'pos' | 'neg'}
whether positive, negative, or both values should be displayed
name : str
name for the overlay in the internal dictionary
hemi : str | None
            If None, it is assumed to belong to the hemisphere being
shown. If two hemispheres are being shown, an error will
be thrown.
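        Examples
        --------
        A minimal usage sketch on an existing Brain instance ``brain``; the
        overlay path is a placeholder for any per-vertex scalar file:
        >>> brain.add_overlay("lh.sig.mgz", min=2, max=10, sign="pos")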
"""
hemi = self._check_hemi(hemi)
# load data here
scalar_data, name = self._read_scalar_data(source, hemi, name=name)
min, max = self._get_display_range(scalar_data, min, max, sign)
if sign not in ["abs", "pos", "neg"]:
raise ValueError("Overlay sign must be 'abs', 'pos', or 'neg'")
old = OverlayData(scalar_data, self.geo[hemi], min, max, sign)
ol = []
views = self._toggle_render(False)
for brain in self._brain_list:
if brain['hemi'] == hemi:
ol.append(brain['brain'].add_overlay(old))
if name in self.overlays_dict:
name = "%s%d" % (name, len(self.overlays_dict) + 1)
self.overlays_dict[name] = ol
self._toggle_render(True, views)
def add_data(self, array, min=None, max=None, thresh=None,
colormap="RdBu_r", alpha=1,
vertices=None, smoothing_steps=20, time=None,
time_label="time index=%d", colorbar=True,
hemi=None, remove_existing=False, time_label_size=14):
"""Display data from a numpy array on the surface.
This provides a similar interface to add_overlay, but it displays
it with a single colormap. It offers more flexibility over the
colormap, and provides a way to display four dimensional data
(i.e. a timecourse).
Note that min sets the low end of the colormap, and is separate
from thresh (this is a different convention from add_overlay)
Note: If the data is defined for a subset of vertices (specified
by the "vertices" parameter), a smoothing method is used to interpolate
the data onto the high resolution surface. If the data is defined for
subsampled version of the surface, smoothing_steps can be set to None,
in which case only as many smoothing steps are applied until the whole
surface is filled with non-zeros.
Parameters
----------
array : numpy array
data array (nvtx vector)
min : float
min value in colormap (uses real min if None)
max : float
max value in colormap (uses real max if None)
thresh : None or float
if not None, values below thresh will not be visible
colormap : string, list of colors, or array
name of matplotlib colormap to use, a list of matplotlib colors,
            or a custom lookup table (an n x 4 array coded with RGBA values
between 0 and 255).
alpha : float in [0, 1]
alpha level to control opacity
vertices : numpy array
vertices for which the data is defined (needed if len(data) < nvtx)
smoothing_steps : int or None
            number of smoothing steps (smoothing is used if len(data) < nvtx)
Default : 20
time : numpy array
time points in the data array (if data is 2D)
time_label : str | callable | None
format of the time label (a format string, a function that maps
floating point time values to strings, or None for no label)
colorbar : bool
whether to add a colorbar to the figure
hemi : str | None
            If None, it is assumed to belong to the hemisphere being
shown. If two hemispheres are being shown, an error will
be thrown.
remove_existing : bool
Remove surface added by previous "add_data" call. Useful for
conserving memory when displaying different data in a loop.
time_label_size : int
Font size of the time label (default 14)
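        Examples
        --------
        A minimal usage sketch on an existing Brain instance ``brain``
        showing the left hemisphere; the values are random and purely
        illustrative:
        >>> import numpy as np
        >>> data = np.random.randn(brain.geo['lh'].x.shape[0])
        >>> brain.add_data(data, min=-3, max=3, colormap="RdBu_r", hemi='lh')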
"""
hemi = self._check_hemi(hemi)
if min is None:
min = array.min()
if max is None:
max = array.max()
# Create smoothing matrix if necessary
if len(array) < self.geo[hemi].x.shape[0]:
if vertices is None:
raise ValueError("len(data) < nvtx: need vertices")
adj_mat = utils.mesh_edges(self.geo[hemi].faces)
smooth_mat = utils.smoothing_matrix(vertices, adj_mat,
smoothing_steps)
else:
smooth_mat = None
# Calculate initial data to plot
if array.ndim == 1:
array_plot = array
elif array.ndim == 2:
array_plot = array[:, 0]
else:
raise ValueError("data has to be 1D or 2D")
if smooth_mat is not None:
array_plot = smooth_mat * array_plot
# Copy and byteswap to deal with Mayavi bug
mlab_plot = _prepare_data(array_plot)
# Process colormap argument into a lut
lut = create_color_lut(colormap)
colormap = "Greys"
data = dict(array=array, smoothing_steps=smoothing_steps,
fmin=min, fmid=(min + max) / 2, fmax=max,
transparent=False, time=0, time_idx=0,
vertices=vertices, smooth_mat=smooth_mat)
# Create time array and add label if 2D
if array.ndim == 2:
if time is None:
time = np.arange(array.shape[1])
self._times = time
self.n_times = array.shape[1]
if not self.n_times == len(time):
raise ValueError('time is not the same length as '
'array.shape[1]')
            if isinstance(time_label, string_types):
time_label_fmt = time_label
time_label = lambda x: time_label_fmt % x
data["time_label"] = time_label
data["time"] = time
data["time_idx"] = 0
y_txt = 0.05 + 0.05 * bool(colorbar)
else:
self._times = None
self.n_times = None
surfs = []
bars = []
views = self._toggle_render(False)
for bi, brain in enumerate(self._brain_list):
if brain['hemi'] == hemi:
out = brain['brain'].add_data(array, mlab_plot, vertices,
smooth_mat, min, max, thresh,
lut, colormap, alpha, time,
time_label, colorbar)
s, ct, bar = out
surfs.append(s)
bars.append(bar)
row, col = np.unravel_index(bi, self.brain_matrix.shape)
if array.ndim == 2 and time_label is not None:
self.add_text(0.95, y_txt, time_label(time[0]),
name="time_label", row=row, col=col,
font_size=time_label_size,
justification='right')
self._toggle_render(True, views)
data['surfaces'] = surfs
data['colorbars'] = bars
data['orig_ctable'] = ct
if remove_existing and self.data_dict[hemi] is not None:
for surf in self.data_dict[hemi]['surfaces']:
surf.parent.parent.remove()
self.data_dict[hemi] = data
def add_annotation(self, annot, borders=True, alpha=1, hemi=None,
remove_existing=True):
"""Add an annotation file.
Parameters
----------
annot : str
Either path to annotation file or annotation name
borders : bool | int
Show only label borders. If int, specify the number of steps
(away from the true border) along the cortical mesh to include
as part of the border definition.
alpha : float in [0, 1]
Alpha level to control opacity
hemi : str | None
            If None, it is assumed to belong to the hemisphere being
shown. If two hemispheres are being shown, data must exist
for both hemispheres.
remove_existing : bool
If True (default), remove old annotations.
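        Examples
        --------
        A minimal usage sketch on an existing Brain instance ``brain``;
        "aparc" assumes the standard FreeSurfer parcellation files exist in
        the subject's label directory:
        >>> brain.add_annotation("aparc", borders=2, alpha=0.8)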
"""
hemis = self._check_hemis(hemi)
# Figure out where the data is coming from
if os.path.isfile(annot):
filepath = annot
path = os.path.split(filepath)[0]
file_hemi, annot = os.path.basename(filepath).split('.')[:2]
if len(hemis) > 1:
                basename = os.path.basename(filepath)
                if file_hemi == 'lh':
                    filepaths = [filepath, pjoin(path, 'rh' + basename[2:])]
                elif file_hemi == 'rh':
                    filepaths = [pjoin(path, 'lh' + basename[2:]), filepath]
else:
raise RuntimeError('To add both hemispheres '
'simultaneously, filename must '
'begin with "lh." or "rh."')
else:
filepaths = [filepath]
else:
filepaths = []
for hemi in hemis:
filepath = pjoin(self.subjects_dir,
self.subject_id,
'label',
".".join([hemi, annot, 'annot']))
if not os.path.exists(filepath):
raise ValueError('Annotation file %s does not exist'
% filepath)
filepaths += [filepath]
views = self._toggle_render(False)
if remove_existing is True:
# Get rid of any old annots
for a in self.annot_list:
a['surface'].remove()
self.annot_list = []
al = self.annot_list
for hemi, filepath in zip(hemis, filepaths):
# Read in the data
labels, cmap, _ = nib.freesurfer.read_annot(filepath,
orig_ids=True)
# Maybe zero-out the non-border vertices
self._to_borders(labels, hemi, borders)
# Handle null labels properly
# (tksurfer doesn't use the alpha channel, so sometimes this
# is set weirdly. For our purposes, it should always be 0.
# Unless this sometimes causes problems?
cmap[np.where(cmap[:, 4] == 0), 3] = 0
if np.any(labels == 0) and not np.any(cmap[:, -1] == 0):
cmap = np.vstack((cmap, np.zeros(5, int)))
# Set label ids sensibly
ord = np.argsort(cmap[:, -1])
ids = ord[np.searchsorted(cmap[ord, -1], labels)]
cmap = cmap[:, :4]
# Set the alpha level
alpha_vec = cmap[:, 3]
alpha_vec[alpha_vec > 0] = alpha * 255
for brain in self._brain_list:
if brain['hemi'] == hemi:
al.append(brain['brain'].add_annotation(annot, ids, cmap))
self.annot_list = al
self._toggle_render(True, views)
def add_label(self, label, color=None, alpha=1, scalar_thresh=None,
borders=False, hemi=None, subdir=None):
"""Add an ROI label to the image.
Parameters
----------
label : str | instance of Label
label filepath or name. Can also be an instance of
an object with attributes "hemi", "vertices", "name", and
optionally "color" and "values" (if scalar_thresh is not None).
color : matplotlib-style color | None
anything matplotlib accepts: string, RGB, hex, etc. (default
"crimson")
alpha : float in [0, 1]
alpha level to control opacity
scalar_thresh : None or number
threshold the label ids using this value in the label
file's scalar field (i.e. label only vertices with
scalar >= thresh)
borders : bool | int
Show only label borders. If int, specify the number of steps
(away from the true border) along the cortical mesh to include
as part of the border definition.
hemi : str | None
            If None, it is assumed to belong to the hemisphere being
shown. If two hemispheres are being shown, an error will
be thrown.
subdir : None | str
If a label is specified as name, subdir can be used to indicate
that the label file is in a sub-directory of the subject's
label directory rather than in the label directory itself (e.g.
for ``$SUBJECTS_DIR/$SUBJECT/label/aparc/lh.cuneus.label``
``brain.add_label('cuneus', subdir='aparc')``).
Notes
-----
To remove previously added labels, run Brain.remove_labels().
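        Examples
        --------
        A minimal usage sketch on an existing Brain instance ``brain``;
        "BA1" assumes a matching lh.BA1.label file exists for the subject:
        >>> brain.add_label("BA1", color="blue", alpha=0.7, borders=True)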
"""
if isinstance(label, string_types):
hemi = self._check_hemi(hemi)
if color is None:
color = "crimson"
if os.path.isfile(label):
filepath = label
label_name = os.path.basename(filepath).split('.')[1]
else:
label_name = label
label_fname = ".".join([hemi, label_name, 'label'])
if subdir is None:
filepath = pjoin(self.subjects_dir, self.subject_id,
'label', label_fname)
else:
filepath = pjoin(self.subjects_dir, self.subject_id,
'label', subdir, label_fname)
if not os.path.exists(filepath):
raise ValueError('Label file %s does not exist'
% filepath)
# Load the label data and create binary overlay
if scalar_thresh is None:
ids = nib.freesurfer.read_label(filepath)
else:
ids, scalars = nib.freesurfer.read_label(filepath,
read_scalars=True)
ids = ids[scalars >= scalar_thresh]
else:
# try to extract parameters from label instance
try:
hemi = label.hemi
ids = label.vertices
if label.name is None:
label_name = 'unnamed'
else:
label_name = str(label.name)
if color is None:
if hasattr(label, 'color') and label.color is not None:
color = label.color
else:
color = "crimson"
if scalar_thresh is not None:
scalars = label.values
except Exception:
raise ValueError('Label was not a filename (str), and could '
'not be understood as a class. The class '
'must have attributes "hemi", "vertices", '
'"name", and (if scalar_thresh is not None)'
'"values"')
hemi = self._check_hemi(hemi)
if scalar_thresh is not None:
ids = ids[scalars >= scalar_thresh]
label = np.zeros(self.geo[hemi].coords.shape[0])
label[ids] = 1
# make sure we have a unique name
if label_name in self.labels_dict:
i = 2
name = label_name + '_%i'
while name % i in self.labels_dict:
i += 1
label_name = name % i
self._to_borders(label, hemi, borders, restrict_idx=ids)
# make a list of all the plotted labels
ll = []
views = self._toggle_render(False)
for brain in self._brain_list:
if brain['hemi'] == hemi:
ll.append(brain['brain'].add_label(label, label_name,
color, alpha))
self.labels_dict[label_name] = ll
self._toggle_render(True, views)
def _to_borders(self, label, hemi, borders, restrict_idx=None):
"""Helper to potentially convert a label/parc to borders"""
if not isinstance(borders, (bool, int)) or borders < 0:
raise ValueError('borders must be a bool or positive integer')
if borders:
n_vertices = label.size
edges = utils.mesh_edges(self.geo[hemi].faces)
border_edges = label[edges.row] != label[edges.col]
show = np.zeros(n_vertices, dtype=np.int)
keep_idx = np.unique(edges.row[border_edges])
if isinstance(borders, int):
for _ in range(borders):
keep_idx = np.in1d(self.geo[hemi].faces.ravel(), keep_idx)
keep_idx.shape = self.geo[hemi].faces.shape
keep_idx = self.geo[hemi].faces[np.any(keep_idx, axis=1)]
keep_idx = np.unique(keep_idx)
if restrict_idx is not None:
keep_idx = keep_idx[np.in1d(keep_idx, restrict_idx)]
show[keep_idx] = 1
label *= show
def remove_labels(self, labels=None, hemi=None):
"""Remove one or more previously added labels from the image.
Parameters
----------
labels : None | str | list of str
Labels to remove. Can be a string naming a single label, or None to
remove all labels. Possible names can be found in the Brain.labels
attribute.
hemi : str | None
            If None, it is assumed to belong to the hemisphere being
shown. If two hemispheres are being shown, an error will
be thrown.
"""
hemi = self._check_hemi(hemi)
if labels is None:
labels = self.labels_dict.keys()
elif isinstance(labels, str):
labels = [labels]
for key in labels:
label = self.labels_dict.pop(key)
for ll in label:
ll.remove()
def add_morphometry(self, measure, grayscale=False, hemi=None,
remove_existing=True, colormap=None,
min=None, max=None, colorbar=True):
"""Add a morphometry overlay to the image.
Parameters
----------
measure : {'area' | 'curv' | 'jacobian_white' | 'sulc' | 'thickness'}
which measure to load
grayscale : bool
whether to load the overlay with a grayscale colormap
hemi : str | None
            If None, it is assumed to belong to the hemisphere being
shown. If two hemispheres are being shown, data must exist
for both hemispheres.
remove_existing : bool
If True (default), remove old annotations.
colormap : str
Mayavi colormap name, or None to use a sensible default.
min, max : floats
Endpoints for the colormap; if not provided the robust range
of the data is used.
colorbar : bool
If True, show a colorbar corresponding to the overlay data.
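        Examples
        --------
        A minimal usage sketch on an existing Brain instance ``brain``; both
        calls assume the corresponding FreeSurfer files exist in the
        subject's surf directory:
        >>> brain.add_morphometry("curv")
        >>> brain.add_morphometry("thickness", min=1, max=4)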
"""
hemis = self._check_hemis(hemi)
morph_files = []
for hemi in hemis:
# Find the source data
surf_dir = pjoin(self.subjects_dir, self.subject_id, 'surf')
morph_file = pjoin(surf_dir, '.'.join([hemi, measure]))
if not os.path.exists(morph_file):
raise ValueError(
'Could not find %s in subject directory' % morph_file)
morph_files += [morph_file]
views = self._toggle_render(False)
if remove_existing is True:
# Get rid of any old overlays
for m in self.morphometry_list:
m['surface'].remove()
if m["colorbar"] is not None:
m['colorbar'].visible = False
self.morphometry_list = []
ml = self.morphometry_list
for hemi, morph_file in zip(hemis, morph_files):
if colormap is None:
# Preset colormaps
if grayscale:
colormap = "gray"
else:
colormap = dict(area="pink",
curv="RdBu",
jacobian_white="pink",
sulc="RdBu",
thickness="pink")[measure]
# Read in the morphometric data
morph_data = nib.freesurfer.read_morph_data(morph_file)
# Get a cortex mask for robust range
self.geo[hemi].load_label("cortex")
ctx_idx = self.geo[hemi].labels["cortex"]
# Get the display range
min_default, max_default = np.percentile(morph_data[ctx_idx],
[2, 98])
if min is None:
min = min_default
if max is None:
max = max_default
# Use appropriate values for bivariate measures
if measure in ["curv", "sulc"]:
lim = np.max([abs(min), abs(max)])
min, max = -lim, lim
# Set up the Mayavi pipeline
morph_data = _prepare_data(morph_data)
for brain in self._brain_list:
if brain['hemi'] == hemi:
ml.append(brain['brain'].add_morphometry(morph_data,
colormap, measure,
min, max,
colorbar))
self.morphometry_list = ml
self._toggle_render(True, views)
def add_foci(self, coords, coords_as_verts=False, map_surface=None,
scale_factor=1, color="white", alpha=1, name=None,
hemi=None):
"""Add spherical foci, possibly mapping to displayed surf.
The foci spheres can be displayed at the coordinates given, or
mapped through a surface geometry. In other words, coordinates
from a volume-based analysis in MNI space can be displayed on an
inflated average surface by finding the closest vertex on the
white surface and mapping to that vertex on the inflated mesh.
Parameters
----------
coords : numpy array
x, y, z coordinates in stereotaxic space or array of vertex ids
coords_as_verts : bool
whether the coords parameter should be interpreted as vertex ids
map_surface : Freesurfer surf or None
surface to map coordinates through, or None to use raw coords
        scale_factor : float
controls the size of the foci spheres
color : matplotlib color code
            HTML name, RGB tuple, or hex code
alpha : float in [0, 1]
            opacity of focus glyphs
name : str
internal name to use
hemi : str | None
            If None, it is assumed to belong to the hemisphere being
shown. If two hemispheres are being shown, an error will
be thrown.
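        Examples
        --------
        A minimal usage sketch on an existing Brain instance ``brain``; the
        coordinate below is arbitrary and only illustrates mapping a
        stereotaxic point through the white surface:
        >>> brain.add_foci([[-20, 10, 50]], map_surface="white",
        ...                color="gold", scale_factor=0.7)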
"""
hemi = self._check_hemi(hemi)
# Figure out how to interpret the first parameter
if coords_as_verts:
coords = self.geo[hemi].coords[coords]
map_surface = None
# Possibly map the foci coords through a surface
if map_surface is None:
foci_coords = np.atleast_2d(coords)
else:
foci_surf = Surface(self.subject_id, hemi, map_surface,
subjects_dir=self.subjects_dir)
foci_surf.load_geometry()
foci_vtxs = utils.find_closest_vertices(foci_surf.coords, coords)
foci_coords = self.geo[hemi].coords[foci_vtxs]
# Get a unique name (maybe should take this approach elsewhere)
if name is None:
name = "foci_%d" % (len(self.foci_dict) + 1)
# Convert the color code
if not isinstance(color, tuple):
color = colorConverter.to_rgb(color)
views = self._toggle_render(False)
fl = []
for brain in self._brain_list:
if brain['hemi'] == hemi:
fl.append(brain['brain'].add_foci(foci_coords, scale_factor,
color, alpha, name))
self.foci_dict[name] = fl
self._toggle_render(True, views)
def add_contour_overlay(self, source, min=None, max=None,
n_contours=7, line_width=1.5, colormap="YlOrRd_r",
hemi=None, remove_existing=True, colorbar=True):
"""Add a topographic contour overlay of the positive data.
Note: This visualization will look best when using the "low_contrast"
cortical curvature colorscheme.
Parameters
----------
source : str or array
path to the overlay file or numpy array
min : float
threshold for overlay display
max : float
saturation point for overlay display
n_contours : int
number of contours to use in the display
line_width : float
width of contour lines
colormap : string, list of colors, or array
name of matplotlib colormap to use, a list of matplotlib colors,
            or a custom lookup table (an n x 4 array coded with RGBA values
between 0 and 255).
hemi : str | None
            If None, it is assumed to belong to the hemisphere being
shown. If two hemispheres are being shown, an error will
be thrown.
remove_existing : bool
If there is an existing contour overlay, remove it before plotting.
colorbar : bool
If True, show the colorbar for the scalar value.
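        Examples
        --------
        A minimal usage sketch on an existing Brain instance ``brain``; the
        path is a placeholder for any per-vertex scalar file with positive
        values to contour:
        >>> brain.add_contour_overlay("lh.sig.mgz", min=2, max=10,
        ...                           n_contours=9, line_width=2)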
"""
hemi = self._check_hemi(hemi)
# Read the scalar data
scalar_data, _ = self._read_scalar_data(source, hemi)
min, max = self._get_display_range(scalar_data, min, max, "pos")
# Deal with Mayavi bug
scalar_data = _prepare_data(scalar_data)
# Maybe get rid of an old overlay
if hasattr(self, "contour") and remove_existing:
for c in self.contour_list:
c['surface'].remove()
if c['colorbar'] is not None:
c['colorbar'].visible = False
# Process colormap argument into a lut
lut = create_color_lut(colormap)
views = self._toggle_render(False)
cl = []
for brain in self._brain_list:
if brain['hemi'] == hemi:
cl.append(brain['brain'].add_contour_overlay(scalar_data,
min, max,
n_contours,
line_width, lut,
colorbar))
self.contour_list = cl
self._toggle_render(True, views)
def add_text(self, x, y, text, name, color=None, opacity=1.0,
row=-1, col=-1, font_size=None, justification=None):
""" Add a text to the visualization
Parameters
----------
x : Float
x coordinate
y : Float
y coordinate
text : str
Text to add
name : str
Name of the text (text label can be updated using update_text())
color : Tuple
Color of the text. Default: (1, 1, 1)
opacity : Float
Opacity of the text. Default: 1.0
row : int
Row index of which brain to use
        col : int
            Column index of which brain to use
        font_size : int | None
            Font size of the text. If None, the Mayavi default is kept.
        justification : str | None
            Justification of the text, passed to the underlying Mayavi text
            property (the internal time label uses 'right'). If None, the
            default justification is kept.
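        Examples
        --------
        A minimal usage sketch on an existing Brain instance ``brain``; the
        x and y values are fractions of the figure, as in the internal
        time-label calls:
        >>> brain.add_text(0.1, 0.9, "subject 01", "subj_label", font_size=14)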
"""
if name in self.texts_dict:
self.texts_dict[name]['text'].remove()
text = self.brain_matrix[row, col].add_text(x, y, text,
name, color, opacity)
self.texts_dict[name] = dict(row=row, col=col, text=text)
if font_size is not None:
text.property.font_size = font_size
text.actor.text_scale_mode = 'viewport'
if justification is not None:
text.property.justification = justification
def update_text(self, text, name, row=-1, col=-1):
"""Update text label
Parameters
----------
text : str
New text for label
name : str
Name of text label
"""
if name not in self.texts_dict:
raise KeyError('text name "%s" unknown' % name)
self.texts_dict[name]['text'].text = text
###########################################################################
# DATA SCALING / DISPLAY
def reset_view(self):
"""Orient camera to display original view
"""
for view, brain in zip(self._original_views, self._brain_list):
brain['brain'].show_view(view)
def show_view(self, view=None, roll=None, distance=None, row=-1, col=-1):
"""Orient camera to display view
        Parameters
        ----------
        view : {'lateral' | 'medial' | 'rostral' | 'caudal' |
                'dorsal' | 'ventral' | 'frontal' | 'parietal' |
                dict}
            brain surface to view or kwargs to pass to mlab.view()
        roll : float
            camera roll
        distance : float | 'auto' | None
            distance from the origin
        row : int
            Row index of which brain to use
        col : int
            Column index of which brain to use
        Returns
        -------
        view : tuple
            tuple returned from mlab.view
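        Examples
        --------
        A minimal usage sketch on an existing Brain instance ``brain``; the
        dict form passes its items as keyword arguments to mlab.view():
        >>> brain.show_view('medial')
        >>> brain.show_view(dict(azimuth=40, elevation=90), roll=30)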
"""
return self.brain_matrix[row][col].show_view(view, roll, distance)
def set_distance(self, distance=None):
"""Set view distances for all brain plots to the same value
Parameters
----------
distance : float | None
Distance to use. If None, brains are set to the farthest
"best fit" distance across all current views; note that
the underlying "best fit" function can be buggy.
Returns
-------
distance : float
The distance used.
"""
if distance is None:
distance = []
for ff in self._figures:
for f in ff:
mlab.view(figure=f, distance='auto')
v = mlab.view(figure=f)
# This should only happen for the test backend
if v is None:
v = [0, 0, 100]
distance += [v[2]]
distance = max(distance)
for ff in self._figures:
for f in ff:
mlab.view(distance=distance, figure=f)
return distance
@verbose
def scale_data_colormap(self, fmin, fmid, fmax, transparent, verbose=None):
"""Scale the data colormap.
Parameters
----------
fmin : float
minimum value of colormap
fmid : float
value corresponding to color midpoint
fmax : float
maximum value for colormap
transparent : boolean
if True: use a linear transparency between fmin and fmid
verbose : bool, str, int, or None
If not None, override default verbose level (see surfer.verbose).
"""
        if not (fmin < fmid < fmax):
raise ValueError("Invalid colormap, we need fmin<fmid<fmax")
# Cast inputs to float to prevent integer division
fmin = float(fmin)
fmid = float(fmid)
fmax = float(fmax)
logger.info("colormap: fmin=%0.2e fmid=%0.2e fmax=%0.2e "
"transparent=%d" % (fmin, fmid, fmax, transparent))
# Get the original colormap
for h in ['lh', 'rh']:
data = self.data_dict[h]
if data is not None:
table = data["orig_ctable"].copy()
# Add transparency if needed
if transparent:
n_colors = table.shape[0]
n_colors2 = int(n_colors / 2)
table[:n_colors2, -1] = np.linspace(0, 255, n_colors2)
table[n_colors2:, -1] = 255 * np.ones(n_colors - n_colors2)
# Scale the colormap
table_new = table.copy()
n_colors = table.shape[0]
n_colors2 = int(n_colors / 2)
# Index of fmid in new colorbar
fmid_idx = int(np.round(n_colors * ((fmid - fmin) /
(fmax - fmin))) - 1)
# Go through channels
for i in range(4):
part1 = np.interp(np.linspace(0, n_colors2 - 1, fmid_idx + 1),
np.arange(n_colors),
table[:, i])
table_new[:fmid_idx + 1, i] = part1
part2 = np.interp(np.linspace(n_colors2, n_colors - 1,
n_colors - fmid_idx - 1),
np.arange(n_colors),
table[:, i])
table_new[fmid_idx + 1:, i] = part2
views = self._toggle_render(False)
# Use the new colormap
for hemi in ['lh', 'rh']:
data = self.data_dict[hemi]
if data is not None:
for surf in data['surfaces']:
cmap = surf.module_manager.scalar_lut_manager
cmap.lut.table = table_new
cmap.data_range = np.array([fmin, fmax])
# Update the data properties
data["fmin"], data['fmid'], data['fmax'] = fmin, fmid, fmax
data["transparent"] = transparent
self._toggle_render(True, views)
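    # Illustrative usage sketch (assumes ``brain`` already has a data overlay
    # added via add_data); rescales the colormap in place:
    #
    #     brain.scale_data_colormap(fmin=2., fmid=5., fmax=12., transparent=True)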
def set_data_time_index(self, time_idx, interpolation='quadratic'):
"""Set the data time index to show
Parameters
----------
time_idx : int | float
Time index. Non-integer values will be displayed using
interpolation between samples.
interpolation : str
Interpolation method (``scipy.interpolate.interp1d`` parameter,
one of 'linear' | 'nearest' | 'zero' | 'slinear' | 'quadratic' |
'cubic', default 'quadratic'). Interpolation is only used for
non-integer indexes.
"""
if self.n_times is None:
raise RuntimeError('cannot set time index with no time data')
if time_idx < 0 or time_idx >= self.n_times:
raise ValueError("time index out of range")
views = self._toggle_render(False)
for hemi in ['lh', 'rh']:
data = self.data_dict[hemi]
if data is not None:
# interpolation
if isinstance(time_idx, float):
times = np.arange(self.n_times)
ifunc = interp1d(times, data['array'], interpolation, 1)
plot_data = ifunc(time_idx)
else:
plot_data = data["array"][:, time_idx]
if data["smooth_mat"] is not None:
plot_data = data["smooth_mat"] * plot_data
for surf in data["surfaces"]:
surf.mlab_source.scalars = plot_data
data["time_idx"] = time_idx
# Update time label
if data["time_label"]:
if isinstance(time_idx, float):
ifunc = interp1d(times, data['time'])
time = ifunc(time_idx)
else:
time = data["time"][time_idx]
self.update_text(data["time_label"](time), "time_label")
self._toggle_render(True, views)
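    # Illustrative usage sketch (assumes ``brain`` holds a (n_vertices, n_times)
    # data array); a float index interpolates between neighbouring samples:
    #
    #     brain.set_data_time_index(10)
    #     brain.set_data_time_index(10.5, interpolation='linear')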
@property
def data_time_index(self):
"""Retrieve the currently displayed data time index
Returns
-------
time_idx : int
Current time index.
Notes
-----
        Raises a RuntimeError if the Brain instance has no data overlay.
"""
time_idx = None
for hemi in ['lh', 'rh']:
data = self.data_dict[hemi]
if data is not None:
time_idx = data["time_idx"]
return time_idx
raise RuntimeError("Brain instance has no data overlay")
@verbose
def set_data_smoothing_steps(self, smoothing_steps, verbose=None):
"""Set the number of smoothing steps
Parameters
----------
smoothing_steps : int
Number of smoothing steps
verbose : bool, str, int, or None
If not None, override default verbose level (see surfer.verbose).
"""
views = self._toggle_render(False)
for hemi in ['lh', 'rh']:
data = self.data_dict[hemi]
if data is not None:
adj_mat = utils.mesh_edges(self.geo[hemi].faces)
smooth_mat = utils.smoothing_matrix(data["vertices"],
adj_mat, smoothing_steps)
data["smooth_mat"] = smooth_mat
# Redraw
if data["array"].ndim == 1:
plot_data = data["array"]
else:
plot_data = data["array"][:, data["time_idx"]]
plot_data = data["smooth_mat"] * plot_data
for surf in data["surfaces"]:
surf.mlab_source.scalars = plot_data
# Update data properties
data["smoothing_steps"] = smoothing_steps
self._toggle_render(True, views)
def index_for_time(self, time, rounding='closest'):
"""Find the data time index closest to a specific time point
Parameters
----------
time : scalar
Time.
        rounding : 'closest' | 'up' | 'down'
How to round if the exact time point is not an index.
Returns
-------
index : int
Data time index closest to time.
"""
if self.n_times is None:
raise RuntimeError("Brain has no time axis")
times = self._times
# Check that time is in range
tmin = np.min(times)
tmax = np.max(times)
max_diff = (tmax - tmin) / (len(times) - 1) / 2
if time < tmin - max_diff or time > tmax + max_diff:
err = ("time = %s lies outside of the time axis "
"[%s, %s]" % (time, tmin, tmax))
raise ValueError(err)
if rounding == 'closest':
idx = np.argmin(np.abs(times - time))
elif rounding == 'up':
idx = np.nonzero(times >= time)[0][0]
elif rounding == 'down':
idx = np.nonzero(times <= time)[0][-1]
else:
err = "Invalid rounding parameter: %s" % repr(rounding)
raise ValueError(err)
return idx
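    # Illustrative usage sketch (assumes ``brain`` was created with a time axis,
    # here in seconds); the rounding mode picks the neighbouring sample:
    #
    #     idx = brain.index_for_time(0.17)
    #     idx_up = brain.index_for_time(0.17, rounding='up')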
def set_time(self, time):
"""Set the data time index to the time point closest to time
Parameters
----------
time : scalar
Time.
"""
idx = self.index_for_time(time)
self.set_data_time_index(idx)
def _get_colorbars(self, row, col):
shape = self.brain_matrix.shape
row = row % shape[0]
col = col % shape[1]
ind = np.ravel_multi_index((row, col), self.brain_matrix.shape)
colorbars = []
h = self._brain_list[ind]['hemi']
if self.data_dict[h] is not None and 'colorbars' in self.data_dict[h]:
colorbars.append(self.data_dict[h]['colorbars'][row])
if len(self.morphometry_list) > 0:
colorbars.append(self.morphometry_list[ind]['colorbar'])
if len(self.contour_list) > 0:
colorbars.append(self.contour_list[ind]['colorbar'])
if len(self.overlays_dict) > 0:
for name, obj in self.overlays_dict.items():
for bar in ["pos_bar", "neg_bar"]:
try: # deal with positive overlays
this_ind = min(len(obj) - 1, ind)
colorbars.append(getattr(obj[this_ind], bar))
except AttributeError:
pass
return colorbars
def _colorbar_visibility(self, visible, row, col):
for cb in self._get_colorbars(row, col):
if cb is not None:
cb.visible = visible
def show_colorbar(self, row=-1, col=-1):
"""Show colorbar(s) for given plot
Parameters
----------
row : int
Row index of which brain to use
col : int
Column index of which brain to use
"""
self._colorbar_visibility(True, row, col)
def hide_colorbar(self, row=-1, col=-1):
"""Hide colorbar(s) for given plot
Parameters
----------
row : int
Row index of which brain to use
col : int
Column index of which brain to use
"""
self._colorbar_visibility(False, row, col)
def close(self):
"""Close all figures and cleanup data structure."""
for ri, ff in enumerate(self._figures):
for ci, f in enumerate(ff):
if f is not None:
mlab.close(f)
self._figures[ri][ci] = None
# should we tear down other variables?
if self._v is not None:
self._v.dispose()
self._v = None
def __del__(self):
if hasattr(self, '_v') and self._v is not None:
self._v.dispose()
self._v = None
###########################################################################
# SAVING OUTPUT
def save_single_image(self, filename, row=-1, col=-1):
"""Save view from one panel to disk
Only mayavi image types are supported:
        (png jpg bmp tiff ps eps pdf rib oogl iv vrml obj)
Parameters
----------
filename: string
path to new image file
row : int
row index of the brain to use
col : int
column index of the brain to use
Due to limitations in TraitsUI, if multiple views or hemi='split'
is used, there is no guarantee painting of the windows will
complete before control is returned to the command line. Thus
we strongly recommend using only one figure window (which uses
a Mayavi figure to plot instead of TraitsUI) if you intend to
script plotting commands.
"""
brain = self.brain_matrix[row, col]
ftype = filename[filename.rfind('.') + 1:]
good_ftypes = ['png', 'jpg', 'bmp', 'tiff', 'ps',
'eps', 'pdf', 'rib', 'oogl', 'iv', 'vrml', 'obj']
if ftype not in good_ftypes:
raise ValueError("Supported image types are %s"
% " ".join(good_ftypes))
mlab.draw(brain._f)
mlab.savefig(filename, figure=brain._f)
def save_image(self, filename):
"""Save view from all panels to disk
Only mayavi image types are supported:
        (png jpg bmp tiff ps eps pdf rib oogl iv vrml obj)
Parameters
----------
filename: string
path to new image file
Due to limitations in TraitsUI, if multiple views or hemi='split'
is used, there is no guarantee painting of the windows will
complete before control is returned to the command line. Thus
we strongly recommend using only one figure window (which uses
a Mayavi figure to plot instead of TraitsUI) if you intend to
script plotting commands.
"""
misc.imsave(filename, self.screenshot())
def screenshot(self, mode='rgb', antialiased=False):
"""Generate a screenshot of current view
Wraps to mlab.screenshot for ease of use.
Parameters
----------
mode: string
Either 'rgb' or 'rgba' for values to return
antialiased: bool
Antialias the image (see mlab.screenshot() for details)
row : int
row index of the brain to use
col : int
column index of the brain to use
Returns
-------
screenshot: array
Image pixel values
Notes
-----
Due to limitations in TraitsUI, if multiple views or hemi='split'
is used, there is no guarantee painting of the windows will
complete before control is returned to the command line. Thus
we strongly recommend using only one figure window (which uses
a Mayavi figure to plot instead of TraitsUI) if you intend to
script plotting commands.
"""
row = []
for ri in range(self.brain_matrix.shape[0]):
col = []
n_col = 2 if self._hemi == 'split' else 1
for ci in range(n_col):
col += [self.screenshot_single(mode, antialiased,
ri, ci)]
row += [np.concatenate(col, axis=1)]
data = np.concatenate(row, axis=0)
return data
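    # Illustrative usage sketch (assumes ``brain`` exists and matplotlib is
    # installed); the returned array behaves like any image array:
    #
    #     img = brain.screenshot('rgb')
    #     import matplotlib.pyplot as plt
    #     plt.imshow(img); plt.axis('off')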
def screenshot_single(self, mode='rgb', antialiased=False, row=-1, col=-1):
"""Generate a screenshot of current view from a single panel
Wraps to mlab.screenshot for ease of use.
Parameters
----------
mode: string
Either 'rgb' or 'rgba' for values to return
antialiased: bool
Antialias the image (see mlab.screenshot() for details)
row : int
row index of the brain to use
col : int
column index of the brain to use
Returns
-------
screenshot: array
Image pixel values
Notes
-----
Due to limitations in TraitsUI, if multiple views or hemi='split'
is used, there is no guarantee painting of the windows will
complete before control is returned to the command line. Thus
we strongly recommend using only one figure window (which uses
a Mayavi figure to plot instead of TraitsUI) if you intend to
script plotting commands.
"""
brain = self.brain_matrix[row, col]
return mlab.screenshot(brain._f, mode, antialiased)
def save_imageset(self, prefix, views, filetype='png', colorbar='auto',
row=-1, col=-1):
"""Convenience wrapper for save_image
Files created are prefix+'_$view'+filetype
Parameters
----------
prefix: string | None
filename prefix for image to be created. If None, a list of
arrays representing images is returned (not saved to disk).
views: list
desired views for images
filetype: string
image type
colorbar: 'auto' | int | list of int | None
For 'auto', the colorbar is shown in the middle view (default).
For int or list of int, the colorbar is shown in the specified
views. For ``None``, no colorbar is shown.
row : int
row index of the brain to use
col : int
column index of the brain to use
Returns
-------
images_written: list
all filenames written
"""
if isinstance(views, string_types):
raise ValueError("Views must be a non-string sequence"
"Use show_view & save_image for a single view")
if colorbar == 'auto':
colorbar = [len(views) // 2]
elif isinstance(colorbar, int):
colorbar = [colorbar]
images_written = []
for iview, view in enumerate(views):
try:
if colorbar is not None and iview in colorbar:
self.show_colorbar(row, col)
else:
self.hide_colorbar(row, col)
self.show_view(view, row=row, col=col)
if prefix is not None:
fname = "%s_%s.%s" % (prefix, view, filetype)
images_written.append(fname)
self.save_single_image(fname, row, col)
else:
images_written.append(self.screenshot_single(row=row,
col=col))
except ValueError:
print("Skipping %s: not in view dict" % view)
return images_written
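    # Illustrative usage sketch (assumes ``brain`` exists; the prefix is made
    # up); one file per view is written, e.g. shot_lat.png and shot_med.png:
    #
    #     brain.save_imageset('shot', ['lat', 'med', 'ven'], filetype='png',
    #                         colorbar=[1])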
def save_image_sequence(self, time_idx, fname_pattern, use_abs_idx=True,
row=-1, col=-1, montage='single', border_size=15,
colorbar='auto', interpolation='quadratic'):
"""Save a temporal image sequence
The files saved are named "fname_pattern % (pos)" where "pos" is a
relative or absolute index (controlled by "use_abs_idx")
Parameters
----------
time_idx : array-like
Time indices to save. Non-integer values will be displayed using
interpolation between samples.
fname_pattern : str
Filename pattern, e.g. 'movie-frame_%0.4d.png'.
use_abs_idx : boolean
If True the indices given by "time_idx" are used in the filename
if False the index in the filename starts at zero and is
incremented by one for each image (Default: True).
row : int
Row index of the brain to use.
col : int
Column index of the brain to use.
montage: 'current' | 'single' | list
Views to include in the images: 'current' uses the currently
displayed image; 'single' (default) uses a single view, specified
by the ``row`` and ``col`` parameters; a 1 or 2 dimensional list
can be used to specify a complete montage. Examples:
            ``['lat', 'med']`` lateral and medial views ordered horizontally;
``[['fro'], ['ven']]`` frontal and ventral views ordered
vertically.
border_size: int
Size of image border (more or less space between images).
colorbar: 'auto' | int | list of int | None
For 'auto', the colorbar is shown in the middle view (default).
For int or list of int, the colorbar is shown in the specified
views. For ``None``, no colorbar is shown.
interpolation : str
Interpolation method (``scipy.interpolate.interp1d`` parameter,
one of 'linear' | 'nearest' | 'zero' | 'slinear' | 'quadratic' |
'cubic', default 'quadratic'). Interpolation is only used for
non-integer indexes.
Returns
-------
images_written: list
all filenames written
"""
current_time_idx = self.data_time_index
images_written = list()
rel_pos = 0
for idx in time_idx:
self.set_data_time_index(idx, interpolation)
fname = fname_pattern % (idx if use_abs_idx else rel_pos)
if montage == 'single':
self.save_single_image(fname, row, col)
elif montage == 'current':
self.save_image(fname)
else:
self.save_montage(fname, montage, 'h', border_size, colorbar,
row, col)
images_written.append(fname)
rel_pos += 1
# Restore original time index
self.set_data_time_index(current_time_idx)
return images_written
def save_montage(self, filename, order=['lat', 'ven', 'med'],
orientation='h', border_size=15, colorbar='auto',
row=-1, col=-1):
"""Create a montage from a given order of images
Parameters
----------
filename: string | None
path to final image. If None, the image will not be saved.
order: list
list of views: order of views to build montage (default ['lat',
'ven', 'med']; nested list of views to specify views in a
2-dimensional grid (e.g, [['lat', 'ven'], ['med', 'fro']])
orientation: {'h' | 'v'}
            montage image orientation (horizontal or vertical alignment; only
applies if ``order`` is a flat list)
border_size: int
Size of image border (more or less space between images)
colorbar: 'auto' | int | list of int | None
For 'auto', the colorbar is shown in the middle view (default).
For int or list of int, the colorbar is shown in the specified
views. For ``None``, no colorbar is shown.
row : int
row index of the brain to use
col : int
column index of the brain to use
Returns
-------
out : array
            The montage image, usable with matplotlib.imshow().
"""
# find flat list of views and nested list of view indexes
assert orientation in ['h', 'v']
if isinstance(order, (str, dict)):
views = [order]
elif all(isinstance(x, (str, dict)) for x in order):
views = order
else:
views = []
orientation = []
for row_order in order:
if isinstance(row_order, (str, dict)):
orientation.append([len(views)])
views.append(row_order)
else:
orientation.append([])
for view in row_order:
orientation[-1].append(len(views))
views.append(view)
if colorbar == 'auto':
colorbar = [len(views) // 2]
elif isinstance(colorbar, int):
colorbar = [colorbar]
brain = self.brain_matrix[row, col]
# store current view + colorbar visibility
current_view = mlab.view(figure=brain._f)
colorbars = self._get_colorbars(row, col)
colorbars_visibility = dict()
for cb in colorbars:
if cb is not None:
colorbars_visibility[cb] = cb.visible
images = self.save_imageset(None, views, colorbar=colorbar, row=row,
col=col)
out = make_montage(filename, images, orientation, colorbar,
border_size)
# get back original view and colorbars
mlab.view(*current_view, figure=brain._f)
for cb in colorbars:
if cb is not None:
cb.visible = colorbars_visibility[cb]
return out
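    # Illustrative usage sketch (assumes ``brain`` exists; the filename is made
    # up); a nested order builds a 2 x 2 grid of views:
    #
    #     brain.save_montage('montage.png',
    #                        order=[['lat', 'med'], ['dor', 'ven']],
    #                        colorbar=None)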
def save_movie(self, fname, time_dilation=4., tmin=None, tmax=None,
framerate=24, interpolation='quadratic', codec='mpeg4',
bitrate='1M'):
"""Save a movie (for data with a time axis)
.. Warning::
This method assumes that time is specified in seconds when adding
data. If time is specified in milliseconds this will result in
movies 1000 times longer than expected.
Parameters
----------
fname : str
Path at which to save the movie.
time_dilation : float
Factor by which to stretch time (default 4). For example, an epoch
from -100 to 600 ms lasts 700 ms. With ``time_dilation=4`` this
would result in a 2.8 s long movie.
tmin : float
First time point to include (default: all data).
tmax : float
Last time point to include (default: all data).
framerate : float
Framerate of the movie (frames per second, default 24).
interpolation : str
Interpolation method (``scipy.interpolate.interp1d`` parameter,
one of 'linear' | 'nearest' | 'zero' | 'slinear' | 'quadratic' |
'cubic', default 'quadratic').
codec : str
Codec to use with ffmpeg (default 'mpeg4').
bitrate : str | float
Bitrate to use to encode movie. Can be specified as number (e.g.
64000) or string (e.g. '64k'). Default value is 1M
Notes
-----
This method requires FFmpeg to be installed in the system PATH. FFmpeg
is free and can be obtained from `here
<http://ffmpeg.org/download.html>`_.
"""
assert_ffmpeg_is_available()
if tmin is None:
tmin = self._times[0]
elif tmin < self._times[0]:
raise ValueError("tmin=%r is smaller than the first time point "
"(%r)" % (tmin, self._times[0]))
if tmax is None:
tmax = self._times[-1]
        elif tmax > self._times[-1]:
raise ValueError("tmax=%r is greater than the latest time point "
"(%r)" % (tmax, self._times[-1]))
# find indexes at which to create frames
tstep = 1. / (framerate * time_dilation)
if np.allclose((tmax - tmin) % tstep, 0):
tstop = tmax + tstep / 2.
else:
tstop = tmax
times = np.arange(tmin, tstop, tstep)
interp_func = interp1d(self._times, np.arange(self.n_times))
time_idx = interp_func(times)
n_times = len(time_idx)
if n_times == 0:
raise ValueError("No time points selected")
logger.debug("Save movie for time points/samples\n%s\n%s"
% (times, time_idx))
tempdir = mkdtemp()
frame_pattern = 'frame%%0%id.png' % (np.floor(np.log10(n_times)) + 1)
fname_pattern = os.path.join(tempdir, frame_pattern)
self.save_image_sequence(time_idx, fname_pattern, False, -1, -1,
'current', interpolation=interpolation)
ffmpeg(fname, fname_pattern, framerate, codec=codec, bitrate=bitrate)
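    # Illustrative usage sketch (assumes ``brain`` has time-resolved data added
    # with times in seconds and ffmpeg available on the PATH; the filename is
    # made up):
    #
    #     brain.save_movie('stc_movie.mp4', time_dilation=8., framerate=25)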
def animate(self, views, n_steps=180., fname=None, use_cache=False,
row=-1, col=-1):
"""Animate a rotation.
Currently only rotations through the axial plane are allowed.
Parameters
----------
views: sequence
views to animate through
n_steps: float
number of steps to take in between
fname: string
If not None, it saves the animation as a movie.
fname should end in '.avi' as only the AVI format is supported
use_cache: bool
Use previously generated images in ./.tmp/
row : int
Row index of the brain to use
col : int
Column index of the brain to use
"""
brain = self.brain_matrix[row, col]
        gviews = list(map(brain._xfm_view, views))
allowed = ('lateral', 'caudal', 'medial', 'rostral')
if not len([v for v in gviews if v in allowed]) == len(gviews):
raise ValueError('Animate through %s views.' % ' '.join(allowed))
if fname is not None:
if not fname.endswith('.avi'):
raise ValueError('Can only output to AVI currently.')
tmp_dir = './.tmp'
tmp_fname = pjoin(tmp_dir, '%05d.png')
if not os.path.isdir(tmp_dir):
os.mkdir(tmp_dir)
for i, beg in enumerate(gviews):
try:
end = gviews[i + 1]
dv, dr = brain._min_diff(beg, end)
dv /= np.array((n_steps))
dr /= np.array((n_steps))
brain.show_view(beg)
for i in range(int(n_steps)):
brain._f.scene.camera.orthogonalize_view_up()
brain._f.scene.camera.azimuth(dv[0])
brain._f.scene.camera.elevation(dv[1])
brain._f.scene.renderer.reset_camera_clipping_range()
_force_render([[brain._f]], self._window_backend)
if fname is not None:
if not (os.path.isfile(tmp_fname % i) and use_cache):
self.save_single_image(tmp_fname % i, row, col)
except IndexError:
pass
if fname is not None:
fps = 10
# we'll probably want some config options here
enc_cmd = " ".join(["mencoder",
"-ovc lavc",
"-mf fps=%d" % fps,
"mf://%s" % tmp_fname,
"-of avi",
"-lavcopts vcodec=mjpeg",
"-ofps %d" % fps,
"-noskip",
"-o %s" % fname])
ret = os.system(enc_cmd)
if ret:
                print("\n\nError occurred when exporting movie\n\n")
class _Hemisphere(object):
"""Object for visualizing one hemisphere with mlab"""
def __init__(self, subject_id, hemi, surf, figure, geo, curv, title,
cortex, subjects_dir, bg_color, offset, backend):
if hemi not in ['lh', 'rh']:
raise ValueError('hemi must be either "lh" or "rh"')
# Set the identifying info
self.subject_id = subject_id
self.hemi = hemi
self.subjects_dir = subjects_dir
self.viewdict = viewdicts[hemi]
self.surf = surf
self._f = figure
self._bg_color = bg_color
self._backend = backend
        # mlab pipeline mesh and surface for geometry
self._geo = geo
if curv:
curv_data = self._geo.bin_curv
meshargs = dict(scalars=curv_data)
colormap, vmin, vmax, reverse = self._get_geo_colors(cortex)
kwargs = dict(colormap=colormap, vmin=vmin, vmax=vmax)
else:
curv_data = None
meshargs = dict()
kwargs = dict(color=(.5, .5, .5))
meshargs['figure'] = self._f
x, y, z, f = self._geo.x, self._geo.y, self._geo.z, self._geo.faces
self._geo_mesh = mlab.pipeline.triangular_mesh_source(x, y, z, f,
**meshargs)
# add surface normals
self._geo_mesh.data.point_data.normals = self._geo.nn
self._geo_mesh.data.cell_data.normals = None
self._geo_surf = mlab.pipeline.surface(self._geo_mesh,
figure=self._f, reset_zoom=True,
**kwargs)
if curv and reverse:
curv_bar = mlab.scalarbar(self._geo_surf)
curv_bar.reverse_lut = True
curv_bar.visible = False
def show_view(self, view=None, roll=None, distance=None):
"""Orient camera to display view"""
if isinstance(view, string_types):
try:
vd = self._xfm_view(view, 'd')
view = dict(azimuth=vd['v'][0], elevation=vd['v'][1])
roll = vd['r']
except ValueError as v:
print(v)
raise
_force_render(self._f, self._backend)
if view is not None:
view['reset_roll'] = True
view['figure'] = self._f
view['distance'] = distance
# DO NOT set focal point, can screw up non-centered brains
# view['focalpoint'] = (0.0, 0.0, 0.0)
mlab.view(**view)
if roll is not None:
mlab.roll(roll=roll, figure=self._f)
_force_render(self._f, self._backend)
view = mlab.view(figure=self._f)
roll = mlab.roll(figure=self._f)
return view, roll
def _xfm_view(self, view, out='s'):
"""Normalize a given string to available view
Parameters
----------
view: string
view which may match leading substring of available views
        out: {'s' | 'd'}
            's' to return string, 'd' to return dict
        Returns
        -------
        good: string
            matching view string
"""
if view not in self.viewdict:
good_view = [k for k in self.viewdict if view == k[:len(view)]]
if len(good_view) == 0:
raise ValueError('No views exist with this substring')
if len(good_view) > 1:
raise ValueError("Multiple views exist with this substring."
"Try a longer substring")
view = good_view[0]
if out == 'd':
return self.viewdict[view]
else:
return view
def _min_diff(self, beg, end):
"""Determine minimum "camera distance" between two views.
Parameters
----------
beg: string
origin anatomical view
end: string
destination anatomical view
Returns
-------
diffs: tuple
(min view "distance", min roll "distance")
"""
beg = self._xfm_view(beg)
end = self._xfm_view(end)
if beg == end:
dv = [360., 0.]
dr = 0
else:
end_d = self._xfm_view(end, 'd')
beg_d = self._xfm_view(beg, 'd')
dv = []
for b, e in zip(beg_d['v'], end_d['v']):
diff = e - b
# to minimize the rotation we need -180 <= diff <= 180
if diff > 180:
dv.append(diff - 360)
elif diff < -180:
dv.append(diff + 360)
else:
dv.append(diff)
dr = np.array(end_d['r']) - np.array(beg_d['r'])
return (np.array(dv), dr)
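    # Worked example of the wrap-around handling above (illustrative numbers):
    # rotating from an azimuth of 170 deg to -170 deg gives a raw diff of
    # -340, which is mapped to -340 + 360 = +20, so the camera takes the
    # short path instead of sweeping almost all the way around.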
def add_overlay(self, old):
"""Add an overlay to the overlay dict from a file or array"""
surf = OverlayDisplay(old, figure=self._f)
for bar in ["pos_bar", "neg_bar"]:
try:
self._format_cbar_text(getattr(surf, bar))
except AttributeError:
pass
return surf
@verbose
def add_data(self, array, mlab_plot, vertices, smooth_mat, min, max,
thresh, lut, colormap, alpha, time, time_label, colorbar):
"""Add data to the brain"""
# Calculate initial data to plot
if array.ndim == 1:
array_plot = array
elif array.ndim == 2:
array_plot = array[:, 0]
else:
raise ValueError("data has to be 1D or 2D")
# Set up the visualization pipeline
mesh = mlab.pipeline.triangular_mesh_source(self._geo.x,
self._geo.y,
self._geo.z,
self._geo.faces,
scalars=mlab_plot,
figure=self._f)
mesh.data.point_data.normals = self._geo.nn
mesh.data.cell_data.normals = None
if thresh is not None:
if array_plot.min() >= thresh:
warn("Data min is greater than threshold.")
else:
mesh = mlab.pipeline.threshold(mesh, low=thresh)
surf = mlab.pipeline.surface(mesh, colormap=colormap,
vmin=min, vmax=max,
opacity=float(alpha), figure=self._f)
# apply look up table if given
if lut is not None:
surf.module_manager.scalar_lut_manager.lut.table = lut
# Get the original colormap table
orig_ctable = \
surf.module_manager.scalar_lut_manager.lut.table.to_array().copy()
# Get the colorbar
if colorbar:
bar = mlab.scalarbar(surf)
self._format_cbar_text(bar)
bar.scalar_bar_representation.position2 = .8, 0.09
else:
bar = None
return surf, orig_ctable, bar
def add_annotation(self, annot, ids, cmap):
"""Add an annotation file"""
# Create an mlab surface to visualize the annot
mesh = mlab.pipeline.triangular_mesh_source(self._geo.x,
self._geo.y,
self._geo.z,
self._geo.faces,
scalars=ids,
figure=self._f)
mesh.data.point_data.normals = self._geo.nn
mesh.data.cell_data.normals = None
surf = mlab.pipeline.surface(mesh, name=annot, figure=self._f)
# Set the color table
surf.module_manager.scalar_lut_manager.lut.table = cmap
# Set the brain attributes
annot = dict(surface=surf, name=annot, colormap=cmap)
return annot
def add_label(self, label, label_name, color, alpha):
"""Add an ROI label to the image"""
mesh = mlab.pipeline.triangular_mesh_source(self._geo.x,
self._geo.y,
self._geo.z,
self._geo.faces,
scalars=label,
figure=self._f)
mesh.data.point_data.normals = self._geo.nn
mesh.data.cell_data.normals = None
surf = mlab.pipeline.surface(mesh, name=label_name, figure=self._f)
color = colorConverter.to_rgba(color, alpha)
cmap = np.array([(0, 0, 0, 0,), color]) * 255
surf.module_manager.scalar_lut_manager.lut.table = cmap
return surf
def add_morphometry(self, morph_data, colormap, measure,
min, max, colorbar):
"""Add a morphometry overlay to the image"""
mesh = mlab.pipeline.triangular_mesh_source(self._geo.x,
self._geo.y,
self._geo.z,
self._geo.faces,
scalars=morph_data,
figure=self._f)
mesh.data.point_data.normals = self._geo.nn
mesh.data.cell_data.normals = None
surf = mlab.pipeline.surface(mesh, colormap=colormap,
vmin=min, vmax=max,
name=measure, figure=self._f)
# Get the colorbar
if colorbar:
bar = mlab.scalarbar(surf)
self._format_cbar_text(bar)
bar.scalar_bar_representation.position2 = .8, 0.09
else:
bar = None
        # Fill in the morphometry dict
return dict(surface=surf, colorbar=bar, measure=measure)
def add_foci(self, foci_coords, scale_factor, color, alpha, name):
"""Add spherical foci, possibly mapping to displayed surf"""
# Create the visualization
points = mlab.points3d(foci_coords[:, 0],
foci_coords[:, 1],
foci_coords[:, 2],
np.ones(foci_coords.shape[0]),
scale_factor=(10. * scale_factor),
color=color, opacity=alpha, name=name,
figure=self._f)
return points
def add_contour_overlay(self, scalar_data, min=None, max=None,
n_contours=7, line_width=1.5, lut=None,
colorbar=True):
"""Add a topographic contour overlay of the positive data"""
# Set up the pipeline
mesh = mlab.pipeline.triangular_mesh_source(self._geo.x, self._geo.y,
self._geo.z,
self._geo.faces,
scalars=scalar_data,
figure=self._f)
mesh.data.point_data.normals = self._geo.nn
mesh.data.cell_data.normals = None
thresh = mlab.pipeline.threshold(mesh, low=min)
surf = mlab.pipeline.contour_surface(thresh, contours=n_contours,
line_width=line_width)
if lut is not None:
surf.module_manager.scalar_lut_manager.lut.table = lut
# Set the colorbar and range correctly
bar = mlab.scalarbar(surf,
nb_colors=n_contours,
nb_labels=n_contours + 1)
bar.data_range = min, max
self._format_cbar_text(bar)
bar.scalar_bar_representation.position2 = .8, 0.09
if not colorbar:
bar.visible = False
# Set up a dict attribute with pointers at important things
return dict(surface=surf, colorbar=bar)
def add_text(self, x, y, text, name, color=None, opacity=1.0):
""" Add a text to the visualization"""
return mlab.text(x, y, text, name=name, color=color,
opacity=opacity, figure=self._f)
def _orient_lights(self):
"""Set lights to come from same direction relative to brain."""
if self.hemi == "rh":
if self._f.scene is not None and \
self._f.scene.light_manager is not None:
for light in self._f.scene.light_manager.lights:
light.azimuth *= -1
def _get_geo_colors(self, cortex):
"""Return an mlab colormap name, vmin, and vmax for binary curvature.
Parameters
----------
cortex : {classic, high_contrast, low_contrast, bone, tuple}
The name of one of the preset cortex styles, or a tuple
            with four entries as described in the return values.
Returns
-------
colormap : string
mlab colormap name
vmin : float
curv colormap minimum
vmax : float
curv colormap maximum
reverse : boolean
boolean indicating whether the colormap should be reversed
"""
colormap_map = dict(classic=("Greys", -1, 2, False),
high_contrast=("Greys", -.1, 1.3, False),
low_contrast=("Greys", -5, 5, False),
bone=("bone", -.2, 2, True))
if cortex in colormap_map:
color_data = colormap_map[cortex]
elif cortex in lut_manager.lut_mode_list():
color_data = cortex, -1, 2, False
else:
color_data = cortex
return color_data
def _format_cbar_text(self, cbar):
bg_color = self._bg_color
if bg_color is None or sum(bg_color) < 2:
text_color = (1., 1., 1.)
else:
text_color = (0., 0., 0.)
cbar.label_text_property.color = text_color
class OverlayData(object):
"""Encapsulation of statistical neuroimaging overlay viz data"""
def __init__(self, scalar_data, geo, min, max, sign):
if scalar_data.min() >= 0:
sign = "pos"
elif scalar_data.max() <= 0:
sign = "neg"
self.geo = geo
if sign in ["abs", "pos"]:
# Figure out the correct threshold to avoid TraitErrors
# This seems like not the cleanest way to do this
pos_max = np.max((0.0, np.max(scalar_data)))
if pos_max < min:
thresh_low = pos_max
else:
thresh_low = min
self.pos_lims = [thresh_low, min, max]
else:
self.pos_lims = None
if sign in ["abs", "neg"]:
# Figure out the correct threshold to avoid TraitErrors
# This seems even less clean due to negative convolutedness
neg_min = np.min((0.0, np.min(scalar_data)))
if neg_min > -min:
thresh_up = neg_min
else:
thresh_up = -min
self.neg_lims = [thresh_up, -max, -min]
else:
self.neg_lims = None
# Byte swap copy; due to mayavi bug
self.mlab_data = _prepare_data(scalar_data)
class OverlayDisplay():
"""Encapsulation of overlay viz plotting"""
def __init__(self, ol, figure):
args = [ol.geo.x, ol.geo.y, ol.geo.z, ol.geo.faces]
kwargs = dict(scalars=ol.mlab_data, figure=figure)
if ol.pos_lims is not None:
pos_mesh = mlab.pipeline.triangular_mesh_source(*args, **kwargs)
pos_mesh.data.point_data.normals = ol.geo.nn
pos_mesh.data.cell_data.normals = None
pos_thresh = mlab.pipeline.threshold(pos_mesh, low=ol.pos_lims[0])
self.pos = mlab.pipeline.surface(pos_thresh, colormap="YlOrRd",
vmin=ol.pos_lims[1],
vmax=ol.pos_lims[2],
figure=figure)
self.pos_bar = mlab.scalarbar(self.pos, nb_labels=5)
self.pos_bar.reverse_lut = True
else:
self.pos = None
if ol.neg_lims is not None:
neg_mesh = mlab.pipeline.triangular_mesh_source(*args, **kwargs)
neg_mesh.data.point_data.normals = ol.geo.nn
neg_mesh.data.cell_data.normals = None
neg_thresh = mlab.pipeline.threshold(neg_mesh,
up=ol.neg_lims[0])
self.neg = mlab.pipeline.surface(neg_thresh, colormap="PuBu",
vmin=ol.neg_lims[1],
vmax=ol.neg_lims[2],
figure=figure)
self.neg_bar = mlab.scalarbar(self.neg, nb_labels=5)
else:
self.neg = None
self._format_colorbar()
def remove(self):
if self.pos is not None:
self.pos.remove()
self.pos_bar.visible = False
if self.neg is not None:
self.neg.remove()
self.neg_bar.visible = False
def _format_colorbar(self):
if self.pos is not None:
self.pos_bar.scalar_bar_representation.position = (0.53, 0.01)
self.pos_bar.scalar_bar_representation.position2 = (0.42, 0.09)
if self.neg is not None:
self.neg_bar.scalar_bar_representation.position = (0.05, 0.01)
self.neg_bar.scalar_bar_representation.position2 = (0.42, 0.09)
class TimeViewer(HasTraits):
"""TimeViewer object providing a GUI for visualizing time series
Useful for visualizing M/EEG inverse solutions on Brain object(s).
Parameters
----------
brain : Brain (or list of Brain)
brain(s) to control
"""
    # Nested import of traitsui for setup.py without X server
from traitsui.api import (View, Item, VSplit, HSplit, Group)
min_time = Int(0)
max_time = Int(1E9)
current_time = Range(low="min_time", high="max_time", value=0)
# colormap: only update when user presses Enter
fmax = Float(enter_set=True, auto_set=False)
fmid = Float(enter_set=True, auto_set=False)
fmin = Float(enter_set=True, auto_set=False)
transparent = Bool(True)
smoothing_steps = Int(20, enter_set=True, auto_set=False,
desc="number of smoothing steps. Use -1 for"
"automatic number of steps")
orientation = Enum("lateral", "medial", "rostral", "caudal",
"dorsal", "ventral", "frontal", "parietal")
# GUI layout
view = View(VSplit(Item(name="current_time"),
Group(HSplit(Item(name="fmin"),
Item(name="fmid"),
Item(name="fmax"),
Item(name="transparent")
),
label="Color scale",
show_border=True),
Item(name="smoothing_steps"),
Item(name="orientation")
)
)
def __init__(self, brain):
super(TimeViewer, self).__init__()
if isinstance(brain, (list, tuple)):
self.brains = brain
else:
self.brains = [brain]
# Initialize GUI with values from first brain
props = self.brains[0].get_data_properties()
self._disable_updates = True
self.max_time = len(props["time"]) - 1
self.current_time = props["time_idx"]
self.fmin = props["fmin"]
self.fmid = props["fmid"]
self.fmax = props["fmax"]
self.transparent = props["transparent"]
if props["smoothing_steps"] is None:
self.smoothing_steps = -1
else:
self.smoothing_steps = props["smoothing_steps"]
self._disable_updates = False
# Make sure all brains have the same time points
for brain in self.brains[1:]:
this_props = brain.get_data_properties()
if not np.all(props["time"] == this_props["time"]):
raise ValueError("all brains must have the same time"
"points")
# Show GUI
self.configure_traits()
@on_trait_change("smoothing_steps")
def set_smoothing_steps(self):
""" Change number of smooting steps
"""
if self._disable_updates:
return
smoothing_steps = self.smoothing_steps
if smoothing_steps < 0:
smoothing_steps = None
for brain in self.brains:
            brain.set_data_smoothing_steps(smoothing_steps)
@on_trait_change("orientation")
def set_orientation(self):
""" Set the orientation
"""
if self._disable_updates:
return
for brain in self.brains:
brain.show_view(view=self.orientation)
@on_trait_change("current_time")
def set_time_point(self):
""" Set the time point shown
"""
if self._disable_updates:
return
for brain in self.brains:
brain.set_data_time_index(self.current_time)
@on_trait_change("fmin, fmid, fmax, transparent")
def scale_colormap(self):
""" Scale the colormap
"""
if self._disable_updates:
return
for brain in self.brains:
brain.scale_data_colormap(self.fmin, self.fmid, self.fmax,
self.transparent)
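    # Illustrative usage sketch (assumes one or more Brain instances with
    # time-resolved data already added); constructing the viewer opens the GUI:
    #
    #     viewer = TimeViewer(brain)
    #     viewer = TimeViewer([brain_lh, brain_rh])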
| bsd-3-clause | 4,961,174,037,160,292,000 | 37.789307 | 79 | 0.518907 | false |
themotleyfool/django-offline-messages | offline_messages/migrations/0001_initial.py | 1 | 1680 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='OfflineExpiration',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('datetime', models.DateTimeField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OfflineMessage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('level', models.IntegerField(default=20)),
('message', models.TextField()),
('created', models.DateTimeField(auto_now_add=True)),
('read', models.BooleanField(default=False)),
('object_id', models.PositiveIntegerField(null=True, blank=True)),
('meta', jsonfield.fields.JSONField(default={}, null=True, blank=True)),
('content_type', models.ForeignKey(blank=True, to='contenttypes.ContentType', null=True)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
]
| bsd-3-clause | 4,627,862,076,759,330,000 | 36.333333 | 114 | 0.558333 | false |
googleapis/googleapis-gen | google/cloud/securitycenter/v1/securitycenter-v1-py/google/cloud/securitycenter_v1/services/security_center/pagers.py | 1 | 32304 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional
from google.cloud.securitycenter_v1.types import notification_config
from google.cloud.securitycenter_v1.types import securitycenter_service
from google.cloud.securitycenter_v1.types import source
class GroupAssetsPager:
"""A pager for iterating through ``group_assets`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.GroupAssetsResponse` object, and
provides an ``__iter__`` method to iterate through its
``group_by_results`` field.
If there are more pages, the ``__iter__`` method will make additional
``GroupAssets`` requests and continue to iterate
through the ``group_by_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.GroupAssetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.GroupAssetsResponse],
request: securitycenter_service.GroupAssetsRequest,
response: securitycenter_service.GroupAssetsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.GroupAssetsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.GroupAssetsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.GroupAssetsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.GroupAssetsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[securitycenter_service.GroupResult]:
for page in self.pages:
yield from page.group_by_results
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
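# Illustrative usage sketch (assumes a configured SecurityCenterClient named
# ``client`` and a made-up organization name); the pager hides page_token
# handling while iterating over GroupResult messages:
#
#     request = securitycenter_service.GroupAssetsRequest(
#         parent='organizations/123',
#         group_by='security_center_properties.resource_type')
#     for group_result in client.group_assets(request=request):
#         print(group_result.properties)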
class GroupAssetsAsyncPager:
"""A pager for iterating through ``group_assets`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.GroupAssetsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``group_by_results`` field.
If there are more pages, the ``__aiter__`` method will make additional
``GroupAssets`` requests and continue to iterate
through the ``group_by_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.GroupAssetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.GroupAssetsResponse]],
request: securitycenter_service.GroupAssetsRequest,
response: securitycenter_service.GroupAssetsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.GroupAssetsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.GroupAssetsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.GroupAssetsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.GroupAssetsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[securitycenter_service.GroupResult]:
async def async_generator():
async for page in self.pages:
for response in page.group_by_results:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
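# Illustrative usage sketch (assumes a SecurityCenterAsyncClient named
# ``client`` used inside an async function); the async pager is consumed
# with ``async for``:
#
#     pager = await client.group_assets(request=request)
#     async for group_result in pager:
#         print(group_result.properties)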
class GroupFindingsPager:
"""A pager for iterating through ``group_findings`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.GroupFindingsResponse` object, and
provides an ``__iter__`` method to iterate through its
``group_by_results`` field.
If there are more pages, the ``__iter__`` method will make additional
``GroupFindings`` requests and continue to iterate
through the ``group_by_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.GroupFindingsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.GroupFindingsResponse],
request: securitycenter_service.GroupFindingsRequest,
response: securitycenter_service.GroupFindingsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.GroupFindingsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.GroupFindingsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.GroupFindingsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.GroupFindingsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[securitycenter_service.GroupResult]:
for page in self.pages:
yield from page.group_by_results
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class GroupFindingsAsyncPager:
"""A pager for iterating through ``group_findings`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.GroupFindingsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``group_by_results`` field.
If there are more pages, the ``__aiter__`` method will make additional
``GroupFindings`` requests and continue to iterate
through the ``group_by_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.GroupFindingsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.GroupFindingsResponse]],
request: securitycenter_service.GroupFindingsRequest,
response: securitycenter_service.GroupFindingsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.GroupFindingsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.GroupFindingsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.GroupFindingsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.GroupFindingsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[securitycenter_service.GroupResult]:
async def async_generator():
async for page in self.pages:
for response in page.group_by_results:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListAssetsPager:
"""A pager for iterating through ``list_assets`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListAssetsResponse` object, and
provides an ``__iter__`` method to iterate through its
``list_assets_results`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListAssets`` requests and continue to iterate
through the ``list_assets_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListAssetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.ListAssetsResponse],
request: securitycenter_service.ListAssetsRequest,
response: securitycenter_service.ListAssetsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListAssetsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListAssetsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListAssetsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.ListAssetsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[securitycenter_service.ListAssetsResponse.ListAssetsResult]:
for page in self.pages:
yield from page.list_assets_results
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListAssetsAsyncPager:
"""A pager for iterating through ``list_assets`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListAssetsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``list_assets_results`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListAssets`` requests and continue to iterate
through the ``list_assets_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListAssetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.ListAssetsResponse]],
request: securitycenter_service.ListAssetsRequest,
response: securitycenter_service.ListAssetsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListAssetsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListAssetsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListAssetsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.ListAssetsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[securitycenter_service.ListAssetsResponse.ListAssetsResult]:
async def async_generator():
async for page in self.pages:
for response in page.list_assets_results:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListFindingsPager:
"""A pager for iterating through ``list_findings`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListFindingsResponse` object, and
provides an ``__iter__`` method to iterate through its
``list_findings_results`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListFindings`` requests and continue to iterate
through the ``list_findings_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListFindingsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.ListFindingsResponse],
request: securitycenter_service.ListFindingsRequest,
response: securitycenter_service.ListFindingsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListFindingsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListFindingsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListFindingsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.ListFindingsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[securitycenter_service.ListFindingsResponse.ListFindingsResult]:
for page in self.pages:
yield from page.list_findings_results
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListFindingsAsyncPager:
"""A pager for iterating through ``list_findings`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListFindingsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``list_findings_results`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListFindings`` requests and continue to iterate
through the ``list_findings_results`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListFindingsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.ListFindingsResponse]],
request: securitycenter_service.ListFindingsRequest,
response: securitycenter_service.ListFindingsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListFindingsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListFindingsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListFindingsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.ListFindingsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[securitycenter_service.ListFindingsResponse.ListFindingsResult]:
async def async_generator():
async for page in self.pages:
for response in page.list_findings_results:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListNotificationConfigsPager:
"""A pager for iterating through ``list_notification_configs`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse` object, and
provides an ``__iter__`` method to iterate through its
``notification_configs`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListNotificationConfigs`` requests and continue to iterate
through the ``notification_configs`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.ListNotificationConfigsResponse],
request: securitycenter_service.ListNotificationConfigsRequest,
response: securitycenter_service.ListNotificationConfigsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListNotificationConfigsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListNotificationConfigsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.ListNotificationConfigsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[notification_config.NotificationConfig]:
for page in self.pages:
yield from page.notification_configs
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListNotificationConfigsAsyncPager:
"""A pager for iterating through ``list_notification_configs`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``notification_configs`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListNotificationConfigs`` requests and continue to iterate
through the ``notification_configs`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.ListNotificationConfigsResponse]],
request: securitycenter_service.ListNotificationConfigsRequest,
response: securitycenter_service.ListNotificationConfigsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListNotificationConfigsRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListNotificationConfigsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListNotificationConfigsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.ListNotificationConfigsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[notification_config.NotificationConfig]:
async def async_generator():
async for page in self.pages:
for response in page.notification_configs:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListSourcesPager:
"""A pager for iterating through ``list_sources`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListSourcesResponse` object, and
provides an ``__iter__`` method to iterate through its
``sources`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListSources`` requests and continue to iterate
through the ``sources`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListSourcesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., securitycenter_service.ListSourcesResponse],
request: securitycenter_service.ListSourcesRequest,
response: securitycenter_service.ListSourcesResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListSourcesRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListSourcesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListSourcesRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[securitycenter_service.ListSourcesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[source.Source]:
for page in self.pages:
yield from page.sources
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListSourcesAsyncPager:
"""A pager for iterating through ``list_sources`` requests.
This class thinly wraps an initial
:class:`google.cloud.securitycenter_v1.types.ListSourcesResponse` object, and
provides an ``__aiter__`` method to iterate through its
``sources`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListSources`` requests and continue to iterate
through the ``sources`` field on the
corresponding responses.
All the usual :class:`google.cloud.securitycenter_v1.types.ListSourcesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[securitycenter_service.ListSourcesResponse]],
request: securitycenter_service.ListSourcesRequest,
response: securitycenter_service.ListSourcesResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.securitycenter_v1.types.ListSourcesRequest):
The initial request object.
response (google.cloud.securitycenter_v1.types.ListSourcesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = securitycenter_service.ListSourcesRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[securitycenter_service.ListSourcesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[source.Source]:
async def async_generator():
async for page in self.pages:
for response in page.sources:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
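# Illustrative note (not part of the generated code): these pager objects are
# what the corresponding SecurityCenterClient methods return, and they are
# consumed like ordinary iterables. The client, parent and request values
# below are placeholders, not part of this module:
#
#   for result in client.list_findings(request={"parent": source_name}):
#       ...        # ListFindingsResult items; pages are fetched lazily
#
#   async for source in async_client.list_sources(request={"parent": parent}):
#       ...        # Source items from the async pager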
| apache-2.0 | -1,597,406,854,641,823,200 | 41.957447 | 105 | 0.657039 | false |
Orav/kbengine | kbe/res/scripts/common/Lib/site-packages/pip/_vendor/html5lib/serializer/htmlserializer.py | 1 | 13229 | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
import gettext
_ = gettext.gettext
try:
from functools import reduce
except ImportError:
pass
from ..constants import voidElements, booleanAttributes, spaceCharacters
from ..constants import rcdataElements, entities, xmlEntities
from .. import utils
from xml.sax.saxutils import escape
spaceCharacters = "".join(spaceCharacters)
try:
from codecs import register_error, xmlcharrefreplace_errors
except ImportError:
unicode_encode_errors = "strict"
else:
unicode_encode_errors = "htmlentityreplace"
encode_entity_map = {}
is_ucs4 = len("\U0010FFFF") == 1
for k, v in list(entities.items()):
# skip multi-character entities
if ((is_ucs4 and len(v) > 1) or
(not is_ucs4 and len(v) > 2)):
continue
if v != "&":
if len(v) == 2:
v = utils.surrogatePairToCodepoint(v)
else:
v = ord(v)
if not v in encode_entity_map or k.islower():
                # prefer < over < and similarly for &, >, etc.
encode_entity_map[v] = k
def htmlentityreplace_errors(exc):
if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
res = []
codepoints = []
skip = False
for i, c in enumerate(exc.object[exc.start:exc.end]):
if skip:
skip = False
continue
index = i + exc.start
if utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):
codepoint = utils.surrogatePairToCodepoint(exc.object[index:index + 2])
skip = True
else:
codepoint = ord(c)
codepoints.append(codepoint)
for cp in codepoints:
e = encode_entity_map.get(cp)
if e:
res.append("&")
res.append(e)
if not e.endswith(";"):
res.append(";")
else:
res.append("&#x%s;" % (hex(cp)[2:]))
return ("".join(res), exc.end)
else:
return xmlcharrefreplace_errors(exc)
register_error(unicode_encode_errors, htmlentityreplace_errors)
del register_error
class HTMLSerializer(object):
# attribute quoting options
quote_attr_values = False
quote_char = '"'
use_best_quote_char = True
# tag syntax options
omit_optional_tags = True
minimize_boolean_attributes = True
use_trailing_solidus = False
space_before_trailing_solidus = True
# escaping options
escape_lt_in_attrs = False
escape_rcdata = False
resolve_entities = True
# miscellaneous options
alphabetical_attributes = False
inject_meta_charset = True
strip_whitespace = False
sanitize = False
options = ("quote_attr_values", "quote_char", "use_best_quote_char",
"omit_optional_tags", "minimize_boolean_attributes",
"use_trailing_solidus", "space_before_trailing_solidus",
"escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
"alphabetical_attributes", "inject_meta_charset",
"strip_whitespace", "sanitize")
def __init__(self, **kwargs):
"""Initialize HTMLSerializer.
Keyword options (default given first unless specified) include:
inject_meta_charset=True|False
          Whether to insert a meta element to define the character set of the
document.
quote_attr_values=True|False
Whether to quote attribute values that don't require quoting
per HTML5 parsing rules.
quote_char=u'"'|u"'"
Use given quote character for attribute quoting. Default is to
use double quote unless attribute value contains a double quote,
in which case single quotes are used instead.
escape_lt_in_attrs=False|True
Whether to escape < in attribute values.
escape_rcdata=False|True
Whether to escape characters that need to be escaped within normal
elements within rcdata elements such as style.
resolve_entities=True|False
Whether to resolve named character entities that appear in the
          source tree. The XML predefined entities < > & " '
are unaffected by this setting.
strip_whitespace=False|True
Whether to remove semantically meaningless whitespace. (This
compresses all whitespace to a single space except within pre.)
minimize_boolean_attributes=True|False
Shortens boolean attributes to give just the attribute value,
for example <input disabled="disabled"> becomes <input disabled>.
use_trailing_solidus=False|True
Includes a close-tag slash at the end of the start tag of void
elements (empty elements whose end tag is forbidden). E.g. <hr/>.
space_before_trailing_solidus=True|False
Places a space immediately before the closing slash in a tag
using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.
sanitize=False|True
Strip all unsafe or unknown constructs from output.
See `html5lib user documentation`_
omit_optional_tags=True|False
Omit start/end tags that are optional.
alphabetical_attributes=False|True
Reorder attributes to be in alphabetical order.
.. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
"""
if 'quote_char' in kwargs:
self.use_best_quote_char = False
for attr in self.options:
setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
self.errors = []
self.strict = False
def encode(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, unicode_encode_errors)
else:
return string
def encodeStrict(self, string):
assert(isinstance(string, text_type))
if self.encoding:
return string.encode(self.encoding, "strict")
else:
return string
def serialize(self, treewalker, encoding=None):
self.encoding = encoding
in_cdata = False
self.errors = []
if encoding and self.inject_meta_charset:
from ..filters.inject_meta_charset import Filter
treewalker = Filter(treewalker, encoding)
# WhitespaceFilter should be used before OptionalTagFilter
# for maximum efficiently of this latter filter
if self.strip_whitespace:
from ..filters.whitespace import Filter
treewalker = Filter(treewalker)
if self.sanitize:
from ..filters.sanitizer import Filter
treewalker = Filter(treewalker)
if self.omit_optional_tags:
from ..filters.optionaltags import Filter
treewalker = Filter(treewalker)
# Alphabetical attributes must be last, as other filters
# could add attributes and alter the order
if self.alphabetical_attributes:
from ..filters.alphabeticalattributes import Filter
treewalker = Filter(treewalker)
for token in treewalker:
type = token["type"]
if type == "Doctype":
doctype = "<!DOCTYPE %s" % token["name"]
if token["publicId"]:
doctype += ' PUBLIC "%s"' % token["publicId"]
elif token["systemId"]:
doctype += " SYSTEM"
if token["systemId"]:
if token["systemId"].find('"') >= 0:
if token["systemId"].find("'") >= 0:
self.serializeError(_("System identifer contains both single and double quote characters"))
quote_char = "'"
else:
quote_char = '"'
doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)
doctype += ">"
yield self.encodeStrict(doctype)
elif type in ("Characters", "SpaceCharacters"):
if type == "SpaceCharacters" or in_cdata:
if in_cdata and token["data"].find("</") >= 0:
self.serializeError(_("Unexpected </ in CDATA"))
yield self.encode(token["data"])
else:
yield self.encode(escape(token["data"]))
elif type in ("StartTag", "EmptyTag"):
name = token["name"]
yield self.encodeStrict("<%s" % name)
if name in rcdataElements and not self.escape_rcdata:
in_cdata = True
elif in_cdata:
self.serializeError(_("Unexpected child element of a CDATA element"))
for (attr_namespace, attr_name), attr_value in token["data"].items():
# TODO: Add namespace support here
k = attr_name
v = attr_value
yield self.encodeStrict(' ')
yield self.encodeStrict(k)
if not self.minimize_boolean_attributes or \
(k not in booleanAttributes.get(name, tuple())
and k not in booleanAttributes.get("", tuple())):
yield self.encodeStrict("=")
if self.quote_attr_values or not v:
quote_attr = True
else:
quote_attr = reduce(lambda x, y: x or (y in v),
spaceCharacters + ">\"'=", False)
v = v.replace("&", "&")
if self.escape_lt_in_attrs:
v = v.replace("<", "<")
if quote_attr:
quote_char = self.quote_char
if self.use_best_quote_char:
if "'" in v and '"' not in v:
quote_char = '"'
elif '"' in v and "'" not in v:
quote_char = "'"
if quote_char == "'":
v = v.replace("'", "'")
else:
v = v.replace('"', """)
yield self.encodeStrict(quote_char)
yield self.encode(v)
yield self.encodeStrict(quote_char)
else:
yield self.encode(v)
if name in voidElements and self.use_trailing_solidus:
if self.space_before_trailing_solidus:
yield self.encodeStrict(" /")
else:
yield self.encodeStrict("/")
yield self.encode(">")
elif type == "EndTag":
name = token["name"]
if name in rcdataElements:
in_cdata = False
elif in_cdata:
self.serializeError(_("Unexpected child element of a CDATA element"))
yield self.encodeStrict("</%s>" % name)
elif type == "Comment":
data = token["data"]
if data.find("--") >= 0:
self.serializeError(_("Comment contains --"))
yield self.encodeStrict("<!--%s-->" % token["data"])
elif type == "Entity":
name = token["name"]
key = name + ";"
if not key in entities:
self.serializeError(_("Entity %s not recognized" % name))
if self.resolve_entities and key not in xmlEntities:
data = entities[key]
else:
data = "&%s;" % name
yield self.encodeStrict(data)
else:
self.serializeError(token["data"])
def render(self, treewalker, encoding=None):
if encoding:
return b"".join(list(self.serialize(treewalker, encoding)))
else:
return "".join(list(self.serialize(treewalker)))
def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):
# XXX The idea is to make data mandatory.
self.errors.append(data)
if self.strict:
raise SerializeError
class SerializeError(Exception):
"""Error in serialized tree"""
pass
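# Illustrative usage sketch (not part of the original module). Assuming the
# standalone html5lib distribution rather than this vendored copy, a
# serializer is normally combined with a treewalker along these lines:
#
#   import html5lib
#   from html5lib.serializer.htmlserializer import HTMLSerializer
#
#   dom = html5lib.parse("<p class=demo>Hi", treebuilder="dom")
#   walker = html5lib.getTreeWalker("dom")
#   html = "".join(HTMLSerializer(omit_optional_tags=False).serialize(walker(dom)))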
| lgpl-3.0 | 3,097,852,555,493,873,000 | 39.340625 | 119 | 0.521052 | false |
FreshXOpenSource/wallaby-app-crm | wallaby/apps/crm/rooms/tickets.py | 1 | 6272 | # Copyright (c) by it's authors.
# Some rights reserved. See LICENSE, AUTHORS.
from wallaby.pf.room import *
import wallaby.backends.couchdb as couchdb
from wallaby.plugins.couchdb.document import *
from wallaby.pf.peer.database import *
from wallaby.pf.peer.viewer import *
from wallaby.pf.peer.multiViewer import *
from wallaby.pf.peer.editDocument import *
from wallaby.pf.peer.documentChanger import *
from datetime import date
import re
class Tickets(Room):
Receiving = [
Viewer.In.Document,
'Custom.In.CreateInvoicePos',
'Custom.In.CreateSingleInvoicePos',
MultiViewer.Out.MultiSelect
]
Sending = [
Database.In.SaveDocument,
DocumentChanger.In.InsertRow
]
def __init__(self, name):
Room.__init__(self, name)
self._couchdb = couchdb.Database.getDatabase(None)
self._doc = None
self._invoiceDoc = None
self._multi = None
self._state = None
def customPeers(self):
self._invoiceRoom = House.get('INVOICE')
self.catch(MultiViewer.Out.MultiSelect, self._multiSelect)
self.catch('Custom.In.CreateInvoicePos', self._createInvoicePos)
self.catch('Custom.In.CreateSingleInvoicePos', self._createInvoicePos)
self.catch(Viewer.In.Document, self._document)
self._invoiceRoom.catch(Viewer.In.Document, self._invoiceDocument)
self._invoiceRoom.catch(EditDocument.Out.State, self._docState)
def _multiSelect(self, action, selection):
print "select multi", selection
if selection != None and len(selection) > 0:
self._multi = selection
else:
self._multi = None
def _docState(self, action, state):
self._state = state
@defer.inlineCallbacks
def _createInvoicePos(self, action, payload):
if action == 'Custom.In.CreateSingleInvoicePos':
single = True
else:
single = False
if self._invoiceDoc == None or not self._state:
return
if not self._state in ('Edit', 'New', 'Dirty', 'View'):
return
ids = self._multi
if not ids:
ids = []
if self._doc != None:
if self._doc.documentID not in ids:
ids.append(self._doc.documentID)
newdoc = self._invoiceDoc.clone()
articles = newdoc.get('articles')
if articles == None: articles = []
newArticles = []
allFromDate = None
allToDate = None
for id in ids:
doc = yield self._couchdb.get(id)
if not doc: continue
doc = CouchdbDocument(data=doc)
issueDescription = doc.get('issueDescription')
if issueDescription == None: issueDescription = ''
else: issueDescription = re.sub(r'[\r\n]', '', issueDescription) + "\n"
tracks = doc.get('tracks')
fromDate = None
toDate = None
if tracks != None:
for key, t in tracks.items():
desc = t['comment']
spentOn = t['spentOn']
d = date(*spentOn)
if not fromDate or d < fromDate: fromDate = d
if not toDate or d > toDate: toDate = d
if not allFromDate or d < allFromDate: allFromDate = d
if not allToDate or d > allToDate: allToDate = d
if desc != None and len(desc) > 0: issueDescription += ' - ' + desc + "\n"
hours = float(doc.get('hours'))
article = {
"count": hours,
"description": issueDescription,
"price": 100.0,
"unit": "Stunde(n)",
"total": hours * 100.0,
"fromDate": [fromDate.year, fromDate.month, fromDate.day],
"toDate": [toDate.year, toDate.month, toDate.day]
}
if not single and self._state in ('Edit', 'New', 'Dirty'):
self._invoiceContext.throw(DocumentChanger.In.InsertRow, ("articles", article) )
else:
newArticles.append(article)
doc.set('status', 'booked')
yield self._couchdb.save(doc._data)
if single:
article = {
"count": 0.0,
"description": "",
"price": 100.0,
"unit": "Stunde(n)",
"total": 0.0,
"fromDate": [allFromDate.year, allFromDate.month, allFromDate.day],
"toDate": [allToDate.year, allToDate.month, allToDate.day]
}
for a in newArticles:
article["count"] += float(a["count"])
article["description"] += a["description"]
article["total"] = float(article["count"]) * float(article["price"])
newArticles = [article]
if self._state in ('Edit', 'New', 'Dirty'):
self._invoiceContext.throw(DocumentChanger.In.InsertRow, ("articles", article) )
if self._state in ('Edit', 'New', 'Dirty'):
return
for a in newArticles:
articles.append(a)
total = 0.0
fromDate = None
toDate = None
for a in articles:
d1 = d2 = None
if "fromDate" in a: d1 = date(*a["fromDate"])
if "toDate" in a: d2 = date(*a["toDate"])
if d1 and (not fromDate or d1 < fromDate): fromDate = d1
if d2 and (not toDate or d2 > toDate): toDate = d2
total += float(a['total'])
vat = total * 0.19
if fromDate: newdoc.set('workPeriod.fromDate', [fromDate.year, fromDate.month, fromDate.day])
if toDate: newdoc.set('workPeriod.toDate', [toDate.year, toDate.month, toDate.day])
newdoc.set('vat', vat)
newdoc.set('vat', vat)
newdoc.set('netto', total)
newdoc.set('brutto', total + vat)
newdoc.set('articles', articles)
self._invoiceContext.throw(Database.In.SaveDocument, (newdoc, None))
def _document(self, action, doc):
self._doc = doc
def _invoiceDocument(self, action, doc):
self._invoiceDoc = doc
| bsd-2-clause | -2,272,778,632,203,881,700 | 30.20398 | 101 | 0.546397 | false |
asherbender/mcl | mcl/messages/messages.py | 1 | 24010 | """Object specification for creating messages in MCL.
The :mod:`~.messages.messages` module provides a means for implementing MCL
message objects. This is done through the :class:`.Message` object. Since
:class:`.Message` objects derive from python dictionaries, they operate near
identically.
:class:`.Message` objects are a specification of what structure of data is
being transmitted on a particular :class:`.abstract.Connection`. As a result
:class:`.Message` objects are defined by:
- mandatory message attributes that must be present when instances of the
new :class:`.Message` objects are created
- a :class:`~.abstract.Connection` object instance specifying where the
message can be broadcast and received
Creating MCL :class:`.Message` objects is simple and is demonstrated in the
following example:
.. testcode::
from mcl import Message
from mcl.network.udp import Connection as UdpConnection
# Define a message.
class ExampleMessage(Message):
mandatory = ('text', )
connection = UdpConnection('ff15::a')
# Create instance of message.
msg = ExampleMessage(text='hello world')
print msg
    # Message objects contain a 'timestamp' key which records the UTC time
# of when the message object was instantiated. To update the timestamp and
# message attributes, use the update() method.
msg.update(text="I'm a lumberjack")
print msg
# To update a message attribute without updating the timestamp, set it
# directly.
msg['text'] = 'Spam! Spam!'
print msg
# Serialise message into a msgpack binary string. Hex-ify the string for
# demonstration and printability.
print msg.encode().encode('hex')
# The message can also be encoded as a JSON object.
print msg.to_json()
.. testoutput::
:hide:
{'timestamp': ..., 'name': 'ExampleMessage', 'text': 'hello world'}
{'timestamp': ..., 'name': 'ExampleMessage', 'text': "I'm a lumberjack"}
{'timestamp': ..., 'name': 'ExampleMessage', 'text': 'Spam! Spam!'}
...
The following functions can be used to retrieve and manipulate
:class:`.Message` objects (a short example follows the list):
- :func:`~.messages.get_message_objects` return :class:`.Message` object(s)
from name(s)
- :func:`~.messages.list_messages` list message objects derived from
:class:`.Message`
- :func:`~.messages.remove_message_object` de-register a :class:`.Message`
object from the list of known messages
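For illustration only (this example is not part of the original documentation
and assumes the ``ExampleMessage`` class defined above is still registered),
the helpers listed above can be combined as follows:
.. testcode::
    from mcl.messages.messages import (list_messages, get_message_objects,
                                       remove_message_object)
    # List every registered Message() subclass.
    registered = list_messages()
    # Retrieve a message class by its name.
    ExampleMessage = get_message_objects('ExampleMessage')
    # De-register the class when it is no longer needed.
    remove_message_object('ExampleMessage')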
.. codeauthor:: Asher Bender <[email protected]>
.. codeauthor:: James Ward <[email protected]>
.. sectionauthor:: Asher Bender <[email protected]>
"""
import sets
import json
import time
import msgpack
import mcl.network.abstract
# Globally track Message() definitions. The meta-class _RegisterMeta() inserts
# Message() definitions into _MESSAGES when Message() objects are subclassed.
_MESSAGES = list()
class _MessageMeta(type):
"""Meta-class for manufacturing and globally registering Message() objects.
The :class:`._MessageMeta` object is a meta-class designed to manufacture
MCL :class:`.Message` classes. The meta-class works by dynamically adding
mandatory attributes to a class definition at run time if and ONLY if the
class inherits from :class:`.abstract.Connection`.
Classes that inherit from :class:`.Message` must implement the `mandatory`
and `connection` attributes where:
- `mandatory` is a list of strings defining the names of mandatory
message attributes that must be present when instances of the new
:class:`.Message` objects are created. During instantiation the input
list *args is mapped to the attributes defined by `mandatory`. If
`mandatory` is not present, a :exc:`.TypeError` will be raised.
- `connection` is an instance of a :class:`~.abstract.Connection`
object specifying where the message can be broadcast and received.
The meta-class also maintains a global register of :class:`.Message`
sub-classes. :class:`.Message` sub-classes are added to the register when
they are defined. During this process :class:`._MessageMeta` checks to see
if a :class:`.Message` class with the same name has already been defined.
Note that the list of :class:`.Message` sub-classes can be acquired by
calling::
messages = Message.__subclasses__()
The reason the :class:`._MessageMeta` is preferred is that it can provide
error checking at the time of definition. Note that sub-classes cannot
easily be removed from the list returned by
``Message.__subclasses__()``. By using this meta-class, :class:`.Message`
objects can be removed from the global register via other methods (see
:func:`.remove_message_object`).
Raises:
TypeError: If a :class:`.Message` object with the same name already
exists.
TypeError: If the parent class is a :class:`.Message` object and
`mandatory` is ill-specified.
"""
def __new__(cls, name, bases, dct):
"""Manufacture a message class.
Manufacture a Message class for objects inheriting from
:class:`.Message`. This is done by searching the input dictionary `dct`
for the keys `mandatory` and `connection` where:
- `mandatory` is a list of strings defining the names of mandatory
message attributes that must be present when instances of the new
:class:`.Message` object are created. During instantiation the
input list *args is mapped to the attributes defined by
`mandatory`. If `mandatory` is not present, a :exc:`.TypeError`
will be raised.
- `connection` is an instance of a :class:`~.abstract.Connection`
object specifying where the message can be broadcast and
received.
A new message class is manufactured using the definition specified by
the attribute `mandatory`. The property 'mandatory' is attached to the
returned class.
Args:
cls (class): is the class being instantiated.
name (string): is the name of the new class.
bases (tuple): base classes of the new class.
dct (dict): dictionary mapping the class attribute names to objects.
Returns:
:class:`.Message`: sub-class of :class:`.Message` with mandatory
attributes defined by the original `mandatory` attribute.
Raises:
NameError: If the `name` is message or a :class:`.Message` subclass
with the same name already exists.
TypeError: If the `mandatory` or `connection` attributes are
ill-specified.
ValueError: If the `mandatory` attribute contains the words
`mandatory` or `connection`.
"""
# Do not look for the mandatory attributes in the Message() base class.
if (name == 'Message') and (bases == (dict,)):
return super(_MessageMeta, cls).__new__(cls, name, bases, dct)
# Do not look for the mandatory attributes in sub-classes of the
# Message() base class.
elif bases != (Message,):
return super(_MessageMeta, cls).__new__(cls, name, bases, dct)
# Cannot call messages 'Message'.
if name == 'Message':
raise NameError("Cannot name Message() subclasses 'Message'.")
# Check that a message with the same name does not exist.
elif name in [message.__name__ for message in _MESSAGES]:
msg = "A Message() with the name '%s' already exists."
raise NameError(msg % name)
# Objects inheriting from Message() are required to have a 'mandatory'
# and 'connection' attribute.
mandatory = dct.get('mandatory', {})
connection = dct.get('connection', None)
# Ensure 'mandatory' is a list or tuple of strings.
if ((not isinstance(mandatory, (list, tuple))) or
(not all(isinstance(item, basestring) for item in mandatory))):
msg = "'mandatory' must be a list or tuple or strings."
raise TypeError(msg)
# Ensure the connection object is properly specified.
if not isinstance(connection, mcl.network.abstract.Connection):
msg = "The argument 'connection' must be an instance of a "
msg += "Connection() subclass."
raise TypeError(msg)
# Check that a message with the same connection does not exist.
for message in _MESSAGES:
if connection.to_dict() == message.connection.to_dict():
msg = 'A Connection() with the same parameters already exists:'
msg += ' %s' % str(connection)
raise Exception(msg)
# Detect duplicate attribute names.
seen_attr = set()
for attr in mandatory:
if (attr == 'mandatory') or (attr == 'connection'):
msg = "Field names cannot be 'mandatory' or 'connection'."
raise ValueError(msg)
if attr in seen_attr:
raise ValueError('Encountered duplicate field name: %r' % attr)
seen_attr.add(attr)
# Add basic message attributes as read-only CLASS attributes. This is
# done by dynamically manufacturing a meta-class with properties
# returning the basic message attributes.
metacls = type('%sMeta' % name, (cls,),
{'name': property(lambda cls: name),
'mandatory': property(lambda cls: mandatory),
'connection': property(lambda cls: connection)})
# Add basic message attributes as read-only INSTANCE attributes. This
# is done by adding properties that return the basic message attributes
# to the manufactured class.
del(dct['mandatory'])
del(dct['connection'])
dct['name'] = property(lambda cls: name)
dct['mandatory'] = property(lambda cls: mandatory)
dct['connection'] = property(lambda cls: connection)
obj = super(_MessageMeta, cls).__new__(metacls, name, bases, dct)
# Store message definition.
_MESSAGES.append(obj)
return obj
class Message(dict):
"""Base class for MCL message objects.
The :class:`.Message` object provides a base class for defining MCL message
objects. Objects inheriting from :class:`.Message` must implement the
attribute `mandatory` where:
- `mandatory` is a list of strings defining the names of mandatory
      message attributes that must be present when instances of the new
      :class:`.Message` object are created. If `mandatory` is
not present, a TypeError will be raised.
These attributes define a message format and allow :class:`.Message` to
manufacture a message class adhering to the specified definition.
Raises:
TypeError: If any of the input argument are invalid.
"""
__metaclass__ = _MessageMeta
def __init__(self, *args, **kwargs):
# If no inputs were passed into the constructor, initialise the object
# with empty fields.
if not args and not kwargs:
empty = [None] * len(self.mandatory)
kwargs = dict(zip(self.mandatory, empty))
# Initialise message object with items.
super(Message, self).__init__()
self.update(*args, **kwargs)
# Ensure the message adheres to specification.
if not sets.Set(self.keys()).issuperset(sets.Set(self.mandatory)):
msg = "'%s' must have the following items: [" % self['name']
msg += ', '.join(self.mandatory)
msg += '].'
raise TypeError(msg)
def __setitem__(self, key, value):
"""Set an item to a new value.
Prevent write access to the keys 'name'.
"""
# Prevent write access to Message name.
if key == 'name' and key in self:
msg = "The key value '%s' in '%s' is read-only."
raise ValueError(msg % (key, self.__class__.__name__))
# All other items can be accessed normally.
else:
super(Message, self).__setitem__(key, value)
def __set_time(self):
"""Update the CPU time-stamp in milliseconds from UTC epoch.
"""
# Return the time in seconds since the epoch as a floating point
# number. Note that even though the time is always returned as a
# floating point number, not all systems provide time with a better
# precision than 1 second. While this function normally returns
# non-decreasing values, it can return a lower value than a previous
# call if the system clock has been set back between the two calls.
#
# Note: The datetime documentation claims datetime.datetime.now()
# supplies more precision than can be gotten from time.time()
        #       timestamp if possible. To simplify the code, time.time() is
        #       used here.
#
# From:
# https://docs.python.org/2/library/time.html#time.time
# https://docs.python.org/2/library/datetime.html#datetime.datetime.now
#
super(Message, self).__setitem__('timestamp', time.time())
def to_json(self):
"""Return the contents of the message as a JSON string.
Returns:
str: JSON formatted representation of the message contents.
"""
return json.dumps(self)
def encode(self):
"""Return the contents of the message as serialised binary msgpack data.
Returns:
str: serialised binary msgpack representation of the message
contents.
"""
return msgpack.dumps(self)
def __decode(self, data):
"""Unpack msgpack serialised binary data.
Args:
data (str): msgpack serialised message data.
Returns:
dict: unpacked message contents.
Raises:
TypeError: If the input binary data could not be unpacked.
"""
try:
dct = msgpack.loads(data)
# The transmitted object is a dictionary.
if type(dct) is dict:
# Check if mandatory attributes are missing.
missing = sets.Set(self.mandatory) - sets.Set(dct.keys())
# Decode was successful.
if not missing:
return dct
# Transmitted object is missing mandatory fields.
else:
msg = 'The transmitted object was missing the following '
msg += 'mandatory items: [' + ', '.join(missing) + '].'
# Transmitted object was decoded but is not a dictionary.
else:
msg = "Serialised object is of type '%s' and not a dictionary."
msg = msg % str(type(dct))
# Decoding was unsuccessful.
except Exception as e:
msg = "Could not unpack message. Error encountered:\n\n%s" % str(e)
# Raise error encountered during unpacking.
raise TypeError(msg)
def update(self, *args, **kwargs):
"""Update message contents with new values.
Update message contents from an optional positional argument and/or a
set of keyword arguments.
If a positional argument is given and it is a serialised binary msgpack
representation of the message contents, it is unpacked and used to
update the contents of the message.
.. testcode::
serialised = ExampleMessage(text='hello world')
print ExampleMessage(serialised)
.. testoutput::
:hide:
{'timestamp': ..., 'name': 'ExampleMessage', 'text': 'hello world'}
If a positional argument is given and it is a mapping object, the
message is updated with the same key-value pairs as the mapping object.
.. testcode::
print ExampleMessage({'text': 'hello world'})
.. testoutput::
:hide:
{'timestamp': ..., 'name': 'ExampleMessage', 'text': 'hello world'}
If the positional argument is an iterable object. Each item in the
iterable must itself be an iterable with exactly two objects. The first
object of each item becomes a key in the new dictionary, and the second
object the corresponding value. If a key occurs more than once, the
last value for that key becomes the corresponding value in the message.
.. testcode::
print ExampleMessage(zip(('text',), ('hello world',)))
.. testoutput::
:hide:
{'timestamp': ..., 'name': 'ExampleMessage', 'text': 'hello world'}
If keyword arguments are given, the keyword arguments and their values
are used to update the contents of the message
.. testcode::
print ExampleMessage(text='hello world')
.. testoutput::
:hide:
{'timestamp': ..., 'name': 'ExampleMessage', 'text': 'hello world'}
If the key 'timestamp' is present in the input, the timestamp of the
message is set to the input value. If no 'timestamp' value is
        specified, the CPU time-stamp, in seconds from UTC epoch, at the
end of the update is recorded.
Args:
*args (list): positional arguments
*kwargs (dict): keyword arguments.
Raises:
TypeError: If the message contents could not be updated.
"""
# Set the default timestamp to None. If it is updated by the passed in
# arguments, we won't update it automatically.
if 'timestamp' not in self:
self['timestamp'] = None
original_time = self['timestamp']
if len(args) > 1:
msg = 'Input argument must be a msgpack serialised dictionary, '
msg += 'a mapping object or iterable object.'
raise TypeError(msg)
# Update message with a serialised dictionary:
#
# msg.update(binary)
#
if (len(args) == 1) and (type(args[0]) is str):
super(Message, self).update(self.__decode(args[0]))
return
# Update message with a dictionary (and keyword arguments):
#
# msg.update(one=1, two=2, three=3)
# msg.update(zip(['one', 'two', 'three'], [1, 2, 3]))
# msg.update([('two', 2), ('one', 1), ('three', 3)])
# msg.update({'three': 3, 'one': 1, 'two': 2})
#
else:
try:
super(Message, self).update(*args, **kwargs)
except Exception as e:
msg = "Could not update message. Error encountered:\n\n%s"
raise TypeError(msg % str(e))
# Populate the name key with the message name.
if 'name' not in self:
super(Message, self).__setitem__('name', self.__class__.__name__)
# The name parameter was modified.
elif self['name'] != self.__class__.__name__:
msg = "Attempted to set the read-only key value %s['%s'] = '%s'."
raise ValueError(msg % (self.__class__.__name__,
'name', self['name']))
# Record the time of update if the 'timestamp' field was not
# specified. By checking for changes to the 'timestamp' field, users
# can set null values (None) or falsy values (a timestamp of 0).
if self['timestamp'] == original_time:
self.__set_time()
def remove_message_object(name):
"""De-register a :class:`.Message` object from the list of known messages.
Args:
name (string): Name of the :class:`.Message` object to de-register.
Returns:
bool: :data:`.True` if the :class:`.Message` object was
de-registered. :data:`.False` if the :class:`.Message` object does
not exist.
"""
# Create name of available messages.
names = [msg.__name__ for msg in _MESSAGES]
# The message exists, remove it from the list.
if name in names:
index = names.index(name)
del _MESSAGES[index]
return True
# The message does not exist. No action required.
else:
return False
def list_messages(include=None, exclude=None):
"""List objects derived from :class:`.Message`.
Args:
include (list): list of message object names to include.
exclude (list): list of message object names to exclude.
Returns:
list: a list of message objects derived from :class:`.Message` is
returned.
"""
# Save includes.
if isinstance(include, basestring):
include = [include, ]
elif include is not None:
if ((not hasattr(include, '__iter__')) or
(not all([isinstance(itm, basestring) for itm in include]))):
msg = "'include' must be a string or a list of strings.'"
raise TypeError(msg)
# Save excludes.
if isinstance(exclude, basestring):
exclude = [exclude, ]
elif exclude is not None:
if ((not hasattr(exclude, '__iter__')) or
(not all([isinstance(itm, basestring) for itm in exclude]))):
msg = "'exclude' must be a string or a list of strings.'"
raise TypeError(msg)
# Filter available messages.
messages = list()
for message in _MESSAGES:
# Do not include messages in the black list.
if exclude and message.name in exclude:
continue
# Only include messages in the white list (if it exists).
if include and message.name not in include:
continue
messages.append(message)
return messages
def get_message_objects(names):
"""Return :class:`.Message` object(s) from name(s).
Args:
        names (:obj:`python:string` or :obj:`python:list`): The name (as a
string) of a single message object to retrieve. To retrieve
multiple message objects, input a list containing the object names.
Returns:
Message or list: If a single message object is requested (string
input), the requested py:class:`.Message` is returned. If multiple
message objects are requested (list input), a list of message
objects is returned.
Raises:
TypeError: If `names` is not a string or list/tuple of strings.
NameError: If `names` does not exist or multiple message objects are
found.
"""
# Input is a string.
if isinstance(names, basestring):
# Create name of available messages.
messages = [(msg, msg.__name__) for msg in _MESSAGES]
# Cache messages with a matching name.
matches = list()
for message in messages:
if message[1] == names:
matches.append(message)
# Message does not exist.
if len(matches) == 0:
raise NameError("Could locate the message named: '%s'." % names)
# Multiple messages with the same name exist.
elif len(matches) > 1:
msg = "Multiple messages named '%s' found including:\n" % names
for message in matches:
msg += ' %s.%s\n' % (message[0].__module__, message[1])
raise NameError(msg)
# Return unique message.
return matches[0][0]
# Input is a list or tuple.
elif ((isinstance(names, (list, tuple))) and
(all([isinstance(itm, basestring) for itm in names]))):
messages = list()
for name in names:
try:
messages.append(get_message_objects(name))
except:
raise
return messages
# Invalid input type.
else:
msg = "The input 'names' must be a string or a list/tuple of strings."
raise TypeError(msg)
| bsd-3-clause | -5,560,837,598,931,066,000 | 35.656489 | 83 | 0.61137 | false |
pignacio/var-log-recetas | var_log_recetas/ingredient/migrations/0002_initial_ingredients.py | 1 | 2713 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_measure_units(apps, schema_editor):
MeasureUnit = apps.get_model('ingredient', 'MeasureUnit')
db_alias = schema_editor.connection.alias
MeasureUnit.objects.using(db_alias).bulk_create([
MeasureUnit(name='units', short_name='u'),
MeasureUnit(name='grams', short_name='g'),
MeasureUnit(name='cups', short_name='u'),
MeasureUnit(name='mililiters', short_name='ml'),
MeasureUnit(name='cubic centimeters', short_name='cc'),
MeasureUnit(name='tablespoons', short_name='tbsp'),
MeasureUnit(name='teaspoons', short_name='tsp'),
])
def create_ingredients(apps, schema_editor):
INGREDIENTS = {
'Sugar': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Milk': ('cups', 'mililiters', 'cubic centimeters', 'tablespoons', 'teaspoons'),
'Egg': ('units', 'grams'),
'Egg yolk': ('units',),
'Egg white': ('units',),
'Cream': ('cups', 'mililiters', 'cubic centimeters', 'tablespoons', 'teaspoons'),
'Vanilla extract': ('mililiters', 'cubic centimeters', 'tablespoons', 'teaspoons'),
'Oat': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Flour': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Selfraising Flour': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Condensed milk': ('cups',
'cubic centimeters',
'tablespoons',
'teaspoons',
'grams'),
'Baking powder': ('tablespoons', 'teaspoons', 'grams'),
'Baking soda': ('tablespoons', 'teaspoons', 'grams'),
'Butter': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Water': ('cups', 'mililiters', 'cubic centimeters', 'tablespoons', 'teaspoons'),
'Salt': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Oil': ('cups', 'mililiters', 'cubic centimeters', 'tablespoons', 'teaspoons'),
}
MeasureUnit = apps.get_model('ingredient', 'MeasureUnit')
Ingredient = apps.get_model('ingredient', 'Ingredient')
for ingredient, units in INGREDIENTS.items():
ingredient, _created = Ingredient.objects.get_or_create(name=ingredient)
for unit in units:
ingredient.units.add(MeasureUnit.objects.get(name=unit))
db_alias = schema_editor.connection.alias
class Migration(migrations.Migration):
dependencies = [
('ingredient', '0001_initial'),
]
operations = [
migrations.operations.RunPython(create_measure_units),
migrations.operations.RunPython(create_ingredients),
]
| gpl-3.0 | 5,447,176,207,372,048,000 | 41.390625 | 91 | 0.597862 | false |
FPGAwars/apio | apio/managers/downloader.py | 1 | 2304 | # -*- coding: utf-8 -*-
# -- This file is part of the Apio project
# -- (C) 2016-2019 FPGAwars
# -- Author Jesús Arroyo
# -- Licence GPLv2
# -- Derived from:
# ---- Platformio project
# ---- (C) 2014-2016 Ivan Kravets <[email protected]>
# ---- Licence Apache v2
from email.utils import parsedate_tz
from math import ceil
from time import mktime
import requests
import click
from apio import util
requests.packages.urllib3.disable_warnings()
class FDUnrecognizedStatusCode(util.ApioException):
MESSAGE = "Got an unrecognized status code '{0}' when downloaded {1}"
class FileDownloader:
CHUNK_SIZE = 1024
def __init__(self, url, dest_dir=None):
self._url = url
self._fname = url.split("/")[-1]
self._destination = self._fname
if dest_dir:
self.set_destination(util.safe_join(dest_dir, self._fname))
self._progressbar = None
self._request = None
# make connection
self._request = requests.get(url, stream=True)
if self._request.status_code != 200:
raise FDUnrecognizedStatusCode(self._request.status_code, url)
def set_destination(self, destination):
self._destination = destination
def get_filepath(self):
return self._destination
def get_lmtime(self):
if "last-modified" in self._request.headers:
return self._request.headers.get("last-modified")
return None
def get_size(self):
return int(self._request.headers.get("content-length"))
def start(self):
itercontent = self._request.iter_content(chunk_size=self.CHUNK_SIZE)
with open(self._destination, "wb") as file:
chunks = int(ceil(self.get_size() / float(self.CHUNK_SIZE)))
with click.progressbar(length=chunks, label="Downloading") as pbar:
for _ in pbar:
file.write(next(itercontent))
self._request.close()
self._preserve_filemtime(self.get_lmtime())
def _preserve_filemtime(self, lmdate):
if lmdate is not None:
timedata = parsedate_tz(lmdate)
lmtime = mktime(timedata[:9])
util.change_filemtime(self._destination, lmtime)
def __del__(self):
if self._request:
self._request.close()
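# Illustrative usage sketch (not part of the original module); the URL and
# destination directory below are placeholders:
#
#   downloader = FileDownloader('https://example.com/toolchain.tar.gz', dest_dir='.')
#   downloader.start()                  # downloads with a click progress bar
#   print(downloader.get_filepath())    # path to the downloaded file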
| gpl-2.0 | 6,235,525,954,317,994,000 | 26.746988 | 79 | 0.623535 | false |
vmax-feihu/hue | desktop/libs/notebook/src/notebook/connectors/spark_shell.py | 1 | 8921 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
import time
LOG = logging.getLogger(__name__)
from django.utils.translation import ugettext as _
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import force_unicode
from desktop.lib.rest.http_client import RestException
try:
from spark.conf import LIVY_SERVER_SESSION_KIND
from spark.job_server_api import get_api as get_spark_api
except ImportError, e:
LOG.exception('Spark is not enabled')
from notebook.data_export import download as spark_download
from notebook.connectors.base import SessionExpired, _get_snippet_session, Api,\
QueryError
class SparkApi(Api):
PROPERTIES = [
{'name': 'jars', 'nice_name': _('Jars'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': False},
{'name': 'files', 'nice_name': _('Files'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': False},
{'name': 'pyFiles', 'nice_name': _('pyFiles'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': False},
{'name': 'driverMemory', 'nice_name': _('Driver Memory'), 'default': '1', 'type': 'jvm', 'is_yarn': False},
{'name': 'driverCores', 'nice_name': _('Driver Cores'), 'default': '1', 'type': 'number', 'is_yarn': True},
{'name': 'executorMemory', 'nice_name': _('Executors Memory'), 'default': '1', 'type': 'jvm', 'is_yarn': True},
{'name': 'executorCores', 'nice_name': _('Executor Cores'), 'default': '1', 'type': 'number', 'is_yarn': True},
{'name': 'queue', 'nice_name': _('Queue'), 'default': '1', 'type': 'string', 'is_yarn': True},
{'name': 'archives', 'nice_name': _('Archives'), 'default': '', 'type': 'csv-hdfs-files', 'is_yarn': True},
{'name': 'numExecutors', 'nice_name': _('Executors Numbers'), 'default': '1', 'type': 'number', 'is_yarn': True},
]
SPARK_UI_RE = re.compile("Started SparkUI at (http[s]?://([0-9a-zA-Z-_\.]+):(\d+))")
YARN_JOB_RE = re.compile("tracking URL: (http[s]?://.+/)")
STANDALONE_JOB_RE = re.compile("Got job (\d+)")
def create_session(self, lang='scala', properties=None):
props = dict([(p['name'], p['value']) for p in properties]) if properties is not None else {}
props['kind'] = lang
api = get_spark_api(self.user)
response = api.create_session(**props)
status = api.get_session(response['id'])
count = 0
while status['state'] == 'starting' and count < 120:
status = api.get_session(response['id'])
count += 1
time.sleep(1)
if status['state'] != 'idle':
info = '\n'.join(status['log']) if status['log'] else 'timeout'
raise QueryError(_('The Spark session could not be created in the cluster: %s') % info)
return {
'type': lang,
'id': response['id'],
'properties': properties
}
def execute(self, notebook, snippet):
api = get_spark_api(self.user)
session = _get_snippet_session(notebook, snippet)
try:
response = api.submit_statement(session['id'], snippet['statement'])
return {
'id': response['id'],
'has_result_set': True,
}
except Exception, e:
message = force_unicode(str(e)).lower()
if 'session not found' in message or 'connection refused' in message or 'session is in state busy' in message:
raise SessionExpired(e)
else:
raise e
def check_status(self, notebook, snippet):
api = get_spark_api(self.user)
session = _get_snippet_session(notebook, snippet)
cell = snippet['result']['handle']['id']
try:
response = api.fetch_data(session['id'], cell)
return {
'status': response['state'],
}
except Exception, e:
message = force_unicode(str(e)).lower()
if 'session not found' in message:
raise SessionExpired(e)
else:
raise e
def fetch_result(self, notebook, snippet, rows, start_over):
api = get_spark_api(self.user)
session = _get_snippet_session(notebook, snippet)
cell = snippet['result']['handle']['id']
try:
response = api.fetch_data(session['id'], cell)
except Exception, e:
message = force_unicode(str(e)).lower()
if 'session not found' in message:
raise SessionExpired(e)
else:
raise e
content = response['output']
if content['status'] == 'ok':
data = content['data']
images = []
try:
table = data['application/vnd.livy.table.v1+json']
except KeyError:
try:
images = [data['image/png']]
except KeyError:
images = []
data = [[data['text/plain']]]
meta = [{'name': 'Header', 'type': 'STRING_TYPE', 'comment': ''}]
type = 'text'
else:
data = table['data']
headers = table['headers']
meta = [{'name': h['name'], 'type': h['type'], 'comment': ''} for h in headers]
type = 'table'
      # Incremental fetch (start_over=False) is not supported, so return no rows.
if not start_over:
data = []
return {
'data': data,
'images': images,
'meta': meta,
'type': type
}
elif content['status'] == 'error':
tb = content.get('traceback', None)
if tb is None:
msg = content.get('ename', 'unknown error')
evalue = content.get('evalue')
if evalue is not None:
msg = '%s: %s' % (msg, evalue)
else:
msg = ''.join(tb)
raise QueryError(msg)
def download(self, notebook, snippet, format):
try:
api = get_spark_api(self.user)
session = _get_snippet_session(notebook, snippet)
cell = snippet['result']['handle']['id']
return spark_download(api, session['id'], cell, format)
except Exception, e:
raise PopupException(e)
def cancel(self, notebook, snippet):
api = get_spark_api(self.user)
session = _get_snippet_session(notebook, snippet)
response = api.cancel(session['id'])
return {'status': 0}
def get_log(self, notebook, snippet, startFrom=0, size=None):
api = get_spark_api(self.user)
session = _get_snippet_session(notebook, snippet)
return api.get_log(session['id'], startFrom=startFrom, size=size)
def progress(self, snippet, logs):
return 50
def close_statement(self, snippet): # Individual statements cannot be closed
pass
def close_session(self, session):
api = get_spark_api(self.user)
if session['id'] is not None:
try:
api.close(session['id'])
return {
'session': session['id'],
'status': 0
}
except RestException, e:
if e.code == 404 or e.code == 500: # TODO remove the 500
raise SessionExpired(e)
else:
return {'status': -1}
def get_jobs(self, notebook, snippet, logs):
if self._is_yarn_mode():
# Tracking URL is found at the start of the logs
start_logs = self.get_log(notebook, snippet, startFrom=0, size=100)
return self._get_yarn_jobs(start_logs)
else:
return self._get_standalone_jobs(logs)
def _get_standalone_jobs(self, logs):
job_ids = set([])
# Attempt to find Spark UI Host and Port from startup logs
spark_ui_url = self.SPARK_UI_RE.search(logs)
if not spark_ui_url:
LOG.warn('Could not find the Spark UI URL in the session logs.')
return []
else:
spark_ui_url = spark_ui_url.group(1)
# Standalone/Local mode runs on same host as Livy, attempt to find Job IDs in Spark log
for match in self.STANDALONE_JOB_RE.finditer(logs):
job_id = match.group(1)
job_ids.add(job_id)
jobs = [{
'name': job_id,
'url': '%s/jobs/job/?id=%s' % (spark_ui_url, job_id)
} for job_id in job_ids]
return jobs
def _get_yarn_jobs(self, logs):
tracking_urls = set([])
# YARN mode only outputs the tracking-proxy URL, not Job IDs
for match in self.YARN_JOB_RE.finditer(logs):
url = match.group(1)
tracking_urls.add(url)
jobs = [{
'name': url.strip('/').split('/')[-1], # application_id is the last token
'url': url
} for url in tracking_urls]
return jobs
def _is_yarn_mode(self):
return LIVY_SERVER_SESSION_KIND.get() == "yarn"
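# Illustrative lifecycle sketch (not part of the original module). Hue's
# notebook layer drives SparkApi roughly as follows, where `user`, `notebook`
# and `snippet` are the usual Hue objects and dicts (placeholders here):
#
#   api = SparkApi(user)
#   session = api.create_session(lang='scala') # start a Livy session
#   handle = api.execute(notebook, snippet) # submit a statement
#   status = api.check_status(notebook, snippet) # poll for completion
#   result = api.fetch_result(notebook, snippet, rows=100, start_over=True)
#   api.close_session(session)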
| apache-2.0 | 8,104,299,454,335,192,000 | 31.089928 | 117 | 0.61047 | false |
logicabrity/odes | scikits/odes/sundials/setup-tine.py | 1 | 3823 | #!/usr/bin/env python
from __future__ import print_function
import os
import platform
from numpy.distutils.system_info import get_info
from scikits.odes._build import cython
#from Cython.Distutils import build_ext
base_path = os.path.abspath(os.path.dirname(__file__))
lapack_opt = None
def win():
"""
Return True if a windows system
"""
if platform.system() in ["Windows", "win32"]:
return True
return False
if not win():
try:
lapack_opt = get_info('lapack_opt',notfound_action=2)
except:
print('LAPACK not found, no sundials solvers')
if win():
print ('In win')
INCL_DIRS_LAPACK = ['C:/MinGW/lib/Lapack/lapack-3.4.1/SRC']
LIB_DIRS_LAPACK = ['C:/MinGW/lib/Lapack/lib']
LIBS_LAPACK = ['lapack', 'blas']
INCL_DIRS_SUNDIALS = ['C:/Program Files/sundials/include']
LIB_DIRS_SUNDIALS = ['C:/Program Files/sundials', 'C:/Program Files/sundials/lib']
LIBS_SUNDIALS = ['sundials_nvecserial']
LIBS_IDA = ['sundials_ida']
LIBS_CVODE = ['sundials_cvode']
LIB_DIRS_GFORTRAN = ['C:/MinGW/lib/gcc/mingw32/4.6.2']
LIBS_FORTRAN = ['gfortran']
else:
INCL_DIRS_LAPACK = []
LIB_DIRS_LAPACK = []
LIBS_LAPACK = []
if lapack_opt:
INCL_DIRS_LAPACK = lapack_opt.get('include_dirs',[])
LIB_DIRS_LAPACK = lapack_opt.get('library_dirs',[])
LIBS_LAPACK = lapack_opt.get('libraries',[])
# Edit following lines if sundials is installed differently!
INCL_DIRS_SUNDIALS = [os.path.abspath(os.path.dirname(__file__))]
LIB_DIRS_SUNDIALS = [os.path.abspath(os.path.dirname(__file__)),
'/usr/lib', '/usr/local/lib/',
]
LIBS_SUNDIALS = ['sundials_nvecserial']
LIBS_IDA = ['sundials_ida']
LIBS_CVODE = ['sundials_cvode']
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
print("=============================================")
print("parent package is %s" % parent_package)
print("top path is %s" % top_path)
print("=============================================")
config = Configuration('sundials', parent_package, top_path)
if lapack_opt or win():
# sundials library
## assume installed globally at the moment
##config.add_library('sundials_ida',
# sundials cython wrappers
cython(['common_defs.pyx'], working_path=base_path,
include_dirs=[])
config.add_extension("common_defs",
sources=['common_defs.c'],
include_dirs=INCL_DIRS_SUNDIALS)
cython(['ida.pyx'], working_path=base_path)
config.add_extension("ida",
sources=['ida.c'],
depends=['common_defs.c'],
include_dirs=INCL_DIRS_SUNDIALS+INCL_DIRS_LAPACK,
library_dirs=LIB_DIRS_SUNDIALS+LIB_DIRS_LAPACK+LIB_DIRS_GFORTRAN,
libraries=LIBS_IDA+LIBS_SUNDIALS+LIBS_LAPACK+LIBS_FORTRAN)
cython(['cvode.pyx'], working_path=base_path)
config.add_extension("cvode",
sources=['cvode.c'],
depends=['common_defs.c'],
include_dirs=INCL_DIRS_SUNDIALS+INCL_DIRS_LAPACK,
library_dirs=LIB_DIRS_SUNDIALS+LIB_DIRS_LAPACK+LIB_DIRS_GFORTRAN,
libraries=LIBS_CVODE+LIBS_SUNDIALS+LIBS_LAPACK+LIBS_FORTRAN)
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| bsd-3-clause | 2,066,079,431,865,335,300 | 37.010204 | 94 | 0.548784 | false |
waveform80/oliphant | docs/conf.py | 1 | 3848 | #!/usr/bin/env python3
# vim: set et sw=4 sts=4 fileencoding=utf-8:
#
# The MIT License (MIT)
#
# Copyright (c) 2014-2015 Dave Jones <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import os
# -- General configuration ------------------------------------------------
extensions = ['sphinx.ext.intersphinx']
templates_path = ['_templates']
source_suffix = '.rst'
#source_encoding = 'utf-8-sig'
master_doc = 'index'
project = 'Oliphant'
author = 'Dave Jones'
copyright = '2014-2015 %s' % author
version = '0.1'
release = version
#language = None
#today_fmt = '%B %d, %Y'
exclude_patterns = ['_build']
#default_role = None
#add_function_parentheses = True
#add_module_names = True
#show_authors = False
pygments_style = 'sphinx'
#modindex_common_prefix = []
#keep_warnings = False
# -- Autodoc configuration ------------------------------------------------
autodoc_member_order = 'groupwise'
# -- Intersphinx configuration --------------------------------------------
intersphinx_mapping = {
}
# -- Options for HTML output ----------------------------------------------
html_theme = 'default'
#html_theme_options = {}
#html_theme_path = []
#html_title = None
#html_short_title = None
#html_logo = None
#html_favicon = None
html_static_path = ['_static']
#html_extra_path = []
#html_last_updated_fmt = '%b %d, %Y'
#html_use_smartypants = True
#html_sidebars = {}
#html_additional_pages = {}
#html_domain_indices = True
#html_use_index = True
#html_split_index = False
#html_show_sourcelink = True
#html_show_sphinx = True
#html_show_copyright = True
#html_use_opensearch = ''
#html_file_suffix = None
htmlhelp_basename = '%sdoc' % project.lower()
# Hack to make wide tables work properly in RTD
# See https://github.com/snide/sphinx_rtd_theme/issues/117 for details
def setup(app):
app.add_stylesheet('style_override.css')
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
'papersize': 'a4paper',
'pointsize': '10pt',
#'preamble': '',
}
latex_documents = [
(
'index', # source start file
'%s.tex' % project.lower(), # target filename
'%s Documentation' % project, # title
author, # author
'manual', # documentclass
),
]
#latex_logo = None
#latex_use_parts = False
#latex_show_pagerefs = False
#latex_show_urls = False
#latex_appendices = []
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
man_pages = []
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
texinfo_documents = []
#texinfo_appendices = []
#texinfo_domain_indices = True
#texinfo_show_urls = 'footnote'
#texinfo_no_detailmenu = False
| mit | 5,657,725,867,801,998,000 | 28.829457 | 79 | 0.633576 | false |
igsr/igsr_analysis | BEDTools/BEDTools.py | 1 | 4620 | '''
Created on 24 Apr 2017
@author: ernesto
'''
import subprocess
import tempfile
class BEDTools:
'''
Class used to perform different operations with the BEDTools package.
This is essentially a wrapper for the BEDTools package. The functionality is quite limited and
additional functions will be added as necessary
'''
def __init__(self, bedtools_folder=None):
"""
Constructor
Parameters
----------
bedtools_folder : str, optional
Path to folder with bedtools binary.
"""
self.bedtools_folder = bedtools_folder
def make_windows(self, w, g, s=None, subtract=None, lextend=None, rextend=None, verbose=False):
"""
This method will make windows from a genome file by using 'bedtools makewindows'
Parameters
----------
w : int
width of windows in bp.
g : str
Path to genome file.
s : int, optional
            step size (offset between consecutive window starts) in bp; windows overlap
            when s < w. i.e. if -w 100 -s 80 will generate:
chr1 0 100
chr1 80 180
chr1 160 260
...
So, -s defines the offset in bp
Another example -w 1000 -s 200
chr1 0 1000
chr1 200 1200
chr1 400 1400
chr1 600 1600
lextend : int, optional
Extend each interval to the left by int bases.
rextend : int, optional
Extend each interval to the right by int bases.
subtract : str, optional
BED file containing the features that will be removed from the generated windows.
For example, if we have the following window:
chr20 1000 2000
And we have the following feature in the BED file: chr20 1100 1200
Then the resulting windows will be like:
chr20 1000 1100
chr20 1200 2000
verbose : bool, default = False
Returns
-------
coordlist : list
A list of lists. Each sublist is composed of ['chr','start','end']
            It will return an empty list if no elements are defined for a certain chr.
Raises
------
Exception
"""
command = ""
if self.bedtools_folder:
command += self.bedtools_folder+"/"
command += "bedtools makewindows -g {0} -w {1}".format(g, w)
if s is not None:
command += " -s {0}".format(s)
coordlist = []
if verbose is not False:
print(command)
try:
stdout = subprocess.check_output(command, shell=True)
coordlist = [l.split("\t") for l in stdout.decode("utf-8").strip().split("\n")]
except subprocess.CalledProcessError as exc:
raise Exception(exc.output)
if subtract is not None:
temp = tempfile.NamedTemporaryFile()
try:
ofile = open(temp.name, 'w')
for i in coordlist:
ofile.write("{0}\t{1}\t{2}\n".format(i[0], i[1], i[2]))
ofile.close()
command1 = "{0}/bedtools subtract -a {1} -b {2}".format(self.bedtools_folder,
temp.name, subtract)
coordlist = None
try:
stdout = subprocess.check_output(command1, shell=True)
coordlist = [l.split("\t") for l in stdout.decode("utf-8").strip().split("\n")]
except subprocess.CalledProcessError as exc:
raise Exception(exc.output)
finally:
temp.close()
        if lextend is not None:
            # Extend the left coordinate of every window except the first one:
            # the first_seen flag deliberately skips the initial interval.
            first_seen = False
            for k, lon in enumerate(coordlist):
                if first_seen is True:
                    lon[1] = str(int(lon[1])+lextend)
                first_seen = True
                coordlist[k] = lon
        if rextend is not None:
            # Extend the right coordinate of every window except the last one.
            for k, lon in enumerate(coordlist):
                if k != len(coordlist)-1:
                    lon[2] = str(int(lon[2])+rextend)
                coordlist[k] = lon
return coordlist
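    # Usage sketch (hypothetical paths; 'hg38.genome' and 'mask.bed' are assumptions, not
    # files provided by this module):
    #
    #     bt = BEDTools(bedtools_folder='/usr/local/bin')
    #     windows = bt.make_windows(w=100000, g='hg38.genome', s=50000, subtract='mask.bed')
    #     for chrom, start, end in windows:
    #         print(chrom, start, end)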
def __str__(self):
sab = []
for key in self.__dict__:
sab.append("{key}='{value}'".format(key=key, value=self.__dict__[key]))
return ', '.join(sab)
def __repr__(self):
return self.__str__()
| apache-2.0 | -600,444,568,850,752,000 | 30.006711 | 100 | 0.498052 | false |
zeldin/libsigrokdecode | decoders/spdif/pd.py | 1 | 8991 | ##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2014 Guenther Wenninger <[email protected]>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
##
import sigrokdecode as srd
class SamplerateError(Exception):
pass
class Decoder(srd.Decoder):
api_version = 3
id = 'spdif'
name = 'S/PDIF'
longname = 'Sony/Philips Digital Interface Format'
desc = 'Serial bus for connecting digital audio devices.'
license = 'gplv2+'
inputs = ['logic']
outputs = ['spdif']
channels = (
{'id': 'data', 'name': 'Data', 'desc': 'Data line'},
)
annotations = (
('bitrate', 'Bitrate / baudrate'),
('preamble', 'Preamble'),
('bits', 'Bits'),
        ('aux', 'Auxiliary-audio-databits'),
('samples', 'Audio Samples'),
('validity', 'Data Valid'),
('subcode', 'Subcode data'),
        ('chan_stat', 'Channel Status'),
('parity', 'Parity Bit'),
)
annotation_rows = (
('info', 'Info', (0, 1, 3, 5, 6, 7, 8)),
('bits', 'Bits', (2,)),
('samples', 'Samples', (4,)),
)
def putx(self, ss, es, data):
self.put(ss, es, self.out_ann, data)
def puty(self, data):
self.put(self.ss_edge, self.samplenum, self.out_ann, data)
def __init__(self):
self.state = 'GET FIRST PULSE WIDTH'
self.ss_edge = None
self.first_edge = True
self.samplenum_prev_edge = 0
self.pulse_width = 0
self.clocks = []
self.range1 = 0
self.range2 = 0
self.preamble_state = 0
self.preamble = []
self.seen_preamble = False
self.last_preamble = 0
self.first_one = True
self.subframe = []
def start(self):
self.out_ann = self.register(srd.OUTPUT_ANN)
# Assume that the initial pin state is logic 0.
self.initial_pins = [0]
def metadata(self, key, value):
if key == srd.SRD_CONF_SAMPLERATE:
self.samplerate = value
def get_pulse_type(self):
if self.range1 == 0 or self.range2 == 0:
return -1
if self.pulse_width >= self.range2:
return 2
elif self.pulse_width >= self.range1:
return 0
else:
return 1
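    # Worked example (illustrative numbers, not from a real capture): if the recovered clock
    # widths are self.clocks == [10, 20, 30] samples, find_third_pulse_width() sets
    # range1 == 15 and range2 == 25, so a pulse of width 12 is classified as 1 (short),
    # 18 as 0 (medium) and 28 as 2 (long, i.e. a preamble pulse).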
def find_first_pulse_width(self):
if self.pulse_width != 0:
self.clocks.append(self.pulse_width)
self.state = 'GET SECOND PULSE WIDTH'
def find_second_pulse_width(self):
if self.pulse_width > (self.clocks[0] * 1.3) or \
self.pulse_width < (self.clocks[0] * 0.7):
self.clocks.append(self.pulse_width)
self.state = 'GET THIRD PULSE WIDTH'
def find_third_pulse_width(self):
if not ((self.pulse_width > (self.clocks[0] * 1.3) or \
self.pulse_width < (self.clocks[0] * 0.7)) \
and (self.pulse_width > (self.clocks[1] * 1.3) or \
self.pulse_width < (self.clocks[1] * 0.7))):
return
self.clocks.append(self.pulse_width)
self.clocks.sort()
self.range1 = (self.clocks[0] + self.clocks[1]) / 2
self.range2 = (self.clocks[1] + self.clocks[2]) / 2
spdif_bitrate = int(self.samplerate / (self.clocks[2] / 1.5))
self.ss_edge = 0
self.puty([0, ['Signal Bitrate: %d Mbit/s (=> %d kHz)' % \
(spdif_bitrate, (spdif_bitrate/ (2 * 32)))]])
clock_period_nsec = 1000000000 / spdif_bitrate
self.last_preamble = self.samplenum
# We are done recovering the clock, now let's decode the data stream.
self.state = 'DECODE STREAM'
def decode_stream(self):
pulse = self.get_pulse_type()
if not self.seen_preamble:
# This is probably the start of a preamble, decode it.
if pulse == 2:
self.preamble.append(self.get_pulse_type())
self.state = 'DECODE PREAMBLE'
self.ss_edge = self.samplenum - self.pulse_width - 1
return
# We've seen a preamble.
if pulse == 1 and self.first_one:
self.first_one = False
self.subframe.append([pulse, self.samplenum - \
self.pulse_width - 1, self.samplenum])
elif pulse == 1 and not self.first_one:
self.subframe[-1][2] = self.samplenum
self.putx(self.subframe[-1][1], self.samplenum, [2, ['1']])
self.bitcount += 1
self.first_one = True
else:
self.subframe.append([pulse, self.samplenum - \
self.pulse_width - 1, self.samplenum])
self.putx(self.samplenum - self.pulse_width - 1,
self.samplenum, [2, ['0']])
self.bitcount += 1
if self.bitcount == 28:
aux_audio_data = self.subframe[0:4]
sam, sam_rot = '', ''
for a in aux_audio_data:
sam = sam + str(a[0])
sam_rot = str(a[0]) + sam_rot
sample = self.subframe[4:24]
for s in sample:
sam = sam + str(s[0])
sam_rot = str(s[0]) + sam_rot
validity = self.subframe[24:25]
subcode_data = self.subframe[25:26]
channel_status = self.subframe[26:27]
parity = self.subframe[27:28]
self.putx(aux_audio_data[0][1], aux_audio_data[3][2], \
[3, ['Aux 0x%x' % int(sam, 2), '0x%x' % int(sam, 2)]])
self.putx(sample[0][1], sample[19][2], \
[3, ['Sample 0x%x' % int(sam, 2), '0x%x' % int(sam, 2)]])
self.putx(aux_audio_data[0][1], sample[19][2], \
[4, ['Audio 0x%x' % int(sam_rot, 2), '0x%x' % int(sam_rot, 2)]])
if validity[0][0] == 0:
self.putx(validity[0][1], validity[0][2], [5, ['V']])
else:
self.putx(validity[0][1], validity[0][2], [5, ['E']])
self.putx(subcode_data[0][1], subcode_data[0][2],
[6, ['S: %d' % subcode_data[0][0]]])
self.putx(channel_status[0][1], channel_status[0][2],
[7, ['C: %d' % channel_status[0][0]]])
self.putx(parity[0][1], parity[0][2], [8, ['P: %d' % parity[0][0]]])
self.subframe = []
self.seen_preamble = False
self.bitcount = 0
def decode_preamble(self):
if self.preamble_state == 0:
self.preamble.append(self.get_pulse_type())
self.preamble_state = 1
elif self.preamble_state == 1:
self.preamble.append(self.get_pulse_type())
self.preamble_state = 2
elif self.preamble_state == 2:
self.preamble.append(self.get_pulse_type())
self.preamble_state = 0
self.state = 'DECODE STREAM'
if self.preamble == [2, 0, 1, 0]:
self.puty([1, ['Preamble W', 'W']])
elif self.preamble == [2, 2, 1, 1]:
self.puty([1, ['Preamble M', 'M']])
elif self.preamble == [2, 1, 1, 2]:
self.puty([1, ['Preamble B', 'B']])
else:
self.puty([1, ['Unknown Preamble', 'Unknown Prea.', 'U']])
self.preamble = []
self.seen_preamble = True
self.bitcount = 0
self.first_one = True
self.last_preamble = self.samplenum
def decode(self):
if not self.samplerate:
raise SamplerateError('Cannot decode without samplerate.')
# Throw away first detected edge as it might be mangled data.
self.wait({0: 'e'})
while True:
# Wait for any edge (rising or falling).
(data,) = self.wait({0: 'e'})
self.pulse_width = self.samplenum - self.samplenum_prev_edge - 1
self.samplenum_prev_edge = self.samplenum
if self.state == 'GET FIRST PULSE WIDTH':
self.find_first_pulse_width()
elif self.state == 'GET SECOND PULSE WIDTH':
self.find_second_pulse_width()
elif self.state == 'GET THIRD PULSE WIDTH':
self.find_third_pulse_width()
elif self.state == 'DECODE STREAM':
self.decode_stream()
elif self.state == 'DECODE PREAMBLE':
self.decode_preamble()
| gpl-3.0 | -5,038,679,791,654,598,000 | 35.697959 | 86 | 0.530197 | false |
sirkonst/concurrently | tests/engines/test_asyncio_thread.py | 1 | 3952 | import asyncio
import time
from queue import Queue
import pytest
from concurrently import concurrently, AsyncIOThreadEngine, UnhandledExceptions
from . import EngineTest, paramz_conc_count, paramz_data_count
def process(data):
time.sleep(data)
return time.time()
class TestAsyncIOThreadEngine(EngineTest):
@pytest.mark.asyncio(forbid_global_loop=True)
@paramz_conc_count
@paramz_data_count
async def test_concurrently(self, conc_count, data_count):
data = range(data_count)
q_data = Queue()
for d in data:
q_data.put(d)
q_results = Queue()
start_time = time.time()
@concurrently(conc_count, engine=AsyncIOThreadEngine)
def _parallel():
while not q_data.empty():
d = q_data.get()
res = process(d)
q_results.put({d: res})
await _parallel()
results = {}
while not q_results.empty():
results.update(q_results.get())
def calc_delta(n):
if n // conc_count == 0:
return n
return n + calc_delta(n - conc_count)
assert len(results) == data_count
for n, v in results.items():
delta = v - start_time
assert int(delta) == calc_delta(n)
@pytest.mark.asyncio(forbid_global_loop=True)
async def test_stop(self):
data = range(3)
i_data = iter(data)
results = {}
start_time = time.time()
@concurrently(2, engine=AsyncIOThreadEngine)
def _parallel():
for d in i_data:
r = process(d)
results[d] = r
await asyncio.sleep(0.5)
await _parallel.stop()
assert len(results) == 1
assert int(results[0]) == int(start_time)
@pytest.mark.asyncio(forbid_global_loop=True)
async def test_exception(self):
data = range(2)
i_data = iter(data)
@concurrently(2, engine=AsyncIOThreadEngine)
def _parallel():
for d in i_data:
if d == 1:
raise RuntimeError()
with pytest.raises(UnhandledExceptions) as exc:
await _parallel()
assert len(exc.value.exceptions) == 1
assert isinstance(exc.value.exceptions[0], RuntimeError)
@pytest.mark.asyncio(forbid_global_loop=True)
async def test_exception_suppress(self):
data = range(2)
i_data = iter(data)
results = {}
start_time = time.time()
@concurrently(2, engine=AsyncIOThreadEngine)
def _parallel():
for d in i_data:
if d == 1:
raise RuntimeError()
res = process(d)
results[d] = res
await _parallel(suppress_exceptions=True)
assert len(results) == 1
assert int(results[0]) == int(start_time)
exc_list = _parallel.exceptions()
assert len(exc_list) == 1
assert isinstance(exc_list[0], RuntimeError)
@pytest.mark.asyncio(forbid_global_loop=True)
async def test_fail_hard(self):
i_data = iter(range(4))
results = {}
@concurrently(3, engine=AsyncIOThreadEngine)
def _parallel():
for d in i_data:
if d == 1:
raise RuntimeError()
time.sleep(d)
results[d] = True
with pytest.raises(RuntimeError):
await _parallel(fail_hard=True)
assert len(results) == 1
assert results[0]
@pytest.mark.asyncio(forbid_global_loop=True)
async def test_decorated_fn_is_not_coroutine(self):
with pytest.raises(AssertionError) as e:
@concurrently(1, engine=AsyncIOThreadEngine)
async def _coroutine():
pass
assert str(e.value) == \
'Decorated function `_coroutine` must be regular not a coroutine'
| mit | -1,208,497,335,089,531,100 | 27.228571 | 79 | 0.558451 | false |
JNU-Include/CNN | lib/softmax_onehot.py | 1 | 2438 | import tensorflow as tf
from file2buffer import File2Buffer
from neural_network import NeuralNetwork
from lib.nntype import NNType
class SoftmaxOnehot (NeuralNetwork):
Y_one_hot = None
def set_placeholder(self, num_of_input, num_of_output):
self.X = tf.placeholder(tf.float32, [None, num_of_input])
self.Y = tf.placeholder(tf.int32, [None, num_of_output])
#num_of_class: 7, if self.Y is 4 then generates [[0],[0],[0],[0],[1],[0],[0]] as Y_one_hot
def set_one_hot(self, num_of_class):
self.Y_one_hot = tf.one_hot(self.Y, num_of_class) # one hot
print("one_hot", self.Y_one_hot)
        self.Y_one_hot = tf.reshape(self.Y_one_hot, [-1, num_of_class]) # flatten list: [[a],[b]] -> [a, b]
print("reshape", self.Y_one_hot)
def create_layer(self, previous_output, num_of_input, num_of_neuron, w_name='weight', b_name='bias'):
        self.set_weight_initializer() ## a hook for you (override to pick the initializer)
if self.xaiver_initializer == NNType.XAIVER:
# http://stackoverflow.com/questions/33640581/how-to-do-xavier-initialization-on-tensorflow
W = tf.get_variable(w_name, shape=[num_of_input, num_of_neuron], initializer = tf.contrib.layers.xavier_initializer())
b = tf.Variable(tf.random_normal([num_of_neuron]), name=b_name)
else : # if self.initializer == None:
W = tf.Variable(tf.random_normal([num_of_input, num_of_neuron]), name = w_name)
b = tf.Variable(tf.random_normal([num_of_neuron]), name = b_name)
# tf.nn.softmax computes softmax activations
# softmax = exp(logits) / reduce_sum(exp(logits), dim)
logits = tf.matmul(previous_output, W) + b
return logits
def set_hypothesis(self, logits):
self.hypothesis = tf.nn.softmax(logits)
def set_cost_function(self, logits):
        cost_i = tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=self.Y_one_hot)
self.cost_function = tf.reduce_mean(cost_i)
def evaluate(self, afile):
f2b = File2Buffer()
f2b.file_load(afile)
index_of_max_value = tf.argmax(self.hypothesis, 1)
hit_record = tf.equal(index_of_max_value, tf.argmax(self.Y_one_hot, 1))
recognition_rate = tf.reduce_mean(tf.cast(hit_record, tf.float32))
#
acc = self.sess.run(recognition_rate, feed_dict={self.X: f2b.x_data, self.Y: f2b.y_data})
print("Acc: {:.2%}".format(acc*100))
| mit | 4,101,979,121,624,030,700 | 41.666667 | 130 | 0.630757 | false |
steder/aws-snippets | launch_config_janitor.py | 1 | 2159 | import time
from boto.ec2.autoscale import AutoScaleConnection
def get_asg_connection():
conn = AutoScaleConnection()
autoscale_groups = conn.get_all_groups(max_records=1)
return conn
def find_unused_launch_configs():
conn = get_asg_connection()
autoscale_groups = conn.get_all_groups(max_records=100)
launch_configs = conn.get_all_launch_configurations(max_records=100)
launch_config_names = {lc.name for lc in launch_configs}
used_launch_config_names = {asg.launch_config_name for asg in autoscale_groups}
unused_launch_config_names = launch_config_names - used_launch_config_names
print "Autoscale Groups and Current Launch Configs:"
print "{:<40}{:<40}".format("ASG", "LC")
for asg in autoscale_groups:
#print "asg:", asg.name, "-> lc:", asg.launch_config_name
print "{:<40}{:<40}".format(asg.name, asg.launch_config_name)
print "\nUnused Launch Configs: (launch configs without a autoscale group)"
unused_launch_config_names = list(sorted(unused_launch_config_names))
for unused_launch_config in unused_launch_config_names:
print "\t", unused_launch_config
return unused_launch_config_names
def cleanup_unused_launch_configs(unused_launch_config_names, delete=False):
conn = get_asg_connection()
configs = conn.get_all_launch_configurations(names=unused_launch_config_names)
print "\nGetting ready to cleanup launch configs ... {}".format(delete and "FOR REAL" or "DRYRUN")
for config in configs:
if delete:
print "deleting launch config: {} in {} seconds...".format(config.name, 5)
time.sleep(5)
print "deleting launch config: {}!".format(config.name)
response = config.delete()
print "deleted launch config: {} ({})!".format(config.name, response)
else:
print "dry run: not deleting config:", config.name
if __name__=="__main__":
names = find_unused_launch_configs()
if names:
cleanup_unused_launch_configs(names, delete=True)
else:
print "\nNo unused launch configs! Launch Config Janitor is going to go take a nap..."
| mit | -5,964,978,494,769,163,000 | 39.735849 | 102 | 0.67207 | false |
joerg-krause/upmpdcli | src/mediaserver/cdplugins/qobuz/exception.py | 1 | 1086 | from __future__ import print_function
'''
qobuz.exception
~~~~~~~~~~~~~~~
:part_of: xbmc-qobuz
:copyright: (c) 2012 by Joachim Basmaison, Cyril Leclerc
:license: GPLv3, see LICENSE for more details.
'''
import sys
import pprint
import traceback
class QobuzXbmcError(Exception):
def __init__(self, **ka):
if not 'additional' in ka or ka['additional'] is None:
ka['additional'] = ''
if (not 'who' in ka) or (not 'what' in ka):
raise Exception(
'QobuzXbmcError', 'Missing constructor arguments (who|what)')
nl = "\n"
msg = "[QobuzXbmcError]" + nl
msg += " - who : " + pprint.pformat(ka['who']) + nl
msg += " - what : " + ka['what'] + nl
msg += " - additional : " + repr(ka['additional']) + nl
# msg += " - type : " + self.exc_type + nl
# msg += " - value : " + self.exc_value + nl
msg += " - Stack : " + nl
print("%s" % msg, file=sys.stderr)
print("%s" % traceback.print_exc(10), file=sys.stderr)
| gpl-2.0 | 1,213,666,413,833,839,400 | 31.909091 | 77 | 0.514733 | false |
MatthieuDartiailh/eapii | eapii/core/has_i_props.py | 1 | 26370 | # -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# Copyright 2014 by Eapii Authors, see AUTHORS for more details.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENCE, distributed with this software.
#------------------------------------------------------------------------------
""" HasIProp is the most basic object in Eapii.
It handles the use of IProperty, Subsystem, and Channel and the possibility
to customize IProperty behaviour by defining specially named methods.
"""
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from future.utils import with_metaclass, bind_method
from types import FunctionType, MethodType
from functools import update_wrapper
from inspect import cleandoc, getsourcelines
from textwrap import fill
from abc import ABCMeta
from collections import defaultdict
from .iprops.i_property import IProperty
from .iprops.proxies import make_proxy
# Prefixes for IProperty specially named methods.
PRE_GET_PREFIX = '_pre_get_'
GET_PREFIX = '_get_'
POST_GET_PREFIX = '_post_get_'
PRE_SET_PREFIX = '_pre_set_'
SET_PREFIX = '_set_'
POST_SET_PREFIX = '_post_set_'
CUSTOMIZABLE = ((PRE_GET_PREFIX, 'pre_get'), (GET_PREFIX, 'get'),
(POST_GET_PREFIX, 'post_get'),
(PRE_SET_PREFIX, 'pre_set'), (SET_PREFIX, 'set'),
(POST_SET_PREFIX, 'post_set'))
RANGE_PREFIX = '_range_'
def wrap_custom_iprop_methods(cls, meth_name, iprop):
""" Wrap a HasIProp method to make it an instance method of a IProperty.
This is necessary so that users can define overriding method in a natural
way in the HasIProp subclass assuming that the instance object will be
passed as first argument and the IProperty object as second when in reality
it will be the other way round due to python binding mechanism.
Parameters
----------
cls : type
Class on which the method which should override the default behaviour
of the IProperty is defined.
meth_name : unicode
Name of the method which should be used to override the default
behaviour of the IProperty.
iprop : IProperty
Instance of IProperty whose default behaviour should be overridden.
Returns
-------
wrapped : MethodType
        Method object which can be used as an instance method of the IProperty.
"""
wrapped = getattr(cls, meth_name)
def wrapper(iprop, instance, *args, **kwargs):
return wrapped(instance, iprop, *args, **kwargs)
update_wrapper(wrapper, wrapped)
return MethodType(wrapper, iprop)
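# Illustration (hypothetical method name, not part of this module): a driver method defined as
# ``def _post_get_voltage(self, iprop, value)`` is wrapped here so that, once attached to the
# IProperty as its ``post_get``, calling ``iprop.post_get(instance, value)`` dispatches back to
# the driver method with the (instance, iprop, value) argument order the user wrote it for.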
def channel_getter_factory(cls, name, ch_cls):
""" Factory function returning custom builder for channel instances.
The factory function is bound to the calling class.
Parameters
----------
cls : type
        Class to which the channel getter method is bound.
name : unicode
Name of the channel, used for caching and naming purposes.
ch_cls : type
Class of the channel used for instantiation.
Return
------
    None. The generated ``get_<name>`` channel getter is bound directly to cls.
"""
def channel_getter(self, ch_id):
return self._generic_get_channel(name, ch_cls, ch_id)
f_name = 'get_' + name
if not hasattr(cls, f_name):
        # In Python 2 the cast is needed because we use unicode_literals
channel_getter.__name__ = str(f_name)
bind_method(cls, f_name, channel_getter)
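# Usage sketch (hypothetical names; 'OutputChannel' and 'output' are assumptions): a HasIProps
# subclass declaring a channel class attribute, e.g. ``output = OutputChannel``, gets a
# ``get_output(ch_id)`` method generated by this factory, so callers obtain cached channel
# instances with ``instr.get_output(1)``.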
class set_iprop_paras(object):
"""Placeholder use to alter an iprop in a subclass.
This can be used to lightly alter an IProperty defined on a parent class
by for example changing the secure_comm or the getter but without
rewriting everything.
Parameters
----------
**kwargs
New keyword arguments to pass to the constructor to alter the IProp.
"""
def __init__(self, **kwargs):
self.custom_attrs = kwargs
def customize(self, iprop):
"""Customize an iprop using the given kwargs.
"""
cls = type(iprop)
kwargs = iprop.creation_kwargs.copy()
kwargs.update(self.custom_attrs)
new = cls(**kwargs)
# Now set the method modifiers if any.
ndict = new.__dict__
for k, v in iprop.__dict__.items():
if k not in ndict:
if isinstance(v, MethodType):
setattr(new, k, MethodType(v.__func__, new))
else:
setattr(new, k, v)
return new
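# Usage sketch (hypothetical driver and attribute names, not part of eapii itself):
#
#     class MyDriver(BaseDriver):
#         #: Re-declare an inherited IProperty with a different secure_comm count.
#         voltage = set_iprop_paras(secure_comm=3)
#
# At class creation HasIPropsMeta calls customize() on the parent's 'voltage' IProperty,
# rebuilding it with the parent's creation kwargs updated by the ones given here.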
class AbstractHasIProp(with_metaclass(ABCMeta, object)):
"""Sentinel class for the collections of IProperties.
"""
pass
class AbstractSubSystem(with_metaclass(ABCMeta, object)):
"""Sentinel for subsystem identification.
"""
pass
AbstractHasIProp.register(AbstractSubSystem)
class AbstractChannel(with_metaclass(ABCMeta, object)):
"""Sentinel class for channel identification.
"""
pass
AbstractHasIProp.register(AbstractChannel)
class HasIPropsMeta(type):
""" Metaclass handling IProperty customisation, subsystems registration...
"""
def __new__(meta, name, bases, dct):
# Pass over the class dict once and collect the information
# necessary to implement the various behaviours.
iprops = {} # IProperty declarations
subsystems = {} # Subsystem declarations
channels = {} # Channels declaration
cust_iprops = {'pre_get': [], # Pre get methods _pre_get_*
'get': [], # Get methods: _get_*
'post_get': [], # Post get methods: _post_get_*
'pre_set': [], # Pre set methods: _pre_set_*
'set': [], # Set methods: _set_*
'post_set': [] # Post set methods: _post_set_*
}
iprop_paras = {} # Sentinels used to change an iprop
# behaviour.
ranges = [] # Names of the defined ranges.
for key, value in dct.iteritems():
if isinstance(value, IProperty):
iprops[key] = value
value.name = key
elif isinstance(value, set_iprop_paras):
iprop_paras[key] = value
# We check first channels as they are also subsystems
elif isinstance(value, type):
if issubclass(value, AbstractChannel):
channels[key] = value
elif issubclass(value, AbstractSubSystem):
subsystems[key] = value
elif isinstance(value, FunctionType):
if key.startswith(POST_GET_PREFIX):
cust_iprops['post_get'].append(key)
elif key.startswith(PRE_SET_PREFIX):
cust_iprops['pre_set'].append(key)
elif key.startswith(POST_SET_PREFIX):
cust_iprops['post_set'].append(key)
elif key.startswith(PRE_GET_PREFIX):
cust_iprops['pre_get'].append(key)
elif key.startswith(GET_PREFIX):
cust_iprops['get'].append(key)
elif key.startswith(SET_PREFIX):
cust_iprops['set'].append(key)
elif key.startswith(RANGE_PREFIX):
ranges.append(key)
for k in iprop_paras:
del dct[k]
# Create the class object.
cls = super(HasIPropsMeta, meta).__new__(meta, name, bases, dct)
# Analyse the source code to find the doc for the defined IProperties.
if iprops:
lines, _ = getsourcelines(cls)
doc = ''
for line in lines:
l = line.strip()
if l.startswith('#:'):
doc += ' ' + l[2:].strip()
elif ' = ' in l:
name = l.split(' = ', 1)[0]
if name in iprops:
iprops[name].__doc__ = fill(doc.strip(), 79)
doc = ''
# Walk the mro of the class, excluding itself, in reverse order
# collecting all of the iprops into a single dict. The reverse
# update preserves the mro of overridden iprops.
base_iprops = {}
for base in reversed(cls.__mro__[1:-1]):
if base is not AbstractHasIProp \
and issubclass(base, AbstractHasIProp):
base_iprops.update(base.__iprops__)
# The set of iprops which live on this class as opposed to a
# base class. This enables the code which hooks up the various
# static behaviours to only clone a iprops when necessary.
owned_iprops = set(iprops.keys())
all_iprops = dict(base_iprops)
all_iprops.update(iprops)
# Clone and customize iprops for which a set_iprops_attr has been
# declared.
for k, v in iprop_paras.items():
ip = v.customize(all_iprops[k])
owned_iprops.add(k)
setattr(cls, k, ip)
# Add the special statically defined behaviours for the iprops.
# If the target iprop is defined on a parent class, it is cloned
# so that the behaviour of the parent class is not modified.
def clone_if_needed(ip):
if ip.name not in owned_iprops:
ip = ip.clone()
all_iprops[ip.name] = ip
iprops[ip.name] = ip
owned_iprops.add(ip)
setattr(cls, ip.name, ip)
return ip
def customize_iprops(cls, iprops, prefix, ip_meth):
n = len(prefix)
for mangled in iprops:
target = mangled[n:]
if target in all_iprops:
iprop = clone_if_needed(all_iprops[target])
wrapped = wrap_custom_iprop_methods(cls, mangled, iprop)
setattr(iprop, ip_meth, wrapped)
else:
mess = cleandoc('''{} has no IProperty {} whose behaviour
can be customised''')
raise AttributeError(mess.format(cls, target))
for prefix, attr in CUSTOMIZABLE:
customize_iprops(cls, cust_iprops[attr], prefix, attr)
for ss in subsystems.values():
if not ss.secure_com_exceptions:
ss.secure_com_exceptions = cls.secure_com_exceptions
for ch in channels.values():
if not ch.secure_com_exceptions:
ch.secure_com_exceptions = cls.secure_com_exceptions
# Put a reference to the iprops dict on the class. This is used
# by HasIPropsMeta to query for the iprops.
cls.__iprops__ = iprops
# Put a reference to the subsystems in the class.
# This is used at initialisation to create the appropriate subsystems
cls.__subsystems__ = subsystems
# Keep a ref to names of the declared ranges accessors.
cls.__ranges__ = set([r[7:] for r in ranges])
# Create channel initialisation methods.
cls.__channels__ = set(channels)
for ch, ch_cls in channels.items():
channel_getter_factory(cls, ch, ch_cls)
return cls
class HasIProps(with_metaclass(HasIPropsMeta, object)):
""" Base class for objects using the IProperties mechanisms.
"""
    #: Tuple of iproperty names which should be cached by default.
caching_permissions = ()
    #: Tuple of exceptions to consider when securing a communication (either via
    #: the secure_communication decorator or for iproperties with a non-zero
    #: secure_comm value)
secure_com_exceptions = ()
def __init__(self, caching_allowed=True, caching_permissions={}):
self._cache = {}
self._range_cache = {}
self._proxies = {}
subsystems = self.__subsystems__
channels = self.__channels__
if caching_allowed:
# Avoid overriding class attribute
perms = {p: True for p in self.caching_permissions}
perms.update(caching_permissions)
self._caching_permissions = set([key for key in perms
if isinstance(perms[key], bool)
and perms[key]])
ss_cache_allowed = {ss: bool(perms.get(ss)) for ss in subsystems}
ss_caching = {k: v for k, v in perms.items()
if k in subsystems and isinstance(v, dict)}
self._ch_cache_allowed = {ch: bool(perms.get(ch))
for ch in channels}
self._ch_caching = {k: v for k, v in perms.items()
if k in channels and isinstance(v, dict)}
else:
self._caching_permissions = set()
ss_cache_allowed = {ss: False for ss in subsystems}
ss_caching = {}
self._ch_cache_allowed = {ch: False for ch in channels}
self._ch_caching = {}
for ss, cls in subsystems.items():
subsystem = cls(self, caching_allowed=ss_cache_allowed[ss],
caching_permissions=ss_caching.get(ss, {}))
setattr(self, ss, subsystem)
if self.__channels__:
self._channel_cache = {ch: {} for ch in self.__channels__}
def get_iprop(self, name):
""" Acces the iprop matching the given name.
Parameters
----------
name : unicode
Name of the IProperty to be retrieved
Returns
-------
iprop : IProperty
Matching IProperty object
"""
return getattr(self.__class__, name)
@property
def declared_ranges(self):
"""Set of declared ranges for the class.
Ranges are considered declared as soon as a getter has been defined.
"""
return self.__ranges__
def get_range(self, range_id):
"""Access the range object matching the definition.
Parameters
----------
range_id : str
Id of the range to retrieve. The id should be the name of an
IProperty identified as a range (initialized with the range
keyword).
Returns
-------
range_validator: AbstractRangeValidator
A range validator matching the current attributes state, which can
be used to validate values.
"""
if range_id not in self._range_cache:
self._range_cache[range_id] = getattr(self,
RANGE_PREFIX+range_id)()
return self._range_cache[range_id]
def discard_range(self, range_id):
""" Remove a range from the cache.
This should be called by methods of IProperty setters which for any
reasons invalidate a range.
Parameters
----------
range_id : str
Id of the range to retrieve. The id should be the name of an
IProperty identified as a range (initialized with the range
keyword).
"""
if range_id in self._range_cache:
del self._range_cache[range_id]
def patch_iprop(self, iprop, **kwargs):
"""Modify the behaviour of an iproperty for the current instance.
This is achieved by creating a proxy on the IProperty linked to that
instance. NB : when overriding a method the function used should take
        as first argument the iprop and as second the HasIProps object, no
automatic wrapping is performed.
Parameters
----------
iprop : unicode
Name of the IProperty whose behaviour should be overridden.
**kwargs
Attributes of the IProperty to override in the proxy.
"""
if not hasattr(self, '_proxied'):
self._proxied = []
i_p = getattr(type(self), iprop)
if self not in i_p._proxies:
make_proxy(i_p, self, kwargs)
self._proxied.append(i_p)
else:
proxy = i_p._proxies[self]
proxy.patch(kwargs)
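    # Usage sketch (hypothetical names; 'instr', 'voltage' and the replacement getter are
    # assumptions). Note the overriding function takes the iprop first and the instance
    # second, as no automatic wrapping is performed here:
    #
    #     def fake_get(iprop, instance):
    #         return 0.0
    #     instr.patch_iprop('voltage', get=fake_get)
    #     ...                                  # use the patched behaviour
    #     instr.unpatch_iprop('voltage')       # or unpatch_all() to drop every proxy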
def unpatch_iprop(self, iprop, *args):
"""Restore the behaviour of an IProperty to its default.
This is achieved by replacing the attributes by the ones of the proxy.
If the proxy comes back to the iprop behaviour it is discarded.
Parameters
----------
iprop : unicode
Name of the IProperty whose behaviour should be overridden.
*args : optional
Names of the attributes which should be restored. If omitted the
proxy will be removed.
Raises
------
KeyError :
If no proxy exists for the given IProp.
"""
i_p = getattr(type(self), iprop)
if self not in i_p._proxies:
raise KeyError('No proxy found for {}'.format(iprop))
if not args:
del i_p._proxies[self]
self._proxied.remove(i_p)
else:
proxy = i_p._proxies[self]
proxy.unpatch(args)
if proxy.obsolete:
del i_p._proxies[self]
self._proxied.remove(i_p)
def unpatch_all(self):
"""Restore all IProperties behaviour to their default one.
The class overidden behaviour are of course preserved.
"""
for iprop in self._proxied:
del iprop._proxies[self]
self._proxied = []
def clear_cache(self, subsystems=True, channels=True, properties=None):
""" Clear the cache of all the properties or only of the specified
ones.
Parameters
----------
subsystems : bool, optional
Whether or not to clear the subsystems. This argument is used only
if properties is None.
channels : bool, optional
Whether or not to clear the channels. This argument is used only
if properties is None.
properties : iterable of str, optional
Name of the properties whose cache should be cleared. Dotted names
can be used to access subsystems and channels. When accessing
channels the cache of all instances is cleared. All caches
will be cleared if not specified.
"""
cache = self._cache
if properties:
sss = defaultdict(list)
chs = defaultdict(list)
for name in properties:
if '.' in name:
aux, n = name.split('.', 1)
if aux in self.__subsystems__:
sss[aux].append(n)
else:
chs[aux].append(n)
elif name in cache:
del cache[name]
for ss in sss:
getattr(self, ss).clear_cache(properties=sss[ss])
if self.__channels__:
for ch in chs:
for o in self._channel_cache.get(ch, {}).values():
o.clear_cache(properties=chs[ch])
else:
self._cache = {}
if subsystems:
for ss in self.__subsystems__:
getattr(self, ss).clear_cache(channels=channels)
if channels and self.__channels__:
for chs in self._channel_cache.values():
for ch in chs.values():
ch.clear_cache(subsystems)
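    # Usage sketch (hypothetical property names): dotted names reach into subsystems and
    # channels, e.g. ``instr.clear_cache(properties=['power', 'output.enabled'])`` clears
    # the cached 'power' value on the instrument and 'enabled' on every cached 'output'
    # channel instance.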
def check_cache(self, subsystems=True, channels=True, properties=None):
"""Return the value of the cache of the object.
The cache values for the subsystems and channels are not accessible.
Parameters
----------
subsystems : bool, optional
Whether or not to include the subsystems caches. This argument is
used only if properties is None.
channels : bool, optional
Whether or not to include the channels caches. This argument is
used only if properties is None.
properties : iterable of str, optional
            Name of the properties whose cached values should be returned. All
            caches will be returned if not specified.
Returns
-------
cache : dict
Dict containing the cached value, if the properties arg is given
None will be returned for the field with no cached value.
"""
cache = {}
if properties:
sss = defaultdict(list)
chs = defaultdict(list)
for name in properties:
if '.' in name:
aux, n = name.split('.', 1)
if aux in self.__subsystems__:
sss[aux].append(n)
else:
chs[aux].append(n)
elif name in self._cache:
cache[name] = self._cache[name]
for ss in sss:
cache[ss] = getattr(self, ss).check_cache(properties=sss[ss])
if self.__channels__:
for ch in chs:
ch_cache = {}
cache[ch] = ch_cache
for ch_id, o in self._channel_cache.get(ch, {}).items():
ch_cache[ch_id] = o.check_cache(properties=chs[ch])
else:
cache = self._cache.copy()
if subsystems:
for ss in self.__subsystems__:
cache[ss] = getattr(self, ss)._cache.copy()
if channels:
for chs, ch_dict in self._channel_cache.items():
ch_cache = {}
cache[chs] = ch_cache
for ch in ch_dict:
ch_cache[ch] = ch_dict[ch]._cache.copy()
return cache
def reopen_connection(self):
"""Reopen the connection to the instrument.
"""
message = fill(cleandoc(
'''This method is used to reopen a connection whose state
is suspect, for example the last message sent did not
go through, and should be implemented by classes
subclassing HasIProps'''),
80)
raise NotImplementedError(message)
def default_get_iproperty(self, iprop, cmd, *args, **kwargs):
"""Method used by default by the IProperty to retrieve a value from an
instrument.
Parameters
----------
iprop : IProperty
Reference to the property issuing this call.
cmd :
Command used by the implementation to determine what should be done
to get the answer from the instrument.
*args :
Additional arguments necessary to retrieve the instrument state.
**kwargs :
Additional keywords arguments necessary to retrieve the instrument
state.
"""
mess = fill(cleandoc('''Method used by default by the IProperty to
retrieve a value from an instrument. Should be implemented by
classes subclassing HasIProps.'''), 80)
raise NotImplementedError(mess)
def default_set_iproperty(self, iprop, cmd, *args, **kwargs):
"""Method used by default by the IProperty to set an instrument value.
Parameters
----------
iprop : IProperty
Reference to the property issuing this call.
cmd :
Command used by the implementation to determine what should be done
to set the instrument state.
*args :
Additional arguments necessary to retrieve the instrument state.
**kwargs :
Additional keywords arguments necessary to retrieve the instrument
state.
"""
mess = fill(cleandoc('''Method used by default by the IProperty to
set an instrument value. Should be implemented by
classes subclassing HasIProps.'''), 80)
raise NotImplementedError(mess)
def default_check_instr_operation(self, iprop, value, i_value):
"""Method used by default by the IProperty to check the instrument
operation.
Parameters
----------
iprop : IProperty
Reference to the property issuing this call.
value :
Value assigned by the user.
i_value :
Value computed by the pre_set method of the IProperty.
Returns
-------
result : bool
Is everything ok ? Can we assume that the last operation succeeded.
precision :
Any precision about the situation, this can be any object but
something should always be returned.
"""
mess = fill(cleandoc('''Method used by default by the IProperty to
check the instrument operation. Should be implemented by
classes subclassing HasIProps.'''), 80)
raise NotImplementedError(mess)
def _generic_get_channel(self, name, ch_cls, ch_id):
""" Generic implementation of the channel getter.
This function manages the channel cache and is responsible for
delivering to the user the channel object corresponding to the given
id.
Parameters
----------
name : unicode
Name of the channel. This the name of the class attribute used to
define the channel. This argument is directly provided by the
specific get_* channel method.
ch_cls : type
Class of the channel which should be created if necessary.
ch_id :
Object used to identified the channel this can typically be an
integer or a string. In any case it should be hashable.
Returns
-------
channel : Channel
Channel instance bound to this object with the correct id.
"""
ch_cache = self._channel_cache
if name in ch_cache and ch_id in ch_cache[name]:
return ch_cache[name][ch_id]
else:
ch = ch_cls(self, ch_id,
caching_allowed=self._ch_cache_allowed[name],
caching_permissions=self._ch_caching.get(name, {}))
ch_cache[name][ch_id] = ch
return ch
AbstractHasIProp.register(HasIProps)
| bsd-3-clause | 740,333,654,297,545,200 | 34.539084 | 79 | 0.560713 | false |
fosfataza/protwis | build/management/commands/parse_excel_annotations.py | 1 | 18588 | from django.core.management.base import BaseCommand, CommandError
from django.core.management import call_command
from django.conf import settings
from django.db import connection
from common.alignment import Alignment, ClosestReceptorHomolog
from protein.models import Protein, ProteinSegment
from structure.models import Structure
import datetime
import logging
from optparse import make_option
import os
import shutil
import xlrd
import yaml
from collections import OrderedDict
_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
def dict_constructor(loader, node):
return OrderedDict(loader.construct_pairs(node))
def represent_ordereddict(dumper, data):
value = []
for item_key, item_value in data.items():
node_key = dumper.represent_data(item_key)
node_value = dumper.represent_data(item_value)
value.append((node_key, node_value))
return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', value)
yaml.add_representer(OrderedDict, represent_ordereddict)
yaml.add_constructor(_mapping_tag, dict_constructor)
class Command(BaseCommand):
help = 'Basic functions for build scrips'
logger = logging.getLogger(__name__)
def add_arguments(self, parser):
parser.add_argument('-f', '--filename',
action='store',
dest='filename',
help='Path to Uniprot text file')
parser.add_argument('-m',
action='store_true',
default=False,
help='Run main template search. Updates Xtal_Templ.csv with closest receptor homologs')
annotation_source_file = os.sep.join([settings.DATA_DIR, 'structure_data', 'Structural_Annotation.xlsx'])
xtal_seg_end_file = os.sep.join([settings.DATA_DIR, 'structure_data', 'annotation', 'xtal_segends.yaml'])
mod_xtal_seg_end_file = os.sep.join([settings.DATA_DIR, 'structure_data', 'annotation', 'mod_xtal_segends.yaml'])
xtal_seg_end_bw_file = os.sep.join([settings.DATA_DIR, 'structure_data', 'annotation', 'xtal_segends_bw.yaml'])
non_xtal_seg_end_file = os.sep.join([settings.DATA_DIR, 'structure_data', 'annotation', 'non_xtal_segends.yaml'])
non_xtal_seg_end_bw_file = os.sep.join([settings.DATA_DIR, 'structure_data', 'annotation', 'non_xtal_segends_bw.yaml'])
all_anomalities_file = os.sep.join([settings.DATA_DIR, 'structure_data', 'annotation', 'all_anomalities.yaml'])
xtal_anomalities_file = os.sep.join([settings.DATA_DIR, 'structure_data', 'annotation', 'xtal_anomalities.yaml'])
sequence_file = os.sep.join([settings.DATA_DIR, 'structure_data', 'annotation', 'sequences.yaml'])
if not os.path.exists(os.sep.join([settings.DATA_DIR, 'structure_data', 'annotation'])):
os.makedirs(os.sep.join([settings.DATA_DIR, 'structure_data', 'annotation']))
def handle(self, *args, **options):
self.data = self.parse_excel(self.annotation_source_file)
self.dump_files()
# self.analyse_annotation_consistency()
self.find_representatives()
if options['m']:
self.main_template_search()
def parse_excel(self,path):
workbook = xlrd.open_workbook(path)
worksheets = workbook.sheet_names()
d = {}
for worksheet_name in worksheets:
if worksheet_name in d:
print('Error, worksheet with this name already loaded')
continue
d[worksheet_name] = OrderedDict()
worksheet = workbook.sheet_by_name(worksheet_name)
num_rows = worksheet.nrows - 1
num_cells = worksheet.ncols - 1
curr_row = 0 #skip first, otherwise -1
headers = []
for i in range(num_cells):
h = worksheet.cell_value(0, i)
if h=="":
#replace header with index if empty
h = "i_"+str(i)
if h in headers:
# print('already have ',h)
h += "_"+str(i)
# print(h)
                headers.append(h)  # keep the de-duplicated / placeholder header computed above
for curr_row in range(1,num_rows+1):
row = worksheet.row(curr_row)
key = worksheet.cell_value(curr_row, 0)
if key=='':
#in case there is no key for whatever reason
# print("no key!")
continue
# if key in d[worksheet_name]:
# print(key, "already in",worksheet_name)
d[worksheet_name][key] = OrderedDict()
temprow = {}
for curr_cell in range(num_cells):
# cell_type = worksheet.cell_type(curr_row, curr_cell)
cell_value = worksheet.cell_value(curr_row, curr_cell)
# temprow.append(cell_value)
if headers[curr_cell] not in d[worksheet_name][key]:
#do not overwrite
d[worksheet_name][key][headers[curr_cell]] = cell_value
# if curr_row>2: break
return d
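    # Shape of the returned structure (sheet, row and column names below are only
    # illustrative; the real ones come from Structural_Annotation.xlsx):
    #
    #     data = self.parse_excel(self.annotation_source_file)
    #     data['SegEnds_Xtal_Prot#']['<row key>']['1b']  # value of column '1b' for that row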
def analyse_annotation_consistency(self):
NonXtal = self.data["Bulges_Constr_NonXtal_GPCRdb#"]
Xtal = self.data["Bulges_Constr_Xtal_GPCRdb#"]
output = {}
counting_xtal = {}
counting_non_xtal = {}
for entry_protein,vals in NonXtal.items():
anomalies=[]
anomalies_xtal=[]
for key,val in vals.items():
if "x" in val and "_" not in val:
if val.index("x") in [1,2]:
anomalies.append(val)
if vals['Xtal Templ'] in Xtal:
#print(Xtal[vals['Xtal Templ']])
for key,val in Xtal[vals['Xtal Templ']].items():
if "x" in val and "_" not in val:
if val.index("x") in [1,2]:
anomalies_xtal.append(val)
if entry_protein==vals['Xtal Templ']:
list1 = list(set(anomalies) - set(anomalies_xtal))
list2 = list(set(anomalies_xtal) - set(anomalies))
if list1 or list2:
for i in list1:
if i not in counting_non_xtal:
counting_non_xtal[i] = 0
counting_non_xtal[i] += 1
for i in list2:
if i not in counting_xtal:
counting_xtal[i] = 0
counting_xtal[i] += 1
#print("ISSUE!")
#print(entry_protein)
#print("NonXtal_anomalies",anomalies,"Xtal_anomalies",anomalies_xtal)
if list1: print(entry_protein,vals['Xtal Templ'],"Present in non-xtal, but not xtal",list1)
if list2: print(entry_protein,vals['Xtal Templ'],"Present in xtal, but not non-xtal",list2)
print("Overall")
print("Present in non-xtal, but not xtal",counting_xtal)
print("Present in xtal, but not non-xtal",counting_non_xtal)
structures = self.data["SegEnds_Xtal_Prot#"]
structures_non_xtal = self.data["SegEnds_NonXtal_Prot#"]
info = {}
for structure,vals in structures.items():
if structure.split("_")[-1] == "wt":
# print(structure)
entry = vals['UniProt']
info[entry] = {}
for key,val in vals.items():
# print(val,key)
if len(key)>3:
continue
if not key:
continue
if key[-1]!="b" and key[-1]!="e":
continue
info[entry][key] = val
if structures_non_xtal[entry][key]!=val:
print("error with ",entry,key,"Xtal sheet:",val,"NonXtal sheet:",structures_non_xtal[entry][key])
print(structures_non_xtal[entry])
print(vals)
#print(structure,info)
# with open(self.xtal_seg_end_file, 'w') as outfile:
# yaml.dump(pdb_info, outfile)
def dump_files(self):
structures = self.data["SegEnds_Xtal_Prot#"]
pdb_info = {}
pdb_info_all = {}
for structure,vals in structures.items():
if structure.split("_")[-1] == "wt":
continue
if structure.split("_")[-1] == "dist":
continue
#print(structure)
pdb_id = structure.split("_")[-1]
pdb_info[pdb_id] = OrderedDict()
for key,val in vals.items():
if len(key)>3:
continue
if not key:
continue
if key[-1]!="b" and key[-1]!="e":
continue
pdb_info[pdb_id][key] = val
for structure,vals in structures.items():
entry = structure
pdb_info_all[entry] = OrderedDict()
for key,val in vals.items():
if len(key)>3:
continue
if not key:
continue
if key[-1]!="b" and key[-1]!="e":
continue
pdb_info_all[entry][key] = val
data = self.data["SegEnds_Xtal_BW#"]
Xtal_SegEnds_BW = {}
for structure,vals in data.items():
entry = structure
Xtal_SegEnds_BW[entry] = OrderedDict()
for key,val in vals.items():
if not key:
continue
if len(key)>3 and key[-1]!="b" and key[-1]!="e":
continue
Xtal_SegEnds_BW[entry][key] = val
data = self.data["SegEnds_NonXtal_BW#"]
NonXtal_SegEnds_BW = {}
for structure,vals in data.items():
entry = structure
NonXtal_SegEnds_BW[entry] = OrderedDict()
for key,val in vals.items():
if not key:
continue
if len(key)>3 and key[-1]!="b" and key[-1]!="e" and key!="XtalTempl":
continue
NonXtal_SegEnds_BW[entry][key] = val
data = self.data["SegEnds_NonXtal_Prot#"]
NonXtal_SegEnds_Prot = {}
for structure,vals in data.items():
entry = structure
NonXtal_SegEnds_Prot[entry] = OrderedDict()
for key,val in vals.items():
if not key:
continue
if len(key)>3 and key[-1]!="b" and key[-1]!="e" and key!="Xtal Templ":
continue
NonXtal_SegEnds_Prot[entry][key] = val
# data = self.data["Bulges_Constr_Xtal_GPCRdb#"]
# Xtal_Bulges_Constr_GPCRdb = {}
# for structure,vals in data.items():
# entry = structure
# Xtal_Bulges_Constr_GPCRdb[entry] = OrderedDict()
# for key,val in vals.items():
# if not key:
# continue
# Xtal_Bulges_Constr_GPCRdb[entry][key] = val
data = self.data["Bulges_Constr_NonXtal_GPCRdb#"]
NonXtal_Bulges_Constr_GPCRdb = {}
for structure,vals in data.items():
entry = structure
NonXtal_Bulges_Constr_GPCRdb[entry] = OrderedDict()
for key,val in vals.items():
if not key:
continue
NonXtal_Bulges_Constr_GPCRdb[entry][key] = val
data = self.data["Seqs"]
Seqs = {}
for structure,vals in data.items():
entry = structure
Seqs[entry] = OrderedDict()
for key,val in vals.items():
if not key:
continue
Seqs[entry][key] = val
pdb_info = OrderedDict(sorted(pdb_info.items()))
with open(self.mod_xtal_seg_end_file, 'w') as outfile:
yaml.dump(pdb_info, outfile, indent=4)
pdb_info_all = OrderedDict(sorted(pdb_info_all.items()))
with open(self.xtal_seg_end_file, 'w') as outfile:
yaml.dump(pdb_info_all, outfile, indent=4)
Xtal_SegEnds_BW = OrderedDict(sorted(Xtal_SegEnds_BW.items()))
with open(self.xtal_seg_end_bw_file, 'w') as outfile:
yaml.dump(Xtal_SegEnds_BW, outfile, indent=4)
NonXtal_SegEnds_BW = OrderedDict(sorted(NonXtal_SegEnds_BW.items()))
with open(self.non_xtal_seg_end_bw_file, 'w') as outfile:
yaml.dump(NonXtal_SegEnds_BW, outfile, indent=4)
NonXtal_SegEnds_Prot = OrderedDict(sorted(NonXtal_SegEnds_Prot.items()))
with open(self.non_xtal_seg_end_file, 'w') as outfile:
yaml.dump(NonXtal_SegEnds_Prot, outfile, indent=4)
# Xtal_Bulges_Constr_GPCRdb = OrderedDict(sorted(Xtal_Bulges_Constr_GPCRdb.items()))
# with open(self.xtal_anomalities_file, 'w') as outfile:
# yaml.dump(Xtal_Bulges_Constr_GPCRdb, outfile, indent=4)
NonXtal_Bulges_Constr_GPCRdb = OrderedDict(sorted(NonXtal_Bulges_Constr_GPCRdb.items()))
with open(self.all_anomalities_file, 'w') as outfile:
yaml.dump(NonXtal_Bulges_Constr_GPCRdb, outfile, indent=4)
Seqs = OrderedDict(sorted(Seqs.items()))
with open(self.sequence_file, 'w') as outfile:
yaml.dump(Seqs, outfile, indent=4)
def main_template_search(self):
output_csv = ''
changes = {}
counter = 0
for protein, values in self.data['Xtal_Templ'].items():
values = self.data['Xtal_Templ'][protein]
crh = ClosestReceptorHomolog(protein)
closest_hom = crh.find_closest_receptor_homolog()
if values['Template']!=closest_hom.protein.entry_name:
changes[protein] = [values['Template'], closest_hom.protein.entry_name]
output_csv+='{},{}\n'.format(protein, closest_hom.protein.entry_name)
counter+=1
with open(os.sep.join([settings.DATA_DIR,'structure_data','annotation','xtal_templates.csv']),'w') as f:
f.write(output_csv)
if len(changes)>0:
print('Changed {} entries out of {} (reference: [changed_from, changed_to]):'.format(len(changes), counter))
print(changes)
print('INFO: xtal_templates.csv file updated. Please update Structural_Annotation.xlsx Xtal_Templ sheet with this .csv')
return changes
def find_representatives(self):
grouped = {}
counter = 0
xtals, nums, states, resolutions = [], [], [], []
out = OrderedDict()
exceptions = ['4L6R']
with open(os.sep.join([settings.DATA_DIR,'structure_data','annotation','xtal_representatives.yaml']), 'w') as outfile:
for key, values in self.data['SegEnds_Xtal_Prot#'].items():
if counter==0:
prev_rec = values['UniProt']
counter+=1
if values['PDB']=='_wt' or 'dist' in key:
continue
if values['Repr']!='-':
if values['Repr']=='Repr_Act':
actstat = 'Active'
elif values['Repr']=='Repr_Inter':
actstat = 'Intermediate'
elif values['Repr']=='Repr_Inact':
actstat = 'Inactive'
out[values['PDB']] = actstat
yaml.dump(out, outfile, default_flow_style=False)
# if prev_rec!=values['UniProt'] or counter==len(self.data['SegEnds_Xtal_Prot#']):
# if counter==len(self.data['SegEnds_Xtal_Prot#']):
# xtals.append(key)
# nums.append(values['#Res'])
# states.append(values['State'])
# resolutions.append(values['Resolution'])
# if len(xtals)>0:
# max_num_ia, max_x_ia, max_num_a, max_x_a, ia_count, a_count = 0, 0, 0, 0, 0, 0
# for x, n, s, r in zip(xtals, nums, states, resolutions):
# if s=='Inact':
# if ia_count==0:
# max_res_ia = r
# if n>max_num_ia and x[-4:] not in exceptions:
# max_num_ia = n
# max_x_ia = x
# max_res_ia = r
# elif n==max_num_ia and x[-4:] not in exceptions:
# if r<max_res_ia:
# max_num_ia = n
# max_x_ia = x
# max_res_ia = r
# ia_count+=1
# elif s=='Act':
# if a_count==0:
# max_res_a = r
# if n>max_num_a and x[-4:] not in exceptions:
# max_num_a = n
# max_x_a = x
# elif n==max_num_a and x[-4:] not in exceptions:
# if r<max_res_a:
# max_num_a = n
# max_x_a = x
# max_res_a = r
# a_count+=1
# for x, n in zip(xtals, nums):
# if x==max_x_ia:
# out[x] = 'Repr_Inact'
# elif x==max_x_a:
# out[x] = 'Repr_Act'
# else:
# out[x] = '-'
# yaml.dump(out, outfile, indent=4)
# xtals, nums, states, resolutions = [], [], [], []
# out = OrderedDict()
# xtals.append(key)
# nums.append(values['#Res'])
# states.append(values['State'])
# resolutions.append(values['Resolution'])
# else:
# xtals.append(key)
# nums.append(values['#Res'])
# states.append(values['State'])
# resolutions.append(values['Resolution'])
# prev_rec = values['UniProt']
| apache-2.0 | 6,456,276,811,557,499,000 | 42.12761 | 132 | 0.498924 | false |
vinaymayar/python-game-workshop | lesson6/solution_return.py | 1 | 1783 | """lesson6/solution_return.py
Contains solutions for functions that return values.
"""
# Exercise 6: Write a function my_name that returns your name. Remember
# to test your function by calling it and printing the result.
# print(my_name()) -> "Vinay Mayar"
def my_name():
return "Vinay Mayar"
print(my_name())
# Exercise 7: Write a function maximum that takes four numbers and returns
# the largest number.
# print(maximum(-3, 0, 4, 9)) -> 9
def maximum(a, b, c, d):
if a > b and a > c and a > d:
return a
elif b > c and b > d:
return b
elif c > d:
return c
else:
return d
print(maximum(-3, 0, 4, 9))
# Exercise 8: Write a function that pluralizes a word. For most nouns,
# the plural is simply formed by adding an 's' to the end. For nouns
# that end in 's', 'ch', 'sh', 'x', 'o', or 'z', add 'es' to the end.
# For nouns that end in 'y', replace the 'y' with 'ies'.
# HINT: You can get the last letter of a string as follows:
# string = "hello"
# string[-1] -> "o"
# and the last two letters with
# string[-2:] -> "lo"
def pluralize(word):
last_letter = word[-1]
last_two_letters = word[-2:]
pluralized_word = word
# Your code here
if last_letter == "y":
pluralized_word = word[:-1] + "ies"
elif last_letter == 's' or last_two_letters == 'ch' or \
last_two_letters == 'sh' or last_letter == 'x' or \
last_letter == 'o' or last_letter == 'z':
pluralized_word = word + "es"
else:
pluralized_word = word + "s"
return pluralized_word
print(pluralize("program")) # -> programs
print(pluralize("couch")) # -> couches
print(pluralize("kite")) # -> kites
print(pluralize("potato")) # -> potatoes
print(pluralize("flurry")) # -> flurries
| mit | -6,011,757,241,365,324,000 | 24.84058 | 74 | 0.607403 | false |
fcaneto/py_programming_exercises | others/sowpods/sowpods.py | 1 | 3068 | import time
def timed_function(f):
def f_timer(*args, **kwargs):
start = time.time()
result = f(*args, **kwargs)
end = time.time()
print('> %s took %s seconds' % (f.__name__, end - start))
return result
return f_timer
def read_swopods():
words = []
with open('sowpods.txt') as f:
words = [w.strip() for w in f.readlines()]
return words
@timed_function
def find_longest_palindrome(words):
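	# Returns the longest palindromic word in the list, or None if there is none.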
def is_palindrome(w):
i = 0
j = len(w) - 1
while i < j:
if w[i] != w[j]:
break
i += 1
j -= 1
return i >= j
lp = None
for word in words:
if is_palindrome(word):
if lp is None or len(word) > len(lp):
lp = word
return lp
@timed_function
def find_most_repeated_char(words):
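	# Returns a (char, count, word) tuple for the character repeated most often within a single word.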
def find_most_repeated_in_word(w):
frequencies = {}
most_repeated = None # tuple (char, frequency)
for c in w:
if c not in frequencies:
frequencies[c] = 1
else:
frequencies[c] = frequencies[c] + 1
if most_repeated is None or frequencies[c] > most_repeated[1]:
most_repeated = (c, frequencies[c])
return most_repeated
all_words_most_repeated = None # tuple (char, frequency, word)
for word in words:
char, frequency = find_most_repeated_in_word(word)
if all_words_most_repeated is None or frequency > all_words_most_repeated[1]:
all_words_most_repeated = (char, frequency, word)
return all_words_most_repeated
def get_histogram(w):
"""
'<char><frequency>' string, sorted by chars,
ex: bus => b1s1u1
"""
histogram = {}
for c in w:
if c not in histogram:
histogram[c] = 1
else:
histogram[c] += 1
key = ''
for c in sorted(histogram.keys()):
key += '%s%s' % (c, histogram[c])
return key
@timed_function
def find_longest_anagram(words):
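	# Returns the group of anagrams (words sharing a letter histogram) containing the longest such word.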
histograms = {}
longest_anagrams = None # tuple 2 words
for w in words:
key = get_histogram(w)
if key not in histograms:
histograms[key] = [w]
else:
histograms[key].append(w)
if longest_anagrams is None or len(w) > len(longest_anagrams[0]):
longest_anagrams = histograms[key]
return longest_anagrams
from pympler import summary, muppy
if __name__ == "__main__":
print('SOWPODS analyzer')
words = read_swopods()
print('Total: %s words' % len(words))
print('%s is the longest palindrome' % (find_longest_palindrome(words)))
char, frequency, word = find_most_repeated_char(words)
print("'%s' is the most repeated character in one word (in '%s', occurs %s times)" % (char, word, frequency))
print("%s are the longest anagrams" % (find_longest_anagram(words)))
all_objects = muppy.get_objects()
sum1 = summary.summarize(all_objects)
summary.print_(sum1) | mit | -7,031,068,858,553,519,000 | 23.75 | 113 | 0.555737 | false |
pasaunders/code-katas | src/parenthetics.py | 1 | 2219 | """A script to test if the parentheticals of a string are matched."""
class Queue(object):
"""A singly linked list, to hold parens."""
def __init__(self):
"""Initialize a Queue class."""
self.head = None
self.length = 0
# def enqueue(self, data):
# """Add a value to the queue's head."""
# node = Node(data, self.head)
# self.length += 1
# if self.length == 0:
# self.tail = node
# if self.length > 0:
# self.head.prev = node
# old_head = self.head
# self.head = node
# self.head.next = old_head
def enqueue(self, val):
"""Append a val to the end of the queue."""
node = Node(val)
if self.head is None:
self.head = node
else:
tail = self.head
while tail.next:
tail = tail.next
tail.next = node
self.length += 1
def dequeue(self):
"""Remove a value from the queue's tail."""
val = self.head.data
self.head = self.head.next
self.length -= 1
return val
def size(self):
"""Return the size of the queue."""
return self.length
def peek(self):
"""Look at the next value in the queue."""
        return self.head.data
class Node(object):
"""Node to build a linked list."""
def __init__(self, data=None, next=None):
"""Initialize a node for the queue."""
self.data = data
self.next = next
def paren(test_string):
"""Parentheticals: 0 for balanced, 1 for open, -1 for broken."""
left_paren = Queue()
right_paren = Queue()
for i in range(len(test_string)):
if test_string[i] == '(':
left_paren.enqueue(i)
elif test_string[i] == ')':
right_paren.enqueue(i)
while left_paren.length != 0 and right_paren.length != 0:
if left_paren.dequeue() >= right_paren.dequeue():
return -1
if left_paren.length == 0 and right_paren.length == 0:
return 0
if left_paren.length == 0:
return -1
if right_paren.length == 0:
return 1
else:
return 'The while loop broke.'
| mit | -3,740,751,714,495,872,500 | 26.7375 | 69 | 0.529518 | false |
joxer/Baka-No-Voltron | tmp/android.dist/private/renpy/add_from.py | 1 | 3056 | # Copyright 2004-2015 Tom Rothamel <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import collections
import renpy
import os
import codecs
# A map from filename to position, target label pairs.
missing = collections.defaultdict(list)
def report_missing(target, filename, position):
"""
Reports that the call statement ending at `position` in `filename`
is missing a from clause.
`target`
        The string name of the label being called.
"""
missing[filename].append((position, target))
# Labels that we've created while running add_from.
new_labels = set()
def generate_label(target):
"""
Generate a reasonable and unique new label for a call to `target`.
"""
n = 0
while True:
if n:
label = "_call_{}_{}".format(target, n)
else:
label = "_call_{}".format(target)
if not renpy.exports.has_label(label) and not (label in new_labels):
break
n += 1
new_labels.add(label)
return label
def process_file(fn):
"""
Adds missing from clauses to `fn`.
"""
if not os.path.exists(fn):
return
edits = missing[fn]
edits.sort()
with codecs.open(fn, "r", "utf-8") as f:
data = f.read()
# How much of the input has been consumed.
consumed = 0
# The output.
output = u""
for position, target in edits:
output += data[consumed:position]
consumed = position
output += " from {}".format(generate_label(target))
output += data[consumed:]
with codecs.open(fn + ".new", "w", "utf-8") as f:
f.write(output)
try:
os.unlink(fn + ".bak")
except:
pass
os.rename(fn, fn + ".bak")
os.rename(fn + ".new", fn)
def add_from():
renpy.arguments.takes_no_arguments("Adds from clauses to call statements that are missing them.")
for fn in missing:
if fn.startswith(renpy.config.gamedir):
process_file(fn)
return False
renpy.arguments.register_command("add_from", add_from)
| gpl-2.0 | -3,361,583,998,807,433,700 | 25.119658 | 101 | 0.661976 | false |
chrislit/abydos | tests/distance/test_distance_baulieu_i.py | 1 | 2856 | # Copyright 2019-2020 by Christopher C. Little.
# This file is part of Abydos.
#
# Abydos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abydos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Abydos. If not, see <http://www.gnu.org/licenses/>.
"""abydos.tests.distance.test_distance_baulieu_i.
This module contains unit tests for abydos.distance.BaulieuI
"""
import unittest
from abydos.distance import BaulieuI
class BaulieuITestCases(unittest.TestCase):
"""Test BaulieuI functions.
abydos.distance.BaulieuI
"""
cmp = BaulieuI()
cmp_no_d = BaulieuI(alphabet=0)
def test_baulieu_i_dist(self):
"""Test abydos.distance.BaulieuI.dist."""
# Base cases
self.assertEqual(self.cmp.dist('', ''), 0.0)
self.assertEqual(self.cmp.dist('a', ''), 0.0)
self.assertEqual(self.cmp.dist('', 'a'), 0.0)
self.assertEqual(self.cmp.dist('abc', ''), 0.0)
self.assertEqual(self.cmp.dist('', 'abc'), 0.0)
self.assertEqual(self.cmp.dist('abc', 'abc'), 0.0)
self.assertEqual(self.cmp.dist('abcd', 'efgh'), 1.0)
self.assertAlmostEqual(self.cmp.dist('Nigel', 'Niall'), 0.75)
self.assertAlmostEqual(self.cmp.dist('Niall', 'Nigel'), 0.75)
self.assertAlmostEqual(self.cmp.dist('Colin', 'Coiln'), 0.75)
self.assertAlmostEqual(self.cmp.dist('Coiln', 'Colin'), 0.75)
self.assertAlmostEqual(
self.cmp.dist('ATCAACGAGT', 'AACGATTAG'), 0.5545454545
)
def test_baulieu_i_sim(self):
"""Test abydos.distance.BaulieuI.sim."""
# Base cases
self.assertEqual(self.cmp.sim('', ''), 1.0)
self.assertEqual(self.cmp.sim('a', ''), 1.0)
self.assertEqual(self.cmp.sim('', 'a'), 1.0)
self.assertEqual(self.cmp.sim('abc', ''), 1.0)
self.assertEqual(self.cmp.sim('', 'abc'), 1.0)
self.assertEqual(self.cmp.sim('abc', 'abc'), 1.0)
self.assertEqual(self.cmp.sim('abcd', 'efgh'), 0.0)
self.assertAlmostEqual(self.cmp.sim('Nigel', 'Niall'), 0.25)
self.assertAlmostEqual(self.cmp.sim('Niall', 'Nigel'), 0.25)
self.assertAlmostEqual(self.cmp.sim('Colin', 'Coiln'), 0.25)
self.assertAlmostEqual(self.cmp.sim('Coiln', 'Colin'), 0.25)
self.assertAlmostEqual(
self.cmp.sim('ATCAACGAGT', 'AACGATTAG'), 0.4454545455
)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 3,979,587,235,103,986,700 | 36.578947 | 70 | 0.642157 | false |
rmmbear/Android-QA-Helper | helper/extract_data.py | 1 | 30899 | """
Other modules expect that all extraction functions' names start with
'extract_'.
"""
import re
import string
import logging
from collections import OrderedDict
import helper
LOGGER = logging.getLogger(__name__)
SIZE_PREFIXES = {x:1024**y for y, x in enumerate(" KMGTP")}
# source: https://www.khronos.org/registry/OpenGL/index_es.php
# last updated: 2018.01.06
TEXTURE_COMPRESSION_IDS = {
"GL_AMD_compressed_ATC_texture" : "ATC",
"GL_ATI_compressed_texture_atitc" : "ATC",
"GL_ATI_texture_compression_atitc" : "ATC",
"GL_OES_compressed_ETC1_RGB8_texture" : "ETC1",
"GL_OES_compressed_ETC2_RGB8_texture" : "ETC2",
"GL_EXT_texture_compression_s3tc_srgb" : "S3TC (DXTC) - sRGB",
"GL_EXT_texture_compression_s3tc" : "S3TC (DXTC)",
"GL_EXT_texture_compression_dxt1" : "DXT1",
"GL_IMG_texture_compression_pvrtc" : "PVRTC",
"GL_IMG_texture_compression_pvrtc2" : "PVRTC2",
"GL_AMD_compressed_3DC_texture" : "3DC",
"GL_EXT_texture_compression_latc" : "LATC",
"GL_NV_texture_compression_latc" : "LATC",
"GL_OES_texture_compression_astc" : "ASTC",
"GL_KHR_texture_compression_astc_hdr" : "ASTC HDR",
"GL_KHR_texture_compression_astc_ldr" : "ASTC LDR",
"GL_KHR_texture_compression_astc_sliced_3d" : "ASTC - sliced 3D",
"GL_EXT_texture_compression_rgtc" : "RGTC",
"GL_EXT_texture_compression_bptc" : "BPTC",
}
ABI_TO_ARCH = {
"armeabi" :"32bit (ARM)",
"armeabi-v7a":"32bit (ARM)",
"arm64-v8a" :"64bit (ARM64)",
"x86" :"32bit (Intel x86)",
"x86_64" :"64bit (Intel x86_64)",
"mips" :"32bit (Mips)",
"mips64" :"64bit (Mips64)",
}
#shell script for finding executables in PATH
SH_PATH_EXE = """
for dir in ${PATH//:/ }; do
for file in $dir/*; do
if [ -x "$file" ]; then
echo ${file##*/};
fi;
done;
done;
""".strip().replace("\n", "")
# shell script for extracting data from each logical cpu
SH_CPU_DATA = """
STARTING_PATH=/sys/devices/system/cpu;
NUM=0;
while true; do
dir=${STARTING_PATH}/cpu${NUM};
if [ ! -d "$dir" ]; then
break;
else
echo /// cpu${NUM};
echo ---- cpufreq ----;
for file in $dir/cpufreq/*; do
if [ ! -r "$file" ]; then
continue;
fi;
if [ ! -f "$file" ]; then
continue;
fi;
echo ${file##*/} : $(cat $file);
done;
echo ---- topology ----;
for file in $dir/topology/*; do
if [ ! -r "$file" ]; then
continue;
fi;
if [ ! -f "$file" ]; then
continue;
fi;
echo ${file##*/} : $(cat $file);
done;
fi;
((NUM++));
done;
""".strip()
INFO_SOURCES = {
"getprop" : ("getprop",),
"iserial" : ("cat", "/sys/class/android_usb/android0/iSerial"),
"cpuinfo" : ("cat", "/proc/cpuinfo"),
"max_cpu_freq" : ("cat", "/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq"),
"possible_cpu_cores" : ("cat", "/sys/devices/system/cpu/possible"),
"surfaceflinger_dump" : ("dumpsys", "SurfaceFlinger"),
"display_dump" : ("dumpsys", "display"),
"meminfo" : ("cat", "/proc/meminfo"),
"kernel_version" : ("cat", "/proc/version"),
"shell_environment" : ("printenv",),
"available_commands" : (SH_PATH_EXE,),
"cpu_data" : (SH_CPU_DATA,),
"device_features" : ("pm", "list", "features"),
"device_libraries" : ("pm", "list", "libraries"),
"system_apps" : ("pm", "list", "packages", "-s"),
"third-party_apps" : ("pm", "list", "packages", "-3"),
"screen_size" : ("wm", "size"),
"screen_density" : ("wm", "density"),
"internal_sd_space" : ("df", "\"$EXTERNAL_STORAGE\""),
"external_sd_space" : ("df", "\"$SECONDARY_STORAGE\""),
#debug info included in debug dump
"build.prop" : ("cat", "/system/build.prop"),
"disk_space" : ("df",),
#debug info included only in full debug dump
"debug_dumpsys_full" : ("dumpsys",),
"debug_directory_map" : ("ls", "-alR"),
"debug_permission_list" : ("pm", "list", "permissions"),
"debug_device_instrumentation" : ("pm", "list", "instrumentation"),
}
NOTABLE_FEATURES = [
("Bluetooth", "feature:android.hardware.bluetooth"),
("Bluetooth Low-Energy", "feature:android.hardware.bluetooth_le"),
("IR Sensor", "feature:android.hardware.consumerir"),
("Fingerprint Scanner", "feature:android.hardware.fingerprint"),
("NFC", "feature:android.hardware.nfc"),
("CDMA Telephony", "feature:android.hardware.telephony.cdma"),
("GSM Telephony", "feature:android.hardware.telephony.gsm"),
("VR Headtracking", "feature:android.hardware.vr.headtracking"),
("VR Mode", "feature:android.software.vr.mode"),
("High-Performance VR Mode", "feature:android.hardware.vr.high_performance"),
("WiFi-Aware", "feature:android.hardware.wifi.aware"),
("WiFi", "feature:android.hardware.wifi"),
]
# information surfaced to the user in device dump
SURFACED_VERBOSE = OrderedDict()
SURFACED_VERBOSE["Identity"] = (
("Model", "device_model"),
("Manufacturer", "device_manufacturer"),
("Device", "device_device"),
("Name", "device_name"),
("Brand", "device_brand"),
("Serial Number", "device_serial_number"),
)
SURFACED_VERBOSE["System"] = (
("API Level", "android_api_level"),
("Android Version", "android_version"),
("Aftermarket Firmware", "aftermarket_firmware"),
("Aftermarket Firmware Version", "aftermarket_firmware_version"),
("Build ID", "android_build_id"),
("Build Fingerprint", "android_build_fingerprint"),
("Kernel Version", "kernel_version"),
)
SURFACED_VERBOSE["Chipset"] = (
("Board", "board"),
("RAM", "ram_capacity"),
("GPU Vendor", "gpu_vendor"),
("GPU Model", "gpu_model"),
("OpenGL ES Version", "gles_version"),
("Known Texture Compression Types", "gles_texture_compressions"),
("CPU Summary", "cpu_summary"),
("CPU Architecture", "cpu_architecture"),
("CPU Clock Range", "cpu_clock_range"),
("Available ABIs", "cpu_abis"),
("CPU Features", "cpu_features"),
# Some chipsets include multiple CPUs that device switches between depending on power needed for given task
# in such cases, following entries will be added for each cpu
# ("CPU# Core Count", "cpu#_core_count"),
# ("CPU# Clock Range", "cpu#_clock_range"),
# ("CPU# Clock Jump intervals", "cpu#_clock_intervals"),
)
SURFACED_VERBOSE["Display"] = (
("Resolution", "display_resolution"),
("Density", "display_density"),
("X-DPI", "display_x-dpi"),
("Y-DPI", "display_y-dpi"),
#("Size", "display_physical_size"),
)
SURFACED_VERBOSE["Storage"] = (
("Internal Storage Path", "internal_sd_path"),
("Internal Storage Space Total", "internal_sd_capacity"),
("Internal Storage Space Available", "internal_sd_free"),
("SD Card Path", "external_sd_path"),
("SD Card Space Total", "external_sd_capacity"),
("SD Card Space Available", "external_sd_free"),
)
SURFACED_VERBOSE["Notable Features"] = ((None, "device_notable_features"),)
SURFACED_VERBOSE["Device Features"] = ((None, "device_features"),)
SURFACED_VERBOSE["System Apps"] = ((None, "system_apps"),)
SURFACED_VERBOSE["Third-Party Apps"] = ((None, "third-party_apps"),)
SURFACED_VERBOSE["Shell Commands"] = ((None, "shell_commands"),)
SURFACED_VERBOSE["GLES Extensions"] = ((None, "gles_extensions"),)
INFO_KEYS = [
"aftermarket_firmware",
"aftermarket_firmware_version",
"android_api_level",
"android_build_fingerprint",
"android_build_id",
"android_version",
"anr_trace_path",
"board",
"cpu_abis",
"cpu_architecture",
"cpu_clock_range",
"cpu_features",
"cpu_summary",
"cpu0_clock_intervals",
"cpu0_clock_range",
"cpu0_core_count",
"cpu0_max_frequency",
"cpu0_min_frequency",
#"cpu#_clock_intervals",
#"cpu#_clock_range",
#"cpu#_core_count",
#"cpu#_max_frequency",
#"cpu#_min_frequency",
"device_brand",
"device_device",
"device_features",
"device_manufacturer",
"device_model",
"device_name",
"device_notable_features",
"device_serial_number",
"display_density",
#"display_physical_size",
"display_resolution",
"display_x-dpi",
"display_y-dpi",
"external_sd_capacity",
"external_sd_free",
"external_sd_path",
"gles_extensions",
"gles_texture_compressions",
"gles_version",
"gpu_model",
"gpu_vendor",
"internal_sd_capacity",
"internal_sd_free",
"internal_sd_path",
"kernel_version",
"ram_capacity",
"shell_commands",
"system_apps",
"third-party_apps",
#"gpu_frequency",
#"gpu_ram",
#"ram_type",
]
def abi_to_arch(abi):
""""""
if abi not in ABI_TO_ARCH:
return f"Unknown ({abi})"
return ABI_TO_ARCH[abi]
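# Example: abi_to_arch("arm64-v8a") returns "64bit (ARM64)"; unrecognized ABIs return "Unknown (<abi>)".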
def run_extraction_command(device, source_name, use_cache=True, keep_cache=True):
"""Run extraction command and return its output.
If there is a value stored under the corresponding source name in
device's _init_cache, that value is then returned instead.
"""
from helper.device import Device
try:
if not use_cache:
raise KeyError
return device._init_cache[source_name]
except KeyError:
if not isinstance(device, Device):
return ""
out = device.shell_command(
*INFO_SOURCES[source_name], return_output=True, as_list=False)
if keep_cache:
device._init_cache[source_name] = out
return out
def bytes_to_human(byte_size: int) -> str:
"""Convert bytes to human readable size.
1KB = 1024B
"""
for power, letter in enumerate(" KMGTPEZY"):
if byte_size < 1024**(power+1):
break
return f"{byte_size/1024**power:.2f}{letter.strip()}B"
def df_parser(df_output: str) -> list:
"""For some infuriating reason, some vendors opt to include a
version of df that does not accept any options, so we need to
manually detect size formatting used in the output.
May they burn in hell forever.
Return list of four-element tuples. All sizes are in bytes,
tuple[0] = file system
tuple[1] = total space
tuple[2] = used space
tuple[3] = free space
"""
#TODO: do a second pass on this function
df_output = [x.split() for x in df_output.splitlines()]
index_row = [x.lower() for x in df_output.pop(0)]
string_to_size = {
"1k-blocks":1024,
"1024-blocks":1024,
"512-blocks":512,
}
known_size_multiplier = 0
for name, size in string_to_size.items():
if name in index_row:
known_size_multiplier = size
# assume human-readable values if header does not reveal block size
total_column, used_column, free_column = None, None, None
for index, column_name in enumerate(index_row):
column_name = column_name.lower()
if column_name in ["1k-blocks", "512-blocks", "1024-blocks", "size"]:
total_column = index
if column_name in ["used", "%used"]:
used_column = index
if column_name in ["free", "available", "avail"]:
free_column = index
# check for missing columns
if sum((int(bool(x)) for x in (total_column, used_column, free_column))) < 3:
LOGGER.error("Could not find indices of all columns")
LOGGER.error("size: %s, used: %s, free: %s", total_column, used_column, free_column)
LOGGER.error("Index row is: %s", index_row)
re_search = re.compile("([0-9.]+)([%A-z]*)")
lines = []
accepted_chars = string.ascii_lowercase + string.digits + ",.%"
accepted_chars = set(accepted_chars)
for row in df_output:
if not row:
continue
#TODO: improve error-checking
if "denied" in row:
lines.append((row[0], -1, -1, -1))
continue
if total_column:
total_val = row[total_column].lower()
# if returns non-empty set, the string contained invalid characters
if set(total_val) - accepted_chars:
total_val = ""
if used_column:
used_val = row[used_column].lower()
if set(used_val) - accepted_chars:
used_val = ""
if free_column:
free_val = row[free_column].lower()
if set(free_val) - accepted_chars:
free_val = ""
# convert values to bytes
#TODO: and third, and fourth pass on this
# this if-else soup is somehow easier to understand than my previous solution
if total_val:
if known_size_multiplier:
total_val = float(total_val)* known_size_multiplier
else:
total_val, total_unit = re_search.search(row[total_column]).groups()
if total_unit:
total_val = float(total_val) * SIZE_PREFIXES[total_unit.upper()]
else:
total_val = -1
if used_val:
#TODO:subtract available from total instead of calculating from percentages
if "%" in used_val:
used_val = re_search.search(used_val).group(1)
used_val = float(used_val) * total_val / 100
elif known_size_multiplier:
used_val = float(used_val) * known_size_multiplier
else:
used_val, used_unit = re_search.search(used_val).groups()
if used_unit:
used_val = float(used_val) * SIZE_PREFIXES[used_unit.upper()]
else:
used_val = -1
if free_val:
if known_size_multiplier:
free_val = float(free_val) * known_size_multiplier
else:
free_val, free_unit = re_search.search(free_val).groups()
if free_unit:
free_val = float(free_val) * SIZE_PREFIXES[free_unit.upper()]
else:
free_val = -1
lines.append((row[0], int(total_val), int(used_val), int(free_val)))
return lines
def extract_identity(device):
"""
"""
#serial = run_extraction_command(device, "iserial")
propnames = OrderedDict([
# OS
("[ro.build.version.release]", "android_version"),
("[ro.build.version.sdk]", "android_api_level"),
("[ro.build.id]", "android_build_id"),
("[ro.build.fingerprint]", "android_build_fingerprint"),
# AFTERMARKET OS
# only one of these will be available, if any at all
("[ro.build.version.fireos]", "aftermarket_firmware"),
("[ro.miui.ui.version.name]", "aftermarket_firmware"),
("[ro.oxygen.version]", "aftermarket_firmware"),
("[ro.build.version.opporom]", "aftermarket_firmware"),
("[ro.cm.version]", "aftermarket_firmware"),
("[ro.lineage.version]", "aftermarket_firmware"),
("[ro.aokp.version]", "aftermarket_firmware"),
("[ro.pa.version]", "aftermarket_firmware"),
("[ro.omni.version]", "aftermarket_firmware"),
("[ro.rr.version]", "aftermarket_firmware"),
("[ro.modversion]", "aftermarket_firmware_version"),
# AliOS, LeWaOS, Baidu Yi, CopperheadOS
# IDENTITY
# Sony devices specify human-readable model name in prop key [ro.semc.product.name]
("[ro.boot.serialno]", "device_serial_number"),
("[ro.product.model]", "device_model"),
("[ro.product.manufacturer]", "device_manufacturer"),
("[ro.product.device]", "device_device"),
("[ro.product.name]", "device_name"),
("[ro.product.brand]", "device_brand"),
# CPU
("[ro.product.cpu.abi]", "abi1"),
("[ro.product.cpu.abi2]", "abi2"),
("[ro.product.cpu.abilist]", "abilist"),
("[ro.board.platform]", "board"),
# PATHS
("[dalvik.vm.stack-trace-file]", "anr_trace_path"),
("[internal_sd_path]", "internal_sd_path"),
("[external_sd_path]", "external_sd_path"),
# OTHER
("[ro.boot.hardware.ddr]", "ram_type"),
("[ro.sf.lcd_density]", "display_density"),
])
getprop = run_extraction_command(device, "getprop").splitlines()
continued_value = list()
multiline_prop = False
for line in getprop:
if not line:
continue
if multiline_prop:
# prop name is kept from last loop
prop_value += line.strip()
else:
prop_name, prop_value = line.split(": ", maxsplit=1)
if prop_value[-1] != "]":
multiline_prop = True
continue
multiline_prop = False
prop_value = prop_value.strip("\n\r []")
if not prop_value:
continue
try:
destination = propnames[prop_name]
except KeyError:
continue
device.info_dict[destination] = prop_value.strip()
if device.info_dict["aftermarket_firmware"] is None:
if device.info_dict["aftermarket_firmware_version"] is not None:
device.info_dict["aftermarket_firmware"] = "Unknown OS"
else:
device.info_dict["aftermarket_firmware"] = "-none-"
device.info_dict["aftermarket_firmware_version"] = "-none-"
all_abis = []
for abi in ["abi1", "abi2", "abilist"]:
if abi in device.info_dict:
all_abis.extend([x.strip() for x in device.info_dict[abi].split(",")])
cpu_arch = abi_to_arch(all_abis[0])
device.info_dict["cpu_architecture"] = cpu_arch
all_abis = set(all_abis) # removes duplicates
device.info_dict["cpu_abis"] = list(all_abis)
kernel_version = run_extraction_command(device, "kernel_version").strip()
device.info_dict["kernel_version"] = kernel_version
def extract_chipset(device):
""""""
meminfo = run_extraction_command(device, "meminfo")
ram = re.search("(?:MemTotal\\:\\s*)([^A-z\\ ]*)", meminfo)
if ram:
device.info_dict["ram_capacity"] = ram.group(1).strip()
cpuinfo = run_extraction_command(device, "cpuinfo")
for re_ in ("(?:Hardware\\s*?\\:)([^\\n\\r]*)",
"(?:model\\ name\\s*?\\:)([^\\n\\r]*)",
#"(?:Processor\\s*?\\:)([^\\n\\r]*)",
):
board = re.search(re_, cpuinfo)
if board:
board = board.group(1).strip()
device.info_dict["board"] = board
break
cpu_features = re.search("(?:Features\\s*?\\:)([^\\n\\r]*)", cpuinfo)
if cpu_features:
cpu_features = [x.strip() for x in cpu_features.group(1).split()]
device.info_dict["cpu_features"] = cpu_features
def extract_cpu(device):
""""""
cpu_dict = {}
phys_cpu_dict = {}
max_frequency = 0
min_frequency = 99999999999
shell_out = run_extraction_command(device, "cpu_data")
if not shell_out:
return
shell_out = shell_out.split("/// cpu")[1::]
for cpu in shell_out:
cpu = cpu.strip().splitlines()
cpu_id = int(cpu[0].strip())
cpu_dict[cpu_id] = {}
for line in cpu[1::]:
if line.startswith("----"):
continue
line = line.strip().split(" : ", maxsplit=1)
if len(line) != 2:
if len(line) == 1 and not line[0]:
#skip empty lines
continue
LOGGER.warning("Unexpected output found while extracting cpu data: %s", str([line]))
# above is most likely happening when cores are put to sleep - those files then become unavailable
# first core should always be awake
#TODO: Ivestigate whether similar can happen when cpus are suddenly switched in multi-cpu chipset
# it most likely does
#TODO: which probably means that info from only one cpu can be scanned at a time
# grumble grumble
# update: 2018.09.19 - the answer is "depends", need more compat data
continue
cpu_dict[cpu_id][line[0]] = line[1]
current_id = 0
phys_id = 0
while True:
try:
cpu_dict[current_id]
except KeyError:
break
current_cpu_dict = cpu_dict[current_id]
try:
current_cpu_dict['cpuinfo_max_freq']
current_cpu_dict['cpuinfo_min_freq']
current_cpu_dict['scaling_available_frequencies']
except KeyError:
current_cpu_dict = {}
if not current_cpu_dict:
count = 0
broken_name = "_unknown{}"
n_broken_name = broken_name.format(count)
while n_broken_name in phys_cpu_dict:
count += 1
n_broken_name = broken_name.format(count)
broken_name = n_broken_name
phys_cpu_dict[broken_name] = {
'max_frequency':0,
'min_frequency':0,
'clock_range':"Unknown",
'clock_intervals':"Unknown",
'cores':"Unknown",
}
unknown_cores = 1
while True:
try:
cpu_dict[current_id + unknown_cores]
except KeyError:
break
next_dict = cpu_dict[current_id + unknown_cores]
try:
current_cpu_dict['cpuinfo_max_freq']
current_cpu_dict['cpuinfo_min_freq']
current_cpu_dict['scaling_available_frequencies']
if next_dict:
break
except KeyError:
pass
unknown_cores += 1
phys_cpu_dict[broken_name]['core_count'] = unknown_cores
current_id += unknown_cores
continue
phys_id = cpu_dict[current_id]['physical_package_id']
phys_cpu_dict[phys_id] = {}
phys_cpu_dict[phys_id]['max_frequency'] = int(current_cpu_dict['cpuinfo_max_freq'].strip()) / (1000000)
phys_cpu_dict[phys_id]['min_frequency'] = int(current_cpu_dict['cpuinfo_min_freq'].strip()) / (1000000)
phys_cpu_dict[phys_id]['clock_range'] = " - ".join([str(int(current_cpu_dict['cpuinfo_min_freq']) / (1000000)), str(int(current_cpu_dict['cpuinfo_max_freq']) / (1000000))]) + " GHz"
phys_cpu_dict[phys_id]['clock_intervals'] = [int(x.strip()) for x in current_cpu_dict['scaling_available_frequencies'].strip().split(" ")]
x, y = current_cpu_dict['core_siblings_list'].strip().split("-", maxsplit=1)
phys_cpu_dict[phys_id]['cores'] = [z for z in range(int(x), int(y)+1)]
phys_cpu_dict[phys_id]['core_count'] = len(phys_cpu_dict[phys_id]['cores'])
current_id = phys_cpu_dict[phys_id]['cores'][-1] + 1
device.info_dict["cpu_summary"] = []
for cpu_id, cpu in phys_cpu_dict.items():
device.info_dict[f"cpu{cpu_id}_max_frequency"] = cpu["max_frequency"]
if cpu["max_frequency"] > max_frequency:
max_frequency = cpu["max_frequency"]
device.info_dict[f"cpu{cpu_id}_min_frequency"] = cpu["min_frequency"]
if cpu["min_frequency"] < min_frequency:
min_frequency = cpu["min_frequency"]
device.info_dict[f"cpu{cpu_id}_clock_intervals"] = cpu["clock_intervals"]
device.info_dict[f"cpu{cpu_id}_core_count"] = cpu["core_count"]
device.info_dict["cpu_summary"].append(
"{}-core {} GHz".format(cpu["core_count"], cpu["max_frequency"]))
device.info_dict["cpu_clock_range"] = " - ".join(
[str(min_frequency), str(max_frequency)]) + " GHz"
#"cpu_clock_range",
#"cpu_max_frequency",
#"cpu_min_frequency",
#"cpu_summary",
#"cpu#_clock_intervals",
#"cpu#_clock_range",
#"cpu#_core_count",
# example cpu_dict entry
"""
'affected_cpus': '0 1 2 3'
'cpuinfo_max_freq': '1300000'
'cpuinfo_min_freq': '598000'
'cpuinfo_transition_latency': '1000'
'related_cpus': '0 1 2 3'
'scaling_available_frequencies': '1300000 1196000 1040000 747500 598000'
'scaling_available_governors': 'userspace powersave hotplug performance'
'scaling_cur_freq': '598000'
'scaling_driver': 'mt-cpufreq'
'scaling_governor': 'hotplug'
'scaling_min_freq': '598000'
'scaling_setspeed': '<unsupported>'
'core_id': '0'
'core_siblings': 'f'
'core_siblings_list': '0-3'
'physical_package_id': '0'
'thread_siblings': '1'
'thread_siblings_list': '0'
"""
def extract_gpu(device):
""""""
gpu_vendor, gpu_model, gles_version = [None for x in range(3)]
dumpsys = run_extraction_command(device, "surfaceflinger_dump")
gpu_line = re.search("(?:GLES\\:)([^\n\r]*)", dumpsys)
if gpu_line:
gpu_vendor, gpu_model, gles_version = gpu_line.group(1).strip().split(",", 2)
device.info_dict["gpu_vendor"] = gpu_vendor.strip()
device.info_dict["gpu_model"] = gpu_model.strip()
device.info_dict["gles_version"] = gles_version.strip()
gles_extensions = re.search("(?:GLES\\:[^\\r\\n]*)(?:\\s*)([^\\r\\n]*)", dumpsys)
if gles_extensions:
gles_extensions = gles_extensions.group(1).strip().split(" ")
device.info_dict["gles_texture_compressions"] = []
device.info_dict["gles_extensions"] = gles_extensions
for extension in gles_extensions:
try:
device.info_dict["gles_texture_compressions"].append(
TEXTURE_COMPRESSION_IDS[extension])
except KeyError:
# extension is not a known type of texture compression, continue
continue
def extract_display(device):
""""""
dumpsys = run_extraction_command(device, "surfaceflinger_dump")
x_dpi = re.search("(?:x-dpi\\s*\\:\\s*)([^\\n]*)", dumpsys)
y_dpi = re.search("(?:y-dpi\\s*\\:\\s*)([^\\n]*)", dumpsys)
resolution = re.search("(?:Display\\[0\\] :)([^,]*)", dumpsys)
if not resolution:
wm_output = run_extraction_command(device, "screen_size")
resolution = re.search("(?:Physical size:)([^\\n]*)", wm_output)
if device.info_dict["display_density"] is None:
wm_output = run_extraction_command(device, "screen_density")
density = re.search("(?:Physical\\ density\\:)([0-9A-z]*)", wm_output)
if density:
density = density.group(1)
device.info_dict["display_density"] = density
if resolution:
resolution = resolution.group(1).strip()
if x_dpi:
x_dpi = x_dpi.group(1).strip()
if y_dpi:
y_dpi = y_dpi.group(1).strip()
device.info_dict["display_resolution"] = resolution
device.info_dict["display_x-dpi"] = x_dpi
device.info_dict["display_y-dpi"] = y_dpi
def extract_features(device):
""""""
feature_list = run_extraction_command(device, "device_features")
device.info_dict["device_notable_features"] = []
for feature_name, feature_string in NOTABLE_FEATURES:
if feature_string in feature_list:
device.info_dict["device_notable_features"].append(feature_name)
all_features = re.findall("(?:^feature:)([a-zA-Z0-9\\_\\.\\=]*)", feature_list, re.M)
device.info_dict["device_features"] = all_features
def extract_storage(device):
"""Extract list of various paths."""
internal_sd = None
external_sd = None
shell_env = run_extraction_command(device, "shell_environment")
try:
internal_sd = re.search("(?:EXTERNAL_STORAGE=)([^\\s]*)", shell_env).group(1)
except AttributeError:
pass
try:
external_sd = re.search("(?:SECONDARY_STORAGE=)([^\\s]*)", shell_env).group(1)
except AttributeError:
pass
# TODO: Some devices specify only the directory containing the trace file and not the file itself
# Check whether there is any difference in traces on those devices
if not device.is_dir(internal_sd):
guesses = ["/mnt/sdcard", "/storage/emulated/legacy", "/mnt/shell/emulated/0"]
for guess in guesses:
if device.is_dir(guess):
internal_sd = guess
break
device.info_dict["internal_sd_path"] = internal_sd
device.info_dict["external_sd_path"] = external_sd
external_sd_space = run_extraction_command(device, "external_sd_space")
if "no such file or directory" in external_sd_space.lower() or \
"permission denied" in external_sd_space:
filesystem, size, used, free = ["Unavailable" for x in range(4)]
else:
filesystem, size, used, free = df_parser(external_sd_space.strip())[0]
device.info_dict["external_sd_capacity"] = size
device.info_dict["external_sd_free"] = free
internal_sd_space = run_extraction_command(device, "internal_sd_space")
if "no such file or directory" in internal_sd_space.lower() or \
"permission denied" in internal_sd_space:
filesystem, size, used, free = ["Unavailable" for x in range(4)]
else:
filesystem, size, used, free = df_parser(internal_sd_space.strip())[0]
device.info_dict["internal_sd_capacity"] = size
device.info_dict["internal_sd_free"] = free
def extract_available_commands(device):
"""Extract a list of available shell commands."""
device.info_dict["shell_commands"] = []
commands = run_extraction_command(device, "available_commands").splitlines()
device.info_dict["shell_commands"] = [x for x in commands if x]
def extract_installed_packages(device):
"""Extract a list of installed system and third-party packages."""
extract_system_packages(device)
extract_thirdparty_packages(device)
def extract_system_packages(device):
""""""
if device.info_dict["system_apps"]:
# system apps can generally only be disabled, downgraded or updated
# and do not need to be re-checked
return
device.info_dict["system_apps"] = []
for package in run_extraction_command(device, "system_apps").splitlines():
if not package:
continue
try:
app = package.split("package:", maxsplit=1)[1]
except IndexError:
LOGGER.warning("Could not split system package line: %s", package)
continue
device.info_dict["system_apps"].append(app.strip())
def extract_thirdparty_packages(device):
""""""
device.info_dict["third-party_apps"] = []
count = 0
for line in run_extraction_command(device, "third-party_apps", use_cache=False, keep_cache=False).splitlines():
if not line:
continue
try:
app = line.split("package:", maxsplit=1)[1]
except IndexError:
LOGGER.warning("Could not split thirdparty package line: %s", line)
continue
device.info_dict["third-party_apps"].append(app.strip())
count += 1
if count == 0:
device.info_dict["third-party_apps"] = "-none-"
| gpl-3.0 | -587,591,065,393,223,000 | 34.232611 | 189 | 0.582673 | false |
torchtarget/accounts_downloader | HSBC/HSBCOpenTestDirect.py | 1 | 1780 | import time
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
import myencoder  # local helper module expected to provide get_pass2()
browser = webdriver.Firefox()
#browser.find_element_by_css_selector("span.logon-span").click()
#browser.find_element_by_link_text("Personal Internet BankingOpens in a new Window.").click()
browser.get("https://www.ebanking.hsbc.com.hk/1/2/logon?LANGTAG=en&COUNTRYTAG=US")
delay = 10  # seconds to wait for page elements before timing out
try:
myElem = WebDriverWait(browser, delay).until(EC.presence_of_element_located((By.NAME, 'u_UserID')))
print("Page is ready!")
except TimeoutException:
print("Loading took too much time!")
browser.find_element_by_name("u_UserID").clear()
browser.find_element_by_name("u_UserID").send_keys("")
browser.find_element_by_css_selector("img[alt=\"Dual Password\"]").click()
try:
myElem = WebDriverWait(browser, delay).until(EC.presence_of_element_located((By.ID, 'memorableAnswer')))
browser.find_element_by_id("memorableAnswer").clear()
browser.find_element_by_id("memorableAnswer").send_keys("pond34sde")
except TimeoutException:
print("Loading took too much time!")
i=1
pass_base="pass"
mypass=myencoder.get_pass2("HSBC")
while(i < 9):
passstring=pass_base+str(i)
print(passstring)
if (browser.find_element_by_id(passstring).is_enabled()):
browser.find_element_by_id(passstring).send_keys(mypass[i])
#print("Ele1editable")
#else:
#print("Ele1eNOTditable")
i=i+1
browser.find_element_by_css_selector("input.submit_input").click()
account_saldo = browser.find_element_by_css_selector("#balanceField > span.privacy").text
| gpl-3.0 | -5,120,247,033,813,328,000 | 38.555556 | 108 | 0.741573 | false |
stephenfin/patchwork | patchwork/migrations/0015_add_series_models.py | 1 | 3074 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('patchwork', '0014_remove_userprofile_primary_project'),
]
operations = [
migrations.CreateModel(
name='SeriesReference',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('msgid', models.CharField(max_length=255, unique=True)),
],
),
migrations.CreateModel(
name='Series',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, help_text=b'An optional name to associate with the series, e.g. "John\'s PCI series".', max_length=255, null=True)),
('date', models.DateTimeField()),
('version', models.IntegerField(default=1, help_text=b'Version of series as indicated by the subject prefix(es)')),
('total', models.IntegerField(help_text=b'Number of patches in series as indicated by the subject prefix(es)')),
('cover_letter', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='series', to='patchwork.CoverLetter')),
],
options={
'ordering': ('date',),
},
),
migrations.CreateModel(
name='SeriesPatch',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('number', models.PositiveSmallIntegerField(help_text=b'The number assigned to this patch in the series')),
('patch', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='patchwork.Patch')),
('series', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='patchwork.Series')),
],
options={
'ordering': ['number'],
},
),
migrations.AddField(
model_name='series',
name='patches',
field=models.ManyToManyField(related_name='series', through='patchwork.SeriesPatch', to='patchwork.Patch'),
),
migrations.AddField(
model_name='series',
name='submitter',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='patchwork.Person'),
),
migrations.AddField(
model_name='seriesreference',
name='series',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='references', related_query_name=b'reference', to='patchwork.Series'),
),
migrations.AlterUniqueTogether(
name='seriespatch',
unique_together=set([('series', 'number'), ('series', 'patch')]),
),
]
| gpl-2.0 | -5,498,517,678,572,326,000 | 44.880597 | 171 | 0.585556 | false |
fabrizziop/encryption-modular | lib_user_input.py | 1 | 1852 | import getpass
from lib_file_ops import *
from lib_keyslot import is_header_psk
from lib_gui import *
def input_int_until_list_or_default(list_desired, default_val):
is_done = False
while is_done == False:
try:
tv = int(input())
if tv in list_desired:
is_done = True
else:
print('Incorrect Value')
except ValueError:
tv = default_val
is_done = True
return tv
def input_password_until_match():
pass_ok = False
while pass_ok == False:
passwn = getpass.getpass('Password: ')
passwn_check = getpass.getpass('Confirm password: ')
if passwn == passwn_check:
pass_ok = True
else:
print("Passwords don't match, please retry.")
return passwn
def force_integer_input(des_str):
cor_key = False
while cor_key == False:
try:
ipt = int(input(des_str))
cor_key = True
except ValueError:
print("Try again.")
return ipt
def user_file_prompt(prompt_string):
print(prompt_string)
file_name = gui_get_filename_to_open()
file_condition = is_file_accessible(file_name)
if file_condition == True:
return read_file_to_bytearray(file_name), file_name
else:
return False, False
def user_file_prompt_noread(prompt_string):
print(prompt_string)
file_name = gui_get_filename_to_open()
file_condition = is_file_accessible(file_name)
if file_condition == True:
return True, file_name
else:
return False, False
def user_encryption_type_prompt(allow_rsa):
encryption_type = True
if allow_rsa == True:
encryption_type = ask_psk_or_rsa()
# print(encryption_type)
if encryption_type == True:
password = dual_password_prompt("Enter Encryption Password")
return True, password
else:
return False, False
def user_decryption_prompt(b0f):
if is_header_psk(b0f) == True:
password = simple_password_prompt("Enter Decryption Password")
return True, password
else:
return False, False
| gpl-2.0 | -2,521,419,785,190,123,000 | 24.040541 | 64 | 0.704644 | false |
treycucco/py-utils | tests/pypgqueue/__init__.py | 1 | 2669 | import unittest
from tests import get_model_access, test_db, connection_string, SKIP_PYPGQUEUE_TEST
import idb.pypgqueue as pq
from time import sleep
from threading import Thread
import logging
def sleep_job(data):
secs = float(data["seconds"])
sleep(secs)
def exc_job(data):
raise Exception("Here's an exception for you")
def get_logger():
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
if not logger.handlers:
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter("%(asctime)s\t%(levelname)s\t%(message)s"))
logger.addHandler(handler)
return logger
@unittest.skipIf(SKIP_PYPGQUEUE_TEST, "TESTING OFF: pypgqueue")
@unittest.skipIf(test_db != "pg", "Only works with postgres currently")
class PyPGQueueTestCase(unittest.TestCase):
def setUp(self):
self.da = get_model_access()
self.da.open(autocommit=True)
self.da.update("job.serialization_keys", dict(active_job_id=None))
self._tearDown()
self._logger = None
def tearDown(self):
self._tearDown()
def _tearDown(self):
self.da.delete("job.jobs")
self.da.delete("job.serialization_keys")
def table_counts(self):
return (self.da.count("job.jobs"), self.da.count("job.serialization_keys"))
@unittest.skip("because")
def test_queue_job(self):
jc0, skc0 = self.table_counts()
self.assertEqual(jc0, 0)
self.assertEqual(skc0, 0)
pq.queue_job(self.da, "a_job", { "a": 1, "b": 2 })
pq.queue_job(self.da, "b_job")
pq.queue_job(self.da, "c_job", { "a": 3, "b": 4 }, "tenant/id")
pq.queue_job(self.da, "d_job", None, "tenant/id")
pq.queue_job(self.da, "e_job", None, "another_tenant/id")
jc1, skc1 = self.table_counts()
self.assertEqual(jc1, 5)
self.assertEqual(skc1, 2)
# @unittest.skip("notifications not working qq")
def test_job_running(self):
qm = pq.PyPGQueue(connection_string, workers=2, logger=get_logger())
qm.register_job_function("sleep_job", sleep_job)
qm.register_job_function("exc_job", exc_job)
def start_qm():
qm.start()
def queue_jobs():
sleep(0.1)
pq.queue_job(self.da, "sleep_job", { "seconds": 2 }, "skey")
sleep(0.1)
pq.queue_job(self.da, "sleep_job", { "seconds": 2 }, "skey")
sleep(0.1)
pq.queue_job(self.da, "sleep_job", { "seconds": 3 })
sleep(0.1)
pq.queue_job(self.da, "exc_job")
sleep(0.1)
pq.queue_job(self.da, "bad_job_name")
sqmt = Thread(target=start_qm)
qjt = Thread(target=queue_jobs)
sqmt.start()
qjt.start()
sleep(5)
qm.stop(None, pq.STOP_WHEN_ALL_DONE)
sqmt.join()
| bsd-3-clause | 6,235,172,142,661,464,000 | 28.655556 | 86 | 0.650431 | false |
ucamhal/ravenpy | raven/raven_django/backends.py | 1 | 4276 | # Copyright (c) 2012 Hal Blackburn <[email protected]> and
# CARET, University of Cambridge http://www.caret.cam.ac.uk/
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from django.contrib.auth import backends
from django.contrib.auth.models import User
from django.utils.crypto import constant_time_compare
import raven
from raven.raven_django import settings
class RavenBackend(backends.ModelBackend):
"""A Django Authentication backend which uses Raven auth responses to
authenticate users.
"""
def _get_validator(self, keys, expected_url):
if not keys:
keys = settings.RAVEN_KEYS
return raven.Validator(keys, expected_post_login_url=expected_url)
def authenticate(self, raven_response=None, expected_data=None,
expected_url="", keys_override=None):
"""Gets the user authenticated by a Raven auth response string.
Args:
raven_response: A Raven auth response string as provided by the
client in the WLS-Response URL query param.
expected_data: The authentication will only succeed if the data in
the signed response is equal to this value. This can be used to
protect against "login CSRF" attacks by ensuring a Raven auth
response was in response to one originating from the Django app.
expected_url: If provided, the authentication will only succeed if
the URL in the auth response matches this argument. Can safely
be ignored.
keys_override: A dict of name -> public key mappings to pass to the
raven.Validator. If not provided the RAVEN_KEYS settings value
is used. If that has no value then raven.RAVEN_KEYS is used.
Returns:
None if the auth response was invalid, or didn't represent a
successful authentication, otherwise a User object representing the
authenticated user.
Raises:
ValueError: If raven_response is provided (indicating the auth()
request is intended for this method) but no expected_data is
provided.
"""
# If no raven_response is set then this authenticate() call must not
# be meant for us
if raven_response is None:
return False
if expected_data is None:
raise ValueError("No expected_data value provided.")
validator = self._get_validator(keys_override, expected_url)
try:
authresponse = validator.validate(raven_response)
username = authresponse.get_authenticated_identity()
except (raven.NotAuthenticatedException, raven.InvalidityException):
return None
# Ensure response data matches expected_data. In order not to leak
# information on how much of a string matched, a comparison method is
# used which takes the same time to determine string equality,
# regardless of how much of a string matched.
if not constant_time_compare(expected_data, authresponse.request_data):
return None
user, _ = User.objects.get_or_create(username=username)
return user
| mit | 8,166,771,306,197,634,000 | 49.305882 | 80 | 0.684752 | false |
SavinaRoja/challenges | Rosalind/String_Algorithms/REVP.py | 1 | 3254 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Locating Restriction Sites
Usage:
REVP.py <input> [--compare] [--max=MAX] [--min=MIN]
REVP.py (--help | --version)
Options:
--compare run a speed comparison of various methods
--max=MAX Set the maximum length of palindrome to search for, even
numbers should be used
--min=MIN Set the minimum length of palindrome to search for, even
numbers should be used. Less than 4 is not recommended.
-h --help show this help message and exit
-v --version show version and exit
"""
problem_description = """Locating Restriction Sites
Problem
A DNA string is a reverse palindrome if it is equal to its reverse complement.
For instance, GCATGC is a reverse palindrome because its reverse complement is
GCATGC. See Figure 2.
Given: A DNA string of length at most 1 kbp in FASTA format.
Return: The position and length of every reverse palindrome in the string having
length between 4 and 12. You may return these pairs in any order.
Sample Dataset
>Rosalind_24
TCAATGCATGCGGGTCTATATGCAT
Sample Output
4 6
5 4
6 6
7 4
17 4
18 4
20 6
21 4
"""
from docopt import docopt
from time import time
def parse_fasta_sequence(inp_file):
with open(inp_file, 'r') as inp:
name = inp.readline().strip()[1:]
sequence = ''
for line in inp.readlines():
sequence += line.strip()
return name, sequence
def palindromes_by_nuclei(sequence, max_pal=12, min_pal=4):
'''
Checks for reverse palindromes in a DNA sequence; acts as a generator that
will yield the starting offset of a palindrome along with its length.
'''
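    # e.g. for "TCAATGCATGCGGGTCTATATGCAT" the yielded pairs include (3, 6) and (4, 4);
    # offsets are 0-based (main() prints them 1-based to match Rosalind's expected output).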
comp = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A'}
for offset in range(len(sequence)):
mod = 0
#Length is twice the mod value
try:
while sequence[offset - mod] == comp[sequence[offset + mod + 1]]:
mod += 1
if mod * 2 >= min_pal:
yield offset - mod + 1, mod * 2
if mod * 2 >= max_pal or offset - mod < 0:
break
except IndexError: # Expanded past sequence length
pass
def main(max_pal, min_pal):
    # Fall back to the default 4-12 length window when no override is given.
    max_pal = int(max_pal) if max_pal is not None else 12
    min_pal = int(min_pal) if min_pal is not None else 4
name, sequence = parse_fasta_sequence(arguments['<input>'])
for offset, length in palindromes_by_nuclei(sequence, max_pal, min_pal):
print('{0} {1}'.format(offset + 1, length))
def compare(max_pal, min_pal):
    # Fall back to the default 4-12 length window when no override is given.
    max_pal = int(max_pal) if max_pal is not None else 12
    min_pal = int(min_pal) if min_pal is not None else 4
name, sequence = parse_fasta_sequence(arguments['<input>'])
start = time()
for i in range(100):
for offset, length in palindromes_by_nuclei(sequence, max_pal, min_pal):
pass
print('''It took {0} seconds to complete 100 iterations of the Palindrome
by Nuclei search.\n'''.format(time() - start))
if __name__ == '__main__':
arguments = docopt(__doc__, version='0.0.1')
if arguments['--compare']:
compare(arguments['--max'], arguments['--min'])
else:
main(arguments['--max'], arguments['--min'])
| unlicense | 4,442,858,641,994,704,000 | 27.051724 | 80 | 0.614935 | false |
impactlab/eemeter | tests/modeling/test_model_data_billing_formatter.py | 1 | 3055 | import tempfile
from datetime import datetime
import numpy as np
import pandas as pd
import pytest
import pytz
from eemeter.testing.mocks import MockWeatherClient
from eemeter.weather import ISDWeatherSource
from eemeter.modeling.formatters import ModelDataBillingFormatter
from eemeter.structures import EnergyTrace
@pytest.fixture
def mock_isd_weather_source():
tmp_dir = tempfile.mkdtemp()
ws = ISDWeatherSource("722880", tmp_dir)
ws.client = MockWeatherClient()
return ws
@pytest.fixture
def trace1():
data = {
"value": [1, 1, 1, 1, np.nan],
"estimated": [False, False, True, False, False]
}
columns = ["value", "estimated"]
index = [
datetime(2011, 1, 1, tzinfo=pytz.UTC),
datetime(2011, 2, 1, tzinfo=pytz.UTC),
datetime(2011, 3, 2, tzinfo=pytz.UTC),
datetime(2011, 4, 3, tzinfo=pytz.UTC),
datetime(2011, 4, 29, tzinfo=pytz.UTC),
]
df = pd.DataFrame(data, index=index, columns=columns)
return EnergyTrace("ELECTRICITY_CONSUMPTION_SUPPLIED", df, unit="KWH")
@pytest.fixture
def trace2():
data = {
"value": [np.nan],
"estimated": [True]
}
columns = ["value", "estimated"]
index = [
datetime(2011, 1, 1, tzinfo=pytz.UTC),
]
df = pd.DataFrame(data, index=index, columns=columns)
return EnergyTrace("ELECTRICITY_CONSUMPTION_SUPPLIED", df, unit="KWH")
@pytest.fixture
def trace3():
data = {
"value": [1, np.nan],
"estimated": [True, False]
}
columns = ["value", "estimated"]
index = [
datetime(2011, 1, 1, tzinfo=pytz.UTC),
datetime(2011, 2, 1, tzinfo=pytz.UTC),
]
df = pd.DataFrame(data, index=index, columns=columns)
return EnergyTrace("ELECTRICITY_CONSUMPTION_SUPPLIED", df, unit="KWH")
def test_basic_daily(trace1, mock_isd_weather_source):
mdbf = ModelDataBillingFormatter()
input_data = mdbf.create_input(
trace1, mock_isd_weather_source)
trace_data, temperature_data = input_data
assert trace_data.shape == (4,)
assert temperature_data.shape == (2832, 1)
description = mdbf.describe_input(input_data)
assert description.get('start_date') == \
datetime(2011, 1, 1, tzinfo=pytz.UTC)
assert description.get('end_date') == \
datetime(2011, 4, 29, tzinfo=pytz.UTC)
assert description.get('n_rows') == 4
def test_empty(trace2, mock_isd_weather_source):
mdbf = ModelDataBillingFormatter()
input_data = mdbf.create_input(
trace2, mock_isd_weather_source)
trace_data, temperature_data = input_data
assert trace_data.shape == (0,)
assert temperature_data.shape == (0,)
description = mdbf.describe_input(input_data)
assert description.get('start_date') is None
assert description.get('end_date') is None
assert description.get('n_rows') == 0
def test_small(trace3, mock_isd_weather_source):
mdbf = ModelDataBillingFormatter()
with pytest.raises(ValueError):
mdbf.create_input(trace3, mock_isd_weather_source)
| mit | -8,839,910,606,952,451,000 | 28.375 | 74 | 0.655974 | false |
tupes/School | CS333/testing_kasiski.py | 1 | 1594 |
from unittest import TestCase, main
from collections import Counter
from kasiski_examination import *
from utilities import removeTextNonLetters, removeBinaryNonLetters
class TestKasiskiFactors(TestCase):
def setUp(self):
self.message = removeTextNonLetters("Ppqca xqvekg ybnkmazu ybngbal jon i tszm jyim. Vrag voht vrau c tksg. Ddwuo xitlazu vavv raz c vkb qp iwpou.")
self.seq_spacings = {'AZU': [48], 'VRA': [8, 32, 24], 'YBN': [8]}
self.seq_factors = {
'AZU': [2, 3, 4, 6, 8, 12, 16],
'VRA': [8, 2, 4, 8, 16, 2, 4, 2, 3, 4, 6, 8, 12],
'YBN': [8, 2, 4]}
self.factor_counts = Counter({2: 5, 4: 5, 8: 5, 16: 2, 3: 2, 6: 2, 12: 2})
def test_getRepeatedSeqSpacings(self):
self.assertEqual(getRepeatedSeqSpacings(self.message), self.seq_spacings)
def test_getSeqFactors(self):
self.assertEqual(getSeqFactors(self.seq_spacings), self.seq_factors)
def test_getFactorCounts(self):
self.assertEqual(getFactorCounts(self.seq_factors), self.factor_counts)
class TestKasiskiExamination(TestCase):
def setUp(self):
self.likely_key_lengths = [3, 2, 6, 4, 12, 8, 9, 16, 5, 11, 10, 15, 7, 14, 13]
def test_kasiskiExamination_text(self):
with open('data/book_ciphertext.txt') as f:
ciphertext = removeTextNonLetters(f.read())
self.assertEqual(kasiskiExamination(ciphertext), self.likely_key_lengths)
def test_kasiskiExamination_binary(self):
with open('data/book_cipherbytes', 'rb') as f:
cipherbytes = removeBinaryNonLetters(f.read())
self.assertEqual(kasiskiExamination(cipherbytes), self.likely_key_lengths)
if __name__ == '__main__':
main() | gpl-3.0 | 8,913,561,625,061,415,000 | 32.93617 | 149 | 0.705772 | false |
circuitsforfun/Wav2Header | wav2header.py | 1 | 5200 | #!/usr/bin/python
#////////////////////////////////////////////////////////////////////////////////
#// //
#// wav2Header.py //
#// -PYTHON SCRIPT- //
#// Written By: Richard Wardlow //
#// //
#// //
#// Summary: Convert 8-bit mono wav to .h header file for MCU //
#// //
#// //
#// //
#////////////////////////////////////////////////////////////////////////////////
#/////////////////////////////
#// IMPORT MODULES HERE //
#/////////////////////////////
import pyaudio
import wave
import sys, os
import numpy
from optparse import OptionParser
#///////////
#// NOTES //
#///////////
#////////////////////////////////////
#// GLOBAL VARIABLES DECLARED HERE //
#////////////////////////////////////
VERSION = '1.0.0'
#////////////////////////////////////
#// GLOBAL FUNCTIONS DECLARED HERE //
#////////////////////////////////////
def addCommandLineOptions(parser):
parser.add_option(
'-f', '--file',
dest='wavFile',
default=None,
help='Specify Wav file for conversion')
parser.add_option(
'-q', '--quality',
dest='quality',
default=2,
help='Specify quality conversion 1=High, 2=Med, 3=Low')
parser.add_option(
'-s', '--sample',
dest='sample',
default=None,
help='Optional sample override for more specific conversion quality')
parser.add_option(
'-o', '--output',
dest='headerFile',
default=None,
help='Specify output header file')
parser.add_option(
'-l', '--length',
dest='length',
default=None,
        help='Optional Length Limit in bytes to keep the header file from being too big')
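# Example: build the parser defined above and feed it a sample command line.
# The file names are placeholders; -q 2 selects the medium-quality 4800
# samples-per-second setting handled in wav2header below.
def exampleOptions():
    parser = OptionParser()
    addCommandLineOptions(parser)
    options, args = parser.parse_args(['-f', 'sample.wav', '-o', 'sample.h', '-q', '2'])
    return options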
def wav2header(options):
s = None
print """
*****************************************************
Wav To Header File Conversion
Version: """ + VERSION + """
*****************************************************\n
"""
if options.wavFile == None:
print 'Need to specify a wav file for conversion'
print '--help for help'
sys.exit(0)
if options.headerFile == None:
print 'Need to specify a header output file'
print '--help for help'
sys.exit(0)
if int(options.quality) < 1 or int(options.quality) > 3:
print 'Quality setting must be either 1=High, 2=Med, or 3=Low'
print '--help for help'
sys.exit(0)
wf = wave.open(options.wavFile, 'rb')
cf = open(options.headerFile, 'w')
if options.sample == None:
if int(options.quality) == 1:
sfps = 6400
elif int(options.quality) == 2:
sfps = 4800
elif int(options.quality) == 3:
sfps = 3200
else:
sfps = int(options.sample)
print 'Converting wav to header....'
srate = wf.getframerate()
smrate = int(srate / sfps)
ws = wf.getnframes()
data = wf.readframes(1024)
wavsamp = int(ws / smrate) + 2
data2 = numpy.arange(0, wavsamp)
x = 0
for x in range(0, wavsamp):
data2[x] = 0
x = 0
y = smrate
tmp = 0
tmp2 = 0.0
while data != '':
for i in data:
if y == smrate:
tmp = ord(i)
data2[x] = tmp
x = x + 1
y = 0
y = y + 1
data = wf.readframes(1024)
xcnt = 0;
delayTimer = int(360.0 - (((sfps / 1600.0) - 1.0) * 90.0))
cf.write('static const unsigned short envelope_table[] = { //Use Approx. Delay Time of: ' + str(delayTimer) + ' us @ 64Mhz\n')
if options.length != None:
if x > int(options.length):
x = int(options.length)
print 'Sample length is longer than length limit...Truncating sample to stay within limit.'
for y in range(0,x):
tmp2 = data2[y]
if y == 0:
cf.write('%i' % (tmp2))
else:
cf.write(',%i' % (tmp2))
xcnt += 1
if xcnt > 15:
cf.write('\n')
xcnt = 0
cf.write('};\n')
cf.close()
print 'Header Bytes: ' + str(x)
print 'Use a delay time of approx. ' + str(delayTimer) + ' us @ 64Mhz'
print 'Conversion complete!'
if __name__ == '__main__':
parser = OptionParser()
addCommandLineOptions(parser)
options, args = parser.parse_args()
result = wav2header(options)
sys.exit(result)
| gpl-3.0 | 4,546,926,191,659,368,000 | 31.298137 | 131 | 0.3975 | false |
yumikohey/WaiterCar | self-driving-car-ai/server.py | 1 | 7617 | #!/usr/bin/env python
# coding: Latin-1
# Load library functions we want
import SocketServer
import RPi.GPIO as io
io.setmode(io.BCM)
# Constant values
# PWM_MAX = 100
# Disable warning from GPIO
io.setwarnings(False)
# Here we configure the GPIO settings for the left and right motors spinning direction.
# It defines the four GPIO pins used as input on the L298 H-Bridge to set the motor mode (forward, reverse and stopp).
#Steering Motor
leftmotor_in1_pin = 27
leftmotor_in2_pin = 22
io.setup(leftmotor_in1_pin, io.OUT)
io.setup(leftmotor_in2_pin, io.OUT)
#Foward/Backward Motor
rightmotor_in1_pin = 24
rightmotor_in2_pin = 25
io.setup(rightmotor_in1_pin, io.OUT)
io.setup(rightmotor_in2_pin, io.OUT)
io.output(leftmotor_in1_pin, False)
io.output(leftmotor_in2_pin, False)
io.output(rightmotor_in1_pin, False)
io.output(rightmotor_in2_pin, False)
# Map of drives to pins
# lDrives = [DRIVE_1, DRIVE_2, DRIVE_3, DRIVE_4]
rightmotorpwm_pin = 4
io.setup(rightmotorpwm_pin, io.OUT)
rightmotorpwm = io.PWM(rightmotorpwm_pin,100)
rightmotorpwm.start(0)
rightmotorpwm.ChangeDutyCycle(0)
def setMotorMode(motor, mode, p):
    # setMotorMode()
    # Sets the mode for the L298 H-Bridge: it selects a motor and the action it
    # should perform.
    # motor -> which motor: "steermotor" (steering) or "powermotor" (drive)
    # mode  -> what the H-Bridge should do for that motor:
    #          "left"/"right" for the steering motor,
    #          "forward"/"reverse"/"stop" for the drive motor
    # p     -> drive level from -1 to +1; its absolute value sets the PWM duty
    #          cycle of the drive motor
    # setMotorMode("powermotor", "reverse", -0.5) -> drive motor reverses at 50%
    # setMotorMode("steermotor", "stop", 0)       -> steering motor is released
power = abs(p)*100
if motor == "steermotor":
if mode == "left":
io.output(leftmotor_in1_pin, True)
io.output(leftmotor_in2_pin, False)
elif mode == "right":
io.output(leftmotor_in1_pin, False)
io.output(leftmotor_in2_pin, True)
else:
io.output(leftmotor_in1_pin, False)
io.output(leftmotor_in2_pin, False)
elif motor == "powermotor":
if mode == "reverse":
io.output(rightmotor_in1_pin, True)
io.output(rightmotor_in2_pin, False)
rightmotorpwm.ChangeDutyCycle(power)
elif mode == "forward":
io.output(rightmotor_in1_pin, False)
io.output(rightmotor_in2_pin, True)
rightmotorpwm.ChangeDutyCycle(power)
else:
io.output(rightmotor_in1_pin, False)
io.output(rightmotor_in2_pin, False)
else:
io.output(leftmotor_in1_pin, False)
io.output(leftmotor_in2_pin, False)
io.output(rightmotor_in1_pin, False)
io.output(rightmotor_in2_pin, False)
def setMotorSteer(power):
    # setMotorSteer(power)
    # Sets the steering direction: negative steers left, positive steers right,
    # zero releases the steering motor.  Only the sign matters - the steering
    # motor is switched on/off, not PWM-driven, so the magnitude is ignored.
    # setMotorSteer(-1) -> steer left
    # setMotorSteer(1)  -> steer right
    # setMotorSteer(0)  -> steering motor off
if power < 0:
# Reverse mode for the left motor
setMotorMode("steermotor", "left", power)
# pwm = -int(PWM_MAX * power)
# if pwm > PWM_MAX:
# pwm = PWM_MAX
elif power > 0:
# Forward mode for the left motor
setMotorMode("steermotor", "right", power)
# pwm = int(PWM_MAX * power)
# if pwm > PWM_MAX:
# pwm = PWM_MAX
else:
# Stopp mode for the left motor
setMotorMode("steermotor", "stop", power)
# pwm = 0
# print "SetMotorLeft", pwm
# leftmotorpwm.ChangeDutyCycle(pwm)
def setMotorPower(power):
    # setMotorPower(power)
    # Sets the drive level, from +1 (full forward) to -1 (full reverse); the
    # magnitude becomes the PWM duty cycle of the drive motor.
    # setMotorPower(0)     -> drive motor stopped
    # setMotorPower(0.75)  -> forward at 75% duty cycle
    # setMotorPower(-0.5)  -> reverse at 50% duty cycle
    # setMotorPower(1)     -> forward at 100% duty cycle
if power < 0:
# Reverse mode for the right motor
setMotorMode("powermotor", "reverse", power)
# pwm = -int(PWM_MAX * power)
# if pwm > PWM_MAX:
# pwm = PWM_MAX
elif power > 0:
# Forward mode for the right motor
setMotorMode("powermotor", "forward", power)
# pwm = int(PWM_MAX * power)
# if pwm > PWM_MAX:
# pwm = PWM_MAX
else:
# Stopp mode for the right motor
setMotorMode("powermotor", "stop", power)
# pwm = 0
# Function to set all drives off
def MotorOff():
io.output(leftmotor_in1_pin, False)
io.output(leftmotor_in2_pin, False)
io.output(rightmotor_in1_pin, False)
io.output(rightmotor_in2_pin, False)
# Settings for the RemoteKeyBorg server
portListen = 9038 # UDP port to listen on ("LEDB" on an LCD)
# Class used to handle UDP messages
class PicoBorgHandler(SocketServer.BaseRequestHandler):
# Function called when a new message has been received
def handle(self):
global isRunning
request, socket = self.request # Read who spoke to us and what they said
        request = request.lower()  # Convert command to lower case
        print 'request:', request
# driveCommands = request.split(',') # Separate the command into individual drives
# Special commands
if request == 'alloff' or request == '0':
# Turn all drives off
MotorOff()
print 'All drives off'
        elif request == 'x':
            # Exit the program: stop the drives and end the main loop
            MotorOff()
            print 'All drives off'
            isRunning = False
elif request == 'wa':
setMotorPower(1)
setMotorSteer(-1)
elif request == 'wd':
setMotorPower(1)
setMotorSteer(1)
elif request == 'sa':
setMotorPower(-1)
setMotorSteer(-1)
elif request == 'sd':
setMotorPower(-1)
setMotorSteer(1)
elif request == 'w':
setMotorPower(1)
setMotorSteer(0)
elif request == 's':
setMotorPower(-1)
setMotorSteer(0)
elif request == 'a':
setMotorPower(0)
setMotorSteer(-1)
elif request == 'd':
setMotorPower(0)
setMotorSteer(1)
elif request == '0':
setMotorPower(0)
setMotorSteer(0)
elif request == 'w0':
setMotorPower(1)
setMotorSteer(0)
elif request == 's0':
setMotorPower(-1)
setMotorSteer(0)
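# For reference, a matching client only needs to send the bare command string
# as a UDP datagram to the port above; a minimal sketch (the host address is a
# placeholder for the Pi's IP):
def sendDriveCommand(command, host='192.168.0.10', port=portListen):
    import socket
    sender = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sender.sendto(command.lower(), (host, port))
    sender.close()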
try:
global isRunning
# Start by turning all drives off
MotorOff()
raw_input('You can now turn on the power, press ENTER to continue')
# Setup the UDP listener
remoteKeyBorgServer = SocketServer.UDPServer(('', portListen), PicoBorgHandler)
# Loop until terminated remotely
isRunning = True
while isRunning:
remoteKeyBorgServer.handle_request()
# Turn off the drives and release the GPIO pins
print 'Finished'
MotorOff()
raw_input('Turn the power off now, press ENTER to continue')
io.cleanup()
except KeyboardInterrupt:
# CTRL+C exit, turn off the drives and release the GPIO pins
print 'Terminated'
MotorOff()
raw_input('Turn the power off now, press ENTER to continue')
io.cleanup() | mit | -5,007,044,393,506,858,000 | 31.279661 | 118 | 0.620717 | false |
kthordarson/youtube-dl-ruv | youtube_dl/utils.py | 1 | 43616 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import calendar
import codecs
import contextlib
import ctypes
import datetime
import email.utils
import errno
import gzip
import itertools
import io
import json
import locale
import math
import os
import pipes
import platform
import re
import ssl
import socket
import struct
import subprocess
import sys
import tempfile
import traceback
import xml.etree.ElementTree
import zlib
from .compat import (
compat_chr,
compat_getenv,
compat_html_entities,
compat_html_parser,
compat_parse_qs,
compat_str,
compat_urllib_error,
compat_urllib_parse,
compat_urllib_parse_urlparse,
compat_urllib_request,
compat_urlparse,
)
# This is not clearly defined otherwise
compiled_regex_type = type(re.compile(''))
std_headers = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) Gecko/20100101 Firefox/10.0 (Chrome)',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-us,en;q=0.5',
}
def preferredencoding():
"""Get preferred encoding.
Returns the best encoding scheme for the system, based on
locale.getpreferredencoding() and some further tweaks.
"""
try:
pref = locale.getpreferredencoding()
u'TEST'.encode(pref)
except:
pref = 'UTF-8'
return pref
def write_json_file(obj, fn):
""" Encode obj as JSON and write it to fn, atomically """
args = {
'suffix': '.tmp',
'prefix': os.path.basename(fn) + '.',
'dir': os.path.dirname(fn),
'delete': False,
}
# In Python 2.x, json.dump expects a bytestream.
# In Python 3.x, it writes to a character stream
if sys.version_info < (3, 0):
args['mode'] = 'wb'
else:
args.update({
'mode': 'w',
'encoding': 'utf-8',
})
tf = tempfile.NamedTemporaryFile(**args)
try:
with tf:
json.dump(obj, tf)
os.rename(tf.name, fn)
except:
try:
os.remove(tf.name)
except OSError:
pass
raise
if sys.version_info >= (2, 7):
def find_xpath_attr(node, xpath, key, val):
""" Find the xpath xpath[@key=val] """
assert re.match(r'^[a-zA-Z-]+$', key)
assert re.match(r'^[a-zA-Z0-9@\s:._-]*$', val)
expr = xpath + u"[@%s='%s']" % (key, val)
return node.find(expr)
else:
def find_xpath_attr(node, xpath, key, val):
# Here comes the crazy part: In 2.6, if the xpath is a unicode,
# .//node does not match if a node is a direct child of . !
if isinstance(xpath, unicode):
xpath = xpath.encode('ascii')
for f in node.findall(xpath):
if f.attrib.get(key) == val:
return f
return None
# On python2.6 the xml.etree.ElementTree.Element methods don't support
# the namespace parameter
def xpath_with_ns(path, ns_map):
components = [c.split(':') for c in path.split('/')]
replaced = []
for c in components:
if len(c) == 1:
replaced.append(c[0])
else:
ns, tag = c
replaced.append('{%s}%s' % (ns_map[ns], tag))
return '/'.join(replaced)
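# Example: querying a namespaced element.  The prefix in the path only has to
# match a key of ns_map, not whatever prefix the document itself declares
# (the MRSS namespace URI below is just an illustrative value):
def _example_xpath_with_ns(doc):
    ns_map = {'media': 'http://search.yahoo.com/mrss/'}
    return doc.find(xpath_with_ns('.//media:thumbnail', ns_map))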
def xpath_text(node, xpath, name=None, fatal=False):
if sys.version_info < (2, 7): # Crazy 2.6
xpath = xpath.encode('ascii')
n = node.find(xpath)
if n is None:
if fatal:
name = xpath if name is None else name
raise ExtractorError('Could not find XML element %s' % name)
else:
return None
return n.text
def get_element_by_id(id, html):
"""Return the content of the tag with the specified ID in the passed HTML document"""
return get_element_by_attribute("id", id, html)
def get_element_by_attribute(attribute, value, html):
"""Return the content of the tag with the specified attribute in the passed HTML document"""
m = re.search(r'''(?xs)
<([a-zA-Z0-9:._-]+)
(?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]+|="[^"]+"|='[^']+'))*?
\s+%s=['"]?%s['"]?
(?:\s+[a-zA-Z0-9:._-]+(?:=[a-zA-Z0-9:._-]+|="[^"]+"|='[^']+'))*?
\s*>
(?P<content>.*?)
</\1>
''' % (re.escape(attribute), re.escape(value)), html)
if not m:
return None
res = m.group('content')
if res.startswith('"') or res.startswith("'"):
res = res[1:-1]
return unescapeHTML(res)
def clean_html(html):
"""Clean an HTML snippet into a readable string"""
# Newline vs <br />
html = html.replace('\n', ' ')
html = re.sub(r'\s*<\s*br\s*/?\s*>\s*', '\n', html)
html = re.sub(r'<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html)
# Strip html tags
html = re.sub('<.*?>', '', html)
# Replace html entities
html = unescapeHTML(html)
return html.strip()
def sanitize_open(filename, open_mode):
"""Try to open the given filename, and slightly tweak it if this fails.
Attempts to open the given filename. If this fails, it tries to change
the filename slightly, step by step, until it's either able to open it
or it fails and raises a final exception, like the standard open()
function.
It returns the tuple (stream, definitive_file_name).
"""
try:
if filename == u'-':
if sys.platform == 'win32':
import msvcrt
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
stream = open(encodeFilename(filename), open_mode)
return (stream, filename)
except (IOError, OSError) as err:
if err.errno in (errno.EACCES,):
raise
# In case of error, try to remove win32 forbidden chars
alt_filename = os.path.join(
re.sub(u'[/<>:"\\|\\\\?\\*]', u'#', path_part)
for path_part in os.path.split(filename)
)
if alt_filename == filename:
raise
else:
# An exception here should be caught in the caller
stream = open(encodeFilename(filename), open_mode)
return (stream, alt_filename)
def timeconvert(timestr):
"""Convert RFC 2822 defined time string into system timestamp"""
timestamp = None
timetuple = email.utils.parsedate_tz(timestr)
if timetuple is not None:
timestamp = email.utils.mktime_tz(timetuple)
return timestamp
def sanitize_filename(s, restricted=False, is_id=False):
"""Sanitizes a string so it could be used as part of a filename.
If restricted is set, use a stricter subset of allowed characters.
Set is_id if this is not an arbitrary string, but an ID that should be kept if possible
"""
def replace_insane(char):
if char == '?' or ord(char) < 32 or ord(char) == 127:
return ''
elif char == '"':
return '' if restricted else '\''
elif char == ':':
return '_-' if restricted else ' -'
elif char in '\\/|*<>':
return '_'
if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()):
return '_'
if restricted and ord(char) > 127:
return '_'
return char
result = u''.join(map(replace_insane, s))
if not is_id:
while '__' in result:
result = result.replace('__', '_')
result = result.strip('_')
# Common case of "Foreign band name - English song title"
if restricted and result.startswith('-_'):
result = result[2:]
if not result:
result = '_'
return result
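# Example: path separators, colons and other problematic characters are
# replaced or dropped; restricted=True additionally squeezes the result down
# to a conservative character set that should be safe on any filesystem.
def _example_sanitize_filename():
    return (sanitize_filename(u'AC/DC: Back in Black?'),
            sanitize_filename(u'AC/DC: Back in Black?', restricted=True))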
def orderedSet(iterable):
""" Remove all duplicates from the input iterable """
res = []
for el in iterable:
if el not in res:
res.append(el)
return res
def _htmlentity_transform(entity):
"""Transforms an HTML entity to a character."""
# Known non-numeric HTML entity
if entity in compat_html_entities.name2codepoint:
return compat_chr(compat_html_entities.name2codepoint[entity])
mobj = re.match(r'#(x?[0-9]+)', entity)
if mobj is not None:
numstr = mobj.group(1)
if numstr.startswith(u'x'):
base = 16
numstr = u'0%s' % numstr
else:
base = 10
return compat_chr(int(numstr, base))
# Unknown entity in name, return its literal representation
return (u'&%s;' % entity)
def unescapeHTML(s):
if s is None:
return None
assert type(s) == compat_str
return re.sub(
r'&([^;]+);', lambda m: _htmlentity_transform(m.group(1)), s)
def encodeFilename(s, for_subprocess=False):
"""
@param s The name of the file
"""
assert type(s) == compat_str
# Python 3 has a Unicode API
if sys.version_info >= (3, 0):
return s
if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5:
# Pass u'' directly to use Unicode APIs on Windows 2000 and up
# (Detecting Windows NT 4 is tricky because 'major >= 4' would
# match Windows 9x series as well. Besides, NT 4 is obsolete.)
if not for_subprocess:
return s
else:
# For subprocess calls, encode with locale encoding
# Refer to http://stackoverflow.com/a/9951851/35070
encoding = preferredencoding()
else:
encoding = sys.getfilesystemencoding()
if encoding is None:
encoding = 'utf-8'
return s.encode(encoding, 'ignore')
def encodeArgument(s):
if not isinstance(s, compat_str):
# Legacy code that uses byte strings
# Uncomment the following line after fixing all post processors
#assert False, 'Internal error: %r should be of type %r, is %r' % (s, compat_str, type(s))
s = s.decode('ascii')
return encodeFilename(s, True)
def decodeOption(optval):
if optval is None:
return optval
if isinstance(optval, bytes):
optval = optval.decode(preferredencoding())
assert isinstance(optval, compat_str)
return optval
def formatSeconds(secs):
if secs > 3600:
return '%d:%02d:%02d' % (secs // 3600, (secs % 3600) // 60, secs % 60)
elif secs > 60:
return '%d:%02d' % (secs // 60, secs % 60)
else:
return '%d' % secs
def make_HTTPS_handler(opts_no_check_certificate, **kwargs):
if sys.version_info < (3, 2):
import httplib
class HTTPSConnectionV3(httplib.HTTPSConnection):
def __init__(self, *args, **kwargs):
httplib.HTTPSConnection.__init__(self, *args, **kwargs)
def connect(self):
sock = socket.create_connection((self.host, self.port), self.timeout)
if getattr(self, '_tunnel_host', False):
self.sock = sock
self._tunnel()
try:
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=ssl.PROTOCOL_TLSv1)
except ssl.SSLError:
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=ssl.PROTOCOL_SSLv23)
class HTTPSHandlerV3(compat_urllib_request.HTTPSHandler):
def https_open(self, req):
return self.do_open(HTTPSConnectionV3, req)
return HTTPSHandlerV3(**kwargs)
elif hasattr(ssl, 'create_default_context'): # Python >= 3.4
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
context.options &= ~ssl.OP_NO_SSLv3 # Allow older, not-as-secure SSLv3
if opts_no_check_certificate:
context.verify_mode = ssl.CERT_NONE
return compat_urllib_request.HTTPSHandler(context=context, **kwargs)
else: # Python < 3.4
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = (ssl.CERT_NONE
if opts_no_check_certificate
else ssl.CERT_REQUIRED)
context.set_default_verify_paths()
try:
context.load_default_certs()
except AttributeError:
pass # Python < 3.4
return compat_urllib_request.HTTPSHandler(context=context, **kwargs)
class ExtractorError(Exception):
"""Error during info extraction."""
def __init__(self, msg, tb=None, expected=False, cause=None, video_id=None):
""" tb, if given, is the original traceback (so that it can be printed out).
If expected is set, this is a normal error message and most likely not a bug in youtube-dl.
"""
if sys.exc_info()[0] in (compat_urllib_error.URLError, socket.timeout, UnavailableVideoError):
expected = True
if video_id is not None:
msg = video_id + ': ' + msg
if cause:
msg += u' (caused by %r)' % cause
if not expected:
msg = msg + u'; please report this issue on https://yt-dl.org/bug . Be sure to call youtube-dl with the --verbose flag and include its complete output. Make sure you are using the latest version; type youtube-dl -U to update.'
super(ExtractorError, self).__init__(msg)
self.traceback = tb
self.exc_info = sys.exc_info() # preserve original exception
self.cause = cause
self.video_id = video_id
def format_traceback(self):
if self.traceback is None:
return None
return u''.join(traceback.format_tb(self.traceback))
class RegexNotFoundError(ExtractorError):
"""Error when a regex didn't match"""
pass
class DownloadError(Exception):
"""Download Error exception.
This exception may be thrown by FileDownloader objects if they are not
configured to continue on errors. They will contain the appropriate
error message.
"""
def __init__(self, msg, exc_info=None):
""" exc_info, if given, is the original exception that caused the trouble (as returned by sys.exc_info()). """
super(DownloadError, self).__init__(msg)
self.exc_info = exc_info
class SameFileError(Exception):
"""Same File exception.
This exception will be thrown by FileDownloader objects if they detect
multiple files would have to be downloaded to the same file on disk.
"""
pass
class PostProcessingError(Exception):
"""Post Processing exception.
This exception may be raised by PostProcessor's .run() method to
indicate an error in the postprocessing task.
"""
def __init__(self, msg):
self.msg = msg
class MaxDownloadsReached(Exception):
""" --max-downloads limit has been reached. """
pass
class UnavailableVideoError(Exception):
"""Unavailable Format exception.
This exception will be thrown when a video is requested
in a format that is not available for that video.
"""
pass
class ContentTooShortError(Exception):
"""Content Too Short exception.
This exception may be raised by FileDownloader objects when a file they
download is too small for what the server announced first, indicating
the connection was probably interrupted.
"""
# Both in bytes
downloaded = None
expected = None
def __init__(self, downloaded, expected):
self.downloaded = downloaded
self.expected = expected
class YoutubeDLHandler(compat_urllib_request.HTTPHandler):
"""Handler for HTTP requests and responses.
This class, when installed with an OpenerDirector, automatically adds
the standard headers to every HTTP request and handles gzipped and
deflated responses from web servers. If compression is to be avoided in
a particular request, the original request in the program code only has
to include the HTTP header "Youtubedl-No-Compression", which will be
removed before making the real request.
Part of this code was copied from:
http://techknack.net/python-urllib2-handlers/
Andrew Rowls, the author of that code, agreed to release it to the
public domain.
"""
@staticmethod
def deflate(data):
try:
return zlib.decompress(data, -zlib.MAX_WBITS)
except zlib.error:
return zlib.decompress(data)
@staticmethod
def addinfourl_wrapper(stream, headers, url, code):
if hasattr(compat_urllib_request.addinfourl, 'getcode'):
return compat_urllib_request.addinfourl(stream, headers, url, code)
ret = compat_urllib_request.addinfourl(stream, headers, url)
ret.code = code
return ret
def http_request(self, req):
for h, v in std_headers.items():
if h not in req.headers:
req.add_header(h, v)
if 'Youtubedl-no-compression' in req.headers:
if 'Accept-encoding' in req.headers:
del req.headers['Accept-encoding']
del req.headers['Youtubedl-no-compression']
if 'Youtubedl-user-agent' in req.headers:
if 'User-agent' in req.headers:
del req.headers['User-agent']
req.headers['User-agent'] = req.headers['Youtubedl-user-agent']
del req.headers['Youtubedl-user-agent']
if sys.version_info < (2, 7) and '#' in req.get_full_url():
# Python 2.6 is brain-dead when it comes to fragments
req._Request__original = req._Request__original.partition('#')[0]
req._Request__r_type = req._Request__r_type.partition('#')[0]
return req
def http_response(self, req, resp):
old_resp = resp
# gzip
if resp.headers.get('Content-encoding', '') == 'gzip':
content = resp.read()
gz = gzip.GzipFile(fileobj=io.BytesIO(content), mode='rb')
try:
uncompressed = io.BytesIO(gz.read())
except IOError as original_ioerror:
                # There may be junk at the end of the file
# See http://stackoverflow.com/q/4928560/35070 for details
for i in range(1, 1024):
try:
gz = gzip.GzipFile(fileobj=io.BytesIO(content[:-i]), mode='rb')
uncompressed = io.BytesIO(gz.read())
except IOError:
continue
break
else:
raise original_ioerror
resp = self.addinfourl_wrapper(uncompressed, old_resp.headers, old_resp.url, old_resp.code)
resp.msg = old_resp.msg
# deflate
if resp.headers.get('Content-encoding', '') == 'deflate':
gz = io.BytesIO(self.deflate(resp.read()))
resp = self.addinfourl_wrapper(gz, old_resp.headers, old_resp.url, old_resp.code)
resp.msg = old_resp.msg
return resp
https_request = http_request
https_response = http_response
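# Example: install the handler into an opener so every request picks up the
# standard headers and transparent gzip/deflate handling (an HTTPS handler
# from make_HTTPS_handler would normally be added alongside it):
def _example_build_opener():
    return compat_urllib_request.build_opener(YoutubeDLHandler())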
def parse_iso8601(date_str, delimiter='T'):
""" Return a UNIX timestamp from the given date """
if date_str is None:
return None
m = re.search(
r'(\.[0-9]+)?(?:Z$| ?(?P<sign>\+|-)(?P<hours>[0-9]{2}):?(?P<minutes>[0-9]{2})$)',
date_str)
if not m:
timezone = datetime.timedelta()
else:
date_str = date_str[:-len(m.group(0))]
if not m.group('sign'):
timezone = datetime.timedelta()
else:
sign = 1 if m.group('sign') == '+' else -1
timezone = datetime.timedelta(
hours=sign * int(m.group('hours')),
minutes=sign * int(m.group('minutes')))
date_format = '%Y-%m-%d{0}%H:%M:%S'.format(delimiter)
dt = datetime.datetime.strptime(date_str, date_format) - timezone
return calendar.timegm(dt.timetuple())
def unified_strdate(date_str):
"""Return a string with the date in the format YYYYMMDD"""
if date_str is None:
return None
upload_date = None
#Replace commas
date_str = date_str.replace(',', ' ')
# %z (UTC offset) is only supported in python>=3.2
date_str = re.sub(r' ?(\+|-)[0-9]{2}:?[0-9]{2}$', '', date_str)
format_expressions = [
'%d %B %Y',
'%d %b %Y',
'%B %d %Y',
'%b %d %Y',
'%b %dst %Y %I:%M%p',
'%b %dnd %Y %I:%M%p',
'%b %dth %Y %I:%M%p',
'%Y-%m-%d',
'%Y/%m/%d',
'%d.%m.%Y',
'%d/%m/%Y',
'%d/%m/%y',
'%Y/%m/%d %H:%M:%S',
'%d/%m/%Y %H:%M:%S',
'%Y-%m-%d %H:%M:%S',
'%Y-%m-%d %H:%M:%S.%f',
'%d.%m.%Y %H:%M',
'%d.%m.%Y %H.%M',
'%Y-%m-%dT%H:%M:%SZ',
'%Y-%m-%dT%H:%M:%S.%fZ',
'%Y-%m-%dT%H:%M:%S.%f0Z',
'%Y-%m-%dT%H:%M:%S',
'%Y-%m-%dT%H:%M:%S.%f',
'%Y-%m-%dT%H:%M',
]
for expression in format_expressions:
try:
upload_date = datetime.datetime.strptime(date_str, expression).strftime('%Y%m%d')
except ValueError:
pass
if upload_date is None:
timetuple = email.utils.parsedate_tz(date_str)
if timetuple:
upload_date = datetime.datetime(*timetuple[:6]).strftime('%Y%m%d')
return upload_date
def determine_ext(url, default_ext=u'unknown_video'):
if url is None:
return default_ext
guess = url.partition(u'?')[0].rpartition(u'.')[2]
if re.match(r'^[A-Za-z0-9]+$', guess):
return guess
else:
return default_ext
def subtitles_filename(filename, sub_lang, sub_format):
return filename.rsplit('.', 1)[0] + u'.' + sub_lang + u'.' + sub_format
def date_from_str(date_str):
"""
Return a datetime object from a string in the format YYYYMMDD or
(now|today)[+-][0-9](day|week|month|year)(s)?"""
today = datetime.date.today()
    if date_str == 'now' or date_str == 'today':
return today
match = re.match('(now|today)(?P<sign>[+-])(?P<time>\d+)(?P<unit>day|week|month|year)(s)?', date_str)
if match is not None:
sign = match.group('sign')
time = int(match.group('time'))
if sign == '-':
time = -time
unit = match.group('unit')
        # A rough approximation: a month is treated as 30 days, a year as 365
if unit == 'month':
unit = 'day'
time *= 30
elif unit == 'year':
unit = 'day'
time *= 365
unit += 's'
delta = datetime.timedelta(**{unit: time})
return today + delta
return datetime.datetime.strptime(date_str, "%Y%m%d").date()
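# Example: both absolute dates and offsets relative to the current day are
# accepted, e.g. for --dateafter/--datebefore style options.
def _example_date_from_str():
    return (date_from_str('20141225'),
            date_from_str('now-1week'),
            date_from_str('today+3days'))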
def hyphenate_date(date_str):
"""
Convert a date in 'YYYYMMDD' format to 'YYYY-MM-DD' format"""
match = re.match(r'^(\d\d\d\d)(\d\d)(\d\d)$', date_str)
if match is not None:
return '-'.join(match.groups())
else:
return date_str
class DateRange(object):
"""Represents a time interval between two dates"""
def __init__(self, start=None, end=None):
"""start and end must be strings in the format accepted by date"""
if start is not None:
self.start = date_from_str(start)
else:
self.start = datetime.datetime.min.date()
if end is not None:
self.end = date_from_str(end)
else:
self.end = datetime.datetime.max.date()
if self.start > self.end:
raise ValueError('Date range: "%s" , the start date must be before the end date' % self)
@classmethod
def day(cls, day):
"""Returns a range that only contains the given day"""
return cls(day,day)
def __contains__(self, date):
"""Check if the date is in the range"""
if not isinstance(date, datetime.date):
date = date_from_str(date)
return self.start <= date <= self.end
def __str__(self):
return '%s - %s' % ( self.start.isoformat(), self.end.isoformat())
def platform_name():
""" Returns the platform name as a compat_str """
res = platform.platform()
if isinstance(res, bytes):
res = res.decode(preferredencoding())
assert isinstance(res, compat_str)
return res
def _windows_write_string(s, out):
""" Returns True if the string was written using special methods,
False if it has yet to be written out."""
# Adapted from http://stackoverflow.com/a/3259271/35070
import ctypes
import ctypes.wintypes
WIN_OUTPUT_IDS = {
1: -11,
2: -12,
}
try:
fileno = out.fileno()
except AttributeError:
# If the output stream doesn't have a fileno, it's virtual
return False
if fileno not in WIN_OUTPUT_IDS:
return False
GetStdHandle = ctypes.WINFUNCTYPE(
ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD)(
("GetStdHandle", ctypes.windll.kernel32))
h = GetStdHandle(WIN_OUTPUT_IDS[fileno])
WriteConsoleW = ctypes.WINFUNCTYPE(
ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE, ctypes.wintypes.LPWSTR,
ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD),
ctypes.wintypes.LPVOID)(("WriteConsoleW", ctypes.windll.kernel32))
written = ctypes.wintypes.DWORD(0)
GetFileType = ctypes.WINFUNCTYPE(ctypes.wintypes.DWORD, ctypes.wintypes.DWORD)(("GetFileType", ctypes.windll.kernel32))
FILE_TYPE_CHAR = 0x0002
FILE_TYPE_REMOTE = 0x8000
GetConsoleMode = ctypes.WINFUNCTYPE(
ctypes.wintypes.BOOL, ctypes.wintypes.HANDLE,
ctypes.POINTER(ctypes.wintypes.DWORD))(
("GetConsoleMode", ctypes.windll.kernel32))
INVALID_HANDLE_VALUE = ctypes.wintypes.DWORD(-1).value
def not_a_console(handle):
if handle == INVALID_HANDLE_VALUE or handle is None:
return True
return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) != FILE_TYPE_CHAR
or GetConsoleMode(handle, ctypes.byref(ctypes.wintypes.DWORD())) == 0)
if not_a_console(h):
return False
def next_nonbmp_pos(s):
try:
return next(i for i, c in enumerate(s) if ord(c) > 0xffff)
except StopIteration:
return len(s)
while s:
count = min(next_nonbmp_pos(s), 1024)
ret = WriteConsoleW(
h, s, count if count else 2, ctypes.byref(written), None)
if ret == 0:
raise OSError('Failed to write string')
if not count: # We just wrote a non-BMP character
assert written.value == 2
s = s[1:]
else:
assert written.value > 0
s = s[written.value:]
return True
def write_string(s, out=None, encoding=None):
if out is None:
out = sys.stderr
assert type(s) == compat_str
if sys.platform == 'win32' and encoding is None and hasattr(out, 'fileno'):
if _windows_write_string(s, out):
return
if ('b' in getattr(out, 'mode', '') or
sys.version_info[0] < 3): # Python 2 lies about mode of sys.stderr
byt = s.encode(encoding or preferredencoding(), 'ignore')
out.write(byt)
elif hasattr(out, 'buffer'):
enc = encoding or getattr(out, 'encoding', None) or preferredencoding()
byt = s.encode(enc, 'ignore')
out.buffer.write(byt)
else:
out.write(s)
out.flush()
def bytes_to_intlist(bs):
if not bs:
return []
if isinstance(bs[0], int): # Python 3
return list(bs)
else:
return [ord(c) for c in bs]
def intlist_to_bytes(xs):
if not xs:
return b''
if isinstance(chr(0), bytes): # Python 2
return ''.join([chr(x) for x in xs])
else:
return bytes(xs)
# Cross-platform file locking
if sys.platform == 'win32':
import ctypes.wintypes
import msvcrt
class OVERLAPPED(ctypes.Structure):
_fields_ = [
('Internal', ctypes.wintypes.LPVOID),
('InternalHigh', ctypes.wintypes.LPVOID),
('Offset', ctypes.wintypes.DWORD),
('OffsetHigh', ctypes.wintypes.DWORD),
('hEvent', ctypes.wintypes.HANDLE),
]
kernel32 = ctypes.windll.kernel32
LockFileEx = kernel32.LockFileEx
LockFileEx.argtypes = [
ctypes.wintypes.HANDLE, # hFile
ctypes.wintypes.DWORD, # dwFlags
ctypes.wintypes.DWORD, # dwReserved
ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
ctypes.POINTER(OVERLAPPED) # Overlapped
]
LockFileEx.restype = ctypes.wintypes.BOOL
UnlockFileEx = kernel32.UnlockFileEx
UnlockFileEx.argtypes = [
ctypes.wintypes.HANDLE, # hFile
ctypes.wintypes.DWORD, # dwReserved
ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
ctypes.POINTER(OVERLAPPED) # Overlapped
]
UnlockFileEx.restype = ctypes.wintypes.BOOL
whole_low = 0xffffffff
whole_high = 0x7fffffff
def _lock_file(f, exclusive):
overlapped = OVERLAPPED()
overlapped.Offset = 0
overlapped.OffsetHigh = 0
overlapped.hEvent = 0
f._lock_file_overlapped_p = ctypes.pointer(overlapped)
handle = msvcrt.get_osfhandle(f.fileno())
if not LockFileEx(handle, 0x2 if exclusive else 0x0, 0,
whole_low, whole_high, f._lock_file_overlapped_p):
raise OSError('Locking file failed: %r' % ctypes.FormatError())
def _unlock_file(f):
assert f._lock_file_overlapped_p
handle = msvcrt.get_osfhandle(f.fileno())
if not UnlockFileEx(handle, 0,
whole_low, whole_high, f._lock_file_overlapped_p):
raise OSError('Unlocking file failed: %r' % ctypes.FormatError())
else:
import fcntl
def _lock_file(f, exclusive):
fcntl.flock(f, fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH)
def _unlock_file(f):
fcntl.flock(f, fcntl.LOCK_UN)
class locked_file(object):
def __init__(self, filename, mode, encoding=None):
assert mode in ['r', 'a', 'w']
self.f = io.open(filename, mode, encoding=encoding)
self.mode = mode
def __enter__(self):
exclusive = self.mode != 'r'
try:
_lock_file(self.f, exclusive)
except IOError:
self.f.close()
raise
return self
def __exit__(self, etype, value, traceback):
try:
_unlock_file(self.f)
finally:
self.f.close()
def __iter__(self):
return iter(self.f)
def write(self, *args):
return self.f.write(*args)
def read(self, *args):
return self.f.read(*args)
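# locked_file is meant to be used as a context manager so the lock is always
# released; for example, appending a line to a shared archive file
# (the path here is a placeholder):
def _example_locked_append(line, path='archive.txt'):
    with locked_file(path, 'a', encoding='utf-8') as archive:
        archive.write(line + u'\n')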
def get_filesystem_encoding():
encoding = sys.getfilesystemencoding()
return encoding if encoding is not None else 'utf-8'
def shell_quote(args):
quoted_args = []
encoding = get_filesystem_encoding()
for a in args:
if isinstance(a, bytes):
# We may get a filename encoded with 'encodeFilename'
a = a.decode(encoding)
quoted_args.append(pipes.quote(a))
return u' '.join(quoted_args)
def takewhile_inclusive(pred, seq):
""" Like itertools.takewhile, but include the latest evaluated element
(the first element so that Not pred(e)) """
for e in seq:
yield e
if not pred(e):
return
def smuggle_url(url, data):
""" Pass additional data in a URL for internal use. """
sdata = compat_urllib_parse.urlencode(
{u'__youtubedl_smuggle': json.dumps(data)})
return url + u'#' + sdata
def unsmuggle_url(smug_url, default=None):
if not '#__youtubedl_smuggle' in smug_url:
return smug_url, default
url, _, sdata = smug_url.rpartition(u'#')
jsond = compat_parse_qs(sdata)[u'__youtubedl_smuggle'][0]
data = json.loads(jsond)
return url, data
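# Example round trip: the extra dict travels URL-encoded in the fragment and
# is recovered unchanged by unsmuggle_url (URL and dict below are made up).
def _example_smuggle_url():
    url = smuggle_url(u'http://example.com/watch?v=abc', {u'referer': u'http://example.com/'})
    # returns the original URL together with the smuggled dict
    return unsmuggle_url(url)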
def format_bytes(bytes):
if bytes is None:
return u'N/A'
if type(bytes) is str:
bytes = float(bytes)
if bytes == 0.0:
exponent = 0
else:
exponent = int(math.log(bytes, 1024.0))
suffix = [u'B', u'KiB', u'MiB', u'GiB', u'TiB', u'PiB', u'EiB', u'ZiB', u'YiB'][exponent]
converted = float(bytes) / float(1024 ** exponent)
return u'%.2f%s' % (converted, suffix)
def get_term_width():
columns = compat_getenv('COLUMNS', None)
if columns:
return int(columns)
try:
sp = subprocess.Popen(
['stty', 'size'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = sp.communicate()
return int(out.split()[1])
except:
pass
return None
def month_by_name(name):
""" Return the number of a month by (locale-independently) English name """
ENGLISH_NAMES = [
u'January', u'February', u'March', u'April', u'May', u'June',
u'July', u'August', u'September', u'October', u'November', u'December']
try:
return ENGLISH_NAMES.index(name) + 1
except ValueError:
return None
def fix_xml_ampersands(xml_str):
"""Replace all the '&' by '&' in XML"""
return re.sub(
r'&(?!amp;|lt;|gt;|apos;|quot;|#x[0-9a-fA-F]{,4};|#[0-9]{,4};)',
u'&',
xml_str)
def setproctitle(title):
assert isinstance(title, compat_str)
try:
libc = ctypes.cdll.LoadLibrary("libc.so.6")
except OSError:
return
title_bytes = title.encode('utf-8')
buf = ctypes.create_string_buffer(len(title_bytes))
buf.value = title_bytes
try:
libc.prctl(15, buf, 0, 0, 0)
except AttributeError:
return # Strange libc, just skip this
def remove_start(s, start):
if s.startswith(start):
return s[len(start):]
return s
def remove_end(s, end):
if s.endswith(end):
return s[:-len(end)]
return s
def url_basename(url):
path = compat_urlparse.urlparse(url).path
return path.strip(u'/').split(u'/')[-1]
class HEADRequest(compat_urllib_request.Request):
def get_method(self):
return "HEAD"
def int_or_none(v, scale=1, default=None, get_attr=None, invscale=1):
if get_attr:
if v is not None:
v = getattr(v, get_attr, None)
if v == '':
v = None
return default if v is None else (int(v) * invscale // scale)
def str_or_none(v, default=None):
return default if v is None else compat_str(v)
def str_to_int(int_str):
""" A more relaxed version of int_or_none """
if int_str is None:
return None
int_str = re.sub(r'[,\.\+]', u'', int_str)
return int(int_str)
def float_or_none(v, scale=1, invscale=1, default=None):
return default if v is None else (float(v) * invscale / scale)
def parse_duration(s):
if s is None:
return None
s = s.strip()
m = re.match(
r'(?i)(?:(?:(?P<hours>[0-9]+)\s*(?:[:h]|hours?)\s*)?(?P<mins>[0-9]+)\s*(?:[:m]|mins?|minutes?)\s*)?(?P<secs>[0-9]+)(?P<ms>\.[0-9]+)?\s*(?:s|secs?|seconds?)?$', s)
if not m:
return None
res = int(m.group('secs'))
if m.group('mins'):
res += int(m.group('mins')) * 60
if m.group('hours'):
res += int(m.group('hours')) * 60 * 60
if m.group('ms'):
res += float(m.group('ms'))
return res
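# A few of the shapes the pattern above accepts: plain seconds, MM:SS,
# HH:MM:SS and suffixed forms like "2h30m10s".
def _example_parse_duration():
    # -> [47, 90, 3723, 9010]
    return [parse_duration(s) for s in ('47', '1:30', '01:02:03', '2h30m10s')]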
def prepend_extension(filename, ext):
name, real_ext = os.path.splitext(filename)
return u'{0}.{1}{2}'.format(name, ext, real_ext)
def check_executable(exe, args=[]):
""" Checks if the given binary is installed somewhere in PATH, and returns its name.
args can be a list of arguments for a short output (like -version) """
try:
subprocess.Popen([exe] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
except OSError:
return False
return exe
def get_exe_version(exe, args=['--version'],
version_re=r'version\s+([0-9._-a-zA-Z]+)',
unrecognized=u'present'):
""" Returns the version of the specified executable,
or False if the executable is not present """
try:
out, err = subprocess.Popen(
[exe] + args,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()
except OSError:
return False
firstline = out.partition(b'\n')[0].decode('ascii', 'ignore')
m = re.search(version_re, firstline)
if m:
return m.group(1)
else:
return unrecognized
class PagedList(object):
def __len__(self):
# This is only useful for tests
return len(self.getslice())
class OnDemandPagedList(PagedList):
def __init__(self, pagefunc, pagesize):
self._pagefunc = pagefunc
self._pagesize = pagesize
def getslice(self, start=0, end=None):
res = []
for pagenum in itertools.count(start // self._pagesize):
firstid = pagenum * self._pagesize
nextfirstid = pagenum * self._pagesize + self._pagesize
if start >= nextfirstid:
continue
page_results = list(self._pagefunc(pagenum))
startv = (
start % self._pagesize
if firstid <= start < nextfirstid
else 0)
endv = (
((end - 1) % self._pagesize) + 1
if (end is not None and firstid <= end <= nextfirstid)
else None)
if startv != 0 or endv is not None:
page_results = page_results[startv:endv]
res.extend(page_results)
# A little optimization - if current page is not "full", ie. does
# not contain page_size videos then we can assume that this page
# is the last one - there are no more ids on further pages -
# i.e. no need to query again.
if len(page_results) + startv < self._pagesize:
break
# If we got the whole page, but the next page is not interesting,
# break out early as well
if end == nextfirstid:
break
return res
class InAdvancePagedList(PagedList):
def __init__(self, pagefunc, pagecount, pagesize):
self._pagefunc = pagefunc
self._pagecount = pagecount
self._pagesize = pagesize
def getslice(self, start=0, end=None):
res = []
start_page = start // self._pagesize
end_page = (
self._pagecount if end is None else (end // self._pagesize + 1))
skip_elems = start - start_page * self._pagesize
only_more = None if end is None else end - start
for pagenum in range(start_page, end_page):
page = list(self._pagefunc(pagenum))
if skip_elems:
page = page[skip_elems:]
skip_elems = None
if only_more is not None:
if len(page) < only_more:
only_more -= len(page)
else:
page = page[:only_more]
res.extend(page)
break
res.extend(page)
return res
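# Example: expose a flat list as pages of three items; slicing only evaluates
# the pages that are actually needed, which is the point of the lazy variant.
def _example_paged_list():
    items = list(range(10))
    def page(pagenum):
        return items[pagenum * 3:(pagenum + 1) * 3]
    # -> [2, 3, 4, 5, 6]
    return OnDemandPagedList(page, 3).getslice(2, 7)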
def uppercase_escape(s):
unicode_escape = codecs.getdecoder('unicode_escape')
return re.sub(
r'\\U[0-9a-fA-F]{8}',
lambda m: unicode_escape(m.group(0))[0],
s)
def escape_rfc3986(s):
"""Escape non-ASCII characters as suggested by RFC 3986"""
if sys.version_info < (3, 0) and isinstance(s, unicode):
s = s.encode('utf-8')
return compat_urllib_parse.quote(s, b"%/;:@&=+$,!~*'()?#[]")
def escape_url(url):
"""Escape URL as suggested by RFC 3986"""
url_parsed = compat_urllib_parse_urlparse(url)
return url_parsed._replace(
path=escape_rfc3986(url_parsed.path),
params=escape_rfc3986(url_parsed.params),
query=escape_rfc3986(url_parsed.query),
fragment=escape_rfc3986(url_parsed.fragment)
).geturl()
try:
struct.pack(u'!I', 0)
except TypeError:
# In Python 2.6 (and some 2.7 versions), struct requires a bytes argument
def struct_pack(spec, *args):
if isinstance(spec, compat_str):
spec = spec.encode('ascii')
return struct.pack(spec, *args)
def struct_unpack(spec, *args):
if isinstance(spec, compat_str):
spec = spec.encode('ascii')
return struct.unpack(spec, *args)
else:
struct_pack = struct.pack
struct_unpack = struct.unpack
def read_batch_urls(batch_fd):
def fixup(url):
if not isinstance(url, compat_str):
url = url.decode('utf-8', 'replace')
BOM_UTF8 = u'\xef\xbb\xbf'
if url.startswith(BOM_UTF8):
url = url[len(BOM_UTF8):]
url = url.strip()
if url.startswith(('#', ';', ']')):
return False
return url
with contextlib.closing(batch_fd) as fd:
return [url for url in map(fixup, fd) if url]
def urlencode_postdata(*args, **kargs):
return compat_urllib_parse.urlencode(*args, **kargs).encode('ascii')
try:
etree_iter = xml.etree.ElementTree.Element.iter
except AttributeError: # Python <=2.6
etree_iter = lambda n: n.findall('.//*')
def parse_xml(s):
class TreeBuilder(xml.etree.ElementTree.TreeBuilder):
def doctype(self, name, pubid, system):
pass # Ignore doctypes
parser = xml.etree.ElementTree.XMLParser(target=TreeBuilder())
kwargs = {'parser': parser} if sys.version_info >= (2, 7) else {}
tree = xml.etree.ElementTree.XML(s.encode('utf-8'), **kwargs)
# Fix up XML parser in Python 2.x
if sys.version_info < (3, 0):
for n in etree_iter(tree):
if n.text is not None:
if not isinstance(n.text, compat_str):
n.text = n.text.decode('utf-8')
return tree
US_RATINGS = {
'G': 0,
'PG': 10,
'PG-13': 13,
'R': 16,
'NC': 18,
}
def parse_age_limit(s):
if s is None:
return None
m = re.match(r'^(?P<age>\d{1,2})\+?$', s)
return int(m.group('age')) if m else US_RATINGS.get(s, None)
def strip_jsonp(code):
return re.sub(r'(?s)^[a-zA-Z0-9_]+\s*\(\s*(.*)\);?\s*?\s*$', r'\1', code)
def js_to_json(code):
def fix_kv(m):
v = m.group(0)
if v in ('true', 'false', 'null'):
return v
if v.startswith('"'):
return v
if v.startswith("'"):
v = v[1:-1]
v = re.sub(r"\\\\|\\'|\"", lambda m: {
'\\\\': '\\\\',
"\\'": "'",
'"': '\\"',
}[m.group(0)], v)
return '"%s"' % v
res = re.sub(r'''(?x)
"(?:[^"\\]*(?:\\\\|\\")?)*"|
'(?:[^'\\]*(?:\\\\|\\')?)*'|
[a-zA-Z_][a-zA-Z_0-9]*
''', fix_kv, code)
res = re.sub(r',(\s*\])', lambda m: m.group(1), res)
return res
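# Example: unquoted keys, single-quoted strings and a trailing comma inside a
# list are rewritten so the result parses as strict JSON.
def _example_js_to_json():
    return json.loads(js_to_json("{title: 'clip', ids: [1, 2, ], 'ok': true}"))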
def qualities(quality_ids):
""" Get a numeric quality value out of a list of possible values """
def q(qid):
try:
return quality_ids.index(qid)
except ValueError:
return -1
return q
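# Example: the returned callable ranks format ids by their position in the
# preference list (ids that are not listed sort below everything else), so it
# can be used directly as a sort key.
def _example_qualities():
    q = qualities(['flv', 'mp4', 'hd'])
    # -> ['unknown', 'flv', 'hd']
    return sorted(['hd', 'unknown', 'flv'], key=q)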
DEFAULT_OUTTMPL = '%(title)s-%(id)s.%(ext)s'
def limit_length(s, length):
""" Add ellipses to overly long strings """
if s is None:
return None
ELLIPSES = '...'
if len(s) > length:
return s[:length - len(ELLIPSES)] + ELLIPSES
return s
def version_tuple(v):
return [int(e) for e in v.split('.')]
def is_outdated_version(version, limit, assume_new=True):
if not version:
return not assume_new
try:
return version_tuple(version) < version_tuple(limit)
except ValueError:
return not assume_new
| unlicense | -7,128,415,689,819,608,000 | 30.176555 | 240 | 0.576096 | false |
sanja7s/SR_Twitter | src_CAPITAL/Soc_Sem_Capital.py | 1 | 5448 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
'''
analyze assortativity of the graphs in terms of sentiment
'''
from igraph import *
import networkx as nx
import os
import numpy as np
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
import os
import matplotlib.cm as cm
from collections import defaultdict
import matplotlib
font = {'family' : 'sans-serif',
'variant' : 'normal',
'weight' : 'light',
'size' : 14}
matplotlib.rc('font', **font)
f_in_user_labels = "usr_num_CVs.tab"
##################
f_in_user_taxons = "user_taxons.tab"
f_in_user_concepts = "user_concepts.tab"
f_in_user_entities = "user_entities.tab"
f_in_num_tweets = "usr_num_tweets.tab"
#########################
#
f_in_user_sentiment = "user_sentiment.tab"
#
# mention graph
#########################
f_in_graph = "threshold_mention_graphs/directed_threshold0.tab"
f_in_graph_weights = "threshold_mention_graphs/mention_graph_weights.dat"
f_out_sent_mention_graph = "directed_threshold0_sent_val.tab"
IN_DIR = "../../../DATA/mention_graph/"
f_out_mention = "sentiment_assortativity_mention_2.txt"
#########################
def social_capital_vs_sentiment():
os.chdir(IN_DIR)
G = Graph.Read_Ncol(f_in_graph_weights,names=True, directed=True, weights=True)
summary(G)
f = open(f_in_user_sentiment, "r")
soc_cap = []
soc_cap_int = []
sem_cap = []
ierror = 0
cnt = 0
for line in f:
(vid, vn, val) = line.split('\t')
val = float(val)
v = G.vs.select(name = vid)
cnt += 1
v["val"] = val
try:
d = G.degree(v[0].index)
wd = G.strength(v[0].index, weights='weight')
if d < 1000:
soc_cap.append(d)
soc_cap_int.append(wd)
sem_cap.append(val)
except IndexError:
ierror += 1
print cnt, ierror
to_delete_vertices = [v.index for v in G.vs if v["val"] == None]
print len(to_delete_vertices)
G.delete_vertices(to_delete_vertices)
summary(G)
#print "Sent nominal assortativity is %f " % (G.assortativity_nominal(types="val",directed=True))
print "Sent assortativity is %f " % (G.assortativity("val",directed=True))
print "Sent assortativity UNDIR is %f " % (G.assortativity("val",directed=False))
plot_capitals(soc_cap, sem_cap)
plot_capitals(soc_cap_int, sem_cap)
def social_capital_vs_CVs():
os.chdir(IN_DIR)
G = Graph.Read_Ncol(f_in_graph_weights,names=True, directed=True, weights=True)
summary(G)
f = open(f_in_user_labels, "r")
soc_cap = []
soc_cap_int = []
sem_cap = []
CAPs_vol = defaultdict(int)
ierror = 0
cnt = 0
for line in f:
(vid, val) = line.split('\t')
val = int(val)
v = G.vs.select(name = vid)
cnt += 1
v["val"] = val
try:
d = G.degree(v[0].index)
wd = G.strength(v[0].index, weights='weight')
if d < 1000:
soc_cap.append(d)
soc_cap_int.append(wd)
sem_cap.append(val)
except IndexError:
ierror += 1
print cnt, ierror
to_delete_vertices = [v.index for v in G.vs if v["val"] == None]
print len(to_delete_vertices)
G.delete_vertices(to_delete_vertices)
summary(G)
plot_capitals(soc_cap, sem_cap)
plot_capitals(soc_cap_int, sem_cap)
def social_capital_vs_concepts():
os.chdir(IN_DIR)
G = Graph.Read_Ncol(f_in_graph_weights,names=True, directed=True, weights=True)
summary(G)
f = open(f_in_user_concepts, "r")
soc_cap = []
soc_cap_int = []
sem_cap = []
ierror = 0
cnt = 0
for line in f:
(vid, val) = line.split('\t')
val = int(val)
v = G.vs.select(name = vid)
cnt += 1
v["val"] = val
try:
d = G.degree(v[0].index, mode=IN)
wd = G.strength(v[0].index, weights='weight', mode=IN)
if d < 1000:
soc_cap.append(d)
soc_cap_int.append(wd)
sem_cap.append(val)
except IndexError:
ierror += 1
print cnt, ierror
to_delete_vertices = [v.index for v in G.vs if v["val"] == None]
print len(to_delete_vertices)
G.delete_vertices(to_delete_vertices)
summary(G)
print "CV nominal assortativity is %f " % (G.assortativity_nominal(types="val",directed=True))
print "CV assortativity is %f " % (G.assortativity("val",directed=True))
print "CV assortativity UNDIR is %f " % (G.assortativity("val",directed=False))
plot_capitals(soc_cap, sem_cap)
plot_capitals(soc_cap_int, sem_cap)
def social_capital_vs_entities():
os.chdir(IN_DIR)
G = Graph.Read_Ncol(f_in_graph_weights,names=True, directed=True, weights=True)
summary(G)
f = open(f_in_user_entities, "r")
soc_cap = []
soc_cap_int = []
sem_cap = []
ierror = 0
cnt = 0
for line in f:
(vid, val) = line.split('\t')
val = int(val)
v = G.vs.select(name = vid)
cnt += 1
v["val"] = val
try:
d = G.degree(v[0].index)
wd = G.strength(v[0].index, weights='weight')
if d < 1000:
soc_cap.append(d)
soc_cap_int.append(wd)
sem_cap.append(val)
except IndexError:
ierror += 1
print cnt, ierror
to_delete_vertices = [v.index for v in G.vs if v["val"] == None]
print len(to_delete_vertices)
G.delete_vertices(to_delete_vertices)
summary(G)
print "CV nominal assortativity is %f " % (G.assortativity_nominal(types="val",directed=True))
print "CV assortativity is %f " % (G.assortativity("val",directed=True))
print "CV assortativity UNDIR is %f " % (G.assortativity("val",directed=False))
plot_capitals(soc_cap, sem_cap)
plot_capitals(soc_cap_int, sem_cap)
def plot_capitals(x, y):
plt.scatter(x,y,color='darkorchid')
plt.show()
#social_capital_vs_CVs()
#social_capital_vs_concepts()
#social_capital_vs_entities()
social_capital_vs_sentiment() | mit | 2,786,904,285,392,879,000 | 21.894958 | 99 | 0.646109 | false |
wcjohns/poach | gpio/stepper_motor.py | 1 | 2057 | #!/usr/bin/python
#--------------------------------------
# ___ ___ _ ____
# / _ \/ _ \(_) __/__ __ __
# / , _/ ___/ /\ \/ _ \/ // /
# /_/|_/_/ /_/___/ .__/\_, /
# /_/ /___/
#
# Stepper Motor Test
#
# A simple script to control
# a stepper motor.
#
# Author : Matt Hawkins
# Date : 11/07/2012
#
# http://www.raspberrypi-spy.co.uk/
#
#--------------------------------------
#!/usr/bin/env python
# Import required libraries
import time
import RPi.GPIO as GPIO
# Use BCM GPIO references
# instead of physical pin numbers
GPIO.setmode(GPIO.BCM)
# Define GPIO signals to use
# Pins 18,22,24,26
# GPIO24,GPIO25,GPIO8,GPIO7
#StepPins = [24,25,8,7]
#if you use the wiring instructions from http://www.scraptopower.co.uk/Raspberry-Pi/how-to-connect-stepper-motors-a-raspberry-pi
#you need to set the pins to the following:
StepPins = [17,18,21,22]
# Set all pins as output
for pin in StepPins:
print "Setup pins"
GPIO.setup(pin,GPIO.OUT)
GPIO.output(pin, False)
# Define some settings
StepCounter = 0
#WaitTime = 0.5
WaitTime = 0.001
# Define simple sequence
StepCount1 = 4
Seq1 = []
Seq1 = range(0, StepCount1)
Seq1[0] = [1,0,0,0]
Seq1[1] = [0,1,0,0]
Seq1[2] = [0,0,1,0]
Seq1[3] = [0,0,0,1]
# Define advanced sequence
# as shown in manufacturers datasheet
StepCount2 = 8
Seq2 = []
Seq2 = range(0, StepCount2)
Seq2[0] = [1,0,0,0]
Seq2[1] = [1,1,0,0]
Seq2[2] = [0,1,0,0]
Seq2[3] = [0,1,1,0]
Seq2[4] = [0,0,1,0]
Seq2[5] = [0,0,1,1]
Seq2[6] = [0,0,0,1]
Seq2[7] = [1,0,0,1]
# Choose a sequence to use
Seq = Seq2
StepCount = StepCount2
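# Walking the chosen sequence backwards reverses the motor.  A small helper
# that yields successive pin states in either direction (how many steps make
# one revolution depends on the motor and gearing - e.g. 512 * 8 half-steps
# for a typical 28BYJ-48 - which is an assumption, not something this script
# checks):
def stepStates(direction=1, steps=StepCount):
    counter = 0
    for _ in range(steps):
        yield Seq[counter]
        counter = (counter + direction) % StepCount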
# Start main loop
while 1==1:
for pin in range(0, 4):
xpin = StepPins[pin]
if Seq[StepCounter][pin]!=0:
# print " Step %i Enable %i" %(StepCounter,xpin)
GPIO.output(xpin, True)
else:
GPIO.output(xpin, False)
StepCounter += 1
# If we reach the end of the sequence
# start again
if (StepCounter==StepCount):
StepCounter = 0
if (StepCounter<0):
StepCounter = StepCount
# Wait before moving on
time.sleep(WaitTime)
| lgpl-3.0 | -6,791,374,788,318,470,000 | 19.777778 | 128 | 0.584346 | false |
mazafrav/JdeRobot | src/tools/colorTuner_py/filters/rgbFilter.py | 1 | 1363 |
from threading import Lock
import cv2
import numpy as np
'''Max Values supported by OpenCV'''
RGBMAX = [255,255,255]
RGBMIN = [0,0,0]
class RgbFilter:
def __init__(self):
self.lock = Lock()
self.MAX = RGBMAX
self.MIN = RGBMIN
self.uLimit = self.MAX
self.dLimit = self.MIN
def getName(self):
return 'RGB'
def setUpLimit (self, r, g, b):
self.lock.acquire()
self.uLimit = [r,g,b]
self.lock.release()
def getUpLimit (self):
self.lock.acquire()
lim = self.uLimit
self.lock.release()
return lim
def setDownLimit(self, r, g, b):
self.lock.acquire()
self.dLimit = [r,g,b]
self.lock.release()
def getDownLimit(self):
self.lock.acquire()
lim = self.dLimit
self.lock.release()
return lim
def getMAX(self):
return self.MAX
def getMIN(self):
return self.MIN
def apply (self, img):
rup,gup,bup = self.getUpLimit()
rdwn,gdwn,bdwn = self.getDownLimit()
minValues = np.array([rdwn,gdwn,bdwn],dtype=np.uint8)
maxValues = np.array([rup,gup,bup], dtype=np.uint8)
mask = cv2.inRange(img, minValues, maxValues)
res = cv2.bitwise_and(img,img, mask= mask)
return res
return img | gpl-3.0 | -8,469,146,792,431,476,000 | 18.211268 | 61 | 0.559795 | false |
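# Minimal usage sketch: tighten the accepted range and run the filter over an
# image.  The limits below are arbitrary example values; img is expected to be
# an 8-bit, 3-channel OpenCV image.
def _demoRgbFilter(img):
    f = RgbFilter()
    f.setDownLimit(60, 0, 0)
    f.setUpLimit(255, 200, 200)
    return f.apply(img)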
foxscotch/foxrollbot | db.py | 1 | 3694 | import os
import sqlite3
from pathlib import Path
from jinja2 import Template
from roll import RollCommand
from errors import *
class SavedRollManager:
"""
Class for managing saved rolls.
Attributes:
db (str): URI of database used for connections
"""
TABLE = 'saved_rolls'
"""str: Name of table in which to store saved rolls"""
def __init__(self, db=None):
"""
Create a SavedRollManager instance.
If a connection is not passed, it will use a new in-memory database.
Args:
db (str): URI of database to connect to
"""
if db is None:
self.db = 'file:foxrollbot_db?mode=memory&cache=shared'
else:
self.db = db
# This attribute is used to maintain a single connection to the
# database, so that in-memory databases aren't just lost after every
# connection is finished.
self._main_connection = sqlite3.connect(self.db, uri=True)
self._load_statements()
self._init_db()
def _init_db(self):
"""
Ensure that the database is set up correctly, initializing it if
necessary.
"""
cursor = self._main_connection.cursor()
cursor.execute(self.sql['create_table'])
self._main_connection.commit()
def _load_statements(self):
"""Load SQL statements from the ./sql directory."""
home = Path('.')
context = {'table_name': self.TABLE}
self.sql = {}
for path in home.glob('./sql/*'):
with open(path) as f:
template = Template(f.read().strip())
self.sql[path.stem] = template.render(context)
def connect(self):
return sqlite3.connect(self.db, uri=True)
def save(self, name, args, user):
"""
Save a roll to the database.
Args:
name (str): Name of saved roll
args (list): Arguments to save for roll
user (int): User ID to save roll for
"""
# Make sure the given arguments are valid first.
RollCommand.from_args(args)
connection = self.connect()
cursor = connection.cursor()
cursor.execute(self.sql['save'], {'name': name,
'args': ' '.join(args),
'user': user})
connection.commit()
def get(self, name, user):
"""
Get a saved roll from the database.
Args:
name (str): Name of saved roll
user (int): User ID to get roll for
Returns:
list: List of arguments of saved roll
"""
connection = self.connect()
cursor = connection.cursor()
cursor.execute(self.sql['get'], {'name': name,
'user': user})
result = cursor.fetchone()
if result is not None:
return result[0].split()
else:
raise DoesNotExistException(
'Could not find an applicable saved roll with that name.')
def delete(self, name, user):
"""
Delete a saved roll from the database.
Args:
name (str): Name of saved roll
user (int): User ID to delete roll from
"""
connection = self.connect()
cursor = connection.cursor()
cursor.execute(self.sql['delete'], {'name': name,
'user': user})
if cursor.rowcount < 1:
raise DoesNotExistException(
'Could not find an applicable saved roll with that name.')
connection.commit()
| mit | 5,504,732,386,518,892,000 | 29.278689 | 76 | 0.539253 | false |
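A minimal sketch (not part of the foxrollbot file above) of the shared in-memory SQLite pattern SavedRollManager relies on: connections opened with the same "file:...?mode=memory&cache=shared" URI see one database for as long as any of them stays open. The table and values here are illustrative.
import sqlite3
uri = 'file:demo_db?mode=memory&cache=shared'
keeper = sqlite3.connect(uri, uri=True)      # keeps the in-memory DB alive
keeper.execute('CREATE TABLE saved_rolls (name TEXT, args TEXT, user INTEGER)')
keeper.commit()
other = sqlite3.connect(uri, uri=True)       # a second, independent connection
other.execute("INSERT INTO saved_rolls VALUES ('smite', '2d20+5', 42)")
other.commit()
print(keeper.execute('SELECT * FROM saved_rolls').fetchall())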
wscullin/spack | lib/spack/spack/test/cmd/install.py | 1 | 6876 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
import os
import filecmp
import pytest
import spack
import spack.cmd.install
from spack.spec import Spec
from spack.main import SpackCommand
install = SpackCommand('install')
@pytest.fixture(scope='module')
def parser():
"""Returns the parser for the module command"""
parser = argparse.ArgumentParser()
spack.cmd.install.setup_parser(parser)
return parser
@pytest.fixture()
def noop_install(monkeypatch):
def noop(*args, **kwargs):
return
monkeypatch.setattr(spack.package.PackageBase, 'do_install', noop)
def test_install_package_and_dependency(
tmpdir, builtin_mock, mock_archive, mock_fetch, config,
install_mockery):
tmpdir.chdir()
install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
files = tmpdir.listdir()
filename = tmpdir.join('test.xml')
assert filename in files
content = filename.open().read()
assert 'tests="2"' in content
assert 'failures="0"' in content
assert 'errors="0"' in content
s = Spec('libdwarf').concretized()
assert not spack.repo.get(s).stage.created
@pytest.mark.usefixtures('noop_install', 'builtin_mock', 'config')
def test_install_runtests():
assert not spack.package_testing._test_all
assert not spack.package_testing.packages_to_test
install('--test=root', 'dttop')
assert not spack.package_testing._test_all
assert spack.package_testing.packages_to_test == set(['dttop'])
spack.package_testing.clear()
install('--test=all', 'a')
assert spack.package_testing._test_all
assert not spack.package_testing.packages_to_test
spack.package_testing.clear()
install('--run-tests', 'a')
assert spack.package_testing._test_all
assert not spack.package_testing.packages_to_test
spack.package_testing.clear()
assert not spack.package_testing._test_all
assert not spack.package_testing.packages_to_test
def test_install_package_already_installed(
tmpdir, builtin_mock, mock_archive, mock_fetch, config,
install_mockery):
tmpdir.chdir()
install('libdwarf')
install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
files = tmpdir.listdir()
filename = tmpdir.join('test.xml')
assert filename in files
content = filename.open().read()
assert 'tests="2"' in content
assert 'failures="0"' in content
assert 'errors="0"' in content
skipped = [line for line in content.split('\n') if 'skipped' in line]
assert len(skipped) == 2
@pytest.mark.parametrize('arguments,expected', [
([], spack.dirty), # The default read from configuration file
(['--clean'], False),
(['--dirty'], True),
])
def test_install_dirty_flag(parser, arguments, expected):
args = parser.parse_args(arguments)
assert args.dirty == expected
def test_package_output(tmpdir, capsys, install_mockery, mock_fetch):
"""Ensure output printed from pkgs is captured by output redirection."""
# we can't use output capture here because it interferes with Spack's
# logging. TODO: see whether we can get multiple log_outputs to work
# when nested AND in pytest
spec = Spec('printing-package').concretized()
pkg = spec.package
pkg.do_install(verbose=True)
log_file = os.path.join(spec.prefix, '.spack', 'build.out')
with open(log_file) as f:
out = f.read()
# make sure that output from the actual package file appears in the
# right place in the build log.
assert "BEFORE INSTALL\n==> './configure'" in out
assert "'install'\nAFTER INSTALL" in out
def test_install_output_on_build_error(builtin_mock, mock_archive, mock_fetch,
config, install_mockery, capfd):
# capfd interferes with Spack's capturing
with capfd.disabled():
out = install('build-error', fail_on_error=False)
assert isinstance(install.error, spack.build_environment.ChildError)
assert install.error.name == 'ProcessError'
assert 'configure: error: in /path/to/some/file:' in out
assert 'configure: error: cannot run C compiled programs.' in out
def test_install_output_on_python_error(builtin_mock, mock_archive, mock_fetch,
config, install_mockery):
out = install('failing-build', fail_on_error=False)
assert isinstance(install.error, spack.build_environment.ChildError)
assert install.error.name == 'InstallError'
assert 'raise InstallError("Expected failure.")' in out
def test_install_with_source(
builtin_mock, mock_archive, mock_fetch, config, install_mockery):
"""Verify that source has been copied into place."""
install('--source', '--keep-stage', 'trivial-install-test-package')
spec = Spec('trivial-install-test-package').concretized()
src = os.path.join(
spec.prefix.share, 'trivial-install-test-package', 'src')
assert filecmp.cmp(os.path.join(mock_archive.path, 'configure'),
os.path.join(src, 'configure'))
def test_show_log_on_error(builtin_mock, mock_archive, mock_fetch,
config, install_mockery, capfd):
"""Make sure --show-log-on-error works."""
with capfd.disabled():
out = install('--show-log-on-error', 'build-error',
fail_on_error=False)
assert isinstance(install.error, spack.build_environment.ChildError)
assert install.error.pkg.name == 'build-error'
assert 'Full build log:' in out
errors = [line for line in out.split('\n')
if 'configure: error: cannot run C compiled programs' in line]
assert len(errors) == 2
| lgpl-2.1 | 6,151,122,762,028,452,000 | 34.261538 | 79 | 0.670157 | false |
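An illustrative sketch (independent of the Spack test file above) of the fixture-plus-monkeypatch pattern that noop_install uses: swap out an expensive method for the duration of one test. The Installer class here is hypothetical.
import pytest
class Installer:
    def do_install(self):
        raise RuntimeError("would build real packages")
@pytest.fixture
def noop_install(monkeypatch):
    # Replace the method on the class; pytest undoes this after the test.
    monkeypatch.setattr(Installer, 'do_install', lambda self: None)
def test_install_is_stubbed(noop_install):
    Installer().do_install()   # no RuntimeError: the stub is in place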
sassoftware/jobmaster | jobmaster/server.py | 1 | 10167 | #!/usr/bin/python
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import optparse
import os
import json
import sys
from conary import conarycfg
from conary.lib.log import setupLogging
from conary.lib.util import rmtree
from mcp import jobstatus
from mcp.messagebus import bus_node
from mcp.messagebus import messages
from mcp.messagebus import nodetypes
from mcp.messagebus.logger import MessageBusLogger
from rmake.lib import procutil
from jobmaster import config
from jobmaster import jobhandler
from jobmaster import util
from jobmaster.networking import AddressGenerator
from jobmaster.proxy import ProxyServer
from jobmaster.resources.devfs import LoopManager
from jobmaster.resources.block import get_scratch_lvs
from jobmaster.response import ResponseProxy
from jobmaster.subprocutil import setDebugHook
# Register image job message type with rMake
from mcp import image_job
image_job = image_job
log = logging.getLogger(__name__)
class JobMaster(bus_node.BusNode):
sessionClass = 'image_master'
subscriptions = [
'/image_command',
]
timerPeriod = 5
def __init__(self, cfg):
node = nodetypes.MasterNodeType(cfg.slaveLimit,
procutil.MachineInformation())
buslogger = MessageBusLogger.new(__name__ + '.messagebus')
bus_node.BusNode.__init__(self, (cfg.queueHost, cfg.queuePort),
nodeInfo=node, logger=buslogger)
self.cfg = cfg
self.handlers = {}
self.subprocesses = []
self._cfgCache = {}
self._map = self.bus.session._map
def getConaryConfig(self, rbuilderUrl, cache=True):
if cache and rbuilderUrl in self._cfgCache:
ccfg = self._cfgCache[rbuilderUrl]
else:
if not rbuilderUrl.endswith('/'):
rbuilderUrl += '/'
ccfg = conarycfg.ConaryConfiguration(True)
ccfg.initializeFlavors()
# Don't inherit proxy settings from the system
ccfg.configLine('proxyMap []')
ccfg.configLine('includeConfigFile %sconaryrc' % rbuilderUrl)
if cache:
self._cfgCache[rbuilderUrl] = ccfg
return ccfg
def pre_start(self):
self.addressGenerator = AddressGenerator(self.cfg.pairSubnet)
self.loopManager = LoopManager(
os.path.join(self.cfg.basePath, 'locks/loop'))
self.proxyServer = ProxyServer(self.cfg.masterProxyPort, self._map,
self)
def run(self):
log.info("Started with pid %d.", os.getpid())
setDebugHook()
try:
self.serve_forever()
finally:
self.killHandlers()
def killHandlers(self):
handlers, self.handlers = self.handlers, {}
for handler in handlers.values():
handler.kill()
# Node client machinery and entry points
def onTimer(self):
"""
Send jobmaster status to the dispatcher every 5 seconds.
"""
self.nodeInfo.machineInfo.update()
msg = messages.MasterStatusMessage()
msg.set(self.nodeInfo)
self.bus.sendMessage('/image_event', msg)
def doResetCommand(self, msg):
"""
Terminate all jobs, esp. after a dispatcher restart.
"""
log.info("Terminating all jobs per dispatcher request.")
self.killHandlers()
def doJobCommand(self, msg):
"""
Run a new image job.
"""
job = msg.payload.job
try:
handler = jobhandler.JobHandler(self, job)
self.proxyServer.addTarget(handler.network.slaveAddr, job.rbuilder_url)
handler.start()
self.handlers[job.uuid] = handler
except:
log.exception("Unhandled exception while starting job handler")
self.removeJob(job, failed=True)
def doStopCommand(self, msg):
"""Stop one running job."""
uuid = msg.getUUID()
if uuid in self.handlers:
log.info("Stopping job %s", uuid)
self.handlers[uuid].stop()
else:
log.info("Ignoring request to stop unknown job %s", uuid)
def doSetSlotsCommand(self, msg):
"""Set the number of slots."""
self.nodeInfo.slots = self.cfg.slaveLimit = int(msg.getSlots())
log.info("Setting slot limit to %d.", self.cfg.slaveLimit)
# Write the new value to file so it is preserved across restarts.
cfgDir = os.path.join(self.cfg.basePath, 'config.d')
if os.access(cfgDir, os.W_OK):
fObj = open(cfgDir + '/99_runtime.conf', 'w')
self.cfg.storeKey('slaveLimit', fObj)
fObj.close()
else:
log.warning("Could not write new config in %s.", cfgDir)
def handleRequestIfReady(self, sleepTime=1.0):
bus_node.BusNode.handleRequestIfReady(self, sleepTime)
# Check on all our subprocesses to make sure they are alive and reap
# them if they are not.
for handler in self.handlers.values():
if not handler.check():
self.handlerStopped(handler)
for proc in self.subprocesses[:]:
if not proc.check():
self.subprocesses.remove(proc)
def handlerStopped(self, handler):
"""
Clean up after a handler has exited.
"""
uuid = handler.job.uuid
# If the handler did not exit cleanly, notify the rBuilder that the job
# has failed.
if handler.exitCode:
log.error("Handler for job %s terminated unexpectedly", uuid)
self.removeJob(handler.job, failed=True)
else:
self.removeJob(handler.job, failed=False)
self.proxyServer.removeTarget(handler.network.slaveAddr)
del self.handlers[uuid]
def removeJob(self, job, failed=False):
if failed:
try:
response = ResponseProxy(job.rbuilder_url,
json.loads(job.job_data))
response.sendStatus(jobstatus.FAILED,
"Error creating build environment")
except:
log.exception("Unable to report failure for job %s", job.uuid)
msg = messages.JobCompleteMessage()
msg.set(job.uuid)
self.bus.sendMessage('/image_event', msg)
# Utility methods
def clean_mounts(self):
last = None
while True:
mounts = open('/proc/mounts').read().splitlines()
tried = set()
for mount in mounts:
mount = mount.split()[1]
for prefix in ('devfs', 'rootfs'):
if mount.startswith('/tmp/%s-' % prefix):
try:
util.call('umount ' + mount)
log.info("Unmounted %s", mount)
os.rmdir(mount)
except:
pass
tried.add(mount)
break
if not tried:
break
if tried == last:
log.warning("Failed to unmount these points: %s",
' '.join(tried))
break
last = tried
for lv_name in get_scratch_lvs(self.cfg.lvmVolumeName):
log.info("Deleting LV %s/%s", self.cfg.lvmVolumeName, lv_name)
util.call('lvremove -f %s/%s' % (self.cfg.lvmVolumeName, lv_name))
def clean_roots(self):
# Contents roots are no longer used; delete everything
root = os.path.join(self.cfg.basePath, 'roots')
for name in os.listdir(root):
path = os.path.join(root, name)
log.info("Deleting old contents root %s", name)
rmtree(path)
def main(args):
parser = optparse.OptionParser()
parser.add_option('-c', '--config-file', default=config.CONFIG_PATH)
parser.add_option('-n', '--no-daemon', action='store_true')
parser.add_option('--clean-mounts', action='store_true',
help='Clean up stray mount points and logical volumes')
parser.add_option('--clean-roots', action='store_true',
help='Clean up old jobslave roots')
options, args = parser.parse_args(args)
cfg = config.MasterConfig()
cfg.read(options.config_file)
if options.clean_mounts or options.clean_roots:
options.no_daemon = True
level = cfg.getLogLevel()
setupLogging(logPath=cfg.logPath, fileLevel=level, consoleFormat='file',
consoleLevel=level if options.no_daemon else None)
master = JobMaster(cfg)
if options.clean_mounts:
return master.clean_mounts()
elif options.clean_roots:
return master.clean_roots()
elif options.no_daemon:
master.pre_start()
master.run()
return 0
else:
master.pre_start()
# Double-fork to daemonize
pid = os.fork()
if pid:
return
pid = os.fork()
if pid:
os._exit(0)
try:
os.setsid()
devNull = os.open(os.devnull, os.O_RDWR)
os.dup2(devNull, sys.stdout.fileno())
os.dup2(devNull, sys.stderr.fileno())
os.dup2(devNull, sys.stdin.fileno())
os.close(devNull)
fObj = open(cfg.pidFile, 'w')
fObj.write(str(os.getpid()))
fObj.close()
master.run()
finally:
try:
os.unlink(cfg.pidFile)
finally:
os._exit(0)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| apache-2.0 | 8,605,496,121,579,022,000 | 32.444079 | 83 | 0.592112 | false |
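A condensed sketch (not part of the jobmaster file above) of the double-fork daemonization performed in main(): the first fork lets the caller return, setsid() detaches from the controlling terminal, and the second fork prevents re-acquiring one before stdio is redirected to /dev/null.
import os, sys
def daemonize():
    if os.fork():                # parent: return to the caller
        return False
    os.setsid()                  # new session, no controlling terminal
    if os.fork():                # first child exits, grandchild continues
        os._exit(0)
    devnull = os.open(os.devnull, os.O_RDWR)
    for fd in (sys.stdin.fileno(), sys.stdout.fileno(), sys.stderr.fileno()):
        os.dup2(devnull, fd)
    os.close(devnull)
    return True                  # now running detached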
Karkus476/flexlay | flexlay/util/sexpr_reader.py | 1 | 1596 | # Flexlay - A Generic 2D Game Editor
# Copyright (C) 2014 Ingo Ruhnke <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def sexpr_filter(name, tree):
ret = []
for i in tree:
if i[0] == name:
ret.append(i[1:])
return ret
def assoc_ref(lst, key):
if lst == []:
return None
elif lst[0][0] == key:
return lst[0][1:]
else:
return assoc_ref(lst[1:], key)
def get_value_from_tree(spec, tree, default):
if spec == []:
return tree
elif spec == ['_']:
# is it a translatable string?
if isinstance(tree[0], list) and tree[0][0] == "_":
return tree[0][1]
else:
return tree[0]
elif tree == []:
return default
else:
el = assoc_ref(tree, spec[0])
if el is not None:
return get_value_from_tree(spec[1:], el, default)
else:
return default
class SExprReader:
def __init__(self):
pass
# EOF #
| gpl-3.0 | 3,790,031,150,815,291,000 | 26.517241 | 71 | 0.612155 | false |
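A usage sketch (not part of the flexlay file above), assuming the three helpers are in scope (e.g. from flexlay.util.sexpr_reader import *); the tree is a parsed s-expression in the nested-list form they expect.
tree = [["name", "Level 1"],
        ["width", 20],
        ["objects", ["tile", 1], ["tile", 2]]]
print(assoc_ref(tree, "width"))                          # -> [20]
print(get_value_from_tree(["name", "_"], tree, "?"))     # -> 'Level 1'
print(sexpr_filter("tile", assoc_ref(tree, "objects")))  # -> [[1], [2]]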
patoloughlin/tagforthat | PythonCode/test_corpus.py | 1 | 3073 | #!/usr/bin/env python
import unittest
import sys,os
import index
import classifier
path = os.path.abspath(__file__)
sys.path.append(os.path.join(os.path.dirname(path), "../"))
import utils
from settings import settings
corpus = [
dict(question_id=0,tags=["Windows","crash","bluescreen"],body="Help! Windows 8 crashed on me. I unplugged my ethernet cable and it gave me a blue screen!"),
dict(question_id=1,tags=["Windows","C#","Visual Studio"],body="Hey there, I am trying to write a C# program in Visual Studio for my university. I am confused, please help."),
dict(question_id=2,tags=["Linux","crash","drivers"],body="Hi, I recently updated the Linux kernel to 3.03 and my ATI drivers as well. When using modprobe I got a kernel panic! :("),
dict(question_id=3,tags=["C++","pointer","graphics"],body="In CSCE 441, I heard we have to use quite a few pointers to complete our graphics homework. The program needs to be fast."),
dict(question_id=4,tags=["Java","Android","NullPointer"],body="I'm writing an Android java application, and I can't seem to get around this NullPointerException. I thought java didn't have pointers"),
dict(question_id=5,tags=["C++","pointer","dereference"],body="C++ noobie here. How, exactly do pointers and dereferencing work? It seems like a whole lot of guesswork to me."),
dict(question_id=6,tags=["C#","Windows","Mono"],body="Hi fellow Windows fanatics! Maybe we should use Monodevelop for our prefered language to allow for cross-platform coding."),
dict(question_id=7,tags=["Linux","Slackware","package"],body="Hello everybody. Recently, I borked my slackware install (corrupt kernel) by using third-party packages managers like slapt-get. Please teach me to be a responsible slacker!"),
dict(question_id=8,tags=["Java","graphics","meshes"],body="Hey there, I've been trying to create an algorithm that will programmatically create meshes using Java and lwjgl. I need help on this."),
dict(question_id=9,tags=["crash","Windows","cats"],body="Help! My cat walked across the keyboard and i used windows on my windows pc and my computer crashed!! help!"),
dict(question_id=10,tags=["Linux","crash","C#"],body="please help me use C# on Linux. Support for a non-windows platform seems very limited."),
]
class TestCoreAlg(unittest.TestCase):
def setUp(self):
self.index = index.Indexer()
self.index.index(corpus)
self.classifier = classifier.PNAClassifier(self.index.tagInfo,corpus)
def test_classifier_for_pointers(self):
tags = self.classifier.runPNAClassifier(4,2,"I hate pointers")
self.assertEqual(True,'pointer' in tags)
def test_classifier_for_cats(self):
tags = self.classifier.runPNAClassifier(4,2,"My cat ran accross my keyboard!")
self.assertEqual(True,'cats' in tags)
def test_classifier_for_windows(self):
tags = self.classifier.runPNAClassifier(4,2,"I got a blue screen of death!")
self.assertEqual(True,'bluescreen' in tags)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -8,579,990,683,114,029,000 | 61.714286 | 242 | 0.708103 | false |
YACOWS/opps-infographics | setup.py | 1 | 1735 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
from opps import infographics
install_requires = ["opps", 'opps-timelinejs']
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules']
dependency_links = ['-e git+https://github.com/opps/opps-timelinejs.git#egg=opps-timelinejs']
try:
long_description = open('README.md').read()
except:
long_description = infographics.__description__
setup(
name='opps-infographics',
namespace_packages=['opps', 'opps.infographics'],
version=infographics.__version__,
description=infographics.__description__,
long_description=long_description,
classifiers=classifiers,
keywords='infographic opps cms django apps magazines websites',
author=infographics.__author__,
author_email=infographics.__email__,
url='http://oppsproject.org',
download_url="https://github.com/YACOWS/opps-infographics/tarball/master",
license=infographics.__license__,
packages=find_packages(exclude=('doc', 'docs',)),
package_dir={'opps': 'opps'},
install_requires=install_requires,
dependency_links=dependency_links,
include_package_data=True,
package_data={
'infographics': ['templates/*']
}
)
| mit | 6,057,118,776,904,806,000 | 33.7 | 93 | 0.640922 | false |
Signiant/ultron | ultron/plugins/eb/__init__.py | 1 | 11821 | import boto3,json,imp,pprint
from botocore.exceptions import ClientError
import logging
appversions = []
def log (message):
print id() + ": " + message
def id():
return "eb"
def get_env_elb(envname,region, current_session):
client = current_session.client('elasticbeanstalk')
response = ""
elb_name = ""
try:
response = client.describe_environment_resources(
EnvironmentName=envname
)
except Exception, e:
log("Error describing the EB environment resources for " + envname + " (" + str(e) + ")")
except KeyError, e:
print "Key " + e + "not found"
if response:
# Eb only uses a single load balancer so grab the first
elb_name = response['EnvironmentResources']['LoadBalancers'][0]['Name']
return elb_name
def is_current_eb_env_live(env_lb,switchable_dns_entry,zoneid,region, current_session):
isLive = False
current_live_elb = get_r53_alias_entry(switchable_dns_entry, zoneid, current_session).rstrip('.').lower()
if current_live_elb.startswith(env_lb.lower()):
isLive = True
return isLive
#
# Get the route53 Alias entry for a given name
#
def get_r53_alias_entry(query_name,zoneid, current_session):
try:
endpoint = ""
client = current_session.client('route53')
response = client.list_resource_record_sets(
HostedZoneId=zoneid,
StartRecordName=query_name,
StartRecordType='A',
MaxItems='1'
)
if response:
endpoint = response['ResourceRecordSets'][0]['AliasTarget']['DNSName']
except Exception, e:
print str(e)
except KeyError, e:
print "Key " + e + "not found"
return endpoint
#eb do boto call and retrieve data
def eb_check_versions(profile_name, region_name, chealth, env_array, onlyiflive,slack_channel):
appversions = []
mysession = boto3.session.Session(profile_name=profile_name, region_name=region_name)
client = mysession.client('elasticbeanstalk')
response = client.describe_environments(
IncludeDeleted=False,
)
for env in response['Environments']:
c_version = env['VersionLabel']
c_app = env['ApplicationName']
c_env = env['EnvironmentName']
c_solstack = env['SolutionStackName']
c_health = env['Health']
date_updated = env['DateUpdated']
# set app version
c_appversion = {('app'): c_app, ('version'): c_version, ('environmentname'): c_env,
('solutionstack'): c_solstack, ('health'): c_health, ('dateupdated'):date_updated,
('regionname'): region_name,('slackchannel'):slack_channel}
for areas in env_array:
if areas in c_app:
logging.debug("MATCH: version label is %s app is %s environment is %s\n areas is %s checking app %s\n\n"%(c_version,c_app,c_env, areas,c_app))
else:
logging.debug("version label is %s app is %s environment is %s\n areas is %s checking app %s" % (
c_version, c_app, c_env, areas, c_app))
current_application_name = c_app.replace(" ","").lower()
current_application_keyword = areas.replace(" ","").lower()
if current_application_keyword in current_application_name:
# add the corresponding build name tag for each eb environment
c_appversion.update({('build_master_tag'): env_array[areas]['build_master_tag']})
if onlyiflive:
current_dns_name = env_array[areas]['dns_name']
current_zone_id = env_array[areas]['zone_id']
if current_dns_name != "" and current_zone_id != "":
env_lb = get_env_elb(c_env, region_name, mysession)
checklive = is_current_eb_env_live(env_lb, current_dns_name, current_zone_id, region_name,mysession)
else:
checklive = False
if checklive:
if chealth:
if env['Health'] == "Green":
appversions.append(c_appversion)
else:
appversions.append(c_appversion)
else:
if chealth:
if env['Health'] == "Green":
appversions.append(c_appversion)
else:
appversions.append(c_appversion)
return appversions
#version print out for eb environments
def get_version_output_string(thestring):
team_dot_index = thestring.find('.')
team_version_ending = thestring[team_dot_index:]
version_isolate = team_version_ending.split('.')
if version_isolate[-2].isdigit():
e_str = ('.').join(version_isolate[:-1])
elif version_isolate[-3].isdigit():
e_str = ('.').join(version_isolate[:-2])
else:
e_str = ('.').join(version_isolate[:-1])
return e_str[1:]
#extract the second part of service name to compare
def get_service_name_ending(thestring):
slash_index = thestring.find('/')
thestring = thestring[(slash_index+1):]
slash_index = thestring.find('-')
thestring = thestring[(slash_index + 1):]
return thestring.replace('.json',"")
#Main comparing function
def compare_environment(team_env,master_env, j_tags):
"""""
Return types
1 - Matches Master
2 - Does not match master
3 - branch
"""""
result = 0
if (j_tags[0] in master_env):
if team_env == master_env:
result = 1
else:
if (j_tags[0] in team_env):
result = 2
else:
result = 3
#print " MATCH IS: "+team_env +" == " + master_env+" ==> "+str(result)
logging.debug("comparing %s and %s result is %s"% (team_env,master_env,result))
return result
def does_key_exist(thearray, thestring):
    # Guard against missing keys so the function matches its name.
    if thestring in thearray and thearray[thestring]:
        return thearray[thestring]
    else:
        return ""
#compress string is larger than 30 length
def shorten_input(thestring):
if len(thestring) > 30:
thestring = thestring[:27]+"..."
return thestring
else:
return thestring
#get build url
def format_string_for_comparison(word):
if "-" in word:
word = word.replace("-","_")
if " " in word:
word = word.replace(" ","_")
word = word.lower().split("_")
return word
def build_compare_words(lookup,compareto, j_tags):
result = False
compareto = format_string_for_comparison(compareto)
lookup = format_string_for_comparison(lookup)
res = list(set(compareto) ^ set(lookup))
if len(res) == 2 and j_tags[0] in res and j_tags[2] in res:
result = True
elif len(res) == 1 and (j_tags[0] in res or j_tags[1] in res):
result = True
return result
def get_build_url(cached_array, lookup_word, prelim_version, j_tags, match_num, ismaster):
the_url = None
build_detail = None
for the_names in cached_array:
if build_compare_words(lookup_word, the_names['name'], j_tags):
the_url =the_names['url']
build_detail = the_names['name']
symbols_array = [".","_","-"]
build_num = []
build_detail = shorten_input(build_detail)
for symb in symbols_array:
if symb in prelim_version:
build_num = prelim_version.split(symb)
break
if match_num == 2 and ismaster:
if len(build_num) > 1 and the_url:
final_url = str(the_url)+build_num[-1]+"/promotion/ | ver: "+str(prelim_version)
final_url = "build: "+ build_detail+"\n<"+final_url+ ">"
else:
# build url corresponding to service was not found
final_url = "build: "+ build_detail+"\nver: "+str(prelim_version)
else:
final_url = "build: " + build_detail + "\nver: " + str(prelim_version)
return final_url
def eb_compare_master_team(tkey,m_array, cached_array, jenkins_build_tags):
compared_array = dict()
eb_data = []
for eachmaster in m_array:
for m_data in m_array[eachmaster]:
for t_array in tkey:
logging.debug(t_array['regionname'] +" "+t_array['version'])
team_dot_index = t_array['version'].find('.')
team_version_prefix = t_array['version'][:team_dot_index]
team_version_ending = t_array['version'][team_dot_index:]
master_dot_index = m_data['version'].find('.')
master_version_prefix = m_data['version'][0:master_dot_index]
master_version_ending = m_data['version'][master_dot_index:]
if team_version_prefix == master_version_prefix:
amatch = compare_environment(team_version_ending, master_version_ending, jenkins_build_tags)
prelim_master_version = get_version_output_string(m_data['version'])
master_version_entry = get_build_url(cached_array, m_data['build_master_tag'],
prelim_master_version, jenkins_build_tags,
amatch, ismaster=True)
prelim_team_version = get_version_output_string(t_array['version'])
team_version_entry = get_build_url(cached_array, t_array['build_master_tag'],
prelim_team_version, jenkins_build_tags,
amatch, ismaster=False)
eb_data.append({"master_env":m_data['environmentname'],
"master_version": master_version_entry,
"master_updateddate":m_data['dateupdated'],
"team_env":t_array['environmentname'],
"team_version": team_version_entry,
"team_updateddate":t_array['dateupdated'],
"Match":amatch, "mastername": eachmaster,
"regionname":t_array['regionname'],
"slackchannel": does_key_exist(t_array,'slackchannel'),
"pluginname": "eb"
})
#print " master version entry"
compared_array.update({'eb environment': eb_data})
return compared_array
#main eb plugin function
def check_versions(master_array, team_array, superjenkins_data, jenkins_build_tags):
masterdata = dict()
teamdata = dict()
for master_items in master_array:
get_master_data = master_array[master_items]
master_plugin_data = eb_check_versions(get_master_data['profile_name'], get_master_data['region_name'],
get_master_data['onlycheckifhealthy'], get_master_data['environments'],
get_master_data['onlylive'], get_master_data['slack_channel'])
if master_plugin_data:
masterdata[master_items] = master_plugin_data
for team_items in team_array:
get_team_data = team_array[team_items]
team_plugin_data = eb_check_versions(get_team_data['profile_name'], get_team_data['region_name'],
get_team_data['onlycheckifhealthy'], get_team_data['environments'],
get_team_data['onlylive'], get_team_data['slack_channel'])
compared_data = eb_compare_master_team(team_plugin_data, masterdata, superjenkins_data, jenkins_build_tags)
teamdata[team_items] = compared_data
return teamdata
| mit | 1,354,183,845,398,758,700 | 33.767647 | 158 | 0.561966 | false |
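A small sketch (not part of the ultron file above) of the token-set comparison behind build_compare_words: both names are normalised, split on underscores, and judged by their symmetric difference. The tag words are illustrative stand-ins for jenkins_build_tags.
def tokens(word):
    return set(word.lower().replace("-", "_").replace(" ", "_").split("_"))
j_tags = ["master", "release", "team"]        # hypothetical tag words
diff = tokens("Billing-Service master") ^ tokens("billing_service team")
match = (len(diff) == 2 and j_tags[0] in diff and j_tags[2] in diff) or \
        (len(diff) == 1 and (j_tags[0] in diff or j_tags[1] in diff))
print(diff, match)                            # -> {'master', 'team'} True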
portfors-lab/sparkle | sparkle/stim/types/stimuli_classes.py | 1 | 15924 | import logging
import ntpath
import os
import numpy as np
import yaml
from scipy.signal import chirp, hann, square, butter, lfilter, buttord
from sparkle.stim.abstract_component import AbstractStimulusComponent
from sparkle.tools.audiotools import audiorate, audioread, make_tone, make_carrier_tone, signal_amplitude
from sparkle.tools.exceptions import FileDoesNotExistError
class PureTone(AbstractStimulusComponent):
name = "Pure Tone"
explore = True
protocol = True
_frequency = 5000
def frequency(self):
return self._frequency
def setFrequency(self, freq):
self._frequency = freq
def signal(self, fs, atten, caldb, calv):
        tone = make_tone(self._frequency, self._intensity + atten, self._duration,
                         self._risefall, fs, caldb=caldb, calv=calv)[0]
return tone
def stateDict(self):
state = super(PureTone, self).stateDict()
state['frequency'] = self._frequency
return state
def loadState(self, state):
super(PureTone, self).loadState(state)
self._frequency = state['frequency']
def auto_details(self):
details = super(PureTone, self).auto_details()
details['frequency'] = {'unit': 'Hz', 'min': 0, 'max': 200000}
return details
def verify(self, **kwargs):
if 'samplerate' in kwargs:
if kwargs['samplerate'] / 2 < self._frequency:
return "Generation sample rate must be at least twice the stimulus frequency"
return super(PureTone, self).verify(**kwargs)
class SquareWave(PureTone):
name = "Square Wave"
_frequency = 50
_amplitude = 1
_risefall = 0
_transition = 0
def transition(self):
return self._transition
def setTransition(self, transition):
self._transition = transition
def signal(self, fs, atten, caldb, calv):
npts = int(self._duration * fs)
if self._transition == 0:
t = np.linspace(0, self._duration, npts)
sig = square(2 * np.pi * self._frequency * t)
else:
transitionpts = int(self._transition * fs)
transition = np.linspace(1, -1, transitionpts)
            halfperiod = np.ones(int(round(((1.0 / self._frequency) * fs) / 2)))
sig = []
count = 0
while len(sig) < npts:
if np.mod(count, 4) == 0:
sig = np.append(sig, -transition)
elif np.mod(count, 4) == 1:
sig = np.append(sig, halfperiod)
elif np.mod(count, 4) == 2:
sig = np.append(sig, transition)
elif np.mod(count, 4) == 3:
sig = np.append(sig, -halfperiod)
else:
pass
count += 1
# Remove extra signal
sig = sig[:npts]
# Scale sig to proper amplitude
sig = sig * (self._amplitude / 2) + (self._amplitude / 2)
if self._risefall > 0:
rf_npts = int(self._risefall * fs) / 2
wnd = hann(rf_npts * 2) # cosine taper
sig[:rf_npts] = sig[:rf_npts] * wnd[:rf_npts]
sig[-rf_npts:] = sig[-rf_npts:] * wnd[rf_npts:]
return sig
def auto_details(self):
details = super(SquareWave, self).auto_details()
# del details['risefall']
del details['intensity']
details['amplitude'] = {'unit': 'V', 'min': -10, 'max': 10.}
details['transition'] = {'unit': 's', 'min': 0, 'max': 0.1}
return details
def loadState(self, state):
super(SquareWave, self).loadState(state)
self._amplitude = state['amplitude']
self._transition = state['transition']
def stateDict(self):
state = super(SquareWave, self).stateDict()
state['amplitude'] = self._amplitude
state['transition'] = self._transition
return state
class FMSweep(AbstractStimulusComponent):
name = "FM Sweep"
_start_f = 5000
_stop_f = 1e5
explore = True
protocol = True
def startFrequency(self):
return self._start_f
def stopFrequency(self):
return self._stop_f
def setStartFrequency(self, f):
self._start_f = f
def setStopFrequency(self, f):
self._stop_f = f
def signal(self, fs, atten, caldb, calv):
amp = self.amplitude(caldb, calv)
npts = self._duration * fs
t = np.arange(npts).astype(float) / fs
signal = chirp(t, f0=self._start_f, f1=self._stop_f, t1=self._duration)
amp_scale = signal_amplitude(signal, fs)
signal = ((signal / amp_scale) * amp)
if self._risefall > 0:
rf_npts = int(self._risefall * fs) / 2
wnd = hann(rf_npts * 2) # cosine taper
signal[:rf_npts] = signal[:rf_npts] * wnd[:rf_npts]
signal[-rf_npts:] = signal[-rf_npts:] * wnd[rf_npts:]
return signal
def auto_details(self):
details = super(FMSweep, self).auto_details()
details['start_f'] = {'unit': 'Hz', 'min': 0, 'max': 200000, 'text': "Start Frequency"}
details['stop_f'] = {'unit': 'Hz', 'min': 0, 'max': 200000, 'text': "Stop Frequency"}
return details
def loadState(self, state):
super(FMSweep, self).loadState(state)
self._start_f = state['start_f']
self._stop_f = state['stop_f']
def stateDict(self):
state = super(FMSweep, self).stateDict()
state['start_f'] = self._start_f
state['stop_f'] = self._stop_f
return state
class Vocalization(AbstractStimulusComponent):
name = "Vocalization"
explore = True
protocol = True
_filename = None
_browsedir = os.path.expanduser('~')
paths = []
def browsedir(self):
return self._browsedir
def setBrowseDir(self, browsedir):
self._browsedir = browsedir
def file(self):
if self._filename is not None and self._findFile():
return self._filename
else:
return None
def samplerate(self):
if self._filename is not None and self._findFile():
return audiorate(self._filename)
def stateDict(self):
state = super(Vocalization, self).stateDict()
state['filename'] = self._filename
state['browsedir'] = self._browsedir
return state
def loadState(self, state):
super(Vocalization, self).loadState(state)
browsedir = state['browsedir']
fname = state['filename']
if os.path.isdir(browsedir):
self._browsedir = browsedir
self._filename = fname
if fname is None:
logger = logging.getLogger('main')
logger.warn('Vocalization loaded with no file defined')
# if not os.path.isdir(browsedir):
# raise FileDoesNotExistError(browsedir)
# self._browsedir = browsedir
# if not os.path.isfile(fname):
# raise FileDoesNotExistError(fname)
# self._filename = fname
def setFile(self, fname):
if fname is not None:
self._filename = fname
fs, wavdata = audioread(self._filename)
# round to the nearest ms
duration = np.trunc((float(len(wavdata)) / fs) * 1000) / 1000
self._duration = duration
def _findFile(self):
if os.path.isfile(self._filename):
return True
# If we are reviewing data, vocal files may not be in original
# location. Search paths for filename, use ntpath to be able
# to pick apart windows style paths on mac/linux
basename = ntpath.basename(self._filename)
for path in self.paths:
if os.path.isfile(os.path.join(path, basename)):
self.setFile(os.path.join(path, basename))
return True
logger = logging.getLogger('main')
logger.warn('File: {} not found'.format(basename))
return False
def signal(self, fs, atten, caldb, calv):
if self._filename is None:
# allow lack of file to not cause error, catch in GUI when necessary?
logger = logging.getLogger('main')
logger.warn('Vocalization signal request without a file')
return np.array([0, 0])
if not self._findFile():
return np.array([0, 0])
        wav_fs, wavdata = audioread(self._filename)
        if wav_fs != fs:
            print 'specified', fs, 'wav file', wav_fs
            raise Exception("specified samplerate does not match wav stimulus")
        # truncate to nearest ms
duration = float(len(wavdata)) / fs
# print 'duration {}, desired {}'.format(duration, np.trunc(duration*1000)/1000)
desired_npts = int((np.trunc(duration * 1000) / 1000) * fs)
# print 'npts. desired', len(wavdata), desired_npts
wavdata = wavdata[:desired_npts]
amp_scale = signal_amplitude(wavdata, fs)
signal = ((wavdata / amp_scale) * self.amplitude(caldb, calv))
if self._risefall > 0:
rf_npts = int(self._risefall * fs) / 2
wnd = hann(rf_npts * 2) # cosine taper
signal[:rf_npts] = signal[:rf_npts] * wnd[:rf_npts]
signal[-rf_npts:] = signal[-rf_npts:] * wnd[rf_npts:]
return signal
def auto_details(self):
details = super(Vocalization, self).auto_details()
del details['duration']
details['filename'] = {'label': 'Edit from component dialog'}
return details
def verify(self, **kwargs):
if self._filename is None:
return "Vocalization stimulus without a specified file"
return 0
def setDuration(self, dur):
raise Exception("Duration not settable on recordings")
def set(self, param, value):
if param == 'duration':
raise Exception("Duration not settable on recordings")
super(Vocalization, self).set(param, value)
class WhiteNoise(AbstractStimulusComponent):
name = "White Noise"
explore = True
protocol = True
# keeps signal same to subsequent signal() calls
_noise = np.random.normal(0, 1.0, (15e5,))
def signal(self, fs, atten, caldb, calv):
npts = self._duration * fs
signal = self._noise[:npts]
amp = self.amplitude(caldb, calv)
amp_scale = signal_amplitude(signal, fs)
signal = ((signal / amp_scale) * amp)
if self._risefall > 0:
rf_npts = int(self._risefall * fs) / 2
wnd = hann(rf_npts * 2) # cosine taper
signal[:rf_npts] = signal[:rf_npts] * wnd[:rf_npts]
signal[-rf_npts:] = signal[-rf_npts:] * wnd[rf_npts:]
# print 'signal max', np.amax(abs(signal)), amp, amp_scale, 'rms', np.sqrt(np.mean(signal**2))
return signal
class Silence(AbstractStimulusComponent):
name = "silence"
protocol = True
_risefall = 0
_intensity = 0
def auto_details(self):
details = super(Silence, self).auto_details()
less_details = {'duration': details['duration']}
return less_details
def signal(self, *args, **kwargs):
fs = kwargs['fs']
return np.zeros((self._duration * fs,))
class NoStim(AbstractStimulusComponent):
name = "OFF"
explore = True
def signal(self, fs, atten, caldb, calv):
return [0, 0]
def auto_details(self):
return {}
class BandNoise(AbstractStimulusComponent):
name = "Band noise"
explore = True
protocol = True
# keeps signal same to subsequent signal() calls
_noise = np.random.normal(0, 1.0, (15e5,))
_center_frequency = 20000
_width = 1.0 # octave = 1/_width
def signal(self, fs, atten, caldb, calv):
npts = self._duration * fs
# start with full spectrum white noise and band-pass to get desired
# frequency range
signal = self._noise[:npts]
# band frequency cutoffs
delta = 10 ** (3. / (10. * (2 * self._width)))
low_freq = self._center_frequency / delta
high_freq = self._center_frequency * delta
# scipy butter function wants frequencies normalized between 0. and 1.
nyquist = fs / 2.
low_normed = low_freq / nyquist
high_normed = high_freq / nyquist
order, wn = buttord([low_normed, high_normed], [low_normed - 0.05, high_normed + 0.05], 1, 40)
# print 'CUTOFFS', low_freq, high_freq
# print 'ORDER WN', order, wn, low_normed, high_normed
b, a = butter(order, wn, btype='band')
signal = lfilter(b, a, signal)
if self._risefall > 0:
rf_npts = int(self._risefall * fs) / 2
wnd = hann(rf_npts * 2) # cosine taper
signal[:rf_npts] = signal[:rf_npts] * wnd[:rf_npts]
signal[-rf_npts:] = signal[-rf_npts:] * wnd[rf_npts:]
return signal
def auto_details(self):
details = super(BandNoise, self).auto_details()
details['center_frequency'] = {'unit': 'Hz', 'min': 0, 'max': 200000, 'text': "Center Frequency"}
details['width'] = {'unit': 'Ocatve', 'min': 0.001, 'max': 100, 'text': "Band Width 1/"}
return details
def loadState(self, state):
super(BandNoise, self).loadState(state)
self._center_frequency = state['center_frequency']
self._width = state['width']
def stateDict(self):
state = super(BandNoise, self).stateDict()
state['center_frequency'] = self._center_frequency
state['width'] = self._width
return state
class Modulation(PureTone):
name = "modulations"
explore = False
protocol = False
_modulation = 0
_mod_frequency = 0
def modulation(self):
return self._modulation
def setModulation(self, modulation):
self._modulation = modulation
def mod_frequency(self):
return self._mod_frequency
def setModFrequency(self, mod_frequency):
self._mod_frequency = mod_frequency
def auto_details(self):
details = super(Modulation, self).auto_details()
details['mod_frequency'] = {'unit': 'Hz', 'min': 0, 'max': 200000}
details['modulation'] = {'unit': '%', 'min': 0, 'max': 100}
return details
def loadState(self, state):
super(Modulation, self).loadState(state)
self._mod_frequency = state['mod_frequency']
self._modulation = state['modulation']
def stateDict(self):
state = super(Modulation, self).stateDict()
state['mod_frequency'] = self._mod_frequency
state['modulation'] = self._modulation
return state
class SAM(Modulation):
"""Sinusoidal Amplitude Modulation"""
name = "S.A.M."
explore = True
protocol = True
_mod_frequency = 0
_modulation = 0
def signal(self, fs, atten, caldb, calv):
npts = int(self._duration * fs)
        t = np.linspace(0, self._duration, npts)
# test = (1 + (self._modulation/100) * np.cos(2 * np.pi * self._mod_frequency * t)) * np.sin(2 * np.pi * self._frequency * t)
carrier_tone = \
make_carrier_tone(self._frequency, self._intensity + atten, self._duration, fs, caldb=caldb, calv=calv)[0]
modulation_tone = np.cos(2 * np.pi * self._mod_frequency * t + np.pi)
sig = (1 + (self._modulation / 100) * modulation_tone) * carrier_tone
if self._risefall > 0:
rf_npts = int(self._risefall * fs) / 2
wnd = hann(rf_npts * 2) # cosine taper
sig[:rf_npts] = sig[:rf_npts] * wnd[:rf_npts]
sig[-rf_npts:] = sig[-rf_npts:] * wnd[rf_npts:]
return sig
class SquareWaveModulation(Modulation):
name = "squaremod"
class SFM(AbstractStimulusComponent):
"""Sinusoidal Frequency Modulation"""
name = "sfm"
class Ripples(AbstractStimulusComponent):
name = "ripples"
| gpl-3.0 | 5,100,014,708,183,515,000 | 30.975904 | 133 | 0.579942 | false |
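A stand-alone sketch (not part of the sparkle file above) of the rise/fall envelope the components apply: a Hann (cosine) taper over the first and last risefall/2 seconds of a generated tone, using the same scipy.signal.hann import as the file. Sample rate, frequency and durations are arbitrary.
import numpy as np
from scipy.signal import hann
fs, dur, f0, risefall = 44100, 0.2, 5000.0, 0.01
t = np.arange(int(dur * fs)) / float(fs)
sig = np.sin(2 * np.pi * f0 * t)
rf_npts = int(risefall * fs) // 2        # taper length per edge, in samples
wnd = hann(rf_npts * 2)
sig[:rf_npts] *= wnd[:rf_npts]           # ramp up
sig[-rf_npts:] *= wnd[rf_npts:]          # ramp down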
leandrotoledo/python-telegram-bot | tests/test_animation.py | 2 | 13717 | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2021
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
import os
from pathlib import Path
import pytest
from flaky import flaky
from telegram import PhotoSize, Animation, Voice, TelegramError, MessageEntity, Bot
from telegram.error import BadRequest
from telegram.utils.helpers import escape_markdown
from tests.conftest import check_shortcut_call, check_shortcut_signature, check_defaults_handling
@pytest.fixture(scope='function')
def animation_file():
f = open('tests/data/game.gif', 'rb')
yield f
f.close()
@pytest.fixture(scope='class')
def animation(bot, chat_id):
with open('tests/data/game.gif', 'rb') as f:
return bot.send_animation(
chat_id, animation=f, timeout=50, thumb=open('tests/data/thumb.jpg', 'rb')
).animation
class TestAnimation:
animation_file_id = 'CgADAQADngIAAuyVeEez0xRovKi9VAI'
animation_file_unique_id = 'adc3145fd2e84d95b64d68eaa22aa33e'
width = 320
height = 180
duration = 1
# animation_file_url = 'https://python-telegram-bot.org/static/testfiles/game.gif'
# Shortened link, the above one is cached with the wrong duration.
animation_file_url = 'http://bit.ly/2L18jua'
file_name = 'game.gif.mp4'
mime_type = 'video/mp4'
file_size = 4127
caption = "Test *animation*"
def test_slot_behaviour(self, animation, recwarn, mro_slots):
for attr in animation.__slots__:
assert getattr(animation, attr, 'err') != 'err', f"got extra slot '{attr}'"
assert not animation.__dict__, f"got missing slot(s): {animation.__dict__}"
assert len(mro_slots(animation)) == len(set(mro_slots(animation))), "duplicate slot"
animation.custom, animation.file_name = 'should give warning', self.file_name
assert len(recwarn) == 1 and 'custom' in str(recwarn[0].message), recwarn.list
def test_creation(self, animation):
assert isinstance(animation, Animation)
assert isinstance(animation.file_id, str)
assert isinstance(animation.file_unique_id, str)
assert animation.file_id != ''
assert animation.file_unique_id != ''
def test_expected_values(self, animation):
assert animation.file_size == self.file_size
assert animation.mime_type == self.mime_type
assert animation.file_name == self.file_name
assert isinstance(animation.thumb, PhotoSize)
@flaky(3, 1)
def test_send_all_args(self, bot, chat_id, animation_file, animation, thumb_file):
message = bot.send_animation(
chat_id,
animation_file,
duration=self.duration,
width=self.width,
height=self.height,
caption=self.caption,
parse_mode='Markdown',
disable_notification=False,
thumb=thumb_file,
)
assert isinstance(message.animation, Animation)
assert isinstance(message.animation.file_id, str)
assert isinstance(message.animation.file_unique_id, str)
assert message.animation.file_id != ''
assert message.animation.file_unique_id != ''
assert message.animation.file_name == animation.file_name
assert message.animation.mime_type == animation.mime_type
assert message.animation.file_size == animation.file_size
assert message.animation.thumb.width == self.width
assert message.animation.thumb.height == self.height
@flaky(3, 1)
def test_send_animation_custom_filename(self, bot, chat_id, animation_file, monkeypatch):
def make_assertion(url, data, **kwargs):
return data['animation'].filename == 'custom_filename'
monkeypatch.setattr(bot.request, 'post', make_assertion)
assert bot.send_animation(chat_id, animation_file, filename='custom_filename')
monkeypatch.delattr(bot.request, 'post')
@flaky(3, 1)
def test_get_and_download(self, bot, animation):
new_file = bot.get_file(animation.file_id)
assert new_file.file_size == self.file_size
assert new_file.file_id == animation.file_id
assert new_file.file_path.startswith('https://')
new_file.download('game.gif')
assert os.path.isfile('game.gif')
@flaky(3, 1)
def test_send_animation_url_file(self, bot, chat_id, animation):
message = bot.send_animation(
chat_id=chat_id, animation=self.animation_file_url, caption=self.caption
)
assert message.caption == self.caption
assert isinstance(message.animation, Animation)
assert isinstance(message.animation.file_id, str)
assert isinstance(message.animation.file_unique_id, str)
assert message.animation.file_id != ''
assert message.animation.file_unique_id != ''
assert message.animation.duration == animation.duration
assert message.animation.file_name == animation.file_name
assert message.animation.mime_type == animation.mime_type
assert message.animation.file_size == animation.file_size
@flaky(3, 1)
def test_send_animation_caption_entities(self, bot, chat_id, animation):
test_string = 'Italic Bold Code'
entities = [
MessageEntity(MessageEntity.ITALIC, 0, 6),
MessageEntity(MessageEntity.ITALIC, 7, 4),
MessageEntity(MessageEntity.ITALIC, 12, 4),
]
message = bot.send_animation(
chat_id, animation, caption=test_string, caption_entities=entities
)
assert message.caption == test_string
assert message.caption_entities == entities
@flaky(3, 1)
@pytest.mark.parametrize('default_bot', [{'parse_mode': 'Markdown'}], indirect=True)
def test_send_animation_default_parse_mode_1(self, default_bot, chat_id, animation_file):
test_string = 'Italic Bold Code'
test_markdown_string = '_Italic_ *Bold* `Code`'
message = default_bot.send_animation(chat_id, animation_file, caption=test_markdown_string)
assert message.caption_markdown == test_markdown_string
assert message.caption == test_string
@flaky(3, 1)
@pytest.mark.parametrize('default_bot', [{'parse_mode': 'Markdown'}], indirect=True)
def test_send_animation_default_parse_mode_2(self, default_bot, chat_id, animation_file):
test_markdown_string = '_Italic_ *Bold* `Code`'
message = default_bot.send_animation(
chat_id, animation_file, caption=test_markdown_string, parse_mode=None
)
assert message.caption == test_markdown_string
assert message.caption_markdown == escape_markdown(test_markdown_string)
@flaky(3, 1)
@pytest.mark.parametrize('default_bot', [{'parse_mode': 'Markdown'}], indirect=True)
def test_send_animation_default_parse_mode_3(self, default_bot, chat_id, animation_file):
test_markdown_string = '_Italic_ *Bold* `Code`'
message = default_bot.send_animation(
chat_id, animation_file, caption=test_markdown_string, parse_mode='HTML'
)
assert message.caption == test_markdown_string
assert message.caption_markdown == escape_markdown(test_markdown_string)
def test_send_animation_local_files(self, monkeypatch, bot, chat_id):
# For just test that the correct paths are passed as we have no local bot API set up
test_flag = False
expected = (Path.cwd() / 'tests/data/telegram.jpg/').as_uri()
file = 'tests/data/telegram.jpg'
def make_assertion(_, data, *args, **kwargs):
nonlocal test_flag
test_flag = data.get('animation') == expected and data.get('thumb') == expected
monkeypatch.setattr(bot, '_post', make_assertion)
bot.send_animation(chat_id, file, thumb=file)
assert test_flag
monkeypatch.delattr(bot, '_post')
@flaky(3, 1)
@pytest.mark.parametrize(
'default_bot,custom',
[
({'allow_sending_without_reply': True}, None),
({'allow_sending_without_reply': False}, None),
({'allow_sending_without_reply': False}, True),
],
indirect=['default_bot'],
)
def test_send_animation_default_allow_sending_without_reply(
self, default_bot, chat_id, animation, custom
):
reply_to_message = default_bot.send_message(chat_id, 'test')
reply_to_message.delete()
if custom is not None:
message = default_bot.send_animation(
chat_id,
animation,
allow_sending_without_reply=custom,
reply_to_message_id=reply_to_message.message_id,
)
assert message.reply_to_message is None
elif default_bot.defaults.allow_sending_without_reply:
message = default_bot.send_animation(
chat_id, animation, reply_to_message_id=reply_to_message.message_id
)
assert message.reply_to_message is None
else:
with pytest.raises(BadRequest, match='message not found'):
default_bot.send_animation(
chat_id, animation, reply_to_message_id=reply_to_message.message_id
)
@flaky(3, 1)
def test_resend(self, bot, chat_id, animation):
message = bot.send_animation(chat_id, animation.file_id)
assert message.animation == animation
def test_send_with_animation(self, monkeypatch, bot, chat_id, animation):
def test(url, data, **kwargs):
return data['animation'] == animation.file_id
monkeypatch.setattr(bot.request, 'post', test)
message = bot.send_animation(animation=animation, chat_id=chat_id)
assert message
def test_de_json(self, bot, animation):
json_dict = {
'file_id': self.animation_file_id,
'file_unique_id': self.animation_file_unique_id,
'width': self.width,
'height': self.height,
'duration': self.duration,
'thumb': animation.thumb.to_dict(),
'file_name': self.file_name,
'mime_type': self.mime_type,
'file_size': self.file_size,
}
animation = Animation.de_json(json_dict, bot)
assert animation.file_id == self.animation_file_id
assert animation.file_unique_id == self.animation_file_unique_id
assert animation.file_name == self.file_name
assert animation.mime_type == self.mime_type
assert animation.file_size == self.file_size
def test_to_dict(self, animation):
animation_dict = animation.to_dict()
assert isinstance(animation_dict, dict)
assert animation_dict['file_id'] == animation.file_id
assert animation_dict['file_unique_id'] == animation.file_unique_id
assert animation_dict['width'] == animation.width
assert animation_dict['height'] == animation.height
assert animation_dict['duration'] == animation.duration
assert animation_dict['thumb'] == animation.thumb.to_dict()
assert animation_dict['file_name'] == animation.file_name
assert animation_dict['mime_type'] == animation.mime_type
assert animation_dict['file_size'] == animation.file_size
@flaky(3, 1)
def test_error_send_empty_file(self, bot, chat_id):
animation_file = open(os.devnull, 'rb')
with pytest.raises(TelegramError):
bot.send_animation(chat_id=chat_id, animation=animation_file)
@flaky(3, 1)
def test_error_send_empty_file_id(self, bot, chat_id):
with pytest.raises(TelegramError):
bot.send_animation(chat_id=chat_id, animation='')
def test_error_send_without_required_args(self, bot, chat_id):
with pytest.raises(TypeError):
bot.send_animation(chat_id=chat_id)
def test_get_file_instance_method(self, monkeypatch, animation):
def make_assertion(*_, **kwargs):
return kwargs['file_id'] == animation.file_id
assert check_shortcut_signature(Animation.get_file, Bot.get_file, ['file_id'], [])
assert check_shortcut_call(animation.get_file, animation.bot, 'get_file')
assert check_defaults_handling(animation.get_file, animation.bot)
monkeypatch.setattr(animation.bot, 'get_file', make_assertion)
assert animation.get_file()
def test_equality(self):
a = Animation(
self.animation_file_id,
self.animation_file_unique_id,
self.height,
self.width,
self.duration,
)
b = Animation('', self.animation_file_unique_id, self.height, self.width, self.duration)
d = Animation('', '', 0, 0, 0)
e = Voice(self.animation_file_id, self.animation_file_unique_id, 0)
assert a == b
assert hash(a) == hash(b)
assert a is not b
assert a != d
assert hash(a) != hash(d)
assert a != e
assert hash(a) != hash(e)
| lgpl-3.0 | -3,698,455,636,162,138,000 | 39.703264 | 99 | 0.641467 | false |
selboo/starl-mangle | switch/post_security.py | 1 | 14058 | #!/usr/bin/env python
#coding=utf-8
import logging
import os.path
import re
import sys
import telnetlib
from compiler.ast import Print
from types import BooleanType
import os,time,paramiko,optparse
debugging = 0
log = logging.getLogger()
#hdlr = logging.StreamHandler( sys.stdout )
#formatter = log.Formatter( '%(asctime)s %(levelname)s %(message)s' )
#hdlr.setFormatter( formatter )
#log.addHandler( hdlr )
hdlr = logging.StreamHandler( sys.stdout )
formatter = logging.Formatter( '%(asctime)s %(levelname)s %(message)s', '%T' )
hdlr.setFormatter( formatter )
log.addHandler( hdlr )
swcorehost = "192.168.192.1"
morestr = "--More--"
morestr_asa = "<--- More --->"
endstr = ">"
#change_switchip = raw_input( "please enter switchip (192.168.192.1-6): " )
#change_port = raw_input( "please enter port (0/1-24) : " )
#change_vlan = raw_input( "please enter vlan (3,4,100,50,60,250,): " )
'''Initialize telnet connection information'''
ciscoSpstr = '\x08\x08\x08\x08\x08\x08\x08\x08\x08 \x08\x08\x08\x08\x08\x08\x08\x08\x08'
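# Regular expressions for parsing Cisco IOS command output (ARP entries, MAC
# address table rows, CDP neighbours, interface and VLAN names).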
_mac_address_re = re.compile( "([0-9A-Fa-f]{4}.[0-9A-Fa-f]{4}.[0-9A-Fa-f]{4})" )
_vlan_num_re = re.compile( "Vlan\d{1,3}" )
#_faEth_re = re.compile( '(FastEthernet\d*/\d*[/\d*]{0,2})' )
_faEth_re = re.compile( 'FastEthernet\d*/\d*[/\d*]{0,}' )
_interface_re = re.compile( '(Fa\d*/\d*[/\d*]{0,})' )
#_vlan_faEth_re = re.compile ( '(Vlan\d{1,3})|(FastEthernet\d*/\d*[/\d*]{0,2})' )
_vlan_faEth_re = re.compile ( 'Vlan\d{1,3}|FastEthernet\d*/\d*[/\d*]{0,}' )
_ip_mac_address_reall = re.compile( "(\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3})[\d\D]*([0-9A-Fa-f]{4}.[0-9A-Fa-f]{4}.[0-9A-Fa-f]{4})" )
_ip_mac_address_re = re.compile( "(\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3})[\d\D]*([0-9A-Fa-f]{4}.[0-9A-Fa-f]{4}.[0-9A-Fa-f]{4})" )
_ip_address_re = re.compile( "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}" )
#_ip_address_re = re.compile( "\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}" )
#_ip_address_re = re.compile( "(\d{1,3}).(\d{1,3}).(\d{1,3}).(\d{1,3})" )
#_ip_address_reall = re.compile( "(\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3})" )
_cdp_ne_re = re.compile( "([A-Za-z]+?[0-9]{0,})[' Fas ']+?(\d*/\d*[/\d*]{0,})" )
_mac_address_table_re = re.compile( "([0-9]+?)[' ']+?([0-9A-Fa-f]{4}.[0-9A-Fa-f]{4}.[0-9A-Fa-f]{4})[\d\D]+?(Fa\d*/\d*[/\d*]{0,})" )
_cdpEntry_IP_re = re.compile( "IP address:[\d\D]+?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" )
def telnetReplaceStr ( stringSrc, strRps ):
strNew = stringSrc.replace( strRps, '' )
return strNew
class telnetXKCS:
'''
xkcs telnet class
'''
def __init__( self , host , userid , passwd , port , enpasswd , debug = debugging):
'''
init telnet
'''
self.host = host
self.userid = userid
self.passwd = passwd
self.port = port
self.enpasswd = enpasswd
self.tn = telnetlib.Telnet()
self.tn.set_debuglevel( debug )
self.endstr = '>'
# self.proxy_status = False
# self.telnetInit()
def telnetInit( self ):
#print "telnet %s " % ( host )
self.tn.open( self.host, self.port )
self.telnetCoreConnect()
	def telnet_switch_init( self, host=None ):
		#print "telnet %s " % ( host )
		# Accept an optional host so one session object can be pointed at a
		# different switch (the helper functions below rely on this).
		if host is not None:
			self.host = host
		self.tn.open( self.host, self.port )
		self.telnetCoreConnect()
"""
print host
print userid
print passwd
print port
"""
def telnet_switch_connect( self ):
#tn.open( change_switchip, telnet_port )
log.info( "telnet Core Connect starting->username" )
self.tn.read_until( "Username:" )
self.tn.write( self.userid + "\n" )
log.info( "telnet Core Connect starting->username" )
self.tn.read_until( "Password:" )
self.tn.write( self.passwd + "\n" )
self.tn.read_until( ">" )
log.info( "telnet Core Connect done" )
	def telnetProxyConnect( self, host=None ):
		#print "proxy telnet:%s" % ( host )
		# Telnet onward from the core switch to the edge switch given by host.
		self.proxy_status = True
		if host is not None:
			self.host = host
		self.tn.write( 'telnet %s \n' % ( self.host ) )
		self.telnetCoreConnect()
def telnetCoreConnect( self ):
#tn.open( change_switchip, telnet_port )
log.info( "telnet Core Connect starting->username" )
self.tn.read_until( "Username:" )
self.tn.write( self.userid + "\n" )
log.info( "telnet Core Connect starting->username" )
self.tn.read_until( "Password:" )
self.tn.write( self.passwd + "\n" )
self.tn.read_until( ">" )
# return tn
log.info( "telnet Core Connect done" )
def telnetCiscoenable( self ):
self.endstr = '#'
self.tn.write( "en\n" )
		'''Enable (privileged mode) password'''
self.tn.read_until( "Password:" )
self.tn.write( self.enpasswd + "\n" )
self.tn.read_until( '#' )
log.info( "telnet Cisco Enable Mode done" )
def telnetcmdmor( self, cmd, Save_Name = None ):
if Save_Name == None:
self.tn.write( cmd + '\n' )
else:
self.tn.write( cmd + '\n' + Save_Name + '\n' )
self.tn.read_until( "[confirm]" )
self.tn.write( "\n" )
log.debug( "start more" )
message = ''
message_list = ''
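		# Page through long command output: send a space whenever the switch shows
		# a "--More--" prompt, and stop once the CLI prompt character appears.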
while 1:
message = self.tn.expect( [morestr, self.endstr], 5 )
# print "mes:", message
# print "me2", message[2]
message_list += telnetReplaceStr( telnetReplaceStr( message[2], ciscoSpstr ), morestr )
if str( message[2] ).find( self.endstr ) >= 0:
# print "message find # ,break"
break
elif str( message[2] ).find( morestr ) >= 0:
# print "more..."
self.tn.write( ' ' )
elif str( message[2] ).find( morestr_asa ) >= 0:
self.tn.write( ' ' )
log.debug( "END message:%s" % ( message_list ) )
return message_list
def telnetFinal( self ):
# if self.proxy_status:
# self.telnetcmdmor( 'exit \n' )
self.telnetcmdmor( 'exit \n' )
self.tn.close()
def reSearchIPandMac( re_rule, linestr ):
match = re_rule.findall( linestr )
if match:
return match[0]
def interfaceport ( arpstr ):
ipDic = []
for line in arpstr:
if len( line ) < 5:
continue
elif line.find ( 'Protocol' ) >= 0:
continue
elif line.find ( endstr ) >= 0:
continue
else:
interface = reSearchIPandMac( _interface_re, line )
return interface
def switchip ( arpstr ):
ipDic = []
for line in arpstr:
if len( line ) < 5:
continue
elif line.find ( 'Protocol' ) >= 0:
continue
elif line.find ( endstr ) >= 0:
continue
else:
switchip = reSearchIPandMac( _cdpEntry_IP_re, line )
if switchip == None:
switchip = swcorehost
return switchip
def switchinfo (interface, switchinfo):
switchname = "core"
for switcher, faport in switchinfo:
if "Fa"+faport == interface:
switchname = switcher
else:
continue
return switchname
def usage():
help_msg = '''
	--ip		IP address to look up
	--arpbind	ARP bind
	--postbind	Cisco Ethernet port-security bind
	--vlanid	Exchange VLAN ID
	--save		Save switch config
	--debug		Debug info
	Example:
'''
print help_msg
print "\t%s --ip 192.168.80.100 --arpbind --postbind --vlanid 100" %sys.argv[0]
def arpIpMAc( arpstr ):
for line in arpstr:
if len( line ) < 5:
continue
elif line.find ( 'Protocol' ) >= 0:
continue
elif line.find ( endstr ) >= 0:
continue
else:
ipaddress = reSearchIPandMac( _ip_address_re, line )
macaddress = reSearchIPandMac( _mac_address_re, line )
vlanFaEth = reSearchIPandMac( _vlan_faEth_re, line )
return ipaddress,macaddress,vlanFaEth
def Post_Security (macaddress, switchipadd, switchinterface):
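	# Re-apply port-security on the edge-switch interface so that only the given
	# MAC address is allowed (maximum 1 address, violation -> shutdown).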
swcore.telnet_switch_init(switchipadd)
swcore.telnetCiscoenable()
swcore.telnetcmdmor('configure terminal')
swcore.telnetcmdmor('interface %s' % switchinterface)
swcore.telnetcmdmor('no switchport port-security violation shutdown')
swcore.telnetcmdmor('no switchport port-security maximum 1')
swcore.telnetcmdmor('no switchport port-security mac-address')
swcore.telnetcmdmor('no switchport port-security')
swcore.telnetcmdmor('switchport port-security')
swcore.telnetcmdmor('switchport port-security mac-address %s' % macaddress)
swcore.telnetcmdmor('switchport port-security maximum 1')
swcore.telnetcmdmor('switchport port-security violation shutdown')
swcore.telnetcmdmor('exit')
swcore.telnetcmdmor('exit')
Status = swcore.telnetcmdmor('show running-config interface %s' % switchinterface)
head = '|===================Post Bind Info===================|\n'
tail = '|====================================================|'
message = head + Status + "\n" + tail
return message
def Arp_Bind (ip, mac):
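	# Bind the IP to the MAC on the core switch: remove any stale static ARP
	# entries still pointing at this MAC, then add a fresh static ARPA entry.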
swcore.telnet_switch_init(swcorehost)
swcore.telnetCiscoenable()
ipaddress_numb = swcore.telnetcmdmor('show arp | in %s' % mac)
ipaddress_list = _ip_address_re.findall(ipaddress_numb)
swcore.telnetcmdmor('configure terminal')
for iplist in ipaddress_list:
swcore.telnetcmdmor('no arp %s' % iplist)
Status = swcore.telnetcmdmor('arp %s %s ARPA' %(ip, mac))
head = '|===================Arp Bind Info=======================|\n'
tail = '|=======================================================|'
message = head + "core(config)#" + Status + "\n" + tail
return message
def Exchange_Vlan (vlanid, switchipadd, switchinterface):
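	# Move the switch port into the requested VLAN and return the resulting
	# interface configuration as confirmation.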
switchinterface = "Fa0/9"
swcore.telnet_switch_init(switchipadd)
swcore.telnetCiscoenable()
swcore.telnetcmdmor('configure terminal')
swcore.telnetcmdmor('interface %s' % switchinterface)
swcore.telnetcmdmor('switchport access vlan %s' % vlanid)
swcore.telnetcmdmor('exit')
swcore.telnetcmdmor('exit')
Status = swcore.telnetcmdmor('show running-config interface %s' % switchinterface)
	head = '|===================Exchange Vlan Info==================|\n'
tail = '|=======================================================|'
message = head + Status + "\n" + tail
return message
def Save_Switch (switchipadd):
Save_Date = time.strftime('%Y-%m-%d',time.localtime(time.time()))
swcore.telnet_switch_init(switchipadd)
swcore.telnetCiscoenable()
swcore.telnetcmdmor('copy running-config flash:', Save_Date + '-running-config')
Status = swcore.telnetcmdmor('write ')
head = '|===================Save Switch Config===================|\n'
tail = '|=======================================================|'
message = head + Status + "\n" + tail
return message
def get_info (ip):
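	# Resolve an IP address to (MAC, VLAN, edge switch name, switch IP, interface)
	# by querying the core switch ARP/MAC tables and CDP neighbour information.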
ipinfo = swcore.telnetcmdmor('show arp | in %s ' % ip)
ipaddress,macaddress,vlanFaEth = arpIpMAc( ipinfo.split( '\r\n' ) )
if ( str(macaddress) != 'None' ):
ipinfo_1 = swcore.telnetcmdmor('show mac address-table | in %s ' % macaddress)
interface = interfaceport( ipinfo_1.split( '\r\n' ) )
else:
interface = 'None'
if ( str(interface) != 'None' ):
ipinfo_2 = swcore.telnetcmdmor('show cdp neighbors')
switchcdp = _cdp_ne_re.findall(ipinfo_2)
switchname = switchinfo(interface,switchcdp)
else:
switchname = 'None'
if ( str(switchname) != 'None'):
ipinfo_3 = swcore.telnetcmdmor('show cdp entry %s ' % switchname)
switchipadd = switchip( ipinfo_3.split( '\r\n' ) )
else:
switchipadd = 'None'
if ( str(switchipadd) != 'None'):
if switchipadd == swcorehost:
ipinfo_4 = swcore.telnetcmdmor( 'sh mac address-table | in %s ' % ( macaddress ) )
else:
swcore.telnetProxyConnect( switchipadd )
ipinfo_4 = swcore.telnetcmdmor( 'sh mac address-table | in %s ' % ( macaddress ) )
switchinfoto = _mac_address_table_re.findall( ipinfo_4 )
vlanid,mac,switchinterface = switchinfoto[0]
else:
vlanid = 'None'
switchinterface = 'None'
return ip,macaddress,vlanid,switchname,switchipadd,switchinterface
if __name__ == '__main__':
opt = optparse.OptionParser()
opt.add_option('--ip')
opt.add_option("--arpbind", action="store_true", dest="arpbind", default=False)
opt.add_option('--postbind', action="store_true", dest="postbind", default=False)
opt.add_option('--vlanid')
opt.add_option('--debug', default = 'info')
opt.add_option('--save', action="store_true", dest="save", default=False)
options, arguments = opt.parse_args()
if not (options.ip):
usage()
sys.exit(1)
swcore = telnetXKCS()
swcore.telnetInit()
ip,macaddress,vlanid,switchname,switchipadd,switchinterface = get_info (options.ip)
def debug_info():
print '|-------------------Debug Info------------------|'
print '| IP Address:\t\t %s\t\t|' % ip
print '| MAC Address:\t\t %s\t\t|' % macaddress
print '| VLAN Number:\t\t %s\t\t\t|' % vlanid
print '| Swith Name:\t\t %s\t\t|' % switchname
print '| Swith IP:\t\t %s\t\t|' % switchipadd
print '| Swith Ethernet:\t %s\t\t\t|' % switchinterface
print '|-----------------------------------------------|'
debug_info()
if options.arpbind == True:
print Arp_Bind(ip, macaddress)
if options.postbind == True:
print Post_Security(macaddress, switchipadd, switchinterface)
if options.vlanid != None:
print Exchange_Vlan(options.vlanid, switchipadd, switchinterface)
if options.save == True:
print Save_Switch(switchipadd)
| apache-2.0 | -5,841,469,361,818,844,000 | 33.496314 | 131 | 0.556695 | false |
tbenthompson/tectosaur | tests/test_find_near_adj.py | 1 | 3789 | import numpy as np
import tectosaur.mesh.mesh_gen as mesh_gen
from tectosaur.mesh.find_near_adj import *
from tectosaur.util.test_decorators import golden_master
import logging
logger = logging.getLogger(__name__)
def find_adjacents(tris):
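    # Generate random vertex positions for the given triangles and split the
    # close pairs into vertex-adjacent (va) and edge-adjacent (ea) pairs.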
pts = np.random.rand(np.max(tris) + 1, 3)
close_pairs = find_close_or_touching(pts, tris, pts, tris, 1.0)
close, va, ea = split_adjacent_close(close_pairs, tris, tris)
return va, ea
tris = np.array([[0, 1, 2], [2, 1, 3], [0, 4, 5]])
def test_find_adjacents():
va, ea = find_adjacents(tris)
assert(va.size == 8)
assert(ea.size == 4)
assert(np.all(va.flatten() == (0, 2, 0, 0, 2, 0, 0, 0)))
assert(np.all(ea.flatten() == (0, 1, 1, 0)))
def test_nearfield():
corners = [[-1, -1, 0], [1, -1, 0], [1, 1, 0], [-1, 1, 0]]
pts, tris = mesh_gen.make_rect(3, 3, corners)
assert(tris.shape[0] == 8)
close_pairs = find_close_or_touching(pts, tris, pts, tris, 1.0)
close, va, ea = split_adjacent_close(close_pairs, tris, tris)
check_for = [
(0, 5), (0, 6), (0, 3), (1, 7), (2, 7), (3, 0),
(4, 7), (5, 0), (6, 0), (7, 2), (7, 1), (7, 4)
]
assert(len(close) == len(check_for))
for pair in check_for:
assert(pair in close)
@golden_master()
def test_find_close_notself(request):
corners = [[-1, -1, 0], [1, -1, 0], [1, 1, 0], [-1, 1, 0]]
m = mesh_gen.make_rect(2, 2, corners)
threshold = 1.0
obs_pts = np.array([0.5 * np.ones(5), 0.5 * np.ones(5), np.linspace(0, 2, 5)]).T.copy()
out = fast_find_nearfield.get_nearfield(
obs_pts, np.zeros(obs_pts.shape[0]), *get_tri_centroids_rs(*m), threshold, 50
)
return out
@golden_master()
def test_close_or_touching(request):
n = 20
pts, tris = mesh_gen.make_rect(n, n, [[-1, -1, 0], [1, -1, 0], [1, 1, 0], [-1, 1, 0]])
near_pairs = find_close_or_touching(pts, tris, pts, tris, 1.25)
return np.sort(near_pairs, axis = 0)
@golden_master()
def test_find_nearfield_real(request):
n = 20
pts, tris = mesh_gen.make_rect(n, n, [[-1, -1, 0], [1, -1, 0], [1, 1, 0], [-1, 1, 0]])
va, ea = find_adjacents(tris)
close_pairs = find_close_or_touching(pts, tris, pts, tris, 1.25)
close, va, ea = split_adjacent_close(close_pairs, tris, tris)
return close[np.lexsort([close[:,1],close[:,0]], axis = 0)].flatten()
@golden_master()
def test_find_adjacency(request):
m = mesh_gen.make_sphere([0,0,0], 1.0, 3)
close_pairs = find_close_or_touching(*m, *m, 1.25)
close, va, ea = split_adjacent_close(close_pairs, m[1], m[1])
# va, ea = adjacency.find_adjacents(m)
va = np.array(va)
ea = np.array(ea)
all = np.zeros((va.shape[0] + ea.shape[0], 4),)
all[:va.shape[0],:4] = va
all[va.shape[0]:,:2] = ea
sorted_idxs = np.lexsort([all[:,1], all[:,0]], axis = 0)
all_sorted = all[sorted_idxs,:]
return all_sorted
def benchmark_find_nearfield():
corners = [[-1, -1, 0], [1, -1, 0], [1, 1, 0], [-1, 1, 0]]
nx = ny = 707
pts, tris = mesh_gen.make_rect(nx, ny, corners)
print('n_tris: ' + str(tris.shape[0]))
# va, ea = adjacency.find_adjacents(tris)
    near_pairs = find_close_or_touching(pts, tris, pts, tris, 1.25)
def benchmark_adjacency():
from tectosaur.mesh.mesh_gen import make_rect
from tectosaur.util.timer import Timer
L = 8
nx = ny = int(2 ** L / np.sqrt(2))
t = Timer()
m = make_rect(nx, ny, [[-1, -1, 0], [-1, 1, 0], [1, 1, 0], [1, -1, 0]])
logger.debug('n_tris: ' + str(m[1].shape[0]))
t.report('make')
    close_pairs = find_close_or_touching(m[0], m[1], m[0], m[1], 1.25)
t.report('close or touching')
    close, va, ea = split_adjacent_close(close_pairs, m[1], m[1])
t.report('find adj')
if __name__ == "__main__":
benchmark_adjacency()
# benchmark_find_nearfield()
| mit | 2,283,117,319,817,519,900 | 35.085714 | 91 | 0.574558 | false |
BuildingLink/sentry | tests/sentry/utils/test_data_scrubber.py | 1 | 11947 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from sentry.constants import FILTER_MASK
from sentry.testutils import TestCase
from sentry.utils.data_scrubber import SensitiveDataFilter
VARS = {
'foo': 'bar',
'password': 'hello',
'the_secret': 'hello',
'a_password_here': 'hello',
'api_key': 'secret_key',
'apiKey': 'secret_key',
}
PUBLIC_KEY = """-----BEGIN PUBLIC KEY-----
MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA6A6TQjlPyMurLh/igZY4
izA9sJgeZ7s5+nGydO4AI9k33gcy2DObZuadWRMnDwc3uH/qoAPw/mo3KOcgEtxU
xdwiQeATa3HVPcQDCQiKm8xIG2Ny0oUbR0IFNvClvx7RWnPEMk05CuvsL0AA3eH5
xn02Yg0JTLgZEtUT3whwFm8CAwEAAQ==
-----END PUBLIC KEY-----"""
PRIVATE_KEY = """-----BEGIN PRIVATE KEY-----
MIIJRAIBADANBgkqhkiG9w0BAQEFAASCCS4wggkqAgEAAoICAQCoNFY4P+EeIXl0
mLpO+i8uFqAaEFQ8ZX2VVpA13kNEHuiWXC3HPlQ+7G+O3XmAsO+Wf/xY6pCSeQ8h
mLpO+i8uFqAaEFQ8ZX2VVpA13kNEHuiWXC3HPlQ+7G+O3XmAsO+Wf/xY6pCSeQ8h
-----END PRIVATE KEY-----"""
ENCRYPTED_PRIVATE_KEY = """-----BEGIN ENCRYPTED PRIVATE KEY-----
MIIJjjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIWVhErdQOFVoCAggA
IrlYQUV1ig4U3viYh1Y8viVvRlANKICvgj4faYNH36UterkfDjzMonb/cXNeJEOS
YgorM2Pfuec5vtPRPKd88+Ds/ktIlZhjJwnJjHQMX+lSw5t0/juna2sLH2dpuAbi
PSk=
-----END ENCRYPTED PRIVATE KEY-----"""
RSA_PRIVATE_KEY = """-----BEGIN RSA PRIVATE KEY-----
+wn9Iu+zgamKDUu22xc45F2gdwM04rTITlZgjAs6U1zcvOzGxk8mWJD5MqFWwAtF
zN87YGV0VMTG6ehxnkI4Fg6i0JPU3QIDAQABAoICAQCoCPjlYrODRU+vd2YeU/gM
THd+9FBxiHLGXNKhG/FRSyREXEt+NyYIf/0cyByc9tNksat794ddUqnLOg0vwSkv
-----END RSA PRIVATE KEY-----"""
class SensitiveDataFilterTest(TestCase):
def _check_vars_sanitized(self, vars, proc):
"""
Helper to check that keys have been sanitized.
"""
assert 'foo' in vars
assert vars['foo'] == 'bar'
assert 'password' in vars
assert vars['password'] == FILTER_MASK
assert 'the_secret' in vars
assert vars['the_secret'] == FILTER_MASK
assert 'a_password_here' in vars
assert vars['a_password_here'] == FILTER_MASK
assert 'api_key' in vars
assert vars['api_key'] == FILTER_MASK
assert 'apiKey' in vars
assert vars['apiKey'] == FILTER_MASK
def test_stacktrace(self):
data = {
'sentry.interfaces.Stacktrace': {
'frames': [{'vars': VARS}],
}
}
proc = SensitiveDataFilter()
proc.apply(data)
assert 'sentry.interfaces.Stacktrace' in data
stack = data['sentry.interfaces.Stacktrace']
assert 'frames' in stack
assert len(stack['frames']) == 1
frame = stack['frames'][0]
assert 'vars' in frame
self._check_vars_sanitized(frame['vars'], proc)
def test_http(self):
data = {
'sentry.interfaces.Http': {
'data': VARS,
'env': VARS,
'headers': list(VARS.items()),
'cookies': VARS,
}
}
proc = SensitiveDataFilter()
proc.apply(data)
assert 'sentry.interfaces.Http' in data
http = data['sentry.interfaces.Http']
for n in ('data', 'env', 'cookies'):
assert n in http
self._check_vars_sanitized(http[n], proc)
assert 'headers' in http
self._check_vars_sanitized(dict(http['headers']), proc)
def test_user(self):
data = {
'sentry.interfaces.User': {
'username': 'secret',
'data': VARS,
},
}
proc = SensitiveDataFilter()
proc.apply(data)
assert 'sentry.interfaces.User' in data
assert data['sentry.interfaces.User']['username'] == 'secret'
self._check_vars_sanitized(data['sentry.interfaces.User']['data'], proc)
def test_extra(self):
data = {
'extra': VARS
}
proc = SensitiveDataFilter()
proc.apply(data)
assert 'extra' in data
self._check_vars_sanitized(data['extra'], proc)
def test_contexts(self):
data = {
'contexts': {
'secret': VARS,
'biz': VARS,
},
}
proc = SensitiveDataFilter()
proc.apply(data)
assert 'contexts' in data
assert 'secret' in data['contexts']
assert 'biz' in data['contexts']
self._check_vars_sanitized(data['contexts']['secret'], proc)
self._check_vars_sanitized(data['contexts']['biz'], proc)
def test_querystring_as_string(self):
data = {
'sentry.interfaces.Http': {
'query_string': 'foo=bar&password=hello&the_secret=hello'
'&a_password_here=hello&api_key=secret_key',
}
}
proc = SensitiveDataFilter()
proc.apply(data)
assert 'sentry.interfaces.Http' in data
http = data['sentry.interfaces.Http']
assert http['query_string'] == (
'foo=bar&password=%(m)s&the_secret=%(m)s'
'&a_password_here=%(m)s&api_key=%(m)s' % {'m': FILTER_MASK}
)
def test_querystring_as_string_with_partials(self):
data = {
'sentry.interfaces.Http': {
'query_string': 'foo=bar&password&baz=bar',
}
}
proc = SensitiveDataFilter()
proc.apply(data)
assert 'sentry.interfaces.Http' in data
http = data['sentry.interfaces.Http']
assert http['query_string'] == 'foo=bar&password&baz=bar'
def test_sanitize_additional_sensitive_fields(self):
additional_sensitive_dict = {
'fieldy_field': 'value',
'moar_other_field': 'another value'
}
data = {
'extra': dict(list(VARS.items()) + list(additional_sensitive_dict.items()))
}
proc = SensitiveDataFilter(additional_sensitive_dict.keys())
proc.apply(data)
for field in additional_sensitive_dict.keys():
assert data['extra'][field] == FILTER_MASK
self._check_vars_sanitized(data['extra'], proc)
def test_sanitize_credit_card(self):
proc = SensitiveDataFilter()
result = proc.sanitize('foo', '4571234567890111')
assert result == FILTER_MASK
def test_sanitize_credit_card_amex(self):
# AMEX numbers are 15 digits, not 16
proc = SensitiveDataFilter()
result = proc.sanitize('foo', '378282246310005')
assert result == FILTER_MASK
def test_sanitize_credit_card_discover(self):
proc = SensitiveDataFilter()
result = proc.sanitize('foo', '6011111111111117')
assert result == FILTER_MASK
def test_sanitize_credit_card_visa(self):
proc = SensitiveDataFilter()
result = proc.sanitize('foo', '4111111111111111')
assert result == FILTER_MASK
def test_sanitize_credit_card_mastercard(self):
proc = SensitiveDataFilter()
result = proc.sanitize('foo', '5555555555554444')
assert result == FILTER_MASK
def test_sanitize_credit_card_within_value(self):
proc = SensitiveDataFilter()
result = proc.sanitize('foo', "'4571234567890111'")
assert result == FILTER_MASK
proc = SensitiveDataFilter()
result = proc.sanitize('foo', "foo 4571234567890111")
assert result == FILTER_MASK
def test_does_not_sanitize_timestamp_looks_like_card(self):
proc = SensitiveDataFilter()
result = proc.sanitize('foo', '1453843029218310')
assert result == '1453843029218310'
def test_sanitize_url(self):
proc = SensitiveDataFilter()
result = proc.sanitize('foo', 'pg://matt:pass@localhost/1')
assert result == 'pg://matt:%s@localhost/1' % FILTER_MASK
result = proc.sanitize('foo', "foo 'redis://redis:foo@localhost:6379/0' bar")
assert result == "foo 'redis://redis:%s@localhost:6379/0' bar" % FILTER_MASK
result = proc.sanitize('foo', "'redis://redis:foo@localhost:6379/0'")
assert result == "'redis://redis:%s@localhost:6379/0'" % FILTER_MASK
result = proc.sanitize('foo', "foo redis://redis:foo@localhost:6379/0 bar")
assert result == "foo redis://redis:%s@localhost:6379/0 bar" % FILTER_MASK
result = proc.sanitize('foo', "foo redis://redis:foo@localhost:6379/0 bar pg://matt:foo@localhost/1")
assert result == "foo redis://redis:%s@localhost:6379/0 bar pg://matt:%s@localhost/1" % (FILTER_MASK, FILTER_MASK)
# Make sure we don't mess up any other url.
# This url specifically if passed through urlunsplit(urlsplit()),
# it'll change the value.
result = proc.sanitize('foo', 'postgres:///path')
assert result == 'postgres:///path'
        # Don't be overly eager within JSON strings and catch the right field.
result = proc.sanitize('foo', '{"a":"https://localhost","b":"foo@localhost","c":"pg://matt:pass@localhost/1","d":"lol"}')
assert result == '{"a":"https://localhost","b":"foo@localhost","c":"pg://matt:%s@localhost/1","d":"lol"}' % FILTER_MASK
def test_sanitize_http_body(self):
data = {
'sentry.interfaces.Http': {
'data': '{"email":"[email protected]","password":"zzzzz"}',
},
}
proc = SensitiveDataFilter()
proc.apply(data)
assert 'sentry.interfaces.Http' in data
http = data['sentry.interfaces.Http']
assert http['data'] == FILTER_MASK
def test_does_not_fail_on_non_string(self):
data = {
'extra': {
'foo': 1,
},
}
proc = SensitiveDataFilter()
proc.apply(data)
assert data['extra'] == {'foo': 1}
def test_does_sanitize_public_key(self):
data = {
'extra': {
's': PUBLIC_KEY,
},
}
proc = SensitiveDataFilter()
proc.apply(data)
assert data['extra'] == {'s': FILTER_MASK}
def test_does_sanitize_private_key(self):
data = {
'extra': {
's': PRIVATE_KEY,
},
}
proc = SensitiveDataFilter()
proc.apply(data)
assert data['extra'] == {'s': FILTER_MASK}
def test_does_sanitize_encrypted_private_key(self):
data = {
'extra': {
's': ENCRYPTED_PRIVATE_KEY,
},
}
proc = SensitiveDataFilter()
proc.apply(data)
assert data['extra'] == {'s': FILTER_MASK}
def test_does_sanitize_rsa_private_key(self):
data = {
'extra': {
's': RSA_PRIVATE_KEY,
},
}
proc = SensitiveDataFilter()
proc.apply(data)
assert data['extra'] == {'s': FILTER_MASK}
def test_does_sanitize_social_security_number(self):
data = {
'extra': {
's': '123-45-6789',
},
}
proc = SensitiveDataFilter()
proc.apply(data)
assert data['extra'] == {'s': FILTER_MASK}
def test_exclude_fields_on_field_name(self):
data = {
'extra': {
'password': '123-45-6789',
},
}
proc = SensitiveDataFilter(exclude_fields=['password'])
proc.apply(data)
assert data['extra'] == {'password': '123-45-6789'}
def test_exclude_fields_on_field_value(self):
data = {
'extra': {
'foobar': '123-45-6789',
},
}
proc = SensitiveDataFilter(exclude_fields=['foobar'])
proc.apply(data)
assert data['extra'] == {'foobar': '123-45-6789'}
def test_empty_field(self):
data = {
'extra': {
'foobar': 'xxx',
},
}
proc = SensitiveDataFilter(fields=[''])
proc.apply(data)
assert data['extra'] == {'foobar': 'xxx'}
| bsd-3-clause | -5,240,779,456,825,479,000 | 31.464674 | 129 | 0.572529 | false |
cschwede/swift-containerlist | containerlist/middleware.py | 1 | 5527 | # Copyright (c) 2013 Christian Schwede <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" WSGI Middleware for Openstack Swift Proxy.
Allows account-level GET requests for all users in the account.
"""
import json
import copy
import time
import eventlet
from swift.common.swob import wsgify
from swift.common.utils import split_path, cache_from_env
from swift.common.wsgi import make_pre_authed_request
from swift.proxy.controllers.base import get_container_info
try:
from swift.account.utils import account_listing_response, \
account_listing_content_type
except ImportError:
from swift_account_utils import account_listing_response, \
account_listing_content_type
class AccountGuestBroker(object):
""" Mimics an account broker, but only returns list of containers the
user has access to. Only used when request originated from non-owner. """
def __init__(self, app, request, account, groups, *args, **kwargs):
self.app = app
self.account = account
self.request = request
self.groups = groups
self.memcache_client = None
self.min_sleep = 5
def get_info(self):
""" This is basically a dummy. """
return {'container_count': None,
'object_count': None,
'bytes_used': None,
'created_at': None,
'put_timestamp': None}
def list_containers_iter(self, *args, **kwargs):
""" Returns a list of containers the user has access to """
try:
version, account = self.request.split_path(2, 2)
except ValueError:
pass
path = "/%s/%s?format=json" % (version, account)
for key in ('limit', 'marker', 'end_marker', 'prefix', 'delimiter'):
value = self.request.params.get(key)
if value:
                path += '&%s=%s' % (key, value)
if self.memcache_client is None:
self.memcache_client = cache_from_env(self.request.environ)
memcache_key = 'containerlist%s%s' % (path, str(self.groups))
containers = self.memcache_client.get(memcache_key)
if containers is not None:
return containers
req = make_pre_authed_request(self.request.environ, 'GET', path)
resp = req.get_response(self.app)
tmp_containers = json.loads(resp.body)
# No cached result? -> ratelimit request to prevent abuse
memcache_key_sleep = 'containerlist_sleep/%s' % self.account
last_request_time = self.memcache_client.get(memcache_key_sleep)
if last_request_time and len(tmp_containers) > 0:
last_request = time.time() - last_request_time
if last_request < self.min_sleep:
eventlet.sleep(self.min_sleep - last_request)
containers = []
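        # Keep only the containers whose read ACL intersects the requester's groups.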
for container in tmp_containers:
tmp_env = copy.copy(self.request.environ)
container_name = container['name'].encode("utf8")
path_info = "/%s/%s/%s" % (version, account, container_name)
tmp_env['PATH_INFO'] = path_info
container_info = get_container_info(tmp_env, self.app)
acl = (container_info.get('read_acl') or '').split(',')
if (list(set(self.groups) & set(acl))):
containers.append((container['name'],
container['count'],
container['bytes'],
0))
self.memcache_client.set(memcache_key, containers, time=60)
self.memcache_client.set(memcache_key_sleep, time.time())
return containers
@property
def metadata(self):
""" Dummy for Broker """
return {}
class ContainerListMiddleware(object):
""" WSGI Middleware """
def __init__(self, app, *args, **kwargs):
self.app = app
@wsgify
def __call__(self, request):
""" Returnes container listing for non-owners """
try:
(_vers, account, container) = split_path(request.path_info, 1, 3)
except ValueError, ex:
return self.app
groups = (request.remote_user or '').split(',')
non_owner = ('.reseller_admin' not in groups
and account not in groups
and '.wsgi.pre_authed' not in groups
and groups != [''])
if account and not container and non_owner and request.method == 'GET':
content_type, _error = account_listing_content_type(request)
broker = AccountGuestBroker(self.app, request, account, groups)
return account_listing_response(account, request,
content_type, broker)
return self.app
def filter_factory(global_conf):
"""Returns a WSGI filter app for use with paste.deploy."""
def containerlist_filter(app):
return ContainerListMiddleware(app)
return containerlist_filter
| apache-2.0 | -575,241,135,822,902,100 | 35.124183 | 79 | 0.606658 | false |
pixunil/Cinnamon | files/usr/share/cinnamon/cinnamon-settings/bin/ChooserButtonWidgets.py | 1 | 21376 | from __future__ import division
from gi.repository import Gtk, GObject, GLib, GdkPixbuf
import cairo
import tweenEquations
import os
import math
import gettext
gettext.install("cinnamon", "/usr/share/locale")
TWEEN_SHAPES = ["Quad", "Cubic", "Quart", "Quint", "Sine", "Expo", "Circ", "Elastic", "Back", "Bounce"]
TWEEN_DIRECTIONS = ["In", "Out", "InOut", "OutIn"]
EFFECT_STYLE_NAMES = {
"none": _("None"),
"scale": _("Scale"),
"fade": _("Fade"),
"blend": _("Blend"),
"move": _("Move"),
"flyUp": _("Fly up"),
"flyDown": _("Fly down"),
"traditional": _("Traditional")
}
PREVIEW_HEIGHT = 48
PREVIEW_WIDTH = 96
ANIMATION_DURATION = 800
ANIMATION_FRAME_RATE = 20
class BaseChooserButton(Gtk.Button):
def __init__ (self, has_button_label=False):
super(BaseChooserButton, self).__init__()
self.menu = Gtk.Menu()
self.button_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=2)
self.button_image = Gtk.Image()
self.button_box.add(self.button_image)
if has_button_label:
self.button_label = Gtk.Label()
self.button_box.add(self.button_label)
self.add(self.button_box)
self.connect("button-release-event", self._on_button_clicked)
def popup_menu_below_button (self, *args):
# the introspection for GtkMenuPositionFunc seems to change with each Gtk version,
# this is a workaround to make sure we get the menu and the widget
menu = args[0]
widget = args[-1]
window = widget.get_window()
screen = window.get_screen()
monitor = screen.get_monitor_at_window(window)
warea = screen.get_monitor_workarea(monitor)
wrect = widget.get_allocation()
mrect = menu.get_allocation()
unused_var, window_x, window_y = window.get_origin()
# Position left edge of the menu with the right edge of the button
x = window_x + wrect.x + wrect.width
# Center the menu vertically with respect to the monitor
y = warea.y + (warea.height / 2) - (mrect.height / 2)
# Now, check if we're still touching the button - we want the right edge
# of the button always 100% touching the menu
if y > (window_y + wrect.y):
y = y - (y - (window_y + wrect.y))
elif (y + mrect.height) < (window_y + wrect.y + wrect.height):
y = y + ((window_y + wrect.y + wrect.height) - (y + mrect.height))
        push_in = True  # push_in is True so the menu always stays fully inside the screen
return (x, y, push_in)
def _on_button_clicked(self, widget, event):
if event.button == 1:
self.menu.show_all()
self.menu.popup(None, None, self.popup_menu_below_button, self, event.button, event.time)
class PictureChooserButton(BaseChooserButton):
def __init__ (self, num_cols=4, button_picture_size=None, menu_pictures_size=None, has_button_label=False):
super(PictureChooserButton, self).__init__(has_button_label)
self.num_cols = num_cols
self.button_picture_size = button_picture_size
self.menu_pictures_size = menu_pictures_size
self.row = 0
self.col = 0
self.progress = 0.0
context = self.get_style_context()
context.add_class("gtkstyle-fallback")
self.connect_after("draw", self.on_draw)
def on_draw(self, widget, cr, data=None):
if self.progress == 0:
return False
box = widget.get_allocation()
context = widget.get_style_context()
c = context.get_background_color(Gtk.StateFlags.SELECTED)
max_length = box.width * .6
start = (box.width - max_length) / 2
y = box.height - 5
cr.save()
cr.set_source_rgba(c.red, c.green, c.blue, c.alpha)
cr.set_line_width(3)
cr.set_line_cap(1)
cr.move_to(start, y)
cr.line_to(start + (self.progress * max_length), y)
cr.stroke()
cr.restore()
return False
def increment_loading_progress(self, inc):
progress = self.progress + inc
self.progress = min(1.0, progress)
self.queue_draw()
def reset_loading_progress(self):
self.progress = 0.0
self.queue_draw()
def set_picture_from_file (self, path):
if os.path.exists(path):
if self.button_picture_size is None:
pixbuf = GdkPixbuf.Pixbuf.new_from_file(path)
else:
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_scale(path, -1, self.button_picture_size, True)
self.button_image.set_from_pixbuf(pixbuf)
def set_button_label(self, label):
self.button_label.set_markup(label)
def _on_picture_selected(self, menuitem, path, callback, id=None):
if id is not None:
result = callback(path, id)
else:
result = callback(path)
if result:
self.set_picture_from_file(path)
def clear_menu(self):
menu = self.menu
self.menu = Gtk.Menu()
self.row = 0
self.col = 0
menu.destroy()
def add_picture(self, path, callback, title=None, id=None):
if os.path.exists(path):
if self.menu_pictures_size is None:
pixbuf = GdkPixbuf.Pixbuf.new_from_file(path)
else:
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_scale(path, -1, self.menu_pictures_size, True)
image = Gtk.Image.new_from_pixbuf (pixbuf)
menuitem = Gtk.MenuItem()
if title is not None:
vbox = Gtk.VBox()
vbox.pack_start(image, False, False, 2)
label = Gtk.Label()
label.set_text(title)
vbox.pack_start(label, False, False, 2)
menuitem.add(vbox)
else:
menuitem.add(image)
if id is not None:
menuitem.connect('activate', self._on_picture_selected, path, callback, id)
else:
menuitem.connect('activate', self._on_picture_selected, path, callback)
self.menu.attach(menuitem, self.col, self.col+1, self.row, self.row+1)
self.col = (self.col+1) % self.num_cols
if (self.col == 0):
self.row = self.row + 1
def add_separator(self):
self.row = self.row + 1
self.menu.attach(Gtk.SeparatorMenuItem(), 0, self.num_cols, self.row, self.row+1)
def add_menuitem(self, menuitem):
self.row = self.row + 1
self.menu.attach(menuitem, 0, self.num_cols, self.row, self.row+1)
class DateChooserButton(Gtk.Button):
__gsignals__ = {
'date-changed': (GObject.SignalFlags.RUN_FIRST, None, (int,int,int))
}
def __init__(self):
super(DateChooserButton, self).__init__()
self.year, self.month, self.day = GLib.DateTime.new_now_local().get_ymd()
self.connect("clicked", self.on_button_clicked)
def on_button_clicked(self, *args):
self.dialog = Gtk.Dialog(transient_for=self.get_toplevel(),
title=_("Select a date"),
flags=Gtk.DialogFlags.MODAL,
buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_OK, Gtk.ResponseType.OK))
content = self.dialog.get_content_area()
calendar = Gtk.Calendar()
content.pack_start(calendar, True, True, 0)
calendar.select_month(self.month-1, self.year)
calendar.select_day(self.day)
def select_today(*args):
date = GLib.DateTime.new_now_local().get_ymd()
calendar.select_month(date[1]-1, date[0])
calendar.select_day(date[2])
today = Gtk.Button(label=_("Today"))
today.connect("clicked", select_today)
content.pack_start(today, False, False, 0)
content.show_all()
response = self.dialog.run()
if response == Gtk.ResponseType.OK:
date = calendar.get_date()
self.set_date(date[0], date[1]+1, date[2]) #calendar uses 0 based month
self.emit("date-changed", self.year, self.month, self.day)
self.dialog.destroy()
def get_date(self):
return self.year, self.month, self.day
def set_date(self, year, month, day):
self.year = year
self.month = month
self.day = day
date = GLib.DateTime.new_local(year, month, day, 1, 1, 1)
date_string = date.format(_("%B %e, %Y"))
self.set_label(date_string)
def draw_window(context, x, y, color, alpha = 1, scale = 1):
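    # Draw the preview "window" rectangle centred on (x, y), with optional
    # transparency and scaling; used by the effect preview classes below.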
if scale <= 0:
return
alpha = min(max(alpha, 0), 1)
context.set_source_rgba(color.red, color.green, color.blue, alpha)
context.save()
context.translate(x, y)
context.scale(scale, scale)
context.rectangle(-PREVIEW_WIDTH / 4., -PREVIEW_HEIGHT / 4., PREVIEW_WIDTH / 2., PREVIEW_HEIGHT / 2.)
context.fill()
context.restore()
# The following classes contain the functions to draw effect previews. To add a new effect,
# you will only need to include the draw_preview function. To provide an animation preview,
# you will also need to include the animate function. You will also need to add your new effect
# to EFFECT_STYLE_NAMES above
class none(object):
def draw_preview(self, context, x, y, color):
draw_window(context, x, y, color, 1.)
class scale(object):
def draw_preview(self, context, x, y, color):
steps = 3
for i in range(steps):
draw_window(context, x, y, color, (steps - i) * 1. / steps, (i + 1.) / steps)
def animate(self, context, x, y, percent_complete, color):
scale = 1 - percent_complete
draw_window(context, x, y, color, scale=scale)
class fade(object):
def draw_preview(self, context, x, y, color):
draw_window(context, x, y, color, .5)
def animate(self, context, x, y, percent_complete, color):
alpha = 1 - percent_complete
draw_window(context, x, y, color, alpha=alpha)
class blend(object):
def draw_preview(self, context, x, y, color):
steps = 3
for i in range(steps):
draw_window(context, x, y, color, (steps - i) * 1. / steps, 1 + i / (steps - 1.) / 2)
def animate(self, context, x, y, percent_complete, color):
scale = 1 + percent_complete / 2
alpha = 1 - percent_complete
draw_window(context, x, y, color, alpha=alpha, scale=scale)
class traditional(object):
def draw_preview(self, context, x, y, color):
gradient = cairo.LinearGradient(x, y * 2, x, y)
gradient.add_color_stop_rgba(0, color.red, color.green, color.blue, 0)
gradient.add_color_stop_rgb(1, color.red, color.green, color.blue)
context.set_source(gradient)
context.move_to(x, y * 2)
context.line_to(x * 1.5, y * 1.5)
context.line_to(x * 1.5, y * .5)
context.line_to(x * .5, y * .5)
context.line_to(x * .5, y * 1.5)
context.fill()
def animate(self, context, x, y, percent_complete, color):
y *= 1 + percent_complete
scale = 1 - percent_complete
alpha = 1 - percent_complete
draw_window(context, x, y, color, alpha=alpha, scale=scale)
class move(object):
def draw_preview(self, context, x, y, color):
gradient = cairo.LinearGradient(0, 0, x, y)
gradient.add_color_stop_rgba(0, color.red, color.green, color.blue, 0)
gradient.add_color_stop_rgb(1, color.red, color.green, color.blue)
context.set_source(gradient)
context.move_to(x / 5, y / 5)
context.line_to(x * 1.5, y * .5)
context.line_to(x * 1.5, y * 1.5)
context.line_to(x * .5, y * 1.5)
context.fill()
def animate(self, context, x, y, percent_complete, color):
remain = 1 - percent_complete
draw_window(context, x*remain, y*remain, color, scale=remain)
class flyUp(object):
def draw_preview(self, context, x, y, color):
gradient = cairo.LinearGradient(0, y * 2, 0, y * 1.5)
gradient.add_color_stop_rgba(0, color.red, color.green, color.blue, 0)
gradient.add_color_stop_rgb(1, color.red, color.green, color.blue)
context.set_source(gradient)
context.rectangle(x / 2, y / 2, x, y * 1.5)
context.fill()
def animate(self, context, x, y, percent_complete, color):
y *= 1 - percent_complete * 1.5
draw_window(context, x, y, color)
class flyDown(object):
def draw_preview(self, context, x, y, color):
gradient = cairo.LinearGradient(0, 0, 0, y / 2)
gradient.add_color_stop_rgba(0, color.red, color.green, color.blue, 0)
gradient.add_color_stop_rgb(1, color.red, color.green, color.blue)
context.set_source(gradient)
context.rectangle(x / 2, 0, x, y * 1.5)
context.fill()
def animate(self, context, x, y, percent_complete, color):
y *= 1 + percent_complete * 1.5
draw_window(context, x, y, color)
# a button to select tweens
class TweenChooserButton(BaseChooserButton):
__gproperties__ = {
"tween": (str,
"tween value",
"Value of the selected tween",
None,
GObject.PARAM_READWRITE)
}
def __init__(self):
super(TweenChooserButton, self).__init__()
self.tween = ""
self.set_size_request(128, -1)
self.build_menuitem("None", 0, 0)
row = 1
for suffix in TWEEN_SHAPES:
col = 0
for prefix in TWEEN_DIRECTIONS:
self.build_menuitem(prefix + suffix, col, row)
col += 1
row += 1
def build_menuitem(self, name, col, row):
menuitem = TweenMenuItem("ease" + name)
menuitem.connect("activate", self.change_value)
self.menu.attach(menuitem, col, col + 1, row, row + 1)
def change_value(self, widget):
self.props.tween = widget.tween_type
def do_get_property(self, prop):
if prop.name == 'tween':
return self.tween
else:
raise AttributeError('unknown property %s' % prop.name)
def do_set_property(self, prop, value):
if prop.name == 'tween':
if value != self.tween:
self.tween = value
self.set_label(self.tween)
else:
raise AttributeError('unknown property %s' % prop.name)
# menu item for TweenChooserButton
class TweenMenuItem(Gtk.MenuItem):
def __init__(self, tween_type):
super(TweenMenuItem, self).__init__()
self.animating = False
self.timer = None
self.tween_type = tween_type
self.tween_function = getattr(tweenEquations, tween_type)
self.vbox = Gtk.VBox()
self.add(self.vbox)
box = Gtk.Box()
self.vbox.add(box)
self.graph = Gtk.DrawingArea()
box.add(self.graph)
self.graph.set_size_request(PREVIEW_WIDTH, PREVIEW_HEIGHT)
self.graph.connect("draw", self.draw_graph)
self.arrow = Gtk.DrawingArea()
box.pack_end(self.arrow, False, False, 0)
self.arrow.set_size_request(5, PREVIEW_HEIGHT)
self.arrow.connect("draw", self.draw_arrow)
self.connect("enter-notify-event", self.start_animation)
self.connect("leave-notify-event", self.stop_animation)
label = Gtk.Label()
self.vbox.add(label)
label.set_text(tween_type)
def draw_graph(self, widget, context):
width = PREVIEW_WIDTH - 2.
height = PREVIEW_HEIGHT / 8.
style = widget.get_style_context()
if self.animating:
color = style.get_background_color(Gtk.StateFlags.SELECTED)
else:
color = style.get_color(Gtk.StateFlags.NORMAL)
context.set_source_rgb(color.red, color.green, color.blue)
context.move_to(1, height * 6)
for i in range(int(width)):
value = self.tween_function(i + 1., height * 6, -height * 4, width)
context.line_to(i + 2, value)
context.stroke()
def draw_arrow(self, widget, context):
if not self.animating:
return
height = PREVIEW_HEIGHT / 8.
style = widget.get_style_context()
color = style.get_color(Gtk.StateFlags.NORMAL)
context.set_source_rgb(color.red, color.green, color.blue)
value = self.tween_function(self.elapsed/ANIMATION_DURATION, height * 6, -height * 4, 1)
context.arc(5, value, 5, math.pi / 2, math.pi * 1.5)
context.fill()
def start_animation(self, *args):
self.animating = True
self.elapsed = 0
self.arrow.queue_draw()
self.graph.queue_draw()
self.timer = GObject.timeout_add(ANIMATION_FRAME_RATE, self.advance_animation)
def stop_animation(self, *args):
self.animating = False
if self.timer:
GObject.source_remove(self.timer)
self.timer = None
self.arrow.queue_draw()
self.graph.queue_draw()
def advance_animation(self):
self.elapsed += ANIMATION_FRAME_RATE
if self.elapsed >= ANIMATION_DURATION:
self.timer = None
return False
# self.stop_animation()
self.arrow.queue_draw()
return True
# a button to select effect types
class EffectChooserButton(BaseChooserButton):
__gproperties__ = {
"effect": (str,
"effect value",
"Value of the selected effect",
None,
GObject.PARAM_READWRITE)
}
def __init__(self, effect_styles=None):
super(EffectChooserButton, self).__init__()
self.effect = ""
self.effect_styles = ["none", "scale"] if effect_styles == None else effect_styles
self.set_size_request(128, -1)
row = 0
col = 0
for option in self.effect_styles:
self.build_menuitem(option, col, row)
col += 1
if col >= 4:
col = 0
row += 1
def build_menuitem(self, effect_type, col, row):
# apply the specific effect type methods onto the base effect type menu item
EffectTypeMenuItem = type(effect_type+"MenuItem",
(globals()[effect_type], EffectMenuItem),
{"effect_type": effect_type})
menuitem = EffectTypeMenuItem()
menuitem.connect("activate", self.change_value)
self.menu.attach(menuitem, col, col + 1, row, row + 1)
def change_value(self, widget):
self.props.effect = widget.effect_type
def do_get_property(self, prop):
if prop.name == 'effect':
return self.effect
else:
raise AttributeError('unknown property %s' % prop.name)
def do_set_property(self, prop, value):
if prop.name == 'effect':
if value != self.effect:
self.effect = value
self.set_label(EFFECT_STYLE_NAMES[self.effect])
else:
raise AttributeError('unknown property %s' % prop.name)
# menu item for EffectChooserButton
class EffectMenuItem(Gtk.MenuItem):
def __init__(self):
super(EffectMenuItem, self).__init__()
self.animating = False
self.timer = None
self.drawing = Gtk.DrawingArea()
self.drawing.connect("draw", self.draw)
self.drawing.set_size_request(PREVIEW_WIDTH, PREVIEW_HEIGHT)
self.style = self.drawing.get_style_context()
box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self.add(box)
self.connect("enter-notify-event", self.start_animation)
self.connect("leave-notify-event", self.stop_animation)
box.add(self.drawing)
label = Gtk.Label()
box.add(label)
label.set_text(EFFECT_STYLE_NAMES[self.effect_type])
def start_animation(self, *args):
if not hasattr(self, "animate"):
return
self.animating = True
self.elapsed = 0
self.drawing.queue_draw()
self.timer = GObject.timeout_add(ANIMATION_FRAME_RATE, self.advance_animation)
def stop_animation(self, *args):
self.animating = False
if self.timer:
GObject.source_remove(self.timer)
self.timer = None
self.drawing.queue_draw()
def advance_animation(self):
if self.elapsed > ANIMATION_DURATION:
self.stop_animation()
self.elapsed += ANIMATION_FRAME_RATE
self.drawing.queue_draw()
return True
def draw(self, widget, context):
x = PREVIEW_WIDTH / 2.
y = PREVIEW_HEIGHT / 2.
color = self.get_color()
if self.animating:
percent_complete = self.elapsed / ANIMATION_DURATION
self.animate(context, x, y, percent_complete, color)
else:
self.draw_preview(context, x, y, color)
# value = self.transition(self.state % self.duration, 0, 1, self.duration - 1)
def get_color(self):
if self.animating:
return self.style.get_color(Gtk.StateFlags.NORMAL)
return self.style.get_background_color(Gtk.StateFlags.SELECTED)
| gpl-2.0 | 598,406,912,072,054,500 | 33.871126 | 111 | 0.584207 | false |
StephenOrJames/UBinE | locate/rooms.py | 1 | 4367 | def get_building(quadrangle, room_number):
"""Finds and returns the building that a room is in for a specific quadrangle, or None"""
quadrangle = quadrangle.lower()
def in_ranges(ranges):
"""Determines if a room is in a list of ranges (each specified by tuples)"""
for first, last in ranges:
if first <= room_number <= last:
return True
return False
if quadrangle == "fargo":
if in_ranges([(201, 208), (301, 311), (401, 417)]):
return 1
elif in_ranges([(209, 218), (312, 324), (418, 434)]):
return 2
elif in_ranges([(219, 226), (325, 334), (435, 450)]):
return 3
elif in_ranges([(551, 554), (651, 656), (751, 754), (851, 856), (951, 957), (1051, 1058)]):
return 4
elif in_ranges([(361, 364), (461, 464), (561, 566)]):
return 5
elif in_ranges([(365, 372), (465, 475), (567, 584)]):
return 6
elif in_ranges([(148, 149), (386, 388), (486, 491), (586, 589), (686, 691), (786, 793)]):
return 7
elif quadrangle == "porter":
if in_ranges([(203, 208), (303, 311), (405, 417)]):
return 1
elif in_ranges([(209, 215), (312, 321), (418, 431)]):
return 2
elif in_ranges([(216, 219), (322, 326), (432, 440)]):
return 3
elif in_ranges([(551, 554), (641, 646), (741, 744), (841, 846), (941, 947), (1041, 1048)]):
return 4
elif in_ranges([(341, 346), (441, 447), (540, 550)]):
return 5
elif in_ranges([(361, 370), (461, 473), (561, 581)]):
return 6
elif in_ranges([(301, 302), (401, 404), (501, 506), (601, 604), (701, 706), (801, 807)]):
return 7
elif quadrangle == "red_jacket":
if in_ranges([(201, 210), (301, 313), (401, 421)]):
return 1
elif in_ranges([(211, 219), (314, 325), (422, 439)]):
return 2
elif in_ranges([(326, 333), (440, 450), (540, 559)]):
return 3
elif in_ranges([(585, 588), (676, 681), (776, 779), (876, 881), (976, 982), (1076, 1083)]):
return 4
elif in_ranges([(361, 372), (461, 475), (561, 584)]):
return 5
elif in_ranges([(191, 192), (391, 394), (491, 496), (591, 594), (691, 696), (791, 797)]):
return 6
elif quadrangle == "richmond":
if in_ranges([(201, 210), (301, 313), (401, 420)]):
return 1
elif in_ranges([(211, 219), (314, 325), (421, 439)]):
return 2
elif in_ranges([(341, 348), (441, 451), (541, 560)]):
return 3
elif in_ranges([(561, 564), (661, 667), (761, 764), (861, 866), (961, 967), (1061, 1068)]):
return 4
elif in_ranges([(371, 382), (471, 485), (571, 594)]):
return 5
elif in_ranges([(191, 192), (391, 394), (491, 496), (595, 598), (691, 696), (791, 797)]):
return 6
elif quadrangle == "spaulding":
if in_ranges([(215, 220), (318, 323), (426, 434)]):
return 1
elif in_ranges([(207, 214), (307, 317), (409, 425)]):
return 2
elif in_ranges([(201, 206), (301, 306), (401, 408)]):
return 3
elif in_ranges([(576, 579), (676, 681), (776, 779), (876, 881), (976, 983), (1076, 1083)]):
return 4
elif in_ranges([(379, 384), (485, 491), (580, 590)]):
return 5
elif in_ranges([(351, 360), (451, 463), (551, 572)]):
return 6
elif in_ranges([(341, 342), (441, 444), (540, 545), (641, 644), (741, 746), (841, 848)]):
return 7
elif quadrangle == "wilkeson":
if in_ranges([(224, 230), (328, 334), (443, 452)]):
return 1
elif in_ranges([(213, 223), (314, 327), (425, 442)]):
return 2
elif in_ranges([(203, 212), (304, 313), (409, 424)]):
return 3
elif in_ranges([(501, 504), (601, 606), (701, 704), (801, 806), (901, 908), (1001, 1008)]):
return 4
elif in_ranges([(377, 380), (477, 480), (588, 595)]):
return 5
elif in_ranges([(367, 376), (467, 476), (574, 587)]):
return 6
elif in_ranges([(256, 257), (356, 366), (456, 466), (556, 573)]):
return 8
return None
| mit | -7,027,488,491,047,506,000 | 43.111111 | 99 | 0.476529 | false |
WillBrennan/SkinDetector | FromFile.py | 1 | 1679 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Will Brennan'
import argparse
import logging
import cv2
import skin_detector
logger = logging.getLogger('main')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('image_paths', type=str, nargs='+', help="paths to one or more images or image directories")
parser.add_argument('-b', '--debug', dest='debug', action='store_true', help='enable debug logging')
parser.add_argument('-q', '--quite', dest='quite', action='store_true', help='disable all logging')
parser.add_argument('-d', '--display', dest='display', action='store_true', help="display result")
parser.add_argument('-s', '--save', dest='save', action='store_true', help="save result to file")
parser.add_argument('-t', '--thresh', dest='thresh', default=0.5, type=float, help='threshold for skin mask')
args = parser.parse_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("main")
for image_arg in args.image_paths:
for image_path in skin_detector.find_images(image_arg):
logging.info("loading image from {0}".format(image_path))
img_col = cv2.imread(image_path, 1)
img_msk = skin_detector.process(img_col)
if args.display:
skin_detector.scripts.display('img_col', img_col)
skin_detector.scripts.display('img_msk', img_msk)
skin_detector.scripts.display('img_skn', cv2.bitwise_and(img_col, img_col, mask=img_msk))
cv2.waitKey(0)
| mit | -3,747,232,836,383,487,000 | 39.95122 | 116 | 0.636689 | false |
gregpechiro/dndtools | dndtools/dnd/monsters/views.py | 1 | 4836 | # Create your views here.
from django.shortcuts import get_object_or_404, render_to_response
from django.template.context import RequestContext
from dndtools.dnd.menu import menu_item, submenu_item, MenuItem
from dndtools.dnd.dnd_paginator import DndPaginator
from dndtools.dnd.filters import MonsterFilter
from dndtools.dnd.models import Rulebook, Monster
from dndtools.dnd.views import is_3e_edition, permanent_redirect_view, permanent_redirect_object
@menu_item(MenuItem.BESTIARY)
@submenu_item(MenuItem.Bestiary.MONSTERS)
def monster_index(request):
f = MonsterFilter(request.GET, queryset=Monster.objects.select_related(
'rulebook', 'rulebook__dnd_edition', 'school').distinct())
paginator = DndPaginator(f.qs, request)
form_submitted = 1 if '_filter' in request.GET else 0
return render_to_response('dnd/monsters/monster_index.html',
{
'request': request,
'monster_list': paginator.items(),
'paginator': paginator,
'filter': f,
'form_submitted': form_submitted,
}, context_instance=RequestContext(request), )
@menu_item(MenuItem.BESTIARY)
@submenu_item(MenuItem.Bestiary.MONSTERS)
def monster_list_by_rulebook(request):
rulebook_list = Rulebook.objects.select_related('dnd_edition').all()
paginator = DndPaginator(rulebook_list, request)
return render_to_response('dnd/monsters/monster_list_by_rulebook.html',
{
'request': request,
'rulebook_list': paginator.items(),
'paginator': paginator,
}, context_instance=RequestContext(request), )
@menu_item(MenuItem.BESTIARY)
@submenu_item(MenuItem.Bestiary.MONSTERS)
def monsters_in_rulebook(request, rulebook_slug, rulebook_id):
rulebook = get_object_or_404(Rulebook, pk=rulebook_id)
if not rulebook.slug == rulebook_slug:
return permanent_redirect_view(request, 'monsters_in_rulebook',
kwargs={
'rulebook_slug': rulebook.slug,
'rulebook_id': rulebook_id, })
monster_list = rulebook.monster_set.select_related(
'rulebook', 'rulebook__dnd_edition', 'school').all()
paginator = DndPaginator(monster_list, request)
return render_to_response('dnd/monsters/monsters_in_rulebook.html',
{
'rulebook': rulebook,
'monster_list': paginator.items(),
'paginator': paginator,
'request': request,
'display_3e_warning': is_3e_edition(rulebook.dnd_edition),
}, context_instance=RequestContext(request), )
@menu_item(MenuItem.BESTIARY)
@submenu_item(MenuItem.Bestiary.MONSTERS)
def monster_detail(request, rulebook_slug, rulebook_id, monster_slug, monster_id):
monster = get_object_or_404(
Monster.objects.select_related('rulebook', 'rulebook__dnd_edition', 'size',
'type', ),
pk=monster_id)
if (monster.slug != monster_slug or
unicode(monster.rulebook.id) != rulebook_id or
monster.rulebook.slug != rulebook_slug):
return permanent_redirect_object(request, monster)
assert isinstance(monster, Monster)
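    # Fetch the related collections displayed on the monster detail page.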
monster_speeds = monster.monsterspeed_set.select_related('type', ).all()
monster_subtypes = monster.subtypes.all()
monster_skills = monster.skills.select_related('skill').all()
monster_feats = monster.feats.select_related('feat', 'feat__rulebook').all()
return render_to_response('dnd/monsters/monster_detail.html',
{
'monster': monster,
'rulebook': monster.rulebook,
'request': request,
'monster_speeds': monster_speeds,
'monster_subtypes': monster_subtypes,
'monster_skills': monster_skills,
'monster_feats': monster_feats,
'i_like_it_url': request.build_absolute_uri(),
'inaccurate_url': request.build_absolute_uri(),
'display_3e_warning': is_3e_edition(monster.rulebook.dnd_edition),
}, context_instance=RequestContext(request), ) | mit | 7,452,719,161,051,088,000 | 46.421569 | 100 | 0.555211 | false |
brownnrl/moneyguru | core/gui/transaction_panel.py | 1 | 4467 | # Copyright 2019 Virgil Dupras
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
import weakref
from datetime import date
from core.util import first
from ..const import AccountType
from ..model._ccore import AccountList
from ..model.transaction import Transaction
from .base import GUIPanel
from .split_table import SplitTable
from .completable_edit import CompletableEdit
class PanelWithTransaction(GUIPanel):
"""Base class for panels working with a transaction"""
def __init__(self, mainwindow):
GUIPanel.__init__(self, mainwindow)
self.transaction = Transaction(date.today())
self._selected_splits = []
# Place to store temporarily created accounts during the editing of the
# transaction.
self._tmpaccounts = AccountList(self.document.default_currency)
# completable_edit has to be set before split_table is created because split table fetches
# our completable edit on __init__ (for Qt).
self.completable_edit = CompletableEdit(mainwindow)
self.split_table = SplitTable(weakref.proxy(self))
def change_split(self, split, account_name, amount, memo):
if split is None:
split = self.transaction.new_split()
if account_name:
if split.account:
account_type = split.account.type
else:
account_type = AccountType.Expense if split.amount < 0 else AccountType.Income
split.account = self._tmpaccounts.find(account_name, account_type)
else:
split.account = None
split.amount = amount
split.memo = memo
self.transaction.balance(split)
self.split_table.refresh_splits()
self.view.refresh_for_multi_currency()
def delete_split(self, split):
self.transaction.remove_split(split)
self.view.refresh_for_multi_currency()
def select_splits(self, splits):
self._selected_splits = splits
# --- Properties
@property
def description(self):
return self.transaction.description
@description.setter
def description(self, value):
self.transaction.description = value
@property
def payee(self):
return self.transaction.payee
@payee.setter
def payee(self, value):
self.transaction.payee = value
@property
def checkno(self):
return self.transaction.checkno
@checkno.setter
def checkno(self, value):
self.transaction.checkno = value
@property
def notes(self):
return self.transaction.notes
@notes.setter
def notes(self, value):
self.transaction.notes = value
@property
def is_multi_currency(self):
return self.transaction.is_mct
class TransactionPanel(PanelWithTransaction):
# --- Override
def _load(self, transaction):
self.mainwindow.stop_editing()
self.transaction = transaction.replicate()
self.original = transaction
self.view.refresh_for_multi_currency()
self.split_table.refresh_initial()
def _save(self):
self.document.change_transaction(self.original, self.transaction)
self.mainwindow.revalidate()
# --- Public
def mct_balance(self):
"""Balances the mct by using xchange rates.
The currency of the new split is the currency of the currently selected split.
"""
self.split_table.stop_editing()
split = first(self._selected_splits)
new_split_currency = self.document.default_currency
if split is not None and split.amount != 0:
new_split_currency = split.amount.currency_code
self.transaction.mct_balance(new_split_currency)
self.split_table.refresh_splits()
def assign_imbalance(self):
"""Assigns remaining imbalance to the selected split.
If the selected split is not an assigned split, does nothing.
"""
split = first(self._selected_splits)
if split is not None:
self.transaction.assign_imbalance(split)
self.split_table.refresh_splits()
@property
def date(self):
return self.app.format_date(self.transaction.date)
@date.setter
def date(self, value):
self.transaction.date = self.app.parse_date(value)
| gpl-3.0 | -5,317,012,373,037,943,000 | 31.136691 | 98 | 0.660398 | false |
fgdorais/MarkovModel | markov.py | 1 | 2632 | from __future__ import unicode_literals # Python 2/3 compatibility #
from __future__ import print_function # Python 2/3 compatibility #
import sys
import argparse
import modelio
from parser import TokenParser
from trainer import MarkovTrainer
from generator import MarkovGenerator
parser = argparse.ArgumentParser(description='Markov model trainer.')
subparsers = parser.add_subparsers(help='sub-command help')
# 'train' command parser
train_parser = subparsers.add_parser('train', help='train model using text data')
train_parser.add_argument('model', help='output model', nargs=1)
train_parser.add_argument('text', help='training data', nargs='*', type=argparse.FileType('r'), default=sys.stdin)
train_parser.add_argument('-d', '--depth', help='memory depth', nargs=1, type=int, default=[1])
train_parser.add_argument('--matrix', help='matrix format', action="store_true")
def train(args):
"""Command for training a new Markov model."""
if args.depth[0] < 0:
exit('{0}: error: argument -d/--depth: invalid negative value: {1}'.format(sys.argv[0], args.depth[0]))
Trainer = MarkovTrainer(args.depth[0])
for data in args.text:
Trainer.train(TokenParser(data))
if args.matrix:
modelio.write_matrix(args.model[0], Trainer.model())
else:
modelio.write_sparse(args.model[0], Trainer.model())
train_parser.set_defaults(func=train)
# 'random' command parser
random_parser = subparsers.add_parser('random', help='generate random text using model')
random_parser.add_argument('model', help='input model', nargs=1)
random_parser.add_argument('count', help='token count', nargs='?', type=int, default=100)
random_parser.add_argument('-d', '--depth', help='memory depth', nargs=1, type=int, default=[1])
random_parser.add_argument('--matrix', help='matrix format', action="store_true")
def random(args):
"""Command for generating random data using a Markov model."""
if args.depth[0] < 0:
exit('{0}: error: argument -d/--depth: invalid negative value: {1}'.format(sys.argv[0], args.depth[0]))
if args.count <= 0:
exit('{0}: error: token count must be positive, got: {1}.'.format(sys.argv[0], args.count))
if args.matrix:
model = modelio.read_matrix(args.model[0])
args.depth[0] = 1 # Force depth 1 for matrix format
else:
model = modelio.read_sparse(args.model[0])
Generator = MarkovGenerator(model, depth=args.depth[0])
print(' '.join([Generator.next() for i in range(args.count)]))
random_parser.set_defaults(func=random)
args = parser.parse_args()
args.func(args)
| mit | -3,570,370,338,337,834,500 | 37.705882 | 114 | 0.68465 | false |
shashisp/blumix-webpy | app/gluon/utils.py | 1 | 11493 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <[email protected]>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
This file specifically includes utilities for security.
--------------------------------------------------------
"""
import threading
import struct
import uuid
import random
import inspect
import time
import os
import re
import sys
import logging
import socket
import base64
import zlib
_struct_2_long_long = struct.Struct('=QQ')
python_version = sys.version_info[0]
if python_version == 2:
import cPickle as pickle
else:
import pickle
import hashlib
from hashlib import md5, sha1, sha224, sha256, sha384, sha512
try:
from Crypto.Cipher import AES
except ImportError:
import gluon.contrib.aes as AES
import hmac
if hasattr(hashlib, "pbkdf2_hmac"):
def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
hashfunc = hashfunc or sha1
return hashlib.pbkdf2_hmac(hashfunc().name,
data, salt, iterations,
keylen).encode("hex")
HAVE_PBKDF2 = True
else:
try:
try:
from gluon.contrib.pbkdf2_ctypes import pbkdf2_hex
except (ImportError, AttributeError):
from gluon.contrib.pbkdf2 import pbkdf2_hex
HAVE_PBKDF2 = True
except ImportError:
try:
from .pbkdf2 import pbkdf2_hex
HAVE_PBKDF2 = True
except (ImportError, ValueError):
HAVE_PBKDF2 = False
logger = logging.getLogger("web2py")
def AES_new(key, IV=None):
""" Returns an AES cipher object and random IV if None specified """
if IV is None:
IV = fast_urandom16()
return AES.new(key, AES.MODE_CBC, IV), IV
def compare(a, b):
""" Compares two strings and not vulnerable to timing attacks """
if len(a) != len(b):
return False
result = 0
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
def md5_hash(text):
""" Generates a md5 hash with the given text """
return md5(text).hexdigest()
def simple_hash(text, key='', salt='', digest_alg='md5'):
"""
Generates hash with the given text using the specified
digest hashing algorithm
"""
if not digest_alg:
raise RuntimeError("simple_hash with digest_alg=None")
elif not isinstance(digest_alg, str): # manual approach
h = digest_alg(text + key + salt)
elif digest_alg.startswith('pbkdf2'): # latest and coolest!
iterations, keylen, alg = digest_alg[7:-1].split(',')
return pbkdf2_hex(text, salt, int(iterations),
int(keylen), get_digest(alg))
elif key: # use hmac
digest_alg = get_digest(digest_alg)
h = hmac.new(key + salt, text, digest_alg)
else: # compatible with third party systems
h = get_digest(digest_alg)()
h.update(text + salt)
return h.hexdigest()
def get_digest(value):
"""
Returns a hashlib digest algorithm from a string
"""
if not isinstance(value, str):
return value
value = value.lower()
if value == "md5":
return md5
elif value == "sha1":
return sha1
elif value == "sha224":
return sha224
elif value == "sha256":
return sha256
elif value == "sha384":
return sha384
elif value == "sha512":
return sha512
else:
raise ValueError("Invalid digest algorithm: %s" % value)
DIGEST_ALG_BY_SIZE = {
128 / 4: 'md5',
160 / 4: 'sha1',
224 / 4: 'sha224',
256 / 4: 'sha256',
384 / 4: 'sha384',
512 / 4: 'sha512',
}
def get_callable_argspec(fn):
if inspect.isfunction(fn) or inspect.ismethod(fn):
inspectable = fn
elif inspect.isclass(fn):
inspectable = fn.__init__
elif hasattr(fn, '__call__'):
inspectable = fn.__call__
else:
inspectable = fn
return inspect.getargspec(inspectable)
def pad(s, n=32, padchar=' '):
return s + (32 - len(s) % 32) * padchar
def secure_dumps(data, encryption_key, hash_key=None, compression_level=None):
if not hash_key:
hash_key = sha1(encryption_key).hexdigest()
dump = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
if compression_level:
dump = zlib.compress(dump, compression_level)
key = pad(encryption_key[:32])
cipher, IV = AES_new(key)
encrypted_data = base64.urlsafe_b64encode(IV + cipher.encrypt(pad(dump)))
signature = hmac.new(hash_key, encrypted_data).hexdigest()
return signature + ':' + encrypted_data
def secure_loads(data, encryption_key, hash_key=None, compression_level=None):
if not ':' in data:
return None
if not hash_key:
hash_key = sha1(encryption_key).hexdigest()
signature, encrypted_data = data.split(':', 1)
actual_signature = hmac.new(hash_key, encrypted_data).hexdigest()
if not compare(signature, actual_signature):
return None
key = pad(encryption_key[:32])
encrypted_data = base64.urlsafe_b64decode(encrypted_data)
IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
cipher, _ = AES_new(key, IV=IV)
try:
data = cipher.decrypt(encrypted_data)
data = data.rstrip(' ')
if compression_level:
data = zlib.decompress(data)
return pickle.loads(data)
except Exception, e:
return None
### compute constant CTOKENS
def initialize_urandom():
"""
This function and the web2py_uuid follow from the following discussion:
`http://groups.google.com/group/web2py-developers/browse_thread/thread/7fd5789a7da3f09`
At startup web2py compute a unique ID that identifies the machine by adding
uuid.getnode() + int(time.time() * 1e3)
This is a 48-bit number. It converts the number into 16 8-bit tokens.
It uses this value to initialize the entropy source ('/dev/urandom') and to seed random.
If os.random() is not supported, it falls back to using random and issues a warning.
"""
node_id = uuid.getnode()
microseconds = int(time.time() * 1e6)
ctokens = [((node_id + microseconds) >> ((i % 6) * 8)) %
256 for i in range(16)]
random.seed(node_id + microseconds)
try:
os.urandom(1)
have_urandom = True
try:
# try to add process-specific entropy
frandom = open('/dev/urandom', 'wb')
try:
if python_version == 2:
frandom.write(''.join(chr(t) for t in ctokens)) # python 2
else:
frandom.write(bytes([]).join(bytes([t]) for t in ctokens)) # python 3
finally:
frandom.close()
except IOError:
# works anyway
pass
except NotImplementedError:
have_urandom = False
logger.warning(
"""Cryptographically secure session management is not possible on your system because
your system does not provide a cryptographically secure entropy source.
This is not specific to web2py; consider deploying on a different operating system.""")
if python_version == 2:
packed = ''.join(chr(x) for x in ctokens) # python 2
else:
packed = bytes([]).join(bytes([x]) for x in ctokens) # python 3
unpacked_ctokens = _struct_2_long_long.unpack(packed)
return unpacked_ctokens, have_urandom
UNPACKED_CTOKENS, HAVE_URANDOM = initialize_urandom()
def fast_urandom16(urandom=[], locker=threading.RLock()):
"""
This is 4x faster than calling os.urandom(16) and prevents
the "too many files open" issue with concurrent access to os.urandom()
"""
try:
return urandom.pop()
except IndexError:
try:
locker.acquire()
ur = os.urandom(16 * 1024)
urandom += [ur[i:i + 16] for i in xrange(16, 1024 * 16, 16)]
return ur[0:16]
finally:
locker.release()
def web2py_uuid(ctokens=UNPACKED_CTOKENS):
"""
This function follows from the following discussion:
`http://groups.google.com/group/web2py-developers/browse_thread/thread/7fd5789a7da3f09`
It works like uuid.uuid4 except that tries to use os.urandom() if possible
and it XORs the output with the tokens uniquely associated with this machine.
"""
rand_longs = (random.getrandbits(64), random.getrandbits(64))
if HAVE_URANDOM:
urand_longs = _struct_2_long_long.unpack(fast_urandom16())
byte_s = _struct_2_long_long.pack(rand_longs[0] ^ urand_longs[0] ^ ctokens[0],
rand_longs[1] ^ urand_longs[1] ^ ctokens[1])
else:
byte_s = _struct_2_long_long.pack(rand_longs[0] ^ ctokens[0],
rand_longs[1] ^ ctokens[1])
return str(uuid.UUID(bytes=byte_s, version=4))
REGEX_IPv4 = re.compile('(\d+)\.(\d+)\.(\d+)\.(\d+)')
def is_valid_ip_address(address):
"""
Examples:
Better than a thousand words::
>>> is_valid_ip_address('127.0')
False
>>> is_valid_ip_address('127.0.0.1')
True
>>> is_valid_ip_address('2001:660::1')
True
"""
# deal with special cases
if address.lower() in ('127.0.0.1', 'localhost', '::1', '::ffff:127.0.0.1'):
return True
elif address.lower() in ('unknown', ''):
return False
elif address.count('.') == 3: # assume IPv4
if address.startswith('::ffff:'):
address = address[7:]
if hasattr(socket, 'inet_aton'): # try validate using the OS
try:
socket.inet_aton(address)
return True
except socket.error: # invalid address
return False
else: # try validate using Regex
match = REGEX_IPv4.match(address)
if match and all(0 <= int(match.group(i)) < 256 for i in (1, 2, 3, 4)):
return True
return False
elif hasattr(socket, 'inet_pton'): # assume IPv6, try using the OS
try:
socket.inet_pton(socket.AF_INET6, address)
return True
except socket.error: # invalid address
return False
else: # do not know what to do? assume it is a valid address
return True
def is_loopback_ip_address(ip=None, addrinfo=None):
"""
Determines whether the address appears to be a loopback address.
This assumes that the IP is valid.
"""
if addrinfo: # see socket.getaddrinfo() for layout of addrinfo tuple
if addrinfo[0] == socket.AF_INET or addrinfo[0] == socket.AF_INET6:
ip = addrinfo[4]
if not isinstance(ip, basestring):
return False
# IPv4 or IPv6-embedded IPv4 or IPv4-compatible IPv6
if ip.count('.') == 3:
return ip.lower().startswith(('127', '::127', '0:0:0:0:0:0:127',
'::ffff:127', '0:0:0:0:0:ffff:127'))
return ip == '::1' or ip == '0:0:0:0:0:0:0:1' # IPv6 loopback
def getipaddrinfo(host):
"""
Filter out non-IP and bad IP addresses from getaddrinfo
"""
try:
return [addrinfo for addrinfo in socket.getaddrinfo(host, None)
if (addrinfo[0] == socket.AF_INET or
addrinfo[0] == socket.AF_INET6)
and isinstance(addrinfo[4][0], basestring)]
except socket.error:
return []
| mit | -7,430,571,778,533,494,000 | 31.650568 | 97 | 0.599495 | false |
beccannlittle/gateway | gateway/controllers/minigame/aha.py | 1 | 1033 | """
aha minigame.
introduces user to the game
and assists choosing an idea for a venture
"""
from django.contrib.auth import logout, authenticate, login
from django.shortcuts import render
from gateway.models.Aha import Aha, Aha_Form
from gateway.models.venture import Venture
from django.db import transaction
def aha_view(request):
"""render aha minigame."""
form = Aha_Form
return render(
request,
'minigame/aha.html'
)
"""contains temp data for venture currently"""
with transaction.atomic():
venture = Venture(
title="Murphee's Irish Coats",
image="coat",
progress=90,
step="1",
hook="Bringing the comfort and quality of Traditional Irish coats\
to an Online International Exchange"
)
if request.method == 'POST':
form = Aha_Form(request.POST)
if form.is_valid():
aha = form.save()
venture.aha = aha
venture.save()
| mit | 2,944,371,058,039,807,500 | 26.918919 | 78 | 0.603098 | false |
zenotech/MyCluster | mycluster/slurm.py | 1 | 13281 |
from builtins import str
import os
import re
import math
from string import Template
# from datetime import timedelta
from .mycluster import check_output
from .mycluster import get_timedelta
from .mycluster import get_data
from .mycluster import load_template
from jinja2 import Environment, FileSystemLoader
"""
sacctmgr show cluster
"""
def scheduler_type():
return 'slurm'
def name():
with os.popen('sacctmgr show cluster') as f:
f.readline()
f.readline()
return f.readline().strip().split(' ')[0]
def accounts():
account_list = []
with os.popen('sacctmgr --noheader list assoc user=`id -un` format=Account') as f:
for line in f:
account_list.append(line)
return account_list
def queues():
queue_list = []
with os.popen('sinfo -sh') as f:
for line in f:
q = line.split(' ')[0].strip().replace("*", "")
queue_list.append(q)
return queue_list
def available_tasks(queue_id):
# split queue id into queue and parallel env
# list free slots
free_tasks = 0
max_tasks = 0
queue_name = queue_id
nc = node_config(queue_id)
with os.popen('sinfo -sh -p ' + queue_name) as f:
line = f.readline()
new_line = re.sub(' +', ' ', line.strip())
line = new_line.split(' ')[3]
free_tasks = int(line.split('/')[1]) * nc['max task']
max_tasks = int(line.split('/')[3]) * nc['max task']
return {'available': free_tasks, 'max tasks': max_tasks}
def tasks_per_node(queue_id):
queue_name = queue_id
tasks = 0
with os.popen('sinfo -Nelh -p ' + queue_name) as f:
line = f.readline()
new_line = re.sub(' +', ' ', line.strip())
tasks = int(new_line.split(' ')[4])
return tasks
def node_config(queue_id):
# Find first node with queue and record node config
queue_name = queue_id
tasks = 0
config = {}
with os.popen('sinfo -Nelh -p ' + queue_name) as f:
line = f.readline()
if len(line):
new_line = re.sub(' +', ' ', line.strip())
tasks = int(new_line.split(' ')[4])
memory = int(new_line.split(' ')[6])
config['max task'] = tasks
config['max thread'] = tasks
config['max memory'] = memory
else:
raise Exception(
"Requested partition %s has no nodes" % queue_name)
return config
def create_submit(queue_id, **kwargs):
queue_name = queue_id
num_tasks = 1
if 'num_tasks' in kwargs:
num_tasks = kwargs['num_tasks']
else:
raise ValueError("num_tasks must be specified")
if 'tasks_per_node' in kwargs:
tpn = kwargs['tasks_per_node']
else:
tpn = tasks_per_node(queue_id)
if 'num_threads_per_task' in kwargs:
num_threads_per_task = kwargs['num_threads_per_task']
else:
raise ValueError("num_threads_per_task must be specified")
my_name = kwargs.get('my_name', "myclusterjob")
my_output = kwargs.get('my_output', "myclusterjob.out")
my_script = kwargs.get('my_script', None)
if 'mycluster-' in my_script:
my_script = get_data(my_script)
user_email = kwargs.get('user_email', None)
project_name = kwargs.get('project_name', 'default')
wall_clock = kwargs.get('wall_clock', '12:00:00')
if ':' not in wall_clock:
wall_clock = wall_clock + ':00:00'
num_nodes = int(math.ceil(float(num_tasks) / float(tpn)))
no_syscribe = kwargs.get('no_syscribe', False)
record_job = not no_syscribe
openmpi_args = kwargs.get('openmpi_args', "-bysocket -bind-to-socket")
qos = kwargs.get('qos', None)
template = load_template('slurm.jinja')
script_str = template.render(my_name=my_name,
my_script=my_script,
my_output=my_output,
user_email=user_email,
queue_name=queue_name,
num_tasks=num_tasks,
tpn=tpn,
num_threads_per_task=num_threads_per_task,
num_nodes=num_nodes,
project_name=project_name,
wall_clock=wall_clock,
record_job=record_job,
openmpi_args=openmpi_args,
qos=qos)
return script_str
def submit(script_name, immediate, depends_on=None,
depends_on_always_run=False):
job_id = None
# Enable external specification of additional options
additional_cmd = ''
if 'MYCLUSTER_SUBMIT_OPT' in os.environ:
additional_cmd = os.environ['MYCLUSTER_SUBMIT_OPT']
if not immediate:
if depends_on and depends_on_always_run:
with os.popen('sbatch %s --kill-on-invalid-dep=yes --dependency=afterany:%s %s' % (additional_cmd, depends_on, script_name)) as f:
output = f.readline()
try:
job_id = int(output.split(' ')[-1].strip())
except:
print(('Job submission failed: ' + output))
elif depends_on is not None:
with os.popen('sbatch %s --kill-on-invalid-dep=yes --dependency=afterok:%s %s' % (additional_cmd, depends_on, script_name)) as f:
output = f.readline()
try:
job_id = int(output.split(' ')[-1].strip())
except:
print(('Job submission failed: ' + output))
else:
with os.popen('sbatch %s %s' % (additional_cmd, script_name)) as f:
output = f.readline()
try:
job_id = int(output.split(' ')[-1].strip())
except:
print(('Job submission failed: ' + output))
# Get job id and record in database
else:
with os.popen('grep -- "SBATCH -p" ' + script_name + ' | sed \'s/#SBATCH//\'') as f:
partition = f.readline().rstrip()
with os.popen('grep -- "SBATCH --nodes" ' + script_name + ' | sed \'s/#SBATCH//\'') as f:
nnodes = f.readline().rstrip()
with os.popen('grep -- "SBATCH --ntasks" ' + script_name + ' | sed \'s/#SBATCH//\'') as f:
ntasks = f.readline().rstrip()
with os.popen('grep -- "SBATCH -A" ' + script_name + ' | sed \'s/#SBATCH//\'') as f:
project = f.readline().rstrip()
with os.popen('grep -- "SBATCH -J" ' + script_name + ' | sed \'s/#SBATCH//\'') as f:
job = f.readline().rstrip()
cmd_line = 'salloc --exclusive ' + nnodes + ' ' + partition + ' ' + \
ntasks + ' ' + project + ' ' + job + ' bash ./' + script_name
print(cmd_line)
try:
output = check_output(cmd_line, shell=True)
try:
job_id = int(output.split(' ')[-1].strip())
except:
print(('Job submission failed: ' + output))
except:
print(('Job submission failed: ' + cmd_line))
return job_id
def delete(job_id):
with os.popen('scancel ' + job_id) as f:
pass
def status():
status_dict = {}
with os.popen('squeue -u `whoami`') as f:
try:
f.readline() # read header
for line in f:
new_line = re.sub(' +', ' ', line.strip())
job_id = int(new_line.split(' ')[0])
state = new_line.split(' ')[4]
if state == 'R':
status_dict[job_id] = 'r'
else:
status_dict[job_id] = state
except Exception as e:
print(e)
return status_dict
def job_stats(job_id):
stats_dict = {}
with os.popen('sacct --noheader --format JobId,Elapsed,TotalCPU,Partition,NTasks,AveRSS,State,ExitCode -P -j ' + str(job_id)) as f:
try:
line = f.readline()
first_line = line.split('|')
line = f.readline()
if len(line) > 0:
next_line = line.split('|')
wallclock_str = first_line[1]
stats_dict['wallclock'] = get_timedelta(wallclock_str)
cpu_str = first_line[2]
stats_dict['cpu'] = get_timedelta(cpu_str)
if len(first_line[3]) > 0:
stats_dict['queue'] = first_line[3]
elif next_line:
stats_dict['queue'] = next_line[3]
if len(first_line[4]) > 0:
stats_dict['ntasks'] = int(first_line[4])
elif next_line:
stats_dict['ntasks'] = int(next_line[4])
if len(first_line[6]) > 0:
stats_dict['status'] = first_line[6]
elif next_line:
stats_dict['status'] = next_line[6]
if len(first_line[7]) > 0:
stats_dict['exit_code'] = int(first_line[7].split(':')[0])
elif next_line:
stats_dict['exit_code'] = int(next_line[7].split(':')[0])
# stats_dict['mem'] = 0 #float(new_line.split('
# ')[4])*int(new_line.split(' ')[3])
except:
print('SLURM: Error reading job stats')
with os.popen('squeue --format %%S -h -j ' + str(job_id)) as f:
try:
line = f.readline()
if len(line) > 0:
stats_dict['start_time'] = line
else:
stats_dict['start_time'] = ""
except:
print('SLURM: Error getting start time')
return stats_dict
def job_stats_enhanced(job_id):
"""
Get full job and step stats for job_id
"""
stats_dict = {}
with os.popen('sacct --noheader --format JobId,Elapsed,TotalCPU,Partition,NTasks,AveRSS,State,ExitCode,start,end -P -j ' + str(job_id)) as f:
try:
line = f.readline()
if line in ["SLURM accounting storage is disabled",
"slurm_load_job error: Invalid job id specified"]:
raise
cols = line.split('|')
stats_dict['job_id'] = cols[0]
stats_dict['wallclock'] = get_timedelta(cols[1])
stats_dict['cpu'] = get_timedelta(cols[2])
stats_dict['queue'] = cols[3]
stats_dict['status'] = cols[6]
stats_dict['exit_code'] = cols[7].split(':')[0]
stats_dict['start'] = cols[8]
stats_dict['end'] = cols[9]
steps = []
for line in f:
step = {}
cols = line.split('|')
step_val = cols[0].split('.')[1]
step['step'] = step_val
step['wallclock'] = get_timedelta(cols[1])
step['cpu'] = get_timedelta(cols[2])
step['ntasks'] = cols[4]
step['status'] = cols[6]
step['exit_code'] = cols[7].split(':')[0]
step['start'] = cols[8]
step['end'] = cols[9]
steps.append(step)
stats_dict['steps'] = steps
except:
with os.popen('squeue -h -j %s' % str(job_id)) as f:
try:
for line in f:
new_line = re.sub(' +', ' ', line.strip())
job_id = int(new_line.split(' ')[0])
state = new_line.split(' ')[4]
stats_dict['job_id'] = str(job_id)
stats_dict['status'] = state
except:
print('SLURM: Error reading job stats')
stats_dict['status'] = 'UNKNOWN'
with os.popen('squeue --format %%S -h -j ' + str(job_id)) as f:
try:
line = f.readline()
if len(line) > 0:
stats_dict['start_time'] = line
else:
stats_dict['start_time'] = ""
except:
print('SLURM: Error getting start time')
return stats_dict
def is_in_queue(job_id):
with os.popen('squeue -j %s' % job_id) as f:
try:
f.readline() # read header
for line in f:
new_line = re.sub(' +', ' ', line.strip())
q_id = int(new_line.split(' ')[0])
if q_id == job_id:
return True
except e:
pass
return False
def running_stats(job_id):
stats_dict = {}
with os.popen('sacct --noheader --format Elapsed -j ' + str(job_id)) as f:
try:
line = f.readline()
new_line = re.sub(' +', ' ', line.strip())
stats_dict['wallclock'] = get_timedelta(new_line)
except:
pass
with os.popen('sstat --noheader --format AveCPU,AveRSS,NTasks -j ' + str(job_id)) as f:
try:
line = f.readline()
new_line = re.sub(' +', ' ', line.strip())
ntasks = int(new_line.split(' ')[2])
stats_dict['mem'] = (
float(new_line.split(' ')[1].replace('K', '')) * ntasks)
stats_dict['cpu'] = '-' # float(new_line.split(' ')[0])*ntasks
except:
pass
return stats_dict
| bsd-3-clause | -1,084,601,180,716,060,200 | 32.369347 | 145 | 0.496198 | false |
zerotired/kotori | kotori/io/protocol/influx.py | 1 | 3796 | # -*- coding: utf-8 -*-
# (c) 2016-2017 Andreas Motl <[email protected]>
import arrow
from arrow.parser import DateTimeParser
from datetime import datetime, timedelta
from twisted.logger import Logger
from kotori.io.protocol.util import is_number
from kotori.util.common import tdelta
log = Logger()
class QueryTransformer(object):
@classmethod
def transform(cls, data):
"""
Compute InfluxDB query expression from data in transformation dictionary.
Also compute date range from query parameters "from" and "to".
"""
from pyinfluxql import Query
from pyinfluxql.functions import Mean
measurement = data.measurement
# Vanilla QL (v1)
#expression = 'SELECT * FROM {measurement}'.format(measurement=measurement)
# PyInfluxQL (v2)
# https://github.com/jjmalina/pyinfluxql
# Labs
#time_begin = arrow.utcnow() - arrow.Arrow(hour=1)
#expression = Query('*').from_(measurement).where(time__gt=datetime.utcnow() - timedelta(hours=1))
#expression = Query(Mean('*')).from_(measurement).where(time__gt=datetime.now() - timedelta(1)).group_by(time=timedelta(hours=1))
# Fix up "measurement" if starting with numeric value
# TODO: Fix should go to pyinfluxql
if is_number(measurement[0]):
measurement = '"{measurement}"'.format(measurement=measurement)
# TODO: Use ".date_range" API method
time_begin, time_end = compute_daterange(data.get('from'), data.get('to'))
tags = {}
#tags = InfluxDBAdapter.get_tags(data)
expression = Query('*').from_(measurement).where(time__gte=time_begin, time__lte=time_end, **tags)
result = {
'expression': str(expression),
'time_begin': time_begin,
'time_end': time_end,
}
return result
def get_timedelta(expression):
# TODO: Use pandas' Timedelta. Timedelta('1m2s')
# http://pandas.pydata.org/pandas-docs/stable/timedeltas.html
# FIXME: Sanitize expression
code = expression
delta_raw = code.replace('now-', '')
if code != delta_raw:
code = code.replace(delta_raw, 'delta')
# "code" should now be "now-delta"
#print 'code:', code
now = datetime.utcnow()
delta = tdelta(delta_raw)
# FIXME: This is nasty
try:
td = eval(code)
except:
raise ValueError('Unknown expression: {expression}'.format(expression=expression))
return td
def compute_daterange(raw_begin, raw_end):
# Defaults
raw_begin = raw_begin or 'now-10d'
raw_end = raw_end or 'now'
# Parse dates, absolute or relative
time_begin = grok_datetime(raw_begin)
time_end = grok_datetime(raw_end)
# If end of date range is supplied as date without time ('YYYY-MM-DD' or 'YYYYMMDD'),
# add appropriate offset to mean "end of day" (DWIM).
if 8 <= len(raw_end) <= 10:
offset_endofday = tdelta('23h59m59s') + timedelta(microseconds = 999999)
time_end += offset_endofday
return time_begin, time_end
def grok_datetime(dstring):
more_formats = ['YYYYMMDDTHHmmss', 'YYYYMMDDTHHmmssZ', 'YYYYMMDD']
parser = DateTimeParser()
parser.SEPARATORS += ['']
# Try to parse datetime string in regular ISO 8601 format
try:
return parser.parse_iso(dstring)
# Fall back to parse datetime string in additional convenience formats
except arrow.parser.ParserError as ex:
for format in more_formats:
try:
return parser.parse(dstring, format)
except arrow.parser.ParserError as ex:
pass
# Fall back to attempt to parse as relative datetime expression, e.g. "now-10m"
return get_timedelta(dstring)
| agpl-3.0 | 5,612,831,170,566,948,000 | 29.612903 | 137 | 0.637513 | false |
pchumphreys/ubiquitous-quack | commandLineFileBrowser.py | 1 | 21059 | # Allow access to command-line arguments
import sys
import os
import subprocess
import markdown
import codecs
import pickle
# Import the core and GUI elements of Qt
from PySide.QtCore import *
from PySide.QtGui import *
#Useful script!
from send2trash import send2trash
# Extra module for markdown
from markdownEditor import *
# Remember that we're omitting the import
# section from our examples for brevity
# Every Qt application must have one and only one QApplication object
# it receives the command line arguments passed to the script, as they
# can be used to customize the application's appearance and behavior
qt_app = QApplication.instance()
if qt_app is None:
qt_app = QApplication(sys.argv)
# Class for storing and dealing with metadata!
class Metadata():
@Slot()
def handleDeletedFile(self,fileName):
self.removeFileDescriptions(fileName)
@Slot()
def handleChangedDescription(self,fileName,fileDesc):
self.setFileDescription(fileName,fileDesc)
def __init__(self,filePath = QDir.currentPath()):
self._metadatapath = ''
self._loadMetadataFile(filePath)
def _loadMetadataFile(self,filePath):
# Only load if have changed directory
if self._metadatapath != filePath:
if QFile(filePath + "/" + ".metadata.p").exists():
# Note for future: may need to use QDir::toNativeSeparators( path )
self._metadata = pickle.load( open(filePath + "/" + ".metadata.p", "rb" ) )
self._metadatapath = filePath
self._metadataFileExists = True
else:
self.initialiseMetadata()
self._metadatapath = filePath
self._metadataFileExists = False
def _saveMetadataFile(self):
if self._metadatapath == '':
print('Something has gone wrong!')
else:
pickle.dump(self._metadata, open(self._metadatapath + "/" + ".metadata.p", "wb" ) )
self._metadataFileExists = True
def update(self,filePath):
self._loadMetadataFile(filePath)
self.cleanFileDescriptions()
def initialiseMetadata(self):
self._metadata = dict()
#File description stores the descriptions for both the files and the folders
def getFileDescription(self,fileName):
if 'fileDescriptions' in self._metadata:
if fileName in self._metadata['fileDescriptions']:
return self._metadata['fileDescriptions'][fileName]
else:
return ''
else:
return ''
def setFileDescription(self,fileName,newDesc):
# Update the metadata
if 'fileDescriptions' not in self._metadata:
self._metadata['fileDescriptions'] = dict()
self._metadata['fileDescriptions'][fileName] = newDesc
# Save to disk
self._saveMetadataFile()
def removeFileDescriptions(self,fileNames):
# Delete the metadata
if isinstance(fileNames, list):
for fileName in fileNames:
self._metadata['fileDescriptions'].pop(fileName, None)
else:
self._metadata['fileDescriptions'].pop(fileNames, None)
# Save to disk
self._saveMetadataFile()
def cleanFileDescriptions(self):
if 'fileDescriptions' in self._metadata:
toDeleteFileNames = []
for fileName in self._metadata['fileDescriptions']:
if not QFile(self._metadatapath + "/" + fileName).exists():
print fileName
toDeleteFileNames.append(fileName)
self.removeFileDescriptions(toDeleteFileNames)
# Save to disk
self._saveMetadataFile()
# Helper class for use with the basicViewWidget
class CustomSortingModel(QSortFilterProxyModel):
def lessThan(self,left,right):
col = left.column()
dataleft = left.data()
dataright = right.data()
if (col == 3) or (col == 4):
dataleft = QDate.fromString(dataleft, "dd/MM/yyyy").addYears(100)
dataright = QDate.fromString(dataright, "dd/MM/yyyy").addYears(100)
return dataleft < dataright
# Class that has the basic functionality for the file and folder views.
class basicViewWidget(QWidget):
# Emitted when user wants to open file in application
requestedToOpenFile = Signal(str)
# Emitted when user modifies the description of a file.
fileDescriptionChanged = Signal(str,str)
# Emitted when file deleted
fileDeleted = Signal(str)
def __init__(self,parent=None):
QWidget.__init__(self, parent=None)
self._setupView()
self.viewLayout = QVLayout(self)
self.viewLayout.addWidget(fileView)
def _setupView(self):
# This model is used to store the view data
self.fileListModel = QStandardItemModel(self)
self.fileListProxyModel = CustomSortingModel(self)
self.fileListProxyModel.setSourceModel(self.fileListModel)
self.setHorizontalHeaderLabels()
# Create a QListView to show files.
self.fileView = QTableView(self)
self.fileView.setModel(self.fileListProxyModel)
self.fileView.verticalHeader().setVisible(False)
self.fileView.horizontalHeader().setResizeMode(QHeaderView.Interactive)
# Add the signal for double clicking on files
self.fileView.doubleClicked.connect(self._openFileInApp)
# Right click menu:
self.fileView.setContextMenuPolicy(Qt.CustomContextMenu)
self.fileView.customContextMenuRequested.connect(self._contextMenuRequested)
def setHorizontalHeaderLabels(self,labels = []):
self._horizontalHeaderLabels = labels
def populateView(self,items):
self.fileListModel.clear()
for item in items:
self.fileListModel.appendRow(item)
# For some reason, this needs to be after clear
self.setHorizontalHeaderLabels(self._horizontalHeaderLabels)
self._adjustViewOnPopulate()
def _adjustViewOnPopulate(self):
self.fileView.setColumnHidden(0,True)
# This sets the table to fit the data
tableHeight = self.fileView.horizontalHeader().height() + 2
for i in range(0,self.fileListModel.rowCount()):
tableHeight += self.fileView.rowHeight(i)
self.fileView.setMaximumHeight(tableHeight)
self.fileView.setMinimumHeight(tableHeight)
def _openFileInApp(self,index):
# Emit signal to open file in application
self.requestedToOpenFile.emit(self._getFilePath(index))
def _editFileDescription(self,index):
fileName = self.fileListProxyModel.data(self.fileListProxyModel.index(index.row(),1),0)
fileDesc = self.fileListProxyModel.data(self.fileListProxyModel.index(index.row(),2),2)
text, ok = QInputDialog.getText(win, '',
"Edit description for " + fileName + ":",QLineEdit.Normal,fileDesc)
if ok & (text != fileDesc):
self.fileListModel.setData(self.fileListProxyModel.mapToSource(self.fileListProxyModel.index(index.row(),2)),text,2)
# Emit signal so that metadata class can pick up changes
self.fileDescriptionChanged.emit(fileName,text)
def _deleteFile(self,index):
fileName = self.fileListProxyModel.data(self.fileListProxyModel.index(index.row(),1),0)
filePath = self.fileListProxyModel.data(self.fileListProxyModel.index(index.row(),0),0)
reply = QMessageBox.question(win, fileName,
"Are you sure you want to delete " + fileName + "?", QMessageBox.Yes |
QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
send2trash(filePath)
# Emit signal so that metadata class can pick up changes
self.fileDeleted.emit(fileName)
def _getFilePath(self,index):
return self.fileListProxyModel.data(self.fileListProxyModel.index(index.row(),0),0)
#Add right click menu
def _contextMenuRequested(self,pos):
# Get the file index!
index= self.fileView.indexAt(pos)
filePath = self._getFilePath(index)
# Make a menu
menu= QMenu(win)
if QFileInfo(filePath).completeSuffix() == 'md':
openAction = QAction("Open in viewer", win)
# Lambdas are a cool way of making anonymous functions.
# See e.g. http://stackoverflow.com/questions/23846669/can-i-utilize-pyside-clicked-connect-to-connect-a-function-which-has-a-parameter
openAction.triggered.connect(lambda: self._openFileInApp(filePath))
menu.addAction(openAction)
openAction = QAction("Open externally", win)
# Lambdas are a cool way of making anonymous functions.
# See e.g. http://stackoverflow.com/questions/23846669/can-i-utilize-pyside-clicked-connect-to-connect-a-function-which-has-a-parameter
openAction.triggered.connect(lambda: openInOS(filePath))
menu.addAction(openAction)
editDescAction = QAction("Edit description", win)
editDescAction.triggered.connect(lambda: self._editFileDescription(index))
menu.addAction(editDescAction)
deleteAction = QAction("Delete", win)
deleteAction.triggered.connect(lambda: self._deleteFile(index))
menu.addAction(deleteAction)
menu.popup(self.fileView.viewport().mapToGlobal(pos))
class fileViewWidget(basicViewWidget):
def __init__(self,parent=None):
QWidget.__init__(self, parent=None)
self._setupView()
self.setHorizontalHeaderLabels(['File Path','File Name','Description','Date Modified','Date Created'])
self.fileView.setSortingEnabled(True)
self.fileViewLabel = QLabel(win)
self.fileViewLabel.setText('Files')
self.fileViewLabel.setAlignment(Qt.AlignLeft)
f = QFont()
f.setPointSize(16)
#f.setBold(True)
self.fileViewLabel.setFont(f)
self.viewLayout = QVBoxLayout(self)
self.viewLayout.addWidget(self.fileViewLabel)
self.viewLayout.addWidget(self.fileView)
# Stop from being able to edit
self.fileView.setEditTriggers(QAbstractItemView.NoEditTriggers)
# Select by row
self.fileView.setSelectionMode(QAbstractItemView.SingleSelection)
self.fileView.setSelectionBehavior(QAbstractItemView.SelectRows)
# Overload function
def _adjustViewOnPopulate(self):
basicViewWidget._adjustViewOnPopulate(self)
self.fileView.horizontalHeader().setResizeMode(1,QHeaderView.Interactive)
self.fileView.horizontalHeader().setResizeMode(2,QHeaderView.Stretch)
self.fileView.horizontalHeader().setResizeMode(3,QHeaderView.Fixed)
self.fileView.horizontalHeader().setResizeMode(4,QHeaderView.Fixed)
self.fileView.resizeColumnToContents(1)
self.fileView.resizeColumnToContents(3)
self.fileView.resizeColumnToContents(4)
class folderViewWidget(basicViewWidget):
changeBrowserDirUp = Signal()
def __init__(self,parent=None):
QWidget.__init__(self, parent=None)
self._setupView()
self.setCurrentBrowserFilePath()
self.setHorizontalHeaderLabels(['Folder Path','Folder Name','Description'])
self.fileView.setSortingEnabled(True)
# Stop from being able to edit
self.fileView.setEditTriggers(QAbstractItemView.NoEditTriggers)
# Select by row
self.fileView.setSelectionMode(QAbstractItemView.SingleSelection)
self.fileView.setSelectionBehavior(QAbstractItemView.SelectRows)
# Add a folder address label
self.folderAddressLabel = QLabel(win)
self.folderAddressLabel.setAlignment(Qt.AlignLeft)
# Add a folder name label
self.folderNameLabel = QLabel(win)
self.folderNameLabel.setAlignment(Qt.AlignLeft)
f = QFont()
f.setPointSize(18)
#f.setBold(True)
self.folderNameLabel.setFont(f)
self.navigationButtonRegion = QHBoxLayout()
self.viewLayout = QVBoxLayout(self)
self.viewLayout.addWidget(self.folderNameLabel)
self.viewLayout.addWidget(self.folderAddressLabel)
self.folderBrowserLabel = QLabel(win)
self.folderBrowserLabel.setText("Folders")
self.folderBrowserLabel.setAlignment(Qt.AlignLeft)
f = QFont()
f.setPointSize(16)
#f.setBold(True)
self.folderBrowserLabel.setFont(f)
#Create an upbutton
self.dirUpButton = QPushButton("Up", win)
self.dirUpButton.clicked.connect(self._cdUpHandler)
#Create an open in os button
self.openInOSButton = QPushButton("Open in explorer", win)
self.openInOSButton.clicked.connect(self._openInOSButtonHandler)
#Create an open in os button
self.createFolderButton = QPushButton("Create new folder", win)
self.createFolderButton.clicked.connect(self._createNewFolderHandler)
self.navigationButtonRegion.addWidget(self.dirUpButton)
self.navigationButtonRegion.addWidget(self.openInOSButton)
self.navigationButtonRegion.addWidget(self.createFolderButton)
self.navigationButtonRegion.addStretch(1)
self.viewLayout.addLayout(self.navigationButtonRegion)
self.viewLayout.addWidget(self.folderBrowserLabel)
self.viewLayout.addWidget(self.fileView)
# Overload function
def _adjustViewOnPopulate(self):
basicViewWidget._adjustViewOnPopulate(self)
self.fileView.horizontalHeader().setResizeMode(1,QHeaderView.Interactive)
self.fileView.resizeColumnToContents(1)
self.fileView.horizontalHeader().setStretchLastSection(True)
def setCurrentBrowserFilePath(self, path = ''):
self._currentBrowserFilePath = path
def _cdUpHandler(self):
self.changeBrowserDirUp.emit()
def _openInOSButtonHandler(self):
openInOS(self._currentBrowserFilePath)
def _createNewFolderHandler(self):
text, ok = QInputDialog.getText(win, '',
"Folder name",QLineEdit.Normal)
if ok :
newFolderPath = self._currentBrowserFilePath + '//' + text
if QDir(newFolderPath).exists():
QMessageBox.warning(win, "Error",
"Folder already exists!")
else:
QDir().mkdir(newFolderPath)
def updateLabels(self,folderName,folderAddress):
self.folderNameLabel.setText(folderName)
self.folderAddressLabel.setText(self.folderAddressLabelHelper(folderAddress))
def folderAddressLabelHelper(self,path):
parts = self.os_path_split_asunder(path)
if len(parts) > 2:
parts = parts[-3:]
return os.path.join('...',*parts)
def os_path_split_asunder(self,path, debug=False):
parts = []
while True:
newpath, tail = os.path.split(path)
if debug: print repr(path), (newpath, tail)
if newpath == path:
assert not tail
if path: parts.append(path)
break
parts.append(tail)
path = newpath
parts.reverse()
return parts
class fileBrowserWidget(QWidget):
requestedToOpenFile = Signal(str)
def __init__(self,parent=None,filePath = QDir.currentPath()):
QWidget.__init__(self, parent=None)
# -------------------------------
# Set up the file/folder handling model
self.fileModel = QFileSystemModel(self)
# Metadata handling class
self.metadata = Metadata(filePath)
# Add the two view widgets
self.fileViewWidget = fileViewWidget(self)
self.folderViewWidget = folderViewWidget(self)
# Connect up signals
self.fileViewWidget.fileDescriptionChanged.connect(self.metadata.handleChangedDescription)
self.fileViewWidget.fileDeleted.connect(self.metadata.handleDeletedFile)
self.folderViewWidget.fileDescriptionChanged.connect(self.metadata.handleChangedDescription)
self.folderViewWidget.fileDeleted.connect(self.metadata.handleDeletedFile)
self.fileViewWidget.requestedToOpenFile.connect(self.openFileInViewerHandler)
self.folderViewWidget.requestedToOpenFile.connect(self.setCurrentBrowsingPath)
self.folderViewWidget.changeBrowserDirUp.connect(self.changeBrowsingPathUp)
# Call this to reload once the directory is successfully loaded! This is really important
self.fileModel.directoryLoaded.connect(self.updateBrowser)
# Make it so that can only select in one table at once!
self.fileViewWidget.fileView.pressed.connect(self.folderViewWidget.fileView.clearSelection)
self.folderViewWidget.fileView.pressed.connect(self.fileViewWidget.fileView.clearSelection)
self.layout = QVBoxLayout()
self.setLayout(self.layout)
self.layout.addWidget(self.folderViewWidget)
self.layout.addWidget(self.fileViewWidget)
self.layout.addStretch(1)
self.setMinimumWidth(450)
self.setMaximumWidth(800)
# Pal = QPalette()
# Pal.setColor(QPalette.Background, Qt.black)
# insideBrowserScrollAreaWidget.setAutoFillBackground(True)
# insideBrowserScrollAreaWidget.setPalette(Pal)
# insideBrowserScrollAreaWidget.show()
#Set the current browsing path
self.setCurrentBrowsingPath(filePath)
#This value is used to store where we are in the file system
def currentBrowsingPath(self):
return self._currentBrowsingPath
def setCurrentBrowsingPath(self,newPath):
self._currentBrowsingPath = newPath
self.fileModel.setRootPath(newPath)
self.updateBrowser(newPath)
# Function to move up one level
def changeBrowsingPathUp(self):
parentPath = self.fileModel.filePath(self.fileModel.parent(self.fileModel.index(self.currentBrowsingPath())))
self.setCurrentBrowsingPath(parentPath)
# This function stitches everything together, handling updates when the directory changes
def updateBrowser(self,filePath):
self.metadata.update(filePath)
self.folderViewWidget.setCurrentBrowserFilePath(filePath)
self.populateViews(filePath)
self.updateFolderViewWidgetLabels(filePath)
def updateFolderViewWidgetLabels(self,filePath):
currentLocation = QDir(self.currentBrowsingPath())
self.folderViewWidget.updateLabels(currentLocation.dirName(),currentLocation.absolutePath())
# This function pulls out the data from the file system model, and hands it to the views.
def populateViews(self,filePath):
index = self.fileModel.index(filePath)
# Find number of children in the file model
numRows = self.fileModel.rowCount(index)
# Initialise array to store temporarily.
folderViewEntries = []
fileViewEntries = []
for row in range(0,numRows):
childIndex = self.fileModel.index(row, 0, index)
fileName = self.fileModel.fileName(childIndex)
fileInfo = self.fileModel.fileInfo(childIndex)
filePath = self.fileModel.filePath(childIndex)
fileCreated = fileInfo.created()
fileModified = fileInfo.lastModified()
fileDesc = self.metadata.getFileDescription(fileName)
if fileInfo.isDir():
folderViewEntries.append([QStandardItem(filePath), QStandardItem(fileName), QStandardItem(fileDesc)])
else:
if fileName != 'metadata.p':
fileViewEntries.append([QStandardItem(filePath), QStandardItem(fileName),
QStandardItem(fileDesc),QStandardItem(fileModified.toString("dd/MM/yyyy")),QStandardItem(fileCreated.toString("dd/MM/yyyy"))])
self.fileViewWidget.populateView(fileViewEntries)
self.folderViewWidget.populateView(folderViewEntries)
def openFileInViewerHandler(self,filePath):
self.requestedToOpenFile.emit(filePath)
class fileContentViewerWidget(QWidget):
def __init__(self,parent=None):
QWidget.__init__(self, parent=None)
# Create a QTabWidget to hold rendered files.
self.fileContentTabs = QTabWidget(win)
self.fileContentTabs.setTabsClosable(True)
self.fileContentTabs.tabCloseRequested.connect(self.fileContentTabs_closeTab)
# This could be useful at some point, but makes it a bit weird at the moment:
#fileContentTabs.setDocumentMode(False)
self._openViewerTabs = dict()
layout = QVBoxLayout()
layout.addWidget(self.fileContentTabs)
self.setLayout(layout)
self.setMinimumWidth(500)
@Slot()
def openFileInViewer(self,filePath):
file = QFile(filePath)
fileName = file.fileName(index)
fileInfo = file.fileInfo(index)
#filePath = fileModel.filePath(index)
#TODO add in to viewer
#fileCreated = fileInfo.created()
#fileModified = fileInfo.lastModified()
#fileDesc = metadata.getFileDescription(fileName)
if filePath in self._openViewerTabs:
self.fileContentTabs.setCurrentIndex(self._openViewerTabs[filePath])
else:
if fileInfo.completeSuffix() == 'md':
input_file = codecs.open(filePath, mode="r", encoding="utf-8")
text = input_file.read()
renderedText = markdown.markdown(text,extensions=["markdown.extensions.tables", "markdown.extensions.fenced_code", "markdown.extensions.toc"])
# CSS style for markdown
renderedText = packageHTMLwithCSS(renderedText)
fileContent = QWebView()
fileContent.setHtml(renderedText)
int = self.fileContentTabs.addTab(fileContent,fileName)
self._openViewerTabs[filePath] = int
self.fileContentTabs.setCurrentIndex(int)
# Otherwise just open externally
else:
openInOS(filePath)
def fileContentTabs_closeTab(self,int):
for filePath, intVal in self._openViewerTabs.items():
if intVal == int:
self._openViewerTabs.pop(filePath, None)
self.fileContentTabs.removeTab(int)
# Function that is useful for all widgets
def openInOS(path):
try:
os.startfile(path)
except:
subprocess.Popen(['open', path])
# -------------------------------
# Setting up the gui
# Create a window, set its size
win = QWidget()
win.setWindowTitle('Notebook Browser')
win.setMinimumSize(800, 800)
browserWidget = fileBrowserWidget()
contentWidget = fileContentViewerWidget()
# Deal with the layout
browserScrollArea = QScrollArea(win)
browserScrollArea.setWidgetResizable(True)
browserScrollArea.setWidget(browserWidget)
contentScrollArea = QScrollArea(win)
contentScrollArea.setWidgetResizable(True)
contentScrollArea.setWidget(contentWidget)
fullFrame = QHBoxLayout(win)
splitter = QSplitter(Qt.Horizontal)
splitter.addWidget(browserScrollArea)
splitter.addWidget(contentScrollArea)
splitter.setStretchFactor(2,1)
splitter.setStretchFactor(1,2)
fullFrame.addWidget(splitter)
desktop = QDesktopWidget()
screenSize = desktop.availableGeometry()
win.setGeometry(screenSize.width() * 0.05,0,screenSize.width() * 0.8,screenSize.height()*0.85)
win.setLayout(fullFrame)
win.show()
qt_app.exec_()
| apache-2.0 | -8,520,088,722,618,856,000 | 31.751166 | 147 | 0.75355 | false |
NuGrid/NuGridPy | nugridpy/regression_tests/selftest.py | 1 | 5252 | from __future__ import absolute_import
from builtins import str
from builtins import range
import matplotlib
matplotlib.use('agg')
import unittest
from .tempdir.tempfile_ import TemporaryDirectory
class TestModuleImports(unittest.TestCase):
def test_import_ascii_table(self):
import nugridpy.ascii_table
def test_import_astronomy(self):
import nugridpy.astronomy
def test_import_data_plot(self):
import nugridpy.data_plot
def test_import_grain(self):
import nugridpy.grain
def test_import_h5T(self):
import nugridpy.h5T
def test_import_mesa(self):
import nugridpy.mesa
def test_import_nugridse(self):
import nugridpy.nugridse
def test_import_ppn(self):
import nugridpy.ppn
def test_import_utils(self):
import nugridpy.utils
class TestAbuChart(unittest.TestCase):
def test_abu_chart(self):
from nugridpy import utils,ppn,data_plot
import matplotlib
matplotlib.use('agg')
import matplotlib.pylab as mpy
import os
# Perform tests within temporary directory
with TemporaryDirectory() as tdir:
# wget the data for a ppn run from the CADC VOspace
n = 3
for cycle in range(0,n):
cycle_str = str(cycle).zfill(2)
os.system("wget -q --content-disposition --directory '" + tdir + "' "
+ "'http://www.canfar.phys.uvic.ca/vospace/synctrans?TARGET="\
+ "vos%3A%2F%2Fcadc.nrc.ca%21vospace%2Fnugrid%2Fdata%2Fprojects%2Fppn%2Fexamples%2F"\
+ "ppn_Hburn_simple%2Fiso_massf000" + cycle_str + ".DAT&DIRECTION=pullFromVoSpace&PROTOCOL"\
+ "=ivo%3A%2F%2Fivoa.net%2Fvospace%2Fcore%23httpget'")
# test_data_dir should point to the correct location of a set of abundances data file
#nugrid_dir= os.path.dirname(os.path.dirname(ppn.__file__))
#NuPPN_dir= nugrid_dir + "/NuPPN"
#test_data_dir= NuPPN_dir + "/examples/ppn_C13_pocket/master_results"
p=ppn.abu_vector(tdir) # TODO: this function fails to raise an exception if path is not found!
mp=p.get('mod')
if len(mp) == 0:
raise IOError("Cannot locate a set of abundance data files")
sparse=10
cycles=mp[:1000:sparse]
form_str='%6.1F'
form_str1='%4.3F'
i=0
for cyc in cycles:
T9 = p.get('t9',fname=cyc)
Rho = p.get('rho',fname=cyc)
mod = p.get('mod',fname=cyc)
# time= p.get('agej',fname=cyc)*utils.constants.one_year
time= p.get('agej',fname=cyc)
mpy.close(i);mpy.figure(i);i += 1
p.abu_chart(cyc,mass_range=[0,41],plotaxis=[-1,22,-1,22],lbound=(-6,0),show=False)
mpy.title(str(mod)+' t='+form_str%time+'yr $T_9$='+form_str1%T9+' $\\rho$='+str(Rho))
png_file='abu_chart_'+str(cyc).zfill(len(str(max(mp))))+'.png'
mpy.savefig(png_file)
self.assertTrue(os.path.exists(png_file))
os.remove(png_file)
def test_abu_evolution(self):
from nugridpy import ppn, utils
import matplotlib
matplotlib.use('agg')
import matplotlib.pylab as mpy
import os
# Perform tests within temporary directory
with TemporaryDirectory() as tdir:
# wget the data for a ppn run from the CADC VOspace
os.system("wget -q --content-disposition --directory '" + tdir + "' "\
+ "'http://www.canfar.phys.uvic.ca/vospace/synctrans?TARGET="\
+ "vos%3A%2F%2Fcadc.nrc.ca%21vospace%2Fnugrid%2Fdata%2Fprojects%2Fppn%2Fexamples%2F"\
+ "ppn_Hburn_simple%2Fx-time.dat&DIRECTION=pullFromVoSpace&PROTOCOL"\
+ "=ivo%3A%2F%2Fivoa.net%2Fvospace%2Fcore%23httpget'")
#nugrid_dir= os.path.dirname(os.path.dirname(ppn.__file__))
#NuPPN_dir= nugrid_dir + "/NuPPN"
#test_data_dir= NuPPN_dir + "/examples/ppn_Hburn_simple/RUN_MASTER"
symbs=utils.symbol_list('lines2')
x=ppn.xtime(tdir)
specs=['PROT','HE 4','C 12','N 14','O 16']
i=0
for spec in specs:
x.plot('time',spec,logy=True,logx=True,shape=utils.linestyle(i)[0],show=False,title='')
i += 1
mpy.ylim(-5,0.2)
mpy.legend(loc=0)
mpy.xlabel('$\log t / \mathrm{min}$')
mpy.ylabel('$\log X \mathrm{[mass fraction]}$')
abu_evol_file = 'abu_evolution.png'
mpy.savefig(abu_evol_file)
self.assertTrue(os.path.exists(abu_evol_file))
class ImageCompare(unittest.TestCase):
def test_ppnHburn_abucharts(self):
from .ImageCompare.abu_chart import load_chart_files
from .ImageCompare.compare_image_entropy import compare_images
with TemporaryDirectory() as tdir:
load_chart_files(tdir)
compare_images(tdir)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -3,208,474,241,935,057,400 | 37.617647 | 118 | 0.573305 | false |
gregoil/rotest | tests/core/test_run_delta.py | 1 | 17367 | """Test run delta feature."""
# pylint: disable=too-many-arguments
# pylint: disable=protected-access,too-many-public-methods,invalid-name
from __future__ import absolute_import
from rotest.core.runner import run
from rotest.core.models.run_data import RunData
from rotest.core.models.case_data import TestOutcome
from rotest.core.result.handlers.db_handler import DBHandler
from tests.core.utils import (ErrorCase, SuccessCase, FailureCase, SkipCase,
MockSuite1, MockSuite2, MockNestedTestSuite,
MockTestSuite, FailTwiceCase, MockFlow,
MockFlow1, MockFlow2, SuccessBlock, FailureBlock,
SkipBlock, BasicRotestUnitTest)
class TestRunDelta(BasicRotestUnitTest):
"""Test run delta behavior on successful, failed & skipped components."""
DELTA_ITERATIONS = 3
fixtures = ['case_ut.json']
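    # Assumption noted for clarity: saving results through the DB handler is
    # what lets delta runs look up previous outcomes and skip passed tests.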
RESULT_OUTPUTS = [DBHandler]
def setUp(self):
"""Create a run data the enabled running in delta mode."""
super(TestRunDelta, self).setUp()
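        # This run data is reused for the second run of each suite, making it
        # execute in delta mode.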
self.run_data = RunData(run_delta=True)
def test_successful_case(self):
"""Test run delta with two success cases.
* Runs a suite with success cases.
        * Validates all tests were run.
* Runs the same cases with delta flag.
* Validates no test was run.
"""
MockSuite1.components = (SuccessCase,)
MockSuite2.components = (SuccessCase,)
MockTestSuite.components = (MockSuite1, MockSuite2)
main_test = MockTestSuite()
result = self.create_result(main_test)
main_test.run(result)
self.validate_result(result, True, successes=2)
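        # Delta re-run: both cases already passed, so they are reported as skips.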
main_test = MockTestSuite(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, None, skips=2)
def test_failure_case(self):
"""Test run delta with failure case.
* Runs a suite with success & failure cases.
* Validates two tests ran.
* Runs the same cases with delta flag.
* Validates that all the tests which weren't successful in previous
run, ran in the second run.
"""
MockSuite1.components = (SuccessCase,)
MockSuite2.components = (FailureCase,)
MockTestSuite.components = (MockSuite1, MockSuite2)
main_test = MockTestSuite()
result = self.create_result(main_test)
main_test.run(result)
self.validate_result(result, False, successes=1, fails=1)
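        # Delta re-run: only the failed case runs again; the passed one is skipped.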
main_test = MockTestSuite(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, False, skips=1, fails=1)
def test_error_case(self):
"""Test run delta with error case.
* Runs a suite with success & error cases.
        * Validates all tests were run.
* Runs the same cases with delta flag.
* Validates that all the tests which weren't successful in previous
run, ran in the second run.
"""
MockSuite1.components = (SuccessCase,)
MockSuite2.components = (ErrorCase,)
MockTestSuite.components = (MockSuite1, MockSuite2)
main_test = MockTestSuite()
result = self.create_result(main_test)
main_test.run(result)
self.validate_result(result, False, successes=1, errors=1)
main_test = MockTestSuite(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, False, skips=1, errors=1)
def test_skipped_case(self):
"""Test run delta with skip case.
* Runs a suite with success & skip cases.
        * Validates all tests were run.
* Runs the same cases with delta flag.
* Validates that all the tests which weren't successful in previous
run, ran in the second run.
"""
MockSuite1.components = (SuccessCase,)
MockSuite2.components = (SkipCase,)
MockTestSuite.components = (MockSuite1, MockSuite2)
main_test = MockTestSuite()
result = self.create_result(main_test)
main_test.run(result)
self.validate_result(result, True, successes=1, skips=1)
main_test = MockTestSuite(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, None, skips=2)
def test_different_result_cases(self):
"""Test run delta with success, failure & skip cases.
        * Runs a suite with success, failure & skipped cases.
        * Validates all the tests were run.
* Runs the same cases with delta flag.
* Validates that all the tests which weren't successful in previous
run, ran in the second run.
"""
MockSuite1.components = (SuccessCase, FailureCase)
MockSuite2.components = (FailureCase, SkipCase)
MockTestSuite.components = (MockSuite1, MockSuite2)
main_test = MockTestSuite()
result = self.create_result(main_test)
main_test.run(result)
self.validate_result(result, False, successes=1, skips=1, fails=2)
main_test = MockTestSuite(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, False, skips=2, fails=2)
def test_nested_suite(self):
"""Test run delta with nested suite.
* Runs a nested suite.
        * Validates all the tests were run.
* Runs the same suite with delta flag.
* Validates that all the tests which weren't successful in previous
run, ran in the second run.
"""
MockSuite1.components = (SuccessCase, FailureCase)
MockSuite2.components = (ErrorCase, SkipCase)
MockNestedTestSuite.components = (MockSuite1, MockSuite2)
MockTestSuite.components = (MockSuite1, MockNestedTestSuite)
main_test = MockTestSuite()
result = self.create_result(main_test)
main_test.run(result)
self.validate_result(result, False, successes=2, skips=1, fails=2,
errors=1)
main_test = MockTestSuite(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, False, skips=3, fails=2, errors=1)
def test_cases_in_suite(self):
"""Test run delta with success, failure & skip cases.
        * Runs a suite with success, failure & skipped cases.
* Validates all the tests were run.
* Runs the same suite with delta flag.
* Validates that all the tests which weren't successful in previous
run, ran in the second run.
"""
MockTestSuite.components = (SuccessCase, FailureCase,
ErrorCase, SkipCase)
main_test = MockTestSuite()
result = self.create_result(main_test)
main_test.run(result)
self.validate_result(result, False, successes=1, skips=1, fails=1,
errors=1)
main_test = MockTestSuite(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, False, skips=2, fails=1, errors=1)
def test_flow_success(self):
"""Test run delta with successful flow.
* Runs a flow with success blocks.
* Validates all the tests were run.
* Runs the same suite with delta flag.
* Validates that the flow didn't run again.
* Runs the same suite with delta flag.
* Validates that the flow didn't run again.
"""
MockFlow.blocks = (SuccessBlock, SkipBlock, SuccessBlock)
main_test = MockFlow()
result = self.create_result(main_test)
main_test.run(result)
self.validate_result(result, True, successes=1)
main_test = MockFlow(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, None, skips=1)
main_test = MockFlow(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, None, skips=1)
def test_failing_flow(self):
"""Test run delta with failing flow.
        * Runs a flow with success, failure & skipped blocks.
* Validates all the block were run.
* Runs the same flow with delta flag.
* Validates that all the blocks ran again.
"""
MockFlow.blocks = (SuccessBlock, SkipBlock, FailureBlock)
main_test = MockFlow()
result = self.create_result(main_test)
main_test.run(result)
self.validate_result(result, False, fails=1)
block_results1 = [block.data.exception_type for block in main_test]
main_test = MockFlow(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, False, fails=1)
block_results2 = [block.data.exception_type for block in main_test]
self.assertEqual(block_results1, block_results2)
def test_flows_in_suite(self):
"""Test run delta with success & failure flows.
* Runs a suite with success & failure flows.
* Validates all the tests were run.
* Runs the same suite with delta flag.
* Validates that all the flows which weren't successful in previous
run, ran in the second run.
"""
MockFlow1.blocks = (SuccessBlock, FailureBlock)
MockFlow2.blocks = (SuccessBlock, SuccessBlock)
MockTestSuite.components = (MockFlow2, MockFlow1, MockFlow2)
main_test = MockTestSuite()
result = self.create_result(main_test)
main_test.run(result)
self.validate_result(result, False, successes=2, fails=1)
main_test = MockTestSuite(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, False, skips=2, fails=1)
def test_blocks_in_different_flows(self):
"""Test run delta with success, failure & skip cases.
* Runs a suite with success failure & skipped cases.
* Validates all the tests were run.
* Runs the same suite with delta flag.
* Validates that all the tests which weren't successful in previous
run, ran in the second run.
"""
MockFlow1.blocks = (SuccessBlock, SuccessBlock)
MockFlow2.blocks = (SuccessBlock, SuccessBlock)
MockTestSuite.components = (MockFlow1, MockFlow2)
main_test = MockTestSuite(run_data=self.run_data)
delta_result = self.create_result(main_test)
main_test.run(delta_result)
self.validate_result(delta_result, True, successes=2)
def validate_suite_data(self, suite_data, was_successful, successes=0,
skips=0, fails=0):
"""Validate a suite's result by its data.
Args:
suite_data (rotest.core.suite_data.SuiteData): data of the run
suite to validate.
was_successful (bool): expected 'success' status of the run suite.
            successes (number): expected amount of successful tests in the run.
            skips (number): expected amount of skipped tests in the run.
            fails (number): expected amount of failed tests in the run.
Raises:
AssertionError: expected test results are not the actual results.
"""
self.assertEqual(suite_data.success, was_successful, "The 'success' "
"value was wrong, expected %d, but %d ran"
% (was_successful, suite_data.success))
sub_components = suite_data.get_sub_tests_data()
actual_successful = len([case for case in sub_components
if case.exception_type is TestOutcome.SUCCESS])
self.assertEqual(actual_successful, successes,
"The number of successful cases was wrong, expected "
"%d, got: %d" % (successes, actual_successful))
actual_skips = len([case for case in sub_components
if case.exception_type is TestOutcome.SKIPPED])
self.assertEqual(actual_skips, skips,
"The number of skipped cases was wrong, expected "
"%d, got: %d" % (skips, actual_skips))
actual_fails = len([case for case in sub_components
if case.exception_type is TestOutcome.FAILED])
self.assertEqual(actual_fails, fails,
"The number of failed cases was wrong, expected "
"%d, got: %d" % (fails, actual_fails))
def test_delta_iterations(self):
"""Test run delta with iterations.
* Runs a suite with success & success-after-three-runs cases.
* Validates that in the first run, both tests were run and one
succeeded.
* Validates that in the second run, one test was run, and didn't
succeed.
* Validates that in the third run, one test was run successfully.
"""
MockTestSuite.components = (SuccessCase, FailTwiceCase)
runs_data = run(MockTestSuite, delta_iterations=self.DELTA_ITERATIONS,
outputs=(DBHandler.NAME,))
run_suites = [run_data.main_test for run_data in runs_data]
full_suite, first_delta_suite, second_delta_suite = run_suites
self.validate_suite_data(full_suite, False, successes=1, fails=1)
self.validate_suite_data(first_delta_suite, False, skips=1, fails=1)
self.validate_suite_data(second_delta_suite, True, successes=1,
skips=1)
def test_different_run_names(self):
"""Test run delta on runs with different run names.
* Runs a suite with success & failure cases with name X.
* Validates that both tests were run and one succeeded.
* Runs a suite with success & failure cases with name Y.
* Validates that both tests were run and one succeeded.
* Runs a suite with success & failure cases with name X.
* Validates that only one test was run (the failing test of the
previous X run) and it failed again.
"""
MockTestSuite.components = (SuccessCase, FailureCase)
run_data, = run(MockTestSuite, delta_iterations=1,
outputs=(DBHandler.NAME,), run_name='run1')
self.validate_suite_data(run_data.main_test, False, successes=1,
fails=1)
run_data, = run(MockTestSuite, delta_iterations=1,
outputs=(DBHandler.NAME,), run_name='run2')
self.validate_suite_data(run_data.main_test, False, successes=1,
fails=1)
run_data, = run(MockTestSuite, delta_iterations=1,
outputs=(DBHandler.NAME,), run_name='run1')
self.validate_suite_data(run_data.main_test, False, skips=1, fails=1)
def test_no_run_name_doesnt_filter(self):
"""Test that giving no run name doesn't filter the results.
* Runs a suite with success & failure cases with name X.
* Validates that both tests were run and one succeeded.
* Runs a suite with success & failure cases with no name.
* Validates that only one test was run (the failing test of the
previous X run) and it failed again.
"""
MockTestSuite.components = (SuccessCase, FailureCase)
run_data, = run(MockTestSuite, delta_iterations=1,
outputs=(DBHandler.NAME,), run_name='run1')
self.validate_suite_data(run_data.main_test, False, successes=1,
fails=1)
run_data, = run(MockTestSuite, delta_iterations=1,
outputs=(DBHandler.NAME,), run_name=None)
self.validate_suite_data(run_data.main_test, False, skips=1, fails=1)
def test_no_run_name_is_filtered(self):
"""Test that a run with no run name is filtered when running with name.
* Runs a suite with success & failure cases with no name.
* Validates that both tests were run and one succeeded.
* Runs a suite with success & failure cases with name X.
* Validates that both tests were run and one succeeded.
"""
MockTestSuite.components = (SuccessCase, FailureCase)
run_data, = run(MockTestSuite, delta_iterations=1,
outputs=(DBHandler.NAME,), run_name=None)
self.validate_suite_data(run_data.main_test, False, successes=1,
fails=1)
run_data, = run(MockTestSuite, delta_iterations=1,
outputs=(DBHandler.NAME,), run_name='run1')
self.validate_suite_data(run_data.main_test, False, successes=1,
fails=1)
| mit | 4,671,003,471,482,271,000 | 38.650685 | 79 | 0.620142 | false |
sigmavirus24/glance | glance/tests/functional/test_reload.py | 1 | 8699 | # Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
import time
import psutil
import requests
from glance.tests import functional
from glance.tests.utils import execute
TEST_VAR_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../', 'var'))
def set_config_value(filepath, key, value):
"""Set 'key = value' in config file"""
replacement_line = '%s = %s\n' % (key, value)
    match = re.compile(r'^%s\s+=' % key).match
with open(filepath, 'r+') as f:
lines = f.readlines()
f.seek(0, 0)
f.truncate()
for line in lines:
f.write(line if not match(line) else replacement_line)
class TestReload(functional.FunctionalTest):
"""Test configuration reload"""
def setUp(self):
self.workers = 1
super(TestReload, self).setUp()
def tearDown(self):
self.stop_servers()
super(TestReload, self).tearDown()
def ticker(self, message, seconds=60, tick=0.01):
"""
Allows repeatedly testing for an expected result
for a finite amount of time.
:param message: Message to display on timeout
:param seconds: Time in seconds after which we timeout
:param tick: Time to sleep before rechecking for expected result
:returns: 'True' or fails the test with 'message' on timeout
"""
# We default to allowing 60 seconds timeout but
# typically only a few hundredths of a second
# are needed.
num_ticks = seconds * (1.0 / tick)
count = 0
while count < num_ticks:
count += 1
time.sleep(tick)
yield
self.fail(message)
def _get_children(self, server):
        pid = self._get_parent(server)
process = psutil.Process(pid)
children = process.get_children()
pids = set()
for child in children:
pids.add(child.pid)
return pids
def _get_parent(self, server):
if server == 'api':
return self.api_server.process_pid
elif server == 'registry':
return self.registry_server.process_pid
def _conffile(self, service):
conf_dir = os.path.join(self.test_dir, 'etc')
conf_filepath = os.path.join(conf_dir, '%s.conf' % service)
return conf_filepath
def _url(self, protocol, path):
return '%s://127.0.0.1:%d%s' % (protocol, self.api_port, path)
def test_reload(self):
"""Test SIGHUP picks up new config values"""
def check_pids(pre, post=None, workers=2):
            if post is None:
                return len(pre) == workers
if len(post) == workers:
# Check new children have different pids
if post.intersection(pre) == set():
return True
return False
self.api_server.fork_socket = False
self.registry_server.fork_socket = False
self.start_servers(fork_socket=False, **vars(self))
pre_pids = {}
post_pids = {}
# Test changing the workers value creates all new children
# This recycles the existing socket
msg = 'Start timeout'
for _ in self.ticker(msg):
for server in ('api', 'registry'):
pre_pids[server] = self._get_children(server)
if check_pids(pre_pids['api'], workers=1):
if check_pids(pre_pids['registry'], workers=1):
break
for server in ('api', 'registry'):
# Labour costs have fallen
set_config_value(self._conffile(server), 'workers', '2')
cmd = "kill -HUP %s" % self._get_parent(server)
execute(cmd, raise_error=True)
msg = 'Worker change timeout'
for _ in self.ticker(msg):
for server in ('api', 'registry'):
post_pids[server] = self._get_children(server)
if check_pids(pre_pids['registry'], post_pids['registry']):
if check_pids(pre_pids['api'], post_pids['api']):
break
# Test changing from http to https
# This recycles the existing socket
path = self._url('http', '/')
response = requests.get(path)
self.assertEqual(300, response.status_code)
del response # close socket so that process audit is reliable
pre_pids['api'] = self._get_children('api')
key_file = os.path.join(TEST_VAR_DIR, 'privatekey.key')
set_config_value(self._conffile('api'), 'key_file', key_file)
cert_file = os.path.join(TEST_VAR_DIR, 'certificate.crt')
set_config_value(self._conffile('api'), 'cert_file', cert_file)
cmd = "kill -HUP %s" % self._get_parent('api')
execute(cmd, raise_error=True)
msg = 'http to https timeout'
for _ in self.ticker(msg):
post_pids['api'] = self._get_children('api')
if check_pids(pre_pids['api'], post_pids['api']):
break
ca_file = os.path.join(TEST_VAR_DIR, 'ca.crt')
path = self._url('https', '/')
response = requests.get(path, verify=ca_file)
self.assertEqual(300, response.status_code)
del response
# Test https restart
# This recycles the existing socket
pre_pids['api'] = self._get_children('api')
cmd = "kill -HUP %s" % self._get_parent('api')
execute(cmd, raise_error=True)
msg = 'https restart timeout'
for _ in self.ticker(msg):
post_pids['api'] = self._get_children('api')
if check_pids(pre_pids['api'], post_pids['api']):
break
ca_file = os.path.join(TEST_VAR_DIR, 'ca.crt')
path = self._url('https', '/')
response = requests.get(path, verify=ca_file)
self.assertEqual(300, response.status_code)
del response
# Test changing the https bind_host
# This requires a new socket
pre_pids['api'] = self._get_children('api')
set_config_value(self._conffile('api'), 'bind_host', '127.0.0.1')
cmd = "kill -HUP %s" % self._get_parent('api')
execute(cmd, raise_error=True)
msg = 'https bind_host timeout'
for _ in self.ticker(msg):
post_pids['api'] = self._get_children('api')
if check_pids(pre_pids['api'], post_pids['api']):
break
path = self._url('https', '/')
response = requests.get(path, verify=ca_file)
self.assertEqual(300, response.status_code)
del response
# Test https -> http
# This recycles the existing socket
pre_pids['api'] = self._get_children('api')
set_config_value(self._conffile('api'), 'key_file', '')
set_config_value(self._conffile('api'), 'cert_file', '')
cmd = "kill -HUP %s" % self._get_parent('api')
execute(cmd, raise_error=True)
msg = 'https to http timeout'
for _ in self.ticker(msg):
post_pids['api'] = self._get_children('api')
if check_pids(pre_pids['api'], post_pids['api']):
break
path = self._url('http', '/')
response = requests.get(path)
self.assertEqual(300, response.status_code)
del response
# Test changing the http bind_host
# This requires a new socket
pre_pids['api'] = self._get_children('api')
set_config_value(self._conffile('api'), 'bind_host', '127.0.0.1')
cmd = "kill -HUP %s" % self._get_parent('api')
execute(cmd, raise_error=True)
msg = 'http bind_host timeout'
for _ in self.ticker(msg):
post_pids['api'] = self._get_children('api')
if check_pids(pre_pids['api'], post_pids['api']):
break
path = self._url('http', '/')
response = requests.get(path)
self.assertEqual(300, response.status_code)
del response
| apache-2.0 | -7,581,197,238,097,621,000 | 35.245833 | 78 | 0.568686 | false |
clranc/HostGrab | url.py | 1 | 2157 | #Url linked list handler
import os.path
from UrlLL import UrlList
Path = "url.list"
DLPath = "hlist/"
def add(Url):
    if not os.path.isfile(Path):
UrlFile = open(Path,"w")
List = UrlList()
List.add(Url)
List.fWrite(UrlFile)
List.printList()
return
else:
UrlFile = open(Path,"r")
List = UrlList()
for line in UrlFile:
entry = line.strip().split()
if len(entry) != 0:
if Url == entry[0]:
print("This download url is already in use and will "+
"not be added")
return
List.add(entry[0])
List.add(Url)
UrlFile = open(Path,"w")
List.fWrite(UrlFile)
List.printList()
return
def rm(NumId):
try:
NumId = int(NumId)
except ValueError:
print("Invalid ID input")
return
    if not os.path.isfile(Path):
UrlFile = open(Path,"w")
print("Your url list is empty")
return
else:
UrlFile = open(Path,"r")
List = UrlList()
count = 0
for line in UrlFile:
if len(line.strip().split())!=0:
List.add((line.strip().split())[0])
count += 1
        if List.isEmpty():
            print("The url list is empty")
return
DLFPath = DLPath + "hosts.id" + str(NumId)
List.rm(NumId)
print("Link removed\n")
        if os.path.isfile(DLFPath):
os.remove(DLFPath)
print("& hosts.id"+str(NumId) + " has been deleted\n")
UrlFile = open(Path,"w")
List.fWrite(UrlFile)
List.printList()
return
def printList():
List = None
    if not os.path.isfile(Path):
UrlFile = open(Path,"w")
List = UrlList()
List.printList()
return
else:
UrlFile = open(Path,"r")
List = UrlList()
for line in UrlFile:
if len(line.strip().split())!=0:
List.add((line.strip().split())[0])
List.printList()
| gpl-2.0 | 6,249,301,886,832,723,000 | 24.678571 | 74 | 0.491887 | false |
bjoernricks/python-quilt | tests/test_pop.py | 1 | 2898 | #!/usr/bin/env python
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2017 Björn Ricks <[email protected]>
#
# See LICENSE coming with the source of python-quilt for details.
import os.path
import six
from helpers import QuiltTest, make_file
from quilt.db import Db
from quilt.error import QuiltError
from quilt.patch import Patch
from quilt.pop import Pop
from quilt.utils import Directory, TmpDirectory, File
test_dir = os.path.dirname(__file__)
class PopTest(QuiltTest):
data_dir = Directory(os.path.join(test_dir, "data", "pop"))
def test_unapply_all(self):
patch2 = Patch("p2.patch")
test_dir = self.data_dir + "test1"
with TmpDirectory(dir=self.data_dir.get_name()) as tmp_dir:
tmp_test_dir = tmp_dir + "test1"
test_dir.copy(tmp_test_dir)
pc_dir = tmp_test_dir + "pc"
f1 = tmp_test_dir + File("f1")
self.assertTrue(f1.exists())
f2 = tmp_test_dir + File("f2")
self.assertTrue(f2.exists())
pop = Pop(tmp_test_dir.get_name(), pc_dir.get_name())
self.assertEqual(patch2, pop.db.top_patch())
pop.unapply_all()
self.assertEqual(None, pop.db.top_patch())
self.assertFalse(f1.exists())
self.assertFalse(f2.exists())
def test_apply_next(self):
patch1 = Patch("p1.patch")
patch2 = Patch("p2.patch")
test_dir = self.data_dir + "test1"
with TmpDirectory(dir=self.data_dir.get_name()) as tmp_dir:
tmp_test_dir = tmp_dir + "test2"
test_dir.copy(tmp_test_dir)
pc_dir = tmp_test_dir + "pc"
f1 = tmp_test_dir + File("f1")
self.assertTrue(f1.exists())
f2 = tmp_test_dir + File("f2")
self.assertTrue(f2.exists())
pop = Pop(tmp_test_dir.get_name(), pc_dir.get_name())
self.assertEqual(patch2, pop.db.top_patch())
pop.unapply_top_patch()
self.assertEqual(patch1, pop.db.top_patch())
self.assertTrue(f1.exists())
self.assertFalse(f2.exists())
pop.unapply_top_patch()
self.assertEqual(None, pop.db.top_patch())
self.assertFalse(f1.exists())
self.assertFalse(f2.exists())
def test_unrefreshed(self):
with TmpDirectory() as dir:
db = Db(dir.get_name())
db.add_patch(Patch("unrefreshed.patch"))
db.save()
make_file(b"", db.dirname, "unrefreshed.patch~refresh")
cmd = Pop(dir.get_name(), db.dirname)
with six.assertRaisesRegex(self, QuiltError,
r"needs to be refreshed"):
cmd.unapply_top_patch()
if __name__ == "__main__":
PopTest.run_tests()
| mit | -5,621,312,368,692,360,000 | 28.262626 | 67 | 0.575078 | false |
intchloe/exitmap | src/exitmap.py | 1 | 12070 | # Copyright 2013, 2014 Philipp Winter <[email protected]>
#
# This file is part of exitmap.
#
# exitmap is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# exitmap is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with exitmap. If not, see <http://www.gnu.org/licenses/>.
"""
Performs a task over (a subset of) all Tor exit relays.
"""
import os
import time
import socket
import pkgutil
import argparse
import datetime
import random
import logging
import ConfigParser
import functools
import stem
import stem.connection
import stem.process
import stem.descriptor
from stem.control import Controller, EventType
import modules
import log
import error
import util
import relayselector
from eventhandler import EventHandler
from stats import Statistics
logger = log.get_logger()
def bootstrap_tor(args):
"""
Invoke a Tor process which is subsequently used by exitmap.
"""
logger.info("Attempting to invoke Tor process in directory \"%s\". This "
"might take a while." % args.tor_dir)
if not args.first_hop:
logger.info("No first hop given. Using randomly determined first "
"hops for circuits.")
ports = {}
partial_parse_log_lines = functools.partial(util.parse_log_lines, ports)
proc = stem.process.launch_tor_with_config(
config={
"SOCKSPort": "auto",
"ControlPort": "auto",
"DataDirectory": args.tor_dir,
"CookieAuthentication": "1",
"LearnCircuitBuildTimeout": "0",
"CircuitBuildTimeout": "40",
"__DisablePredictedCircuits": "1",
"__LeaveStreamsUnattached": "1",
"FetchHidServDescriptors": "0",
"UseMicroDescriptors": "0",
},
timeout=90,
take_ownership=True,
completion_percent=80,
init_msg_handler=partial_parse_log_lines,
)
logger.info("Successfully started Tor process (PID=%d)." % proc.pid)
return ports["socks"], ports["control"]
def parse_cmd_args():
"""
Parse and return command line arguments.
"""
desc = "Perform a task over (a subset of) all Tor exit relays."
parser = argparse.ArgumentParser(description=desc, add_help=False)
parser.add_argument("-f", "--config-file", type=str, default=None,
help="Path to the configuration file.")
args, remaining_argv = parser.parse_known_args()
# First, try to load the configuration file and load its content as our
# defaults.
if args.config_file:
config_file = args.config_file
else:
home_dir = os.path.expanduser("~")
config_file = os.path.join(home_dir, ".exitmaprc")
config_parser = ConfigParser.SafeConfigParser()
file_parsed = config_parser.read([config_file])
if file_parsed:
try:
defaults = dict(config_parser.items("Defaults"))
except ConfigParser.NoSectionError as err:
logger.warning("Could not parse config file \"%s\": %s" %
(config_file, err))
defaults = {}
else:
defaults = {}
parser = argparse.ArgumentParser(parents=[parser])
parser.set_defaults(**defaults)
# Now, load the arguments given over the command line.
group = parser.add_mutually_exclusive_group()
group.add_argument("-C", "--country", type=str, default=None,
help="Only probe exit relays of the country which is "
"determined by the given 2-letter country code.")
group.add_argument("-e", "--exit", type=str, default=None,
help="Only probe the exit relay which has the given "
"20-byte fingerprint.")
parser.add_argument("-d", "--build-delay", type=float, default=3,
help="Wait for the given delay (in seconds) between "
"circuit builds. The default is 3.")
tor_directory = "/tmp/exitmap_tor_datadir"
parser.add_argument("-t", "--tor-dir", type=str,
default=tor_directory,
help="Tor's data directory. If set, the network "
"consensus can be re-used in between scans which "
"speeds up bootstrapping. The default is %s." %
tor_directory)
parser.add_argument("-a", "--analysis-dir", type=str,
default=None,
help="The directory where analysis results are "
"written to. If the directory is used depends "
"on the module. The default is /tmp.")
parser.add_argument("-v", "--verbosity", type=str, default="info",
help="Minimum verbosity level for logging. Available "
"in ascending order: debug, info, warning, "
"error, critical). The default is info.")
parser.add_argument("-i", "--first-hop", type=str, default=None,
help="The 20-byte fingerprint of the Tor relay which "
"is used as first hop. This relay should be "
"under your control.")
parser.add_argument("-V", "--version", action="version",
version="%(prog)s 2015.04.06")
parser.add_argument("module", nargs='+',
help="Run the given module (available: %s)." %
", ".join(get_modules()))
parser.set_defaults(**defaults)
return parser.parse_args(remaining_argv)
def get_modules():
"""
Return all modules located in "modules/".
"""
modules_path = os.path.dirname(modules.__file__)
return [name for _, name, _ in pkgutil.iter_modules([modules_path])]
def main():
"""
The scanner's entry point.
"""
stats = Statistics()
args = parse_cmd_args()
# Create and set the given directories.
if args.tor_dir and not os.path.exists(args.tor_dir):
os.makedirs(args.tor_dir)
if args.analysis_dir and not os.path.exists(args.analysis_dir):
os.makedirs(args.analysis_dir)
util.analysis_dir = args.analysis_dir
logger.setLevel(logging.__dict__[args.verbosity.upper()])
logger.debug("Command line arguments: %s" % str(args))
socks_port, control_port = bootstrap_tor(args)
controller = Controller.from_port(port=control_port)
stem.connection.authenticate(controller)
# Redirect Tor's logging to work around the following problem:
# https://bugs.torproject.org/9862
logger.debug("Redirecting Tor's logging to /dev/null.")
controller.set_conf("Log", "err file /dev/null")
# We already have the current consensus, so we don't need additional
# descriptors or the streams fetching them.
controller.set_conf("FetchServerDescriptors", "0")
cached_consensus_path = os.path.join(args.tor_dir, "cached-consensus")
if args.first_hop and (not util.relay_in_consensus(args.first_hop,
cached_consensus_path)):
raise error.PathSelectionError("Given first hop \"%s\" not found in "
"consensus. Is it offline?" %
args.first_hop)
for module_name in args.module:
try:
run_module(module_name, args, controller, socks_port, stats)
except error.ExitSelectionError as err:
logger.error("failed to run because : %s" %err)
return 0
def select_exits(args, module):
"""
Select exit relays which allow exiting to the module's scan destinations.
We select exit relays based on their published exit policy. In particular,
we check if the exit relay's exit policy specifies that we can connect to
our intended destination(s).
"""
before = datetime.datetime.now()
hosts = []
if module.destinations is not None:
hosts = [(socket.gethostbyname(host), port) for
(host, port) in module.destinations]
# '-e' was used to specify a single exit relay.
if args.exit:
exit_relays = [args.exit]
total = len(exit_relays)
else:
total, exit_relays = relayselector.get_exits(args.tor_dir,
country_code=args.country,
hosts=hosts)
logger.debug("Successfully selected exit relays after %s." %
str(datetime.datetime.now() - before))
logger.info("%d%s exits out of all %s exit relays allow exiting to %s." %
(len(exit_relays), " %s" %
args.country if args.country else "", total, hosts))
assert isinstance(exit_relays, list)
random.shuffle(exit_relays)
return exit_relays
def run_module(module_name, args, controller, socks_port, stats):
"""
Run an exitmap module over all available exit relays.
"""
logger.info("Running module '%s'." % module_name)
stats.modules_run += 1
try:
module = __import__("modules.%s" % module_name, fromlist=[module_name])
except ImportError as err:
logger.error("Failed to load module because: %s" % err)
return
# Let module perform one-off setup tasks.
if hasattr(module, "setup"):
logger.debug("Calling module's setup() function.")
module.setup()
exit_relays = select_exits(args, module)
count = len(exit_relays)
stats.total_circuits += count
if count < 1:
raise error.ExitSelectionError("Exit selection yielded %d exits "
"but need at least one." % count)
handler = EventHandler(controller, module, socks_port, stats)
controller.add_event_listener(handler.new_event,
EventType.CIRC, EventType.STREAM)
duration = count * args.build_delay
logger.info("Scan is estimated to take around %s." %
datetime.timedelta(seconds=duration))
logger.debug("Beginning to trigger %d circuit creation(s)." % count)
iter_exit_relays(exit_relays, controller, stats, args)
def iter_exit_relays(exit_relays, controller, stats, args):
"""
Invoke circuits for all selected exit relays.
"""
before = datetime.datetime.now()
cached_consensus_path = os.path.join(args.tor_dir, "cached-consensus")
fingerprints = relayselector.get_fingerprints(cached_consensus_path)
count = len(exit_relays)
logger.info("Beginning to trigger circuit creations.")
# Start building a circuit for every exit relay we got.
for i, exit_relay in enumerate(exit_relays):
# Determine the hops in our next circuit.
if args.first_hop:
hops = [args.first_hop, exit_relay]
else:
all_hops = list(fingerprints)
all_hops.remove(exit_relay)
first_hop = random.choice(all_hops)
logger.debug("Using random first hop %s for circuit." % first_hop)
hops = [first_hop, exit_relay]
assert len(hops) > 1
try:
controller.new_circuit(hops)
except stem.ControllerError as err:
stats.failed_circuits += 1
logger.debug("Circuit with exit relay \"%s\" could not be "
"created: %s" % (exit_relay, err))
if i != (count - 1):
time.sleep(args.build_delay)
logger.info("Done triggering circuit creations after %s." %
str(datetime.datetime.now() - before))
| gpl-3.0 | -932,819,446,632,745,000 | 32.434903 | 79 | 0.601906 | false |
capiscuas/libreGTFS | other/fixing_boundaries_Jakarta/detect_redundant_ways.py | 1 | 3866 | import xml.etree.cElementTree as ET
input_filename = "unique_nodes.osm"
output_filename = "ready_to_fix.osm"
# Parse the XML from the OSM file
tree = ET.ElementTree(file=input_filename)
#I tried to use the library OSMAPI to parse the OSM files into python structures
# but it hanged the laptop cause it took too much time
#from osmapi import OsmApi
#MyApi = OsmApi(username = u"xxxxxxx", password = u"*******")
#result = MyApi.ParseOsm(file(input_filename).read())
# Get the root of the XML (the <osm> node)
r = tree.getroot()
unique_rw = []
ways_level9 = []
ways_level8 = []
all_ways = {}
#Converting the Ways Tree XML elements into a python dictionary
for way in r.findall("way"):
way_id = way.attrib['id']
all_ways[way_id] = {'id':way_id,'tag':{},'nd':[]}
for c in way.getchildren():
if c.tag == "nd":
node_id = c.attrib['ref']
all_ways[way_id]['nd'].append(node_id)
else:
all_ways[way_id]['tag'][c.attrib['k']] = c.attrib['v']
print 'all_ways: ',len(all_ways)
#Looping the all_ways dictionary to find out which level 8 ways are redundant because they could be generated from the smaller level 9 ways.
#In order to do this, we build an index (rw_number + kelurahan/desa name) from the admin_level=9 ways
# and we compare it when crawling the ways that have admin_level=8...
#if that index (rw_number + desa) already exists, it means that this way is redundant and can be generated with the ways of level9.
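# e.g. a level-9 way with rw_number=5 in kelurahan "Kebon Jeruk" (a hypothetical example) yields the key "5_kebonjeruk".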
all_unique_rw = []
for idx,way in all_ways.items():
#print way['tag']
if way['tag'].has_key('admin_level'):
admin_level = int(way['tag']['admin_level'])
if admin_level == 9:
ways_level9.append(way)
if way['tag'].has_key('rw_number'):
try:
rw_number = int(way['tag']['rw_number'])
except:
continue
else:
if way['tag'].has_key('kel_name'):
desa = (way['tag']['kel_name']).lower().replace(" ", "")
#print "Level 9 RW=",rw_number, "Desa:",desa
all_unique_rw.append(str(rw_number)+'_'+desa)
elif admin_level == 8:
ways_level8.append(way)
ways_to_delete = []
for way in ways_level8:
if way['tag'].has_key('name') or way['tag'].has_key('RW'):
if way['tag'].has_key('name'): rwtag = way['tag']['name']
else: rwtag = way['tag']['RW']
if rwtag.startswith('RW'):
try:
rw_number = int(rwtag[2:])
except:
continue
else:
desaname = ''
if way['tag'].has_key('is_in:hamlet'): desaname = way['tag']['is_in:hamlet']
if way['tag'].has_key('KEL_NAME') : desaname = way['tag']['KEL_NAME']
if desaname:
                    #We remove the spaces because we realized sometimes the tags from level9 and level8 didn't match because of spaces.
desa = (desaname).lower().replace(" ", "")
rw8 = str(rw_number)+'_'+desa
if rw8 in all_unique_rw:
#print 'Redundant',rw8
ways_to_delete.append(way['id'])
#print "Level 8 RW=",rw_number, "Desa:",desa
print 'Colouring redundant ways, TOTAL: ',len(ways_to_delete)
for way in r.findall("way"):
way_id = way.attrib['id']
if way_id in ways_to_delete:
#We add this tag so we can have BLUE color in JOSM for easy debugging(comparing, deleting, etc)
way.append(ET.Element('tag',{'k':'natural','v':'water'}))
print 'Saving'
tree.write(output_filename, encoding='utf-8', xml_declaration=True) | gpl-3.0 | -3,870,585,583,029,788,000 | 39.28125 | 136 | 0.543973 | false |
beeftornado/sentry | tests/sentry/snuba/test_tasks.py | 1 | 13624 | from __future__ import absolute_import
import abc
from uuid import uuid4
import responses
from exam import patcher
from mock import Mock, patch
from six import add_metaclass
from sentry.snuba.models import QueryDatasets, QuerySubscription, SnubaQuery, SnubaQueryEventType
from sentry.snuba.tasks import (
apply_dataset_query_conditions,
build_snuba_filter,
create_subscription_in_snuba,
update_subscription_in_snuba,
delete_subscription_from_snuba,
)
from sentry.utils import json
from sentry.testutils import TestCase
@add_metaclass(abc.ABCMeta)
class BaseSnubaTaskTest(object):
metrics = patcher("sentry.snuba.tasks.metrics")
status_translations = {
QuerySubscription.Status.CREATING: "create",
QuerySubscription.Status.UPDATING: "update",
QuerySubscription.Status.DELETING: "delete",
}
@abc.abstractproperty
def expected_status(self):
pass
@abc.abstractmethod
def task(self):
pass
def create_subscription(self, status=None, subscription_id=None, dataset=None):
if status is None:
status = self.expected_status
if dataset is None:
dataset = QueryDatasets.EVENTS
dataset = dataset.value
aggregate = "count_unique(tags[sentry:user])"
query = "hello"
time_window = 60
resolution = 60
snuba_query = SnubaQuery.objects.create(
dataset=dataset,
aggregate=aggregate,
query=query,
time_window=time_window,
resolution=resolution,
)
return QuerySubscription.objects.create(
snuba_query=snuba_query,
status=status.value,
subscription_id=subscription_id,
project=self.project,
type="something",
)
def test_no_subscription(self):
self.task(12345)
self.metrics.incr.assert_called_once_with(
"snuba.subscriptions.{}.subscription_does_not_exist".format(
self.status_translations[self.expected_status]
)
)
def test_invalid_status(self):
sub = self.create_subscription(QuerySubscription.Status.ACTIVE)
self.task(sub.id)
self.metrics.incr.assert_called_once_with(
"snuba.subscriptions.{}.incorrect_status".format(
self.status_translations[self.expected_status]
)
)
class CreateSubscriptionInSnubaTest(BaseSnubaTaskTest, TestCase):
expected_status = QuerySubscription.Status.CREATING
task = create_subscription_in_snuba
def test_already_created(self):
sub = self.create_subscription(
QuerySubscription.Status.CREATING, subscription_id=uuid4().hex
)
create_subscription_in_snuba(sub.id)
self.metrics.incr.assert_called_once_with(
"snuba.subscriptions.create.already_created_in_snuba"
)
def test(self):
sub = self.create_subscription(QuerySubscription.Status.CREATING)
create_subscription_in_snuba(sub.id)
sub = QuerySubscription.objects.get(id=sub.id)
assert sub.status == QuerySubscription.Status.ACTIVE.value
assert sub.subscription_id is not None
def test_transaction(self):
sub = self.create_subscription(
QuerySubscription.Status.CREATING, dataset=QueryDatasets.TRANSACTIONS
)
create_subscription_in_snuba(sub.id)
sub = QuerySubscription.objects.get(id=sub.id)
assert sub.status == QuerySubscription.Status.ACTIVE.value
assert sub.subscription_id is not None
@responses.activate
def test_adds_type(self):
sub = self.create_subscription(QuerySubscription.Status.CREATING)
with patch("sentry.snuba.tasks._snuba_pool") as pool:
resp = Mock()
resp.status = 202
resp.data = json.dumps({"subscription_id": "123"})
pool.urlopen.return_value = resp
create_subscription_in_snuba(sub.id)
request_body = json.loads(pool.urlopen.call_args[1]["body"])
assert ["type", "=", "error"] in request_body["conditions"]
class UpdateSubscriptionInSnubaTest(BaseSnubaTaskTest, TestCase):
expected_status = QuerySubscription.Status.UPDATING
task = update_subscription_in_snuba
def test(self):
subscription_id = "1/{}".format(uuid4().hex)
sub = self.create_subscription(
QuerySubscription.Status.UPDATING, subscription_id=subscription_id
)
update_subscription_in_snuba(sub.id)
sub = QuerySubscription.objects.get(id=sub.id)
assert sub.status == QuerySubscription.Status.ACTIVE.value
assert sub.subscription_id is not None
assert sub.subscription_id != subscription_id
def test_no_subscription_id(self):
sub = self.create_subscription(QuerySubscription.Status.UPDATING)
assert sub.subscription_id is None
update_subscription_in_snuba(sub.id)
sub = QuerySubscription.objects.get(id=sub.id)
assert sub.status == QuerySubscription.Status.ACTIVE.value
assert sub.subscription_id is not None
class DeleteSubscriptionFromSnubaTest(BaseSnubaTaskTest, TestCase):
expected_status = QuerySubscription.Status.DELETING
task = delete_subscription_from_snuba
def test(self):
subscription_id = "1/{}".format(uuid4().hex)
sub = self.create_subscription(
QuerySubscription.Status.DELETING, subscription_id=subscription_id
)
delete_subscription_from_snuba(sub.id)
assert not QuerySubscription.objects.filter(id=sub.id).exists()
def test_no_subscription_id(self):
sub = self.create_subscription(QuerySubscription.Status.DELETING)
assert sub.subscription_id is None
delete_subscription_from_snuba(sub.id)
assert not QuerySubscription.objects.filter(id=sub.id).exists()
class BuildSnubaFilterTest(TestCase):
def test_simple_events(self):
snuba_filter = build_snuba_filter(
QueryDatasets.EVENTS, "", "count_unique(user)", None, None
)
assert snuba_filter
assert snuba_filter.conditions == [["type", "=", "error"]]
assert snuba_filter.aggregations == [["uniq", "tags[sentry:user]", u"count_unique_user"]]
def test_simple_transactions(self):
snuba_filter = build_snuba_filter(
QueryDatasets.TRANSACTIONS, "", "count_unique(user)", None, None
)
assert snuba_filter
assert snuba_filter.conditions == []
assert snuba_filter.aggregations == [["uniq", "user", u"count_unique_user"]]
def test_aliased_query_events(self):
snuba_filter = build_snuba_filter(
QueryDatasets.EVENTS, "release:latest", "count_unique(user)", None, None
)
assert snuba_filter
assert snuba_filter.conditions == [
["type", "=", "error"],
["tags[sentry:release]", "=", "latest"],
]
assert snuba_filter.aggregations == [["uniq", "tags[sentry:user]", u"count_unique_user"]]
def test_aliased_query_transactions(self):
snuba_filter = build_snuba_filter(
QueryDatasets.TRANSACTIONS,
"release:latest",
"percentile(transaction.duration,.95)",
None,
None,
)
assert snuba_filter
assert snuba_filter.conditions == [["release", "=", "latest"]]
assert snuba_filter.aggregations == [
[u"quantile(0.95)", "duration", u"percentile_transaction_duration__95"]
]
def test_user_query(self):
snuba_filter = build_snuba_filter(
QueryDatasets.EVENTS, "user:[email protected]", "count()", None, None
)
assert snuba_filter
assert snuba_filter.conditions == [
["type", "=", "error"],
["tags[sentry:user]", "=", "[email protected]"],
]
assert snuba_filter.aggregations == [[u"count", None, u"count"]]
def test_user_query_transactions(self):
snuba_filter = build_snuba_filter(
QueryDatasets.TRANSACTIONS, "user:[email protected]", "p95()", None, None
)
assert snuba_filter
assert snuba_filter.conditions == [["user", "=", "[email protected]"]]
assert snuba_filter.aggregations == [[u"quantile(0.95)", "duration", u"p95"]]
def test_boolean_query(self):
snuba_filter = build_snuba_filter(
QueryDatasets.EVENTS, "release:latest OR release:123", "count_unique(user)", None, None
)
assert snuba_filter
assert snuba_filter.conditions == [
["type", "=", "error"],
[
[
"or",
[
["equals", ["tags[sentry:release]", "'latest'"]],
["equals", ["tags[sentry:release]", "'123'"]],
],
],
"=",
1,
],
]
assert snuba_filter.aggregations == [["uniq", "tags[sentry:user]", u"count_unique_user"]]
def test_event_types(self):
snuba_filter = build_snuba_filter(
QueryDatasets.EVENTS,
"release:latest OR release:123",
"count_unique(user)",
None,
[SnubaQueryEventType.EventType.ERROR, SnubaQueryEventType.EventType.DEFAULT],
)
assert snuba_filter
assert snuba_filter.conditions == [
[["or", [["equals", ["type", "'error'"]], ["equals", ["type", "'default'"]]]], "=", 1],
[
[
"or",
[
["equals", ["tags[sentry:release]", "'latest'"]],
["equals", ["tags[sentry:release]", "'123'"]],
],
],
"=",
1,
],
]
assert snuba_filter.aggregations == [["uniq", "tags[sentry:user]", u"count_unique_user"]]
class TestApplyDatasetQueryConditions(TestCase):
def test_no_event_types_no_discover(self):
assert (
apply_dataset_query_conditions(QueryDatasets.EVENTS, "release:123", None, False)
== "(event.type:error) AND (release:123)"
)
assert (
apply_dataset_query_conditions(
QueryDatasets.EVENTS, "release:123 OR release:456", None, False
)
== "(event.type:error) AND (release:123 OR release:456)"
)
assert (
apply_dataset_query_conditions(QueryDatasets.TRANSACTIONS, "release:123", None, False)
== "release:123"
)
assert (
apply_dataset_query_conditions(
QueryDatasets.TRANSACTIONS, "release:123 OR release:456", None, False
)
== "release:123 OR release:456"
)
def test_no_event_types_discover(self):
assert (
apply_dataset_query_conditions(QueryDatasets.EVENTS, "release:123", None, True)
== "(event.type:error) AND (release:123)"
)
assert (
apply_dataset_query_conditions(
QueryDatasets.EVENTS, "release:123 OR release:456", None, True
)
== "(event.type:error) AND (release:123 OR release:456)"
)
assert (
apply_dataset_query_conditions(QueryDatasets.TRANSACTIONS, "release:123", None, True)
== "(event.type:transaction) AND (release:123)"
)
assert (
apply_dataset_query_conditions(
QueryDatasets.TRANSACTIONS, "release:123 OR release:456", None, True
)
== "(event.type:transaction) AND (release:123 OR release:456)"
)
def test_event_types_no_discover(self):
assert (
apply_dataset_query_conditions(
QueryDatasets.EVENTS, "release:123", [SnubaQueryEventType.EventType.ERROR], False
)
== "(event.type:error) AND (release:123)"
)
assert (
apply_dataset_query_conditions(
QueryDatasets.EVENTS,
"release:123",
[SnubaQueryEventType.EventType.ERROR, SnubaQueryEventType.EventType.DEFAULT],
False,
)
== "(event.type:error OR event.type:default) AND (release:123)"
)
assert (
apply_dataset_query_conditions(
QueryDatasets.TRANSACTIONS,
"release:123",
[SnubaQueryEventType.EventType.TRANSACTION],
False,
)
== "release:123"
)
def test_event_types_discover(self):
assert (
apply_dataset_query_conditions(
QueryDatasets.EVENTS, "release:123", [SnubaQueryEventType.EventType.ERROR], True
)
== "(event.type:error) AND (release:123)"
)
assert (
apply_dataset_query_conditions(
QueryDatasets.EVENTS,
"release:123",
[SnubaQueryEventType.EventType.ERROR, SnubaQueryEventType.EventType.DEFAULT],
True,
)
== "(event.type:error OR event.type:default) AND (release:123)"
)
assert (
apply_dataset_query_conditions(
QueryDatasets.TRANSACTIONS,
"release:123",
[SnubaQueryEventType.EventType.TRANSACTION],
True,
)
== "(event.type:transaction) AND (release:123)"
)
| bsd-3-clause | -5,761,123,563,209,410,000 | 35.427807 | 99 | 0.585437 | false |
Sergey010289/Address_book_flask | models.py | 1 | 1539 | # -*- coding: utf-8 -*-
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///task2.db'
db = SQLAlchemy(app)
class Users(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255), nullable=False)
surname = db.Column(db.String(255), nullable=False)
salt = db.Column(db.String(255), nullable=True)
enc_passwd = db.Column(db.String(255), nullable=False)
addresses = db.relationship('Contacts', backref='user')
def __init__(self, name, surname, salt, enc_passwd):
self.name = name
self.surname = surname
self.salt = salt
self.enc_passwd = enc_passwd
def __repr__(self):
return "{0}: {1}".format(self.name, self.surname,)
class Contacts(db.Model):
__tablename__ = 'contacts'
id = db.Column(db.Integer, primary_key=True)
contact = db.Column(db.String(255), nullable=False)
contact_type = db.Column(db.String(255), nullable=False) # enum(tel, email, skype)
users_id = db.Column(db.Integer, db.ForeignKey('users.id'))
def __init__(self, contact, contact_type, users_id):
self.contact = contact
self.contact_type = contact_type
self.users_id = users_id
def __repr__(self):
return "{0}: {1}".format(self.contact, self.contact_type,)
if __name__ == '__main__':
# init DB
db.create_all()
print 'created'
#app.run()
#app.run(debug=True)
| mit | 3,616,191,841,287,895,600 | 27.5 | 89 | 0.623132 | false |
facebook/fbthrift | thrift/lib/py/server/test/handler.py | 1 | 1557 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
try:
import asyncio
except Exception:
import trollius as asyncio
from thrift_asyncio.sleep import Sleep as AsyncSleep
from thrift_asyncio.sleep.ttypes import OverflowResult
from thrift_asyncio.tutorial import Calculator as AsyncCalculator
class AsyncCalculatorHandler(AsyncCalculator.Iface):
async def add(self, n1, n2):
return 42
async def calculate(self, logid, work):
return 0
async def zip(self):
print("zip")
class AsyncSleepHandler(AsyncSleep.Iface):
def __init__(self, loop):
self._loop = loop
async def echo(self, message, delay):
return await asyncio.sleep(delay, result=message, loop=self._loop)
async def overflow(self, value):
# simply return the value in OverflowResult
return OverflowResult(value)
| apache-2.0 | 7,541,806,661,582,863,000 | 30.14 | 74 | 0.73025 | false |
tiborsimko/cookiecutter-invenio-module | scripts/repocheck.py | 1 | 5832 | # -*- coding: utf-8 -*-
#
# This file is part of Cookiecutter - Invenio Module Template
# Copyright (C) 2016, 2017 CERN
#
# Cookiecutter - Invenio Module Template is free software; you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# Cookiecutter - Invenio Module Template is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cookiecutter - Invenio Module Template; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# 02111-1307, USA.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
r"""Script for checking repository against cookiecutter output.
Usage:
.. code-block:: console
$ pip install click
$ cookiecutter gh:inveniosoftware/cookiecutter-invenio-module
$ python repocheck.py ~/original/invenio-fungenerator \
~/new/invenio-fungenerator
Note: When running cookiecutter be sure to use the same input values as when
the repository was originally generated.
"""
import os
import subprocess
from os.path import join
import click
manual_diff = [
'setup.py',
'.travis.yml',
'tests/conftest.py',
'examples/app-fixtures.sh',
'examples/app-setup.sh',
'examples/app-teardown.sh',
'examples/app.py',
'tests/test_examples_app.py',
'requirements-devel.txt',
]
diff_files = [
'.editorconfig',
'.tx/config',
'AUTHORS.rst',
'CHANGES.rst',
'docs/conf.py',
'INSTALL.rst',
'{package_name}/version.py',
'MANIFEST.in',
'README.rst',
'RELEASE-NOTES.rst',
]
identical_files = [
'.dockerignore',
'.gitignore',
'babel.ini',
'CONTRIBUTING.rst',
'docs/authors.rst',
'docs/changes.rst',
'docs/configuration.rst',
'docs/contributing.rst',
'docs/index.rst',
'docs/installation.rst',
'docs/license.rst',
'docs/make.bat',
'docs/Makefile',
'docs/requirements.txt',
'docs/usage.rst',
'LICENSE',
'pytest.ini',
'run-tests.sh',
'setup.cfg',
]
identical_setuppy_attrs = [
'name',
'author',
'author-email',
'contact',
'contact-email',
'url',
'license',
'description',
]
def run_in_dir(path, cmd):
"""Run command in directory."""
cur_dir = os.getcwd()
os.chdir(path)
output = subprocess.check_output(cmd, shell=True).decode('utf-8')
os.chdir(cur_dir)
return output
def diff_file(src, dst, f):
"""Diff a file in src and dst."""
# Skip the lines containing copyright year in the header license
cmd = 'diff -I "Copyright (C) .* CERN." {src} {dst}'.format(
src=join(src, f), dst=join(dst, f))
try:
subprocess.check_output(cmd, shell=True).decode('utf-8')
# Exit code 0 means no difference
return ""
except subprocess.CalledProcessError as e:
if e.returncode >= 0:
return e.output
raise
def equal_content(src, dst, f):
"""Check if file content is equal."""
return diff_file(src, dst, f) == ""
def equal_setuppy_attr(src, dst, attr):
src_out = run_in_dir(src, 'python setup.py --{attr}'.format(attr=attr))
dst_out = run_in_dir(dst, 'python setup.py --{attr}'.format(attr=attr))
return src_out == dst_out
def check_setupy(src, dst, attrs):
for a in attrs:
if not equal_setuppy_attr(src, dst, a):
click.secho('ERROR: setup.py:{0} is out of sync.'.format(a))
def check_identical_files(src, dst, files):
"""Check if files in source matches files in destination."""
for f in files:
try:
if not equal_content(src, dst, f):
click.secho('ERROR: {0} is out of sync.'.format(f))
except IOError as e:
click.secho('ERROR: {0}'.format(e))
def get_package_name(dst):
"""Get the package name e.g.: 'invenio-fungenerator'."""
with open(join(dst, '.editorconfig'), 'r') as fp:
for line in fp.readlines():
if line.startswith('known_first_party'):
return line.partition(' = ')[-1].partition(',')[0].strip()
def diff_similar_files(src, dst, files):
"""Diff files which are supposed to be very similar."""
for f in files:
click.secho('Diff of {0}'.format(f), fg='cyan')
click.echo(diff_file(src, dst, f))
@click.command()
@click.argument('repo-dir', type=click.Path(exists=True, file_okay=False))
@click.argument(
'cookiecutter-output-dir', type=click.Path(exists=True, file_okay=False))
def run(repo_dir, cookiecutter_output_dir):
"""Compare repository against CookieCutter output."""
click.secho(
'Please check diff output for almost identical files...', fg='green')
# Format the diff files with the package name
package_name = get_package_name(cookiecutter_output_dir)
f_diff_files = [f.format(package_name=package_name) for f in diff_files]
diff_similar_files(repo_dir, cookiecutter_output_dir, f_diff_files)
click.secho('Checking identical files...', fg='green')
check_identical_files(repo_dir, cookiecutter_output_dir, identical_files)
click.secho('Please check following files manually:', fg='yellow')
for f in manual_diff:
print(f)
click.secho('Checking identical setup.py attributes...', fg='green')
check_setupy(repo_dir, cookiecutter_output_dir, identical_setuppy_attrs)
if __name__ == '__main__':
run()
| gpl-2.0 | 4,150,678,021,414,243,000 | 28.907692 | 77 | 0.656379 | false |
lennin92/peppers | net-test.py | 1 | 2084 | from utils.settings import CAFFE_PATH, DPLOY_MODEL_FILE, \
TRAIN_MODEL_FILE, PRETRAINED, BASE_SHAPE
import sys
sys.path.append(CAFFE_PATH)
import caffe
try:
import cv2
except ImportError as E:
import cv as cv2
import numpy as np
import os
# Set the right path to your model definition file, pretrained model weights,
# and the image you would like to classify.
IMAGE_FILE = 'PT0.ST0.SE2.IM30'
caffe.set_mode_cpu()
net = caffe.Net(DPLOY_MODEL_FILE, caffe.TEST)
# 'model/convnet.prototxt', caffe.TEST)
# DPLOY_MODEL_FILE, caffe.TEST)
# DPLOY_MODEL_FILE, PRETRAINED,caffe.TEST)
# caffe.TEST)
# '/home/lennin92/dicom/caffe/caffe_iter_4000.caffemodel',
print "NET INFO:"
print "NET.INPUTS = ", net.inputs
print "NET.PARAMS = ", net.params
print "NET.LAYERS:"
for bl in net.blobs:
print " BLOB '%s' SHAPE "%(bl), [s for s in net.blobs[bl].shape]
print "NET.TOTAL_PARAMS = ", sum([ (reduce(lambda x,y: x*y, p.data.shape)) for k in net.params for i,p in enumerate(net.params[k]) ])
# plt.imshow(input_image)
#img = cv2.imread('prbdata/' + IMAGE_FILE + '.png', cv2.IMREAD_GRAYSCALE)
img = caffe.io.load_image('prbdata/' + IMAGE_FILE + '.png', color=False)
print 'img shape = ',img.shape
cv2.imwrite("prbdata/plots/input_%s.png"%(IMAGE_FILE), 255*img)
img = img.reshape((1, 512,512))
print 'img after reshape = ',img.shape
# img_blobinp = img[np.newaxis, np.newaxis, :, :]
# net.blobs['image'].reshape(*img_blobinp.shape)
net.blobs['data'].data[...] = img
cv2.imwrite("prbdata/plots/data_%s.png"%(IMAGE_FILE), 255*net.blobs['data'].data[0,0])
for bl in net.blobs:
net.forward()
print "GENERATING '%s' IMAGES"%(bl)
shape = [s for s in net.blobs[bl].shape]
for i in range(shape[1]):
try:
path = os.path.join('prbdata/plots/'+IMAGE_FILE, bl)
if(not os.path.exists(path)): os.makedirs(path)
cv2.imwrite(path+"/%d.png"%(i), 255*net.blobs[bl].data[0,i])
except Exception, e:
print "error on generating '%s'"%(path+"/%d.png"%(i))
| gpl-3.0 | -1,961,589,484,464,984,300 | 34.931034 | 133 | 0.642514 | false |
nanshihui/PocCollect | component/cacti/cactifiledisclosure.py | 1 | 1103 | #!/usr/bin/env python
# encoding: utf-8
from t import T
import requests,urllib2,json,urlparse
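# This PoC simply requests /cacti.sql on the target host; an HTTP 200 response
# is treated as an exposed Cacti database dump (file disclosure).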
class P(T):
def __init__(self):
T.__init__(self)
def verify(self,head='',context='',ip='',port='',productname={},keywords='',hackinfo=''):
target_url = "http://"+ip+":"+str(port)+"/cacti.sql"
result = {}
result['result']=False
r=None
try:
r=requests.get(url=target_url,timeout=2)
if r.status_code==200:
result['result']=True
result['VerifyInfo'] = {}
result['VerifyInfo']['type']='cacti file disclosure'
result['VerifyInfo']['URL'] =ip+"/cacti.sql"
result['VerifyInfo']['payload']='IP/cacti.sql'
result['VerifyInfo']['result'] =''
else:
pass
except Exception,e:
            print e  # the caught exception object has no .text attribute
finally:
if r is not None:
r.close()
del r
return result
if __name__ == '__main__':
print P().verify(ip='140.114.108.4',port='80')
| mit | -6,480,751,637,654,577,000 | 32.424242 | 93 | 0.485041 | false |
clickwork/clickwork | main/views/base.py | 1 | 10670 | from django import forms
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.urlresolvers import resolve, reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template.loader import get_template
from main.models import Task, WorkInProgress, Response, Result, Review, AutoReview, PageTrack, Announcement
from main.wrapper import get, get_or_post, TemplateResponse, ViewResponse, RefererResponse, \
ForbiddenResponse, RequestGuts
from urlparse import urlparse
import datetime
import sys
from django.db import transaction
import main.views.overview
import main.views.project
import main.views.timesheets
import main.views.task
import user_management.views
###
### WARNING: For efficiency, the list of links displayed on the main
### page is cached in the Django session object (guts.session). If
### you are debugging this page in a way that affects those links, THE
### CACHED LIST WILL NOT BE CLEARED AUTOMATICALLY.
###
@login_required
@get
def home(guts):
"""Manage the display of the homepage. Currently returns a count for
the number of resolvable tasks.
Template data should include the counts of tasks the user can
annotate or merge.
"""
site_messages = Announcement.objects.filter(enabled=True)
respondable_tasks = Task.objects.can_annotate(guts.user)
resolvable_tasks = Task.objects.can_merge(guts.user)
recent_responses = Response.objects.filter(user=guts.user).order_by('-end_time')[0:5]
recent_results = Result.objects.filter(user=guts.user).order_by('-end_time')[0:5]
reviews = Review.objects.filter(complete=False, response__user=guts.user)
if "visitable_pages" not in guts.session:
guts.session["visitable_pages"] = visitable(guts.user)
template = get_template("home.html")
return TemplateResponse(template, {'respondable_tasks': respondable_tasks,
'respondable_task_count': respondable_tasks.count(),
'resolvable_tasks': resolvable_tasks,
'resolvable_task_count': resolvable_tasks.count(),
'recent_responses': recent_responses,
'recent_results': recent_results,
'reviews': reviews,
"pages": guts.session["visitable_pages"],
"messages": site_messages})
def visitable(user):
"""Gather information about which pages this user can visit by
filtering the PAGES variable."""
guts = RequestGuts(user=user)
def reverse_if_visitable(view_function):
try:
seed = view_function.dispatcher["GET"](guts)
if isinstance(seed, ForbiddenResponse):
return None
else:
return reverse(view_function)
except Exception, E:
return None # If a view throws an exception because it's not configured, don't throw errors on the homepage
visitable_pages = [{"category": category,
"url": reverse_if_visitable(view_function),
"description": description}
for category, view_function, description in PAGES
if reverse_if_visitable(view_function)]
## we shouldn't do the "try visiting this page" hack for the next_task page,
## since (a) this should always be accessible, and (b) visiting the page will
## cause a WIP to be assigned to the user as a side effect.
visitable_pages.insert(0, {"category": "Tasks",
"url": reverse(next_task),
"description": "Take next task"})
## hack to include the admin site
if user.is_staff:
visitable_pages.append({"category": "Miscellaneous",
"url": "/admin/",
"description": "Administer the site"})
visitable_pages.append({
"category":"Overviews",
"url": "/user/%s/responses/" % guts.user.username,
"description": "Recently Merged Responses"})
return visitable_pages
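# Each entry returned by visitable() is a dict of the form (illustrative values;
# the concrete URLs come from reverse() lookups, so the path shown is hypothetical):
#   {"category": "Tasks", "url": "/task/next/", "description": "Take next task"}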
@get
def about(guts):
"""Manage the display of the homepage"""
template = get_template("about.html")
return TemplateResponse(template, {})
@transaction.commit_on_success
@login_required
@get
def next_task(guts):
"""Get the next task for a user, and redirect.
It is possible this belongs with the task views.
# TODO: Testing
The input request should have a logged in user. The result
should be:
* If the user has nothing to do, redirect to the home page.
* If the user has a pending review, redirect to that review's page.
* If the user has a task in an auto-review project to look at,
redirect to that page.
* If the user either has a WorkInProgress or there is a task
available for them to work on, a redirect to that task's page.
* If a WorkInProgress exists, the .start_time property of the
WIP should be updated to the current time.
* If no WIP exists, one should be created with the next available
task and the current logged in user.
"""
review = Review.objects.filter(response__user=guts.user, complete=False)
if review.count():
return ViewResponse(main.views.task.next_review)
auto_review_pending = AutoReview.objects.filter(user=guts.user,
start_time__isnull=False,
end_time__isnull=True)
if auto_review_pending.exists():
return ViewResponse(main.views.task.task_view,
auto_review_pending[0].task.id)
new_auto_reviews = AutoReview.objects.filter(
user=guts.user, task__project__priority__gte=0,
start_time__isnull=True, end_time__isnull=True).order_by("-task__project__priority")
if new_auto_reviews.exists():
auto_review = new_auto_reviews[0]
auto_review.start_time = datetime.datetime.now()
auto_review.full_clean()
auto_review.save()
return ViewResponse(main.views.task.task_view,
auto_review.task.id)
wip = None
wips = WorkInProgress.objects.filter(user=guts.user)
if wips.count():
wip = wips[0]
wip.start_time = datetime.datetime.now()
wip.full_clean()
wip.save()
else:
task = Task.objects.next_for(guts.user)
if task:
wip = WorkInProgress(user=guts.user, task=task)
wip.full_clean()
wip.save()
if wip:
return ViewResponse(main.views.task.task_view, wip.task.id)
else:
return ViewResponse(home)
## TODO: Needs testing.
## TODO: This code assumes that each user may only have one WIP.
## The model should enforce that, or the view and template
## need to be adapted to other possibilities.
## TODO: What if the user goes to this page and has no WIPs?
@login_required
@get_or_post
def abandon_wip(get, guts):
"""A view to abandon a WIP. When GETting this page, the user sees
the \"are you sure?\" page. When POSTing, the first WIP that the
user has is deleted.
"""
if get:
wips = WorkInProgress.objects.filter(user=guts.user)
template = get_template("abandon_wip.html")
return TemplateResponse(template, {'wips':wips})
else:
wips = WorkInProgress.objects.filter(user=guts.user)
if wips.count():
wip = wips[0]
wip.delete()
return ViewResponse(home)
else:
template = get_template("abandon_wip.html")
return TemplateResponse(template, {"wips": wips})
class PageTrackForm(forms.Form):
user = forms.ModelChoiceField(queryset=User.objects.order_by("username"))
url = forms.URLField(max_length=100)
focus_time = forms.DateTimeField()
blur_time = forms.DateTimeField(required=False)
from django.template import Template
page_track_template = Template("""{% extends "base.html" %}
{% block title %}Page Track Test (ADMINS ONLY!){% endblock %}
{% block heading %}Page Track Test (ADMINS ONLY!){% endblock %}
{% block content %}
{% if pt %}
<p><b>PageTrack object {{ pt.id }} successfully entered.</b></p>
{% endif %}
<form action="#" method="POST">
{{ form.as_p }}
<input type="submit" value="Submit fake page-tracking info" />
</form>
{% endblock %}""")
@login_required
@get_or_post
def track_page_visit(get, guts):
if get:
form = PageTrackForm()
return TemplateResponse(page_track_template, {"form": form})
else:
if guts.user.is_superuser:
form = PageTrackForm(guts.parameters)
if form.is_valid():
url = form.cleaned_data["url"]
view, view_args, view_kwargs = resolve(urlparse(url).path)
print >>sys.stderr, repr(form.cleaned_data)
pt = PageTrack(user=form.cleaned_data["user"],
view_name=view.__name__,
view_args=repr(view_args),
view_kwargs=repr(view_kwargs),
focus_time=form.cleaned_data["focus_time"])
if "blur_time" in form.cleaned_data:
pt.blur_time = form.cleaned_data["blur_time"]
pt.full_clean()
pt.save()
new_form = PageTrackForm()
return TemplateResponse(page_track_template, {"form": new_form,
"pt": pt})
else:
return TemplateResponse(page_track_template, {"form": form})
else:
return ForbiddenResponse("Only superusers may use this form.")
### These are the pages that might be shown in the sitemap.
### They must all be accessible to at least some users without parameters or URL variations.
PAGES = (("Tasks", abandon_wip, "Abandon the work in progress"),
("Accounts", user_management.views.change_password, "Change your password"),
("Accounts", main.views.timesheets.timesheet, "Generate (estimated) timesheets"),
("Overviews", main.views.task.wip_review, "See works in progress"),
("Overviews", main.views.overview.all_projects_brief, "See projects"),
("Overviews", main.views.overview.all_groups, "See groups"),
("Overviews", main.views.overview.all_users, "See users"),
)
| mit | -6,907,276,051,042,295,000 | 42.198381 | 119 | 0.616401 | false |
lasa/website | app/slide.py | 1 | 3434 | import time
from app import db, utils
from app.models import Post, Slide
from flask import redirect, request
from flask_wtf import Form
from wtforms import validators, StringField, SelectField
URL_REGEX = r'((http(s)?:\/\/.)?(www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b([-a-zA-Z0-9@:%_\+.~#?&/=]*))|\/[-a-zA-Z0-9@:%_\+.~#?&/=]*'
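# URL_REGEX accepts either an absolute http(s)/bare-domain URL or a
# site-relative path, e.g. (illustrative): "https://example.com/page",
# "www.example.com", or "/news?postid=3".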
def generate_lists():
news = [('none', '')]
posts = Post.query.order_by(Post.timestamp.desc()).limit(10)
for post in posts:
news.append(('/news?postid=' + str(post.id_), post.title if len(post.title) <= 50 else post.title[:50] + "..."))
images, links = utils.get_uploads()
return images, links, news
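# The three lists above are (value, label) pairs used to populate the
# SelectField choices on NewSlideForm below.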
class NewSlideForm(Form):
image_list = SelectField('Choose from uploads: ')
image_url = StringField('URL (external link or relative path): ', validators=[validators.InputRequired(), validators.Regexp(URL_REGEX, message="Invalid URL. Must be a valid external link or a relative URL beginning with '/'."), validators.Length(min=0, max=200)])
link_list = SelectField('Choose from uploads: ')
news_list = SelectField('Choose from news: ')
url = StringField('URL (external link or relative path): ', validators=[validators.Optional(), validators.Regexp(URL_REGEX, message="Invalid URL. Must be a valid external link or a relative URL beginning with '/'."), validators.Length(min=0, max=200)])
def __init__(self, **kwargs):
Form.__init__(self, **kwargs)
self.image_list.choices, self.link_list.choices, self.news_list.choices = generate_lists()
def validate(self):
        is_valid = Form.validate(self)
if not (self.image_url.data.startswith('/') or self.image_url.data.startswith("http://") or self.image_url.data.startswith("https://")):
self.image_url.data = "http://" + self.image_url.data
if self.url.data and not (self.url.data.startswith('/') or self.url.data.startswith("http://") or self.url.data.startswith("https://")):
self.url.data = "http://" + self.url.data
return is_valid
def new_slide():
form = NewSlideForm()
if form.validate_on_submit():
data = {"image_url": form.image_url.data,
"url": form.url.data}
newslide = Slide(**data)
db.session.add(newslide)
db.session.commit()
time.sleep(0.5)
return redirect("/slides")
return utils.render_with_navbar("slide/form.html", form=form)
def edit_slide():
id_ = request.args.get("id")
if not id_:
return redirect("/newslide")
current_slide = Slide.query.filter_by(id_=id_).first()
if not current_slide:
return redirect("/newslide")
data = {"image_url": current_slide.image_url,
"url": current_slide.url}
form = NewSlideForm(**data)
if form.validate_on_submit():
new_data = {"image_url": form.image_url.data,
"url": form.url.data}
for key, value in new_data.items():
setattr(current_slide, key, value)
db.session.commit()
time.sleep(0.5)
return redirect("/slides")
return utils.render_with_navbar("slide/form.html", form=form)
def delete_slide():
id_ = request.args.get("id")
if not id_:
return redirect("/slides")
slide = Slide.query.filter_by(id_=id_)
slide.delete()
db.session.commit()
time.sleep(0.5)
return redirect("/slides")
| agpl-3.0 | 9,013,213,698,612,016,000 | 35.147368 | 267 | 0.618229 | false |
jhseu/tensorflow | tensorflow/lite/experimental/examples/lstm/unidirectional_sequence_rnn_test.py | 1 | 9486 | # Lint as: python2, python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
import numpy as np
from six.moves import range
import tensorflow as tf
from tensorflow import flags
from tensorflow.examples.tutorials.mnist import input_data
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
FLAGS = flags.FLAGS
# Number of steps to train model.
# Setting this to 0 disables training entirely, so all weights keep their
# initial values. This helps keep the test small.
TRAIN_STEPS = 0
CONFIG = tf.ConfigProto(device_count={"GPU": 0})
class UnidirectionalSequenceRnnTest(test_util.TensorFlowTestCase):
def __init__(self, *args, **kwargs):
super(UnidirectionalSequenceRnnTest, self).__init__(*args, **kwargs)
# Define constants
# Unrolled through 28 time steps
self.time_steps = 28
# Rows of 28 pixels
self.n_input = 28
# Learning rate for Adam optimizer
self.learning_rate = 0.001
# MNIST is meant to be classified in 10 classes(0-9).
self.n_classes = 10
# Batch size
self.batch_size = 16
# Rnn Units.
self.num_units = 16
def setUp(self):
super(UnidirectionalSequenceRnnTest, self).setUp()
# Import MNIST dataset
data_dir = tempfile.mkdtemp(dir=FLAGS.test_tmpdir)
self.mnist = input_data.read_data_sets(
data_dir, fake_data=True, one_hot=True)
def buildRnnLayer(self):
return tf.keras.layers.StackedRNNCells([
tf.lite.experimental.nn.TfLiteRNNCell(self.num_units, name="rnn1"),
tf.lite.experimental.nn.TfLiteRNNCell(self.num_units, name="rnn2")
])
def buildModel(self, rnn_layer, is_dynamic_rnn):
"""Build Mnist recognition model.
Args:
rnn_layer: The rnn layer either a single rnn cell or a multi rnn cell.
is_dynamic_rnn: Use dynamic_rnn or not.
Returns:
A tuple containing:
- Input tensor of the model.
- Prediction tensor of the model.
- Output class tensor of the model.
"""
# Weights and biases for output softmax layer.
out_weights = tf.Variable(
tf.random.normal([self.num_units, self.n_classes]))
out_bias = tf.Variable(tf.random.normal([self.n_classes]))
# input image placeholder
x = tf.placeholder(
"float", [None, self.time_steps, self.n_input], name="INPUT_IMAGE")
# x is shaped [batch_size,time_steps,num_inputs]
if is_dynamic_rnn:
rnn_input = tf.transpose(x, perm=[1, 0, 2])
outputs, _ = tf.lite.experimental.nn.dynamic_rnn(
rnn_layer, rnn_input, dtype="float32")
outputs = tf.unstack(outputs, axis=0)
else:
rnn_input = tf.unstack(x, self.time_steps, 1)
outputs, _ = tf.nn.static_rnn(rnn_layer, rnn_input, dtype="float32")
# Compute logits by multiplying outputs[-1] of shape [batch_size,num_units]
# by the softmax layer's out_weight of shape [num_units,n_classes]
# plus out_bias
prediction = tf.matmul(outputs[-1], out_weights) + out_bias
output_class = tf.nn.softmax(prediction, name="OUTPUT_CLASS")
return x, prediction, output_class
def trainModel(self, x, prediction, output_class, sess):
"""Train the model.
Args:
x: The input tensor.
prediction: The prediction class tensor.
output_class: The output tensor.
sess: The graph session.
"""
# input label placeholder
y = tf.placeholder("float", [None, self.n_classes])
# Loss function
loss = tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y))
# Optimization
opt = tf.train.AdamOptimizer(
learning_rate=self.learning_rate).minimize(loss)
# Initialize variables
sess.run(tf.global_variables_initializer())
for _ in range(TRAIN_STEPS):
batch_x, batch_y = self.mnist.train.next_batch(
batch_size=self.batch_size, fake_data=True)
batch_x = np.array(batch_x)
batch_y = np.array(batch_y)
batch_x = batch_x.reshape((self.batch_size, self.time_steps,
self.n_input))
sess.run(opt, feed_dict={x: batch_x, y: batch_y})
def saveAndRestoreModel(self, rnn_layer, sess, saver, is_dynamic_rnn):
"""Saves and restores the model to mimic the most common use case.
Args:
rnn_layer: The rnn layer either a single rnn cell or a multi rnn cell.
sess: Old session.
saver: saver created by tf.compat.v1.train.Saver()
is_dynamic_rnn: use dynamic_rnn or not.
Returns:
A tuple containing:
- Input tensor of the restored model.
- Prediction tensor of the restored model.
      - Output tensor, which is the softmax result of the prediction tensor.
      - New session of the restored model.
"""
model_dir = tempfile.mkdtemp(dir=FLAGS.test_tmpdir)
saver.save(sess, model_dir)
# Reset the graph.
tf.reset_default_graph()
x, prediction, output_class = self.buildModel(rnn_layer, is_dynamic_rnn)
new_sess = tf.compat.v1.Session(config=CONFIG)
saver = tf.train.Saver()
saver.restore(new_sess, model_dir)
return x, prediction, output_class, new_sess
def getInferenceResult(self, x, output_class, sess):
"""Get inference result given input tensor and output tensor.
Args:
x: The input tensor.
output_class: The output tensor.
sess: Current session.
Returns:
A tuple containing:
- Input of the next batch, batch size is 1.
- Expected output.
"""
b1, _ = self.mnist.train.next_batch(batch_size=1, fake_data=True)
b1 = np.array(b1, dtype=np.dtype("float32"))
sample_input = np.reshape(b1, (1, self.time_steps, self.n_input))
expected_output = sess.run(output_class, feed_dict={x: sample_input})
return sample_input, expected_output
def tfliteInvoke(self,
sess,
test_inputs,
input_tensor,
output_tensor,
use_mlir_converter=False):
"""Get tflite inference result.
This method will convert tensorflow from session to tflite model then based
on the inputs, run tflite inference and return the results.
Args:
sess: Current tensorflow session.
test_inputs: The test inputs for tflite.
input_tensor: The input tensor of tensorflow graph.
output_tensor: The output tensor of tensorflow graph.
use_mlir_converter: Whether or not to use MLIRConverter to convert the
model.
Returns:
The tflite inference result.
"""
converter = tf.lite.TFLiteConverter.from_session(sess, [input_tensor],
[output_tensor])
converter.experimental_new_converter = use_mlir_converter
tflite = converter.convert()
interpreter = tf.lite.Interpreter(model_content=tflite)
interpreter.allocate_tensors()
input_index = interpreter.get_input_details()[0]["index"]
interpreter.set_tensor(input_index, test_inputs)
interpreter.invoke()
output_index = interpreter.get_output_details()[0]["index"]
result = interpreter.get_tensor(output_index)
# Reset all variables so it will not pollute other inferences.
interpreter.reset_all_variables()
return result
def testStaticRnnMultiRnnCell(self):
sess = tf.compat.v1.Session(config=CONFIG)
x, prediction, output_class = self.buildModel(
self.buildRnnLayer(), is_dynamic_rnn=False)
self.trainModel(x, prediction, output_class, sess)
saver = tf.train.Saver()
x, prediction, output_class, new_sess = self.saveAndRestoreModel(
self.buildRnnLayer(), sess, saver, is_dynamic_rnn=False)
test_inputs, expected_output = self.getInferenceResult(
x, output_class, new_sess)
# Test Toco-converted model.
result = self.tfliteInvoke(new_sess, test_inputs, x, output_class, False)
self.assertTrue(np.allclose(expected_output, result, rtol=1e-6, atol=1e-2))
@test_util.enable_control_flow_v2
def testDynamicRnnMultiRnnCell(self):
sess = tf.compat.v1.Session(config=CONFIG)
x, prediction, output_class = self.buildModel(
self.buildRnnLayer(), is_dynamic_rnn=True)
self.trainModel(x, prediction, output_class, sess)
saver = tf.train.Saver()
x, prediction, output_class, new_sess = self.saveAndRestoreModel(
self.buildRnnLayer(), sess, saver, is_dynamic_rnn=True)
test_inputs, expected_output = self.getInferenceResult(
x, output_class, new_sess)
# Test Toco-converted model.
result = self.tfliteInvoke(new_sess, test_inputs, x, output_class, False)
self.assertTrue(np.allclose(expected_output, result, rtol=1e-6, atol=1e-2))
if __name__ == "__main__":
test.main()
| apache-2.0 | 328,589,620,900,935,500 | 33.620438 | 80 | 0.668564 | false |
googleapis/googleapis-gen | google/ads/googleads/v6/googleads-py/google/ads/googleads/v6/services/services/account_budget_proposal_service/transports/grpc.py | 1 | 12388 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v6.resources.types import account_budget_proposal
from google.ads.googleads.v6.services.types import account_budget_proposal_service
from .base import AccountBudgetProposalServiceTransport, DEFAULT_CLIENT_INFO
class AccountBudgetProposalServiceGrpcTransport(AccountBudgetProposalServiceTransport):
"""gRPC backend transport for AccountBudgetProposalService.
A service for managing account-level budgets via proposals.
A proposal is a request to create a new budget or make changes
to an existing one.
Reads for account-level budgets managed by these proposals will
be supported in a future version. Until then, please use the
BudgetOrderService from the AdWords API. Learn more at
    https://developers.google.com/adwords/api/docs/guides/budget-order
Mutates:
The CREATE operation creates a new proposal.
UPDATE operations aren't supported.
The REMOVE operation cancels a pending proposal.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
def __init__(self, *,
host: str = 'googleads.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._ssl_channel_credentials = ssl_channel_credentials
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning)
host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443"
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id)
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
ssl_credentials=ssl_channel_credentials,
scopes=self.AUTH_SCOPES,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
host=host,
credentials=credentials,
client_info=client_info,
)
@classmethod
def create_channel(cls,
host: str = 'googleads.googleapis.com',
credentials: ga_credentials.Credentials = None,
scopes: Optional[Sequence[str]] = None,
**kwargs) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
            host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
scopes=scopes or cls.AUTH_SCOPES,
**kwargs
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def get_account_budget_proposal(self) -> Callable[
[account_budget_proposal_service.GetAccountBudgetProposalRequest],
account_budget_proposal.AccountBudgetProposal]:
r"""Return a callable for the get account budget proposal method over gRPC.
Returns an account-level budget proposal in full
detail.
Returns:
Callable[[~.GetAccountBudgetProposalRequest],
~.AccountBudgetProposal]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_account_budget_proposal' not in self._stubs:
self._stubs['get_account_budget_proposal'] = self.grpc_channel.unary_unary(
'/google.ads.googleads.v6.services.AccountBudgetProposalService/GetAccountBudgetProposal',
request_serializer=account_budget_proposal_service.GetAccountBudgetProposalRequest.serialize,
response_deserializer=account_budget_proposal.AccountBudgetProposal.deserialize,
)
return self._stubs['get_account_budget_proposal']
@property
def mutate_account_budget_proposal(self) -> Callable[
[account_budget_proposal_service.MutateAccountBudgetProposalRequest],
account_budget_proposal_service.MutateAccountBudgetProposalResponse]:
r"""Return a callable for the mutate account budget proposal method over gRPC.
Creates, updates, or removes account budget
proposals. Operation statuses are returned.
Returns:
Callable[[~.MutateAccountBudgetProposalRequest],
~.MutateAccountBudgetProposalResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'mutate_account_budget_proposal' not in self._stubs:
self._stubs['mutate_account_budget_proposal'] = self.grpc_channel.unary_unary(
'/google.ads.googleads.v6.services.AccountBudgetProposalService/MutateAccountBudgetProposal',
request_serializer=account_budget_proposal_service.MutateAccountBudgetProposalRequest.serialize,
response_deserializer=account_budget_proposal_service.MutateAccountBudgetProposalResponse.deserialize,
)
return self._stubs['mutate_account_budget_proposal']
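# Sketch of constructing this transport directly (normally the generated service
# client wires it up); the values shown are illustrative, not required settings:
#
#   transport = AccountBudgetProposalServiceGrpcTransport(
#       host='googleads.googleapis.com',
#       credentials=None,  # falls back to google.auth.default() with AUTH_SCOPES
#   )
#   channel = transport.grpc_channel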
__all__ = (
'AccountBudgetProposalServiceGrpcTransport',
)
| apache-2.0 | 9,219,464,548,002,433,000 | 45.052045 | 118 | 0.629077 | false |
russomi/appengine-pipeline-read-only | test/pipeline_test.py | 2 | 173889 | #!/usr/bin/python2.5
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the Pipeline API."""
from __future__ import with_statement
import base64
import datetime
import logging
import os
import pickle
import sys
import unittest
import urllib
# Fix up paths for running tests.
sys.path.insert(0, '../src/')
from pipeline import simplejson
from pipeline import testutil
from pipeline import common
from pipeline import models
from pipeline import pipeline
import test_shared
from google.appengine.api import mail
from google.appengine.ext import blobstore
from google.appengine.ext import db
# For convenience.
_PipelineRecord = pipeline.models._PipelineRecord
_SlotRecord = pipeline.models._SlotRecord
_BarrierRecord = pipeline.models._BarrierRecord
_StatusRecord = pipeline.models._StatusRecord
class TestBase(unittest.TestCase):
"""Base class for all tests in this module."""
def setUp(self):
testutil.setup_for_testing(define_queues=['other'])
super(TestBase, self).setUp()
def assertIn(self, the_thing, what_thing_should_be_in):
"""Asserts that something is contained in something else."""
if the_thing not in what_thing_should_be_in:
raise AssertionError('Could not find %r in %r' % (
the_thing, what_thing_should_be_in))
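# Note: assertIn is defined here presumably because the unittest module bundled
# with this Python 2.5-era runtime predates the built-in assertIn helper.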
class SlotTest(TestBase):
"""Tests for the Slot class."""
def testCreate(self):
"""Tests creating Slots with names and keys."""
slot = pipeline.Slot(name='stuff')
self.assertEquals('stuff', slot.name)
self.assertTrue(slot.key)
self.assertFalse(slot.filled)
self.assertFalse(slot._exists)
self.assertRaises(pipeline.SlotNotFilledError, lambda: slot.value)
self.assertRaises(pipeline.SlotNotFilledError, lambda: slot.filler)
self.assertRaises(pipeline.SlotNotFilledError, lambda: slot.fill_datetime)
slot_key = db.Key.from_path('mykind', 'mykey')
slot = pipeline.Slot(name='stuff', slot_key=slot_key)
self.assertEquals('stuff', slot.name)
self.assertEquals(slot_key, slot.key)
self.assertFalse(slot.filled)
self.assertTrue(slot._exists)
self.assertRaises(pipeline.UnexpectedPipelineError, pipeline.Slot)
def testSlotRecord(self):
"""Tests filling Slot attributes with a _SlotRecord."""
slot_key = db.Key.from_path('myslot', 'mykey')
filler_key = db.Key.from_path('myfiller', 'mykey')
now = datetime.datetime.utcnow()
slot_record = _SlotRecord(
filler=filler_key,
value=simplejson.dumps('my value'),
status=_SlotRecord.FILLED,
fill_time=now)
slot = pipeline.Slot(name='stuff', slot_key=slot_key)
slot._set_value(slot_record)
self.assertTrue(slot._exists)
self.assertTrue(slot.filled)
self.assertEquals('my value', slot.value)
self.assertEquals(filler_key.name(), slot.filler)
self.assertEquals(now, slot.fill_datetime)
def testValueTestMode(self):
"""Tests filling Slot attributes for test mode."""
slot_key = db.Key.from_path('myslot', 'mykey')
filler_key = db.Key.from_path('myfiller', 'mykey')
now = datetime.datetime.utcnow()
value = 'my value'
slot = pipeline.Slot(name='stuff', slot_key=slot_key)
slot._set_value_test(filler_key, value)
self.assertTrue(slot._exists)
self.assertTrue(slot.filled)
self.assertEquals('my value', slot.value)
self.assertEquals(filler_key.name(), slot.filler)
self.assertTrue(isinstance(slot.fill_datetime, datetime.datetime))
class PipelineFutureTest(TestBase):
"""Tests for the PipelineFuture class."""
def testNormal(self):
"""Tests using a PipelineFuture in normal mode."""
future = pipeline.PipelineFuture([])
self.assertTrue('default' in future._output_dict)
default = future.default
self.assertTrue(isinstance(default, pipeline.Slot))
self.assertFalse(default.filled)
self.assertFalse('stuff' in future._output_dict)
stuff = future.stuff
self.assertTrue('stuff' in future._output_dict)
self.assertNotEquals(stuff.key, default.key)
self.assertTrue(isinstance(stuff, pipeline.Slot))
self.assertFalse(stuff.filled)
def testStrictMode(self):
"""Tests using a PipelineFuture that's in strict mode."""
future = pipeline.PipelineFuture(['one', 'two'])
self.assertTrue(future._strict)
self.assertTrue('default' in future._output_dict)
self.assertTrue('one' in future._output_dict)
self.assertTrue('two' in future._output_dict)
default = future.default
self.assertTrue(isinstance(default, pipeline.Slot))
self.assertFalse(default.filled)
one = future.one
self.assertTrue(isinstance(one, pipeline.Slot))
self.assertFalse(one.filled)
self.assertNotEquals(one.key, default.key)
two = future.two
self.assertTrue(isinstance(two, pipeline.Slot))
self.assertFalse(two.filled)
self.assertNotEquals(two.key, default.key)
self.assertNotEquals(two.key, one.key)
self.assertRaises(pipeline.SlotNotDeclaredError, lambda: future.three)
def testReservedOutputs(self):
"""Tests reserved output slot names."""
self.assertRaises(pipeline.UnexpectedPipelineError,
pipeline.PipelineFuture, ['default'])
def testInheritOutputs(self):
"""Tests _inherit_outputs without resolving their values."""
future = pipeline.PipelineFuture([])
already_defined = {
'one': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist1')),
'two': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist2')),
'three': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist3')),
'default': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist4')),
}
future = pipeline.PipelineFuture([])
self.assertFalse(future.default._exists)
future._inherit_outputs('mypipeline', already_defined)
self.assertEquals(already_defined['one'], str(future.one.key))
self.assertEquals(already_defined['two'], str(future.two.key))
self.assertEquals(already_defined['three'], str(future.three.key))
self.assertEquals(already_defined['default'], str(future.default.key))
self.assertTrue(future.one._exists)
self.assertTrue(future.two._exists)
self.assertTrue(future.three._exists)
self.assertTrue(future.default._exists)
def testInheritOutputsStrictMode(self):
"""Tests _inherit_outputs without resolving their values in strict mode."""
already_defined = {
'one': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist1')),
'two': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist2')),
'three': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist3')),
'default': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist4')),
}
future = pipeline.PipelineFuture(['one', 'two', 'three'])
self.assertFalse(future.one._exists)
self.assertFalse(future.two._exists)
self.assertFalse(future.three._exists)
self.assertFalse(future.default._exists)
future._inherit_outputs('mypipeline', already_defined)
self.assertEquals(already_defined['one'], str(future.one.key))
self.assertEquals(already_defined['two'], str(future.two.key))
self.assertEquals(already_defined['three'], str(future.three.key))
self.assertEquals(already_defined['default'], str(future.default.key))
self.assertTrue(future.one._exists)
self.assertTrue(future.two._exists)
self.assertTrue(future.three._exists)
self.assertTrue(future.default._exists)
def testInheritOutputsStrictModeUndeclared(self):
"""Tests _inherit_outputs when an inherited output has not been declared."""
already_defined = {
'one': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist1')),
'two': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist2')),
'three': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist3')),
'default': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist4')),
'five': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist5')),
}
future = pipeline.PipelineFuture(['one', 'two', 'three'])
self.assertRaises(pipeline.UnexpectedPipelineError, future._inherit_outputs,
'mypipeline', already_defined)
def testInheritOutputsResolveValues(self):
"""Tests _inherit_outputs with resolving their current values."""
one = _SlotRecord(
value=simplejson.dumps('hi one'),
status=_SlotRecord.FILLED,
fill_time=datetime.datetime.utcnow(),
filler=db.Key.from_path('mykind', 'mykey1'))
one.put()
two = _SlotRecord(
value=simplejson.dumps('hi two'),
status=_SlotRecord.FILLED,
fill_time=datetime.datetime.utcnow(),
filler=db.Key.from_path('mykind', 'mykey2'))
two.put()
three = _SlotRecord()
three.put()
default = _SlotRecord()
default.put()
already_defined = {
'one': str(one.key()),
'two': str(two.key()),
'three': str(three.key()),
'default': str(default.key()),
}
future = pipeline.PipelineFuture([])
future._inherit_outputs('mypipeline', already_defined, resolve_outputs=True)
self.assertEquals('hi one', future.one.value)
self.assertEquals('hi two', future.two.value)
self.assertFalse(future.three.filled)
def testInheritOutputsResolveValuesMissing(self):
"""Tests when output _SlotRecords are missing for inherited outputs."""
already_defined = {
'four': str(db.Key.from_path(_SlotRecord.kind(), 'does not exist')),
}
future = pipeline.PipelineFuture([])
self.assertRaises(pipeline.UnexpectedPipelineError, future._inherit_outputs,
'mypipeline', already_defined, resolve_outputs=True)
class NothingPipeline(pipeline.Pipeline):
"""Pipeline that does nothing."""
output_names = ['one', 'two']
def run(self):
self.fill('one', 1)
self.fill('two', 1)
class OutputlessPipeline(pipeline.Pipeline):
"""Pipeline that outputs nothing."""
def run(self):
pass
class AsyncOutputlessPipeline(pipeline.Pipeline):
"""Pipeline that outputs nothing."""
async = True
def run(self):
self.complete()
class AsyncCancellable(pipeline.Pipeline):
"""Pipeline that can be cancelled."""
async = True
def run(self):
self.complete()
def try_cancel(self):
return True
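# The small pipelines above (NothingPipeline, OutputlessPipeline, and the async
# variants) are fixtures exercised by the PipelineTest cases below.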
class PipelineTest(TestBase):
"""Tests for the Pipeline class."""
def testClassPath(self):
"""Tests the class path resolution class method."""
module_dict = {}
self.assertEquals(None, pipeline.Pipeline._class_path)
pipeline.Pipeline._set_class_path(module_dict)
self.assertEquals(None, pipeline.Pipeline._class_path)
NothingPipeline._class_path = None
self.assertRaises(ImportError, NothingPipeline._set_class_path,
module_dict=module_dict)
self.assertEquals(None, NothingPipeline._class_path)
class MyModule(object):
pass
mymodule = MyModule()
setattr(mymodule, 'NothingPipeline', NothingPipeline)
# Does not require __main__.
module_dict['other'] = mymodule
NothingPipeline._set_class_path(module_dict=module_dict)
self.assertEquals('other.NothingPipeline', NothingPipeline._class_path)
# Will ignore __main__.
NothingPipeline._class_path = None
module_dict['__main__'] = mymodule
NothingPipeline._set_class_path(module_dict=module_dict)
self.assertEquals('other.NothingPipeline', NothingPipeline._class_path)
# Will use __main__ as a last resort.
NothingPipeline._class_path = None
del module_dict['other']
NothingPipeline._set_class_path(module_dict=module_dict)
self.assertEquals('__main__.NothingPipeline', NothingPipeline._class_path)
# Will break if could not find class name and it's not in __main__.
NothingPipeline._class_path = None
setattr(mymodule, 'NothingPipeline', object())
module_dict = {'__main__': mymodule}
self.assertRaises(ImportError, NothingPipeline._set_class_path,
module_dict=module_dict)
def testStart(self):
"""Tests starting a Pipeline."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
self.assertEquals(('one', 'two'), stage.args)
self.assertEquals({'three': 'red', 'four': 1234}, stage.kwargs)
self.assertTrue(stage.start() is None)
self.assertEquals('default', stage.queue_name)
self.assertEquals('/_ah/pipeline', stage.base_path)
self.assertEquals(stage.pipeline_id, stage.root_pipeline_id)
self.assertTrue(stage.is_root)
pipeline_record = _PipelineRecord.get_by_key_name(stage.pipeline_id)
self.assertTrue(pipeline_record is not None)
self.assertEquals('__main__.NothingPipeline', pipeline_record.class_path)
self.assertEquals(_PipelineRecord.WAITING, pipeline_record.status)
params = pipeline_record.params
self.assertEquals(params['args'],
[{'type': 'value', 'value': 'one'}, {'type': 'value', 'value': 'two'}])
self.assertEquals(params['kwargs'],
{'four': {'type': 'value', 'value': 1234},
'three': {'type': 'value', 'value': 'red'}})
self.assertEquals([], params['after_all'])
self.assertEquals('default', params['queue_name'])
self.assertEquals('/_ah/pipeline', params['base_path'])
self.assertEquals(set(NothingPipeline.output_names + ['default']),
set(params['output_slots'].keys()))
self.assertTrue(pipeline_record.is_root_pipeline)
self.assertTrue(isinstance(pipeline_record.start_time, datetime.datetime))
# Verify that all output slots are present.
slot_records = list(_SlotRecord.all().filter(
'root_pipeline =',
db.Key.from_path(_PipelineRecord.kind(), stage.pipeline_id)))
slot_dict = dict((s.key(), s) for s in slot_records)
self.assertEquals(3, len(slot_dict))
for outputs in params['output_slots'].itervalues():
slot_record = slot_dict[db.Key(outputs)]
self.assertEquals(_SlotRecord.WAITING, slot_record.status)
# Verify that trying to add another output slot will fail.
self.assertRaises(pipeline.SlotNotDeclaredError,
lambda: stage.outputs.does_not_exist)
# Verify that the slot existence has been set to true.
for slot in stage.outputs._output_dict.itervalues():
self.assertTrue(slot._exists)
# Verify the enqueued task.
task_list = test_shared.get_tasks()
self.assertEquals(1, len(task_list))
task = task_list[0]
self.assertEquals(
{'pipeline_key': [str(db.Key.from_path(
_PipelineRecord.kind(), stage.pipeline_id))]},
task['params'])
self.assertEquals('/_ah/pipeline/run', task['url'])
def testStartIdempotenceKey(self):
"""Tests starting a pipeline with an idempotence key."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
self.assertTrue(stage.start(idempotence_key='banana') is None)
self.assertEquals('banana', stage.pipeline_id)
def testStartReturnTask(self):
"""Tests starting a pipeline and returning the kick-off task."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
task = stage.start(return_task=True, idempotence_key='banana')
self.assertEquals(0, len(test_shared.get_tasks()))
self.assertEquals('/_ah/pipeline/run', task.url)
self.assertEquals(
'pipeline_key=%s' % db.Key.from_path(_PipelineRecord.kind(), 'banana'),
task.payload)
self.assertTrue(task.name is None)
def testStartQueueName(self):
"""Tests that the start queue name will be preserved."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
self.assertTrue(stage.start(queue_name='other') is None)
self.assertEquals(0, len(test_shared.get_tasks('default')))
self.assertEquals(1, len(test_shared.get_tasks('other')))
def testStartUndeclaredOutputs(self):
"""Tests that accessing undeclared outputs on a root pipeline will err.
Only applies to root pipelines that have no named outputs and only have
the default output slot.
"""
stage = OutputlessPipeline()
stage.start()
self.assertFalse(stage.outputs.default.filled)
self.assertRaises(pipeline.SlotNotDeclaredError, lambda: stage.outputs.blah)
def testStartIdempotenceKeyExists(self):
"""Tests when the idempotence key is a dupe."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
other_stage = OutputlessPipeline()
self.assertRaises(pipeline.PipelineExistsError,
other_stage.start, idempotence_key='banana')
def testStartIdempotenceKeyIsRandomGarbage(self):
"""Tests when the idempotence key binary garbage."""
idempotence_key = '\xfb\xcaOu\t72\xa2\x08\xc9\xb9\x82\xa1\xf4>\xba>SwL'
self.assertRaises(UnicodeDecodeError, idempotence_key.encode, 'utf-8')
stage = OutputlessPipeline()
stage.start(idempotence_key=idempotence_key)
other_stage = OutputlessPipeline()
self.assertRaises(pipeline.PipelineExistsError,
other_stage.start, idempotence_key=idempotence_key)
result = OutputlessPipeline.from_id(idempotence_key)
self.assertTrue(result is not None)
def testStartRetryParameters(self):
"""Tests setting retry backoff parameters before calling start()."""
stage = OutputlessPipeline()
stage.max_attempts = 15
stage.backoff_seconds = 1234.56
stage.backoff_factor = 2.718
stage.start(idempotence_key='banana')
pipeline_record = _PipelineRecord.get_by_key_name(stage.pipeline_id)
self.assertTrue(pipeline_record is not None)
self.assertEquals(15, pipeline_record.params['max_attempts'])
self.assertEquals(1234.56, pipeline_record.params['backoff_seconds'])
self.assertEquals(2.718, pipeline_record.params['backoff_factor'])
def testStartException(self):
"""Tests when a dependent method from start raises an exception."""
def mock_raise(*args, **kwargs):
raise Exception('Doh! Fake error')
stage = OutputlessPipeline()
stage._set_values_internal = mock_raise
try:
stage.start(idempotence_key='banana')
self.fail('Did not raise')
except pipeline.PipelineSetupError, e:
self.assertEquals(
'Error starting __main__.OutputlessPipeline(*(), **{})#banana: '
'Doh! Fake error',
str(e))
def testFromId(self):
"""Tests retrieving a Pipeline instance by ID."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
stage.max_attempts = 15
stage.backoff_seconds = 1234.56
stage.backoff_factor = 2.718
stage.target = 'my-other-target'
stage.start(queue_name='other', base_path='/other', idempotence_key='meep')
other = NothingPipeline.from_id(stage.pipeline_id)
self.assertEquals(('one', 'two'), other.args)
self.assertEquals({'three': 'red', 'four': 1234}, other.kwargs)
self.assertEquals('other', other.queue_name)
self.assertEquals('/other', other.base_path)
self.assertEquals('meep', other.pipeline_id)
self.assertEquals('meep', other.root_pipeline_id)
self.assertTrue(other.is_root)
self.assertEquals(15, other.max_attempts)
self.assertEquals(1234.56, other.backoff_seconds)
self.assertEquals(2.718, other.backoff_factor)
self.assertEquals('my-other-target', other.target)
self.assertEquals(1, other.current_attempt)
self.assertFalse(other.outputs.one.filled)
self.assertEquals(stage.outputs.one.key, other.outputs.one.key)
self.assertFalse(other.outputs.two.filled)
self.assertEquals(stage.outputs.two.key, other.outputs.two.key)
def testFromIdResolveOutputs(self):
"""Tests retrieving a Pipeline instance by ID and resolving its outputs."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
stage.start(queue_name='other', base_path='/other', idempotence_key='meep')
stage.fill('one', 'red')
stage.fill('two', 'blue')
other = NothingPipeline.from_id(stage.pipeline_id)
self.assertTrue(other.outputs.one.filled)
self.assertEquals(stage.outputs.one.key, other.outputs.one.key)
self.assertEquals('red', other.outputs.one.value)
self.assertTrue(other.outputs.two.filled)
self.assertEquals(stage.outputs.two.key, other.outputs.two.key)
self.assertEquals('blue', other.outputs.two.value)
def testFromIdReturnsOriginalClass(self):
"""Tests that from_id() will always return the original class."""
stage = AsyncOutputlessPipeline()
stage.start()
other = pipeline.Pipeline.from_id(stage.pipeline_id)
self.assertTrue(isinstance(other, AsyncOutputlessPipeline))
self.assertTrue(type(other) is not pipeline.Pipeline)
self.assertTrue(other.async) # Class variables preserved
def testFromIdCannotFindOriginalClass(self):
"""Tests when from_id() cannot find the original class."""
stage = NothingPipeline()
stage.start()
pipeline_record = _PipelineRecord.get_by_key_name(stage.pipeline_id)
pipeline_record.class_path = 'does_not_exist.or_something'
pipeline_record.put()
other = pipeline.Pipeline.from_id(stage.pipeline_id)
self.assertTrue(type(other) is pipeline.Pipeline)
def testFillString(self):
"""Tests filling a slot by name."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
stage.start(queue_name='other', base_path='/other', idempotence_key='meep')
stage.fill('one', 'red')
stage.fill('two', 'blue')
other = NothingPipeline.from_id(stage.pipeline_id)
self.assertEquals('red', other.outputs.one.value)
self.assertEquals('blue', other.outputs.two.value)
def testFillSlot(self):
"""Tests filling a slot with a Slot instance."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
stage.start(queue_name='other', base_path='/other', idempotence_key='meep')
stage.fill(stage.outputs.one, 'red')
stage.fill(stage.outputs.two, 'blue')
other = NothingPipeline.from_id(stage.pipeline_id)
self.assertEquals('red', other.outputs.one.value)
self.assertEquals('blue', other.outputs.two.value)
def testFillSlot_Huge(self):
"""Tests filling a slot with over 1MB of data."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
stage.start(queue_name='other', base_path='/other', idempotence_key='meep')
big_data = 'red' * 1000000
self.assertTrue(len(big_data) > 1000000)
small_data = 'blue' * 500
self.assertTrue(len(small_data) < 1000000)
stage.fill(stage.outputs.one, big_data)
stage.fill(stage.outputs.two, small_data)
other = NothingPipeline.from_id(stage.pipeline_id)
self.assertEquals(big_data, other.outputs.one.value)
self.assertEquals(small_data, other.outputs.two.value)
def testFillSlotErrors(self):
"""Tests errors that happen when filling slots."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
stage.start(queue_name='other', base_path='/other', idempotence_key='meep')
self.assertRaises(pipeline.UnexpectedPipelineError,
stage.fill, object(), 'red')
slot = pipeline.Slot(name='one')
self.assertRaises(pipeline.SlotNotDeclaredError,
stage.fill, slot, 'red')
db.delete(stage.outputs.one.key)
self.assertRaises(pipeline.UnexpectedPipelineError,
stage.fill, stage.outputs.one, 'red')
def testComplete(self):
"""Tests asynchronous completion of the pipeline."""
stage = AsyncOutputlessPipeline()
stage.start(idempotence_key='banana')
stage.complete(1234)
other = AsyncOutputlessPipeline.from_id(stage.pipeline_id)
self.assertEquals(1234, other.outputs.default.value)
def testCompleteDisallowed(self):
"""Tests completion of the pipeline when it's not asynchronous."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
stage.start()
self.assertRaises(pipeline.UnexpectedPipelineError, stage.complete)
def testGetCallbackUrl(self):
"""Tests the get_callback_url method."""
stage = AsyncOutputlessPipeline()
stage.start(idempotence_key='banana')
result = stage.get_callback_url(one='red', two='blue', three=12345)
self.assertEquals(
'/_ah/pipeline/callback'
'?pipeline_id=banana&three=12345&two=blue&one=red',
result)
def testGetCallbackTask(self):
"""Tests the get_callback_task method."""
stage = AsyncOutputlessPipeline()
stage.start(idempotence_key='banana')
now = datetime.datetime.utcnow()
task = stage.get_callback_task(
params=dict(one='red', two='blue', three=12345),
method='overridden',
name='my-name',
eta=now)
self.assertEquals('/_ah/pipeline/callback', task.url)
self.assertEquals(
'pipeline_id=banana&three=12345&two=blue&one=red', task.payload)
self.assertEquals('POST', task.method)
self.assertEquals('my-name', task.name)
self.assertEquals(now, task.eta.replace(tzinfo=None))
  def testAccessorsUnknown(self):
"""Tests using accessors when they have unknown values."""
stage = OutputlessPipeline()
self.assertTrue(stage.pipeline_id is None)
self.assertTrue(stage.root_pipeline_id is None)
self.assertTrue(stage.queue_name is None)
self.assertTrue(stage.base_path is None)
self.assertFalse(stage.has_finalized)
self.assertFalse(stage.was_aborted)
self.assertFalse(stage.has_finalized)
def testHasFinalized(self):
"""Tests the has_finalized method."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
self.assertFalse(stage.has_finalized)
other = OutputlessPipeline.from_id(stage.pipeline_id)
self.assertFalse(other.has_finalized)
other._context.transition_complete(other._pipeline_key)
another = OutputlessPipeline.from_id(stage.pipeline_id)
self.assertTrue(another.has_finalized)
def testWasAborted(self):
"""Tests the was_aborted method."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
self.assertFalse(stage.was_aborted)
other = OutputlessPipeline.from_id(stage.pipeline_id)
self.assertFalse(other.was_aborted)
other.abort()
# Even after sending the abort signal, it won't show up as aborted.
another = OutputlessPipeline.from_id(stage.pipeline_id)
self.assertFalse(another.was_aborted)
# Now transition to the aborted state.
another._context.transition_aborted(stage._pipeline_key)
yet_another = OutputlessPipeline.from_id(stage.pipeline_id)
self.assertTrue(yet_another.was_aborted)
def testRetryPossible(self):
"""Tests calling retry when it is possible."""
stage = AsyncCancellable()
stage.start(idempotence_key='banana')
self.assertEquals(1, stage.current_attempt)
self.assertTrue(stage.retry('My message 1'))
other = AsyncCancellable.from_id(stage.pipeline_id)
self.assertEquals(2, other.current_attempt)
self.assertTrue(stage.retry())
other = AsyncCancellable.from_id(stage.pipeline_id)
self.assertEquals(3, other.current_attempt)
def testRetryNotPossible(self):
"""Tests calling retry when the pipeline says it's not possible."""
stage = AsyncOutputlessPipeline()
stage.start(idempotence_key='banana')
self.assertEquals(1, stage.current_attempt)
self.assertFalse(stage.retry())
other = AsyncCancellable.from_id(stage.pipeline_id)
self.assertEquals(1, other.current_attempt)
def testRetryDisallowed(self):
"""Tests retry of the pipeline when it's not asynchronous."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
self.assertEquals(1, stage.current_attempt)
self.assertRaises(pipeline.UnexpectedPipelineError, stage.retry)
def testAbortRootSync(self):
"""Tests aborting a non-async, root pipeline."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
self.assertTrue(stage.abort('gotta bail!'))
    # Does not affect the current instance; it's just a signal.
self.assertFalse(stage.was_aborted)
def testAbortRootAsync(self):
"""Tests when the root pipeline is async and try_cancel is True."""
stage = AsyncCancellable()
stage.start(idempotence_key='banana')
self.assertTrue(stage.abort('gotta bail!'))
    # Does not affect the current instance; it's just a signal.
self.assertFalse(stage.was_aborted)
def testAbortRootAsyncNotPossible(self):
"""Tests when the root pipeline is async and cannot be canceled."""
stage = AsyncOutputlessPipeline()
stage.start(idempotence_key='banana')
self.assertFalse(stage.abort('gotta bail!'))
    # Does not affect the current instance; it's just a signal.
self.assertFalse(stage.was_aborted)
def testAbortRootSyncAlreadyAborted(self):
"""Tests aborting when the sync pipeline has already been aborted."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
self.assertTrue(stage.abort('gotta bail!'))
self.assertFalse(stage.abort('gotta bail 2!'))
def testAbortRootAsyncAlreadyAborted(self):
"""Tests aborting when the async pipeline has already been aborted."""
stage = AsyncCancellable()
stage.start(idempotence_key='banana')
self.assertTrue(stage.abort('gotta bail!'))
self.assertFalse(stage.abort('gotta bail 2!'))
def testFinalizeEmailDone_HighReplication(self):
"""Tests completion emails for completed root pipelines on HRD."""
old_app_id = os.environ['APPLICATION_ID']
testutil.TEST_APP_ID = 's~my-hrd-app'
os.environ['APPLICATION_ID'] = testutil.TEST_APP_ID
testutil.setup_for_testing(define_queues=['other'])
try:
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
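    # Mark the root pipeline complete so send_result_email reports success.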
stage._context.transition_complete(stage._pipeline_key)
other = OutputlessPipeline.from_id(stage.pipeline_id)
result = []
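    # Stub out Pipeline._send_mail so the outgoing message is captured in
    # 'result' instead of actually being sent; restored in the finally block.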
def fake_mail(self, sender, subject, body, html=None):
result.append((sender, subject, body, html))
old_sendmail = pipeline.Pipeline._send_mail
pipeline.Pipeline._send_mail = fake_mail
try:
other.send_result_email()
finally:
pipeline.Pipeline._send_mail = old_sendmail
self.assertEquals(1, len(result))
sender, subject, body, html = result[0]
self.assertEquals('[email protected]', sender)
self.assertEquals(
'Pipeline successful: App "my-hrd-app", '
'__main__.OutputlessPipeline#banana',
subject)
self.assertEquals(
'View the pipeline results here:\n\n'
'http://my-hrd-app.appspot.com/_ah/pipeline/status?root=banana\n\n'
'Thanks,\n\nThe Pipeline API\n',
body)
self.assertEquals(
'<html><body>\n<p>View the pipeline results here:</p>\n\n<p><a href="'
'http://my-hrd-app.appspot.com/_ah/pipeline/status?root=banana"\n'
'>http://my-hrd-app.appspot.com/_ah/pipeline/status?root=banana'
'</a></p>\n\n<p>\nThanks,\n<br>\nThe Pipeline API\n</p>\n'
'</body></html>\n',
html)
finally:
testutil.TEST_APP_ID = old_app_id
os.environ['APPLICATION_ID'] = old_app_id
def testFinalizeEmailDone(self):
"""Tests completion emails for completed root pipelines."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
stage._context.transition_complete(stage._pipeline_key)
other = OutputlessPipeline.from_id(stage.pipeline_id)
result = []
def fake_mail(self, sender, subject, body, html=None):
result.append((sender, subject, body, html))
old_sendmail = pipeline.Pipeline._send_mail
pipeline.Pipeline._send_mail = fake_mail
try:
other.send_result_email()
finally:
pipeline.Pipeline._send_mail = old_sendmail
self.assertEquals(1, len(result))
sender, subject, body, html = result[0]
self.assertEquals('[email protected]', sender)
self.assertEquals(
'Pipeline successful: App "my-app-id", '
'__main__.OutputlessPipeline#banana',
subject)
self.assertEquals(
'View the pipeline results here:\n\n'
'http://my-app-id.appspot.com/_ah/pipeline/status?root=banana\n\n'
'Thanks,\n\nThe Pipeline API\n',
body)
self.assertEquals(
'<html><body>\n<p>View the pipeline results here:</p>\n\n<p><a href="'
'http://my-app-id.appspot.com/_ah/pipeline/status?root=banana"\n'
'>http://my-app-id.appspot.com/_ah/pipeline/status?root=banana'
'</a></p>\n\n<p>\nThanks,\n<br>\nThe Pipeline API\n</p>\n'
'</body></html>\n',
html)
def testFinalizeEmailAborted(self):
"""Tests completion emails for aborted root pipelines."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
stage._context.transition_aborted(stage._pipeline_key)
other = OutputlessPipeline.from_id(stage.pipeline_id)
result = []
def fake_mail(self, sender, subject, body, html=None):
result.append((sender, subject, body, html))
old_sendmail = pipeline.Pipeline._send_mail
pipeline.Pipeline._send_mail = fake_mail
try:
other.send_result_email()
finally:
pipeline.Pipeline._send_mail = old_sendmail
self.assertEquals(1, len(result))
sender, subject, body, html = result[0]
self.assertEquals('[email protected]', sender)
self.assertEquals(
'Pipeline aborted: App "my-app-id", '
'__main__.OutputlessPipeline#banana',
subject)
self.assertEquals(
'View the pipeline results here:\n\n'
'http://my-app-id.appspot.com/_ah/pipeline/status?root=banana\n\n'
'Thanks,\n\nThe Pipeline API\n',
body)
self.assertEquals(
'<html><body>\n<p>View the pipeline results here:</p>\n\n<p><a href="'
'http://my-app-id.appspot.com/_ah/pipeline/status?root=banana"\n'
'>http://my-app-id.appspot.com/_ah/pipeline/status?root=banana'
'</a></p>\n\n<p>\nThanks,\n<br>\nThe Pipeline API\n</p>\n'
'</body></html>\n',
html)
def testFinalizeEmailError(self):
"""Tests when send_result_email raises an error."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
stage._context.transition_complete(stage._pipeline_key)
other = OutputlessPipeline.from_id(stage.pipeline_id)
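    # The stubbed mailer always raises, so this test only verifies that
    # calling send_result_email in that situation does not propagate the error.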
def fake_mail(*args, **kwargs):
raise mail.InvalidEmailError('Doh!')
old_sendmail = pipeline.Pipeline._send_mail
pipeline.Pipeline._send_mail = fake_mail
try:
other.send_result_email()
finally:
pipeline.Pipeline._send_mail = old_sendmail
def testSetStatus(self):
"""Tests for the set_status method."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
stage.set_status(
message='This is my message',
console_url='/path/to/the/console',
status_links=dict(first='/one', second='/two', third='/three'))
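    # set_status should have written exactly one _StatusRecord entity.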
record_list = list(_StatusRecord.all())
self.assertEquals(1, len(record_list))
status_record = record_list[0]
self.assertEquals('This is my message', status_record.message)
self.assertEquals('/path/to/the/console', status_record.console_url)
self.assertEquals(['first', 'second', 'third'], status_record.link_names)
self.assertEquals(['/one', '/two', '/three'], status_record.link_urls)
self.assertTrue(isinstance(status_record.status_time, datetime.datetime))
# Now resetting it will overwrite all fields.
stage.set_status(console_url='/another_console')
after_status_record = db.get(status_record.key())
self.assertEquals(None, after_status_record.message)
self.assertEquals('/another_console', after_status_record.console_url)
self.assertEquals([], after_status_record.link_names)
self.assertEquals([], after_status_record.link_urls)
self.assertNotEquals(after_status_record.status_time,
status_record.status_time)
def testSetStatusError(self):
"""Tests when set_status hits a Datastore error."""
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
try:
stage.set_status(message=object())
except pipeline.PipelineRuntimeError, e:
self.assertEquals(
'Could not set status for __main__.OutputlessPipeline(*(), **{})'
'#banana: Property message must be convertible to a Text instance '
'(Text() argument should be str or unicode, not object)',
str(e))
def testTestMode(self):
"""Tests the test_mode property of Pipelines."""
from pipeline import pipeline as local_pipeline
stage = OutputlessPipeline()
self.assertFalse(stage.test_mode)
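    # Flip the module-level flag that the test_mode property reads; it is
    # restored in the finally block below.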
local_pipeline._TEST_MODE = True
try:
self.assertTrue(stage.test_mode)
finally:
local_pipeline._TEST_MODE = False
def testCleanup(self):
"""Tests the cleanup method of Pipelines."""
stage = OutputlessPipeline()
self.assertRaises(pipeline.UnexpectedPipelineError, stage.cleanup)
stage.start(idempotence_key='banana')
self.assertTrue(stage.is_root)
stage.cleanup()
task_list = test_shared.get_tasks('default')
self.assertEquals(2, len(task_list))
start_task, cleanup_task = task_list
self.assertEquals('/_ah/pipeline/run', start_task['url'])
self.assertEquals('/_ah/pipeline/cleanup', cleanup_task['url'])
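    # The opaque value below is the encoded datastore key of the 'banana'
    # root pipeline record, passed both as a header and as a task parameter.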
self.assertEquals(
'aglteS1hcHAtaWRyHwsSE19BRV9QaXBlbGluZV9SZWNvcmQiBmJhbmFuYQw',
dict(cleanup_task['headers'])['X-Ae-Pipeline-Key'])
self.assertEquals(
['aglteS1hcHAtaWRyHwsSE19BRV9QaXBlbGluZV9SZWNvcmQiBmJhbmFuYQw'],
cleanup_task['params']['root_pipeline_key'])
# If the stage is actually a child stage, then cleanup does nothing.
stage._root_pipeline_key = db.Key.from_path(
_PipelineRecord.kind(), 'other')
self.assertFalse(stage.is_root)
stage.cleanup()
task_list = test_shared.get_tasks('default')
self.assertEquals(2, len(task_list))
def testWithParams(self):
"""Tests the with_params helper method."""
stage = OutputlessPipeline().with_params(target='my-cool-target')
self.assertEquals('my-cool-target', stage.target)
stage.start(idempotence_key='banana')
task_list = test_shared.get_tasks('default')
self.assertEquals(1, len(task_list))
start_task = task_list[0]
self.assertEquals('/_ah/pipeline/run', start_task['url'])
self.assertEquals(
'my-cool-target.my-app-id.appspot.com',
dict(start_task['headers'])['Host'])
def testWithParams_Errors(self):
"""Tests misuse of the with_params helper method."""
stage = OutputlessPipeline()
# Bad argument
self.assertRaises(
TypeError, stage.with_params, unknown_arg='blah')
# If it's already active then you can't change the parameters.
stage.start(idempotence_key='banana')
self.assertRaises(
pipeline.UnexpectedPipelineError, stage.with_params)
class OrderingTest(TestBase):
"""Tests for the Ordering classes."""
def testAfterEmpty(self):
"""Tests when no futures are passed to the After() constructor."""
pipeline.After._local._after_all_futures = []
futures = []
after = pipeline.After(*futures)
self.assertEquals([], pipeline.After._local._after_all_futures)
after.__enter__()
self.assertEquals([], pipeline.After._local._after_all_futures)
self.assertFalse(after.__exit__(None, None, None))
self.assertEquals([], pipeline.After._local._after_all_futures)
def testAfterParameterNotFuture(self):
"""Tests when some other object is passed to the After() constructor."""
futures = [object(), object()]
self.assertRaises(TypeError, pipeline.After, *futures)
def testAfter(self):
"""Tests the After class."""
pipeline.After._local._after_all_futures = []
futures = [pipeline.PipelineFuture([]), pipeline.PipelineFuture([])]
after = pipeline.After(*futures)
self.assertEquals([], pipeline.After._local._after_all_futures)
after.__enter__()
self.assertEquals(futures, pipeline.After._local._after_all_futures)
self.assertFalse(after.__exit__(None, None, None))
self.assertEquals([], pipeline.After._local._after_all_futures)
def testAfterNested(self):
"""Tests nested behavior of the After class."""
pipeline.After._local._after_all_futures = []
futures = [pipeline.PipelineFuture([]), pipeline.PipelineFuture([])]
after = pipeline.After(*futures)
self.assertEquals([], pipeline.After._local._after_all_futures)
after.__enter__()
self.assertEquals(futures, pipeline.After._local._after_all_futures)
after2 = pipeline.After(*futures)
self.assertEquals(futures, pipeline.After._local._after_all_futures)
after2.__enter__()
self.assertEquals(futures + futures,
pipeline.After._local._after_all_futures)
self.assertFalse(after.__exit__(None, None, None))
self.assertEquals(futures, pipeline.After._local._after_all_futures)
self.assertFalse(after.__exit__(None, None, None))
self.assertEquals([], pipeline.After._local._after_all_futures)
def testInOrder(self):
"""Tests the InOrder class."""
pipeline.InOrder._local._in_order_futures = set()
pipeline.InOrder._local._activated = False
inorder = pipeline.InOrder()
self.assertFalse(pipeline.InOrder._local._activated)
self.assertEquals(set(), pipeline.InOrder._local._in_order_futures)
pipeline.InOrder._add_future(object())
self.assertEquals(set(), pipeline.InOrder._local._in_order_futures)
inorder.__enter__()
self.assertTrue(pipeline.InOrder._local._activated)
one, two, three = object(), object(), object()
pipeline.InOrder._add_future(one)
pipeline.InOrder._add_future(two)
pipeline.InOrder._add_future(three)
pipeline.InOrder._add_future(three)
self.assertEquals(set([one, two, three]),
pipeline.InOrder._local._in_order_futures)
inorder.__exit__(None, None, None)
self.assertFalse(pipeline.InOrder._local._activated)
self.assertEquals(set(), pipeline.InOrder._local._in_order_futures)
def testInOrderNested(self):
"""Tests nested behavior of the InOrder class."""
pipeline.InOrder._local._in_order_futures = set()
pipeline.InOrder._local._activated = False
inorder = pipeline.InOrder()
self.assertFalse(pipeline.InOrder._local._activated)
inorder.__enter__()
self.assertTrue(pipeline.InOrder._local._activated)
inorder2 = pipeline.InOrder()
self.assertRaises(pipeline.UnexpectedPipelineError, inorder2.__enter__)
inorder.__exit__(None, None, None)
class GenerateArgs(pipeline.Pipeline):
"""Pipeline to test the _generate_args helper function."""
output_names = ['three', 'four']
def run(self, *args, **kwargs):
pass
class UtilitiesTest(TestBase):
"""Tests for module-level utilities."""
def testDereferenceArgsNotFilled(self):
"""Tests when an argument was not filled."""
slot_key = db.Key.from_path(_SlotRecord.kind(), 'myslot')
args = [{'type': 'slot', 'slot_key': str(slot_key)}]
self.assertRaises(pipeline.SlotNotFilledError,
pipeline._dereference_args, 'foo', args, {})
def testDereferenceArgsBadType(self):
"""Tests when a positional argument has a bad type."""
self.assertRaises(pipeline.UnexpectedPipelineError,
pipeline._dereference_args, 'foo', [{'type': 'bad'}], {})
def testDereferenceKwargsBadType(self):
"""Tests when a keyword argument has a bad type."""
self.assertRaises(pipeline.UnexpectedPipelineError,
pipeline._dereference_args, 'foo', [], {'one': {'type': 'bad'}})
def testGenerateArgs(self):
"""Tests generating a parameter dictionary from arguments."""
future = pipeline.PipelineFuture(['one', 'two', 'unused'])
other_future = pipeline.PipelineFuture(['three', 'four'])
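    # Give every slot a fake datastore key so _generate_args can serialize
    # references to them without touching real slot records.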
future.one.key = db.Key.from_path('First', 'one')
future.two.key = db.Key.from_path('First', 'two')
future.default.key = db.Key.from_path('First', 'three')
future.unused.key = db.Key.from_path('First', 'unused')
other_future.three.key = db.Key.from_path('Second', 'three')
other_future.four.key = db.Key.from_path('Second', 'four')
other_future.default.key = db.Key.from_path('Second', 'four')
other_future._after_all_pipelines.add(future)
# When the parameters are small.
stage = GenerateArgs(future.one, 'some value', future,
red=1234, blue=future.two)
(dependent_slots, output_slot_keys,
params_text, params_blob) = pipeline._generate_args(
stage,
other_future,
'my-queue',
'/base-path')
self.assertEquals(
set([future.one.key, future.default.key, future.two.key]),
dependent_slots)
self.assertEquals(
set([other_future.three.key, other_future.four.key,
other_future.default.key]),
output_slot_keys)
self.assertEquals(None, params_blob)
params = simplejson.loads(params_text)
self.assertEquals(
{
'queue_name': 'my-queue',
'after_all': [str(future.default.key)],
'class_path': '__main__.GenerateArgs',
'args': [
{'slot_key': str(future.one.key),
'type': 'slot'},
{'type': 'value', 'value': 'some value'},
{'slot_key': str(future.default.key),
'type': 'slot'}
],
'base_path': '/base-path',
'kwargs': {
'blue': {'slot_key': str(future.two.key),
'type': 'slot'},
'red': {'type': 'value', 'value': 1234}
},
'output_slots': {
'default': str(other_future.default.key),
'four': str(other_future.four.key),
'three': str(other_future.three.key)
},
'max_attempts': 3,
'backoff_factor': 2,
'backoff_seconds': 15,
'task_retry': False,
'target': None,
}, params)
# When the parameters are big enough we need an external blob.
stage = GenerateArgs(future.one, 'some value' * 1000000, future,
red=1234, blue=future.two)
(dependent_slots, output_slot_keys,
params_text, params_blob) = pipeline._generate_args(
stage,
other_future,
'my-queue',
'/base-path')
self.assertEquals(
set([future.one.key, future.default.key, future.two.key]),
dependent_slots)
self.assertEquals(
set([other_future.three.key, other_future.four.key,
other_future.default.key]),
output_slot_keys)
self.assertEquals(None, params_text)
params = simplejson.loads(blobstore.BlobInfo(params_blob).open().read())
self.assertEquals('some value' * 1000000, params['args'][1]['value'])
def testShortRepr(self):
"""Tests for the _short_repr function."""
my_dict = {
'red': 1,
'two': ['hi'] * 100
}
self.assertEquals(
"{'two': ['hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', "
"'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', "
"'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', 'hi', "
"'hi',... (619 bytes)",
pipeline._short_repr(my_dict))
class PipelineContextTest(TestBase):
"""Tests for the internal _PipelineContext class."""
def setUp(self):
"""Sets up the test harness."""
TestBase.setUp(self)
self.pipeline1_key = db.Key.from_path(_PipelineRecord.kind(), '1')
self.pipeline2_key = db.Key.from_path(_PipelineRecord.kind(), '2')
self.pipeline3_key = db.Key.from_path(_PipelineRecord.kind(), '3')
self.pipeline4_key = db.Key.from_path(_PipelineRecord.kind(), '4')
self.pipeline5_key = db.Key.from_path(_PipelineRecord.kind(), '5')
self.slot1_key = db.Key.from_path(_SlotRecord.kind(), 'one')
self.slot2_key = db.Key.from_path(_SlotRecord.kind(), 'missing')
self.slot3_key = db.Key.from_path(_SlotRecord.kind(), 'three')
self.slot4_key = db.Key.from_path(_SlotRecord.kind(), 'four')
self.slot1 = _SlotRecord(
key=self.slot1_key,
status=_SlotRecord.FILLED)
self.slot3 = _SlotRecord(
key=self.slot3_key,
status=_SlotRecord.WAITING)
self.slot4 = _SlotRecord(
key=self.slot4_key,
status=_SlotRecord.FILLED)
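    # barrier1 is a finalize barrier blocked only on slot1; barriers 2-5 are
    # start barriers with varying blocking slots and initial statuses, which
    # notify_barriers below will fire (or skip) once slot1 is filled.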
self.barrier1 = _BarrierRecord(
parent=self.pipeline1_key,
key_name=_BarrierRecord.FINALIZE,
root_pipeline=self.pipeline1_key,
target=self.pipeline1_key,
blocking_slots=[self.slot1_key])
self.barrier2 = _BarrierRecord(
parent=self.pipeline2_key,
key_name=_BarrierRecord.START,
root_pipeline=self.pipeline2_key,
target=self.pipeline2_key,
blocking_slots=[self.slot1_key, self.slot3_key])
self.barrier3 = _BarrierRecord(
parent=self.pipeline3_key,
key_name=_BarrierRecord.START,
root_pipeline=self.pipeline3_key,
target=self.pipeline3_key,
blocking_slots=[self.slot1_key, self.slot4_key],
status=_BarrierRecord.FIRED)
self.barrier4 = _BarrierRecord(
parent=self.pipeline4_key,
key_name=_BarrierRecord.START,
root_pipeline=self.pipeline4_key,
target=self.pipeline4_key,
blocking_slots=[self.slot1_key, self.slot2_key],
status=_BarrierRecord.FIRED)
self.barrier5 = _BarrierRecord(
parent=self.pipeline5_key,
key_name=_BarrierRecord.START,
root_pipeline=self.pipeline5_key,
target=self.pipeline5_key,
blocking_slots=[self.slot1_key])
self.context = pipeline._PipelineContext(
'my-task1', 'default', '/base-path')
def testNotifyBarrierFire(self):
"""Tests barrier firing behavior."""
self.assertEquals(_BarrierRecord.WAITING, self.barrier1.status)
self.assertEquals(_BarrierRecord.WAITING, self.barrier2.status)
self.assertEquals(_BarrierRecord.FIRED, self.barrier3.status)
self.assertTrue(self.barrier3.trigger_time is None)
self.assertEquals(_BarrierRecord.FIRED, self.barrier4.status)
self.assertEquals(_BarrierRecord.WAITING, self.barrier5.status)
db.put([self.barrier1, self.barrier2, self.barrier3, self.barrier4,
self.barrier5, self.slot1, self.slot3, self.slot4])
self.context.notify_barriers(
self.slot1_key,
None,
max_to_notify=3)
task_list = test_shared.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(3, len(task_list))
task_list.sort(key=lambda x: x['name']) # For deterministic tests.
first_task, second_task, continuation_task = task_list
self.assertEquals(
{'pipeline_key': [str(self.pipeline1_key)],
'purpose': [_BarrierRecord.FINALIZE]},
first_task['params'])
self.assertEquals('/base-path/finalized', first_task['url'])
self.assertEquals(
{'pipeline_key': [str(self.pipeline3_key)],
'purpose': [_BarrierRecord.START]},
second_task['params'])
self.assertEquals('/base-path/run', second_task['url'])
self.assertEquals('/base-path/output', continuation_task['url'])
self.assertEquals(
[str(self.slot1_key)], continuation_task['params']['slot_key'])
self.assertEquals(
'my-task1-ae-barrier-notify-0',
continuation_task['name'])
barrier1, barrier2, barrier3 = db.get(
[self.barrier1.key(), self.barrier2.key(), self.barrier3.key()])
self.assertEquals(_BarrierRecord.FIRED, barrier1.status)
self.assertTrue(barrier1.trigger_time is not None)
self.assertEquals(_BarrierRecord.WAITING, barrier2.status)
self.assertTrue(barrier2.trigger_time is None)
    # NOTE: This barrier relies on slots 1 and 4 to force the "blocking slots"
    # inner loop to be exercised. By putting slot4 last on the last barrier
# tested in the loop, we ensure that any inner-loop variables do not pollute
# the outer function context.
self.assertEquals(_BarrierRecord.FIRED, barrier3.status)
    # Show that a _BarrierRecord that was already in the FIRED state is not
    # overwritten again, so its trigger_time does not change.
self.assertTrue(barrier3.trigger_time is None)
# Run the first continuation task.
self.context.task_name = 'my-task1-ae-barrier-notify-0'
self.context.notify_barriers(
self.slot1_key,
continuation_task['params']['cursor'][0],
max_to_notify=2)
task_list = test_shared.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(2, len(task_list))
third_task, continuation2_task = task_list
self.assertEquals(
{'pipeline_key': [str(self.pipeline5_key)],
'purpose': [_BarrierRecord.START]},
third_task['params'])
self.assertEquals('/base-path/run', third_task['url'])
self.assertEquals('/base-path/output', continuation2_task['url'])
self.assertEquals(
[str(self.slot1_key)], continuation2_task['params']['slot_key'])
self.assertEquals(
'my-task1-ae-barrier-notify-1',
continuation2_task['name'])
barrier4, barrier5 = db.get([self.barrier4.key(), self.barrier5.key()])
self.assertEquals(_BarrierRecord.FIRED, barrier4.status)
# Shows that the _BarrierRecord entity was not overwritten.
self.assertTrue(barrier4.trigger_time is None)
self.assertEquals(_BarrierRecord.FIRED, barrier5.status)
self.assertTrue(barrier5.trigger_time is not None)
    # Running the continuation task again will re-trigger the barriers,
# but no tasks will be inserted because they're already tombstoned.
self.context.task_name = 'my-task1-ae-barrier-notify-0'
self.context.notify_barriers(
self.slot1_key,
continuation_task['params']['cursor'][0],
max_to_notify=2)
self.assertEquals(0, len(test_shared.get_tasks()))
# Running the last continuation task will do nothing.
self.context.task_name = 'my-task1-ae-barrier-notify-1'
self.context.notify_barriers(
self.slot1_key,
continuation2_task['params']['cursor'][0],
max_to_notify=2)
self.assertEquals(0, len(test_shared.get_tasks()))
def testTransitionRunMissing(self):
"""Tests transition_run when the _PipelineRecord is missing."""
self.assertTrue(db.get(self.pipeline1_key) is None)
self.context.transition_run(self.pipeline1_key)
# That's it. No exception raised.
def testTransitionRunBadStatus(self):
"""Tests transition_run when the _PipelineRecord.status is bad."""
pipeline_record = _PipelineRecord(
status=_PipelineRecord.DONE,
key=self.pipeline1_key)
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
self.context.transition_run(self.pipeline1_key)
# That's it. No exception raised.
def testTransitionRunMissingBarrier(self):
"""Tests transition_run when the finalization _BarrierRecord is missing."""
pipeline_record = _PipelineRecord(
status=_PipelineRecord.WAITING,
key=self.pipeline1_key)
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
self.assertRaises(pipeline.UnexpectedPipelineError,
self.context.transition_run,
self.pipeline1_key,
blocking_slot_keys=[self.slot1_key])
def testTransitionCompleteMissing(self):
"""Tests transition_complete when the _PipelineRecord is missing."""
self.assertTrue(db.get(self.pipeline1_key) is None)
self.context.transition_complete(self.pipeline1_key)
# That's it. No exception raised.
def testTransitionCompleteBadStatus(self):
"""Tests transition_complete when the _PipelineRecord.status is bad."""
pipeline_record = _PipelineRecord(
status=_PipelineRecord.DONE,
key=self.pipeline1_key)
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
self.context.transition_complete(self.pipeline1_key)
# That's it. No exception raised.
def testTransitionRetryMissing(self):
"""Tests transition_retry when the _PipelineRecord is missing."""
self.assertTrue(db.get(self.pipeline1_key) is None)
self.assertFalse(
self.context.transition_retry(self.pipeline1_key, 'my message'))
# No exception raised.
self.assertEquals(0, len(test_shared.get_tasks()))
def testTransitionRetryBadStatus(self):
"""Tests transition_retry when the _PipelineRecord.status is bad."""
pipeline_record = _PipelineRecord(
status=_PipelineRecord.DONE,
key=self.pipeline1_key)
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
self.assertFalse(
self.context.transition_retry(self.pipeline1_key, 'my message'))
# No exception raised.
self.assertEquals(0, len(test_shared.get_tasks()))
def testTransitionRetryMaxFailures(self):
"""Tests transition_retry when _PipelineRecord.max_attempts is exceeded."""
params = {
'backoff_seconds': 10,
'backoff_factor': 1.5,
'max_attempts': 15,
'task_retry': False,
}
pipeline_record = _PipelineRecord(
status=_PipelineRecord.WAITING,
key=self.pipeline1_key,
max_attempts=5,
current_attempt=4,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params),
root_pipeline=self.pipeline5_key)
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
self.assertFalse(
self.context.transition_retry(self.pipeline1_key, 'my message'))
    # An abort fan-out task should be enqueued.
task_list = test_shared.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(1, len(task_list))
self.assertEquals('/base-path/fanout_abort', task_list[0]['url'])
self.assertEquals(
{'root_pipeline_key': [str(self.pipeline5_key)]},
task_list[0]['params'])
def testTransitionRetryTaskParams(self):
"""Tests that transition_retry will enqueue retry tasks properly.
Attempts multiple retries and verifies ETAs and task parameters.
"""
params = {
'backoff_seconds': 12,
'backoff_factor': 1.5,
'max_attempts': 5,
'task_retry': False,
}
pipeline_record = _PipelineRecord(
status=_PipelineRecord.WAITING,
key=self.pipeline1_key,
max_attempts=5,
current_attempt=0,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params),
root_pipeline=self.pipeline5_key)
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
start_time = datetime.datetime.utcnow()
when_list = [
start_time + datetime.timedelta(seconds=(30 * i))
for i in xrange(5)
]
closure_when_list = list(when_list)
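    # Replace the context clock with a stub that hands out the canned
    # timestamps above, one per retry, so the expected ETAs are deterministic.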
def fake_gettime():
return closure_when_list.pop(0)
self.context._gettime = fake_gettime
for attempt, delay_seconds in enumerate([12, 18, 27, 40.5]):
self.context.transition_retry(
self.pipeline1_key, 'my message %d' % attempt)
task_list = test_shared.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(1, len(task_list))
task = task_list[0]
self.assertEquals('/base-path/run', task['url'])
self.assertEquals(
{
'pipeline_key': [str(self.pipeline1_key)],
'attempt': [str(attempt + 1)],
'purpose': ['start']
}, task['params'])
next_eta = when_list[attempt] + datetime.timedelta(seconds=delay_seconds)
self.assertEquals(next_eta, task['eta'])
pipeline_record = db.get(self.pipeline1_key)
self.assertEquals(attempt + 1, pipeline_record.current_attempt)
self.assertEquals(next_eta, pipeline_record.next_retry_time)
self.assertEquals('my message %d' % attempt,
pipeline_record.retry_message)
# Simulate last attempt.
self.context.transition_retry(self.pipeline1_key, 'my message 5')
    # An abort fan-out task should be enqueued.
task_list = test_shared.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(1, len(task_list))
self.assertEquals('/base-path/fanout_abort', task_list[0]['url'])
self.assertEquals(
{'root_pipeline_key': [str(self.pipeline5_key)]},
task_list[0]['params'])
def testBeginAbortMissing(self):
"""Tests begin_abort when the pipeline is missing."""
self.assertTrue(db.get(self.pipeline1_key) is None)
self.assertFalse(
self.context.begin_abort(self.pipeline1_key, 'error message'))
def testBeginAbortAlreadyAborted(self):
"""Tests begin_abort when the pipeline was already aborted."""
params = {
'backoff_seconds': 12,
'backoff_factor': 1.5,
'max_attempts': 5,
'task_retry': False,
}
pipeline_record = _PipelineRecord(
status=_PipelineRecord.ABORTED,
abort_requested=False,
key=self.pipeline1_key,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params))
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
self.assertFalse(
self.context.begin_abort(self.pipeline1_key, 'error message'))
def testBeginAbortAlreadySignalled(self):
"""Tests begin_abort when the pipeline has already been signalled."""
params = {
'backoff_seconds': 12,
'backoff_factor': 1.5,
'max_attempts': 5,
'task_retry': False,
}
pipeline_record = _PipelineRecord(
status=_PipelineRecord.WAITING,
abort_requested=True,
key=self.pipeline1_key,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params))
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
self.assertFalse(
self.context.begin_abort(self.pipeline1_key, 'error message'))
def testBeginAbortTaskEnqueued(self):
"""Tests that a successful begin_abort will enqueue an abort task."""
params = {
'backoff_seconds': 12,
'backoff_factor': 1.5,
'max_attempts': 5,
'task_retry': False,
}
pipeline_record = _PipelineRecord(
status=_PipelineRecord.RUN,
key=self.pipeline1_key,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params))
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
self.assertTrue(
self.context.begin_abort(self.pipeline1_key, 'error message'))
    # An abort fan-out task should be enqueued.
task_list = test_shared.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(1, len(task_list))
self.assertEquals('/base-path/fanout_abort', task_list[0]['url'])
self.assertEquals(
{'root_pipeline_key': [str(self.pipeline1_key)]},
task_list[0]['params'])
def testContinueAbort(self):
"""Tests the whole life cycle of continue_abort."""
params = {
'backoff_seconds': 12,
'backoff_factor': 1.5,
'max_attempts': 5,
'task_retry': False,
}
pipeline_record1 = _PipelineRecord(
status=_PipelineRecord.RUN,
key=self.pipeline1_key,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params),
root_pipeline=self.pipeline1_key)
pipeline_record2 = _PipelineRecord(
status=_PipelineRecord.RUN,
key=self.pipeline2_key,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params),
root_pipeline=self.pipeline1_key)
pipeline_record3 = _PipelineRecord(
status=_PipelineRecord.RUN,
key=self.pipeline3_key,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params),
root_pipeline=self.pipeline1_key)
pipeline_record4 = _PipelineRecord(
status=_PipelineRecord.ABORTED,
key=self.pipeline4_key,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params),
root_pipeline=self.pipeline1_key)
pipeline_record5 = _PipelineRecord(
status=_PipelineRecord.DONE,
key=self.pipeline5_key,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params),
root_pipeline=self.pipeline1_key)
db.put([pipeline_record1, pipeline_record2, pipeline_record3,
pipeline_record4, pipeline_record5])
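    # Kick off the abort fan-out. With max_to_notify=2, only the first two
    # RUN pipelines get abort tasks in this pass, plus a continuation task.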
self.context.continue_abort(self.pipeline1_key, max_to_notify=2)
task_list = test_shared.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(3, len(task_list))
# For deterministic tests.
task_list.sort(key=lambda x: x['params'].get('pipeline_key'))
continuation_task, first_task, second_task = task_list
# Abort for the first pipeline
self.assertEquals('/base-path/abort', first_task['url'])
self.assertEquals(
{'pipeline_key': [str(self.pipeline1_key)],
'purpose': ['abort']},
first_task['params'])
# Abort for the second pipeline
self.assertEquals('/base-path/abort', second_task['url'])
self.assertEquals(
{'pipeline_key': [str(self.pipeline2_key)],
'purpose': ['abort']},
second_task['params'])
# Continuation
self.assertEquals('/base-path/fanout_abort', continuation_task['url'])
self.assertEquals(set(['cursor', 'root_pipeline_key']),
set(continuation_task['params'].keys()))
self.assertEquals(str(self.pipeline1_key),
continuation_task['params']['root_pipeline_key'][0])
self.assertTrue(continuation_task['name'].endswith('-0'))
cursor = continuation_task['params']['cursor'][0]
# Now run the continuation task
self.context.task_name = continuation_task['name']
self.context.continue_abort(
self.pipeline1_key, cursor=cursor, max_to_notify=1)
task_list = test_shared.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(2, len(task_list))
# For deterministic tests.
task_list.sort(key=lambda x: x['params'].get('pipeline_key'))
second_continuation_task, fifth_task = task_list
# Abort for the third pipeline
self.assertEquals('/base-path/abort', fifth_task['url'])
self.assertEquals(
{'pipeline_key': [str(self.pipeline3_key)],
'purpose': ['abort']},
fifth_task['params'])
# Another continuation
self.assertEquals('/base-path/fanout_abort',
second_continuation_task['url'])
self.assertEquals(set(['cursor', 'root_pipeline_key']),
set(second_continuation_task['params'].keys()))
self.assertEquals(
str(self.pipeline1_key),
second_continuation_task['params']['root_pipeline_key'][0])
self.assertTrue(second_continuation_task['name'].endswith('-1'))
cursor2 = second_continuation_task['params']['cursor'][0]
# Now run another continuation task.
self.context.task_name = second_continuation_task['name']
self.context.continue_abort(
self.pipeline1_key, cursor=cursor2, max_to_notify=2)
# This task will find two pipelines that are already in terminal states,
    # and skip them.
task_list = test_shared.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(1, len(task_list))
third_continuation_task = task_list[0]
self.assertEquals('/base-path/fanout_abort',
third_continuation_task['url'])
self.assertEquals(set(['cursor', 'root_pipeline_key']),
set(third_continuation_task['params'].keys()))
self.assertEquals(
str(self.pipeline1_key),
third_continuation_task['params']['root_pipeline_key'][0])
self.assertTrue(third_continuation_task['name'].endswith('-2'))
cursor3 = third_continuation_task['params']['cursor'][0]
# Run the third continuation task, which will do nothing.
    self.context.task_name = third_continuation_task['name']
self.context.continue_abort(
self.pipeline1_key, cursor=cursor3, max_to_notify=2)
# Nothing left to do.
task_list = test_shared.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(0, len(task_list))
def testTransitionAbortedMissing(self):
"""Tests transition_aborted when the pipeline is missing."""
self.assertTrue(db.get(self.pipeline1_key) is None)
self.context.transition_aborted(self.pipeline1_key)
# That's it. No exception raised.
def testTransitionAbortedBadStatus(self):
"""Tests transition_aborted when the pipeline is in a bad state."""
params = {
'backoff_seconds': 12,
'backoff_factor': 1.5,
'max_attempts': 5,
'task_retry': False,
}
finalized_time = datetime.datetime.now()
pipeline_record = _PipelineRecord(
status=_PipelineRecord.ABORTED,
key=self.pipeline1_key,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params),
finalized_time=finalized_time)
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
self.context.transition_aborted(self.pipeline1_key)
# Finalized time will stay the same.
after_record = db.get(self.pipeline1_key)
self.assertEquals(pipeline_record.finalized_time,
after_record.finalized_time)
def testTransitionAbortedSuccess(self):
"""Tests when transition_aborted is successful."""
params = {
'backoff_seconds': 12,
'backoff_factor': 1.5,
'max_attempts': 5,
'task_retry': False,
}
pipeline_record = _PipelineRecord(
status=_PipelineRecord.WAITING,
key=self.pipeline1_key,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(params))
pipeline_record.put()
self.assertTrue(db.get(self.pipeline1_key) is not None)
self.context.transition_aborted(self.pipeline1_key)
after_record = db.get(self.pipeline1_key)
self.assertEquals(_PipelineRecord.ABORTED, after_record.status)
self.assertTrue(pipeline_record.finalized_time is None)
self.assertTrue(isinstance(after_record.finalized_time, datetime.datetime))
class EvaluateErrorTest(test_shared.TaskRunningMixin, TestBase):
"""Task execution tests for error situations."""
def setUp(self):
"""Sets up the test harness."""
super(EvaluateErrorTest, self).setUp()
self.pipeline_key = db.Key.from_path(_PipelineRecord.kind(), '1')
self.slot_key = db.Key.from_path(_SlotRecord.kind(), 'red')
self.context = pipeline._PipelineContext(
'my-task1', 'default', '/base-path')
def testPipelineMissing(self):
"""Tests running a pipeline key that's disappeared."""
self.assertTrue(db.get(self.pipeline_key) is None)
self.context.evaluate(self.pipeline_key)
# That's it. No exception raised.
def testPipelineBadStatus(self):
"""Tests running a pipeline that has an invalid status."""
pipeline_record = _PipelineRecord(
status=_PipelineRecord.DONE,
key=self.pipeline_key)
pipeline_record.put()
self.assertTrue(db.get(self.pipeline_key) is not None)
self.context.evaluate(self.pipeline_key)
def testDefaultSlotMissing(self):
"""Tests when the default slot is missing."""
pipeline_record = _PipelineRecord(
root_pipeline=self.pipeline_key,
status=_PipelineRecord.WAITING,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps({
'output_slots': {'default': str(self.slot_key)}}),
key=self.pipeline_key)
pipeline_record.put()
self.assertTrue(db.get(self.slot_key) is None)
self.assertTrue(db.get(self.pipeline_key) is not None)
self.context.evaluate(self.pipeline_key)
# That's it. No exception raised.
def testRootPipelineMissing(self):
"""Tests when the root pipeline record is missing."""
missing_key = db.Key.from_path(_PipelineRecord.kind(), 'unknown')
slot_record = _SlotRecord(key=self.slot_key)
slot_record.put()
pipeline_record = _PipelineRecord(
root_pipeline=missing_key,
status=_PipelineRecord.WAITING,
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps({
'output_slots': {'default': str(self.slot_key)}}),
key=self.pipeline_key)
pipeline_record.put()
self.assertTrue(db.get(missing_key) is None)
self.assertTrue(db.get(self.slot_key) is not None)
self.assertTrue(db.get(self.pipeline_key) is not None)
self.context.evaluate(self.pipeline_key)
# That's it. No exception raised.
def testResolutionError(self):
"""Tests when the pipeline class couldn't be found."""
slot_record = _SlotRecord(key=self.slot_key)
slot_record.put()
pipeline_record = _PipelineRecord(
root_pipeline=self.pipeline_key,
status=_PipelineRecord.WAITING,
class_path='does.not.exist',
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps({
'output_slots': {'default': str(self.slot_key)}}),
key=self.pipeline_key)
pipeline_record.put()
self.assertTrue(db.get(self.slot_key) is not None)
self.assertTrue(db.get(self.pipeline_key) is not None)
self.assertRaises(ImportError, self.context.evaluate, self.pipeline_key)
class DumbSync(pipeline.Pipeline):
"""A dumb pipeline that's synchronous."""
def run(self, *args):
pass
class DumbAsync(pipeline.Pipeline):
"""A dumb pipeline that's asynchronous."""
async = True
def run(self):
self.complete()
class DumbGenerator(pipeline.Pipeline):
"""A dumb pipeline that's a generator that yeilds nothing."""
def run(self):
if False:
yield 1
class DumbGeneratorYields(pipeline.Pipeline):
"""A dumb pipeline that's a generator that yields something."""
def run(self, block=False):
yield DumbSync(1)
result = yield DumbSync(2)
if block:
yield DumbSync(3, result)
class DiesOnCreation(pipeline.Pipeline):
"""A pipeline that raises an exception on insantiation."""
def __init__(self, *args, **kwargs):
raise Exception('This will not work!')
class DiesOnRun(pipeline.Pipeline):
"""A pipeline that raises an exception when it's executed."""
def run(self):
raise Exception('Cannot run this one!')
class RetryAfterYield(pipeline.Pipeline):
"""A generator pipeline that raises a Retry exception after yielding once."""
def run(self):
yield DumbSync()
raise pipeline.Retry('I want to retry now!')
class DiesAfterYield(pipeline.Pipeline):
"""A generator pipeline that dies after yielding once."""
def run(self):
yield DumbSync()
raise Exception('Whoops I will die now!')
class RetriesOnRun(pipeline.Pipeline):
"""A pipeline that raises a Retry exception on run."""
def run(self):
raise pipeline.Retry('Gotta go and retry now!')
class AbortsOnRun(pipeline.Pipeline):
"""A pipeline that raises an Abort exception on run."""
def run(self):
raise pipeline.Abort('Gotta go and abort now!')
class AsyncCannotAbort(pipeline.Pipeline):
"""An async pipeline that cannot be aborted once active."""
async = True
def run(self):
pass
class AbortAfterYield(pipeline.Pipeline):
"""A generator pipeline that raises an Abort exception after yielding once."""
def run(self):
yield DumbSync()
raise pipeline.Abort('I want to abort now!')
class AsyncCanAbort(pipeline.Pipeline):
"""An async pipeline that cannot be aborted once active."""
async = True
def run(self):
pass
def try_cancel(self):
return True
class SyncMissedOutput(pipeline.Pipeline):
"""A sync pipeline that forgets to fill in a named output slot."""
output_names = ['another']
def run(self):
return 5
class GeneratorMissedOutput(pipeline.Pipeline):
"""A generator pipeline that forgets to fill in a named output slot."""
output_names = ['another']
def run(self):
if False:
yield 1
class TaskRunningTest(test_shared.TaskRunningMixin, TestBase):
"""End-to-end tests for task-running and race-condition situations.
  Many of these are cases where an executor task runs a second time when it
  shouldn't have, or where some kind of transient error occurred.
"""
def setUp(self):
"""Sets up the test harness."""
super(TaskRunningTest, self).setUp()
self.pipeline_key = db.Key.from_path(_PipelineRecord.kind(), 'one')
self.pipeline2_key = db.Key.from_path(_PipelineRecord.kind(), 'two')
self.slot_key = db.Key.from_path(_SlotRecord.kind(), 'red')
self.slot_record = _SlotRecord(key=self.slot_key)
self.pipeline_record = _PipelineRecord(
root_pipeline=self.pipeline_key,
status=_PipelineRecord.WAITING,
class_path='does.not.exist',
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps({
'output_slots': {'default': str(self.slot_key)},
'args': [],
'kwargs': {},
'task_retry': False,
'backoff_seconds': 1,
'backoff_factor': 2,
'max_attempts': 4,
'queue_name': 'default',
'base_path': '',
}),
key=self.pipeline_key,
max_attempts=4)
self.barrier_record = _BarrierRecord(
parent=self.pipeline_key,
key_name=_BarrierRecord.FINALIZE,
target=self.pipeline_key,
root_pipeline=self.pipeline_key,
blocking_slots=[self.slot_key])
self.context = pipeline._PipelineContext(
'my-task1', 'default', '/base-path')
def testSubstagesRunImmediately(self):
"""Tests that sub-stages with no blocking slots are run immediately."""
self.pipeline_record.class_path = '__main__.DumbGeneratorYields'
db.put([self.pipeline_record, self.slot_record, self.barrier_record])
before_record = db.get(self.pipeline_key)
self.assertEquals([], before_record.fanned_out)
self.context.evaluate(self.pipeline_key)
after_record = db.get(self.pipeline_key)
self.assertEquals(2, len(after_record.fanned_out))
child1_key, child2_key = after_record.fanned_out
task_list = test_shared.get_tasks()
self.assertEquals(1, len(task_list))
fanout_task = task_list[0]
# Verify that the start time is set for non-blocked child pipelines.
child_record_list = db.get(after_record.fanned_out)
for child_record in child_record_list:
self.assertTrue(child_record.start_time is not None)
# One fan-out task with both children.
self.assertEquals(
[str(self.pipeline_key)],
fanout_task['params']['parent_key'])
self.assertEquals(
['0', '1'],
fanout_task['params']['child_indexes'])
self.assertEquals('/base-path/fanout', fanout_task['url'])
# Only finalization barriers present.
self.assertTrue(db.get(
db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.START,
parent=child1_key)) is None)
self.assertTrue(db.get(
db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.START,
parent=child2_key)) is None)
self.assertTrue(db.get(
db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.FINALIZE,
parent=child1_key)) is not None)
self.assertTrue(db.get(
db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.FINALIZE,
parent=child2_key)) is not None)
def testSubstagesBlock(self):
"""Tests that sub-stages with pending inputs will have a barrier added."""
self.pipeline_record.class_path = '__main__.DumbGeneratorYields'
params = self.pipeline_record.params.copy()
params.update({
'output_slots': {'default': str(self.slot_key)},
'args': [{'type': 'value', 'value': True}],
'kwargs': {},
})
self.pipeline_record.params_text = simplejson.dumps(params)
db.put([self.pipeline_record, self.slot_record, self.barrier_record])
before_record = db.get(self.pipeline_key)
self.assertEquals([], before_record.fanned_out)
self.context.evaluate(self.pipeline_key)
after_record = db.get(self.pipeline_key)
self.assertEquals(3, len(after_record.fanned_out))
task_list = test_shared.get_tasks()
self.assertEquals(1, len(task_list))
fanout_task = task_list[0]
# Only two children should start.
self.assertEquals('/base-path/fanout', fanout_task['url'])
self.assertEquals(
[str(self.pipeline_key)],
fanout_task['params']['parent_key'])
self.assertEquals(
['0', '1'],
fanout_task['params']['child_indexes'])
run_children = set(after_record.fanned_out[int(i)]
for i in fanout_task['params']['child_indexes'])
self.assertEquals(2, len(run_children))
child1_key, child2_key = run_children
other_child_key = list(set(after_record.fanned_out) - run_children)[0]
# Only a start barrier inserted for the one pending child.
self.assertTrue(db.get(
db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.START,
parent=child1_key)) is None)
self.assertTrue(db.get(
db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.START,
parent=child2_key)) is None)
self.assertTrue(db.get(
db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.START,
parent=other_child_key)) is not None)
self.assertTrue(db.get(
db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.FINALIZE,
parent=child1_key)) is not None)
self.assertTrue(db.get(
db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.FINALIZE,
parent=child2_key)) is not None)
self.assertTrue(db.get(
db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.FINALIZE,
parent=other_child_key)) is not None)
def testFannedOutOrdering(self):
"""Tests that the fanned_out property lists children in code order."""
self.pipeline_record.class_path = '__main__.DumbGeneratorYields'
params = self.pipeline_record.params.copy()
params.update({
'output_slots': {'default': str(self.slot_key)},
'args': [{'type': 'value', 'value': True}],
'kwargs': {},
})
self.pipeline_record.params_text = simplejson.dumps(params)
db.put([self.pipeline_record, self.slot_record, self.barrier_record])
before_record = db.get(self.pipeline_key)
self.assertEquals([], before_record.fanned_out)
self.context.evaluate(self.pipeline_key)
after_record = db.get(self.pipeline_key)
self.assertEquals(3, len(after_record.fanned_out))
children = db.get(after_record.fanned_out)
self.assertEquals(1, children[0].params['args'][0]['value'])
self.assertEquals(2, children[1].params['args'][0]['value'])
self.assertEquals(3, children[2].params['args'][0]['value'])
def testSyncWaitingStartRerun(self):
"""Tests a waiting, sync pipeline being re-run after it already output."""
self.pipeline_record.class_path = '__main__.DumbSync'
db.put([self.pipeline_record, self.slot_record])
before_record = db.get(self.slot_key)
self.assertEquals(_SlotRecord.WAITING, before_record.status)
self.assertTrue(before_record.fill_time is None)
self.context.evaluate(self.pipeline_key)
after_record = db.get(self.slot_key)
self.assertEquals(_SlotRecord.FILLED, after_record.status)
self.assertTrue(after_record.fill_time is not None)
after_pipeline = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_pipeline.status)
self.context.evaluate(self.pipeline_key)
second_after_record = db.get(self.slot_key)
self.assertEquals(_SlotRecord.FILLED, second_after_record.status)
self.assertTrue(second_after_record.fill_time is not None)
# The output slot fill times are different, which means the pipeline re-ran.
self.assertNotEquals(second_after_record.fill_time, after_record.fill_time)
def testSyncFinalizingRerun(self):
"""Tests a finalizing, sync pipeline task being re-run."""
self.pipeline_record.class_path = '__main__.DumbSync'
self.slot_record.status = _SlotRecord.FILLED
self.slot_record.value_text = simplejson.dumps(None)
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.FINALIZE)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.DONE, after_record.status)
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.FINALIZE)
second_after_record = db.get(self.pipeline_key)
    self.assertEquals(_PipelineRecord.DONE, second_after_record.status)
# Finalized time will stay the same.
self.assertEquals(after_record.finalized_time,
second_after_record.finalized_time)
def testSyncDoneFinalizeRerun(self):
"""Tests a done, sync pipeline task being re-refinalized."""
now = datetime.datetime.utcnow()
self.pipeline_record.class_path = '__main__.DumbSync'
self.pipeline_record.status = _PipelineRecord.DONE
self.pipeline_record.finalized_time = now
self.slot_record.status = _SlotRecord.FILLED
self.slot_record.value_text = simplejson.dumps(None)
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.FINALIZE)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.DONE, after_record.status)
# Finalize time stays the same.
self.assertEquals(now, after_record.finalized_time)
def testAsyncWaitingRerun(self):
"""Tests a waiting, async pipeline task being re-run."""
self.pipeline_record.class_path = '__main__.DumbAsync'
db.put([self.pipeline_record, self.slot_record])
before_record = db.get(self.slot_key)
self.assertEquals(_SlotRecord.WAITING, before_record.status)
self.assertTrue(before_record.fill_time is None)
self.context.evaluate(self.pipeline_key)
after_record = db.get(self.slot_key)
self.assertEquals(_SlotRecord.FILLED, after_record.status)
self.assertTrue(after_record.fill_time is not None)
after_pipeline = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.RUN, after_pipeline.status)
self.context.evaluate(self.pipeline_key)
second_after_record = db.get(self.slot_key)
self.assertEquals(_SlotRecord.FILLED, second_after_record.status)
self.assertTrue(second_after_record.fill_time is not None)
# The output slot fill times are different, which means the pipeline re-ran.
self.assertNotEquals(second_after_record.fill_time, after_record.fill_time)
def testAsyncRunRerun(self):
"""Tests a run, async pipeline task being re-run."""
self.pipeline_record.class_path = '__main__.DumbAsync'
self.pipeline_record.status = _PipelineRecord.RUN
db.put([self.pipeline_record, self.slot_record])
before_record = db.get(self.slot_key)
self.assertEquals(_SlotRecord.WAITING, before_record.status)
self.assertTrue(before_record.fill_time is None)
self.context.evaluate(self.pipeline_key)
after_record = db.get(self.slot_key)
self.assertEquals(_SlotRecord.FILLED, after_record.status)
self.assertTrue(after_record.fill_time is not None)
after_pipeline = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.RUN, after_pipeline.status)
self.context.evaluate(self.pipeline_key)
second_after_record = db.get(self.slot_key)
self.assertEquals(_SlotRecord.FILLED, second_after_record.status)
self.assertTrue(second_after_record.fill_time is not None)
# The output slot fill times are different, which means the pipeline re-ran.
self.assertNotEquals(second_after_record.fill_time, after_record.fill_time)
def testAsyncFinalizingRerun(self):
"""Tests a finalizing, async pipeline task being re-run."""
self.pipeline_record.class_path = '__main__.DumbAsync'
self.slot_record.status = _SlotRecord.FILLED
self.slot_record.value_text = simplejson.dumps(None)
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.FINALIZE)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.DONE, after_record.status)
after_pipeline = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.DONE, after_pipeline.status)
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.FINALIZE)
second_after_record = db.get(self.pipeline_key)
    self.assertEquals(_PipelineRecord.DONE, second_after_record.status)
# Finalized time will stay the same.
self.assertEquals(after_record.finalized_time,
second_after_record.finalized_time)
def testAsyncDoneFinalizeRerun(self):
"""Tests a done, async pipeline task being re-finalized."""
now = datetime.datetime.utcnow()
self.pipeline_record.class_path = '__main__.DumbAsync'
self.pipeline_record.status = _PipelineRecord.DONE
self.pipeline_record.finalized_time = now
self.slot_record.status = _SlotRecord.FILLED
self.slot_record.value_text = simplejson.dumps(None)
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.FINALIZE)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.DONE, after_record.status)
# Finalize time stays the same.
self.assertEquals(now, after_record.finalized_time)
def testNonYieldingGeneratorWaitingFilled(self):
"""Tests a waiting, non-yielding generator will fill its output slot."""
self.pipeline_record.class_path = '__main__.DumbGenerator'
db.put([self.pipeline_record, self.slot_record])
self.assertEquals(_SlotRecord.WAITING, db.get(self.slot_key).status)
self.context.evaluate(self.pipeline_key)
# Output slot is filled.
after_slot = db.get(self.slot_key)
self.assertEquals(_SlotRecord.FILLED, after_slot.status)
# Pipeline is now in the run state.
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.RUN, after_record.status)
def testNonYieldingGeneratorRunNotFilledRerun(self):
"""Tests a run, non-yielding generator with a not filled output slot.
This happens when the generator yields no children and is moved to the
    RUN state, but then fails before it can output to the default slot.
"""
self.pipeline_record.class_path = '__main__.DumbGenerator'
self.pipeline_record.status = _PipelineRecord.RUN
db.put([self.pipeline_record, self.slot_record])
self.assertEquals(_SlotRecord.WAITING, db.get(self.slot_key).status)
self.context.evaluate(self.pipeline_key)
# Output slot is filled.
after_slot = db.get(self.slot_key)
self.assertEquals(_SlotRecord.FILLED, after_slot.status)
def testGeneratorRunReRun(self):
"""Tests a run, yielding generator that is re-run."""
self.pipeline_record.class_path = '__main__.DumbGeneratorYields'
self.pipeline_record.status = _PipelineRecord.RUN
self.pipeline_record.fanned_out = [self.pipeline2_key]
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key)
# Output slot wasn't filled.
after_slot = db.get(self.slot_key)
self.assertEquals(_SlotRecord.WAITING, after_slot.status)
# Status hasn't changed.
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.RUN, after_record.status)
def testGeneratorFinalizingRerun(self):
"""Tests a finalizing, generator pipeline task being re-run."""
self.pipeline_record.class_path = '__main__.DumbGeneratorYields'
self.pipeline_record.status = _PipelineRecord.RUN
self.slot_record.status = _SlotRecord.FILLED
self.slot_record.value_text = simplejson.dumps(None)
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.FINALIZE)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.DONE, after_record.status)
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.FINALIZE)
second_after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.DONE, second_after_record.status)
# Finalized time will stay the same.
self.assertEquals(after_record.finalized_time,
second_after_record.finalized_time)
def testGeneratorDoneFinalizeRerun(self):
"""Tests a done, generator pipeline task being re-run."""
now = datetime.datetime.utcnow()
self.pipeline_record.class_path = '__main__.DumbGeneratorYields'
self.pipeline_record.status = _PipelineRecord.DONE
self.pipeline_record.finalized_time = now
self.slot_record.status = _SlotRecord.FILLED
self.slot_record.value_text = simplejson.dumps(None)
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.FINALIZE)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.DONE, after_record.status)
# Finalize time stays the same.
self.assertEquals(now, after_record.finalized_time)
def testFromIdFails(self):
"""Tests when evaluate's call to from_id fails a retry attempt is made."""
self.pipeline_record.class_path = '__main__.DiesOnCreation'
db.put([self.pipeline_record, self.slot_record])
self.assertEquals(0, self.pipeline_record.current_attempt)
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.START)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(1, after_record.current_attempt)
self.assertEquals('Exception: This will not work!',
after_record.retry_message)
def testMismatchedAttempt(self):
"""Tests when the task's current attempt does not match the datastore."""
self.pipeline_record.class_path = '__main__.DiesOnRun'
self.pipeline_record.current_attempt = 3
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key,
purpose=_BarrierRecord.START,
attempt=1)
# Didn't run because no state change occurred, retry count is the same.
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(3, after_record.current_attempt)
self.assertEquals(None, after_record.retry_message)
def testPastMaxAttempts(self):
"""Tests when the current attempt number is beyond the max attempts.
This could happen if the user edits 'max_attempts' during execution.
"""
self.pipeline_record.class_path = '__main__.DiesOnRun'
self.pipeline_record.current_attempt = 5
self.pipeline_record.max_attempts = 3
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key,
purpose=_BarrierRecord.START,
attempt=5)
# Didn't run because no state change occurred, retry count is the same.
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(5, after_record.current_attempt)
self.assertEquals(None, after_record.retry_message)
def testPrematureRetry(self):
"""Tests when the current retry request came prematurely."""
now = datetime.datetime.utcnow()
self.pipeline_record.class_path = '__main__.DiesOnRun'
self.pipeline_record.current_attempt = 1
self.pipeline_record.max_attempts = 3
self.pipeline_record.next_retry_time = now + datetime.timedelta(seconds=30)
db.put([self.pipeline_record, self.slot_record])
self.assertRaises(
pipeline.UnexpectedPipelineError,
self.context.evaluate,
self.pipeline_key,
purpose=_BarrierRecord.START,
attempt=1)
# Didn't run because no state change occurred, retry count is the same.
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(1, after_record.current_attempt)
self.assertEquals(None, after_record.retry_message)
def testRunExceptionRetry(self):
"""Tests that exceptions in Sync/Async pipelines cause a retry."""
self.pipeline_record.class_path = '__main__.DiesOnRun'
db.put([self.pipeline_record, self.slot_record])
self.assertEquals(0, self.pipeline_record.current_attempt)
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.START)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(1, after_record.current_attempt)
self.assertEquals('Exception: Cannot run this one!',
after_record.retry_message)
def testRunForceRetry(self):
"""Tests that explicit Retry on a synchronous pipeline."""
self.pipeline_record.class_path = '__main__.RetriesOnRun'
db.put([self.pipeline_record, self.slot_record])
self.assertEquals(0, self.pipeline_record.current_attempt)
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.START)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(1, after_record.current_attempt)
self.assertEquals('Gotta go and retry now!',
after_record.retry_message)
def testGeneratorExceptionRetry(self):
"""Tests that exceptions in a generator pipeline cause a retry."""
self.pipeline_record.class_path = '__main__.DiesAfterYield'
db.put([self.pipeline_record, self.slot_record])
self.assertEquals(0, self.pipeline_record.current_attempt)
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.START)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(1, after_record.current_attempt)
self.assertEquals('Exception: Whoops I will die now!',
after_record.retry_message)
def testGeneratorForceRetry(self):
"""Tests when a generator raises a user-initiated retry exception."""
self.pipeline_record.class_path = '__main__.RetryAfterYield'
db.put([self.pipeline_record, self.slot_record])
self.assertEquals(0, self.pipeline_record.current_attempt)
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.START)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(1, after_record.current_attempt)
self.assertEquals('I want to retry now!', after_record.retry_message)
def testNonAsyncAbortSignal(self):
"""Tests when a non-async pipeline receives the abort signal."""
self.pipeline_record.class_path = '__main__.DumbSync'
self.pipeline_record.status = _PipelineRecord.WAITING
self.assertTrue(self.pipeline_record.finalized_time is None)
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.ABORT)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.ABORTED, after_record.status)
self.assertEquals(0, after_record.current_attempt)
self.assertTrue(after_record.retry_message is None)
self.assertTrue(after_record.abort_message is None)
self.assertTrue(after_record.finalized_time is not None)
def testAbortRootPipelineFastPath(self):
"""Tests root pipeline status also functions as the abort signal."""
root_pipeline = _PipelineRecord(
root_pipeline=self.pipeline2_key,
status=_PipelineRecord.RUN,
class_path='does.not.exist',
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps({
'output_slots': {'default': str(self.slot_key)},
'args': [],
'kwargs': {},
'task_retry': False,
'backoff_seconds': 1,
'backoff_factor': 2,
'max_attempts': 4,
'queue_name': 'default',
'base_path': '',
}),
key=self.pipeline2_key,
is_root_pipeline=True,
max_attempts=4,
abort_requested=True)
# Use DiesOnRun to ensure that we don't actually run the pipeline.
self.pipeline_record.class_path = '__main__.DiesOnRun'
self.pipeline_record.root_pipeline = self.pipeline2_key
db.put([self.pipeline_record, self.slot_record, root_pipeline])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.START)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.ABORTED, after_record.status)
self.assertEquals(0, after_record.current_attempt)
self.assertTrue(after_record.retry_message is None)
self.assertTrue(after_record.abort_message is None)
self.assertTrue(after_record.finalized_time is not None)
def testNonAsyncAbortSignalRepeated(self):
"""Tests when a non-async pipeline has the abort request repeated.
Tests the case where handling the abort signal succeeds and that the
pipeline finalizes before being aborted.
"""
self.pipeline_record.class_path = '__main__.DumbSync'
self.pipeline_record.status = _PipelineRecord.WAITING
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.ABORT)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.ABORTED, after_record.status)
self.assertEquals(0, after_record.current_attempt)
self.assertTrue(after_record.retry_message is None)
self.assertTrue(after_record.abort_message is None)
self.assertTrue(after_record.finalized_time is not None)
# Run a second time-- this should be ignored.
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.ABORT)
after_record2 = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.ABORTED, after_record2.status)
self.assertEquals(0, after_record2.current_attempt)
self.assertTrue(after_record2.retry_message is None)
self.assertTrue(after_record2.abort_message is None)
self.assertEquals(after_record.finalized_time, after_record2.finalized_time)
def testAsyncAbortSignalBeforeStart(self):
"""Tests when an async pipeline has an abort request and has not run yet.
Verifies that the pipeline will be finalized and transitioned to ABORTED.
"""
self.pipeline_record.class_path = '__main__.DumbAsync'
self.pipeline_record.status = _PipelineRecord.WAITING
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.ABORT)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.ABORTED, after_record.status)
self.assertEquals(0, after_record.current_attempt)
self.assertTrue(after_record.retry_message is None)
self.assertTrue(after_record.abort_message is None)
self.assertTrue(after_record.finalized_time is not None)
def testAsyncAbortSignalDisallowed(self):
"""Tests when an async pipeline receives abort but try_cancel is False."""
self.pipeline_record.class_path = '__main__.AsyncCannotAbort'
self.pipeline_record.status = _PipelineRecord.RUN
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.ABORT)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.RUN, after_record.status)
self.assertEquals(0, after_record.current_attempt)
self.assertTrue(after_record.retry_message is None)
self.assertTrue(after_record.abort_message is None)
self.assertTrue(after_record.finalized_time is None)
def testAsyncAbortSignalAllowed(self):
"""Tests when an async pipeline receives abort but try_cancel is True."""
self.pipeline_record.class_path = '__main__.AsyncCanAbort'
self.pipeline_record.status = _PipelineRecord.RUN
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.ABORT)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.ABORTED, after_record.status)
self.assertEquals(0, after_record.current_attempt)
self.assertTrue(after_record.retry_message is None)
self.assertTrue(after_record.abort_message is None)
self.assertTrue(after_record.finalized_time is not None)
def testGeneratorAbortException(self):
"""Tests when a generator raises an abort after it's begun yielding."""
self.pipeline_record.class_path = '__main__.AbortAfterYield'
self.pipeline_record.status = _PipelineRecord.RUN
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.ABORT)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.ABORTED, after_record.status)
self.assertEquals(0, after_record.current_attempt)
self.assertTrue(after_record.retry_message is None)
self.assertTrue(after_record.abort_message is None)
self.assertTrue(after_record.finalized_time is not None)
def testRetryWhenSyncDoesNotFillSlot(self):
"""Tests when a sync pipeline does not fill a slot that it will retry."""
self.pipeline_record.class_path = '__main__.SyncMissedOutput'
self.pipeline_record.status = _PipelineRecord.WAITING
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(1, after_record.current_attempt)
self.assertEquals(
'SlotNotFilledError: Outputs set([\'another\']) for pipeline ID "one" '
'were never filled by "__main__.SyncMissedOutput".',
after_record.retry_message)
def testNonYieldingGeneratorDoesNotFillSlot(self):
"""Tests non-yielding pipelines that do not fill a slot will retry."""
self.pipeline_record.class_path = '__main__.GeneratorMissedOutput'
self.pipeline_record.status = _PipelineRecord.WAITING
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(1, after_record.current_attempt)
self.assertEquals(
'SlotNotFilledError: Outputs set([\'another\']) for pipeline ID "one" '
'were never filled by "__main__.GeneratorMissedOutput".',
after_record.retry_message)
def testAbortWithBadInputs(self):
"""Tests aborting a pipeline with unresolvable input slots."""
self.pipeline_record.class_path = '__main__.DumbSync'
self.pipeline_record.params['args'] = [
{'type': 'slot',
'slot_key': 'aglteS1hcHAtaWRyGQsSEF9BRV9DYXNjYWRlX1Nsb3QiA3JlZAw'}
]
self.pipeline_record.status = _PipelineRecord.WAITING
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.ABORT)
# Forced into the abort state.
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.ABORTED, after_record.status)
def testPassBadValue(self):
"""Tests when a pipeline passes a non-serializable value to a child."""
self.pipeline_record.class_path = '__main__.PassBadValue'
self.pipeline_record.status = _PipelineRecord.WAITING
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.START)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(1, after_record.current_attempt)
self.assertIn('Bad child arguments. TypeError', after_record.retry_message)
self.assertIn('is not JSON serializable', after_record.retry_message)
self.assertTrue(after_record.abort_message is None)
self.assertTrue(after_record.finalized_time is None)
def testReturnBadValue(self):
"""Tests when a pipeline returns a non-serializable value."""
self.pipeline_record.class_path = '__main__.ReturnBadValue'
self.pipeline_record.status = _PipelineRecord.WAITING
db.put([self.pipeline_record, self.slot_record])
self.context.evaluate(self.pipeline_key, purpose=_BarrierRecord.START)
after_record = db.get(self.pipeline_key)
self.assertEquals(_PipelineRecord.WAITING, after_record.status)
self.assertEquals(1, after_record.current_attempt)
self.assertIn('Bad return value. TypeError', after_record.retry_message)
self.assertIn('is not JSON serializable', after_record.retry_message)
self.assertTrue(after_record.abort_message is None)
self.assertTrue(after_record.finalized_time is None)
class HandlersPrivateTest(TestBase):
"""Tests that the pipeline request handlers are all private."""
def testBarrierHandler(self):
"""Tests the _BarrierHandler."""
handler = test_shared.create_handler(pipeline._BarrierHandler, 'POST', '/')
handler.post()
self.assertEquals((403, 'Forbidden'), handler.response._Response__status)
def testPipelineHandler(self):
"""Tests the _PipelineHandler."""
handler = test_shared.create_handler(pipeline._PipelineHandler, 'POST', '/')
handler.post()
self.assertEquals((403, 'Forbidden'), handler.response._Response__status)
def testFanoutAbortHandler(self):
"""Tests the _FanoutAbortHandler."""
handler = test_shared.create_handler(
pipeline._FanoutAbortHandler, 'POST', '/')
handler.post()
self.assertEquals((403, 'Forbidden'), handler.response._Response__status)
def testFanoutHandler(self):
"""Tests the _FanoutHandler."""
handler = test_shared.create_handler(pipeline._FanoutHandler, 'POST', '/')
handler.post()
self.assertEquals((403, 'Forbidden'), handler.response._Response__status)
def testCleanupHandler(self):
"""Tests the _CleanupHandler."""
handler = test_shared.create_handler(pipeline._CleanupHandler, 'POST', '/')
handler.post()
self.assertEquals((403, 'Forbidden'), handler.response._Response__status)
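# The three pipeline classes below set up the callback access levels that
# CallbackHandlerTest exercises: internal-only (the default), admin-only
# (admin_callbacks = True), and public (public_callbacks = True).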
class InternalOnlyPipeline(pipeline.Pipeline):
"""Pipeline with internal-only callbacks."""
async = True
def run(self):
pass
class AdminOnlyPipeline(pipeline.Pipeline):
"""Pipeline with internal-only callbacks."""
async = True
admin_callbacks = True
def run(self):
pass
def callback(self, **kwargs):
pass
class PublicPipeline(pipeline.Pipeline):
"""Pipeline with public callbacks."""
async = True
public_callbacks = True
def run(self):
pass
def callback(self, **kwargs):
return (200, 'text/plain', repr(kwargs))
class CallbackHandlerTest(TestBase):
"""Tests for the _CallbackHandler class."""
def testErrors(self):
"""Tests for error conditions."""
# No pipeline_id param.
handler = test_shared.create_handler(
pipeline._CallbackHandler, 'GET', '/?red=one&blue=two')
handler.get()
self.assertEquals((400, 'Bad Request'), handler.response._Response__status)
# Non-existent pipeline.
handler = test_shared.create_handler(
pipeline._CallbackHandler, 'GET', '/?pipeline_id=blah&red=one&blue=two')
handler.get()
self.assertEquals((400, 'Bad Request'), handler.response._Response__status)
# Pipeline exists but class path is bogus.
stage = InternalOnlyPipeline()
stage.start()
pipeline_record = pipeline.models._PipelineRecord.get_by_key_name(
stage.pipeline_id)
params = pipeline_record.params
params['class_path'] = 'does.not.exist'
pipeline_record.params_text = simplejson.dumps(params)
pipeline_record.put()
handler = test_shared.create_handler(
pipeline._CallbackHandler,
'GET', '/?pipeline_id=%s&red=one&blue=two' % stage.pipeline_id)
handler.get()
self.assertEquals((400, 'Bad Request'), handler.response._Response__status)
# Internal-only callbacks.
stage = InternalOnlyPipeline()
stage.start()
handler = test_shared.create_handler(
pipeline._CallbackHandler,
'GET', '/?pipeline_id=%s&red=one&blue=two' % stage.pipeline_id)
handler.get()
self.assertEquals((400, 'Bad Request'), handler.response._Response__status)
# Admin-only callbacks but not admin.
stage = AdminOnlyPipeline()
stage.start()
handler = test_shared.create_handler(
pipeline._CallbackHandler,
'GET', '/?pipeline_id=%s&red=one&blue=two' % stage.pipeline_id)
handler.get()
self.assertEquals((400, 'Bad Request'), handler.response._Response__status)
def testAdminOnly(self):
"""Tests accessing a callback that is admin-only."""
stage = AdminOnlyPipeline()
stage.start()
os.environ['USER_IS_ADMIN'] = '1'
try:
handler = test_shared.create_handler(
pipeline._CallbackHandler,
'GET', '/?pipeline_id=%s&red=one&blue=two' % stage.pipeline_id)
handler.get()
finally:
del os.environ['USER_IS_ADMIN']
self.assertEquals((200, 'OK'), handler.response._Response__status)
def testPublic(self):
"""Tests accessing a callback that is public."""
stage = PublicPipeline()
stage.start()
handler = test_shared.create_handler(
pipeline._CallbackHandler,
'GET', '/?pipeline_id=%s&red=one&blue=two' % stage.pipeline_id)
handler.get()
self.assertEquals((200, 'OK'), handler.response._Response__status)
def testReturnValue(self):
"""Tests when the callback has a return value to render as output."""
stage = PublicPipeline()
stage.start()
handler = test_shared.create_handler(
pipeline._CallbackHandler,
'GET', '/?pipeline_id=%s&red=one&blue=two' % stage.pipeline_id)
handler.get()
self.assertEquals((200, 'OK'), handler.response._Response__status)
self.assertEquals(
"{'blue': u'two', 'red': u'one'}",
handler.response.out.getvalue())
self.assertEquals('text/plain', handler.response.headers['Content-Type'])
class CleanupHandlerTest(test_shared.TaskRunningMixin, TestBase):
"""Tests for the _CleanupHandler class."""
def testSuccess(self):
"""Tests successfully deleting all child pipeline elements."""
self.assertEquals(0, len(list(_PipelineRecord.all())))
self.assertEquals(0, len(list(_SlotRecord.all())))
self.assertEquals(0, len(list(_BarrierRecord.all())))
self.assertEquals(0, len(list(_StatusRecord.all())))
stage = OutputlessPipeline()
stage.start(idempotence_key='banana')
stage.set_status('My status here!')
self.assertEquals(1, len(list(_PipelineRecord.all())))
self.assertEquals(1, len(list(_SlotRecord.all())))
self.assertEquals(1, len(list(_BarrierRecord.all())))
self.assertEquals(1, len(list(_StatusRecord.all())))
stage.cleanup()
task_list = self.get_tasks()
self.assertEquals(2, len(task_list))
# The order of the tasks (start or cleanup) is unclear, so
# fish out the one that's the cleanup task and run it directly.
for task in task_list:
if task['url'] == '/_ah/pipeline/cleanup':
self.run_task(task)
self.assertEquals(0, len(list(_PipelineRecord.all())))
self.assertEquals(0, len(list(_SlotRecord.all())))
self.assertEquals(0, len(list(_BarrierRecord.all())))
self.assertEquals(0, len(list(_StatusRecord.all())))
class FanoutHandlerTest(test_shared.TaskRunningMixin, TestBase):
"""Tests for the _FanoutHandler class."""
def testOldStyle(self):
"""Tests the old fanout parameter style for backwards compatibility."""
stage = DumbGeneratorYields()
stage.start(idempotence_key='banana')
task_list = self.get_tasks()
test_shared.delete_tasks(task_list)
self.run_task(task_list[0])
task_list = self.get_tasks()
self.assertEquals(1, len(task_list))
fanout_task = task_list[0]
self.assertEquals('/_ah/pipeline/fanout', fanout_task['url'])
after_record = db.get(stage._pipeline_key)
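# Rebuild the task body in the old fanout style, which passed every child
# pipeline_key directly as repeated parameters in the payload.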
fanout_task['body'] = base64.b64encode(urllib.urlencode(
[('pipeline_key', str(after_record.fanned_out[0])),
('pipeline_key', str(after_record.fanned_out[1]))]))
test_shared.delete_tasks(task_list)
self.run_task(fanout_task)
task_list = self.get_tasks()
test_shared.delete_tasks(task_list)
self.assertEquals(2, len(task_list))
for task in task_list:
self.assertEquals('/_ah/pipeline/run', task['url'])
children_keys = [
db.Key(t['params']['pipeline_key'][0]) for t in task_list]
self.assertEquals(set(children_keys), set(after_record.fanned_out))
################################################################################
# Begin functional test section!
class RunOrder(db.Model):
"""Saves the order of method calls."""
order = db.ListProperty(db.Text)
@classmethod
def add(cls, message):
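# Appends inside a transaction so that concurrent pipeline tasks do not
# overwrite each other's entries in the singleton record.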
def txn():
runorder = RunOrder.get_by_key_name('singleton')
if runorder is None:
runorder = RunOrder(key_name='singleton')
runorder.order.append(db.Text(message))
runorder.put()
db.run_in_transaction(txn)
@classmethod
def get(cls):
runorder = RunOrder.get_by_key_name('singleton')
if runorder is None:
return []
else:
return [str(s) for s in runorder.order]
class SaveRunOrder(pipeline.Pipeline):
"""Pipeline that saves the run order message supplied."""
def run(self, message):
RunOrder.add(message)
class EchoSync(pipeline.Pipeline):
"""Pipeline that echos input."""
def run(self, *args):
if not args:
return None
if len(args) == 1:
return args[0]
return args
class EchoAsync(pipeline.Pipeline):
"""Asynchronous pipeline that echos input."""
async = True
def run(self, *args):
self.get_callback_task(
params=dict(return_value=pickle.dumps(args))).add()
def callback(self, return_value):
args = pickle.loads(str(return_value))
if not args:
self.complete(None)
elif len(args) == 1:
self.complete(args[0])
else:
self.complete(args)
def run_test(self, *args):
self.callback(pickle.dumps(args))
class EchoNamedSync(pipeline.Pipeline):
"""Pipeline that echos named inputs to named outputs."""
def run(self, **kwargs):
prefix = kwargs.get('prefix', '')
if prefix:
del kwargs['prefix']
for name, value in kwargs.iteritems():
self.fill(name, prefix + value)
class EchoParticularNamedSync(EchoNamedSync):
"""Has preexisting output names so it can be used as a root pipeline."""
output_names = ['one', 'two', 'three', 'four']
class EchoNamedAsync(pipeline.Pipeline):
"""Asynchronous pipeline that echos named inputs to named outputs."""
async = True
def run(self, **kwargs):
self.get_callback_task(params=kwargs).add()
def callback(self, **kwargs):
prefix = kwargs.get('prefix', '')
if prefix:
del kwargs['prefix']
for name, value in kwargs.iteritems():
self.fill(name, prefix + value)
self.complete()
def run_test(self, **kwargs):
self.callback(**kwargs)
class EchoNamedHalfAsync(pipeline.Pipeline):
"""Pipeline that echos to named outputs and completes async.
This is different than the other EchoNamedAsync because it fills all the
slots except the default slot immediately, and then uses a callback to
finally complete.
"""
async = True
output_names = ['one', 'two', 'three', 'four']
def run(self, **kwargs):
prefix = kwargs.get('prefix', '')
if prefix:
del kwargs['prefix']
for name, value in kwargs.iteritems():
self.fill(name, prefix + value)
self.get_callback_task(params=kwargs).add()
def callback(self, **kwargs):
self.complete()
def run_test(self, **kwargs):
prefix = kwargs.get('prefix', '')
if prefix:
del kwargs['prefix']
for name, value in kwargs.iteritems():
self.fill(name, prefix + value)
self.callback(**kwargs)
class EchoParticularNamedAsync(EchoNamedAsync):
"""Has preexisting output names so it can be used as a root pipeline."""
output_names = ['one', 'two', 'three', 'four']
class FillAndPass(pipeline.Pipeline):
"""Test pipeline that fills some outputs and passes the rest to a child."""
def run(self, to_fill, **kwargs):
for name in to_fill:
self.fill(name, kwargs.pop(name))
adjusted_kwargs = {}
for name, value in kwargs.iteritems():
adjusted_kwargs[name] = value
if adjusted_kwargs:
yield EchoNamedSync(**adjusted_kwargs)
class FillAndPassParticular(FillAndPass):
"""Has preexisting output names so it can be used as a root pipeline."""
output_names = ['one', 'two', 'three', 'four']
class StrictChildInheritsAll(pipeline.Pipeline):
"""Test pipeline whose strict child inherits all outputs."""
output_names = ['one', 'two', 'three', 'four']
def run(self, **kwargs):
yield EchoParticularNamedSync(**kwargs)
class StrictChildGeneratorInheritsAll(pipeline.Pipeline):
"""Test pipeline whose strict child generator inherits all outputs."""
output_names = ['one', 'two', 'three', 'four']
def run(self, **kwargs):
yield FillAndPassParticular(kwargs.keys(), **kwargs)
class ConsumePartialChildrenStrict(pipeline.Pipeline):
"""Test pipeline that consumes a subset of a strict child's outputs."""
def run(self, **kwargs):
result = yield EchoParticularNamedSync(**kwargs)
yield EchoSync(result.one, result.two)
class ConsumePartialChildren(pipeline.Pipeline):
"""Test pipeline that consumes a subset of a dynamic child's outputs."""
def run(self, **kwargs):
result = yield EchoNamedSync(**kwargs)
yield EchoSync(result.one, result.two)
class DoNotConsumeDefault(pipeline.Pipeline):
"""Test pipeline that does not consume a child's default output."""
def run(self, value):
yield EchoSync('not used')
yield EchoSync(value)
class TwoLevelFillAndPass(pipeline.Pipeline):
"""Two-level deep version of fill and pass."""
output_names = ['one', 'two', 'three', 'four']
def run(self, **kwargs):
# This stage will prefix any keyword args with 'first-'.
stage = yield FillAndPass(
[],
prefix='first-',
one=kwargs.get('one'),
two=kwargs.get('two'))
adjusted_kwargs = kwargs.copy()
adjusted_kwargs['one'] = stage.one
adjusted_kwargs['two'] = stage.two
adjusted_kwargs['prefix'] = 'second-'
# This stage will prefix any keyword args with 'second-'. That means
# any args that were passed in from the output of the first stage will
# be prefixed twice: 'second-first-<kwarg>'.
yield FillAndPass([], **adjusted_kwargs)
class DivideWithRemainder(pipeline.Pipeline):
"""Divides a number, returning the divisor and the quotient."""
output_names = ['remainder']
def run(self, dividend, divisor):
self.fill(self.outputs.remainder, dividend % divisor)
return dividend // divisor
class EuclidGCD(pipeline.Pipeline):
"""Does the Euclidean Greatest Common Factor recursive algorithm."""
output_names = ['gcd']
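# Example (see testGeneratorRecursive): EuclidGCD(1071, 462) divides to
# remainder 147, then recurses on (462, 147), (147, 21), and (21, 0); the
# final call fills the 'gcd' slot with 21.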
def run(self, a, b):
a, b = max(a, b), min(a, b)
if b == 0:
self.fill(self.outputs.gcd, a)
return
result = yield DivideWithRemainder(a, b)
recurse = yield EuclidGCD(b, result.remainder)
class UnusedOutputReference(pipeline.Pipeline):
"""Test pipeline that touches a child output but doesn't consume it."""
def run(self):
result = yield EchoParticularNamedSync(
one='red', two='blue', three='green', four='yellow')
print result.one
print result.two
print result.three
yield EchoSync(result.four)
class AccessUndeclaredDefaultOnly(pipeline.Pipeline):
"""Test pipeline accesses undeclared output of a default-only pipeline."""
def run(self):
result = yield EchoSync('hi')
yield EchoSync(result.does_not_exist)
class RunMethod(pipeline.Pipeline):
"""Test pipeline that outputs what method was used for running it."""
def run(self):
return 'run()'
def run_test(self):
return 'run_test()'
class DoAfter(pipeline.Pipeline):
"""Test the After clause."""
def run(self):
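# Both pipelines outside the After block record 'first' and both inside it
# record 'third'; testAfter asserts that the two 'first' entries precede
# the two 'third' entries.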
first = yield SaveRunOrder('first')
second = yield SaveRunOrder('first')
with pipeline.After(first, second):
third = yield SaveRunOrder('third')
fourth = yield SaveRunOrder('third')
class DoAfterNested(pipeline.Pipeline):
"""Test the After clause in multiple nestings."""
def run(self):
first = yield SaveRunOrder('first')
second = yield SaveRunOrder('first')
with pipeline.After(first, second):
third = yield SaveRunOrder('third')
fourth = yield SaveRunOrder('third')
with pipeline.After(third, fourth):
with pipeline.After(third):
yield SaveRunOrder('fifth')
with pipeline.After(fourth):
yield SaveRunOrder('fifth')
class DoAfterList(pipeline.Pipeline):
"""Test the After clause with a list of jobs."""
def run(self):
job_list = []
for i in xrange(10):
job = yield EchoNamedHalfAsync(
one='red', two='blue', three='green', four='yellow')
job_list.append(job)
with pipeline.After(*job_list):
combined = yield common.Concat(*[job.one for job in job_list])
result = yield SaveRunOrder(combined)
with pipeline.After(result):
yield SaveRunOrder('twelfth')
class DoInOrder(pipeline.Pipeline):
"""Test the InOrder clause."""
def run(self):
with pipeline.InOrder():
yield SaveRunOrder('first')
yield SaveRunOrder('second')
yield SaveRunOrder('third')
yield SaveRunOrder('fourth')
class DoInOrderNested(pipeline.Pipeline):
"""Test the InOrder clause when nested."""
def run(self):
with pipeline.InOrder():
yield SaveRunOrder('first')
yield SaveRunOrder('second')
with pipeline.InOrder():
# Nested InOrder blocks are not allowed; this should break (see
# testInOrderNesting).
yield SaveRunOrder('third')
yield SaveRunOrder('fourth')
class MixAfterInOrder(pipeline.Pipeline):
"""Test mixing After and InOrder clauses."""
def run(self):
first = yield SaveRunOrder('first')
with pipeline.After(first):
with pipeline.InOrder():
yield SaveRunOrder('second')
yield SaveRunOrder('third')
fourth = yield SaveRunOrder('fourth')
with pipeline.InOrder():
with pipeline.After(fourth):
yield SaveRunOrder('fifth')
yield SaveRunOrder('sixth')
class RecordFinalized(pipeline.Pipeline):
"""Records calls to finalized."""
def run(self, depth):
yield SaveRunOrder('run#%d' % depth)
def finalized(self):
RunOrder.add('finalized#%d' % self.args[0])
def finalized_test(self):
RunOrder.add('finalized_test#%d' % self.args[0])
class NestedFinalize(pipeline.Pipeline):
"""Test nested pipelines are finalized in a reasonable order."""
def run(self, depth):
if depth == 0:
return
yield RecordFinalized(depth)
yield NestedFinalize(depth - 1)
class YieldBadValue(pipeline.Pipeline):
"""Test pipeline that yields something that's not a pipeline."""
def run(self):
yield 5
class YieldChildTwice(pipeline.Pipeline):
"""Test pipeline that yields the same child pipeline twice."""
def run(self):
child = EchoSync('bad')
yield child
yield child
class FinalizeFailure(pipeline.Pipeline):
"""Test when finalized raises an error."""
def run(self):
pass
def finalized(self):
raise Exception('Doh something broke!')
class SyncForcesRetry(pipeline.Pipeline):
"""Test when a synchronous pipeline raises the Retry exception."""
def run(self):
raise pipeline.Retry('We need to try this again')
class AsyncForcesRetry(pipeline.Pipeline):
"""Test when a synchronous pipeline raises the Retry exception."""
async = True
def run(self):
raise pipeline.Retry('We need to try this again')
def run_test(self):
raise pipeline.Retry('We need to try this again')
class GeneratorForcesRetry(pipeline.Pipeline):
"""Test when a generator pipeline raises the Retry exception."""
def run(self):
if False:
yield 1
raise pipeline.Retry('We need to try this again')
class SyncRaiseAbort(pipeline.Pipeline):
"""Raises an abort signal."""
def run(self):
RunOrder.add('run SyncRaiseAbort')
raise pipeline.Abort('Gotta bail!')
def finalized(self):
RunOrder.add('finalized SyncRaiseAbort: %s' % self.was_aborted)
class AsyncRaiseAbort(pipeline.Pipeline):
"""Raises an abort signal in an asynchronous pipeline."""
async = True
def run(self):
raise pipeline.Abort('Gotta bail!')
def run_test(self):
raise pipeline.Abort('Gotta bail!')
class GeneratorRaiseAbort(pipeline.Pipeline):
"""Raises an abort signal in a generator pipeline."""
def run(self):
if False:
yield 1
raise pipeline.Abort('Gotta bail!')
class AbortAndRecordFinalized(pipeline.Pipeline):
"""Records calls to finalized."""
def run(self):
RunOrder.add('run AbortAndRecordFinalized')
yield SyncRaiseAbort()
def finalized(self):
RunOrder.add('finalized AbortAndRecordFinalized: %s' %
self.was_aborted)
class SetStatusPipeline(pipeline.Pipeline):
"""Simple pipeline that just sets its status a few times."""
def run(self):
self.set_status(message='My message')
self.set_status(console_url='/path/to/my/console')
self.set_status(status_links=dict(one='/red', two='/blue'))
self.set_status(message='My message',
console_url='/path/to/my/console',
status_links=dict(one='/red', two='/blue'))
class PassBadValue(pipeline.Pipeline):
"""Simple pipeline that passes along a non-JSON serializable value."""
def run(self):
yield EchoSync(object())
class ReturnBadValue(pipeline.Pipeline):
"""Simple pipeline that returns a non-JSON serializable value."""
def run(self):
return object()
class EchoParams(pipeline.Pipeline):
"""Echos the parameters this pipeline has."""
def run(self):
ALLOWED = ('backoff_seconds', 'backoff_factor', 'max_attempts', 'target')
return dict((key, getattr(self, key)) for key in ALLOWED)
class WithParams(pipeline.Pipeline):
"""Simple pipeline that uses the with_params helper method."""
def run(self):
foo = yield EchoParams().with_params(
max_attempts=8,
backoff_seconds=99,
target='other-backend')
yield EchoSync(foo, 'stuff')
class FunctionalTest(test_shared.TaskRunningMixin, TestBase):
"""End-to-end tests for various Pipeline constructs."""
def setUp(self):
"""Sets up the test harness."""
super(FunctionalTest, self).setUp()
def testStartSync(self):
"""Tests starting and executing just a synchronous pipeline."""
stage = EchoSync(1, 2, 3)
self.assertFalse(stage.async)
self.assertEquals((1, 2, 3), EchoSync(1, 2, 3).run(1, 2, 3))
outputs = self.run_pipeline(stage)
self.assertEquals([1, 2, 3], outputs.default.value)
def testStartAsync(self):
"""Tests starting and executing an asynchronous pipeline."""
stage = EchoAsync(1, 2, 3)
self.assertTrue(stage.async)
outputs = self.run_pipeline(stage)
self.assertEquals([1, 2, 3], outputs.default.value)
def testSyncNamedOutputs(self):
"""Tests a synchronous pipeline with named outputs."""
stage = EchoParticularNamedSync(
one='red', two='blue', three='green', four='yellow')
self.assertFalse(stage.async)
outputs = self.run_pipeline(stage)
self.assertEquals(None, outputs.default.value)
self.assertEquals('red', outputs.one.value)
self.assertEquals('blue', outputs.two.value)
self.assertEquals('green', outputs.three.value)
self.assertEquals('yellow', outputs.four.value)
def testAsyncNamedOutputs(self):
"""Tests an asynchronous pipeline with named outputs."""
stage = EchoParticularNamedAsync(
one='red', two='blue', three='green', four='yellow')
self.assertTrue(stage.async)
outputs = self.run_pipeline(stage)
self.assertEquals(None, outputs.default.value)
self.assertEquals('red', outputs.one.value)
self.assertEquals('blue', outputs.two.value)
self.assertEquals('green', outputs.three.value)
self.assertEquals('yellow', outputs.four.value)
def testInheritOutputs(self):
"""Tests when a pipeline generator child inherits all parent outputs."""
stage = FillAndPassParticular(
[],
one='red', two='blue', three='green', four='yellow',
prefix='passed-')
outputs = self.run_pipeline(stage)
self.assertEquals(None, outputs.default.value)
self.assertEquals('passed-red', outputs.one.value)
self.assertEquals('passed-blue', outputs.two.value)
self.assertEquals('passed-green', outputs.three.value)
self.assertEquals('passed-yellow', outputs.four.value)
def testInheritOutputsPartial(self):
"""Tests when a pipeline generator child inherits some parent outputs."""
stage = FillAndPassParticular(
['one', 'three'],
one='red', two='blue', three='green', four='yellow',
prefix='passed-')
outputs = self.run_pipeline(stage)
self.assertEquals(None, outputs.default.value)
self.assertEquals('red', outputs.one.value)
self.assertEquals('passed-blue', outputs.two.value)
self.assertEquals('green', outputs.three.value)
self.assertEquals('passed-yellow', outputs.four.value)
def testInheritOutputsStrict(self):
"""Tests strict child of a pipeline generator inherits all outputs."""
stage = StrictChildInheritsAll(
one='red', two='blue', three='green', four='yellow')
outputs = self.run_pipeline(stage)
self.assertEquals(None, outputs.default.value)
self.assertEquals('red', outputs.one.value)
self.assertEquals('blue', outputs.two.value)
self.assertEquals('green', outputs.three.value)
self.assertEquals('yellow', outputs.four.value)
def testInheritChildSyncStrictMissing(self):
"""Tests when a strict child pipeline does not output to a required slot."""
stage = StrictChildInheritsAll(
one='red', two='blue', three='green')
self.assertRaises(pipeline.SlotNotFilledError, self.run_pipeline, stage)
def testInheritChildSyncStrictNotDeclared(self):
"""Tests when a strict child pipeline outputs to an undeclared name."""
stage = StrictChildInheritsAll(
one='red', two='blue', three='green', four='yellow', five='undeclared')
self.assertRaises(pipeline.SlotNotDeclaredError, self.run_pipeline, stage)
def testInheritGeneratorStrict(self):
"""Tests when a strict child pipeline inherits all outputs."""
stage = StrictChildGeneratorInheritsAll(
one='red', two='blue', three='green', four='yellow')
outputs = self.run_pipeline(stage)
self.assertEquals(None, outputs.default.value)
self.assertEquals('red', outputs.one.value)
self.assertEquals('blue', outputs.two.value)
self.assertEquals('green', outputs.three.value)
self.assertEquals('yellow', outputs.four.value)
def testInheritGeneratorStrictMissing(self):
"""Tests when a strict child generator does not output to a slot."""
stage = StrictChildGeneratorInheritsAll(
one='red', two='blue', three='green')
self.assertRaises(pipeline.SlotNotFilledError, self.run_pipeline, stage)
def testInheritGeneratorStrictNotDeclared(self):
"""Tests when a strict child generator outputs to an undeclared name."""
stage = StrictChildGeneratorInheritsAll(
one='red', two='blue', three='green', four='yellow', five='undeclared')
self.assertRaises(pipeline.SlotNotDeclaredError, self.run_pipeline, stage)
def testPartialConsumptionStrict(self):
"""Tests when a parent pipeline consumes a subset of strict child outputs.
When the child is strict, then partial consumption is fine since all
outputs must be declared ahead of time.
"""
stage = ConsumePartialChildrenStrict(
one='red', two='blue', three='green', four='yellow')
outputs = self.run_pipeline(stage)
self.assertEquals(['red', 'blue'], outputs.default.value)
def testPartialConsumptionDynamic(self):
"""Tests when a parent pipeline consumes a subset of dynamic child outputs.
When the child is dynamic, then all outputs must be consumed by the caller.
"""
stage = ConsumePartialChildren(
one='red', two='blue', three='green', four='yellow')
self.assertRaises(pipeline.SlotNotDeclaredError, self.run_pipeline, stage)
def testNoDefaultConsumption(self):
"""Tests when a parent pipeline does not consume default output."""
stage = DoNotConsumeDefault('hi there')
outputs = self.run_pipeline(stage)
self.assertEquals('hi there', outputs.default.value)
def testGeneratorNoChildren(self):
"""Tests when a generator pipeline yields no children."""
self.assertRaises(StopIteration, FillAndPass([]).run([]).next)
stage = FillAndPass([])
outputs = self.run_pipeline(stage)
self.assertTrue(outputs.default.value is None)
def testSyncMissingNamedOutput(self):
"""Tests when a sync pipeline does not output to a named output."""
stage = EchoParticularNamedSync(one='red', two='blue', three='green')
self.assertFalse(stage.async)
self.assertRaises(pipeline.SlotNotFilledError, self.run_pipeline, stage)
def testGeneratorNoChildrenMissingNamedOutput(self):
"""Tests a missing output from a generator with no child pipelines."""
stage = FillAndPassParticular(
['one', 'two', 'three'],
one='red', two='blue', three='green')
self.assertRaises(pipeline.SlotNotFilledError, self.run_pipeline, stage)
def testSyncUndeclaredOutput(self):
"""Tests when a strict sync pipeline outputs to an undeclared output."""
stage = EchoParticularNamedSync(
one='red', two='blue', three='green', four='yellow', other='stuff')
self.assertFalse(stage.async)
self.assertRaises(pipeline.SlotNotDeclaredError, self.run_pipeline, stage)
def testGeneratorChildlessUndeclaredOutput(self):
"""Tests when a childless generator outputs to an undeclared output."""
stage = FillAndPassParticular(
['one', 'two', 'three', 'four', 'other'],
one='red', two='blue', three='green', four='yellow', other='stuff')
self.assertRaises(pipeline.SlotNotDeclaredError, self.run_pipeline, stage)
def testRootGeneratorChildInheritOutputUndeclared(self):
"""Tests when root's child inherits all and outputs to a bad name."""
stage = FillAndPassParticular(
['one', 'two'],
one='red', two='blue', three='green', four='yellow', other='stuff')
self.assertRaises(pipeline.SlotNotDeclaredError, self.run_pipeline, stage)
def testDeepGeneratorChildInheritOutputUndeclared(self):
"""Tests when a pipeline that is not the root outputs to a bad name."""
stage = TwoLevelFillAndPass(
one='red', two='blue', three='green', four='yellow', other='stuff')
self.assertRaises(pipeline.SlotNotDeclaredError, self.run_pipeline, stage)
def testDeepGenerator(self):
"""Tests a multi-level generator."""
stage = TwoLevelFillAndPass(
one='red', two='blue', three='green', four='yellow')
outputs = self.run_pipeline(stage)
self.assertEquals(None, outputs.default.value)
self.assertEquals('second-first-red', outputs.one.value)
self.assertEquals('second-first-blue', outputs.two.value)
self.assertEquals('second-green', outputs.three.value)
self.assertEquals('second-yellow', outputs.four.value)
def testDeepGenerator_Huge(self):
"""Tests a multi-level generator with huge inputs and outputs."""
big_data = 'blue' * 1000000
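# Roughly 4 MB of data, large enough that the slot value presumably cannot
# be stored inline in a single datastore entity.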
stage = TwoLevelFillAndPass(
one='red', two=big_data, three='green', four='yellow')
outputs = self.run_pipeline(stage)
self.assertEquals(None, outputs.default.value)
self.assertEquals('second-first-red', outputs.one.value)
self.assertEquals('second-first-' + big_data, outputs.two.value)
self.assertEquals('second-green', outputs.three.value)
self.assertEquals('second-yellow', outputs.four.value)
def testOnlyConsumePassedOnOutputs(self):
"""Tests that just accessing a Slot on a PipelineFuture won't consume it."""
stage = UnusedOutputReference()
outputs = self.run_pipeline(stage)
self.assertEquals('yellow', outputs.default.value)
def testAccessUndeclaredOutputsBreaks(self):
"""Tests errors accessing undeclared outputs on a default-only pipeline."""
stage = AccessUndeclaredDefaultOnly()
self.assertRaises(pipeline.SlotNotFilledError, self.run_pipeline, stage)
def testGeneratorRecursive(self):
"""Tests a recursive nesting of generators."""
stage = EuclidGCD(1071, 462)
outputs = self.run_pipeline(stage)
self.assertEquals(21, outputs.gcd.value)
stage = EuclidGCD(1071, 463)
outputs = self.run_pipeline(stage)
self.assertEquals(1, outputs.gcd.value)
def testAfter(self):
"""Tests the After() class."""
stage = DoAfter()
self.run_pipeline(stage)
self.assertEquals(['first', 'first', 'third', 'third'],
RunOrder.get())
def testAfterWithNesting(self):
"""Tests that After() nesting of the same dependencies doesn't break."""
stage = DoAfterNested()
self.run_pipeline(stage)
self.assertEquals(['first', 'first', 'third', 'third', 'fifth', 'fifth'],
RunOrder.get())
def testAfterWithList(self):
"""Tests that After() with a list of dependencies works."""
stage = DoAfterList()
self.run_pipeline(stage)
self.assertEquals(['redredredredredredredredredred', 'twelfth'],
RunOrder.get())
def testInOrder(self):
"""Tests the InOrder() class."""
stage = DoInOrder()
self.run_pipeline(stage)
self.assertEquals(['first', 'second', 'third', 'fourth'],
RunOrder.get())
def testInOrderNesting(self):
"""Tests that InOrder nesting is not allowed."""
stage = DoInOrderNested()
self.assertRaises(
pipeline.UnexpectedPipelineError, self.run_pipeline, stage)
def testMixAfterInOrder(self):
"""Tests nesting Afters in InOrder blocks and vice versa."""
stage = MixAfterInOrder()
self.run_pipeline(stage)
self.assertEquals(['first', 'second', 'third', 'fourth', 'fifth', 'sixth'],
RunOrder.get())
def testFinalized(self):
"""Tests the order of finalization."""
stage = NestedFinalize(5)
self.run_pipeline(stage)
run_order = RunOrder.get()
# Ensure each entry is unique.
self.assertEquals(10, len(set(run_order)))
# That there are 5 run entries that are in reasonable order.
run_entries = [
int(r[len('run#'):]) for r in run_order
if r.startswith('run#')]
self.assertEquals(5, len(run_entries))
self.assertEquals([5, 4, 3, 2, 1], run_entries)
# That there are 5 finalized entries that are in reasonable order.
if self.test_mode:
finalized_name = 'finalized_test#'
else:
finalized_name = 'finalized#'
finalized_entries = [
int(r[len(finalized_name):]) for r in run_order
if r.startswith(finalized_name)]
self.assertEquals(5, len(finalized_entries))
self.assertEquals([5, 4, 3, 2, 1], finalized_entries)
def testRunTest(self):
"""Tests that run_test is preferred over run for test mode."""
stage = RunMethod()
outputs = self.run_pipeline(stage)
if self.test_mode:
self.assertEquals('run_test()', outputs.default.value)
else:
self.assertEquals('run()', outputs.default.value)
def testYieldBadValue(self):
"""Tests yielding something that is invalid."""
stage = YieldBadValue()
self.assertRaises(
pipeline.UnexpectedPipelineError, self.run_pipeline, stage)
def testYieldPipelineInstanceTwice(self):
"""Tests when a Pipeline instance is yielded multiple times."""
stage = YieldChildTwice()
self.assertRaises(
pipeline.UnexpectedPipelineError, self.run_pipeline, stage)
def testFinalizeException(self):
"""Tests that finalized exceptions just raise up without being caught."""
stage = FinalizeFailure()
try:
self.run_pipeline(stage)
self.fail('Should have raised')
except Exception, e:
self.assertEquals('Doh something broke!', str(e))
def testSyncRetryException(self):
"""Tests when a sync generator raises a Retry exception."""
stage = SyncForcesRetry()
self.assertRaises(pipeline.Retry, self.run_pipeline, stage)
def testAsyncRetryException(self):
"""Tests when an async generator raises a Retry exception."""
stage = AsyncForcesRetry()
self.assertRaises(pipeline.Retry, self.run_pipeline, stage)
def testGeneratorRetryException(self):
"""Tests when a generator raises a Retry exception."""
stage = GeneratorForcesRetry()
self.assertRaises(pipeline.Retry, self.run_pipeline, stage)
def testSyncAbortException(self):
"""Tests when a sync pipeline raises an abort exception."""
stage = SyncRaiseAbort()
self.assertRaises(pipeline.Abort, self.run_pipeline, stage)
def testAsyncAbortException(self):
"""Tests when an async pipeline raises an abort exception."""
stage = AsyncRaiseAbort()
self.assertRaises(pipeline.Abort, self.run_pipeline, stage)
def testGeneratorAbortException(self):
"""Tests when a generator pipeline raises an abort exception."""
stage = GeneratorRaiseAbort()
self.assertRaises(pipeline.Abort, self.run_pipeline, stage)
def testAbortThenFinalize(self):
"""Tests that pipelines are finalized after abort is raised.
This test requires special handling for different modes to confirm that
finalization happens after abort in production mode.
"""
stage = AbortAndRecordFinalized()
if self.test_mode:
# Finalize after abort doesn't happen in test mode.
try:
self.run_pipeline(stage)
self.fail('Should have raised')
except Exception, e:
self.assertEquals('Gotta bail!', str(e))
run_order = RunOrder.get()
self.assertEquals(['run AbortAndRecordFinalized', 'run SyncRaiseAbort'],
run_order)
else:
self.run_pipeline(stage, _task_retry=False, _require_slots_filled=False)
# Non-deterministic results for finalize. Must equal one of these two.
expected_order1 = [
'run AbortAndRecordFinalized',
'run SyncRaiseAbort',
'finalized SyncRaiseAbort: True',
'finalized AbortAndRecordFinalized: True',
]
expected_order2 = [
'run AbortAndRecordFinalized',
'run SyncRaiseAbort',
'finalized AbortAndRecordFinalized: True',
'finalized SyncRaiseAbort: True',
]
run_order = RunOrder.get()
self.assertTrue(run_order == expected_order1 or
run_order == expected_order2,
'Found order: %r' % run_order)
def testSetStatus_Working(self):
"""Tests that setting the status does not raise errors."""
stage = SetStatusPipeline()
self.run_pipeline(stage)
# That's it. No exceptions raised.
def testPassBadValue(self):
"""Tests when a pipeline passes a non-serializable value to a child."""
stage = PassBadValue()
self.assertRaises(TypeError, self.run_pipeline, stage)
def testReturnBadValue(self):
"""Tests when a pipeline returns a non-serializable value."""
stage = ReturnBadValue()
self.assertRaises(TypeError, self.run_pipeline, stage)
def testWithParams(self):
"""Tests when a pipeline uses the with_params helper."""
stage = WithParams()
outputs = self.run_pipeline(stage)
if self.test_mode:
# In test mode you cannot modify the runtime parameters.
self.assertEquals(
[
{
'backoff_seconds': 15,
'backoff_factor': 2,
'target': None,
'max_attempts': 3
},
'stuff'
],
outputs.default.value)
else:
self.assertEquals(
[
{
'backoff_seconds': 99,
'backoff_factor': 2,
'target': 'other-backend',
'max_attempts': 8
},
'stuff',
],
outputs.default.value)
class FunctionalTestModeTest(test_shared.TestModeMixin, FunctionalTest):
"""Runs all functional tests in test mode."""
DO_NOT_DELETE = 'Seriously... We only need the class declaration.'
class StatusTest(TestBase):
"""Tests for the status handlers."""
def setUp(self):
"""Sets up the test harness."""
TestBase.setUp(self)
self.fill_time = datetime.datetime(2010, 12, 10, 13, 55, 16, 416567)
self.pipeline1_key = db.Key.from_path(_PipelineRecord.kind(), 'one')
self.pipeline2_key = db.Key.from_path(_PipelineRecord.kind(), 'two')
self.pipeline3_key = db.Key.from_path(_PipelineRecord.kind(), 'three')
self.slot1_key = db.Key.from_path(_SlotRecord.kind(), 'red')
self.slot2_key = db.Key.from_path(_SlotRecord.kind(), 'blue')
self.slot3_key = db.Key.from_path(_SlotRecord.kind(), 'green')
self.slot1_record = _SlotRecord(
key=self.slot1_key,
root_pipeline=self.pipeline1_key)
self.slot2_record = _SlotRecord(
key=self.slot2_key,
root_pipeline=self.pipeline1_key)
self.slot3_record = _SlotRecord(
key=self.slot3_key,
root_pipeline=self.pipeline1_key)
self.base_params = {
'args': [],
'kwargs': {},
'task_retry': False,
'backoff_seconds': 1,
'backoff_factor': 2,
'max_attempts': 4,
'queue_name': 'default',
'base_path': '',
'after_all': [],
}
self.params1 = self.base_params.copy()
self.params1.update({
'output_slots': {'default': str(self.slot1_key)},
})
self.params2 = self.base_params.copy()
self.params2.update({
'output_slots': {'default': str(self.slot2_key)},
})
self.params3 = self.base_params.copy()
self.params3.update({
'output_slots': {'default': str(self.slot3_key)},
})
self.pipeline1_record = _PipelineRecord(
root_pipeline=self.pipeline1_key,
status=_PipelineRecord.RUN,
class_path='does.not.exist1',
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(self.params1),
key=self.pipeline1_key,
max_attempts=4)
self.pipeline2_record = _PipelineRecord(
root_pipeline=self.pipeline1_key,
status=_PipelineRecord.WAITING,
class_path='does.not.exist2',
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(self.params2),
key=self.pipeline2_key,
max_attempts=3)
self.pipeline3_record = _PipelineRecord(
root_pipeline=self.pipeline1_key,
status=_PipelineRecord.DONE,
class_path='does.not.exist3',
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(self.params3),
key=self.pipeline3_key,
max_attempts=2)
self.barrier1_record = _BarrierRecord(
parent=self.pipeline1_key,
key_name=_BarrierRecord.FINALIZE,
target=self.pipeline1_key,
root_pipeline=self.pipeline1_key,
blocking_slots=[self.slot1_key])
self.barrier2_record = _BarrierRecord(
parent=self.pipeline2_key,
key_name=_BarrierRecord.FINALIZE,
target=self.pipeline2_key,
root_pipeline=self.pipeline1_key,
blocking_slots=[self.slot2_key])
self.barrier2_record_start = _BarrierRecord(
parent=self.pipeline2_key,
key_name=_BarrierRecord.START,
target=self.pipeline2_key,
root_pipeline=self.pipeline1_key,
blocking_slots=[])
self.barrier3_record = _BarrierRecord(
parent=self.pipeline3_key,
key_name=_BarrierRecord.FINALIZE,
target=self.pipeline3_key,
root_pipeline=self.pipeline1_key,
blocking_slots=[self.slot3_key])
def testGetTimestampMs(self):
"""Tests for the _get_timestamp_ms function."""
when = datetime.datetime(2010, 12, 10, 13, 55, 16, 416567)
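# 2010-12-10 13:55:16 UTC is 1,291,989,316 seconds after the Unix epoch;
# with the microseconds truncated to milliseconds the expected value is
# 1291989316416.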
self.assertEquals(1291989316416L, pipeline._get_timestamp_ms(when))
def testGetInternalStatus_Missing(self):
"""Tests for _get_internal_status when the pipeline is missing."""
try:
pipeline._get_internal_status(pipeline_key=self.pipeline1_key)
self.fail('Did not raise')
except pipeline.PipelineStatusError, e:
self.assertEquals('Could not find pipeline ID "one"', str(e))
def testGetInternalStatus_OutputSlotMissing(self):
"""Tests for _get_internal_status when the output slot is missing."""
try:
pipeline._get_internal_status(
pipeline_key=self.pipeline1_key,
pipeline_dict={self.pipeline1_key: self.pipeline1_record},
barrier_dict={self.barrier1_record.key(): self.barrier1_record})
self.fail('Did not raise')
except pipeline.PipelineStatusError, e:
self.assertEquals(
'Default output slot with '
'key=aglteS1hcHAtaWRyGgsSEV9BRV9QaXBlbGluZV9TbG90IgNyZWQM '
'missing for pipeline ID "one"', str(e))
def testGetInternalStatus_FinalizeBarrierMissing(self):
"""Tests for _get_internal_status when the finalize barrier is missing."""
try:
pipeline._get_internal_status(
pipeline_key=self.pipeline1_key,
pipeline_dict={self.pipeline1_key: self.pipeline1_record},
slot_dict={self.slot1_key: self.slot1_record})
self.fail('Did not raise')
except pipeline.PipelineStatusError, e:
self.assertEquals(
'Finalization barrier missing for pipeline ID "one"', str(e))
def testGetInternalStatus_Finalizing(self):
"""Tests for _get_internal_status when the status is finalizing."""
self.slot1_record.status = _SlotRecord.FILLED
self.slot1_record.fill_time = self.fill_time
expected = {
'status': 'finalizing',
'currentAttempt': 1,
'afterSlotKeys': [],
'outputs': {
'default': str(self.slot1_key),
},
'args': [],
'classPath': 'does.not.exist1',
'children': [],
'endTimeMs': 1291989316416L,
'maxAttempts': 4,
'kwargs': {},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default'
}
self.assertEquals(expected, pipeline._get_internal_status(
pipeline_key=self.pipeline1_key,
pipeline_dict={self.pipeline1_key: self.pipeline1_record},
slot_dict={self.slot1_key: self.slot1_record},
barrier_dict={self.barrier1_record.key(): self.barrier1_record}))
def testGetInternalStatus_Retry(self):
"""Tests for _get_internal_status when the status is retry."""
self.pipeline2_record.next_retry_time = self.fill_time
self.pipeline2_record.retry_message = 'My retry message'
expected = {
'status': 'retry',
'lastRetryMessage': 'My retry message',
'currentAttempt': 1,
'afterSlotKeys': [],
'startTimeMs': 1291989316416L,
'outputs': {
'default': str(self.slot2_key),
},
'args': [],
'classPath': 'does.not.exist2',
'children': [],
'maxAttempts': 3,
'kwargs': {},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default'
}
self.assertEquals(expected, pipeline._get_internal_status(
pipeline_key=self.pipeline2_key,
pipeline_dict={self.pipeline2_key: self.pipeline2_record},
slot_dict={self.slot2_key: self.slot1_record},
barrier_dict={self.barrier2_record.key(): self.barrier2_record}))
def testGetInternalStatus_Waiting(self):
"""Tests for _get_internal_status when the status is waiting."""
expected = {
'status': 'waiting',
'currentAttempt': 1,
'afterSlotKeys': [],
'outputs': {
'default': str(self.slot2_key)
},
'args': [],
'classPath': 'does.not.exist2',
'children': [],
'maxAttempts': 3,
'kwargs': {},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default'
}
self.assertEquals(expected, pipeline._get_internal_status(
pipeline_key=self.pipeline2_key,
pipeline_dict={self.pipeline2_key: self.pipeline2_record},
slot_dict={self.slot2_key: self.slot1_record},
barrier_dict={
self.barrier2_record.key(): self.barrier2_record,
self.barrier2_record_start.key(): self.barrier2_record_start}))
def testGetInternalStatus_Run(self):
"""Tests for _get_internal_status when the status is run."""
self.pipeline1_record.start_time = self.fill_time
expected = {
'status': 'run',
'currentAttempt': 1,
'afterSlotKeys': [],
'startTimeMs': 1291989316416L,
'outputs': {
'default': str(self.slot1_key)
},
'args': [],
'classPath': 'does.not.exist1',
'children': [],
'maxAttempts': 4,
'kwargs': {},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default'
}
self.assertEquals(expected, pipeline._get_internal_status(
pipeline_key=self.pipeline1_key,
pipeline_dict={self.pipeline1_key: self.pipeline1_record},
slot_dict={self.slot1_key: self.slot1_record},
barrier_dict={self.barrier1_record.key(): self.barrier1_record}))
def testGetInternalStatus_RunAfterRetry(self):
"""Tests _get_internal_status when a stage is re-run on retrying."""
self.pipeline1_record.start_time = self.fill_time
self.pipeline1_record.next_retry_time = self.fill_time
self.pipeline1_record.retry_message = 'My retry message'
self.pipeline1_record.current_attempt = 1
expected = {
'status': 'run',
'currentAttempt': 2,
'lastRetryMessage': 'My retry message',
'afterSlotKeys': [],
'startTimeMs': 1291989316416L,
'outputs': {
'default': str(self.slot1_key)
},
'args': [],
'classPath': 'does.not.exist1',
'children': [],
'maxAttempts': 4,
'kwargs': {},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default'
}
self.assertEquals(expected, pipeline._get_internal_status(
pipeline_key=self.pipeline1_key,
pipeline_dict={self.pipeline1_key: self.pipeline1_record},
slot_dict={self.slot1_key: self.slot1_record},
barrier_dict={self.barrier1_record.key(): self.barrier1_record}))
def testGetInternalStatus_Aborted(self):
"""Tests for _get_internal_status when the status is aborted."""
self.pipeline1_record.status = _PipelineRecord.ABORTED
self.pipeline1_record.abort_message = 'I had to bail'
expected = {
'status': 'aborted',
'currentAttempt': 1,
'afterSlotKeys': [],
'abortMessage': 'I had to bail',
'outputs': {
'default': str(self.slot1_key),
},
'args': [],
'classPath': 'does.not.exist1',
'children': [],
'maxAttempts': 4,
'kwargs': {},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default'
}
self.assertEquals(expected, pipeline._get_internal_status(
pipeline_key=self.pipeline1_key,
pipeline_dict={self.pipeline1_key: self.pipeline1_record},
slot_dict={self.slot1_key: self.slot1_record},
barrier_dict={self.barrier1_record.key(): self.barrier1_record}))
def testGetInternalStatus_MoreParams(self):
"""Tests for _get_internal_status with children, slots, and outputs."""
self.pipeline1_record.start_time = self.fill_time
self.pipeline1_record.fanned_out = [
self.pipeline2_key, self.pipeline3_key]
self.pipeline1_record.params['args'] = [
{'type': 'slot', 'slot_key': 'foobar'},
{'type': 'slot', 'slot_key': 'meepa'},
]
self.pipeline1_record.params['kwargs'] = {
'my_arg': {'type': 'slot', 'slot_key': 'other'},
'second_arg': {'type': 'value', 'value': 1234},
}
self.pipeline1_record.params['output_slots'] = {
'default': str(self.slot1_key),
'another_one': str(self.slot2_key),
}
self.pipeline1_record.params['after_all'] = [
str(self.slot2_key),
]
expected = {
'status': 'run',
'currentAttempt': 1,
'afterSlotKeys': [
'aglteS1hcHAtaWRyGwsSEV9BRV9QaXBlbGluZV9TbG90IgRibHVlDA'
],
'startTimeMs': 1291989316416L,
'outputs': {
'default': 'aglteS1hcHAtaWRyGgsSEV9BRV9QaXBlbGluZV9TbG90IgNyZWQM',
'another_one':
'aglteS1hcHAtaWRyGwsSEV9BRV9QaXBlbGluZV9TbG90IgRibHVlDA',
},
'args': [
{'type': 'slot', 'slotKey': 'foobar'},
{'type': 'slot', 'slotKey': 'meepa'}
],
'classPath': 'does.not.exist1',
'children': [u'two', u'three'],
'maxAttempts': 4,
'kwargs': {
'my_arg': {'type': 'slot', 'slotKey': 'other'},
'second_arg': {'type': 'value', 'value': 1234},
},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default'
}
self.assertEquals(expected, pipeline._get_internal_status(
pipeline_key=self.pipeline1_key,
pipeline_dict={self.pipeline1_key: self.pipeline1_record},
slot_dict={self.slot1_key: self.slot1_record},
barrier_dict={self.barrier1_record.key(): self.barrier1_record}))
def testGetInternalStatus_StatusRecord(self):
"""Tests for _get_internal_status when the status record is present."""
status_record = _StatusRecord(
key=db.Key.from_path(_StatusRecord.kind(), self.pipeline1_key.name()),
message='My status message',
status_time=self.fill_time,
console_url='/path/to/console',
link_names=[db.Text(x) for x in ('one', 'two', 'three')],
link_urls=[db.Text(x) for x in ('/first', '/second', '/third')],
root_pipeline=self.pipeline1_key)
expected = {
'status': 'run',
'currentAttempt': 1,
'afterSlotKeys': [],
'statusTimeMs': 1291989316416L,
'outputs': {
'default': str(self.slot1_key)
},
'args': [],
'classPath': 'does.not.exist1',
'children': [],
'maxAttempts': 4,
'kwargs': {},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default',
'statusLinks': {
'three': '/third',
'two': '/second',
'one': '/first'
},
'statusConsoleUrl': '/path/to/console',
'statusMessage': 'My status message',
}
self.assertEquals(expected, pipeline._get_internal_status(
pipeline_key=self.pipeline1_key,
pipeline_dict={self.pipeline1_key: self.pipeline1_record},
slot_dict={self.slot1_key: self.slot1_record},
barrier_dict={self.barrier1_record.key(): self.barrier1_record},
status_dict={status_record.key(): status_record}))
def testGetInternalSlot_Missing(self):
"""Tests _get_internal_slot when the slot is missing."""
try:
pipeline._get_internal_slot(slot_key=self.slot1_key)
self.fail('Did not raise')
except pipeline.PipelineStatusError, e:
self.assertEquals(
'Could not find data for output slot key '
'"aglteS1hcHAtaWRyGgsSEV9BRV9QaXBlbGluZV9TbG90IgNyZWQM".',
str(e))
def testGetInternalSlot_Filled(self):
"""Tests _get_internal_slot when the slot is filled."""
self.slot1_record.status = _SlotRecord.FILLED
self.slot1_record.filler = self.pipeline2_key
self.slot1_record.fill_time = self.fill_time
self.slot1_record.root_pipeline = self.pipeline1_key
self.slot1_record.value_text = simplejson.dumps({
'one': 1234, 'two': 'hello'})
expected = {
'status': 'filled',
'fillerPipelineId': 'two',
'value': {'two': 'hello', 'one': 1234},
'fillTimeMs': 1291989316416L
}
self.assertEquals(
expected,
pipeline._get_internal_slot(
slot_key=self.slot1_key,
slot_dict={self.slot1_key: self.slot1_record}))
def testGetInternalSlot_Waiting(self):
"""Tests _get_internal_slot when the slot is waiting."""
self.slot1_record.status = _SlotRecord.WAITING
self.slot1_record.root_pipeline = self.pipeline1_key
expected = {
'status': 'waiting',
'fillerPipelineId': 'two',
}
self.assertEquals(
expected,
pipeline._get_internal_slot(
slot_key=self.slot1_key,
slot_dict={self.slot1_key: self.slot1_record},
filler_pipeline_key=self.pipeline2_key))
def testGetStatusTree_RootMissing(self):
"""Tests get_status_tree when the root pipeline is missing."""
try:
pipeline.get_status_tree(self.pipeline1_key.name())
self.fail('Did not raise')
except pipeline.PipelineStatusError, e:
self.assertEquals('Could not find pipeline ID "one"', str(e))
def testGetStatusTree_NotRoot(self):
"""Tests get_status_tree when the pipeline query is not the root."""
self.pipeline1_record.root_pipeline = self.pipeline2_key
db.put([self.pipeline1_record])
try:
pipeline.get_status_tree(self.pipeline1_key.name())
self.fail('Did not raise')
except pipeline.PipelineStatusError, e:
self.assertEquals('Pipeline ID "one" is not a root pipeline!', str(e))
def testGetStatusTree_ChildMissing(self):
"""Tests get_status_tree when a fanned out child pipeline is missing."""
self.pipeline1_record.fanned_out = [self.pipeline2_key]
db.put([self.pipeline1_record, self.barrier1_record, self.slot1_record])
try:
pipeline.get_status_tree(self.pipeline1_key.name())
self.fail('Did not raise')
except pipeline.PipelineStatusError, e:
self.assertEquals(
'Pipeline ID "one" points to child ID "two" which does not exist.',
str(e))
def testGetStatusTree_Example(self):
"""Tests a full example of a good get_status_tree response."""
self.pipeline1_record.fanned_out = [self.pipeline2_key, self.pipeline3_key]
self.slot1_record.root_pipeline = self.pipeline1_key
self.pipeline3_record.finalized_time = self.fill_time
# This one looks like a child, but it will be ignored since it is not
# reachable from the root via the fanned_out property.
bad_pipeline_key = db.Key.from_path(_PipelineRecord.kind(), 'ignored')
bad_pipeline_record = _PipelineRecord(
root_pipeline=self.pipeline1_key,
status=_PipelineRecord.RUN,
class_path='does.not.exist4',
# Bug in DB means we need to use the storage name here,
# not the local property name.
params=simplejson.dumps(self.params1),
key=bad_pipeline_key,
max_attempts=4)
db.put([
self.pipeline1_record, self.pipeline2_record, self.pipeline3_record,
self.barrier1_record, self.barrier2_record, self.barrier3_record,
self.slot1_record, self.slot2_record, self.slot3_record,
bad_pipeline_record])
expected = {
'rootPipelineId': 'one',
'pipelines': {
'three': {
'status': 'done',
'currentAttempt': 1L,
'afterSlotKeys': [],
'outputs': {
'default': str(self.slot3_key)
},
'args': [],
'classPath': 'does.not.exist3',
'children': [],
'endTimeMs': 1291989316416L,
'maxAttempts': 2L,
'kwargs': {},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default'
},
'two': {
'status': 'run',
'currentAttempt': 1L,
'afterSlotKeys': [],
'outputs': {
'default': str(self.slot2_key)
},
'args': [],
'classPath': 'does.not.exist2',
'children': [],
'maxAttempts': 3L,
'kwargs': {},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default'
},
'one': {
'status': 'run',
'currentAttempt': 1L,
'afterSlotKeys': [],
'outputs': {
'default': str(self.slot1_key)
},
'args': [],
'classPath': 'does.not.exist1',
'children': ['two', 'three'],
'maxAttempts': 4L,
'kwargs': {},
'backoffFactor': 2,
'backoffSeconds': 1,
'queueName': 'default'
}
},
'slots': {
str(self.slot2_key): {
'status': 'waiting',
'fillerPipelineId': 'two'
},
str(self.slot3_key): {
'status': 'waiting',
'fillerPipelineId': 'three'
}
}
}
self.assertEquals(
expected,
pipeline.get_status_tree(self.pipeline1_key.name()))
def testGetPipelineNames(self):
"""Tests the get_pipeline_names function."""
names = pipeline.get_pipeline_names()
self.assertTrue(None not in names) # No base-class Pipeline
self.assertIn('__main__.EchoSync', names)
found = False
for name in names:
# Name may be relative to another module, like 'foo.pipeline.common...'
found = 'pipeline.common.Delay' in name
if found:
break
self.assertTrue(found)
def testGetRootList(self):
"""Tests the get_root_list function."""
stage = NothingPipeline('one', 'two', three='red', four=1234)
stage.start(idempotence_key='banana')
stage.set_status('This one has a message')
stage2 = EchoSync('one')
stage2.start(idempotence_key='lemon')
found = pipeline.get_root_list()
self.assertFalse('cursor' in found) # No next page available
found_names = [
(p['pipelineId'], p['classPath']) for p in found['pipelines']]
expected = [
('lemon', '__main__.EchoSync'),
('banana', '__main__.NothingPipeline')
]
self.assertEquals(expected, found_names)
self.assertEquals('This one has a message',
found['pipelines'][1]['statusMessage'])
def testGetRootListCursor(self):
"""Tests the count and cursor behavior of get_root_list."""
NothingPipeline().start(idempotence_key='banana')
NothingPipeline().start(idempotence_key='lemon')
# Find newest
found = pipeline.get_root_list(count=1)
self.assertIn('cursor', found)
self.assertEquals(1, len(found['pipelines']))
self.assertEquals('lemon', found['pipelines'][0]['pipelineId'])
# Find next newest, and no cursor should be returned.
found = pipeline.get_root_list(count=1, cursor=found['cursor'])
self.assertFalse('cursor' in found)
self.assertEquals(1, len(found['pipelines']))
self.assertEquals('banana', found['pipelines'][0]['pipelineId'])
def testGetRootListClassPath(self):
"""Tests filtering a root list to a single class_path."""
NothingPipeline().start(idempotence_key='banana')
NothingPipeline().start(idempotence_key='lemon')
EchoSync('one').start(idempotence_key='tomato')
found = pipeline.get_root_list(class_path=NothingPipeline.class_path)
self.assertEquals(['__main__.NothingPipeline', '__main__.NothingPipeline'],
[p['classPath'] for p in found['pipelines']])
found = pipeline.get_root_list(class_path=EchoSync.class_path)
self.assertEquals(['__main__.EchoSync'],
[p['classPath'] for p in found['pipelines']])
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
| apache-2.0 | -3,168,384,846,593,172,500 | 35.989789 | 80 | 0.667932 | false |
david-caro/python-foreman | tests/test_defs.py | 1 | 2453 | #!/usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import, division, print_function
import os
import six
import pytest
from foreman.client import Foreman, Resource, requests
from .mocks import SessionMock
URL = 'foreman.example.com'
class HasConflictingMethods(Exception):
def __init__(self, resource, conflicting_methods):
super(HasConflictingMethods, self).__init__(
            '%s has conflicting methods:\n    ' % resource +
'\n '.join(str(method) for method in conflicting_methods)
)
def check_api(url, foreman_version, api_version, cache_dir):
cli = generate_api(url, foreman_version, api_version, cache_dir)
for value in six.itervalues(cli.__dict__):
if not isinstance(value, Resource):
continue
check_resource(resource=value)
def generate_api(url, foreman_version, api_version, cache_dir):
requests.Session = SessionMock(url, foreman_version)
print("Generating api")
return Foreman(
url,
version=foreman_version,
api_version=api_version,
cache_dir=cache_dir,
)
def check_resource(resource):
print("Checking resource: %s" % resource)
conflicting_methods = getattr(resource, '_conflicting_methods', [])
if getattr(resource, '_conflicting_methods', []):
raise HasConflictingMethods(
resource,
conflicting_methods,
)
assert resource._own_methods
def api_versions_in_dir(defs_dir):
api_versions = []
for json_file in os.listdir(defs_dir):
if json_file.endswith('.json'):
version = json_file.strip('.json').rsplit('-', 1)
api_versions.append(version)
return api_versions
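# For example, a definitions file named '1.15-v2.json' (assuming a
# <foreman_version>-v<api_version>.json naming scheme) yields the pair
# ['1.15', 'v2'].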
def all_api_versions():
defs_dirs = [
'foreman/definitions',
'tests/fixtures/definitions'
]
api_versions = []
for dirname in defs_dirs:
api_versions.extend(
api_versions_in_dir(defs_dir=dirname)
)
return api_versions
@pytest.mark.parametrize(
'api_version',
all_api_versions(),
ids=[':'.join(ver) for ver in all_api_versions()],
)
def test_apis(api_version, capsys):
try:
check_api(
url=URL,
foreman_version=api_version[0],
api_version=api_version[1].strip('v'),
cache_dir='tests/fixtures',
)
except HasConflictingMethods as error:
print('Got conflicting methods: %s' % error)
| gpl-2.0 | 4,626,727,628,531,730,000 | 24.821053 | 72 | 0.626987 | false |
hubo1016/vlcp | vlcp/service/sdn/ioprocessing.py | 1 | 77228 | '''
Created on 2016/4/13
:author: hubo
'''
from vlcp.service.sdn.flowbase import FlowBase
from vlcp.server.module import depend, ModuleNotification, call_api,api
import vlcp.service.sdn.ofpportmanager as ofpportmanager
import vlcp.service.sdn.ovsdbportmanager as ovsdbportmanager
import vlcp.service.kvdb.objectdb as objectdb
from vlcp.event.event import Event, withIndices, M_
from vlcp.event.runnable import RoutineContainer, RoutineException
from vlcp.config.config import defaultconfig
from vlcp.service.sdn.ofpmanager import FlowInitialize
from vlcp.utils.networkmodel import PhysicalPort, LogicalPort, PhysicalPortSet, LogicalPortSet, LogicalNetwork, \
PhysicalNetwork,SubNet,RouterPort,VRouter, \
PhysicalNetworkMap
from vlcp.utils.flowupdater import FlowUpdater
import itertools
from functools import partial
from contextlib import closing, suppress
from vlcp.utils.exceptions import WalkKeyNotRetrieved
from vlcp.protocol.openflow.openflow import OpenflowConnectionStateEvent
@withIndices('datapathid', 'vhost', 'connection', 'logicalportchanged', 'physicalportchanged',
'logicalnetworkchanged', 'physicalnetworkchanged')
class DataObjectChanged(Event):
pass
class IDAssigner(object):
def __init__(self):
self._indices = {}
self._revindices = {}
# Reserve 0 and 0xffff
self._revindices[0] = '<reserve0>'
self._revindices[0xffff] = '<reserve65535>'
self._lastindex = 1
def assign(self, key):
if key in self._indices:
return self._indices[key]
else:
ind = self._lastindex
while ind in self._revindices:
ind += 1
ind &= 0xffff
self._revindices[ind] = key
self._indices[key] = ind
self._lastindex = ind + 1
return ind
def unassign(self, keys):
for k in keys:
ind = self._indices.pop(k, None)
if ind is not None:
del self._revindices[ind]
def frozen(self):
return dict(self._indices)
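# Illustrative IDAssigner behaviour, derived from the class above: ids 0 and
# 0xffff are reserved, so assignment starts at 1 and is stable per key.
#   assigner = IDAssigner()
#   assigner.assign('lognet-a')      # -> 1
#   assigner.assign('lognet-b')      # -> 2
#   assigner.assign('lognet-a')      # -> 1 (same key, same id)
#   assigner.unassign(['lognet-a'])  # releases 1 for later reuse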
def _to32bitport(portno):
if portno >= 0xff00:
portno = 0xffff0000 | portno
return portno
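# Presumably OVSDB reports ofport as a 16-bit value while OpenFlow 1.3 uses
# 32-bit port numbers, where reserved ports (OFPP_MAX and above) sit at the top
# of the range: e.g. OFPP_LOCAL is 0xfffe in 16-bit form and 0xfffffffe in
# 32-bit form, so values >= 0xff00 are widened by setting the upper 16 bits.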
@withIndices('connection')
class FlowReadyEvent(Event):
pass
class IOFlowUpdater(FlowUpdater):
def __init__(self, connection, systemid, bridgename, parent):
FlowUpdater.__init__(self, connection, (PhysicalPortSet.default_key(),),
('ioprocessing', connection),
parent._logger)
self._walkerdict = {PhysicalPortSet.default_key(): partial(self._physicalport_walker, _portnames={})}
self._systemid = systemid
self._bridgename = bridgename
self._portnames = {}
self._portids = {}
self._currentportids = {}
self._currentportnames = {}
self._lastportids = {}
self._lastportnames = {}
self._lastnetworkids = {}
self._networkids = IDAssigner()
self._phynetworkids = IDAssigner()
self._physicalnetworkids = {}
self._logicalportkeys = set()
self._physicalportkeys = set()
self._logicalnetworkkeys = set()
self._physicalnetworkkeys = set()
self._original_initialkeys = []
self._append_initialkeys = []
self._parent = parent
self._flows_sent = set()
async def update_ports(self, ports, ovsdb_ports):
"""
        Called from the main module to update port information
"""
new_port_names = dict((p['name'], _to32bitport(p['ofport'])) for p in ovsdb_ports)
new_port_ids = dict((p['id'], _to32bitport(p['ofport'])) for p in ovsdb_ports if p['id'])
if new_port_names == self._portnames and new_port_ids == self._portids:
return
self._portnames.clear()
self._portnames.update(new_port_names)
self._portids.clear()
self._portids.update(new_port_ids)
logicalportkeys = [LogicalPort.default_key(id) for id in self._portids]
self._original_initialkeys = logicalportkeys + [PhysicalPortSet.default_key()]
self._initialkeys = tuple(itertools.chain(self._original_initialkeys, self._append_initialkeys))
phy_walker = partial(self._physicalport_walker, _portnames=new_port_names)
log_walker = partial(self._logicalport_walker, _portids=new_port_ids)
self._walkerdict = dict(itertools.chain(
((PhysicalPortSet.default_key(),phy_walker),),
((lgportkey,log_walker) for lgportkey in logicalportkeys)
))
self._portnames = new_port_names
self._portids = new_port_ids
await self.restart_walk()
async def flowready(self, logicalnetworkid, physicalportid):
# 1. Check the current updated flows
# 2. Check the current logicalnetwork and physicalport
# 3. Wait for:
# a. flow updated event
# b. data object change event
# c. connection down event
flowready_matcher = FlowReadyEvent.createMatcher(self._connection)
conn_down = self._connection.protocol.statematcher(self._connection)
dataobjectchanged = DataObjectChanged.createMatcher(None, None, self._connection)
while self._connection.connected:
currentlognetid = dict((id, n) for n, id in self._lastlognets)
currentphyportid = dict((id, (p, p.physicalnetwork)) for p, id in self._lastphyports)
if (logicalnetworkid, physicalportid) in self._flows_sent:
return True
elif logicalnetworkid in currentlognetid and physicalportid in currentphyportid:
conn_down = OpenflowConnectionStateEvent.createMatcher(None, None, OpenflowConnectionStateEvent.CONNECTION_DOWN, self._connection)
await M_(dataobjectchanged, conn_down, flowready_matcher)
else:
return False
return False
def _logicalport_walker(self, key, value, walk, save, _portids):
_, (id,) = LogicalPort._getIndices(key)
if id not in _portids:
return
save(key)
if value is None:
return
with suppress(WalkKeyNotRetrieved):
lognet = walk(value.network.getkey())
save(lognet.getkey())
phynet = walk(lognet.physicalnetwork.getkey())
save(phynet.getkey())
if hasattr(value,"subnet"):
with suppress(WalkKeyNotRetrieved):
subnet = walk(value.subnet.getkey())
save(subnet.getkey())
if hasattr(subnet,"router"):
routerport = walk(subnet.router.getkey())
save(routerport.getkey())
if hasattr(routerport,"router"):
router = walk(routerport.router.getkey())
save(router.getkey())
if router.interfaces.dataset():
for weakobj in router.interfaces.dataset():
with suppress(WalkKeyNotRetrieved):
weakrouterport = walk(weakobj.getkey())
save(weakrouterport.getkey())
s = walk(weakrouterport.subnet.getkey())
save(s.getkey())
lgnet = walk(s.network.getkey())
save(lgnet.getkey())
def _physicalport_walker(self, key, value, walk, save, _portnames):
save(key)
if value is None:
return
physet = value.set
for name in _portnames:
phyports = physet.find(PhysicalPort, self._connection.protocol.vhost, self._systemid, self._bridgename, name)
            # There might be more than one matching physical port rule for one port; pick the most specific one
namedict = {}
for p in phyports:
_, inds = PhysicalPort._getIndices(p.getkey())
name = inds[-1]
ind_key = [i != '%' for i in inds]
if name != '%':
if name in namedict:
if namedict[name][0] < ind_key:
namedict[name] = (ind_key, p)
else:
namedict[name] = (ind_key, p)
phyports = [v[1] for v in namedict.values()]
for p in phyports:
with suppress(WalkKeyNotRetrieved):
phyp = walk(p.getkey())
save(phyp.getkey())
phynet = walk(phyp.physicalnetwork.getkey())
save(phynet.getkey())
if self._parent.enable_router_forward:
phynetmap = walk(PhysicalNetworkMap.default_key(phynet.id))
save(phynetmap.getkey())
for weak_lgnet in phynetmap.logicnetworks.dataset():
with suppress(WalkKeyNotRetrieved):
lgnet = walk(weak_lgnet.getkey())
save(lgnet.getkey())
def reset_initialkeys(self,keys,values):
subnetkeys = [k for k,v in zip(keys,values) if v is not None and not v.isdeleted() and
v.isinstance(SubNet)]
routerportkeys = [k for k,v in zip(keys,values) if v is not None and not v.isdeleted() and
v.isinstance(RouterPort)]
portkeys = [k for k,v in zip(keys,values) if v is not None and not v.isdeleted() and
v.isinstance(VRouter)]
self._append_initialkeys = subnetkeys + routerportkeys + portkeys
self._initialkeys = tuple(itertools.chain(self._original_initialkeys, self._append_initialkeys))
async def walkcomplete(self, keys, values):
conn = self._connection
dpid = conn.openflow_datapathid
vhost = conn.protocol.vhost
_currentportids = dict(self._portids)
_currentportnames = dict(self._portnames)
updated_data = {}
current_data = {}
for cls, name, idg, assigner in ((LogicalPort, '_logicalportkeys', lambda x: _currentportids.get(x.id), None),
(PhysicalPort, '_physicalportkeys', lambda x: _currentportnames.get(x.name), None),
(LogicalNetwork, '_logicalnetworkkeys', lambda x: self._networkids.assign(x.getkey()), self._networkids),
(PhysicalNetwork, '_physicalnetworkkeys', lambda x: self._phynetworkids.assign(x.getkey()), self._phynetworkids),
):
objs = [v for v in values if v is not None and not v.isdeleted() and v.isinstance(cls)]
cv = [(o, oid) for o,oid in ((o, idg(o)) for o in objs) if oid is not None]
objkeys = set([v.getkey() for v,_ in cv])
oldkeys = getattr(self, name)
current_data[cls] = cv
if objkeys != oldkeys:
if assigner is not None:
assigner.unassign(oldkeys.difference(objkeys))
setattr(self, name, objkeys)
updated_data[cls] = True
if updated_data:
await self.wait_for_send(DataObjectChanged(dpid, vhost, conn, LogicalPort in updated_data,
PhysicalPort in updated_data,
LogicalNetwork in updated_data,
PhysicalNetwork in updated_data,
current = (current_data.get(LogicalPort),
current_data.get(PhysicalPort),
current_data.get(LogicalNetwork),
current_data.get(PhysicalNetwork))))
self._lastlognets = current_data.get(LogicalNetwork)
self._lastphyports = current_data.get(PhysicalPort)
self._currentportids = _currentportids
self._currentportnames = _currentportnames
async def updateflow(self, connection, addvalues, removevalues, updatedvalues):
# We must do these in order, each with a batch:
# 1. Remove flows
# 2. Remove groups
# 3. Add groups, modify groups
# 4. Add flows, modify flows
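        # This ordering matters because flows can reference groups: a flow that
        # outputs to a nonexistent group is rejected by the switch, so groups
        # must be created/modified before the flows that use them, and stale
        # flows are dropped before their groups are removed. Each stage below is
        # flushed with execute_commands() before the next one starts.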
try:
cmds = []
ofdef = connection.openflowdef
vhost = connection.protocol.vhost
input_table = self._parent._gettableindex('ingress', vhost)
input_next = self._parent._getnexttable('', 'ingress', vhost = vhost)
output_table = self._parent._gettableindex('egress', vhost)
            # Cache all IDs and save them as the last-seen values; we will need them for removals.
_lastportids = self._lastportids
_lastportnames = self._lastportnames
_lastnetworkids = self._lastnetworkids
_portids = dict(self._currentportids)
_portnames = dict(self._currentportnames)
_networkids = self._networkids.frozen()
exist_objs = dict((obj.getkey(), obj) for obj in self._savedresult if obj is not None and not obj.isdeleted())
            # We must generate actions from the network driver
phyportset = [obj for obj in self._savedresult if obj is not None and not obj.isdeleted() and obj.isinstance(PhysicalPort)]
phynetset = [obj for obj in self._savedresult if obj is not None and not obj.isdeleted() and obj.isinstance(PhysicalNetwork)]
lognetset = [obj for obj in self._savedresult if obj is not None and not obj.isdeleted() and obj.isinstance(LogicalNetwork)]
logportset = [obj for obj in self._savedresult if obj is not None and not obj.isdeleted() and obj.isinstance(LogicalPort)]
# If a port is both a logical port and a physical port, flows may conflict.
            # Remove the port from the physical port name dictionary if it is duplicated.
logportofps = set(_portids[lp.id] for lp in logportset if lp.id in _portids)
_portnames = dict((n,v) for n,v in _portnames.items() if v not in logportofps)
self._lastportids = _portids
self._lastportnames = _portnames
self._lastnetworkids = _networkids
# Group current ports by network for further use
phyportdict = {}
for p in phyportset:
phyportdict.setdefault(p.physicalnetwork, []).append(p)
lognetdict = {}
for n in lognetset:
lognetdict.setdefault(n.physicalnetwork, []).append(n)
logportdict = {}
for p in logportset:
logportdict.setdefault(p.network, []).append(p)
allapis = []
            # A logical network's group needs updating when:
            # 1. The logical network itself is updated
            # 2. The physical network of this logical network is updated
            # 3. A logical port is added to or removed from the network
            # 4. A physical port is added to or removed from the physical network
group_updates = set([obj for obj in updatedvalues if obj.isinstance(LogicalNetwork)])
group_updates.update(obj.network for obj in addvalues if obj.isinstance(LogicalPort))
#group_updates.update(obj.network for obj in updatedvalues if obj.isinstance(LogicalPort))
group_updates.update(exist_objs[obj.network.getkey()] for obj in removevalues if obj.isinstance(LogicalPort) and obj.network.getkey() in exist_objs)
updated_physicalnetworks = set(obj for obj in updatedvalues if obj.isinstance(PhysicalNetwork))
updated_physicalnetworks.update(p.physicalnetwork for p in addvalues if p.isinstance(PhysicalPort))
updated_physicalnetworks.update(exist_objs[p.physicalnetwork.getkey()] for p in removevalues if p.isinstance(PhysicalPort) and p.physicalnetwork.getkey() in exist_objs)
updated_physicalnetworks.update(p.physicalnetwork for p in updatedvalues if p.isinstance(PhysicalPort))
group_updates.update(lnet for pnet in updated_physicalnetworks
if pnet in lognetdict
for lnet in lognetdict[pnet])
_flows_sent = set()
for pnet in phynetset:
if pnet in lognetdict and pnet in phyportdict:
for lognet in lognetdict[pnet]:
netid = _networkids.get(lognet.getkey())
if netid is not None:
for p in phyportdict[pnet]:
if lognet in addvalues or lognet in group_updates or p in addvalues or p in updatedvalues:
pid = _portnames.get(p.name)
if pid is not None:
async def subr(lognet, p, netid, pid):
try:
r = await call_api(self, 'public', 'createioflowparts', {'connection': connection,
'logicalnetwork': lognet,
'physicalport': p,
'logicalnetworkid': netid,
'physicalportid': pid})
except Exception:
self._parent._logger.warning("Create flow parts failed for %r and %r", lognet, p, exc_info = True)
return None
else:
_flows_sent.add((netid, pid))
return ((lognet, p), r)
allapis.append(subr(lognet, p, netid, pid))
flowparts_result = await self.execute_all(allapis)
flowparts = dict(r for r in flowparts_result if r is not None)
if connection.protocol.disablenxext:
# Nicira extension is disabled, use metadata instead
# 64-bit metadata is used as:
# | 16-bit input network | 16-bit output network | 16-bit reserved | 16-bit output port |
# When first initialized, input network = output network = Logical Network no.
# output port = OFPP_ANY, reserved bits are 0x0000
# Currently used reserved bits:
# left-most (offset = 15, mask = 0x8000): allow output to IN_PORT
                # offset = 14, mask = 0x4000: 1 if IN_PORT is a logical port, 0 otherwise
# right-most (offset = 0, mask = 0x0001): VXLAN learned
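                # Illustrative value (assuming logical network id 0x0002 and a
                # packet entering from a logical port): the initial metadata
                # written below is
                #   (0x0002 << 48) | (0x0002 << 32) | (0x4000 << 16) | 0xffff
                #   = 0x000200024000ffff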
def create_input_instructions(lognetid, extra_actions, is_logport):
lognetid = (lognetid & 0xffff)
instructions = [ofdef.ofp_instruction_write_metadata(
metadata = (lognetid << 48) | (lognetid << 32) | ((0x4000 if is_logport else 0) << 16) | (ofdef.OFPP_ANY & 0xffff),
metadata_mask = 0xffffffffffffffff
),
ofdef.ofp_instruction_goto_table(table_id = input_next)
]
if extra_actions:
instructions.insert(0, ofdef.ofp_instruction_actions(actions = list(extra_actions)))
return instructions
def create_output_oxm(lognetid, portid, in_port = False):
r = [ofdef.create_oxm(ofdef.OXM_OF_METADATA_W, (portid & 0xFFFF) | (0x80000000 if in_port else 0) | ((lognetid & 0xFFFF) << 32), 0x0000FFFF8000FFFF)]
if in_port:
r.append(ofdef.create_oxm(ofdef.OXM_OF_IN_PORT, portid))
return r
else:
# With nicira extension, we store input network, output network and output port in REG4, REG5 and REG6
                # REG7 is used for the reserved bits
def create_input_instructions(lognetid, extra_actions, is_logport):
lognetid = (lognetid & 0xffff)
return [ofdef.ofp_instruction_actions(actions = [
ofdef.ofp_action_set_field(
field = ofdef.create_oxm(ofdef.NXM_NX_REG4, lognetid)
),
ofdef.ofp_action_set_field(
field = ofdef.create_oxm(ofdef.NXM_NX_REG5, lognetid)
),
ofdef.ofp_action_set_field(
field = ofdef.create_oxm(ofdef.NXM_NX_REG6, ofdef.OFPP_ANY)
),
ofdef.ofp_action_set_field(
field = ofdef.create_oxm(ofdef.NXM_NX_REG7, (0x4000 if is_logport else 0))
)
] + list(extra_actions)),
ofdef.ofp_instruction_goto_table(table_id = input_next)
]
def create_output_oxm(lognetid, portid, in_port = False):
r = [ofdef.create_oxm(ofdef.NXM_NX_REG5, lognetid),
ofdef.create_oxm(ofdef.NXM_NX_REG6, portid),
ofdef.create_oxm(ofdef.NXM_NX_REG7_W, 0x8000 if in_port else 0, 0x8000)]
if in_port:
r.append(ofdef.create_oxm(ofdef.OXM_OF_IN_PORT, portid))
return r
for obj in removevalues:
if obj.isinstance(LogicalPort):
ofport = _lastportids.get(obj.id)
if ofport is not None:
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT,
ofport
)])
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofport,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm()))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_IN_PORT,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT, ofport)])))
elif obj.isinstance(PhysicalPort):
ofport = _lastportnames.get(obj.name)
if ofport is not None:
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT,
ofport
)])
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffff0000,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm()))
elif obj.isinstance(LogicalNetwork):
groupid = _lastnetworkids.get(obj.getkey())
if groupid is not None:
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
cookie = 0x0001000000000000 | groupid,
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm()
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | groupid,
cookie_mask = 0xffff00000000ffff,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm()
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(groupid, ofdef.OFPP_ANY))
))
            # Never use flow mod to update an input flow of a physical port, because the input_oxm may change.
for obj in updatedvalues:
if obj.isinstance(PhysicalPort):
ofport = _portnames.get(obj.name)
if ofport is not None:
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT,
ofport
)])
))
elif obj.isinstance(LogicalNetwork):
groupid = _networkids.get(obj.getkey())
if groupid is not None:
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
cookie = 0x0001000000000000 | groupid,
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm()
))
elif obj.isinstance(PhysicalNetwork):
if obj in phyportdict:
for p in phyportdict[obj]:
ofport = _portnames.get(p.name)
if ofport is not None and p not in addvalues and p not in updatedvalues:
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
command = ofdef.OFPFC_DELETE,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT,
ofport
)])
))
await self.execute_commands(connection, cmds)
del cmds[:]
for obj in removevalues:
if obj.isinstance(LogicalNetwork):
groupid = _lastnetworkids.get(obj.getkey())
if groupid is not None:
cmds.append(ofdef.ofp_group_mod(command = ofdef.OFPGC_DELETE,
type = ofdef.OFPGT_ALL,
group_id = groupid
))
await self.execute_commands(connection, cmds)
del cmds[:]
disablechaining = connection.protocol.disablechaining
created_groups = {}
def create_buckets(obj, groupid):
# Generate buckets
buckets = [ofdef.ofp_bucket(actions=[ofdef.ofp_action_output(port = _portids[p.id])])
for p in logportdict[obj]
if p.id in _portids] if obj in logportdict else []
allactions = [ofdef.ofp_action_output(port = _portids[p.id])
for p in logportdict[obj]
if p.id in _portids] if obj in logportdict else []
disablegroup = False
if obj.physicalnetwork in phyportdict:
for p in phyportdict[obj.physicalnetwork]:
if (obj, p) in flowparts:
fp = flowparts[(obj,p)]
allactions.extend(fp[3])
if disablechaining and not disablegroup and any(a.type == ofdef.OFPAT_GROUP for a in fp[3]):
                                # We cannot use chaining. We use a long action list instead, and hope there are no conflicts
disablegroup = True
else:
buckets.append(ofdef.ofp_bucket(actions=list(fp[3])))
if disablegroup:
created_groups[groupid] = allactions
else:
created_groups[groupid] = [ofdef.ofp_action_group(group_id = groupid)]
return buckets
for obj in addvalues:
if obj.isinstance(LogicalNetwork):
groupid = _networkids.get(obj.getkey())
if groupid is not None:
cmds.append(ofdef.ofp_group_mod(command = ofdef.OFPGC_ADD,
type = ofdef.OFPGT_ALL,
group_id = groupid,
buckets = create_buckets(obj, groupid)
))
for obj in group_updates:
groupid = _networkids.get(obj.getkey())
if groupid is not None:
cmds.append(ofdef.ofp_group_mod(command = ofdef.OFPGC_MODIFY,
type = ofdef.OFPGT_ALL,
group_id = groupid,
buckets = create_buckets(obj, groupid)
))
await self.execute_commands(connection, cmds)
del cmds[:]
# There are 5 kinds of flows:
# 1. in_port = (Logical Port)
# 2. in_port = (Physical_Port), network = (Logical_Network)
# 3. out_port = (Logical Port)
# 4. out_port = (Physical_Port), network = (Logical_Network)
# 5. out_port = OFPP_ANY, network = (Logical_Network)
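            # Kinds 1 and 2 are installed in the ingress table and matched on
            # OXM_OF_IN_PORT; kinds 3, 4 and 5 are installed in the egress table
            # and matched on the output network/port carried in metadata (or the
            # NXM registers), with kind 5 serving as the per-network broadcast
            # entry.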
for obj in addvalues:
if obj.isinstance(LogicalPort):
ofport = _portids.get(obj.id)
lognetid = _networkids.get(obj.network.getkey())
if ofport is not None and lognetid is not None:
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT,
ofport
)]),
instructions = create_input_instructions(lognetid, [], True)
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport)),
instructions = [ofdef.ofp_instruction_actions(actions = [
ofdef.ofp_action_output(port = ofport)
])]
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport, True)),
instructions = [ofdef.ofp_instruction_actions(actions = [
ofdef.ofp_action_output(port = ofdef.OFPP_IN_PORT)
])]
))
            # Ignore updates of logical ports
# Physical port:
for obj in addvalues:
if obj.isinstance(PhysicalPort):
ofport = _portnames.get(obj.name)
if ofport is not None and obj.physicalnetwork in lognetdict:
for lognet in lognetdict[obj.physicalnetwork]:
lognetid = _networkids.get(lognet.getkey())
if lognetid is not None and (lognet, obj) in flowparts:
input_oxm, input_actions, output_actions, _, output_actions2 = flowparts[(lognet, obj)]
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
cookie = 0x0001000000000000 | lognetid,
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT,
ofport
)] + list(input_oxm)),
instructions = create_input_instructions(lognetid, input_actions, False)
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | lognetid | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport, False)),
instructions = [ofdef.ofp_instruction_actions(actions =
list(output_actions))]
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | lognetid | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport, True)),
instructions = [ofdef.ofp_instruction_actions(actions =
list(output_actions2))]
))
for lognet in addvalues:
if lognet.isinstance(LogicalNetwork):
lognetid = _networkids.get(lognet.getkey())
if lognetid is not None and lognet.physicalnetwork in phyportdict:
for obj in phyportdict[lognet.physicalnetwork]:
ofport = _portnames.get(obj.name)
if ofport is not None and (lognet, obj) in flowparts and obj not in addvalues:
input_oxm, input_actions, output_actions, _, output_actions2 = flowparts[(lognet, obj)]
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
cookie = 0x0001000000000000 | lognetid,
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT,
ofport
)] + input_oxm),
instructions = create_input_instructions(lognetid, input_actions, False)
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | lognetid | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport, False)),
instructions = [ofdef.ofp_instruction_actions(actions =
list(output_actions))]
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | lognetid | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport, True)),
instructions = [ofdef.ofp_instruction_actions(actions =
list(output_actions2))]
))
for obj in updatedvalues:
if obj.isinstance(PhysicalPort):
ofport = _portnames.get(obj.name)
if ofport is not None and obj.physicalnetwork in lognetdict:
for lognet in lognetdict[obj.physicalnetwork]:
lognetid = _networkids.get(lognet.getkey())
if lognetid is not None and (lognet, obj) in flowparts and not lognet in addvalues:
input_oxm, input_actions, output_actions, _, output_actions2 = flowparts[(lognet, obj)]
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
cookie = 0x0001000000000000 | lognetid,
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT,
ofport
)] + input_oxm),
instructions = create_input_instructions(lognetid, input_actions, False)
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | lognetid | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_MODIFY,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport, False)),
instructions = [ofdef.ofp_instruction_actions(actions =
list(output_actions))]
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | lognetid | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_MODIFY,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport, True)),
instructions = [ofdef.ofp_instruction_actions(actions =
list(output_actions2))]
))
for lognet in updatedvalues:
if lognet.isinstance(LogicalNetwork):
lognetid = _networkids.get(lognet.getkey())
if lognetid is not None and lognet.physicalnetwork in phyportdict:
for obj in phyportdict[lognet.physicalnetwork]:
ofport = _portnames.get(obj.name)
if ofport is not None and (lognet, obj) in flowparts and obj not in addvalues and obj not in updatedvalues:
input_oxm, input_actions, output_actions, _, output_actions2 = flowparts[(lognet, obj)]
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
cookie = 0x0001000000000000 | lognetid,
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT,
ofport
)] + input_oxm),
instructions = create_input_instructions(lognetid, input_actions, False)
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | lognetid | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_MODIFY,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport)),
instructions = [ofdef.ofp_instruction_actions(actions =
list(output_actions))]
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | lognetid | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_MODIFY,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport, True)),
instructions = [ofdef.ofp_instruction_actions(actions =
list(output_actions2))]
))
# Physical network is updated
for pnet in updatedvalues:
if pnet.isinstance(PhysicalNetwork) and pnet in lognetdict:
for lognet in lognetdict[pnet]:
if lognet.isinstance(LogicalNetwork):
lognetid = _networkids.get(lognet.getkey())
if lognetid is not None and lognet not in updatedvalues and lognet not in addvalues and lognet.physicalnetwork in phyportdict:
for obj in phyportdict[lognet.physicalnetwork]:
ofport = _portnames.get(obj.name)
if ofport is not None and (lognet, obj) in flowparts and obj not in addvalues and obj not in updatedvalues:
input_oxm, input_actions, output_actions, _, output_actions2 = flowparts[(lognet, obj)]
cmds.append(ofdef.ofp_flow_mod(table_id = input_table,
cookie = 0x0001000000000000 | lognetid,
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = [
ofdef.create_oxm(ofdef.OXM_OF_IN_PORT,
ofport
)] + input_oxm),
instructions = create_input_instructions(lognetid, input_actions, False)
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | lognetid | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_MODIFY,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport)),
instructions = [ofdef.ofp_instruction_actions(actions =
list(output_actions))]
))
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
cookie = 0x0001000000000000 | lognetid | ((ofport & 0xffff) << 16),
cookie_mask = 0xffffffffffffffff,
command = ofdef.OFPFC_MODIFY,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofport, True)),
instructions = [ofdef.ofp_instruction_actions(actions =
list(output_actions2))]
))
# Logical network broadcast
for lognet in addvalues:
if lognet.isinstance(LogicalNetwork):
lognetid = _networkids.get(lognet.getkey())
if lognetid is not None and lognetid in created_groups:
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofdef.OFPP_ANY)),
instructions = [ofdef.ofp_instruction_actions(actions =
created_groups.pop(lognetid))]
))
for lognetid, actions in created_groups.items():
cmds.append(ofdef.ofp_flow_mod(table_id = output_table,
command = ofdef.OFPFC_ADD,
priority = ofdef.OFP_DEFAULT_PRIORITY,
buffer_id = ofdef.OFP_NO_BUFFER,
out_port = ofdef.OFPP_ANY,
out_group = ofdef.OFPG_ANY,
match = ofdef.ofp_match_oxm(oxm_fields = create_output_oxm(lognetid, ofdef.OFPP_ANY)),
instructions = [ofdef.ofp_instruction_actions(actions = actions)]
))
# Ignore logical network update
await self.execute_commands(connection, cmds)
self._flows_sent = _flows_sent
await self.wait_for_send(FlowReadyEvent(self._connection))
except Exception:
self._parent._logger.warning("Update flow for connection %r failed with exception", connection, exc_info = True)
            # We don't want the whole flow update to stop, so ignore the exception and continue
@defaultconfig
@depend(ofpportmanager.OpenflowPortManager, ovsdbportmanager.OVSDBPortManager, objectdb.ObjectDB)
class IOProcessing(FlowBase):
"Ingress and Egress processing"
_tablerequest = (("ingress", (), ''),
("egress", ("ingress",),''))
# vHost map from OpenFlow vHost to OVSDB vHost. If the OpenFlow vHost is not found in this map,
# it will map to the default OVSDB vHost ('')
_default_vhostmap = {}
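    # For example (vHost names assumed): {'vhost-of': 'vhost-ovsdb'} pairs
    # OpenFlow connections on vHost 'vhost-of' with OVSDB data from vHost
    # 'vhost-ovsdb'.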
    # Enable forwarding in this server, so it becomes a forwarding node (also known as an N/S gateway)
_default_enable_router_forward = False
def __init__(self, server):
FlowBase.__init__(self, server)
self.apiroutine = RoutineContainer(self.scheduler)
self.apiroutine.main = self._main
self.routines.append(self.apiroutine)
self._flowupdaters = {}
self._portchanging = set()
self._portchanged = set()
self.createAPI(api(self.flowready, self.apiroutine))
async def flowready(self, connection, logicalnetworkid, physicalportid):
"""
        Wait until flows are sent to the switch
:param connection: Openflow connection
:param logicalnetworkid: logical network id (integer)
:param physicalportid: physical port id (integer)
        :return: If the connection/network/port does not exist, return False; otherwise return True
"""
if connection not in self._flowupdaters:
return False
else:
return await self._flowupdaters[connection].flowready(logicalnetworkid, physicalportid)
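    # A minimal usage sketch from another module's routine (identifier values
    # assumed): the API is exposed as 'ioprocessing'/'flowready' and can be
    # awaited through call_api:
    #   ready = await call_api(routine_container, 'ioprocessing', 'flowready',
    #                          {'connection': conn,
    #                           'logicalnetworkid': 2,
    #                           'physicalportid': 5})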
async def _main(self):
flow_init = FlowInitialize.createMatcher(_ismatch = lambda x: self.vhostbind is None or x.vhost in self.vhostbind)
port_change = ModuleNotification.createMatcher("openflowportmanager", "update", _ismatch = lambda x: self.vhostbind is None or x.vhost in self.vhostbind)
while True:
e, m = await M_(flow_init, port_change)
c = e.connection
if m is flow_init:
self.apiroutine.subroutine(self._init_conn(c))
else:
if e.reason == 'disconnected':
self.apiroutine.subroutine(self._remove_conn(c))
else:
self.apiroutine.subroutine(self._portchange(c))
async def _init_conn(self, conn):
# Default drop
await conn.protocol.batch((conn.openflowdef.ofp_flow_mod(table_id = self._gettableindex("ingress", conn.protocol.vhost),
command = conn.openflowdef.OFPFC_ADD,
priority = 0,
buffer_id = conn.openflowdef.OFP_NO_BUFFER,
match = conn.openflowdef.ofp_match_oxm(),
instructions = [conn.openflowdef.ofp_instruction_actions(
type = conn.openflowdef.OFPIT_CLEAR_ACTIONS
)]
),
conn.openflowdef.ofp_flow_mod(table_id = self._gettableindex("egress", conn.protocol.vhost),
command = conn.openflowdef.OFPFC_ADD,
priority = 0,
buffer_id = conn.openflowdef.OFP_NO_BUFFER,
match = conn.openflowdef.ofp_match_oxm(),
instructions = [conn.openflowdef.ofp_instruction_actions(
type = conn.openflowdef.OFPIT_CLEAR_ACTIONS
)]
)), conn, self.apiroutine)
if conn in self._flowupdaters:
self._flowupdaters[conn].close()
datapath_id = conn.openflow_datapathid
ovsdb_vhost = self.vhostmap.get(conn.protocol.vhost, "")
bridgename, systemid, _ = await call_api(self.apiroutine, 'ovsdbmanager', 'waitbridgeinfo',
{'datapathid': datapath_id,
'vhost': ovsdb_vhost})
new_updater = IOFlowUpdater(conn, systemid, bridgename, self)
self._flowupdaters[conn] = new_updater
new_updater.start()
await self._portchange(conn)
async def _remove_conn(self, conn):
        # No need to modify flows
if conn in self._flowupdaters:
self._flowupdaters[conn].close()
del self._flowupdaters[conn]
async def _portchange(self, conn):
# Do not re-enter
if conn in self._portchanging:
self._portchanged.add(conn)
return
self._portchanging.add(conn)
last_portno = set()
try:
while True:
self._portchanged.discard(conn)
flow_updater = self._flowupdaters.get(conn)
if flow_updater is None:
break
if not conn.connected:
break
datapath_id = conn.openflow_datapathid
ovsdb_vhost = self.vhostmap.get(conn.protocol.vhost, "")
ovsdb_update_event_matcher = ModuleNotification.createMatcher(
"ovsdbportmanager",
"update",
_ismatch = lambda x: x.vhost == ovsdb_vhost and x.datapathid == datapath_id)
ovsdb_updated = False
def _ovsdb_update_callback(event, matcher):
nonlocal ovsdb_updated
ovsdb_updated = True
ports, ovsdb_ports = \
await self.apiroutine.with_callback(
self.apiroutine.execute_all(
[call_api(self.apiroutine, 'openflowportmanager', 'getports', {'datapathid': datapath_id,
'vhost': conn.protocol.vhost}),
call_api(self.apiroutine, 'ovsdbportmanager', 'getports', {'datapathid': datapath_id,
'vhost': ovsdb_vhost})]),
_ovsdb_update_callback,
ovsdb_update_event_matcher
)
if conn in self._portchanged or ovsdb_updated:
# Retrieve again
continue
if not conn.connected:
self._portchanged.discard(conn)
return
ovsdb_port_dict = {p['ofport']: p for p in ovsdb_ports}
# Choose the intersection of ports from two sources
port_pairs = [(p, ovsdb_port_dict[p.port_no & 0xffff])
for p in ports
if (p.port_no & 0xffff) in ovsdb_port_dict]
current_portno = {p.port_no for p, _ in port_pairs}
                # Fetch the flow updater again to avoid races with concurrent changes
flow_updater = self._flowupdaters.get(conn)
if flow_updater is None:
break
if not conn.connected:
break
if conn in self._portchanged or ovsdb_updated:
continue
# If all openflow ports have their OVSDB ports, we are in sync and can exit
if all((p.port_no & 0xffff) in ovsdb_port_dict for p in ports):
if current_portno != last_portno:
if port_pairs:
await self.apiroutine.with_callback(
flow_updater.update_ports(*zip(*port_pairs)),
_ovsdb_update_callback,
ovsdb_update_event_matcher
)
else:
await self.apiroutine.with_callback(
flow_updater.update_ports((), ()),
_ovsdb_update_callback,
ovsdb_update_event_matcher
)
break
else:
# Partially update
if current_portno and current_portno != last_portno:
if port_pairs:
await self.apiroutine.with_callback(
flow_updater.update_ports(*zip(*port_pairs)),
_ovsdb_update_callback,
ovsdb_update_event_matcher
)
else:
await self.apiroutine.with_callback(
flow_updater.update_ports((), ()),
_ovsdb_update_callback,
ovsdb_update_event_matcher
)
last_portno = current_portno
                # Some OpenFlow ports do not have OVSDB information; this may be caused by:
                # 1. A port was added to OpenFlow but has not yet been retrieved from OVSDB
                # 2. A port was deleted from OVSDB but OpenFlow has not yet been updated
                # 3. Some other synchronization problem
port_change = ModuleNotification.createMatcher("openflowportmanager", "update",
_ismatch = lambda x: x.connection == conn)
conndown = conn.protocol.statematcher(conn)
timeout, _, m = await self.apiroutine.wait_with_timeout(5,
port_change,
ovsdb_update_event_matcher,
conndown)
if timeout:
self._logger.warning('OpenFlow ports may not be synchronized. Try resync...')
# Connection is up but ports are not synchronized, try resync
await self.apiroutine.execute_all([call_api(self.apiroutine, 'openflowportmanager', 'resync',
{'datapathid': datapath_id,
'vhost': conn.protocol.vhost}),
call_api(self.apiroutine, 'ovsdbportmanager', 'resync',
{'datapathid': datapath_id,
'vhost': ovsdb_vhost})])
# Wait for a while
await self.apiroutine.wait_with_timeout(5)
continue
elif m is conndown:
# Connection lost, no longer need to trace the port changes
break
finally:
self._portchanging.remove(conn)
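    # Illustrative note: _portchange() coalesces overlapping notifications with
    # two sets -- _portchanging marks connections currently being processed and
    # _portchanged marks connections whose ports changed again mid-processing.
    # The same guard pattern, reduced to its core (names here are hypothetical):
    #
    #     async def refresh(conn):
    #         if conn in in_progress:       # a refresh is already running;
    #             pending.add(conn)         # just remember it went stale
    #             return
    #         in_progress.add(conn)
    #         try:
    #             while True:
    #                 pending.discard(conn)
    #                 await do_refresh(conn)
    #                 if conn not in pending:
    #                     break             # nothing changed while refreshing
    #         finally:
    #             in_progress.remove(conn)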
| apache-2.0 | 3,838,357,203,293,463,600 | 69.016319 | 180 | 0.406601 | false |
postlund/home-assistant | homeassistant/components/amcrest/camera.py | 1 | 19430 | """Support for Amcrest IP cameras."""
import asyncio
from datetime import timedelta
from functools import partial
import logging
from amcrest import AmcrestError
from haffmpeg.camera import CameraMjpeg
import voluptuous as vol
from homeassistant.components.camera import (
CAMERA_SERVICE_SCHEMA,
SUPPORT_ON_OFF,
SUPPORT_STREAM,
Camera,
)
from homeassistant.components.ffmpeg import DATA_FFMPEG
from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON
from homeassistant.helpers.aiohttp_client import (
async_aiohttp_proxy_stream,
async_aiohttp_proxy_web,
async_get_clientsession,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
CAMERA_WEB_SESSION_TIMEOUT,
CAMERAS,
COMM_TIMEOUT,
DATA_AMCREST,
DEVICES,
SERVICE_UPDATE,
SNAPSHOT_TIMEOUT,
)
from .helpers import log_update_error, service_signal
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=15)
STREAM_SOURCE_LIST = ["snapshot", "mjpeg", "rtsp"]
_SRV_EN_REC = "enable_recording"
_SRV_DS_REC = "disable_recording"
_SRV_EN_AUD = "enable_audio"
_SRV_DS_AUD = "disable_audio"
_SRV_EN_MOT_REC = "enable_motion_recording"
_SRV_DS_MOT_REC = "disable_motion_recording"
_SRV_GOTO = "goto_preset"
_SRV_CBW = "set_color_bw"
_SRV_TOUR_ON = "start_tour"
_SRV_TOUR_OFF = "stop_tour"
_ATTR_PRESET = "preset"
_ATTR_COLOR_BW = "color_bw"
_CBW_COLOR = "color"
_CBW_AUTO = "auto"
_CBW_BW = "bw"
_CBW = [_CBW_COLOR, _CBW_AUTO, _CBW_BW]
_SRV_GOTO_SCHEMA = CAMERA_SERVICE_SCHEMA.extend(
{vol.Required(_ATTR_PRESET): vol.All(vol.Coerce(int), vol.Range(min=1))}
)
_SRV_CBW_SCHEMA = CAMERA_SERVICE_SCHEMA.extend(
{vol.Required(_ATTR_COLOR_BW): vol.In(_CBW)}
)
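# Maps each extra Amcrest service to a tuple of (request schema, name of the
# AmcrestCam coroutine handling it, service-data attributes forwarded to it).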
CAMERA_SERVICES = {
_SRV_EN_REC: (CAMERA_SERVICE_SCHEMA, "async_enable_recording", ()),
_SRV_DS_REC: (CAMERA_SERVICE_SCHEMA, "async_disable_recording", ()),
_SRV_EN_AUD: (CAMERA_SERVICE_SCHEMA, "async_enable_audio", ()),
_SRV_DS_AUD: (CAMERA_SERVICE_SCHEMA, "async_disable_audio", ()),
_SRV_EN_MOT_REC: (CAMERA_SERVICE_SCHEMA, "async_enable_motion_recording", ()),
_SRV_DS_MOT_REC: (CAMERA_SERVICE_SCHEMA, "async_disable_motion_recording", ()),
_SRV_GOTO: (_SRV_GOTO_SCHEMA, "async_goto_preset", (_ATTR_PRESET,)),
_SRV_CBW: (_SRV_CBW_SCHEMA, "async_set_color_bw", (_ATTR_COLOR_BW,)),
_SRV_TOUR_ON: (CAMERA_SERVICE_SCHEMA, "async_start_tour", ()),
_SRV_TOUR_OFF: (CAMERA_SERVICE_SCHEMA, "async_stop_tour", ()),
}
_BOOL_TO_STATE = {True: STATE_ON, False: STATE_OFF}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up an Amcrest IP Camera."""
if discovery_info is None:
return
name = discovery_info[CONF_NAME]
device = hass.data[DATA_AMCREST][DEVICES][name]
async_add_entities([AmcrestCam(name, device, hass.data[DATA_FFMPEG])], True)
class CannotSnapshot(Exception):
"""Conditions are not valid for taking a snapshot."""
class AmcrestCam(Camera):
"""An implementation of an Amcrest IP camera."""
def __init__(self, name, device, ffmpeg):
"""Initialize an Amcrest camera."""
super().__init__()
self._name = name
self._api = device.api
self._ffmpeg = ffmpeg
self._ffmpeg_arguments = device.ffmpeg_arguments
self._stream_source = device.stream_source
self._resolution = device.resolution
self._token = self._auth = device.authentication
self._control_light = device.control_light
self._is_recording = False
self._motion_detection_enabled = None
self._brand = None
self._model = None
self._audio_enabled = None
self._motion_recording_enabled = None
self._color_bw = None
self._rtsp_url = None
self._snapshot_task = None
self._unsub_dispatcher = []
self._update_succeeded = False
def _check_snapshot_ok(self):
available = self.available
if not available or not self.is_on:
_LOGGER.warning(
"Attempt to take snapshot when %s camera is %s",
self.name,
"offline" if not available else "off",
)
raise CannotSnapshot
async def _async_get_image(self):
try:
# Send the request to snap a picture and return raw jpg data
# Snapshot command needs a much longer read timeout than other commands.
return await self.hass.async_add_executor_job(
partial(
self._api.snapshot,
timeout=(COMM_TIMEOUT, SNAPSHOT_TIMEOUT),
stream=False,
)
)
except AmcrestError as error:
log_update_error(_LOGGER, "get image from", self.name, "camera", error)
return None
finally:
self._snapshot_task = None
async def async_camera_image(self):
"""Return a still image response from the camera."""
_LOGGER.debug("Take snapshot from %s", self._name)
try:
# Amcrest cameras only support one snapshot command at a time.
# Hence need to wait if a previous snapshot has not yet finished.
# Also need to check that camera is online and turned on before each wait
            # and before initiating snapshot.
while self._snapshot_task:
self._check_snapshot_ok()
_LOGGER.debug("Waiting for previous snapshot from %s ...", self._name)
await self._snapshot_task
self._check_snapshot_ok()
# Run snapshot command in separate Task that can't be cancelled so
# 1) it's not possible to send another snapshot command while camera is
# still working on a previous one, and
# 2) someone will be around to catch any exceptions.
self._snapshot_task = self.hass.async_create_task(self._async_get_image())
return await asyncio.shield(self._snapshot_task)
except CannotSnapshot:
return None
async def handle_async_mjpeg_stream(self, request):
"""Return an MJPEG stream."""
# The snapshot implementation is handled by the parent class
if self._stream_source == "snapshot":
return await super().handle_async_mjpeg_stream(request)
if not self.available:
_LOGGER.warning(
"Attempt to stream %s when %s camera is offline",
self._stream_source,
self.name,
)
return None
if self._stream_source == "mjpeg":
# stream an MJPEG image stream directly from the camera
websession = async_get_clientsession(self.hass)
streaming_url = self._api.mjpeg_url(typeno=self._resolution)
stream_coro = websession.get(
streaming_url, auth=self._token, timeout=CAMERA_WEB_SESSION_TIMEOUT
)
return await async_aiohttp_proxy_web(self.hass, request, stream_coro)
# streaming via ffmpeg
streaming_url = self._rtsp_url
stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
await stream.open_camera(streaming_url, extra_cmd=self._ffmpeg_arguments)
try:
stream_reader = await stream.get_reader()
return await async_aiohttp_proxy_stream(
self.hass,
request,
stream_reader,
self._ffmpeg.ffmpeg_stream_content_type,
)
finally:
await stream.close()
# Entity property overrides
@property
def should_poll(self) -> bool:
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
return True
@property
def name(self):
"""Return the name of this camera."""
return self._name
@property
def device_state_attributes(self):
"""Return the Amcrest-specific camera state attributes."""
attr = {}
if self._audio_enabled is not None:
attr["audio"] = _BOOL_TO_STATE.get(self._audio_enabled)
if self._motion_recording_enabled is not None:
attr["motion_recording"] = _BOOL_TO_STATE.get(
self._motion_recording_enabled
)
if self._color_bw is not None:
attr[_ATTR_COLOR_BW] = self._color_bw
return attr
@property
def available(self):
"""Return True if entity is available."""
return self._api.available
@property
def supported_features(self):
"""Return supported features."""
return SUPPORT_ON_OFF | SUPPORT_STREAM
# Camera property overrides
@property
def is_recording(self):
"""Return true if the device is recording."""
return self._is_recording
@property
def brand(self):
"""Return the camera brand."""
return self._brand
@property
def motion_detection_enabled(self):
"""Return the camera motion detection status."""
return self._motion_detection_enabled
@property
def model(self):
"""Return the camera model."""
return self._model
async def stream_source(self):
"""Return the source of the stream."""
return self._rtsp_url
@property
def is_on(self):
"""Return true if on."""
return self.is_streaming
# Other Entity method overrides
async def async_on_demand_update(self):
"""Update state."""
self.async_schedule_update_ha_state(True)
async def async_added_to_hass(self):
"""Subscribe to signals and add camera to list."""
for service, params in CAMERA_SERVICES.items():
self._unsub_dispatcher.append(
async_dispatcher_connect(
self.hass,
service_signal(service, self.entity_id),
getattr(self, params[1]),
)
)
self._unsub_dispatcher.append(
async_dispatcher_connect(
self.hass,
service_signal(SERVICE_UPDATE, self._name),
self.async_on_demand_update,
)
)
self.hass.data[DATA_AMCREST][CAMERAS].append(self.entity_id)
async def async_will_remove_from_hass(self):
"""Remove camera from list and disconnect from signals."""
self.hass.data[DATA_AMCREST][CAMERAS].remove(self.entity_id)
for unsub_dispatcher in self._unsub_dispatcher:
unsub_dispatcher()
def update(self):
"""Update entity status."""
if not self.available or self._update_succeeded:
if not self.available:
self._update_succeeded = False
return
_LOGGER.debug("Updating %s camera", self.name)
try:
if self._brand is None:
resp = self._api.vendor_information.strip()
if resp.startswith("vendor="):
self._brand = resp.split("=")[-1]
else:
self._brand = "unknown"
if self._model is None:
resp = self._api.device_type.strip()
if resp.startswith("type="):
self._model = resp.split("=")[-1]
else:
self._model = "unknown"
self.is_streaming = self._api.video_enabled
self._is_recording = self._api.record_mode == "Manual"
self._motion_detection_enabled = self._api.is_motion_detector_on()
self._audio_enabled = self._api.audio_enabled
self._motion_recording_enabled = self._api.is_record_on_motion_detection()
self._color_bw = _CBW[self._api.day_night_color]
self._rtsp_url = self._api.rtsp_url(typeno=self._resolution)
except AmcrestError as error:
log_update_error(_LOGGER, "get", self.name, "camera attributes", error)
self._update_succeeded = False
else:
self._update_succeeded = True
# Other Camera method overrides
def turn_off(self):
"""Turn off camera."""
self._enable_video_stream(False)
def turn_on(self):
"""Turn on camera."""
self._enable_video_stream(True)
def enable_motion_detection(self):
"""Enable motion detection in the camera."""
self._enable_motion_detection(True)
def disable_motion_detection(self):
"""Disable motion detection in camera."""
self._enable_motion_detection(False)
# Additional Amcrest Camera service methods
async def async_enable_recording(self):
"""Call the job and enable recording."""
await self.hass.async_add_executor_job(self._enable_recording, True)
async def async_disable_recording(self):
"""Call the job and disable recording."""
await self.hass.async_add_executor_job(self._enable_recording, False)
async def async_enable_audio(self):
"""Call the job and enable audio."""
await self.hass.async_add_executor_job(self._enable_audio, True)
async def async_disable_audio(self):
"""Call the job and disable audio."""
await self.hass.async_add_executor_job(self._enable_audio, False)
async def async_enable_motion_recording(self):
"""Call the job and enable motion recording."""
await self.hass.async_add_executor_job(self._enable_motion_recording, True)
async def async_disable_motion_recording(self):
"""Call the job and disable motion recording."""
await self.hass.async_add_executor_job(self._enable_motion_recording, False)
async def async_goto_preset(self, preset):
"""Call the job and move camera to preset position."""
await self.hass.async_add_executor_job(self._goto_preset, preset)
async def async_set_color_bw(self, color_bw):
"""Call the job and set camera color mode."""
await self.hass.async_add_executor_job(self._set_color_bw, color_bw)
async def async_start_tour(self):
"""Call the job and start camera tour."""
await self.hass.async_add_executor_job(self._start_tour, True)
async def async_stop_tour(self):
"""Call the job and stop camera tour."""
await self.hass.async_add_executor_job(self._start_tour, False)
# Methods to send commands to Amcrest camera and handle errors
def _enable_video_stream(self, enable):
"""Enable or disable camera video stream."""
# Given the way the camera's state is determined by
# is_streaming and is_recording, we can't leave
# recording on if video stream is being turned off.
if self.is_recording and not enable:
self._enable_recording(False)
try:
self._api.video_enabled = enable
except AmcrestError as error:
log_update_error(
_LOGGER,
"enable" if enable else "disable",
self.name,
"camera video stream",
error,
)
else:
self.is_streaming = enable
self.schedule_update_ha_state()
if self._control_light:
self._enable_light(self._audio_enabled or self.is_streaming)
def _enable_recording(self, enable):
"""Turn recording on or off."""
# Given the way the camera's state is determined by
# is_streaming and is_recording, we can't leave
# video stream off if recording is being turned on.
if not self.is_streaming and enable:
self._enable_video_stream(True)
rec_mode = {"Automatic": 0, "Manual": 1}
try:
self._api.record_mode = rec_mode["Manual" if enable else "Automatic"]
except AmcrestError as error:
log_update_error(
_LOGGER,
"enable" if enable else "disable",
self.name,
"camera recording",
error,
)
else:
self._is_recording = enable
self.schedule_update_ha_state()
def _enable_motion_detection(self, enable):
"""Enable or disable motion detection."""
try:
self._api.motion_detection = str(enable).lower()
except AmcrestError as error:
log_update_error(
_LOGGER,
"enable" if enable else "disable",
self.name,
"camera motion detection",
error,
)
else:
self._motion_detection_enabled = enable
self.schedule_update_ha_state()
def _enable_audio(self, enable):
"""Enable or disable audio stream."""
try:
self._api.audio_enabled = enable
except AmcrestError as error:
log_update_error(
_LOGGER,
"enable" if enable else "disable",
self.name,
"camera audio stream",
error,
)
else:
self._audio_enabled = enable
self.schedule_update_ha_state()
if self._control_light:
self._enable_light(self._audio_enabled or self.is_streaming)
def _enable_light(self, enable):
"""Enable or disable indicator light."""
try:
self._api.command(
"configManager.cgi?action=setConfig&LightGlobal[0].Enable={}".format(
str(enable).lower()
)
)
except AmcrestError as error:
log_update_error(
_LOGGER,
"enable" if enable else "disable",
self.name,
"indicator light",
error,
)
def _enable_motion_recording(self, enable):
"""Enable or disable motion recording."""
try:
self._api.motion_recording = str(enable).lower()
except AmcrestError as error:
log_update_error(
_LOGGER,
"enable" if enable else "disable",
self.name,
"camera motion recording",
error,
)
else:
self._motion_recording_enabled = enable
self.schedule_update_ha_state()
def _goto_preset(self, preset):
"""Move camera position and zoom to preset."""
try:
self._api.go_to_preset(action="start", preset_point_number=preset)
except AmcrestError as error:
log_update_error(
_LOGGER, "move", self.name, f"camera to preset {preset}", error
)
def _set_color_bw(self, cbw):
"""Set camera color mode."""
try:
self._api.day_night_color = _CBW.index(cbw)
except AmcrestError as error:
log_update_error(
_LOGGER, "set", self.name, f"camera color mode to {cbw}", error
)
else:
self._color_bw = cbw
self.schedule_update_ha_state()
def _start_tour(self, start):
"""Start camera tour."""
try:
self._api.tour(start=start)
except AmcrestError as error:
log_update_error(
_LOGGER, "start" if start else "stop", self.name, "camera tour", error
)
| apache-2.0 | -767,744,544,592,012,000 | 34.263158 | 86 | 0.584457 | false |
alexforencich/python-ivi | ivi/rigol/rigolDS2072A.py | 1 | 1667 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .rigolDS2000A import *
class rigolDS2072A(rigolDS2000A):
"Rigol DS2072A IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'DS2072A')
super(rigolDS2072A, self).__init__(*args, **kwargs)
self._analog_channel_count = 2
self._digital_channel_count = 0
self._channel_count = self._analog_channel_count + self._digital_channel_count
self._bandwidth = 70e6
self._bandwidth_limit = {'20M': 20e6}
self._init_channels()
| mit | 4,356,496,467,592,193,000 | 36.886364 | 86 | 0.744451 | false |
interactiveaudiolab/nussl | nussl/core/audio_signal.py | 1 | 66286 | import copy
import numbers
import os.path
import warnings
from collections import namedtuple
import audioread
import librosa
import numpy as np
import scipy.io.wavfile as wav
import scipy
from scipy.signal import check_COLA
import soundfile as sf
from . import constants
from . import utils
from . import masks
__all__ = ['AudioSignal', 'STFTParams', 'AudioSignalException']
STFTParams = namedtuple('STFTParams',
['window_length', 'hop_length', 'window_type'],
defaults=(None, None, None)
)
"""
STFTParams object is a container that holds STFT parameters - window_length,
hop_length, and window_type. Not all parameters need to be specified. Ones that
are not specified will be inferred by the AudioSignal parameters and the settings
in `nussl.core.constants`.
"""
class AudioSignal(object):
"""
**Overview**
:class:`AudioSignal` is the main entry and exit point for all source separation algorithms
in *nussl*. The :class:`AudioSignal` class is a general container for all things related to
audio data. It contains utilities for:
* Input and output from an array or from a file,
* Time-series and frequency domain manipulation,
* Plotting and visualizing,
* Playing audio within a terminal or jupyter notebook,
* Applying a mask to estimate signals
and more. The :class:`AudioSignal` class is used in all source separation objects in *nussl*.
:class:`AudioSignal` object stores time-series audio data as a 2D ``numpy`` array in
:attr:`audio_data` (see :attr:`audio_data` for details) and stores Short-Time Fourier Transform
data as 3D ``numpy`` array in :ref:`stft_data` (see :attr:`stft_data` for details).
**Initialization**
There are a few options for initializing an :class:`AudioSignal` object. The first is to
initialize an empty :class:`AudioSignal` object, with no parameters:
>>> import nussl
>>> signal = nussl.AudioSignal()
In this case, there is no data stored in :attr:`audio_data` or in :attr:`stft_data`, though
these attributes can be updated at any time after the object has been created.
Additionally, an :class:`AudioSignal` object can be loaded with exactly one of the following:
1. A path to an input audio file (see :func:`load_audio_from_file` for details).
2. A `numpy` array of 1D or 2D real-valued time-series audio data.
3. A `numpy` array of 2D or 3D complex-valued time-frequency STFT data.
:class:`AudioSignal` will throw an error if it is initialized with more than one of the
previous at once.
Here are examples of all three of these cases:
.. code-block:: python
:linenos:
import numpy as np
import nussl
# Initializing an empty AudioSignal object:
sig_empty = nussl.AudioSignal()
# Initializing from a path:
file_path = 'my/awesome/mixture.wav'
sig_path = nussl.AudioSignal(file_path)
# Initializing with a 1D or 2D numpy array containing audio data:
aud_1d = np.sin(np.linspace(0.0, 1.0, 48000))
sig_1d = nussl.AudioSignal(audio_data_array=aud_1d, sample_rate=48000)
# FYI: The shape doesn't matter, nussl will correct for it
aud_2d = np.array([aud_1d, -2 * aud_1d])
sig_2d = nussl.AudioSignal(audio_data_array=aud_2d)
# Initializing with a 2D or 3D numpy array containing STFT data:
        stft_2d = np.random.rand(513, 300) + 1j * np.random.rand(513, 300)
sig_stft_2d = nussl.AudioSignal(stft=stft_2d)
# Two channels of STFT data:
stft_3d = nussl.utils.complex_randn((513, 300, 2))
sig_stft_3d = nussl.AudioSignal(stft=stft_3d)
# Initializing with more than one of the above methods will raise an exception:
sig_exception = nussl.AudioSignal(audio_data_array=aud_2d, stft=stft_2d)
When initializing from a path, :class:`AudioSignal` can read many types of audio files,
provided that your computer has the backends installed to understand the corresponding codecs.
*nussl* uses ``librosa``'s `load` function to read in audio data. See librosa's documentation
for details: https://github.com/librosa/librosa#audioread
Once initialized with a single type of data (time-series or time-frequency), there are methods
to compute an STFT from time-series data (:func:`stft`) and vice versa (:func:`istft`).
**Sample Rate**
The sample rate of an :class:`AudioSignal` object is set upon initialization. If initializing
from a path, the sample rate of the :class:`AudioSignal` object inherits the native sample
rate from the file. If initialized with an audio or stft data array, the sample rate is passed
in as an optional argument. In these cases, with no sample rate explicitly defined, the default
sample rate is 44.1 kHz (CD quality). If this argument is provided when reading from a file
and the provided sample rate does not match the native sample rate of the file,
:class:`AudioSignal` will resample the data from the file so that it matches the provided
sample rate.
Notes:
There is no guarantee that data in :attr:`audio_data` corresponds to data in
:attr:`stft_data`. E.g., when an :class:`AudioSignal` object is initialized with
:attr:`audio_data` of an audio mixture, its :attr:`stft_data` is ``None`` until :func:`stft`
is called. Once :func:`stft` is called and a mask is applied to :attr:`stft_data` (via some
algorithm), the :attr:`audio_data` in this :class:`AudioSignal` object still contains data
from the original mixture that it was initialized with even though :attr:`stft_data`
contains altered data. (To hear the results, simply call :func:`istft` on the
:class:`AudioSignal` object.) It is up to the user to keep track of the contents of
:attr:`audio_data` and :attr:`stft_data`.
See Also:
For a walk-through of AudioSignal features, see :ref:`audio_signal_basics` and
:ref:`audio_signal_stft`.
Arguments:
path_to_input_file (``str``): Path to an input file to load upon initialization. Audio
gets loaded into :attr:`audio_data`.
audio_data_array (:obj:`np.ndarray`): 1D or 2D numpy array containing a real-valued,
time-series representation of the audio.
stft (:obj:`np.ndarray`): 2D or 3D numpy array containing pre-computed complex-valued STFT
data.
label (``str``): A label for this :class:`AudioSignal` object.
offset (``float``): Starting point of the section to be extracted (in seconds) if
initializing from a file.
duration (``float``): Length of the signal to read from the file (in seconds). Defaults to
full length of the signal (i.e., ``None``).
sample_rate (``int``): Sampling rate of this :class:`AudioSignal` object.
Attributes:
path_to_input_file (``str``): Path to the input file. ``None`` if this AudioSignal never
loaded a file, i.e., initialized with a ``np.ndarray``.
label (``str``): A user-definable label for this :class:`AudioSignal` object.
"""
def __init__(self, path_to_input_file=None, audio_data_array=None, stft=None, label=None,
sample_rate=None, stft_params=None, offset=0, duration=None):
self.path_to_input_file = path_to_input_file
self._audio_data = None
self.original_signal_length = None
self._stft_data = None
self._sample_rate = None
self._active_start = None
self._active_end = None
self.label = label
# Assert that this object was only initialized in one way
got_path = path_to_input_file is not None
got_audio_array = audio_data_array is not None
got_stft = stft is not None
init_inputs = np.array([got_path, got_audio_array, got_stft])
# noinspection PyPep8
if len(init_inputs[init_inputs == True]) > 1: # ignore inspection for clarity
raise AudioSignalException('Can only initialize AudioSignal object with one and only '
'one of {path, audio, stft}!')
if path_to_input_file is not None:
self.load_audio_from_file(self.path_to_input_file, offset, duration, sample_rate)
elif audio_data_array is not None:
self.load_audio_from_array(audio_data_array, sample_rate)
if self._sample_rate is None:
self._sample_rate = constants.DEFAULT_SAMPLE_RATE \
if sample_rate is None else sample_rate
self.stft_data = stft # complex spectrogram data
self.stft_params = stft_params
def __str__(self):
dur = f'{self.signal_duration:0.3f}' if self.signal_duration else '[unknown]'
return (
f"{self.__class__.__name__} "
f"({self.label if self.label else 'unlabeled'}): "
f"{dur} sec @ "
f"{self.path_to_input_file if self.path_to_input_file else 'path unknown'}, "
f"{self.sample_rate if self.sample_rate else '[unknown]'} Hz, "
f"{self.num_channels if self.num_channels else '[unknown]'} ch."
)
##################################################
# Properties
##################################################
@property
def signal_length(self):
"""
``int``
Number of samples in the active region of :attr:`audio_data`.
The length of the audio signal represented by this object in samples.
See Also:
* :func:`signal_duration` for the signal duration in seconds.
* :func:`set_active_region_to_default` for information about active regions.
"""
if self.audio_data is None:
return self.original_signal_length
return self.audio_data.shape[constants.LEN_INDEX]
@property
def signal_duration(self):
"""
``float``
Duration of the active region of :attr:`audio_data` in seconds.
The length of the audio signal represented by this object in seconds.
See Also:
* :func:`signal_length` for the signal length in samples.
* :func:`set_active_region_to_default` for information about active regions.
"""
if self.signal_length is None:
return None
return self.signal_length / self.sample_rate
@property
def num_channels(self):
"""
``int``
Number of channels this :class:`AudioSignal` has.
Defaults to returning number of channels in :attr:`audio_data`. If that is ``None``,
returns number of channels in :attr:`stft_data`. If both are ``None`` then returns
``None``.
See Also:
* :func:`is_mono`
* :func:`is_stereo`
"""
# TODO: what about a mismatch between audio_data and stft_data??
if self.audio_data is not None:
return self.audio_data.shape[constants.CHAN_INDEX]
if self.stft_data is not None:
return self.stft_data.shape[constants.STFT_CHAN_INDEX]
return None
@property
def is_mono(self):
"""
``bool``
Whether or not this signal is mono (i.e., has exactly **one** channel). First
looks at :attr:`audio_data`, then (if that's ``None``) looks at :attr:`stft_data`.
See Also:
* :func:`num_channels`
* :func:`is_stereo`
"""
return self.num_channels == 1
@property
def is_stereo(self):
"""
``bool``
Whether or not this signal is stereo (i.e., has exactly **two** channels). First
looks at :attr:`audio_data`, then (if that's ``None``) looks at :attr:`stft_data`.
See Also:
* :func:`num_channels`
* :func:`is_mono`
"""
return self.num_channels == 2
@property
def audio_data(self):
"""
``np.ndarray``
Stored as a ``numpy`` :obj:`np.ndarray`, :attr:`audio_data` houses the raw, uncompressed
time-domain audio data in the :class:`AudioSignal`. Audio data is stored with shape
``(n_channels, n_samples)`` as an array of floats.
``None`` by default, can be initialized upon object instantiation or set at any time by
accessing this attribute or calling :func:`load_audio_from_array`. It is recommended to
set :attr:`audio_data` by using :func:`load_audio_from_array` if this
:class:`AudioSignal` has been initialized without any audio or STFT data.
Raises:
:class:`AudioSignalException`
If set incorrectly, will raise an error. Expects a real, finite-valued 1D or 2D
``numpy`` :obj:`np.ndarray`-typed array.
Warnings:
:attr:`audio_data` and :attr:`stft_data` are not automatically synchronized, meaning
that if one of them is changed, those changes are not instantly reflected in the other.
To propagate changes, either call :func:`stft` or :func:`istft`.
Notes:
* This attribute only returns values within the active region. For more information
see :func:`set_active_region_to_default`. When setting this attribute, the active
region are reset to default.
* If :attr:`audio_data` is set with an improperly transposed array, it will
automatically transpose it so that it is set the expected way. A warning will be
displayed on the console.
See Also:
* :func:`load_audio_from_file` to load audio into :attr:`audio_data` after
initialization.
* :func:`load_audio_from_array` to safely load audio into :attr:`audio_data` after
initialization.
* :func:`set_active_region_to_default` for more information about the active region.
* :attr:`signal_duration` and :attr:`signal_length` for length of audio data in seconds
and samples, respectively.
* :func:`stft` to calculate an STFT from this data,
and :func:`istft` to calculate the inverse STFT and put it in :attr:`audio_data`.
* :func:`plot_time_domain` to create a plot of audio data stored in this attribute.
* :func:`peak_normalize` to apply gain such that to the absolute max value is exactly
``1.0``.
* :func:`rms` to calculate the root-mean-square of :attr:`audio_data`
* :func:`apply_gain` to apply a gain.
* :func:`get_channel` to safely retrieve a single channel in :attr:`audio_data`.
"""
if self._audio_data is None:
return None
start = 0
end = self._audio_data.shape[constants.LEN_INDEX]
if self._active_end is not None and self._active_end < end:
end = self._active_end
if self._active_start is not None and self._active_start > 0:
start = self._active_start
return self._audio_data[:, start:end]
@audio_data.setter
def audio_data(self, value):
if value is None:
self._audio_data = None
return
elif not isinstance(value, np.ndarray):
raise AudioSignalException('Type of self.audio_data must be of type np.ndarray!')
if not np.isfinite(value).all():
raise AudioSignalException('Not all values of audio_data are finite!')
if value.ndim > 1 and value.shape[constants.CHAN_INDEX] > value.shape[constants.LEN_INDEX]:
value = value.T
if value.ndim > 2:
raise AudioSignalException('self.audio_data cannot have more than 2 dimensions!')
if value.ndim < 2:
value = np.expand_dims(value, axis=constants.CHAN_INDEX)
self._audio_data = value
self.set_active_region_to_default()
@property
def stft_data(self):
"""
``np.ndarray``
Stored as a ``numpy`` :obj:`np.ndarray`, :attr:`stft_data` houses complex-valued data
computed from a Short-time Fourier Transform (STFT) of audio data in the
:class:`AudioSignal`. ``None`` by default, this :class:`AudioSignal` object can be
initialized with STFT data upon initialization or it can be set at any time.
The STFT data is stored with shape ``(n_frequency_bins, n_hops, n_channels)`` as
a complex-valued ``numpy`` array.
Raises:
:class:`AudioSignalException`
if set with an :obj:`np.ndarray` with one dimension or more than three dimensions.
See Also:
* :func:`stft` to calculate an STFT from :attr:`audio_data`, and :func:`istft` to
calculate the inverse STFT from this attribute and put it in :attr:`audio_data`.
* :func:`magnitude_spectrogram` to calculate and get the magnitude spectrogram from
:attr:`stft_data`. :func:`power_spectrogram` to calculate and get the power
spectrogram from :attr:`stft_data`.
* :func:`get_stft_channel` to safely get a specific channel in :attr:`stft_data`.
Notes:
* :attr:`audio_data` and :attr:`stft_data` are not automatically synchronized, meaning
that if one of them is changed, those changes are not instantly reflected in the other.
To propagate changes, either call :func:`stft` or :func:`istft`.
* :attr:`stft_data` will expand a two dimensional array so that it has the expected
shape `(n_frequency_bins, n_hops, n_channels)`.
"""
return self._stft_data
@stft_data.setter
def stft_data(self, value):
if value is None:
self._stft_data = None
return
elif not isinstance(value, np.ndarray):
raise AudioSignalException('Type of self.stft_data must be of type np.ndarray!')
if value.ndim == 1:
raise AudioSignalException('Cannot support arrays with less than 2 dimensions!')
if value.ndim == 2:
value = np.expand_dims(value, axis=constants.STFT_CHAN_INDEX)
if value.ndim > 3:
raise AudioSignalException('Cannot support arrays with more than 3 dimensions!')
if not np.iscomplexobj(value):
warnings.warn('Initializing STFT with data that is non-complex. '
'This might lead to weird results!')
self._stft_data = value
@property
def stft_params(self):
"""
``STFTParams``
STFT parameters are kept in this property. STFT parameters are a ``namedtuple``
called ``STFTParams`` with the following signature:
.. code-block:: python
STFTParams(
window_length=2048,
hop_length=512,
window_type='hann'
)
The defaults are 32ms windows, 8ms hop, and a hann window.
"""
return self._stft_params
@stft_params.setter
def stft_params(self, value):
if value and not isinstance(value, STFTParams):
raise ValueError("stft_params must be of type STFTParams or None!")
default_win_len = int(
2 ** (np.ceil(np.log2(constants.DEFAULT_WIN_LEN_PARAM * self.sample_rate)))
)
default_hop_len = default_win_len // 4
default_win_type = constants.WINDOW_DEFAULT
default_stft_params = STFTParams(
window_length=default_win_len,
hop_length=default_hop_len,
window_type=default_win_type
)._asdict()
value = value._asdict() if value else default_stft_params
for key in default_stft_params:
if value[key] is None:
value[key] = default_stft_params[key]
self._stft_params = STFTParams(**value)
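        # check_COLA only understands window names known to scipy, so the
        # sqrt-hann window is validated against the underlying hann window.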
if self._stft_params.window_type == 'sqrt_hann':
window_type = constants.WINDOW_HANN
else:
window_type = self._stft_params.window_type
check_COLA(window_type, self._stft_params.window_length, self._stft_params.hop_length)
@property
def has_data(self):
"""
``bool``
Returns ``False`` if :attr:`audio_data` and :attr:`stft_data` are empty. Else,
returns ``True``.
"""
has_audio_data = self.audio_data is not None and self.audio_data.size != 0
has_stft_data = self.stft_data is not None and self.stft_data.size != 0
return has_audio_data or has_stft_data
@property
def file_name(self):
"""
``str``
The name of the file associated with this object. Includes extension, but not the full
path.
Notes:
This will return ``None`` if this :class:`AudioSignal` object was not
loaded from a file.
See Also:
:attr:`path_to_input_file` for the full path.
"""
if self.path_to_input_file is not None:
return os.path.basename(self.path_to_input_file)
return None
@property
def sample_rate(self):
"""
``int``
Sample rate associated with this object. If audio was read from a file, the sample
rate will be set to the sample rate associated with the file. If this object was
initialized from an array then the sample rate is set upon init. This property is
read-only. To change the sample rate, use :func:`resample`.
Notes:
            This property is read-only and cannot be set directly. To change the sample
            rate, use :func:`resample`.
See Also:
* :func:`resample` to change the sample rate and resample data in :attr:`sample_rate`.
* :func:`load_audio_from_array` to read audio from an array and set the sample rate.
* :var:`nussl.constants.DEFAULT_SAMPLE_RATE` the default sample rate for *nussl*
if not specified
"""
return self._sample_rate
@property
def time_vector(self):
"""
``np.ndarray``
A 1D :obj:`np.ndarray` with timestamps (in seconds) for each sample in
:attr:`audio_data`.
"""
if self.signal_duration is None:
return None
return np.linspace(0.0, self.signal_duration, num=self.signal_length)
@property
def freq_vector(self):
"""
``np.ndarray``
A 1D numpy array with frequency values (in Hz) that correspond
to each frequency bin (vertical axis) in :attr:`stft_data`. Assumes
linearly spaced frequency bins.
Raises:
:class:`AudioSignalException`: If :attr:`stft_data` is ``None``.
Run :func:`stft` before accessing this.
"""
if self.stft_data is None:
raise AudioSignalException(
'Cannot calculate freq_vector until self.stft() is run')
return np.linspace(
0.0, self.sample_rate // 2,
num=self.stft_data.shape[constants.STFT_VERT_INDEX])
@property
def time_bins_vector(self):
"""
``np.ndarray``
A 1D numpy array with time values (in seconds) that correspond
to each time bin (horizontal/time axis) in :attr:`stft_data`.
Raises:
:class:`AudioSignalException`: If :attr:`stft_data` is ``None``. Run :func:`stft`
before accessing this.
"""
if self.stft_data is None:
raise AudioSignalException(
'Cannot calculate time_bins_vector until self.stft() is run')
return np.linspace(0.0, self.signal_duration,
num=self.stft_data.shape[constants.STFT_LEN_INDEX])
@property
def stft_length(self):
"""
``int``
The length of :attr:`stft_data` along the time axis. In units of hops.
Raises:
:class:`AudioSignalException`: If ``self.stft_dat``a is ``None``. Run :func:`stft`
before accessing this.
"""
if self.stft_data is None:
raise AudioSignalException('Cannot calculate stft_length until self.stft() is run')
return self.stft_data.shape[constants.STFT_LEN_INDEX]
@property
def active_region_is_default(self):
"""
``bool``
``True`` if active region is the full length of :attr:`audio_data`. ``False`` otherwise.
See Also:
* :func:`set_active_region` for a description of active regions in :class:`AudioSignal`
* :func:`set_active_region_to_default`
"""
return self._active_start == 0 and self._active_end == self._signal_length
@property
def _signal_length(self):
"""
``int``
This is the length of the full signal, not just the active region.
"""
if self._audio_data is None:
return None
return self._audio_data.shape[constants.LEN_INDEX]
@property
def power_spectrogram_data(self):
"""
``np.ndarray``
Returns a real valued :obj:`np.ndarray` with power
spectrogram data. The power spectrogram is defined as ``(STFT)^2``, where ``^2`` is
element-wise squaring of entries of the STFT. Same shape as :attr:`stft_data`.
Raises:
:class:`AudioSignalException`: if :attr:`stft_data` is ``None``. Run :func:`stft`
before accessing this.
See Also:
* :func:`stft` to calculate the STFT before accessing this attribute.
* :attr:`stft_data` complex-valued Short-time Fourier Transform data.
* :attr:`magnitude_spectrogram_data` to get magnitude spectrogram data.
* :func:`get_power_spectrogram_channel` to get a specific channel
"""
if self.stft_data is None:
raise AudioSignalException('Cannot calculate power_spectrogram_data '
'because self.stft_data is None')
return np.abs(self.stft_data) ** 2
@property
def magnitude_spectrogram_data(self):
"""
``np.ndarray``
Returns a real valued ``np.array`` with magnitude spectrogram data. The magnitude
spectrogram is defined as ``abs(STFT)``, the element-wise absolute value of every item
in the STFT. Same shape as :attr:`stft_data`.
Raises:
AudioSignalException: if :attr:`stft_data` is ``None``. Run :func:`stft` before
accessing this.
See Also:
* :func:`stft` to calculate the STFT before accessing this attribute.
* :attr:`stft_data` complex-valued Short-time Fourier Transform data.
* :attr:`power_spectrogram_data`
* :func:`get_magnitude_spectrogram_channel`
"""
if self.stft_data is None:
raise AudioSignalException('Cannot calculate magnitude_spectrogram_data '
'because self.stft_data is None')
return np.abs(self.stft_data)
@property
def log_magnitude_spectrogram_data(self):
"""
        ``np.ndarray``
        Returns a real valued ``np.array`` with log magnitude spectrogram data. The log
        magnitude spectrogram is defined as ``20 * log10(abs(STFT))``. Same shape as
        :attr:`stft_data`.
Raises:
AudioSignalException: if :attr:`stft_data` is ``None``. Run :func:`stft` before
accessing this.
See Also:
* :func:`stft` to calculate the STFT before accessing this attribute.
* :attr:`stft_data` complex-valued Short-time Fourier Transform data.
* :attr:`power_spectrogram_data`
* :func:`get_magnitude_spectrogram_channel`
"""
if self.stft_data is None:
raise AudioSignalException('Cannot calculate log_magnitude_spectrogram_data '
'because self.stft_data is None')
return 20 * np.log10(np.abs(self.stft_data) + 1e-8)
##################################################
# I/O
##################################################
def load_audio_from_file(self, input_file_path, offset=0, duration=None, new_sample_rate=None):
# type: (str, float, float, int) -> None
"""
Loads an audio signal into memory from a file on disc. The audio is stored in
:class:`AudioSignal` as a :obj:`np.ndarray` of `float` s. The sample rate is read from
the file, and this :class:`AudioSignal` object's sample rate is set from it. If
        :param:`new_sample_rate` is neither ``None`` nor the same as the sample rate of the file,
the audio will be resampled to the sample rate provided in the :param:`new_sample_rate`
parameter. After reading the audio data into memory, the active region is set to default.
:param:`offset` and :param:`duration` allow the user to determine how much of the audio is
read from the file. If those are non-default, then only the values provided will be stored
in :attr:`audio_data` (unlike with the active region, which has the entire audio data stored
in memory but only allows access to a subset of the audio).
See Also:
* :func:`load_audio_from_array` to read audio data from a :obj:`np.ndarray`.
Args:
input_file_path (str): Path to input file.
offset (float,): The starting point of the section to be extracted (seconds).
Defaults to 0 seconds (i.e., the very beginning of the file).
duration (float): Length of signal to load in second.
signal_length of 0 means read the whole file. Defaults to the full
length of the signal.
new_sample_rate (int): If this parameter is not ``None`` or the same sample rate as
provided by the input file, then the audio data will be resampled to the new
sample rate dictated by this parameter.
"""
assert offset >= 0, 'Parameter `offset` must be >= 0!'
if duration is not None:
assert duration >= 0, 'Parameter `duration` must be >= 0!'
try:
# try reading headers with soundfile for speed
audio_info = sf.info(input_file_path)
file_length = audio_info.duration
        except Exception:
# if that doesn't work try audioread
with audioread.audio_open(os.path.realpath(input_file_path)) as input_file:
file_length = input_file.duration
if offset > file_length:
raise AudioSignalException('offset is longer than signal!')
if duration is not None and offset + duration >= file_length:
            warnings.warn('offset + duration is longer than the signal.'
' Reading until end of signal...',
UserWarning)
audio_input, self._sample_rate = librosa.load(input_file_path,
sr=None,
offset=offset,
duration=duration,
mono=False)
self.audio_data = audio_input
self.original_signal_length = self.signal_length
if new_sample_rate is not None and new_sample_rate != self._sample_rate:
warnings.warn('Input sample rate is different than the sample rate'
' read from the file! Resampling...',
UserWarning)
self.resample(new_sample_rate)
self.path_to_input_file = input_file_path
self.set_active_region_to_default()
def load_audio_from_array(self, signal, sample_rate=constants.DEFAULT_SAMPLE_RATE):
"""
Loads an audio signal from a :obj:`np.ndarray`. :param:`sample_rate` is the sample
of the signal.
See Also:
* :func:`load_audio_from_file` to read in an audio file from disc.
Notes:
Only accepts float arrays and int arrays of depth 16-bits.
Parameters:
signal (:obj:`np.ndarray`): Array containing the audio signal sampled at
:param:`sample_rate`.
sample_rate (int): The sample rate of signal.
Default is :ref:`constants.DEFAULT_SAMPLE_RATE` (44.1kHz)
"""
assert (type(signal) == np.ndarray)
self.path_to_input_file = None
# Change from fixed point to floating point
if not np.issubdtype(signal.dtype, np.floating):
signal = signal.astype('float') / (np.iinfo(np.dtype('int16')).max + 1.0)
self.audio_data = signal
self.original_signal_length = self.signal_length
self._sample_rate = sample_rate if sample_rate is not None \
else constants.DEFAULT_SAMPLE_RATE
self.set_active_region_to_default()
def write_audio_to_file(self, output_file_path, sample_rate=None):
"""
Outputs the audio signal data in :attr:`audio_data` to a file at :param:`output_file_path`
with sample rate of :param:`sample_rate`.
Parameters:
output_file_path (str): Filename where output file will be saved.
sample_rate (int): The sample rate to write the file at. Default is
:attr:`sample_rate`.
"""
if self.audio_data is None:
raise AudioSignalException("Cannot write audio file because there is no audio data.")
if sample_rate is None:
sample_rate = self.sample_rate
audio_output = np.copy(self.audio_data)
# TODO: better fix
# convert to fixed point again
if not np.issubdtype(audio_output.dtype, np.dtype(int).type):
audio_output = np.multiply(
audio_output,
2 ** (constants.DEFAULT_BIT_DEPTH - 1)).astype('int16')
wav.write(output_file_path, sample_rate, audio_output.T)
##################################################
# Active Region
##################################################
def set_active_region(self, start, end):
"""
Determines the bounds of what gets returned when you access :attr:`audio_data`.
None of the data in :attr:`audio_data` is discarded when you set the active region, it
merely becomes inaccessible until the active region is set back to default (i.e., the full
length of the signal).
This is useful for reusing a single :class:`AudioSignal` object to do multiple operations on
only select parts of the audio data.
Warnings:
Many functions will raise exceptions while the active region is not default. Be aware
that adding, subtracting, concatenating, truncating, and other utilities are not
available when the active region is not default.
See Also:
* :func:`set_active_region_to_default`
* :attr:`active_region_is_default`
Examples:
>>> import nussl
>>> import numpy as np
>>> n = nussl.constants.DEFAULT_SAMPLE_RATE # 1 second of audio at 44.1kHz
>>> np_sin = np.sin(np.linspace(0, 100 * 2 * np.pi, n)) # sine wave @ 100 Hz
>>> sig = nussl.AudioSignal(audio_data_array=np_sin)
>>> sig.signal_duration
1.0
>>> sig.set_active_region(0, n // 2)
>>> sig.signal_duration
0.5
Args:
start (int): Beginning of active region (in samples). Cannot be less than 0.
end (int): End of active region (in samples). Cannot be larger than
:attr:`signal_length`.
"""
start, end = int(start), int(end)
self._active_start = start if start >= 0 else 0
self._active_end = end if end < self._signal_length else self._signal_length
def set_active_region_to_default(self):
"""
Resets the active region of this :class:`AudioSignal` object to its default value of the
entire :attr:`audio_data` array.
See Also:
* :func:`set_active_region` for an explanation of active regions within the
:class:`AudioSignal`.
"""
self._active_start = 0
self._active_end = self._signal_length
##################################################
# STFT Utilities
##################################################
@staticmethod
def get_window(window_type, window_length):
"""
Wrapper around scipy.signal.get_window so one can also get the
popular sqrt-hann window.
Args:
window_type (str): Type of window to get (see constants.ALL_WINDOW).
window_length (int): Length of the window
Returns:
np.ndarray: Window returned by scipy.signa.get_window
"""
if window_type == constants.WINDOW_SQRT_HANN:
window = np.sqrt(scipy.signal.get_window(
'hann', window_length
))
else:
window = scipy.signal.get_window(
window_type, window_length)
return window
def stft(self, window_length=None, hop_length=None, window_type=None, overwrite=True):
"""
Computes the Short Time Fourier Transform (STFT) of :attr:`audio_data`.
The results of the STFT calculation can be accessed from :attr:`stft_data`
if :attr:`stft_data` is ``None`` prior to running this function or ``overwrite == True``
Warning:
If overwrite=True (default) this will overwrite any data in :attr:`stft_data`!
Args:
window_length (int): Amount of time (in samples) to do an FFT on
hop_length (int): Amount of time (in samples) to skip ahead for the new FFT
window_type (str): Type of scaling to apply to the window.
overwrite (bool): Overwrite :attr:`stft_data` with current calculation
Returns:
(:obj:`np.ndarray`) Calculated, complex-valued STFT from :attr:`audio_data`, 3D numpy
array with shape `(n_frequency_bins, n_hops, n_channels)`.
"""
if self.audio_data is None or self.audio_data.size == 0:
raise AudioSignalException(
"No time domain signal (self.audio_data) to make STFT from!")
window_length = (
self.stft_params.window_length
if window_length is None
else int(window_length)
)
hop_length = (
self.stft_params.hop_length
if hop_length is None
else int(hop_length)
)
window_type = (
self.stft_params.window_type
if window_type is None
else window_type
)
stft_data = []
window = self.get_window(window_type, window_length)
for chan in self.get_channels():
_, _, _stft = scipy.signal.stft(
chan, fs=self.sample_rate, window=window,
nperseg=window_length, noverlap=window_length - hop_length)
stft_data.append(_stft)
stft_data = np.array(stft_data).transpose((1, 2, 0))
if overwrite:
self.stft_data = stft_data
return stft_data
def istft(self, window_length=None, hop_length=None, window_type=None, overwrite=True,
truncate_to_length=None):
""" Computes and returns the inverse Short Time Fourier Transform (iSTFT).
The results of the iSTFT calculation can be accessed from :attr:`audio_data`
if :attr:`audio_data` is ``None`` prior to running this function or ``overwrite == True``
Warning:
If overwrite=True (default) this will overwrite any data in :attr:`audio_data`!
Args:
window_length (int): Amount of time (in samples) to do an FFT on
hop_length (int): Amount of time (in samples) to skip ahead for the new FFT
window_type (str): Type of scaling to apply to the window.
overwrite (bool): Overwrite :attr:`stft_data` with current calculation
truncate_to_length (int): truncate resultant signal to specified length. Default ``None``.
Returns:
(:obj:`np.ndarray`) Calculated, real-valued iSTFT from :attr:`stft_data`, 2D numpy array
with shape `(n_channels, n_samples)`.
"""
if self.stft_data is None or self.stft_data.size == 0:
raise AudioSignalException('Cannot do inverse STFT without self.stft_data!')
window_length = (
self.stft_params.window_length
if window_length is None
else int(window_length)
)
hop_length = (
self.stft_params.hop_length
if hop_length is None
else int(hop_length)
)
window_type = (
self.stft_params.window_type
if window_type is None
else window_type
)
signals = []
window = self.get_window(window_type, window_length)
for stft in self.get_stft_channels():
_, _signal = scipy.signal.istft(
stft, fs=self.sample_rate, window=window,
nperseg=window_length, noverlap=window_length - hop_length)
signals.append(_signal)
calculated_signal = np.array(signals)
# Make sure it's shaped correctly
calculated_signal = np.expand_dims(calculated_signal, -1) \
if calculated_signal.ndim == 1 else calculated_signal
# if truncate_to_length isn't provided
if truncate_to_length is None:
truncate_to_length = self.original_signal_length
if self.signal_length is not None:
truncate_to_length = self.signal_length
if truncate_to_length is not None and truncate_to_length > 0:
calculated_signal = calculated_signal[:, :truncate_to_length]
if overwrite or self.audio_data is None:
self.audio_data = calculated_signal
return calculated_signal
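    # Illustrative example: audio_data and stft_data are not synchronized
    # automatically, so a manual round trip looks like the following
    # (``samples`` is a hypothetical 1D float array of audio):
    #
    #     sig = AudioSignal(audio_data_array=samples, sample_rate=44100)
    #     sig.stft()               # fills sig.stft_data from sig.audio_data
    #     sig.stft_data *= 0.5     # manipulate the complex spectrogram
    #     sig.istft()              # writes the modified audio back to sig.audio_data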
def apply_mask(self, mask, overwrite=False):
"""
Applies the input mask to the time-frequency representation in this :class:`AudioSignal`
object and returns a new :class:`AudioSignal` object with the mask applied. The mask
is applied to the magnitude of audio signal. The phase of the original audio
signal is then applied to construct the masked STFT.
Args:
mask (:obj:`MaskBase`-derived object): A ``MaskBase``-derived object
containing a mask.
overwrite (bool): If ``True``, this will alter ``stft_data`` in self.
If ``False``, this function will create a new ``AudioSignal`` object
with the mask applied.
Returns:
A new :class:`AudioSignal`` object with the input mask applied to the STFT,
iff ``overwrite`` is False.
"""
if not isinstance(mask, masks.MaskBase):
raise AudioSignalException(f'Expected MaskBase-derived object, given {type(mask)}')
if self.stft_data is None:
raise AudioSignalException('There is no STFT data to apply a mask to!')
if mask.shape != self.stft_data.shape:
if not mask.shape[:-1] == self.stft_data.shape[:-1]:
raise AudioSignalException(
'Input mask and self.stft_data are not the same shape! mask:'
f' {mask.shape}, self.stft_data: {self.stft_data.shape}'
)
magnitude, phase = np.abs(self.stft_data), np.angle(self.stft_data)
masked_abs = magnitude * mask.mask
masked_stft = masked_abs * np.exp(1j * phase)
if overwrite:
self.stft_data = masked_stft
else:
return self.make_copy_with_stft_data(masked_stft, verbose=False)
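    # Illustrative example of a typical masking flow, assuming ``mask`` is some
    # MaskBase-derived object whose shape matches the mixture's STFT:
    #
    #     mixture.stft()                        # populate mixture.stft_data
    #     estimate = mixture.apply_mask(mask)   # new AudioSignal with masked STFT
    #     estimate.istft()                      # recover the time-domain estimate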
def ipd_ild_features(self, ch_one=0, ch_two=1):
"""
Computes interphase difference (IPD) and interlevel difference (ILD) for a
stereo spectrogram. If more than two channels, this by default computes IPD/ILD
between the first two channels. This can be specified by the arguments ch_one
and ch_two. If only one channel, this raises an error.
Args:
ch_one (``int``): index of first channel to compute IPD/ILD.
ch_two (``int``): index of second channel to compute IPD/ILD.
Returns:
ipd (``np.ndarray``): Interphase difference between selected channels
ild (``np.ndarray``): Interlevel difference between selected channels
"""
if self.stft_data is None:
raise AudioSignalException("Cannot compute ipd/ild features without stft_data!")
if self.is_mono:
raise AudioSignalException("Cannot compute ipd/ild features on mono input!")
stft_ch_one = self.get_stft_channel(ch_one)
stft_ch_two = self.get_stft_channel(ch_two)
        # Interlevel difference: per-bin magnitude ratio between the two
        # channels, expressed in dB (small constants avoid division by zero
        # and log of zero).
        ild = np.abs(stft_ch_one) / (np.abs(stft_ch_two) + 1e-4)
        ild = 20 * np.log10(ild + 1e-8)
        frequencies = self.freq_vector
        # Interphase difference: per-bin phase difference between the two
        # channels, normalized by frequency and wrapped to [0, pi).
        ipd = np.angle(stft_ch_two * np.conj(stft_ch_one))
        ipd /= (frequencies + 1.0)[:, None]
        ipd = ipd % np.pi
return ipd, ild
##################################################
# Utilities
##################################################
def concat(self, other):
""" Concatenate two :class:`AudioSignal` objects (by concatenating :attr:`audio_data`).
Puts ``other.audio_data`` after :attr:`audio_data`.
Raises:
AudioSignalException: If ``self.sample_rate != other.sample_rate``,
``self.num_channels != other.num_channels``, or ``!self.active_region_is_default``
is ``False``.
Args:
other (:class:`AudioSignal`): :class:`AudioSignal` to concatenate with the current one.
"""
self._verify_audio(other)
self.audio_data = np.concatenate((self.audio_data, other.audio_data),
axis=constants.LEN_INDEX)
def truncate_samples(self, n_samples):
""" Truncates the signal leaving only the first ``n_samples`` samples.
This can only be done if ``self.active_region_is_default`` is True. If
        ``n_samples > self.signal_length``, then ``n_samples = self.signal_length``
(no truncation happens).
Raises:
AudioSignalException: If ``self.active_region_is_default`` is ``False``.
Args:
n_samples: (int) number of samples that will be left.
"""
if not self.active_region_is_default:
raise AudioSignalException('Cannot truncate while active region is not set as default!')
n_samples = int(n_samples)
if n_samples > self.signal_length:
n_samples = self.signal_length
self.audio_data = self.audio_data[:, 0: n_samples]
def truncate_seconds(self, n_seconds):
""" Truncates the signal leaving only the first n_seconds.
This can only be done if self.active_region_is_default is True.
Args:
n_seconds: (float) number of seconds to truncate :attr:`audio_data`.
"""
n_samples = int(n_seconds * self.sample_rate)
self.truncate_samples(n_samples)
def crop_signal(self, before, after):
"""
Get rid of samples before and after the signal on all channels. Contracts the length
of :attr:`audio_data` by before + after. Useful to get rid of zero padding after the fact.
Args:
before: (int) number of samples to remove at beginning of self.audio_data
after: (int) number of samples to remove at end of self.audio_data
"""
if not self.active_region_is_default:
raise AudioSignalException('Cannot crop signal while active region '
'is not set as default!')
num_samples = self.signal_length
self.audio_data = self.audio_data[:, before:num_samples - after]
self.set_active_region_to_default()
def zero_pad(self, before, after):
""" Adds zeros before and after the signal to all channels.
Extends the length of self.audio_data by before + after.
Raises:
            AudioSignalException: If ``self.active_region_is_default`` is ``False``.
Args:
before: (int) number of zeros to be put before the current contents of self.audio_data
            after: (int) number of zeros to be put after the current contents of self.audio_data
"""
if not self.active_region_is_default:
raise AudioSignalException('Cannot zero-pad while active region is not set as default!')
self.audio_data = np.pad(self.audio_data, ((0, 0), (before, after)), 'constant')
def add(self, other):
"""Adds two audio signal objects.
This does element-wise addition on the :attr:`audio_data` array.
Raises:
AudioSignalException: If ``self.sample_rate != other.sample_rate``,
``self.num_channels != other.num_channels``, or
``self.active_region_is_default`` is ``False``.
Parameters:
other (:class:`AudioSignal`): Other :class:`AudioSignal` to add.
Returns:
(:class:`AudioSignal`): New :class:`AudioSignal` object with the sum of
``self`` and ``other``.
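        Example:
            A minimal sketch (assumes ``sig1`` and ``sig2`` are :class:`AudioSignal`
            objects with matching sample rates, channel counts, and lengths). Because
            adding ``0`` returns ``self``, the built-in ``sum`` also works on a list
            of signals:
            >>> mixture = sig1.add(sig2)
            >>> same_mixture = sum([sig1, sig2])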
"""
if isinstance(other, int):
# this is so that sum(list of audio_signals) works.
# when sum is called on a list it's evaluated as 0 + elem1 + elem2 + ...
# so the 0 case needs to be taken care of (by doing nothing)
return self
self._verify_audio_arithmetic(other)
new_signal = copy.deepcopy(self)
new_signal.audio_data = self.audio_data + other.audio_data
return new_signal
def subtract(self, other):
"""Subtracts two audio signal objects.
This does element-wise subtraction on the :attr:`audio_data` array.
Raises:
AudioSignalException: If ``self.sample_rate != other.sample_rate``,
``self.num_channels != other.num_channels``, or
``self.active_region_is_default`` is ``False``.
Parameters:
other (:class:`AudioSignal`): Other :class:`AudioSignal` to subtract.
Returns:
(:class:`AudioSignal`): New :class:`AudioSignal` object with the difference
between ``self`` and ``other``.
"""
self._verify_audio_arithmetic(other)
other_copy = copy.deepcopy(other)
other_copy *= -1
return self.add(other_copy)
def make_copy_with_audio_data(self, audio_data, verbose=True):
""" Makes a copy of this :class:`AudioSignal` object with :attr:`audio_data` initialized to
        the input ``audio_data`` numpy array. The :attr:`stft_data` of the new :class:`AudioSignal`
object is ``None``.
Args:
audio_data (:obj:`np.ndarray`): Audio data to be put into the new :class:`AudioSignal` object.
verbose (bool): If ``True`` prints warnings. If ``False``, outputs nothing.
Returns:
(:class:`AudioSignal`): A copy of this :class:`AudioSignal` object with :attr:`audio_data`
            initialized to the input ``audio_data`` numpy array.
"""
if verbose:
if not self.active_region_is_default:
warnings.warn('Making a copy when active region is not default.')
if audio_data.shape != self.audio_data.shape:
warnings.warn('Shape of new audio_data does not match current audio_data.')
new_signal = copy.deepcopy(self)
new_signal.audio_data = audio_data
new_signal.stft_data = None
return new_signal
def make_copy_with_stft_data(self, stft_data, verbose=True):
""" Makes a copy of this :class:`AudioSignal` object with :attr:`stft_data` initialized to the
        input ``stft_data`` numpy array. The :attr:`audio_data` of the new :class:`AudioSignal`
object is ``None``.
Args:
            stft_data (:obj:`np.ndarray`): STFT data to be put into the new :class:`AudioSignal` object.
            verbose (bool): If ``True`` prints warnings. If ``False``, outputs nothing.
Returns:
(:class:`AudioSignal`): A copy of this :class:`AudioSignal` object with :attr:`stft_data`
            initialized to the input ``stft_data`` numpy array.
"""
if verbose:
if not self.active_region_is_default:
warnings.warn('Making a copy when active region is not default.')
if stft_data.shape != self.stft_data.shape:
warnings.warn('Shape of new stft_data does not match current stft_data.')
new_signal = copy.deepcopy(self)
new_signal.stft_data = stft_data
new_signal.original_signal_length = self.original_signal_length
new_signal.audio_data = None
return new_signal
def rms(self, win_len=None, hop_len=None):
""" Calculates the root-mean-square of :attr:`audio_data`.
Returns:
(float): Root-mean-square of :attr:`audio_data`.
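        Example:
            Illustrative only (assumes ``signal`` is an :class:`AudioSignal` with
            loaded audio data):
            >>> overall = signal.rms()             # one value per channel
            >>> framed = signal.rms(win_len=2048)  # per-frame values per channel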
"""
if win_len is not None:
hop_len = win_len // 2 if hop_len is None else hop_len
rms_func = lambda arr: librosa.feature.rms(arr, frame_length=win_len,
hop_length=hop_len)[0, :]
else:
rms_func = lambda arr: np.sqrt(np.mean(np.square(arr)))
result = []
for ch in self.get_channels():
result.append(rms_func(ch))
return np.squeeze(result)
def peak_normalize(self):
"""
Peak normalizes the audio signal.
"""
        # Normalize by the absolute peak so that large negative samples do not clip.
        self.apply_gain(1 / np.abs(self.audio_data).max())
def apply_gain(self, value):
"""
Apply a gain to :attr:`audio_data`
Args:
value (float): amount to multiply self.audio_data by
Returns:
(:class:`AudioSignal`): This :class:`AudioSignal` object with the gain applied.
"""
if not isinstance(value, numbers.Real):
raise AudioSignalException('Can only multiply/divide by a scalar!')
self.audio_data = self.audio_data * value
return self
def resample(self, new_sample_rate):
"""
Resample the data in :attr:`audio_data` to the new sample rate provided by
        ``new_sample_rate``. If ``new_sample_rate`` is the same as :attr:`sample_rate`
then nothing happens.
Args:
new_sample_rate (int): The new sample rate of :attr:`audio_data`.
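        Example:
            Illustrative only (assumes ``signal`` is an :class:`AudioSignal` at 44100 Hz):
            >>> signal.resample(16000)
            >>> signal.sample_rate
            16000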
"""
if new_sample_rate == self.sample_rate:
warnings.warn('Cannot resample to the same sample rate.')
return
resampled_signal = []
for channel in self.get_channels():
resampled_channel = librosa.resample(channel, self.sample_rate, new_sample_rate)
resampled_signal.append(resampled_channel)
self.audio_data = np.array(resampled_signal)
self.original_signal_length = self.signal_length
self._sample_rate = new_sample_rate
##################################################
# Channel Utilities
##################################################
def _verify_get_channel(self, n):
if n >= self.num_channels:
raise AudioSignalException(
f'Cannot get channel {n} when this object only has {self.num_channels}'
' channels! (0-based)'
)
if n < 0:
raise AudioSignalException(
f'Cannot get channel {n}. This will cause unexpected results.'
)
def get_channel(self, n):
"""Gets audio data of n-th channel from :attr:`audio_data` as a 1D :obj:`np.ndarray`
of shape ``(n_samples,)``.
Parameters:
n (int): index of channel to get. **0-based**
See Also:
* :func:`get_channels`: Generator for looping through channels of :attr:`audio_data`.
* :func:`get_stft_channel`: Gets stft data from a specific channel.
* :func:`get_stft_channels`: Generator for looping through channels from
:attr:`stft_data`.
Raises:
:class:`AudioSignalException`: If not ``0 <= n < self.num_channels``.
Returns:
(:obj:`np.array`): The audio data in the n-th channel of the signal, 1D
"""
self._verify_get_channel(n)
return np.asfortranarray(utils._get_axis(self.audio_data, constants.CHAN_INDEX, n))
def get_channels(self):
"""Generator that will loop through channels of :attr:`audio_data`.
See Also:
* :func:`get_channel`: Gets audio data from a specific channel.
* :func:`get_stft_channel`: Gets stft data from a specific channel.
* :func:`get_stft_channels`: Generator to loop through channels of :attr:`stft_data`.
Yields:
(:obj:`np.array`): The audio data in the next channel of this signal as a
1D ``np.ndarray``.
"""
for i in range(self.num_channels):
yield self.get_channel(i)
def get_stft_channel(self, n):
"""Returns STFT data of n-th channel from :attr:`stft_data` as a 2D ``np.ndarray``.
Args:
n: (int) index of stft channel to get. **0-based**
See Also:
* :func:`get_stft_channels`: Generator to loop through channels from :attr:`stft_data`.
* :func:`get_channel`: Gets audio data from a specific channel.
* :func:`get_channels`: Generator to loop through channels of :attr:`audio_data`.
Raises:
:class:`AudioSignalException`: If not ``0 <= n < self.num_channels``.
Returns:
(:obj:`np.array`): the STFT data in the n-th channel of the signal, 2D
"""
if self.stft_data is None:
raise AudioSignalException('Cannot get STFT data before STFT is calculated!')
self._verify_get_channel(n)
return utils._get_axis(self.stft_data, constants.STFT_CHAN_INDEX, n)
def get_stft_channels(self):
"""Generator that will loop through channels of :attr:`stft_data`.
See Also:
* :func:`get_stft_channel`: Gets stft data from a specific channel.
* :func:`get_channel`: Gets audio data from a specific channel.
* :func:`get_channels`: Generator to loop through channels of :attr:`audio_data`.
Yields:
(:obj:`np.array`): The STFT data in the next channel of this signal as a
2D ``np.ndarray``.
"""
for i in range(self.num_channels):
yield self.get_stft_channel(i)
def make_audio_signal_from_channel(self, n):
"""
        Makes a new :class:`AudioSignal` object with data from channel ``n``.
Args:
n (int): index of channel to make a new signal from. **0-based**
Returns:
(:class:`AudioSignal`) new :class:`AudioSignal` object with only data from
channel ``n``.
"""
new_signal = copy.copy(self)
new_signal.audio_data = self.get_channel(n)
return new_signal
def get_power_spectrogram_channel(self, n):
""" Returns the n-th channel from ``self.power_spectrogram_data``.
Raises:
Exception: If not ``0 <= n < self.num_channels``.
Args:
n: (int) index of power spectrogram channel to get **0-based**
Returns:
            (:obj:`np.array`): the power spectrogram data in the n-th channel of the signal, 2D
"""
self._verify_get_channel(n)
# np.array helps with duck typing
return utils._get_axis(np.array(self.power_spectrogram_data),
constants.STFT_CHAN_INDEX, n)
def get_magnitude_spectrogram_channel(self, n):
""" Returns the n-th channel from ``self.magnitude_spectrogram_data``.
Raises:
Exception: If not ``0 <= n < self.num_channels``.
Args:
n: (int) index of magnitude spectrogram channel to get **0-based**
Returns:
            (:obj:`np.array`): the magnitude spectrogram data in the n-th channel of the signal, 2D
"""
self._verify_get_channel(n)
# np.array helps with duck typing
return utils._get_axis(np.array(self.magnitude_spectrogram_data),
constants.STFT_CHAN_INDEX, n)
def to_mono(self, overwrite=True, keep_dims=False):
""" Converts :attr:`audio_data` to mono by averaging every sample.
Args:
overwrite (bool): If ``True`` this function will overwrite :attr:`audio_data`.
keep_dims (bool): If ``False`` this function will return a 1D array,
else will return array with shape `(1, n_samples)`.
Warning:
If ``overwrite=True`` (default) this will overwrite any data in :attr:`audio_data`!
Returns:
(:obj:`AudioSignal`): Mono-ed version of AudioSignal, either in place or not.
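        Example:
            Illustrative only (assumes ``signal`` is a stereo :class:`AudioSignal`):
            >>> mono_signal = signal.to_mono(overwrite=False, keep_dims=True)
            >>> mono_signal.num_channels
            1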
"""
mono = np.mean(self.audio_data, axis=constants.CHAN_INDEX, keepdims=keep_dims)
if overwrite:
self.audio_data = mono
return self
else:
mono_signal = self.make_copy_with_audio_data(mono)
return mono_signal
##################################################
# Utility hooks #
##################################################
def play(self):
"""
Plays this audio signal, using `nussl.play_utils.play`.
Plays an audio signal if ffplay from the ffmpeg suite of tools is installed.
Otherwise, will fail. The audio signal is written to a temporary file
and then played with ffplay.
"""
# lazy load
from . import play_utils
play_utils.play(self)
def embed_audio(self, ext='.mp3', display=True):
"""
Embeds the audio signal into a notebook, using `nussl.play_utils.embed_audio`.
        Writes a numpy array to a temporary mp3 file using ffmpy, then embeds the mp3
        into the notebook.
        Args:
            ext (str): What extension to use when embedding. '.mp3' is more lightweight
                leading to smaller notebook sizes.
            display (bool): Whether to display the embedded audio immediately; the value
                is passed through to ``nussl.play_utils.embed_audio``.
Example:
>>> import nussl
>>> audio_file = nussl.efz_utils.download_audio_file('schoolboy_fascination_excerpt.wav')
>>> audio_signal = nussl.AudioSignal(audio_file)
>>> audio_signal.embed_audio()
This will show a little audio player where you can play the audio inline in
the notebook.
"""
# lazy load
from . import play_utils
return play_utils.embed_audio(self, ext=ext, display=display)
##################################################
# Operator overloading #
##################################################
def __add__(self, other):
return self.add(other)
def __radd__(self, other):
return self.add(other)
def __sub__(self, other):
return self.subtract(other)
def _verify_audio(self, other):
if self.num_channels != other.num_channels:
raise AudioSignalException('Cannot do operation with two signals that have '
'a different number of channels!')
if self.sample_rate != other.sample_rate:
raise AudioSignalException('Cannot do operation with two signals that have '
'different sample rates!')
def _verify_audio_arithmetic(self, other):
self._verify_audio(other)
if self.signal_length != other.signal_length:
raise AudioSignalException('Cannot do arithmetic with signals of different length!')
def __iadd__(self, other):
return self + other
def __isub__(self, other):
return self - other
def __mul__(self, value):
if not isinstance(value, numbers.Real):
raise AudioSignalException('Can only multiply/divide by a scalar!')
return self.make_copy_with_audio_data(np.multiply(self.audio_data, value), verbose=False)
def __div__(self, value):
if not isinstance(value, numbers.Real):
raise AudioSignalException('Can only multiply/divide by a scalar!')
return self.make_copy_with_audio_data(np.divide(self.audio_data, float(value)),
verbose=False)
def __truediv__(self, value):
return self.__div__(value)
def __itruediv__(self, value):
return self.__idiv__(value)
def __imul__(self, value):
return self.apply_gain(value)
def __idiv__(self, value):
return self.apply_gain(1 / float(value))
def __len__(self):
return self.signal_length
def __eq__(self, other):
for k, v in list(self.__dict__.items()):
if isinstance(v, np.ndarray):
if not np.array_equal(v, other.__dict__[k]):
return False
elif v != other.__dict__[k]:
return False
return True
def __ne__(self, other):
return not self == other
class AudioSignalException(Exception):
"""
Exception class for :class:`AudioSignal`.
"""
pass
| mit | -3,678,452,288,421,490,000 | 38.338872 | 106 | 0.585991 | false |
harikishen/addons-server | src/olympia/search/tests/test_filters.py | 1 | 16855 | # -*- coding: utf-8 -*-
import json
from django.test.client import RequestFactory
from elasticsearch_dsl import Search
from mock import Mock
from rest_framework import serializers
from olympia import amo
from olympia.amo.tests import create_switch, TestCase
from olympia.constants.categories import CATEGORIES
from olympia.search.filters import (
InternalSearchParameterFilter, ReviewedContentFilter,
SearchParameterFilter, SearchQueryFilter, SortingFilter)
class FilterTestsBase(TestCase):
# Base TestCase class - Does not need to inherit from ESTestCase as the
# queries will never actually be executed.
def setUp(self):
super(FilterTestsBase, self).setUp()
self.req = RequestFactory().get('/')
self.view_class = Mock()
def _filter(self, req=None, data=None):
req = req or RequestFactory().get('/', data=data or {})
queryset = Search()
for filter_class in self.filter_classes:
queryset = filter_class().filter_queryset(req, queryset,
self.view_class)
return queryset.to_dict()
class TestQueryFilter(FilterTestsBase):
filter_classes = [SearchQueryFilter]
def _test_q(self):
qs = self._filter(data={'q': 'tea pot'})
# Spot check a few queries.
should = qs['query']['function_score']['query']['bool']['should']
expected = {
'match': {
'name': {
'query': 'tea pot', 'boost': 4, 'slop': 1, 'type': 'phrase'
}
}
}
assert expected in should
expected = {
'prefix': {'name': {'boost': 1.5, 'value': 'tea pot'}}
}
assert expected in should
expected = {
'match': {
'name_english': {
'query': 'tea pot', 'boost': 2.5,
'analyzer': 'english'
}
}
}
assert expected in should
expected = {
'match': {
'description_english': {
'query': 'tea pot', 'boost': 0.6,
'analyzer': 'english', 'type': 'phrase'
}
}
}
assert expected in should
functions = qs['query']['function_score']['functions']
assert functions[0] == {'field_value_factor': {'field': 'boost'}}
return qs
def test_q(self):
qs = self._test_q()
functions = qs['query']['function_score']['functions']
assert len(functions) == 1
def test_fuzzy_single_word(self):
qs = self._filter(data={'q': 'blah'})
should = qs['query']['function_score']['query']['bool']['should']
expected = {
'fuzzy': {
'name': {
'boost': 2, 'prefix_length': 4, 'value': 'blah'
}
}
}
assert expected in should
def test_no_fuzzy_multi_word(self):
qs = self._filter(data={'q': 'search terms'})
qs_str = json.dumps(qs)
assert 'fuzzy' not in qs_str
def test_webextension_boost(self):
create_switch('boost-webextensions-in-search')
# Repeat base test with the switch enabled.
qs = self._test_q()
functions = qs['query']['function_score']['functions']
assert len(functions) == 2
assert functions[1] == {
'weight': 2.0, # WEBEXTENSIONS_WEIGHT,
'filter': {'term': {'current_version.files.is_webextension': True}}
}
class TestReviewedContentFilter(FilterTestsBase):
filter_classes = [ReviewedContentFilter]
def test_status(self):
qs = self._filter(self.req)
must = qs['query']['filtered']['filter']['bool']['must']
must_not = qs['query']['filtered']['filter']['bool']['must_not']
assert {'terms': {'status': amo.REVIEWED_STATUSES}} in must
assert {'exists': {'field': 'current_version'}} in must
assert {'term': {'is_disabled': True}} in must_not
assert {'term': {'is_deleted': True}} in must_not
class TestSortingFilter(FilterTestsBase):
filter_classes = [SortingFilter]
def _reformat_order(self, key):
# elasticsearch-dsl transforms '-something' for us, so we have to
# expect the sort param in this format when we inspect the resulting
# queryset object.
return {key[1:]: {'order': 'desc'}} if key.startswith('-') else key
def test_sort_default(self):
qs = self._filter(data={'q': 'something'})
assert qs['sort'] == [self._reformat_order('-_score')]
qs = self._filter()
assert qs['sort'] == [self._reformat_order('-weekly_downloads')]
def test_sort_query(self):
SORTING_PARAMS = SortingFilter.SORTING_PARAMS
for param in SORTING_PARAMS:
qs = self._filter(data={'sort': param})
assert qs['sort'] == [self._reformat_order(SORTING_PARAMS[param])]
# Having a search query does not change anything, the requested sort
# takes precedence.
for param in SORTING_PARAMS:
qs = self._filter(data={'q': 'something', 'sort': param})
assert qs['sort'] == [self._reformat_order(SORTING_PARAMS[param])]
# If the sort query is wrong.
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'sort': 'WRONGLOL'})
assert context.exception.detail == ['Invalid "sort" parameter.']
# Same as above but with a search query.
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'q': 'something', 'sort': 'WRONGLOL'})
assert context.exception.detail == ['Invalid "sort" parameter.']
def test_sort_query_multiple(self):
qs = self._filter(data={'sort': ['rating,created']})
assert qs['sort'] == [self._reformat_order('-bayesian_rating'),
self._reformat_order('-created')]
# If the sort query is wrong.
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'sort': ['LOLWRONG,created']})
assert context.exception.detail == ['Invalid "sort" parameter.']
class TestSearchParameterFilter(FilterTestsBase):
filter_classes = [SearchParameterFilter]
def test_search_by_type_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'type': unicode(amo.ADDON_EXTENSION + 666)})
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'type': 'nosuchtype'})
assert context.exception.detail == ['Invalid "type" parameter.']
def test_search_by_type_id(self):
qs = self._filter(data={'type': unicode(amo.ADDON_EXTENSION)})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'type': amo.ADDON_EXTENSION}} in must
qs = self._filter(data={'type': unicode(amo.ADDON_PERSONA)})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'type': amo.ADDON_PERSONA}} in must
def test_search_by_type_string(self):
qs = self._filter(data={'type': 'extension'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'type': amo.ADDON_EXTENSION}} in must
qs = self._filter(data={'type': 'persona'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'type': amo.ADDON_PERSONA}} in must
def test_search_by_app_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'app': unicode(amo.FIREFOX.id + 666)})
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'app': 'nosuchapp'})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_app_id(self):
qs = self._filter(data={'app': unicode(amo.FIREFOX.id)})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'app': amo.FIREFOX.id}} in must
qs = self._filter(data={'app': unicode(amo.THUNDERBIRD.id)})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'app': amo.THUNDERBIRD.id}} in must
def test_search_by_app_string(self):
qs = self._filter(data={'app': 'firefox'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'app': amo.FIREFOX.id}} in must
qs = self._filter(data={'app': 'thunderbird'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'app': amo.THUNDERBIRD.id}} in must
def test_search_by_appversion_app_missing(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'appversion': '46.0'})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_appversion_app_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'appversion': '46.0',
'app': 'internet_explorer'})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_appversion_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'appversion': 'not_a_version',
'app': 'firefox'})
assert context.exception.detail == ['Invalid "appversion" parameter.']
def test_search_by_appversion(self):
qs = self._filter(data={'appversion': '46.0',
'app': 'firefox'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'app': amo.FIREFOX.id}} in must
assert {'range': {'current_version.compatible_apps.1.min':
{'lte': 46000000200100}}} in must
assert {'range': {'current_version.compatible_apps.1.max':
{'gte': 46000000000100}}} in must
def test_search_by_platform_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'platform': unicode(amo.PLATFORM_WIN.id + 42)})
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'platform': 'nosuchplatform'})
assert context.exception.detail == ['Invalid "platform" parameter.']
def test_search_by_platform_id(self):
qs = self._filter(data={'platform': unicode(amo.PLATFORM_WIN.id)})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'terms': {'platforms': [
amo.PLATFORM_WIN.id, amo.PLATFORM_ALL.id]}} in must
qs = self._filter(data={'platform': unicode(amo.PLATFORM_LINUX.id)})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'terms': {'platforms': [
amo.PLATFORM_LINUX.id, amo.PLATFORM_ALL.id]}} in must
def test_search_by_platform_string(self):
qs = self._filter(data={'platform': 'windows'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'terms': {'platforms': [
amo.PLATFORM_WIN.id, amo.PLATFORM_ALL.id]}} in must
qs = self._filter(data={'platform': 'win'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'terms': {'platforms': [
amo.PLATFORM_WIN.id, amo.PLATFORM_ALL.id]}} in must
qs = self._filter(data={'platform': 'darwin'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'terms': {'platforms': [
amo.PLATFORM_MAC.id, amo.PLATFORM_ALL.id]}} in must
qs = self._filter(data={'platform': 'mac'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'terms': {'platforms': [
amo.PLATFORM_MAC.id, amo.PLATFORM_ALL.id]}} in must
qs = self._filter(data={'platform': 'macosx'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'terms': {'platforms': [
amo.PLATFORM_MAC.id, amo.PLATFORM_ALL.id]}} in must
qs = self._filter(data={'platform': 'linux'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'terms': {'platforms': [
amo.PLATFORM_LINUX.id, amo.PLATFORM_ALL.id]}} in must
def test_search_by_category_slug_no_app_or_type(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'category': 'other'})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_category_id_no_app_or_type(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'category': 1})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_category_slug(self):
category = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]['other']
qs = self._filter(data={
'category': 'other',
'app': 'firefox',
'type': 'extension'
})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'category': category.id}} in must
def test_search_by_category_id(self):
qs = self._filter(data={
'category': 1,
'app': 'firefox',
'type': 'extension'
})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'category': 1}} in must
def test_search_by_category_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(
data={'category': 666, 'app': 'firefox', 'type': 'extension'})
assert context.exception.detail == ['Invalid "category" parameter.']
def test_search_by_tag(self):
qs = self._filter(data={'tag': 'foo'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'tags': 'foo'}} in must
qs = self._filter(data={'tag': 'foo,bar'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'tags': 'foo'}} in must
assert {'term': {'tags': 'bar'}} in must
class TestInternalSearchParameterFilter(TestSearchParameterFilter):
filter_classes = [InternalSearchParameterFilter]
def test_search_by_status_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'status': unicode(amo.STATUS_PUBLIC + 999)})
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'status': 'nosuchstatus'})
assert context.exception.detail == ['Invalid "status" parameter.']
def test_search_by_status_id(self):
qs = self._filter(data={'status': unicode(amo.STATUS_PUBLIC)})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'status': amo.STATUS_PUBLIC}} in must
qs = self._filter(data={'status': unicode(amo.STATUS_NULL)})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'status': amo.STATUS_NULL}} in must
def test_search_by_status_string(self):
qs = self._filter(data={'status': 'public'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'status': amo.STATUS_PUBLIC}} in must
qs = self._filter(data={'status': 'incomplete'})
must = qs['query']['filtered']['filter']['bool']['must']
assert {'term': {'status': amo.STATUS_NULL}} in must
class TestCombinedFilter(FilterTestsBase):
"""
Basic test to ensure that when filters are combined they result in the
expected query structure.
"""
filter_classes = [SearchQueryFilter, ReviewedContentFilter, SortingFilter]
def test_combined(self):
qs = self._filter(data={'q': 'test'})
filtered = qs['query']['filtered']
assert filtered['query']['function_score']
assert filtered['filter']
must = filtered['filter']['bool']['must']
assert {'terms': {'status': amo.REVIEWED_STATUSES}} in must
must_not = filtered['filter']['bool']['must_not']
assert {'term': {'is_disabled': True}} in must_not
assert qs['sort'] == [{'_score': {'order': 'desc'}}]
should = filtered['query']['function_score']['query']['bool']['should']
expected = {
'match': {
'name_english': {
'analyzer': 'english', 'boost': 2.5, 'query': u'test'
}
}
}
assert expected in should
| bsd-3-clause | 4,325,633,125,103,865,300 | 38.940758 | 79 | 0.577099 | false |
opennode/nodeconductor-saltstack | src/nodeconductor_saltstack/exchange/migrations/0012_more_members_models.py | 1 | 1452 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('exchange', '0011_conferenceroom'),
]
operations = [
migrations.AddField(
model_name='group',
name='delivery_members',
field=models.ManyToManyField(related_name='+', to='exchange.User'),
preserve_default=True,
),
migrations.AddField(
model_name='group',
name='senders_out',
field=models.BooleanField(default=False, help_text=b'Delivery management for senders outside organizational unit'),
preserve_default=True,
),
migrations.AddField(
model_name='user',
name='send_as_members',
field=models.ManyToManyField(related_name='send_as_members_rel_+', to='exchange.User'),
preserve_default=True,
),
migrations.AddField(
model_name='user',
name='send_on_behalf_members',
field=models.ManyToManyField(related_name='send_on_behalf_members_rel_+', to='exchange.User'),
preserve_default=True,
),
migrations.AlterField(
model_name='group',
name='members',
field=models.ManyToManyField(related_name='+', to='exchange.User'),
preserve_default=True,
),
]
| mit | -2,407,152,448,340,314,600 | 32 | 127 | 0.57438 | false |
shimpe/frescobaldi | frescobaldi_app/variables.py | 1 | 6536 | # This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
Infrastructure to get local variables embedded in comments in a document.
"""
from __future__ import unicode_literals
import re
from PyQt4.QtCore import QTimer
import signals
import plugin
__all__ = ['get', 'update', 'manager', 'variables']
_variable_re = re.compile(r'\s*?([a-z]+(?:-[a-z]+)*):[ \t]*(.*?);')
_LINES = 5 # how many lines from top and bottom to scan for variables
def get(document, varname, default=None):
"""Get a single value from the document.
If a default is given and the type is bool or int, the value is converted to the same type.
If no value exists, the default is returned.
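
    For example, assuming a document whose first line is
        % -*- coding: utf-8; tab-width: 4; -*-
    get(document, 'tab-width', 8) returns 4 (converted to int because the default is an
    int) and get(document, 'coding') returns 'utf-8'.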
"""
variables = manager(document).variables()
try:
return prepare(variables[varname], default)
except KeyError:
return default
def update(document, dictionary):
"""Updates the given dictionary with values from the document, using present values as default."""
for name, value in manager(document).variables().items():
if name in dictionary:
dictionary[name] = prepare(value, dictionary[name])
return dictionary
def manager(document):
"""Returns a VariableManager for this document."""
return VariableManager.instance(document)
def variables(text):
"""Reads variables from the first and last _LINES lines of text."""
lines = text.splitlines()
start, count = 0, len(lines)
d = {}
if count > 2 * _LINES:
d.update(m.group(1, 2) for n, m in positions(lines[:_LINES]))
start = count - _LINES
d.update(m.group(1, 2) for n, m in positions(lines[start:]))
return d
class VariableManager(plugin.DocumentPlugin):
"""Caches variables in the document and monitors for changes.
The changed() Signal is emitted some time after the list of variables has been changed.
It is recommended to not change the document itself in response to this signal.
"""
changed = signals.Signal() # without argument
def __init__(self, document):
self._updateTimer = QTimer(singleShot=True, timeout=self.slotTimeout)
self._variables = self.readVariables()
document.contentsChange.connect(self.slotContentsChange)
document.closed.connect(self._updateTimer.stop) # just to be sure
def slotTimeout(self):
variables = self.readVariables()
if variables != self._variables:
self._variables = variables
self.changed()
def slotContentsChange(self, position, removed, added):
"""Called if the document changes."""
if (self.document().findBlock(position).blockNumber() < _LINES or
self.document().findBlock(position + added).blockNumber() > self.document().blockCount() - _LINES):
self._updateTimer.start(500)
def variables(self):
"""Returns the document variables (cached) as a dictionary. This method is recommended."""
if self._updateTimer.isActive():
# an update is pending, force it
self._updateTimer.stop()
self.slotTimeout()
return self._variables
def readVariables(self):
"""Reads the variables from the document and returns a dictionary. Internal."""
count = self.document().blockCount()
blocks = [self.document().firstBlock()]
if count > _LINES * 2:
blocks.append(self.document().findBlockByNumber(count - _LINES))
count = _LINES
def lines(block):
for i in range(count):
yield block.text()
block = block.next()
variables = {}
for block in blocks:
variables.update(m.group(1, 2) for n, m in positions(lines(block)))
return variables
def positions(lines):
"""Lines should be an iterable returning lines of text.
Returns an iterable yielding tuples (lineNum, matchObj) for every variable found.
Every matchObj has group(1) pointing to the variable name and group(2) to the value.
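
    For example, the single line "% -*- mode: lilypond; coding: utf-8;" yields two
    tuples (0, m1) and (0, m2), with m1.group(1, 2) == ('mode', 'lilypond') and
    m2.group(1, 2) == ('coding', 'utf-8').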
"""
commentstart = ''
interesting = False
for lineNum, text in enumerate(lines):
# first check the line start
start = 0
if interesting:
# already parsing? then skip comment start tokens
m = re.match(r'\s*{0}'.format(re.escape(commentstart)), text)
if m:
start = m.end()
else:
# does the line have '-*-' ?
m = re.search(r'(\S*)\s*-\*-', text)
if m:
interesting = True
commentstart = m.group(1)
start = m.end()
# now parse the line
if interesting:
while True:
m = _variable_re.match(text, start)
if m:
yield lineNum, m
start = m.end()
else:
if start < len(text) and not text[start:].isspace():
interesting = False
break
def prepare(value, default):
"""Try to convert the value (which is a string) to the type of the default value.
If (for int and bool) that fails, returns the default, otherwise returns the string unchanged.
"""
if isinstance(default, bool):
if value.lower() in ('true', 'yes', 'on', 't', '1'):
return True
elif value.lower() in ('false', 'no', 'off', 'f', '0'):
return False
return default
elif isinstance(default, int):
try:
return int(value)
except ValueError:
return default
return value
| gpl-2.0 | 2,572,797,856,602,407,000 | 33.582011 | 111 | 0.615667 | false |
low-sky/chempca | chempca.py | 1 | 1340 | import numpy as np
from scipy import linalg as LA
import astropy.io.fits as fits
import matplotlib.pyplot as p
def PCA(data, dims_rescaled_data=2):
"""
    returns: data transformed into the principal-component basis, plus the eigenvalues
             of the covariance matrix sorted in decreasing order
    pass in: data as a 2D NumPy array (rows are samples, columns are variables)
"""
mn = np.mean(data, axis=0)
# mean center the data
data -= mn
# calculate the covariance matrix
C = np.cov(data.T)
# calculate eigenvectors & eigenvalues of the covariance matrix
evals, evecs = LA.eig(C)
# sorted them by eigenvalue in decreasing order
idx = np.argsort(evals)[::-1]
evecs = evecs[:,idx]
evals = evals[idx]
print(evals)
# select the first n eigenvectors (n is desired dimension
# of rescaled data array, or dims_rescaled_data)
# evecs = evecs[:,:dims_rescaled_data]
# carry out the transformation on the data using eigenvectors
data_rescaled = np.dot(evecs.T, data.T).T
# reconstruct original data array
# data_original_regen = np.dot(evecs,data).T + mn
return data_rescaled,evals#, data_original_regen
cprops = fits.getdata('multitracer.fits')
array = np.log10(np.vstack((cprops['HCN'],cprops['HCOP'],cprops['H13COP'],cprops['H13CN'],cprops['HNCO'])))#,cprops['HNCO'],cprops['CS'],cprops['SIO'])))
array[np.isnan(array)]=np.nanmin(array)
rs,evals = PCA(array.T)
| bsd-3-clause | 9,199,619,318,298,297,000 | 30.904762 | 153 | 0.678358 | false |