repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---
freedomtan/tensorflow | tensorflow/python/keras/preprocessing/timeseries.py | 1 | 8536 |
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras timeseries dataset utilities."""
# pylint: disable=g-classes-have-attributes
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.preprocessing.timeseries_dataset_from_array', v1=[])
def timeseries_dataset_from_array(
data,
targets,
sequence_length,
sequence_stride=1,
sampling_rate=1,
batch_size=128,
shuffle=False,
seed=None,
start_index=None,
end_index=None):
"""Creates a dataset of sliding windows over a timeseries provided as array.
This function takes in a sequence of data-points gathered at
equal intervals, along with time series parameters such as
length of the sequences/windows, spacing between two sequence/windows, etc.,
to produce batches of timeseries inputs and targets.
Arguments:
data: Numpy array or eager tensor
containing consecutive data points (timesteps).
Axis 0 is expected to be the time dimension.
targets: Targets corresponding to timesteps in `data`.
`targets[i]` should be the target
corresponding to the window that starts at index `i`
(see example 2 below).
Pass None if you don't have target data (in this case the dataset will
only yield the input data).
sequence_length: Length of the output sequences (in number of timesteps).
sequence_stride: Period between successive output sequences.
For stride `s`, output samples would
start at index `data[i]`, `data[i + s]`, `data[i + 2 * s]`, etc.
sampling_rate: Period between successive individual timesteps
within sequences. For rate `r`, timesteps
`data[i], data[i + r], ... data[i + (sequence_length - 1) * r]`
are used to create a sample sequence.
batch_size: Number of timeseries samples in each batch
(except maybe the last one).
shuffle: Whether to shuffle output samples,
or instead draw them in chronological order.
seed: Optional int; random seed for shuffling.
start_index: Optional int; data points earlier (exclusive)
than `start_index` will not be used
in the output sequences. This is useful to reserve part of the
data for test or validation.
end_index: Optional int; data points later (exclusive) than `end_index`
will not be used in the output sequences.
This is useful to reserve part of the data for test or validation.
Returns:
A tf.data.Dataset instance. If `targets` was passed, the dataset yields
tuple `(batch_of_sequences, batch_of_targets)`. If not, the dataset yields
only `batch_of_sequences`.
Example 1:
Consider indices `[0, 1, ... 99]`.
With `sequence_length=10, sampling_rate=2, sequence_stride=3`,
`shuffle=False`, the dataset will yield batches of sequences
composed of the following indices:
```
First sequence: [0 2 4 6 8 10 12 14 16 18]
Second sequence: [3 5 7 9 11 13 15 17 19 21]
Third sequence: [6 8 10 12 14 16 18 20 22 24]
...
Last sequence: [78 80 82 84 86 88 90 92 94 96]
```
In this case the last 3 data points are discarded since no full sequence
can be generated to include them (the next sequence would have started
at index 81, but start positions are only generated while the full
`sequence_length * sampling_rate` span fits within the data).
Example 2: temporal regression. Consider an array `data` of scalar
values, of shape `(steps,)`. To generate a dataset that uses the past 10
timesteps to predict the next timestep, you would use:
```python
input_data = data
offset = 10
targets = data[offset:]
dataset = tf.keras.preprocessing.timeseries_dataset_from_array(
input_data, targets, sequence_length=offset)
for batch in dataset:
inputs, targets = batch
assert np.array_equal(inputs[0], data[:10]) # First sequence: steps [0-9]
assert np.array_equal(targets[0], data[10]) # Corresponding target: step 10
break
```
"""
if start_index and (start_index < 0 or start_index >= len(data)):
raise ValueError('start_index must be higher than 0 and lower than the '
'length of the data. Got: start_index=%s '
'for data of length %s.' % (start_index, len(data)))
if end_index:
if start_index and end_index <= start_index:
raise ValueError('end_index must be higher than start_index. Got: '
'start_index=%s, end_index=%s.' %
(start_index, end_index))
if end_index >= len(data):
raise ValueError('end_index must be lower than the length of the data. '
'Got: end_index=%s' % (end_index,))
if end_index <= 0:
raise ValueError('end_index must be higher than 0. '
'Got: end_index=%s' % (end_index,))
# Validate strides
if sampling_rate <= 0 or sampling_rate >= len(data):
raise ValueError(
'sampling_rate must be higher than 0 and lower than '
'the length of the data. Got: '
'sampling_rate=%s for data of length %s.' % (sampling_rate, len(data)))
if sequence_stride <= 0 or sequence_stride >= len(data):
raise ValueError(
'sequence_stride must be higher than 0 and lower than '
'the length of the data. Got: sequence_stride=%s '
'for data of length %s.' % (sequence_stride, len(data)))
if start_index is None:
start_index = 0
if end_index is None:
end_index = len(data)
# Determine the lowest dtype to store start positions (to lower memory usage).
num_seqs = end_index - start_index - (sequence_length * sampling_rate) + 1
if targets is not None:
num_seqs = min(num_seqs, len(targets))
if num_seqs < 2147483647:
index_dtype = 'int32'
else:
index_dtype = 'int64'
# Generate start positions
start_positions = np.arange(0, num_seqs, sequence_stride, dtype=index_dtype)
if shuffle:
if seed is None:
seed = np.random.randint(1e6)
rng = np.random.RandomState(seed)
rng.shuffle(start_positions)
sequence_length = math_ops.cast(sequence_length, dtype=index_dtype)
sampling_rate = math_ops.cast(sampling_rate, dtype=index_dtype)
positions_ds = dataset_ops.Dataset.from_tensors(start_positions).repeat()
# For each initial window position, generates indices of the window elements
indices = dataset_ops.Dataset.zip(
(dataset_ops.Dataset.range(len(start_positions)), positions_ds)).map(
lambda i, positions: math_ops.range( # pylint: disable=g-long-lambda
positions[i],
positions[i] + sequence_length * sampling_rate,
sampling_rate),
num_parallel_calls=dataset_ops.AUTOTUNE)
dataset = sequences_from_indices(data, indices, start_index, end_index)
if targets is not None:
indices = dataset_ops.Dataset.zip(
(dataset_ops.Dataset.range(len(start_positions)), positions_ds)).map(
lambda i, positions: positions[i],
num_parallel_calls=dataset_ops.AUTOTUNE)
target_ds = sequences_from_indices(
targets, indices, start_index, end_index)
dataset = dataset_ops.Dataset.zip((dataset, target_ds))
if shuffle:
# Shuffle locally at each iteration
dataset = dataset.shuffle(buffer_size=batch_size * 8, seed=seed)
dataset = dataset.batch(batch_size)
return dataset
def sequences_from_indices(array, indices_ds, start_index, end_index):
dataset = dataset_ops.Dataset.from_tensors(array[start_index : end_index])
dataset = dataset_ops.Dataset.zip((dataset.repeat(), indices_ds)).map(
lambda steps, inds: array_ops.gather(steps, inds), # pylint: disable=unnecessary-lambda
num_parallel_calls=dataset_ops.AUTOTUNE)
return dataset
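# --- Hedged usage sketch (added for illustration; not part of the original file) ---
# Exercises timeseries_dataset_from_array defined above on a toy array, mirroring
# Example 2 from the docstring. Guarded so it only runs when executed directly.
if __name__ == '__main__':
    data = np.arange(100)
    targets = data[10:]
    dataset = timeseries_dataset_from_array(
        data, targets, sequence_length=10, batch_size=32)
    for inputs, batch_targets in dataset:
        # First window covers steps [0-9]; its target is step 10.
        assert np.array_equal(inputs[0], data[:10])
        assert np.array_equal(batch_targets[0], data[10])
        break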
| apache-2.0 | 8,371,571,566,166,718,000 | 40.843137 | 94 | 0.678304 | false |
akintolga/superdesk-aap | server/aap/io/feed_parsers/zczc_medianet.py | 1 | 4102 |
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license*.
from .zczc import ZCZCFeedParser
from superdesk.metadata.item import FORMAT, FORMATS
from superdesk.io.registry import register_feeding_service_error
from superdesk.errors import AlreadyExistsError
from superdesk.io.registry import register_feed_parser
from aap.errors import AAPParserError
import superdesk
from bs4 import BeautifulSoup, NavigableString
from superdesk.io.iptc import subject_codes
class ZCZCMedianetParser(ZCZCFeedParser):
NAME = 'Medianet_zczc'
place_map = {'MNETALL': 'FED',
'MNETNSW': 'NSW',
'MNETQLD': 'QLD',
'MNETVIC': 'VIC',
'MNETSA': 'SA',
'MNETWA': 'WA',
'MNETACT': 'ACT',
'MNETNT': 'NT',
'MNETTAS': 'TAS'}
subject_map = {'MFI': '04000000',
'MEN': '01021000',
'MSP': '15000000',
'MHE': '07007000',
'MIT': '13010000'}
def set_item_defaults(self, item, provider):
super().set_item_defaults(item, provider)
# Medianet
item[FORMAT] = FORMATS.PRESERVED
item['original_source'] = 'Medianet'
item['urgency'] = 5
self.CATEGORY = '$'
self.TAKEKEY = ':'
self.PLACE = '%'
self.header_map = {self.PLACE: self.ITEM_PLACE, self.TAKEKEY: self.ITEM_TAKE_KEY}
def post_process_item(self, item, provider):
InvestorRelease = (len(item.get('anpa_category', [])) and
item['anpa_category'][0].get('qcode', '').lower() == 'k')
if InvestorRelease:
# IRW News Release:
item['slugline'] = 'IRW News Release'
item['headline'] = 'IRW News Release: ' + item.get(self.ITEM_TAKE_KEY, '')
else:
item['slugline'] = 'Media Release'
item['headline'] = 'Media Release: ' + item.get(self.ITEM_TAKE_KEY, '')
# Truncate the take key if required
if len(item.get(self.ITEM_TAKE_KEY, '')) > 24:
item[self.ITEM_TAKE_KEY] = item.get(self.ITEM_TAKE_KEY, '')[0:24]
genre_map = superdesk.get_resource_service('vocabularies').find_one(req=None, _id='genre')
item['genre'] = [x for x in genre_map.get('items', []) if
x['qcode'] == 'Press Release' and x['is_active']]
soup = BeautifulSoup(item.get('body_html', ''), "html.parser")
ptag = soup.find('pre')
if ptag is not None:
if InvestorRelease:
ptag.insert(0, NavigableString(
'{} '.format('Investor Relations news release distributed by AAP Medianet. \r\n\r\n\r\n')))
else:
ptag.insert(0, NavigableString('{} '.format('Media release distributed by AAP Medianet. \r\n\r\n\r\n')))
item['body_html'] = str(soup)
locator_map = superdesk.get_resource_service('vocabularies').find_one(req=None, _id='locators')
place_strs = item.pop('place').split(' ')
for place in place_strs:
if place in self.place_map:
replace = [x for x in locator_map.get('items', []) if
x['qcode'] == self.place_map.get(place, '').upper()]
if replace is not None:
item[self.ITEM_PLACE] = replace
if place in self.subject_map:
if item.get(self.ITEM_SUBJECT) is None:
item[self.ITEM_SUBJECT] = []
item['subject'].append(
{'qcode': self.subject_map.get(place), 'name': subject_codes[self.subject_map.get(place)]})
return item
try:
register_feed_parser(ZCZCMedianetParser.NAME, ZCZCMedianetParser())
except AlreadyExistsError as ex:
pass
register_feeding_service_error('file', AAPParserError.ZCZCParserError().get_error_description())
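# Hedged illustration (added; not part of the original file): given an incoming
# Medianet header whose place string is e.g. 'MNETVIC MSP' (a made-up example),
# post_process_item() above would map 'MNETVIC' to the 'VIC' locator via
# place_map and 'MSP' to IPTC subject '15000000' (sport) via subject_map.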
| agpl-3.0 | -4,504,097,776,540,009,500 | 39.215686 | 120 | 0.573379 | false |
mdworks2016/work_development | Python/20_Third_Certification/venv/lib/python3.7/site-packages/django/contrib/gis/db/backends/mysql/operations.py | 4 | 3816 |
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import (
BaseSpatialOperations,
)
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.contrib.gis.geos.geometry import GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.db.backends.mysql.operations import DatabaseOperations
from django.utils.functional import cached_property
class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
mysql = True
name = 'mysql'
geom_func_prefix = 'ST_'
Adapter = WKTAdapter
@cached_property
def select(self):
return self.geom_func_prefix + 'AsBinary(%s)'
@cached_property
def from_text(self):
return self.geom_func_prefix + 'GeomFromText'
@cached_property
def gis_operators(self):
return {
'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API
'bboverlaps': SpatialOperator(func='MBROverlaps'), # ...
'contained': SpatialOperator(func='MBRWithin'), # ...
'contains': SpatialOperator(func='ST_Contains'),
'crosses': SpatialOperator(func='ST_Crosses'),
'disjoint': SpatialOperator(func='ST_Disjoint'),
'equals': SpatialOperator(func='ST_Equals'),
'exact': SpatialOperator(func='ST_Equals'),
'intersects': SpatialOperator(func='ST_Intersects'),
'overlaps': SpatialOperator(func='ST_Overlaps'),
'same_as': SpatialOperator(func='ST_Equals'),
'touches': SpatialOperator(func='ST_Touches'),
'within': SpatialOperator(func='ST_Within'),
}
disallowed_aggregates = (
aggregates.Collect, aggregates.Extent, aggregates.Extent3D,
aggregates.MakeLine, aggregates.Union,
)
@cached_property
def unsupported_functions(self):
unsupported = {
'AsGML', 'AsKML', 'AsSVG', 'Azimuth', 'BoundingCircle',
'ForcePolygonCW', 'GeometryDistance', 'LineLocatePoint',
'MakeValid', 'MemSize', 'Perimeter', 'PointOnSurface', 'Reverse',
'Scale', 'SnapToGrid', 'Transform', 'Translate',
}
if self.connection.mysql_is_mariadb:
unsupported.remove('PointOnSurface')
unsupported.update({'GeoHash', 'IsValid'})
if self.connection.mysql_version < (10, 2, 4):
unsupported.add('AsGeoJSON')
elif self.connection.mysql_version < (5, 7, 5):
unsupported.update({'AsGeoJSON', 'GeoHash', 'IsValid'})
return unsupported
def geo_db_type(self, f):
return f.geom_type
def get_distance(self, f, value, lookup_type):
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
raise ValueError(
'Only numeric values of degree units are allowed on '
'geodetic distance queries.'
)
dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection)))
else:
dist_param = value
return [dist_param]
def get_geometry_converter(self, expression):
read = wkb_r().read
srid = expression.output_field.srid
if srid == -1:
srid = None
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
if value is not None:
geom = GEOSGeometryBase(read(memoryview(value)), geom_class)
if srid:
geom.srid = srid
return geom
return converter
| apache-2.0 | 3,364,332,919,509,989,400 | 37.545455 | 95 | 0.622379 | false |
Ghini/ghini.desktop | bauble/utils/test.py | 1 | 10762 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2005,2006,2007,2008,2009 Brett Adams <[email protected]>
# Copyright (c) 2012-2015 Mario Frasca <[email protected]>
# Copyright 2017 Jardín Botánico de Quito
#
# This file is part of ghini.desktop.
#
# ghini.desktop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ghini.desktop is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ghini.desktop. If not, see <http://www.gnu.org/licenses/>.
#
# test.py
#
# Description: test for bauble.utils
import sys
import unittest
from pyparsing import *
from sqlalchemy import *
from nose import SkipTest
import bauble
import bauble.db as db
from bauble.error import check, CheckConditionError
import bauble.utils as utils
from bauble.test import BaubleTestCase
class UtilsGTKTests(unittest.TestCase):
def test_create_message_details_dialog(self):
raise SkipTest('Not Implemented')
details = """these are the lines that i want to test
asdasdadasddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd
dasd
asd
addasdadadad"""
msg = 'msg'
d = utils.create_message_details_dialog(msg, details)
d.run()
def test_create_message_dialog(self):
raise SkipTest('Not Implemented')
msg = 'msg'
#msg = ' this is a longer message to test that the dialog width is correct.....but what if it keeps going'
d = utils.create_message_dialog(msg)
d.run()
def test_search_tree_model(self):
from gi.repository import Gtk
model = Gtk.TreeStore(str)
# the rows that should be found
to_find = []
row = model.append(None, ['1'])
model.append(row, ['1.1'])
to_find.append(model.append(row, ['something']))
model.append(row, ['1.3'])
row = model.append(None, ['2'])
to_find.append(model.append(row, ['something']))
model.append(row, ['2.1'])
to_find.append(model.append(None, ['something']))
root = model.get_iter_first()
results = utils.search_tree_model(model[root], 'something')
self.assertTrue(sorted([model.get_path(r) for r in results]),
sorted(to_find))
class UtilsTests(unittest.TestCase):
def test_xml_safe(self):
class test(object):
def __str__(self):
return repr(self)
def __unicode__(self):
return repr(self)
import re
assert re.match('&lt;.*?&gt;', utils.xml_safe(str(test())))
assert re.match('&lt;.*?&gt;', utils.xml_safe(str(test())))
assert utils.xml_safe('test string') == 'test string'
assert utils.xml_safe('test string') == 'test string'
assert utils.xml_safe('test< string') == 'test&lt; string'
assert utils.xml_safe('test< string') == 'test&lt; string'
def test_range_builder(self):
assert utils.range_builder('1-3') == [1, 2, 3]
assert utils.range_builder('1-3,5-7') == [1, 2, 3, 5, 6 ,7]
assert utils.range_builder('1-3,5') == [1, 2, 3, 5]
assert utils.range_builder('1-3,5,7-9')== [1, 2, 3, 5, 7, 8, 9]
assert utils.range_builder('1,2,3,4') == [1, 2, 3, 4]
assert utils.range_builder('11') == [11]
# bad range strings
assert utils.range_builder('-1') == []
assert utils.range_builder('a-b') == []
#self.assertRaises(ParseException, utils.range_builder, '-1')
self.assertRaises(CheckConditionError, utils.range_builder, '2-1')
#self.assertRaises(ParseException, utils.range_builder, 'a-b')
def test_get_urls(self):
text = 'There a link in here: http://bauble.belizebotanic.org'
urls = utils.get_urls(text)
self.assertTrue(urls == [(None, 'http://bauble.belizebotanic.org')], urls)
text = 'There a link in here: http://bauble.belizebotanic.org '\
'and some text afterwards.'
urls = utils.get_urls(text)
self.assertTrue(urls == [(None, 'http://bauble.belizebotanic.org')], urls)
text = 'There is a link here: http://bauble.belizebotanic.org '\
'and here: https://belizebotanic.org and some text afterwards.'
urls = utils.get_urls(text)
self.assertTrue(urls == [(None, 'http://bauble.belizebotanic.org'),
(None, 'https://belizebotanic.org')], urls)
text = 'There a labeled link in here: '\
'[BBG]http://bauble.belizebotanic.org and some text afterwards.'
urls = utils.get_urls(text)
self.assertTrue(urls == [('BBG', 'http://bauble.belizebotanic.org')],
urls)
class UtilsDBTests(BaubleTestCase):
def test_find_dependent_tables(self):
metadata = MetaData()
metadata.bind = db.engine
# table1 does't depend on any tables
table1 = Table('table1', metadata,
Column('id', Integer, primary_key=True))
# table2 depends on table1
table2 = Table('table2', metadata,
Column('id', Integer, primary_key=True),
Column('table1', Integer, ForeignKey('table1.id')))
# table3 depends on table2
table3 = Table('table3', metadata,
Column('id', Integer, primary_key=True),
Column('table2', Integer, ForeignKey('table2.id')),
Column('table4', Integer, ForeignKey('table4.id'))
)
# table4 depends on table2
table4 = Table('table4', metadata,
Column('id', Integer, primary_key=True),
Column('table2', Integer, ForeignKey('table2.id')))
# tables that depend on table 1 are 3, 4, 2
depends = list(utils.find_dependent_tables(table1, metadata))
print('table1: %s' % [table.name for table in depends])
self.assertTrue(list(depends) == [table2, table4, table3])
# tables that depend on table 2 are 3, 4
depends = list(utils.find_dependent_tables(table2, metadata))
print('table2: %s' % [table.name for table in depends])
self.assertTrue(depends == [table4, table3])
# no tables depend on table 3
depends = list(utils.find_dependent_tables(table3, metadata))
print('table3: %s' % [table.name for table in depends])
self.assertTrue(depends == [])
# table that depend on table 4 are 3
depends = list(utils.find_dependent_tables(table4, metadata))
print('table4: %s' % [table.name for table in depends])
self.assertTrue(depends == [table3])
class ResetSequenceTests(BaubleTestCase):
def setUp(self):
super().setUp()
self.metadata = MetaData()
self.metadata.bind = db.engine
def tearDown(self):
super().tearDown()
self.metadata.drop_all()
@staticmethod
def get_currval(col):
if db.engine.name == 'postgresql':
name = '%s_%s_seq' % (col.table.name, col.name)
stmt = "select currval('%s');" % name
return db.engine.execute(stmt).fetchone()[0]
elif db.engine.name == 'sqlite':
stmt = 'select max(%s) from %s' % (col.name, col.table.name)
return db.engine.execute(stmt).fetchone()[0] + 1
def test_no_col_sequence(self):
# Test utils.reset_sequence on a column without a Sequence()
#
# This only tests that reset_sequence() doesn't fail if there is
# no sequence.
# test that a column without an explicit sequence works
table = Table('test_reset_sequence', self.metadata,
Column('id', Integer, primary_key=True))
self.metadata.create_all()
self.insert = table.insert()#.compile()
db.engine.execute(self.insert, values=[{'id': 1}])
utils.reset_sequence(table.c.id)
def test_empty_col_sequence(self):
# Test utils.reset_sequence on a column without a Sequence()
#
# This only tests that reset_sequence() doesn't fail if there is
# no sequence.
# test that a column without an explicit sequence works
table = Table('test_reset_sequence', self.metadata,
Column('id', Integer, primary_key=True))
self.metadata.create_all()
#self.insert = table.insert()#.compile()
#db.engine.execute(self.insert, values=[{'id': 1}])
utils.reset_sequence(table.c.id)
def test_with_col_sequence(self):
# UPDATE: 10/18/2011 -- we don't use Sequence() explicitly,
# just autoincrement=True on primary_key columns so this test
# probably isn't necessary
table = Table('test_reset_sequence', self.metadata,
Column('id', Integer,
Sequence('test_reset_sequence_id_seq'),
primary_key=True, unique=True))
self.metadata.create_all()
rangemax = 10
for i in range(1, rangemax+1):
table.insert().values(id=i).execute()
utils.reset_sequence(table.c.id)
currval = self.get_currval(table.c.id)
self.assertTrue(currval > rangemax, currval)
from bauble.utils import topological_sort
class TopologicalSortTests(unittest.TestCase):
def test_empty_dependencies(self):
r = topological_sort(['a', 'b', 'c'], [])
self.assertTrue('a' in r)
self.assertTrue('b' in r)
self.assertTrue('c' in r)
def test_full_dependencies(self):
r = topological_sort(['a', 'b', 'c'], [('a', 'b'), ('b', 'c')])
self.assertTrue('a' in r)
self.assertTrue('b' in r)
self.assertTrue('c' in r)
self.assertEqual(r.pop(), 'c')
self.assertEqual(r.pop(), 'b')
self.assertEqual(r.pop(), 'a')
def test_partial_dependencies(self):
r = topological_sort(['b', 'e'], [('a', 'b'), ('b', 'c'), ('b', 'd')])
print(r)
self.assertTrue('e' in r)
r.remove('e')
any = set([r.pop(), r.pop()])
self.assertEqual(any, set(['c', 'd']))
self.assertEqual(r.pop(), 'b')
#self.assertEquals(r, [])
def test_empty_input_full_dependencies(self):
r = topological_sort([], [('a', 'b'), ('b', 'c'), ('b', 'd')])
#self.assertEquals(r, [])
| gpl-2.0 | 2,246,268,003,325,092,400 | 36.103448 | 114 | 0.593401 | false |
dguevel/PyZOGY | PyZOGY/test/mock_image_class.py | 1 | 1822 |
import numpy as np
from astropy.io import fits
class MockImageClass(np.ndarray):
"""Creates a mock version of ImageClass for testing"""
def __new__(cls, image_filename='', psf_filename='', mask_filename=None, n_stamps=1, saturation=np.inf, variance=np.inf, shape=(50,50)):
raw_image, header = np.ones(shape), fits.Header()#fits.getdata(image_filename, header=True)
raw_psf = np.ones(shape)
mask = np.zeros(shape)
background_std, background_counts = np.ones(shape), np.zeros(shape)
image_data = np.ones(shape)
obj = np.asarray(image_data).view(cls)
obj.header = header
obj.raw_image = raw_image
obj.raw_psf = raw_psf
obj.background_std = background_std
obj.background_counts = background_counts
obj.image_filename = image_filename
obj.psf_filename = psf_filename
obj.saturation = saturation
obj.mask = mask
obj.psf = raw_psf
obj.zero_point = 1.
obj.variance = variance
return obj
def __array_finalize__(self, obj):
if obj is None:
return
self.raw_image = getattr(obj, 'raw_image', None)
self.header = getattr(obj, 'header', None)
self.raw_psf = getattr(obj, 'raw_psf', None)
self.background_std = getattr(obj, 'background_std', None)
self.background_counts = getattr(obj, 'background_counts', None)
self.image_filename = getattr(obj, 'image_filename', None)
self.psf_filename = getattr(obj, 'psf_filename', None)
self.saturation = getattr(obj, 'saturation', None)
self.mask = getattr(obj, 'mask', None)
self.psf = getattr(obj, 'psf', None)
self.zero_point = getattr(obj, 'zero_point', None)
self.variance = getattr(obj, 'variance', None)
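# Hedged usage sketch (added for illustration; not part of the original file).
# The filename arguments are placeholders -- the mock never opens them.
if __name__ == '__main__':
    image = MockImageClass('science.fits', 'psf.fits', shape=(50, 50))
    assert image.shape == (50, 50)
    assert image.zero_point == 1.0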
| mit | 7,069,680,640,840,340,000 | 40.409091 | 140 | 0.618551 | false |
rawdlite/mopidy-beets-local | setup.py | 1 | 1430 |
from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-BeetsLocal',
version=get_version('mopidy_beetslocal/__init__.py'),
url='https://github.com/rawdlite/mopidy-beets-local',
license='Apache License, Version 2.0',
author='Tom Roth',
author_email='[email protected]',
description='Access local beets library',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy >= 1.0',
'Pykka >= 1.1',
'uritools >= 0.11',
'beets'
],
test_suite='nose.collector',
tests_require=[
'nose',
'mock >= 1.0',
],
entry_points={
'mopidy.ext': [
'beetslocal = mopidy_beetslocal:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
| apache-2.0 | -1,899,545,129,688,441,900 | 27.039216 | 68 | 0.586713 | false |
SalesforceFoundation/CumulusCI | cumulusci/tasks/metadata_etl/value_sets.py | 1 | 3136 |
from cumulusci.core.exceptions import TaskOptionsError
from cumulusci.tasks.metadata_etl import MetadataSingleEntityTransformTask
from cumulusci.utils.xml.metadata_tree import MetadataElement
class AddValueSetEntries(MetadataSingleEntityTransformTask):
entity = "StandardValueSet"
task_options = {
**MetadataSingleEntityTransformTask.task_options,
"entries": {
"description": "Array of standardValues to insert. "
"Each standardValue should contain the keys 'fullName', the API name of the entry, "
"and 'label', the user-facing label. OpportunityStage entries require the additional "
"keys 'closed', 'won', 'forecastCategory', and 'probability'; CaseStatus entries "
"require 'closed'.",
"required": True,
},
"api_names": {
"description": "List of API names of StandardValueSets to affect, "
"such as 'OpportunityStage', 'AccountType', 'CaseStatus'",
"required": True,
},
}
def _transform_entity(self, metadata: MetadataElement, api_name: str):
for entry in self.options.get("entries", []):
if "fullName" not in entry or "label" not in entry:
raise TaskOptionsError(
"Standard value set entries must contain the 'fullName' and 'label' keys."
)
# Check for extra metadata on CaseStatus and OpportunityStage
if api_name == "OpportunityStage":
if not all(
[
"closed" in entry,
"forecastCategory" in entry,
"probability" in entry,
"won" in entry,
]
):
raise TaskOptionsError(
"OpportunityStage standard value set entries require the keys "
"'closed', 'forecastCategory', 'probability', and 'won'"
)
if api_name == "CaseStatus":
if "closed" not in entry:
raise TaskOptionsError(
"CaseStatus standard value set entries require the key 'closed'"
)
existing_entry = metadata.findall(
"standardValue", fullName=entry["fullName"]
)
if not existing_entry:
# Entry doesn't exist. Insert it.
elem = metadata.append(tag="standardValue")
elem.append("fullName", text=entry["fullName"])
elem.append("label", text=entry["label"])
elem.append("default", text="false")
if api_name in ["OpportunityStage", "CaseStatus"]:
elem.append("closed", str(entry["closed"]).lower())
if api_name == "OpportunityStage":
elem.append("won", str(entry["won"]).lower())
elem.append("probability", str(entry["probability"]))
elem.append("forecastCategory", entry["forecastCategory"])
return metadata
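# Hedged illustration (added; not part of the original file): the shape of the
# options this task validates above, written as the Python dict it would receive
# after cumulusci.yml parsing. The entry values below are made-up examples.
_EXAMPLE_OPTIONS = {
    "api_names": ["OpportunityStage"],
    "entries": [
        {
            "fullName": "Qualification",
            "label": "Qualification",
            "closed": False,
            "won": False,
            "forecastCategory": "Pipeline",
            "probability": 10,
        }
    ],
}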
| bsd-3-clause | -2,364,462,508,562,814,000 | 41.958904 | 98 | 0.546237 | false |
mrtukkin/ifp | olympic_layer/olympic_data_layer.py | 1 | 3300 |
import caffe
import numpy as np
from glob import glob
import random
from PIL import Image
from os.path import normpath, basename
from scipy.misc import imresize
from ifp_morris import downsample_segmentation
class OlympicDataLayer(caffe.Layer):
im_factor = 1.0
#label_factor = 0.25
label_factor = 0.5
# im_head = '/export/home/mfrank/data/OlympicSports/clips/'
# label_head = '/export/home/mfrank/results/OlympicSports/segmentations/'
im_head = '/export/home/mfrank/data/OlympicSports/patches/'
label_head = '/export/home/mfrank/results/OlympicSports/segmentation_patches/'
def setup(self, bottom, top):
print 'Setting up the OlympicDataLayer...'
self.top_names = ['data', 'label']
# config
params = eval(self.param_str)
self.path_file = params['path_file']
self.mean = np.array(params['mean'])
self.random = params.get('randomize', False)
self.seed = params.get('seed', None)
self.data_ext = params.get('data_ext', 'jpg')
self.label_ext = params.get('label_ext', 'npy')
# two tops: data and label
if len(top) != 2:
raise Exception("Need to define two tops: data and label.")
# data layers have no bottoms
if len(bottom) != 0:
raise Exception("Do not define a bottom.")
self.paths = open(self.path_file, 'r').read().splitlines()
self.idx = 0
def reshape(self, bottom, top):
# load image + label image pair
self.data = self.load_image(self.paths[self.idx])
self.label = self.load_label(self.paths[self.idx])
# while np.min([self.data.shape[1], self.data.shape[2]]) < 340:
# self.data = imresize(self.data, 2.0).transpose((2, 0, 1))
# self.label = self.label.repeat(2, axis=1).repeat(2, axis=2)
# reshape tops to fit (leading 1 is for batch dimension)
top[0].reshape(1, *self.data.shape)
top[1].reshape(1, *self.label.shape)
def forward(self, bottom, top):
# assign output
top[0].data[...] = self.data
top[1].data[...] = self.label
# pick next input
if self.random:
self.idx = random.randint(0, len(self.paths) - 1)
else:
self.idx += 1
if self.idx == len(self.paths):
self.idx = 0
def backward(self, top, propagate_down, bottom):
pass
def load_image(self, path):
"""
Load input image and preprocess for Caffe:
- cast to float
- switch channels RGB -> BGR
- subtract mean
- transpose to channel x height x width order
"""
im = Image.open(self.im_head + path + self.data_ext)
if self.im_factor == 1:
in_ = im
else:
in_ = imresize(im, self.im_factor)
in_ = np.array(in_, dtype=np.float32)
in_ = in_[:, :, ::-1]
in_ -= self.mean
in_ = in_.transpose((2, 0, 1))
return in_
def load_label(self, path):
label = np.load(self.label_head + path + self.label_ext).astype('int')
if self.label_factor != 1:
label = downsample_segmentation(label, int(1/self.label_factor))
label = label[np.newaxis, ...]
return label
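# Hedged illustration (added; not part of the original file): setup() above
# evaluates `param_str` as a Python dict, so a caffe prototxt python_param could
# pass something like the following (the path and mean values are made-up):
#
#   param_str: "{'path_file': '/path/to/train_paths.txt',
#                'mean': (104.0, 117.0, 123.0),
#                'randomize': True, 'seed': 1337}"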
| gpl-3.0 | -33,537,435,243,634,360 | 31.352941 | 82 | 0.578788 | false |
UCHIC/ODM2Sensor | src/sensordatainterface/forms.py | 1 | 31049 |
# -*- coding: utf-8 -*-
from django.core.exceptions import ValidationError
from django.db.models import Q
from django.forms import ModelForm, TextInput, Textarea, NumberInput, ModelChoiceField, DateTimeInput, Select, SelectMultiple \
, ModelMultipleChoiceField, FileInput, HiddenInput
from django.forms.models import modelformset_factory
from sensordatainterface.models import *
from django.utils.translation import ugettext_lazy as _
from django import forms
from datetime import datetime
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.forms.util import flatatt
from django.forms.fields import BooleanField
class PrettyCheckboxWidget(forms.widgets.CheckboxInput):
def render(self, name, value, attrs=None):
final_attrs = self.build_attrs(attrs, type='checkbox', name=name)
if self.check_test(value):
final_attrs['checked'] = 'checked'
if not (value is True or value is False or value is None or value == ''):
final_attrs['value'] = force_text(value)
if 'prettycheckbox-label' in final_attrs:
label = final_attrs.pop('prettycheckbox-label')
else:
label = ''
return format_html('<label class="checkbox-label" for="{0}"><input{1} /> {2}</label>', attrs['id'], flatatt(final_attrs), label.capitalize())
class PrettyCheckboxField(BooleanField):
widget = PrettyCheckboxWidget
def __init__(self, *args, **kwargs):
if kwargs['label']:
kwargs['widget'].attrs['prettycheckbox-label'] = kwargs['label']
kwargs['label'] = ''
super(PrettyCheckboxField, self).__init__(*args, **kwargs)
class SamplingFeatureChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.samplingfeaturename
class OrganizationChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.organizationname
class EquipmentModelChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.modelname
def validate(self, value):
pass
def to_python(self, value):
try:
value = super(EquipmentModelChoiceField, self).to_python(value)
except self.queryset.model.DoesNotExist:
key = self.to_field_name or 'pk'
value = EquipmentModel.objects.filter(**{key: value})
if not value.exists():
raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice')
else:
value = value.first()
return value
class PeopleChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.personfirstname + " " + obj.personlastname
class PeopleMultipleChoice(ModelMultipleChoiceField):
def label_from_instance(self, obj):
return obj.organizationid.organizationname + ": " + obj.personid.personfirstname + " " + obj.personid.personlastname
class DeploymentActionChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
action = obj.actionid
equipment = obj.equipmentid
equipment_model = equipment.equipmentmodelid
feature_actions = action.featureaction.all()
feature_action = feature_actions[0] if feature_actions.count() > 0 else None
manufacturer = equipment_model.modelmanufacturerid if equipment_model is not None else None
info = str(action.begindatetime) + ' '
info += (str(feature_action.samplingfeatureid.samplingfeaturecode) + ' ') if feature_action is not None else ''
info += (str(equipment.equipmentserialnumber) + ' ' + str(equipment.equipmenttypecv.name) + ' ') if equipment is not None else ''
info += (str(manufacturer.organizationname) + ' ') if manufacturer is not None else ''
info += (str(equipment_model.modelpartnumber) + ' ') if equipment_model is not None else ''
return info
class MethodChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.methodname
class UnitChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.unitsname
class ProcessingLevelChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.processinglevelcode
class MultipleEquipmentChoiceField(ModelMultipleChoiceField):
def label_from_instance(self, obj):
return obj.equipmentcode + ": " + obj.equipmentserialnumber + " (" + obj.equipmenttypecv.name + ", " + obj.equipmentmodelid.modelname + ")"
def clean(self, value):
cleaned_value = self._check_values(value)
return cleaned_value
class SiteVisitChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
start_time = str(obj.begindatetime)
sampling_feature_code = obj.featureaction.filter(actionid=obj).get().samplingfeatureid.samplingfeaturecode
return "(" + start_time + ") " + sampling_feature_code
class EquipmentChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.equipmentcode + ": " + obj.equipmentserialnumber + " (" + obj.equipmenttypecv.name + ", " + obj.equipmentmodelid.modelname + ")"
class CalibrationStandardMultipleChoiceField(ModelMultipleChoiceField):
def label_from_instance(self, obj):
if obj.referencematerialvalue.count() > 0:
referencematerialvalue = obj.referencematerialvalue.get()
value_information = ": " + referencematerialvalue.variableid.variablenamecv.name + " " + \
str(referencematerialvalue.referencematerialvalue) + " " + \
referencematerialvalue.unitsid.unitsabbreviation
else:
value_information = ''
return obj.referencematerialmediumcv.name + ' : ' + obj.referencematerialcode + " " + \
obj.referencemateriallotcode + value_information
class VariableChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.variablecode + ": " + obj.variablenamecv.name
class DeploymentChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.methodname
class InstrumentOutputVariableChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.modelid.modelname + ": " + obj.variableid.variablecode + ' ' + obj.variableid.variablenamecv.name
class ActionAnnotationChoiceField(ModelChoiceField):
def label_from_instance(self, obj):
return obj.annotationtext
time_zone_choices = (
(-12, '-12:00'),
(-11, '-11:00'),
(-10, '-10:00'),
(-9, '-9:00'),
(-8, '-8:00 PST'),
(-7, '-7:00 MST'),
(-6, '-6:00 CST'),
(-5, '-5:00 EST'),
(-4, '-4:00'),
(-3, '-3:00'),
(-2, '-2:00'),
(-1, '-1:00'),
(0, '±0:00'),
(1, '+1:00'),
(2, '+2:00'),
(3, '+3:00'),
(4, '+4:00'),
(5, '+5:00'),
(6, '+6:00'),
(7, '+7:00'),
(8, '+8:00'),
(9, '+9:00'),
(10, '+10:00'),
(11, '+11:00'),
(12, '+12:00'),
(13, '+13:00'),
(14, '+14:00'),
)
class SamplingFeatureForm(ModelForm):
required_css_class = 'form-required'
class Meta:
model = SamplingFeature
fields = [
'samplingfeaturecode',
'samplingfeaturename',
'samplingfeaturedescription',
'elevation_m',
'elevationdatumcv',
'samplingfeaturegeotypecv',
]
widgets = {
'samplingfeaturecode': TextInput,
'samplingfeaturename': TextInput,
'elevation_m': NumberInput,
}
labels = {
'samplingfeaturecode': _('Site Code'),
'samplingfeaturename': _('Site Name'),
'samplingfeaturedescription': _('Site Description'),
'elevation_m': _('Elevation (m)'),
'elevationdatumcv': _('Elevation Datum'),
'samplingfeaturegeotypecv': _('Geo-Type'),
}
class SiteForm(ModelForm):
required_css_class = 'form-required'
class Meta:
model = Sites
fields = [
'latitude',
'longitude',
'sitetypecv',
'spatialreferenceid'
]
widgets = {
'samplingfeaturename': TextInput,
'latitude': NumberInput,
'longitude': NumberInput,
}
labels = {
'latlondatumid': _('Spatial Reference'),
'latitude': _('Latitude (dec deg)'),
'longitude': _('Longitude (dec deg)'),
'sitetypecv': _('Site Type'),
'spatialreferenceid': _('Spatial Reference'),
}
class EquipmentForm(ModelForm):
required_css_class = 'form-required'
equipmentvendorid = OrganizationChoiceField(queryset=Organization.objects.all(), label='Equipment Vendor', empty_label='Choose an Organization')
equipmentmodelid = EquipmentModelChoiceField(queryset=EquipmentModel.objects.all(), label='Equipment Model', empty_label='Choose a Model')
equipmentpurchasedate = forms.DateTimeField(initial=datetime.now, label='Purchase Date')  # pass the callable so the default is evaluated per request, not once at import time
equipmentownerid = PeopleChoiceField(queryset=People.objects.all(), label='Owner', empty_label='Choose an Owner')
class Meta:
model = Equipment
fields = [
'equipmentcode',
'equipmentserialnumber',
'equipmentname',
'equipmenttypecv',
'equipmentpurchaseordernumber',
'equipmentpurchasedate',
'equipmentdescription',
'equipmentownerid',
'equipmentdocumentationlink',
]
widgets = {
'equipmentname': TextInput,
'equipmentcode': TextInput,
'equipmentserialnumber': TextInput,
'equipmentpurchaseordernumber': TextInput,
'equipmentdocumentationlink': FileInput,
}
labels = {
'equipmentname': _('Equipment Name'),
'equipmentcode': _('Equipment Code'),
'equipmentserialnumber': _('Serial Number'),
'equipmenttypecv': _('Equipment Type'),
'equipmentpurchaseordernumber': _('Purchase Order Number'),
'equipmentdescription': _('Description'),
'equipmentdocumentationlink': _('Documentation Link')
}
class EquipmentModelForm(ModelForm):
required_css_class = 'form-required'
modelmanufacturerid = OrganizationChoiceField(queryset=Organization.objects.all(), label='Equipment Manufacturer',
empty_label='Choose a Manufacturer')
class Meta:
model = EquipmentModel
fields = [
'modelname',
'modelpartnumber',
'modeldescription',
'isinstrument',
'modellink',
'modelspecificationsfilelink',
]
widgets = {
'modelpartnumber': TextInput,
'modelname': TextInput,
'modellink': TextInput,
}
labels = {
'modelpartnumber': _('Part Number'),
'modelname': _('Model Name'),
'modeldescription': _('Description'),
'isinstrument': _('Is Instrument'),
'modellink': _('Model Link'),
'modelspecificationsfilelink': _('Specifications File'),
}
class EquipmentUsedForm(ModelForm):
required_css_class = 'form-required'
equipmentid = EquipmentChoiceField(
queryset=Equipment.objects.all(),
label='Equipment',
empty_label='Choose an Equipment'
)
class Meta:
model = EquipmentUsed
exclude = [
'actionid'
]
class PersonForm(ModelForm):
required_css_class = 'form-required'
class Meta:
model = People
fields = [
'personfirstname',
'personlastname',
]
widgets = {
'personfirstname': TextInput,
'personlastname': TextInput,
}
labels = {
'personfirstname': _('First Name'),
'personlastname': _('Last Name')
}
class AffiliationForm(ModelForm):
required_css_class = 'form-required'
organizationid = OrganizationChoiceField(
queryset=Organization.objects.all(),
# this select will show all organizations and an option to create a new one.
label='Organization',
empty_label='Choose an Organization'
)
class Meta:
model = Affiliation
fields = [
'isprimaryorganizationcontact',
'primaryaddress',
'primaryphone', # gotta set the affiliation start date to current date.`
'primaryemail',
]
widgets = {
'primaryaddress': TextInput,
'primaryphone': TextInput,
'primaryemail': TextInput,
}
labels = {
'isprimaryorganizationcontact': _('Is Primary Organization Contact'),
'primaryaddress': _('Address'),
'primaryphone': _('Phone Number'),
'primaryemail': _('Email'),
}
class VendorForm(ModelForm):
required_css_class = 'form-required'
class Meta:
model = Organization
fields = [
'organizationcode',
'organizationname',
'organizationdescription',
'organizationtypecv',
'organizationlink',
]
widgets = {
'organizationcode': TextInput,
'organizationname': TextInput,
'organizationlink': TextInput,
}
labels = {
'organizationcode': _('Code'),
'organizationname': _('Name'),
'organizationdescription': _('Description'),
'organizationtypecv': _('Organization Type'),
'organizationlink': _('Website'),
}
class ReferenceMaterialForm(ModelForm):
required_css_class = 'form-required'
referencematerialorganizationid = OrganizationChoiceField(
queryset=Organization.objects.all(),
label='Organization',
empty_label='Choose an Organization'
)
class Meta:
model = ReferenceMaterial
fields = [
'referencematerialpurchasedate',
'referencemateriallotcode',
'referencematerialexpirationdate',
'referencematerialcertificatelink',
'referencematerialmediumcv'
]
widgets = {
'referencematerialpurchasedate': DateTimeInput,
'referencemateriallotcode': TextInput,
'referencematerialexpirationdate': DateTimeInput,
}
labels = {
'referencematerialpurchasedate': _('Purchase Date'),
'referencemateriallotcode': _('Lot Code'),
'referencematerialexpirationdate': _('Expiration Date'),
'referencematerialcertificatelink': _('Certificate File'),
'referencematerialmediumcv': _('Medium'),
}
class ReferenceMaterialValueForm(ModelForm):
required_css_class = 'form-required'
variableid = VariableChoiceField(
queryset=Variable.objects.all(),
label='Variable',
empty_label='Choose a Variable'
)
unitsid = UnitChoiceField(
queryset=Units.objects.all(),
label='Units',
empty_label='Choose a Unit'
)
class Meta:
model = ReferenceMaterialValue
fields = [
'referencematerialvalue',
'referencematerialaccuracy'
]
widgets = {
'referencematerialvalue': NumberInput,
}
labels = {
'referencematerialvalue': 'Reference Material Value',
'referencematerialaccuracy': 'Accuracy',
}
class MethodForm(ModelForm):
required_css_class = 'form-required'
organizationid = OrganizationChoiceField(
queryset=Organization.objects.all(),
label='Organization',
empty_label='Choose an Organization',
required=False
)
class Meta:
model = Method
fields = [
'methodcode',
'methodname',
'methodtypecv',
'methoddescription',
'methodlink'
]
widgets = {
'methodcode': TextInput,
'methodlink': TextInput,
'methodname': Textarea,
}
labels = {
'methodcode': _('Method Code'),
'methodname': _('Method Name'),
'methodtypecv': _('Method Type'),
'methoddescription': _('Description'),
'methodlink': _('Method Link')
}
class OutputVariableForm(ModelForm):
required_css_class = 'form-required'
instrumentmethodid = MethodChoiceField(
queryset=Method.objects.all(),
label='Method',
empty_label='Choose a Method'
)
variableid = VariableChoiceField(
queryset=Variable.objects.all(),
label='Variable',
empty_label='Choose a Variable'
)
modelid = EquipmentModelChoiceField(
queryset=EquipmentModel.objects.all(),
label='Model',
empty_label='Choose a Model'
)
instrumentrawoutputunitsid = UnitChoiceField(
queryset=Units.objects.all(),
label='Unit',
empty_label='Choose a Unit'
)
class Meta:
model = InstrumentOutputVariable
fields = [
'variableid',
'modelid',
'instrumentresolution',
'instrumentaccuracy',
'instrumentrawoutputunitsid',
]
widgets = {
'instrumentresolution': TextInput,
'instrumentaccuracy': TextInput
}
labels = {
'instrumentresolution': _('Instrument Resolution'),
'instrumentaccuracy': _('Instrument Accuracy')
}
class SiteDeploymentMeasuredVariableForm(ModelForm):
required_css_class = 'form-required'
instrumentmethodid = MethodChoiceField(
queryset=Method.objects.all(),
label='Method',
empty_label='Choose a Method'
)
variableid = VariableChoiceField(
queryset=Variable.objects.all(),
label='Variable',
empty_label='Choose a Variable'
)
instrumentrawoutputunitsid = UnitChoiceField(
queryset=Units.objects.all(),
label='Unit',
empty_label='Choose a Unit'
)
class Meta:
model = InstrumentOutputVariable
fields = [
'variableid',
'instrumentresolution',
'instrumentaccuracy',
'instrumentrawoutputunitsid',
]
widgets = {
'instrumentresolution': TextInput,
'instrumentaccuracy': TextInput,
}
labels = {
'instrumentresolution': _('Instrument Resolution'),
'instrumentaccuracy': _('Instrument Accuracy')
}
class FactoryServiceActionForm(ModelForm):
required_css_class = 'form-required'
methodid = MethodChoiceField(queryset=Method.objects.all(), label='Method',
empty_label='Choose a Method')
class Meta:
model = Action
fields = [
'begindatetime',
'begindatetimeutcoffset',
'enddatetime',
'enddatetimeutcoffset',
'actiondescription',
'actionfilelink',
]
widgets = {
'begindatetime': DateTimeInput,
'begindatetimeutcoffset': Select(choices=time_zone_choices),
'enddatetime': DateTimeInput,
'enddatetimeutcoffset': Select(choices=time_zone_choices),
'actionfilelink': FileInput,
}
labels = {
'begindatetime': _('Begin Date Time'),
'begindatetimeutcoffset': _('Begin UTC Offset'),
'enddatetime': _('End Date Time'),
'enddatetimeutcoffset': _('End UTC Offset'),
'actionfilelink': _('Action File'),
'actiondescription': _('Description')
}
class MaintenanceActionForm(ModelForm):
required_css_class = 'form-required'
class Meta:
model = MaintenanceAction
fields = [
# 'isfactoryservice' YES
'maintenancecode',
'maintenancereason',
]
widgets = {
# 'isfactoryservice': BooleanField,
'maintenancecode': TextInput,
}
labels = {
# 'isfactoryservice': _('Is Factory Service')
'maintenancecode': _('Maintenance Code'),
'maintenancereason': _('Maintenance Reason')
}
class SiteVisitForm(ModelForm):
required_css_class = 'form-required'
class Meta:
model = Action
fields = [
'begindatetime',
'begindatetimeutcoffset',
'enddatetime',
'enddatetimeutcoffset',
'actiondescription',
]
widgets = {
'begindatetimeutcoffset': Select(choices=time_zone_choices),
'enddatetimeutcoffset': Select(choices=time_zone_choices),
}
labels = {
'begindatetime': _('Begin Date Time'),
'begindatetimeutcoffset': _('Begin UTC Offset'),
'enddatetime': _('End Date Time'),
'enddatetimeutcoffset': _('End UTC Offset'),
'actiondescription': _('Description'),
}
class CrewForm(forms.Form):
required_css_class = 'form-required'
affiliationid = PeopleMultipleChoice(queryset=Affiliation.objects.all(), label="Crew")
def __init__(self, *args, **kwargs):
super(CrewForm, self).__init__(*args, **kwargs)
self.fields['affiliationid'].help_text = None
class FeatureActionForm(ModelForm):
required_css_class = 'form-required'
samplingfeatureid = SamplingFeatureChoiceField(
queryset=SamplingFeature.objects.all(),
label='Site',
empty_label="Choose a Site"
)
class Meta:
model = FeatureAction
fields = [
'samplingfeatureid'
]
class SiteVisitChoiceForm(ModelForm):
required_css_class = 'form-required'
actionid = SiteVisitChoiceField(
queryset=Action.objects.filter(actiontypecv='Site Visit').order_by('-begindatetime'),
label='Site Visit',
empty_label='Choose a Site Visit'
)
class Meta:
model = Action
fields = [
'actionid'
]
class SelectWithClassForOptions(Select):
def render_option(self, *args, **kwargs):
option_html = super(SelectWithClassForOptions, self).render_option(*args, **kwargs)
this_method = args[1]
class_value = "class=\"\""
if this_method != "":
class_value = Method.objects.get(pk=this_method).methodtypecv.name.replace(' ', '')
after_tag = 8
before_tag_close = 7
return option_html[:after_tag] + "class=\"" + class_value + "\"" + option_html[before_tag_close:]
class ActionForm(ModelForm):
def __init__(self, *args, **kwargs):
actiontype = kwargs.pop('actiontype', None)
super(ActionForm, self).__init__(*args, **kwargs)
self.fields['equipmentused'].help_text = None
self.fields['calibrationstandard'].help_text = None
self.fields['calibrationreferenceequipment'].help_text = None
self.fields['equipmentused'].required = False
required_css_class = 'form-required'
methodid = MethodChoiceField(queryset=Method.objects.all(), label='Method',
empty_label='Choose a Method', widget=SelectWithClassForOptions)
# add additional fields and put classes to make visible depending on action type.
# fields for equipment maintenance:
equipmentused = MultipleEquipmentChoiceField(
queryset=Equipment.objects.all(), label='Equipment Used', required=False
)
equipment_by_site = PrettyCheckboxField(widget=PrettyCheckboxWidget(
attrs={'class': 'Instrumentcalibration Notype'}), label='Show All Equipment', required=False
)
equipmentusednumber = forms.IntegerField(widget=HiddenInput(), required=False, initial=0)
calibrationstandard = CalibrationStandardMultipleChoiceField(
widget=forms.SelectMultiple(attrs={'class': 'Instrumentcalibration'}),
queryset=ReferenceMaterial.objects.all(), label='Calibration Standards', required=False
)
calibrationstandardnumber = forms.IntegerField(widget=HiddenInput(), required=False, initial=0)
calibrationreferenceequipment = MultipleEquipmentChoiceField(
widget=forms.SelectMultiple(attrs={'class': 'Instrumentcalibration'}),
queryset=Equipment.objects.all(), label='Reference Equipment',
required=False
)
calibrationreferenceequipmentnumber = forms.IntegerField(widget=HiddenInput(), required=False, initial=0)
isfactoryservice = forms.BooleanField(
widget=forms.CheckboxInput(attrs={'class': 'Equipmentmaintenance'}), label='Is Factory Service', required=False)
isfactoryservicebool = forms.BooleanField(
widget=HiddenInput(), initial='False', required=False
)
maintenancecode = forms.CharField(
widget=forms.TextInput(attrs={'class': 'Equipmentmaintenance'}), label='Maintenance Code', required=False)
maintenancereason = forms.CharField(
widget=forms.Textarea(attrs={'class': 'Equipmentmaintenance'}), label='Maintenance Reason', required=False)
# fields for calibration
instrumentoutputvariable = InstrumentOutputVariableChoiceField(
widget=forms.Select(attrs={'class': 'Instrumentcalibration'}),
queryset=InstrumentOutputVariable.objects.all(), label='Instrument Output Variable', required=False)
calibrationcheckvalue = forms.DecimalField(
widget=forms.NumberInput(attrs={'class': 'Instrumentcalibration'}), label='Calibration Check Value', required=False)
calibrationequation = forms.CharField(
widget=forms.TextInput(attrs={'class': 'Instrumentcalibration'}), label='Calibration Equation', required=False)
# fields for retrieval
deploymentaction = DeploymentActionChoiceField(widget=forms.Select(attrs={'class': 'Instrumentretrieval Equipmentretrieval'}), label='Deployment', to_field_name='actionid',
queryset=EquipmentUsed.objects.filter(Q(actionid__actiontypecv__term='equipmentDeployment') | Q(actionid__actiontypecv__term='instrumentDeployment')),
required=False
)
thisactionid = forms.IntegerField(widget=HiddenInput(), required=False, initial=0)
class Meta:
model = Action
fields = [
'actiontypecv',
'deploymentaction',
'begindatetime',
'begindatetimeutcoffset',
'enddatetime',
'enddatetimeutcoffset',
'actiondescription',
'actionfilelink',
'methodid',
]
widgets = {
# 'actiontypecv': Select(choices=[
# ('Field activity', 'Generic'),
# ('Equipment deployment', 'Deployment'),
# ('Instrument calibration', 'Calibration'),
# ('Equipment maintenance', 'Maintenance')
# ]),
'begindatetime': DateTimeInput,
'begindatetimeutcoffset': Select(choices=time_zone_choices),
'enddatetime': DateTimeInput,
'enddatetimeutcoffset': Select(choices=time_zone_choices),
'actionfilelink': FileInput,
# 'methodid': SelectWithClassForOptions,
}
labels = {
'actiontypecv': _('Action Type'),
'begindatetime': _('Begin Date Time'),
'begindatetimeutcoffset': _('Begin UTC Offset'),
'enddatetime': _('End Date Time'),
'enddatetimeutcoffset': _('End UTC Offset'),
'actionfilelink': _('Action File'),
'actiondescription': _('Description')
}
def clean(self):
return super(ActionForm, self).clean()
def clean_equipmentused(self):
equipment = self.data['equipmentused']
action_type = self.data['actiontypecv']
required_types = ['Equipment maintenance', 'Equipment programming', 'Instrument retrieval',
'Instrument calibration', 'Equipment deployment', 'Instrument deployment', 'Equipment retrieval']
if action_type in required_types and len(equipment) == 0:
raise ValidationError(_('This field is required'))
return self.cleaned_data['equipmentused']
class ResultsForm(forms.Form):
required_css_class = 'form-required'
instrumentoutputvariable = InstrumentOutputVariableChoiceField(
widget=forms.Select(attrs={'class': ''}),
queryset=InstrumentOutputVariable.objects.all(), label='Instrument Output Variable', required=True)
unitsid = UnitChoiceField(
widget=forms.Select(attrs={'class': ''}),
queryset=Units.objects.all(), label='Units', required=True)
processing_level_id = ProcessingLevelChoiceField(
widget=forms.Select(attrs={'class': ''}),
queryset=ProcessingLevel.objects.all(), label='Processing Level', required=True)
sampledmediumcv = forms.ModelChoiceField(
widget=forms.Select(attrs={'class': ''}),
queryset=CvMedium.objects.all(), label='Sampled Medium', required=True)
class AnnotationForm(forms.ModelForm):
required_css_class = 'form-required'
annotationid = ActionAnnotationChoiceField(queryset=Annotation.objects.all(),
label='Annotation', empty_label='Choose an Annotation')
class Meta:
model = Annotation
fields = [
'annotationid',
'annotationcode',
'annotationtext',
'annotationdatetime',
'annotationutcoffset'
]
widgets = {
'annotationcode': forms.TextInput,
'annotationtext': forms.TextInput,
'annotationdatetime': DateTimeInput,
'annotationutcoffset': Select(choices=time_zone_choices),
}
labels = {
'annotationid': _('Annotation'),
'annotationcode': _('Annotation Code'),
'annotationtext': _('Annotation Text'),
'annotationdatetime': _('Annotation Date Time'),
'annotationutcoffset': _('Annotation UTC Offset')
}
def get_cv_model_form(form_model, *args, **kwargs):
class CVForm(ModelForm):
required_css_class = 'form-required'
class Meta:
model = form_model
fields = ['term', 'name', 'definition', 'category', 'sourcevocabularyuri']
labels = {'sourcevocabularyuri': 'Source Vocabulary URI'}
widgets = {
'term': TextInput,
'name': TextInput,
'category': TextInput,
'sourcevocabularyuri': TextInput
}
def __init__(self):
super(CVForm, self).__init__(*args, **kwargs)
return CVForm()
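
# --- Illustrative usage (not part of the original module) ---
# A hedged sketch of how a view might use the factory above to edit one of the
# controlled-vocabulary tables; CvMedium is already imported in this module,
# but the request/POST plumbing shown here is assumed.
def example_cv_medium_form(post_data=None):
    # Returns a bound (when post_data is given) or unbound ModelForm instance
    # for the CvMedium vocabulary model.
    return get_cv_model_form(CvMedium, data=post_data)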
| bsd-3-clause | -1,623,401,265,978,536,200 | 32.492988 | 176 | 0.60735 | false |
zackdever/vsims | vsims/nestedstore.py | 1 | 2886 | from vsims.block import Block
class NestedStore:
"""Simple key-value store that supports nested transactional blocks."""
def __init__(self):
self.blocks = []
self.store = {}
self.value_counts = {}
def set(self, key, value, doLog=True):
"""Add the key to the store if not already present, and set its value.
key - key to add or update
value - value set for key
doLog - determines if a reverse operation should be logged
"""
has_key = self.has_key(key)
if not self.is_flat() and doLog:
block = self.blocks[-1]
if has_key:
block.log(self.set, key, self.get(key), False)
else:
block.log(self.delete, key, False)
if has_key:
old_value = self.get(key)
if old_value != value:
self._update_value_count_(old_value, -1)
self._update_value_count_(value, 1)
else:
self._update_value_count_(value, 1)
self.store[key] = value
def get(self, key):
"""Returns the value of the given key.
throws: KeyError if key is not present in the store
"""
return self.store[key]
def has_key(self, key):
"""Determines if the store contains the key."""
return self.store.has_key(key)
def delete(self, key, doLog=True):
"""Deletes the key from the store if present.
key - key to delete
doLog - determines if a reverse operation should be logged
"""
if self.has_key(key):
if not self.is_flat() and doLog:
self.blocks[-1].log(self.set, key, self.get(key), False)
self._update_value_count_(self.get(key), -1)
del self.store[key]
def nest(self):
"""Start a new transactional block."""
self.blocks.append(Block())
def pop_nest(self):
"""End the currently open transactional block.
throws: IndexError if there are no open transactional blocks.
"""
self.blocks.pop().rollback()
def flatten(self):
"""Permanently stores and closes all open transactional blocks."""
self.blocks = []
def is_flat(self):
"""Returns True if there are no open transactional blocks."""
return len(self.blocks) == 0
def numequalto(self, value):
"""Returns the number of keys set to the provided value."""
if not self.value_counts.has_key(value):
self.value_counts[value] = 0
return 0
return self.value_counts[value]
def _update_value_count_(self, value, count):
"""Set or update the count for the provided value."""
if self.value_counts.has_key(value):
self.value_counts[value] += count
else:
self.value_counts[value] = count
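
# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of the nested transactional behaviour described in the
# docstrings above. It assumes vsims.block.Block replays the logged reverse
# operations when rollback() is called, which is what pop_nest() relies on.
def _demo_nested_store():
    store = NestedStore()
    store.set('a', 10)
    store.nest()                      # open a transactional block
    store.set('a', 20)
    store.set('b', 20)
    assert store.numequalto(20) == 2  # both 'a' and 'b' are 20 inside the block
    store.pop_nest()                  # roll the block back
    assert store.get('a') == 10
    assert not store.has_key('b')
    assert store.is_flat()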
| mit | 7,066,074,681,128,292,000 | 30.369565 | 78 | 0.567221 | false |
Venturi/cms | env/lib/python2.7/site-packages/aldryn_people/south_migrations/0009_rename_tables_because_of_new_cms.py | 1 | 14315 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import connection
class Migration(SchemaMigration):
TABLES_MAPPING = {
'cmsplugin_peopleplugin': 'aldryn_people_peopleplugin',
}
REVERSE_TABLES_MAPPING = dict((v, k) for k, v in TABLES_MAPPING.iteritems())
@staticmethod
def rename_tables_by_map(mapper):
tables_names = connection.introspection.table_names()
for table_name in tables_names:
new_table_name = mapper.get(table_name)
if new_table_name:
db.rename_table(table_name, new_table_name)
def forwards(self, orm):
self.rename_tables_by_map(self.TABLES_MAPPING)
def backwards(self, orm):
self.rename_tables_by_map(self.REVERSE_TABLES_MAPPING)
models = {
u'aldryn_people.group': {
'Meta': {'object_name': 'Group'},
'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
},
u'aldryn_people.grouptranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'GroupTranslation', 'db_table': "u'aldryn_people_group_translation'"},
'company_description': ('djangocms_text_ckeditor.fields.HTMLField', [], {'blank': 'True'}),
'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['aldryn_people.Group']"})
},
u'aldryn_people.peopleplugin': {
'Meta': {'object_name': 'PeoplePlugin', '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'group_by_group': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'people': ('sortedm2m.fields.SortedManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['aldryn_people.Person']", 'null': 'True', 'blank': 'True'}),
'show_links': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'aldryn_people.person': {
'Meta': {'object_name': 'Person'},
'email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['aldryn_people.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'visual': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['filer.Image']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'aldryn_people.persontranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'PersonTranslation', 'db_table': "u'aldryn_people_person_translation'"},
'comment': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'function': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['aldryn_people.Person']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['aldryn_people']
| gpl-2.0 | 6,811,224,199,680,188,000 | 80.335227 | 192 | 0.557248 | false |
WebCampZg/conference-web | people/admin.py | 1 | 1117 | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from people.models import User
from django.utils.translation import ugettext as _
from .forms import CustomUserCreationForm, CustomUserChangeForm
class CustomUserAdmin(UserAdmin):
# Set the add/modify forms
add_form = CustomUserCreationForm
form = CustomUserChangeForm
fieldsets = (
(None, {'fields': ('email', 'password')}),
(_('Personal info'), {'fields': (
'first_name', 'last_name', 'twitter', 'github', 'tshirt_size')}),
(_('Permissions'), {'fields': (
'is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions')}),
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password1', 'password2'),
}),
)
list_display = ('email', 'first_name', 'last_name', 'is_staff', 'is_superuser')
list_filter = ('is_staff', 'is_superuser', 'is_active', 'groups')
search_fields = ('first_name', 'last_name', 'email')
ordering = ('email',)
admin.site.register(User, CustomUserAdmin)
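
# --- Illustrative note (not part of the original module) ---
# With this registration the admin "add user" page is driven by
# CustomUserCreationForm and the change page by CustomUserChangeForm. A hedged
# sketch of the data the add form expects, mirroring add_fieldsets above
# (the email address and passwords are invented for illustration):
#
#   form = CustomUserCreationForm(data={'email': '[email protected]',
#                                       'password1': 'secret-pass',
#                                       'password2': 'secret-pass'})
#   form.is_valid()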
| bsd-3-clause | -74,037,275,173,721,460 | 35.032258 | 85 | 0.610564 | false |
nyaruka/django-hamlpy | hamlpy/test/test_attributes.py | 1 | 11450 | import unittest
from collections import OrderedDict
from hamlpy.compiler import Compiler
from hamlpy.parser.attributes import read_attribute_dict
from hamlpy.parser.core import ParseException, Stream
class AttributeDictParserTest(unittest.TestCase):
@staticmethod
def _parse(text):
return read_attribute_dict(Stream(text), Compiler())
def test_read_ruby_style_attribute_dict(self):
# empty dict
stream = Stream("{}><")
assert dict(read_attribute_dict(stream, Compiler())) == {}
assert stream.text[stream.ptr :] == "><"
# string values
assert dict(self._parse("{'class': 'test'} =Test")) == {"class": "test"}
assert dict(self._parse("{'class': 'test', 'id': 'something'}")) == {"class": "test", "id": "something"}
# integer values
assert dict(self._parse("{'data-number': 0}")) == {"data-number": "0"}
assert dict(self._parse("{'data-number': 12345}")) == {"data-number": "12345"}
# float values
assert dict(self._parse("{'data-number': 123.456}")) == {"data-number": "123.456"}
assert dict(self._parse("{'data-number': 0.001}")) == {"data-number": "0.001"}
# None value
assert dict(self._parse("{'controls': None}")) == {"controls": None}
# boolean attributes
assert dict(self._parse("{disabled, class:'test', data-number : 123,\n foo:\"bar\"}")) == {
"disabled": True,
"class": "test",
"data-number": "123",
"foo": "bar",
}
assert dict(self._parse("{class:'test', data-number : 123,\n foo:\"bar\", \t disabled}")) == {
"disabled": True,
"class": "test",
"data-number": "123",
"foo": "bar",
}
# attribute name has colon
assert dict(self._parse("{'xml:lang': 'en'}")) == {"xml:lang": "en"}
# attribute value has colon or commas
assert dict(self._parse("{'lang': 'en:g'}")) == {"lang": "en:g"}
assert dict(
self._parse(
'{name:"viewport", content:"width:device-width, initial-scale:1, minimum-scale:1, maximum-scale:1"}'
)
) == {"name": "viewport", "content": "width:device-width, initial-scale:1, minimum-scale:1, maximum-scale:1"}
# double quotes
assert dict(self._parse('{"class": "test", "id": "something"}')) == {"class": "test", "id": "something"}
# no quotes for key
assert dict(self._parse("{class: 'test', id: 'something'}")) == {"class": "test", "id": "something"}
# whitespace is ignored
assert dict(self._parse("{ class \t : 'test', data-number: 123 }")) == {
"class": "test",
"data-number": "123",
}
# trailing commas are fine
assert dict(self._parse("{class: 'test', data-number: 123,}")) == {"class": "test", "data-number": "123"}
# attributes split onto multiple lines
assert dict(self._parse("{class: 'test',\n data-number: 123}")) == {"class": "test", "data-number": "123"}
# old style Ruby
assert dict(self._parse("{:class => 'test', :data-number=>123}")) == {"class": "test", "data-number": "123"}
# list attribute values
assert dict(self._parse("{'class': [ 'a', 'b', 'c' ], data-list:[1, 2, 3]}")) == {
"class": ["a", "b", "c"],
"data-list": ["1", "2", "3"],
}
# tuple attribute values
assert dict(self._parse("{:class=>( 'a', 'b', 'c' ), :data-list => (1, 2, 3)}")) == {
"class": ["a", "b", "c"],
"data-list": ["1", "2", "3"],
}
# attribute order is maintained
assert self._parse("{'class': 'test', 'id': 'something', foo: 'bar'}") == OrderedDict(
[("class", "test"), ("id", "something"), ("foo", "bar")]
)
# attribute values can be multi-line Haml
haml = """{
'class':
- if forloop.first
link-first
\x20
- else
- if forloop.last
link-last
'href':
- url 'some_view'
}"""
assert dict(self._parse(haml)) == {
"class": "{% if forloop.first %} link-first {% else %} {% if forloop.last %} link-last {% endif %} {% endif %}", # noqa
"href": "{% url 'some_view' %}",
}
# non-ascii attribute values
assert dict(self._parse("{class: 'test\u1234'}")) == {"class": "test\u1234"}
def test_read_html_style_attribute_dict(self):
# html style dicts
assert dict(self._parse("()><")) == {}
assert dict(self._parse("( )")) == {}
# string values
assert dict(self._parse("(class='test') =Test")) == {"class": "test"}
assert dict(self._parse("(class='test' id='something')")) == {"class": "test", "id": "something"}
# integer values
assert dict(self._parse("(data-number=0)")) == {"data-number": "0"}
assert dict(self._parse("(data-number=12345)")) == {"data-number": "12345"}
# float values
assert dict(self._parse("(data-number=123.456)")) == {"data-number": "123.456"}
assert dict(self._parse("(data-number=0.001)")) == {"data-number": "0.001"}
# None value
assert dict(self._parse("(controls=None)")) == {"controls": None}
# boolean attributes
assert dict(self._parse("(disabled class='test' data-number = 123\n foo=\"bar\")")) == {
"disabled": True,
"class": "test",
"data-number": "123",
"foo": "bar",
}
assert dict(self._parse("(class='test' data-number = 123\n foo=\"bar\" \t disabled)")) == {
"disabled": True,
"class": "test",
"data-number": "123",
"foo": "bar",
}
# attribute name has colon
assert dict(self._parse('(xml:lang="en")')) == {"xml:lang": "en"}
# attribute names with characters found in JS frameworks
assert dict(self._parse('([foo]="a" ?foo$="b")')) == {"[foo]": "a", "?foo$": "b"}
# double quotes
assert dict(self._parse('(class="test" id="something")')) == {"class": "test", "id": "something"}
# list attribute values
assert dict(self._parse("(class=[ 'a', 'b', 'c' ] data-list=[1, 2, 3])")) == {
"class": ["a", "b", "c"],
"data-list": ["1", "2", "3"],
}
# variable attribute values
assert dict(self._parse("(foo=bar)")) == {"foo": "{{ bar }}"}
# attribute values can be multi-line Haml
haml = """(
class=
- if forloop.first
link-first
\x20
- else
- if forloop.last
link-last
href=
- url 'some_view'
)"""
assert dict(self._parse(haml)) == {
"class": "{% if forloop.first %} link-first {% else %} {% if forloop.last %} link-last {% endif %} {% endif %}", # noqa
"href": "{% url 'some_view' %}",
}
def test_empty_attribute_name_raises_error(self):
# empty quoted string in Ruby new style
with self.assertRaisesRegex(ParseException, r'Attribute name can\'t be an empty string. @ "{\'\':" <-'):
self._parse("{'': 'test'}")
# empty old style Ruby attribute
with self.assertRaisesRegex(ParseException, r'Unexpected " ". @ "{: " <-'):
self._parse("{: 'test'}")
# missing (HTML style)
with self.assertRaisesRegex(ParseException, r'Unexpected "=". @ "\(=" <-'):
self._parse("(='test')")
with self.assertRaisesRegex(ParseException, r'Unexpected "=". @ "\(foo=\'bar\' =" <-'):
self._parse("(foo='bar' ='test')")
def test_empty_attribute_value_raises_error(self):
with self.assertRaisesRegex(ParseException, r'Unexpected "}". @ "{:class=>}" <-'):
self._parse("{:class=>}")
with self.assertRaisesRegex(ParseException, r'Unexpected "}". @ "{class:}" <-'):
self._parse("{class:}")
with self.assertRaisesRegex(ParseException, r'Unexpected "\)". @ "\(class=\)" <-'):
self._parse("(class=)")
def test_unterminated_string_raises_error(self):
# on attribute key
with self.assertRaisesRegex(ParseException, r'Unterminated string \(expected \'\). @ "{\'test: 123}" <-'):
self._parse("{'test: 123}")
# on attribute value
with self.assertRaisesRegex(ParseException, r'Unterminated string \(expected "\). @ "{\'test\': "123}" <-'):
self._parse("{'test': \"123}")
def test_duplicate_attributes_raise_error(self):
with self.assertRaisesRegex(
ParseException, r'Duplicate attribute: "class". @ "{class: \'test\', class: \'bar\'}" <-'
): # noqa
self._parse("{class: 'test', class: 'bar'}")
with self.assertRaisesRegex(
ParseException, r'Duplicate attribute: "class". @ "\(class=\'test\' class=\'bar\'\)" <-'
): # noqa
self._parse("(class='test' class='bar')")
def test_mixing_ruby_and_html_syntax_raises_errors(self):
# omit comma in Ruby style dict
with self.assertRaisesRegex(ParseException, r'Expected ",". @ "{class: \'test\' f" <-'):
self._parse("{class: 'test' foo: 'bar'}")
# use = in Ruby style dict
with self.assertRaisesRegex(ParseException, r'Expected ":". @ "{class=" <-'):
self._parse("{class='test'}")
with self.assertRaisesRegex(ParseException, r'Expected "=>". @ "{:class=" <-'):
self._parse("{:class='test'}")
# use colon as assignment for old style Ruby attribute
with self.assertRaisesRegex(ParseException, r'Expected "=>". @ "{:class:" <-'):
self._parse("{:class:'test'}")
# use comma in HTML style dict
with self.assertRaisesRegex(ParseException, r'Unexpected ",". @ "\(class=\'test\'," <-'):
self._parse("(class='test', foo = 'bar')")
# use : for assignment in HTML style dict (will treat as part of attribute name)
with self.assertRaisesRegex(ParseException, r'Unexpected "\'". @ "\(class:\'" <-'):
self._parse("(class:'test')")
# use attribute quotes in HTML style dict
with self.assertRaisesRegex(ParseException, r'Unexpected "\'". @ "\(\'" <-'):
self._parse("('class'='test')")
# use => in HTML style dict
with self.assertRaisesRegex(ParseException, r'Unexpected ">". @ "\(class=>" <-'):
self._parse("(class=>'test')")
# use tuple syntax in HTML style dict
with self.assertRaisesRegex(ParseException, r'Unexpected "\(". @ "\(class=\(" <-'):
self._parse("(class=(1, 2))")
def test_unexpected_eof(self):
with self.assertRaisesRegex(ParseException, r'Unexpected end of input. @ "{:class=>" <-'):
self._parse("{:class=>")
with self.assertRaisesRegex(ParseException, r'Unexpected end of input. @ "{class:" <-'):
self._parse("{class:")
with self.assertRaisesRegex(ParseException, r'Unexpected end of input. @ "\(class=" <-'):
self._parse("(class=")
| mit | 2,449,532,288,532,912,000 | 40.941392 | 132 | 0.515808 | false |
nearlyfreeapps/python-googleadwords | tests/adspygoogle/adwords/traffic_estimator_service_unittest.py | 1 | 4019 | #!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests to cover TrafficEstimator."""
__author__ = '[email protected] (Stan Grinberg)'
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..'))
import unittest
from tests.adspygoogle.adwords import HTTP_PROXY
from tests.adspygoogle.adwords import SERVER_V201109
from tests.adspygoogle.adwords import TEST_VERSION_V201109
from tests.adspygoogle.adwords import VERSION_V201109
from tests.adspygoogle.adwords import client
class TrafficEstimatorServiceTestV201109(unittest.TestCase):
"""Unittest suite for TrafficEstimatorService using v201109."""
SERVER = SERVER_V201109
VERSION = VERSION_V201109
client.debug = False
service = None
def setUp(self):
"""Prepare unittest."""
print self.id()
if not self.__class__.service:
self.__class__.service = client.GetTrafficEstimatorService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
def testKeywordTrafficEstimates(self):
"""Test whether we can estimate keyword traffic."""
selector = {
'campaignEstimateRequests': [{
'adGroupEstimateRequests': [{
'keywordEstimateRequests': [
{
'keyword': {
'xsi_type': 'Keyword',
'matchType': 'BROAD',
'text': 'mars cruise'
},
'maxCpc': {
'xsi_type': 'Money',
'microAmount': '1000000'
}
},
{
'keyword': {
'xsi_type': 'Keyword',
'matchType': 'PHRASE',
'text': 'cheap cruise'
},
'maxCpc': {
'xsi_type': 'Money',
'microAmount': '1000000'
}
},
{
'keyword': {
'xsi_type': 'Keyword',
'matchType': 'EXACT',
'text': 'cruise'
},
'maxCpc': {
'xsi_type': 'Money',
'microAmount': '1000000'
}
}
],
'maxCpc': {
'xsi_type': 'Money',
'microAmount': '1000000'
}
}],
'criteria': [
{
'xsi_type': 'Location',
'id': '2044'
},
{
'xsi_type': 'Language',
'id': '1000'
}
]
}]
}
self.assert_(isinstance(self.__class__.service.Get(selector), tuple))
def makeTestSuiteV201109():
"""Set up test suite using v201109.
Returns:
TestSuite test suite using v201109.
"""
suite = unittest.TestSuite()
suite.addTests(unittest.makeSuite(TrafficEstimatorServiceTestV201109))
return suite
if __name__ == '__main__':
suites = []
if TEST_VERSION_V201109:
suites.append(makeTestSuiteV201109())
if suites:
alltests = unittest.TestSuite(suites)
unittest.main(defaultTest='alltests')
| apache-2.0 | -3,430,418,294,230,301,700 | 30.896825 | 74 | 0.497387 | false |
jccotou/OneCodex-Python | src/v0/common/OneCodexRequest.py | 1 | 1451 | import requests
from v0.config import Configuration
class OneCodexRequest(object):
@classmethod
def get(cls, url, **kwargs):
"""
Issue a get request to the given action
:param url: The One Codex URL to which the GET request will be issued.
:return: The request object to the given One Codex URL.
"""
request = requests.get(url, **cls._get_request_configuration_kwargs(kwargs))
request.raise_for_status()
return request
@classmethod
def _get_request_configuration_kwargs(cls, kwargs=None):
"""
Get a dictionary with normalized requests configuration options for creating requests.
:param kwargs: A set of keyword arguments to be applied over the default configuration
options (may be None, in which case only the defaults will be used).
:return: A dictionary with requests configuration values set.
"""
if not kwargs:
kwargs = dict()
config = dict()
config.update(auth=cls._get_authentication_object())
config.update(kwargs)
return config
@classmethod
def _get_authentication_object(cls):
"""
Get the authentication object to be passed into a request.
:return: A (username, password) tuple to be used for authentication with the One Codex
API (via HTTP basic auth).
"""
return (Configuration.get_api_key(), u"") | gpl-2.0 | -3,258,561,765,096,408,000 | 35.3 | 94 | 0.643694 | false |
sapcc/monasca-agent | monasca_setup/detection/plugins/octavia.py | 3 | 1383 | # (C) Copyright 2016 Hewlett Packard Enterprise Development Company LP
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import monasca_setup.detection
class Octavia(monasca_setup.detection.ServicePlugin):
"""Detect Octavia daemons and setup configuration to monitor them.
"""
def __init__(self, template_dir, overwrite=True, args=None):
service_params = {
'args': args,
'template_dir': template_dir,
'overwrite': overwrite,
'service_name': 'octavia',
'process_names': ['octavia-api', 'octavia-worker',
'octavia-health-manager', 'octavia-housekeeping'],
'service_api_url': 'http://localhost:9876',
'search_pattern': '.*200 OK.*',
}
super(Octavia, self).__init__(service_params)
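
# --- Illustrative usage (not part of the original module) ---
# monasca-setup's detection framework normally instantiates plugins itself;
# the equivalent manual call would look roughly like the line below (the
# template directory path is an assumption):
#
#   plugin = Octavia('/usr/lib/monasca/agent/conf.d', overwrite=True, args=None)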
| bsd-3-clause | -7,596,376,750,140,457,000 | 36.378378 | 80 | 0.647144 | false |
juju/python-libjuju | juju/client/facade.py | 1 | 29940 | import argparse
import builtins
import functools
import json
import keyword
import pprint
import re
import textwrap
import typing
import typing_inspect
from collections import defaultdict
from glob import glob
from pathlib import Path
from typing import Any, Mapping, Sequence, TypeVar
from . import codegen
_marker = object()
JUJU_VERSION = re.compile(r'[0-9]+\.[0-9-]+[\.\-][0-9a-z]+(\.[0-9]+)?')
# Workaround for https://bugs.launchpad.net/juju/+bug/1683906
NAUGHTY_CLASSES = ['ClientFacade', 'Client', 'ModelStatusInfo']
# Map basic types to Python's typing with a callable
SCHEMA_TO_PYTHON = {
'string': str,
'integer': int,
'float': float,
'number': float,
'boolean': bool,
'object': Any,
}
# Friendly warning message to stick at the top of generated files.
HEADER = """\
# DO NOT CHANGE THIS FILE! This file is auto-generated by facade.py.
# Changes will be overwritten/lost when the file is regenerated.
"""
# Classes and helper functions that we'll write to _client.py
LOOKUP_FACADE = '''
def lookup_facade(name, version):
"""
Given a facade name and version, attempt to pull that facade out
of the correct client<version>.py file.
"""
for _version in range(int(version), 0, -1):
try:
facade = getattr(CLIENTS[str(_version)], name)
return facade
except (KeyError, AttributeError):
continue
else:
raise ImportError("No supported version for facade: "
"{}".format(name))
'''
TYPE_FACTORY = '''
class TypeFactory:
@classmethod
def from_connection(cls, connection):
"""
Given a connected Connection object, return an initialized and
connected instance of an API Interface matching the name of
this class.
@param connection: initialized Connection object.
"""
facade_name = cls.__name__
if not facade_name.endswith('Facade'):
raise TypeError('Unexpected class name: {}'.format(facade_name))
facade_name = facade_name[:-len('Facade')]
version = connection.facades.get(facade_name)
if version is None:
raise Exception('No facade {} in facades {}'.format(facade_name,
connection.facades))
c = lookup_facade(cls.__name__, version)
c = c()
c.connect(connection)
return c
@classmethod
def best_facade_version(cls, connection):
"""
Returns the best facade version for a given facade. This will help with
trying to provide different functionality for different facade versions.
@param connection: initialized Connection object.
"""
facade_name = cls.__name__
if not facade_name.endswith('Facade'):
raise TypeError('Unexpected class name: {}'.format(facade_name))
facade_name = facade_name[:-len('Facade')]
return connection.facades.get(facade_name)
'''
CLIENT_TABLE = '''
CLIENTS = {{
{clients}
}}
'''
class KindRegistry(dict):
def register(self, name, version, obj):
self[name] = {version: {
"object": obj,
}}
def lookup(self, name, version=None):
"""If version is omitted, max version is used"""
versions = self.get(name)
if not versions:
return None
if version:
return versions[version]
return versions[max(versions)]
def getObj(self, name, version=None):
result = self.lookup(name, version)
if result:
obj = result["object"]
return obj
return None
class TypeRegistry(dict):
def __init__(self, schema):
self.schema = schema
def get(self, name):
# Two way mapping
refname = self.schema.referenceName(name)
if refname not in self:
result = TypeVar(refname)
self[refname] = result
self[result] = refname
return self[refname]
def getRefType(self, ref):
return self.get(ref)
def objType(self, obj):
kind = obj.get('type')
if not kind:
raise ValueError("%s has no type" % obj)
result = SCHEMA_TO_PYTHON.get(kind)
if not result:
raise ValueError("%s has type %s" % (obj, kind))
return result
def refType(self, obj):
return self.getRefType(obj["$ref"])
CLASSES = {}
factories = codegen.Capture()
def booler(v):
if isinstance(v, str):
if v == "false":
return False
return bool(v)
basic_types = [str, bool, int, float]
type_mapping = {
'str': '(bytes, str)',
'Sequence': '(bytes, str, list)',
'Union': 'dict',
'Mapping': 'dict',
}
def name_to_py(name):
result = name.replace("-", "_")
result = result.lower()
if keyword.iskeyword(result) or result in dir(builtins):
result += "_"
return result
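
# A few concrete mappings produced by name_to_py, asserted rather than merely
# documented so a regression is caught if the rules above change (this helper
# is illustrative and not part of the original module).
def _name_to_py_examples():
    assert name_to_py('life-cycle') == 'life_cycle'
    assert name_to_py('class') == 'class_'   # Python keyword gains a trailing "_"
    assert name_to_py('id') == 'id_'         # builtin name gains a trailing "_"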
def var_type_to_py(kind):
return 'None'
def kind_to_py(kind):
if kind is None or kind is typing.Any:
return 'None', '', False
name = ""
if typing_inspect.is_generic_type(kind):
origin = typing_inspect.get_origin(kind)
name = origin.__name__
else:
name = kind.__name__
if (kind in basic_types or type(kind) in basic_types):
return name, type_mapping.get(name) or name, True
if (name in type_mapping):
return name, type_mapping[name], True
suffix = name.lstrip("~")
return suffix, "(dict, {})".format(suffix), True
def strcast(kind, keep_builtins=False):
if (kind in basic_types or
type(kind) in basic_types) and keep_builtins is False:
return kind.__name__
if str(kind).startswith('~'):
return str(kind)[1:]
if kind is typing.Any:
return 'Any'
try:
if issubclass(kind, typing.GenericMeta):
return str(kind)[1:]
except AttributeError:
pass
return kind
class Args(list):
def __init__(self, schema, defs):
self.schema = schema
self.defs = defs
if defs:
rtypes = schema.registry.getObj(schema.types[defs])
if len(rtypes) == 1:
if not self.do_explode(rtypes[0][1]):
for name, rtype in rtypes:
self.append((name, rtype))
else:
for name, rtype in rtypes:
self.append((name, rtype))
def do_explode(self, kind):
if kind in basic_types or type(kind) is typing.TypeVar:
return False
if typing_inspect.is_generic_type(kind) and issubclass(typing_inspect.get_origin(kind), Sequence):
return False
if typing_inspect.is_generic_type(kind) and issubclass(typing_inspect.get_origin(kind), Mapping):
return False
self.clear()
self.extend(Args(self.schema, kind))
return True
def PyToSchemaMapping(self):
m = {}
for n, rt in self:
m[name_to_py(n)] = n
return m
def SchemaToPyMapping(self):
m = {}
for n, tr in self:
m[n] = name_to_py(n)
return m
def _format(self, name, rtype, typed=True):
if typed:
return "{} : {}".format(
name_to_py(name),
strcast(rtype)
)
else:
return name_to_py(name)
def _get_arg_str(self, typed=False, joined=", "):
if self:
parts = []
for item in self:
parts.append(self._format(item[0], item[1], typed))
if joined:
return joined.join(parts)
return parts
return ''
def as_kwargs(self):
if self:
parts = []
for item in self:
var_name = name_to_py(item[0])
var_type = var_type_to_py(item[1])
parts.append('{}={}'.format(var_name, var_type))
return ', '.join(parts)
return ''
def as_validation(self):
"""
as_validation returns a series of validation statements for every item
in the Args.
"""
parts = []
for item in self:
var_name = name_to_py(item[0])
var_type, var_sub_type, ok = kind_to_py(item[1])
if ok:
parts.append(buildValidation(var_name, var_type, var_sub_type))
return '\n'.join(parts)
def typed(self):
return self._get_arg_str(True)
def __str__(self):
return self._get_arg_str(False)
def get_doc(self):
return self._get_arg_str(True, "\n")
def buildValidation(name, instance_type, instance_sub_type, ident=None):
INDENT = ident or " "
source = """{ident}if {name} is not None and not isinstance({name}, {instance_sub_type}):
{ident} raise Exception("Expected {name} to be a {instance_type}, received: {{}}".format(type({name})))
""".format(ident=INDENT,
name=name,
instance_type=instance_type,
instance_sub_type=instance_sub_type)
return source
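
# --- Illustrative output (not part of the original module) ---
# For a hypothetical string-typed argument named "tag_", buildValidation emits
# roughly the guard below (using the default indent):
#
#   if tag_ is not None and not isinstance(tag_, (bytes, str)):
#       raise Exception("Expected tag_ to be a str, received: {}".format(type(tag_)))
def _show_validation_snippet():
    return buildValidation('tag_', 'str', '(bytes, str)')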
def buildTypes(schema, capture):
INDENT = " "
for kind in sorted((k for k in schema.types if not isinstance(k, str)),
key=lambda x: str(x)):
name = schema.types[kind]
if name in capture and name not in NAUGHTY_CLASSES:
continue
args = Args(schema, kind)
# Write Factory class for _client.py
make_factory(name)
# Write actual class
source = ["""
class {}(Type):
_toSchema = {}
_toPy = {}
def __init__(self{}{}, **unknown_fields):
'''
{}
'''""".format(
name,
# pprint these to get stable ordering across regens
pprint.pformat(args.PyToSchemaMapping(), width=999),
pprint.pformat(args.SchemaToPyMapping(), width=999),
", " if args else "",
args.as_kwargs(),
textwrap.indent(args.get_doc(), INDENT * 2))]
if not args:
source.append("{}self.unknown_fields = unknown_fields".format(INDENT * 2))
else:
# do the validation first, before setting the variables
for arg in args:
arg_name = name_to_py(arg[0])
arg_type = arg[1]
arg_type_name = strcast(arg_type)
if arg_type in basic_types or arg_type is typing.Any:
source.append("{}{}_ = {}".format(INDENT * 2,
arg_name,
arg_name))
elif type(arg_type) is typing.TypeVar:
source.append("{}{}_ = {}.from_json({}) "
"if {} else None".format(INDENT * 2,
arg_name,
arg_type_name,
arg_name,
arg_name))
elif typing_inspect.is_generic_type(arg_type) and issubclass(typing_inspect.get_origin(arg_type), Sequence):
parameters = typing_inspect.get_parameters(arg_type)
value_type = (
parameters[0]
if len(parameters)
else None
)
if type(value_type) is typing.TypeVar:
source.append(
"{}{}_ = [{}.from_json(o) "
"for o in {} or []]".format(INDENT * 2,
arg_name,
strcast(value_type),
arg_name))
else:
source.append("{}{}_ = {}".format(INDENT * 2,
arg_name,
arg_name))
elif typing_inspect.is_generic_type(arg_type) and issubclass(typing_inspect.get_origin(arg_type), Mapping):
parameters = typing_inspect.get_parameters(arg_type)
value_type = (
parameters[0]
if len(parameters)
else None
)
if type(value_type) is typing.TypeVar:
source.append(
"{}{}_ = {{k: {}.from_json(v) "
"for k, v in ({} or dict()).items()}}".format(
INDENT * 2,
arg_name,
strcast(value_type),
arg_name))
else:
source.append("{}{}_ = {}".format(INDENT * 2,
arg_name,
arg_name))
else:
source.append("{}{}_ = {}".format(INDENT * 2,
arg_name,
arg_name))
if len(args) > 0:
source.append('\n{}# Validate arguments against known Juju API types.'.format(INDENT * 2))
for arg in args:
arg_name = "{}_".format(name_to_py(arg[0]))
arg_type, arg_sub_type, ok = kind_to_py(arg[1])
if ok:
source.append('{}'.format(buildValidation(arg_name,
arg_type,
arg_sub_type,
ident=INDENT * 2)))
for arg in args:
arg_name = name_to_py(arg[0])
source.append('{}self.{} = {}_'.format(INDENT * 2, arg_name, arg_name))
# Ensure that we take the kwargs (unknown_fields) and put it on the
# Results/Params so we can inspect it.
source.append("{}self.unknown_fields = unknown_fields".format(INDENT * 2))
source = "\n".join(source)
capture.clear(name)
capture[name].write(source)
capture[name].write("\n\n")
co = compile(source, __name__, "exec")
ns = _getns(schema)
exec(co, ns)
cls = ns[name]
CLASSES[name] = cls
def retspec(schema, defs):
# return specs
# only return 1, so if there is more than one type
# we need to include a union
# In truth there is only 1 return
# Error or the expected Type
if not defs:
return None
if defs in basic_types:
return strcast(defs, False)
return strcast(defs, False)
def ReturnMapping(cls):
# Annotate the method with a return Type
# so the value can be cast
def decorator(f):
@functools.wraps(f)
async def wrapper(*args, **kwargs):
nonlocal cls
reply = await f(*args, **kwargs)
if cls is None:
return reply
if 'error' in reply:
cls = CLASSES['Error']
if typing_inspect.is_generic_type(cls) and issubclass(typing_inspect.get_origin(cls), Sequence):
parameters = typing_inspect.get_parameters(cls)
result = []
item_cls = parameters[0]
for item in reply:
result.append(item_cls.from_json(item))
"""
if 'error' in item:
cls = CLASSES['Error']
else:
cls = item_cls
result.append(cls.from_json(item))
"""
else:
result = cls.from_json(reply['response'])
return result
return wrapper
return decorator
def makeFunc(cls, name, description, params, result, _async=True):
INDENT = " "
args = Args(cls.schema, params)
assignments = []
toschema = args.PyToSchemaMapping()
for arg in args._get_arg_str(False, False):
assignments.append("{}_params[\'{}\'] = {}".format(INDENT,
toschema[arg],
arg))
assignments = "\n".join(assignments)
res = retspec(cls.schema, result)
source = """
@ReturnMapping({rettype})
{_async}def {name}(self{argsep}{args}):
'''
{docstring}
Returns -> {res}
'''
{validation}
# map input types to rpc msg
_params = dict()
msg = dict(type='{cls.name}',
request='{name}',
version={cls.version},
params=_params)
{assignments}
reply = {_await}self.rpc(msg)
return reply
"""
if description != "":
description = "{}\n\n".format(description)
doc_string = "{}{}".format(description, args.get_doc())
fsource = source.format(_async="async " if _async else "",
name=name,
argsep=", " if args else "",
args=args.as_kwargs(),
res=res,
validation=args.as_validation(),
rettype=result.__name__ if result else None,
docstring=textwrap.indent(doc_string, INDENT),
cls=cls,
assignments=assignments,
_await="await " if _async else "")
ns = _getns(cls.schema)
exec(fsource, ns)
func = ns[name]
return func, fsource
def buildMethods(cls, capture):
properties = cls.schema['properties']
for methodname in sorted(properties):
method, source = _buildMethod(cls, methodname)
setattr(cls, methodname, method)
capture["{}Facade".format(cls.__name__)].write(source, depth=1)
def _buildMethod(cls, name):
params = None
result = None
method = cls.schema['properties'][name]
description = ""
if 'description' in method:
description = method['description']
if 'properties' in method:
prop = method['properties']
spec = prop.get('Params')
if spec:
params = cls.schema.types.get(spec['$ref'])
spec = prop.get('Result')
if spec:
if '$ref' in spec:
result = cls.schema.types.get(spec['$ref'])
else:
result = SCHEMA_TO_PYTHON[spec['type']]
return makeFunc(cls, name, description, params, result)
def buildFacade(schema):
cls = type(schema.name, (Type,), dict(name=schema.name,
version=schema.version,
schema=schema))
source = """
class {name}Facade(Type):
name = '{name}'
version = {version}
schema = {schema}
""".format(name=schema.name,
version=schema.version,
schema=textwrap.indent(pprint.pformat(schema), " "))
return cls, source
class TypeEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Type):
return obj.serialize()
return json.JSONEncoder.default(self, obj)
class Type:
def connect(self, connection):
self.connection = connection
def __repr__(self):
return "{}({})".format(self.__class__, self.__dict__)
def __eq__(self, other):
if not isinstance(other, Type):
return NotImplemented
return self.__dict__ == other.__dict__
async def rpc(self, msg):
result = await self.connection.rpc(msg, encoder=TypeEncoder)
return result
@classmethod
def from_json(cls, data):
if isinstance(data, cls):
return data
if isinstance(data, str):
try:
data = json.loads(data)
except json.JSONDecodeError:
raise
d = {}
for k, v in (data or {}).items():
d[cls._toPy.get(k, k)] = v
try:
return cls(**d)
except TypeError:
raise
def serialize(self):
d = {}
for attr, tgt in self._toSchema.items():
d[tgt] = getattr(self, attr)
return d
def to_json(self):
return json.dumps(self.serialize(), cls=TypeEncoder, sort_keys=True)
# treat subscript gets as JSON representation
def __getitem__(self, key):
attr = self._toPy[key]
return getattr(self, attr)
# treat subscript sets as JSON representation
def __setitem__(self, key, value):
attr = self._toPy[key]
setattr(self, attr, value)
# legacy: generated definitions used to not correctly
# create typed objects and would use dict instead (from JSON)
# so we emulate some dict methods.
def get(self, key, default=None):
try:
attr = self._toPy[key]
except KeyError:
return default
return getattr(self, attr, default)
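
# --- Illustrative round trip (not part of the original module) ---
# Classes emitted by buildTypes all follow this pattern: _toSchema/_toPy map
# wire names to Python attribute names, and from_json/to_json round-trip
# between the two representations. A tiny hand-written example:
class _ExampleEntity(Type):
    _toSchema = {'entity_tag': 'entity-tag'}
    _toPy = {'entity-tag': 'entity_tag'}

    def __init__(self, entity_tag=None, **unknown_fields):
        self.entity_tag = entity_tag
        self.unknown_fields = unknown_fields

# _ExampleEntity.from_json({'entity-tag': 'machine-0'}).to_json()
#   -> '{"entity-tag": "machine-0"}'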
class Schema(dict):
def __init__(self, schema):
self.name = schema['Name']
self.version = schema['Version']
self.update(schema['Schema'])
self.registry = KindRegistry()
self.types = TypeRegistry(self)
def referenceName(self, ref):
if ref.startswith("#/definitions/"):
ref = ref.rsplit("/", 1)[-1]
return ref
def buildDefinitions(self):
# here we are building the types out
# anything in definitions is a type
# but these may contain references themselves
# so we dfs to the bottom and build upwards
# skipping any type that is already in the registry
defs = self.get('definitions')
if not defs:
return
definitions = {}
for d, data in defs.items():
if d in self.registry and d not in NAUGHTY_CLASSES:
continue
if data.get("type") != "object":
continue
definitions[d] = data
for d, definition in definitions.items():
node = self.buildObject(definition, d)
self.registry.register(d, self.version, node)
self.types.getRefType(d)
def buildObject(self, node, name=None):
# we don't need to build types recursively here
# they are all in definitions already
# we only want to include the type reference
# which we can derive from the name
struct = []
add = struct.append
props = node.get("properties")
pprops = node.get("patternProperties")
if props:
# Sort these so the __init__ arg list for each Type remains
# consistently ordered across regens of client.py
for p in sorted(props):
prop = props[p]
if "$ref" in prop:
add((p, self.types.refType(prop)))
else:
kind = prop['type']
if kind == "array":
add((p, self.buildArray(prop)))
elif kind == "object":
struct.extend(self.buildObject(prop, p))
else:
add((p, self.types.objType(prop)))
if pprops:
if ".*" not in pprops:
raise ValueError(
"Cannot handle actual pattern in patternProperties %s" %
pprops)
pprop = pprops[".*"]
if "$ref" in pprop:
add((name, Mapping[str, self.types.refType(pprop)]))
return struct
ppkind = pprop["type"]
if ppkind == "array":
add((name, Mapping[str, self.buildArray(pprop)]))
else:
add((name, Mapping[str, SCHEMA_TO_PYTHON[ppkind]]))
if not struct and node.get('additionalProperties', False):
add((name, SCHEMA_TO_PYTHON.get('object')))
return struct
def buildArray(self, obj):
# return a sequence from an array in the schema
if "$ref" in obj:
return Sequence[self.types.refType(obj)]
else:
kind = obj.get("type")
if kind and kind == "array":
items = obj['items']
return self.buildArray(items)
else:
return Sequence[self.types.objType(obj)]
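
# --- Illustrative usage (not part of the original module) ---
# Feeding a minimal Juju-style schema document through Schema.buildDefinitions;
# the facade name and its single definition are invented for the example.
def _demo_schema():
    schema = Schema({
        'Name': 'Pinger',
        'Version': 1,
        'Schema': {
            'type': 'object',
            'properties': {'Ping': {'type': 'object'}},
            'definitions': {
                'Error': {
                    'type': 'object',
                    'properties': {'message': {'type': 'string'}},
                },
            },
        },
    })
    schema.buildDefinitions()
    assert schema.registry.getObj('Error') == [('message', str)]
    return schema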
def _getns(schema):
ns = {'Type': Type,
'typing': typing,
'ReturnMapping': ReturnMapping
}
# Copy our types into the globals of the method
for facade in schema.registry:
ns[facade] = schema.registry.getObj(facade)
return ns
def make_factory(name):
if name in factories:
del factories[name]
factories[name].write("class {}(TypeFactory):\n pass\n\n".format(name))
def write_facades(captures, options):
"""
Write the Facades to the appropriate _client<version>.py
"""
for version in sorted(captures.keys()):
filename = "{}/_client{}.py".format(options.output_dir, version)
with open(filename, "w") as f:
f.write(HEADER)
f.write("from juju.client.facade import Type, ReturnMapping\n")
f.write("from juju.client._definitions import *\n\n")
for key in sorted(
[k for k in captures[version].keys() if "Facade" in k]):
print(captures[version][key], file=f)
# Return the last (most recent) version for use in other routines.
return version
def write_definitions(captures, options):
"""
Write auxiliary (non versioned) classes to
_definitions.py. The auxiliary classes currently get
written redundantly into each capture object, so we can look in
one of them -- we just use the last one from the loop above.
"""
with open("{}/_definitions.py".format(options.output_dir), "w") as f:
f.write(HEADER)
f.write("from juju.client.facade import Type, ReturnMapping\n\n")
for key in sorted(
[k for k in captures.keys() if "Facade" not in k]):
print(captures[key], file=f)
def write_client(captures, options):
"""
Write the TypeFactory classes to _client.py, along with some
imports and tables so that we can look up versioned Facades.
"""
with open("{}/_client.py".format(options.output_dir), "w") as f:
f.write(HEADER)
f.write("from juju.client._definitions import *\n\n")
clients = ", ".join("_client{}".format(v) for v in captures)
f.write("from juju.client import " + clients + "\n\n")
f.write(CLIENT_TABLE.format(clients=",\n ".join(
['"{}": _client{}'.format(v, v) for v in captures])))
f.write(LOOKUP_FACADE)
f.write(TYPE_FACTORY)
for key in sorted([k for k in factories.keys() if "Facade" in k]):
print(factories[key], file=f)
def generate_definitions(schemas):
# Build all of the auxiliary (unversioned) classes
# TODO: get rid of some of the excess trips through loops in the
# called functions.
definitions = codegen.Capture()
for juju_version in sorted(schemas.keys()):
for schema in schemas[juju_version]:
schema.buildDefinitions()
# ensure we write the latest ones first, so that earlier revisions
# get dropped.
for juju_version in sorted(schemas.keys(), reverse=True):
for schema in schemas[juju_version]:
buildTypes(schema, definitions)
return definitions
def generate_facades(schemas):
captures = defaultdict(codegen.Capture)
# Build the Facade classes
for juju_version in sorted(schemas.keys()):
for schema in schemas[juju_version]:
cls, source = buildFacade(schema)
cls_name = "{}Facade".format(schema.name)
captures[schema.version].clear(cls_name)
# Make the factory class for _client.py
make_factory(cls_name)
# Make the actual class
captures[schema.version][cls_name].write(source)
# Build the methods for each Facade class.
buildMethods(cls, captures[schema.version])
# Mark this Facade class as being done for this version --
# helps mitigate some excessive looping.
CLASSES[schema.name] = cls
return captures
def load_schemas(options):
schemas = {}
for p in sorted(glob(options.schema)):
if 'latest' in p:
juju_version = 'latest'
else:
try:
juju_version = re.search(JUJU_VERSION, p).group()
except AttributeError:
print("Cannot extract a juju version from {}".format(p))
print("Schemas must include a juju version in the filename")
raise SystemExit(1)
new_schemas = json.loads(Path(p).read_text("utf-8"))
schemas[juju_version] = [Schema(s) for s in new_schemas]
return schemas
def setup():
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--schema", default="juju/client/schemas*")
parser.add_argument("-o", "--output_dir", default="juju/client")
options = parser.parse_args()
return options
def main():
options = setup()
schemas = load_schemas(options)
# Generate some text blobs
definitions = generate_definitions(schemas)
captures = generate_facades(schemas)
# ... and write them out
write_definitions(definitions, options)
write_facades(captures, options)
write_client(captures, options)
if __name__ == '__main__':
main()
| apache-2.0 | -4,172,417,565,822,012,000 | 31.472885 | 124 | 0.52682 | false |
artursmet/django-prices-openexchangerates | setup.py | 1 | 1578 | #! /usr/bin/env python
import os
from setuptools import setup
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_settings')
CLASSIFIERS = [
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules']
setup(
name='django-prices-openexchangerates',
author='Mirumee Software',
author_email='[email protected]',
description='openexchangerates.org support for django-prices',
license='BSD',
version='0.1.11',
url='https://github.com/mirumee/django-prices-openexchanerates',
packages=[
'django_prices_openexchangerates',
'django_prices_openexchangerates.management',
'django_prices_openexchangerates.management.commands',
'django_prices_openexchangerates.migrations',
'django_prices_openexchangerates.templatetags'],
include_package_data=True,
classifiers=CLASSIFIERS,
install_requires=['Django>=1.4', 'django-prices>=0.3.4', 'prices>=0.5.2'],
platforms=['any'],
tests_require=['mock==1.0.1'],
test_suite='django_prices_openexchangerates.tests',
zip_safe=False)
| bsd-3-clause | 7,590,473,101,946,400,000 | 36.571429 | 78 | 0.676806 | false |
Yangqing/caffe2 | caffe2/python/layers/sparse_lookup.py | 1 | 13787 | # Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
## @package sparse_lookup
# Module caffe2.python.layers.sparse_lookup
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python.helpers.arg_scope import get_current_scope
from caffe2.python import schema
from caffe2.python.layers.layers import (
get_categorical_limit,
get_key,
IdList,
IdScoreList,
LayerPsParam,
ModelLayer,
)
import collections
import functools
import math
import numpy as np
import operator
def get_sparse_lookup_predictor_version(version):
assert version in {'fp32', 'fp16', 'uint8rowwise', 'fused_uint8rowwise'},\
"Unexpected version of sparse_lookup layer {0}".format(version)
return version
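
# --- Illustrative behaviour (not part of the original module) ---
# The helper only validates the precision tag and echoes it back, e.g.
#   get_sparse_lookup_predictor_version('fp16')  -> 'fp16'
#   get_sparse_lookup_predictor_version('fp64')  -> AssertionError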
def _is_id_list(input_record):
return schema.equal_schemas(input_record, IdList)
def _is_id_score_list(input_record):
return schema.equal_schemas(input_record,
IdScoreList,
check_field_types=False)
class SparseLookup(ModelLayer):
_id_list_supported_reducers = [
'LogMeanExp', 'LogSumExp', 'Max', 'Mean', 'Sum',
'WeightedSum', 'WeightedMean', 'Sqrt', 'None']
_id_score_list_supported_reducers = [
'PositionWeighted', 'Mean', 'Sum', 'WeightedSum', 'WeightedMean', 'None']
def __init__(self, model, input_record, inner_shape, reducer,
weight_init=None, weight_optim=None,
name='sparse_lookup', regularizer=None, **kwargs):
super(SparseLookup, self).__init__(model, name, input_record, **kwargs)
# TODO Add some asserts about input type
if isinstance(inner_shape, int):
inner_shape = [inner_shape]
assert isinstance(inner_shape, list) or isinstance(inner_shape, tuple),\
"Unexpected type for inner_shape, expected list or tuple, got {0}".\
format(type(inner_shape))
if reducer == "PositionWeighted":
            assert _is_id_score_list(self.input_record), (
                "PositionWeighted only supports IdScoreList, but got {}; " +
"please use PositionWeighted layer to convert IdList " +
"to IdScoreList").format(repr(self.input_record))
self.external_weights = input_record.values()
self.reducer = reducer
input_dim = get_categorical_limit(input_record)
assert input_dim > 0, (
"{} should have categorical limit > 0, but got {}".format(
get_key(input_record)(), input_dim))
scale = math.sqrt(1.0 / input_dim)
self.shape = [input_dim] + inner_shape
self.weight_init = weight_init if weight_init else (
'UniformFill', {'min': -scale, 'max': scale})
if _is_id_list(self.input_record):
sparse_key = self.input_record.items()
elif _is_id_score_list(self.input_record):
sparse_key = self.input_record.keys()
else:
raise NotImplementedError()
if self.input_record.lengths.metadata:
avg_length = self.input_record.lengths.metadata.expected_value
else:
avg_length = None
self.w = self.create_param(
param_name='w',
shape=self.shape,
initializer=self.weight_init,
optimizer=weight_optim,
ps_param=LayerPsParam(
sparse_key=sparse_key,
average_length=avg_length),
regularizer=regularizer
)
self.scale_bias_init = ('ConstantFill', {'value': 0.0})
self.scale_bias = self.create_param(
param_name='scale_bias',
shape=[],
initializer=self.scale_bias_init,
optimizer=model.NoOptim,
)
self.output_schema = schema.Scalar(
(np.float32, inner_shape),
self.get_next_blob_reference('output'),
)
def get_memory_usage(self):
return functools.reduce(operator.mul, self.shape) * 4
def get_fp16_compatible_parameters(self):
return [self.w]
def support_8bit(self):
        # Rowwise quantization makes sense only if the shape is a 2D matrix with
        # second dimension >= 8
if len(self.shape) != 2 or self.shape[1] < 8:
return False
return True
def get_8bits_compatible_parameters(self, fused=True):
if not self.support_8bit():
return []
if fused:
RowwiseQuantized8BitsWeight = collections.namedtuple(
'RowwiseQuantized8BitsWeight', 'w'
)
return [RowwiseQuantized8BitsWeight(self.w)]
else:
RowwiseQuantized8BitsWeight = collections.namedtuple(
'RowwiseQuantized8BitsWeight', 'w, scale_bias'
)
return [RowwiseQuantized8BitsWeight(self.w, self.scale_bias)]
def _gather_wrapper(self, net, version, in_indices, out):
# Gather can work on all kinds of input data types, and output
# data with the same type. Convert the output of Gather to float,
# because the follow-up Ops expect fp32.
if version == 'fp32':
return net.Gather([self.w, in_indices], out)
elif version == 'fp16':
gathered_w = net.Gather([self.w, in_indices], 'gathered_w')
return net.HalfToFloat(gathered_w, out)
elif version == 'uint8rowwise':
gathered_w = net.Gather([self.w, in_indices], 'gathered_w')
gathered_scale_bias = net.Gather(
[self.scale_bias, in_indices],
'gathered_scale_bias'
)
return net.Rowwise8BitQuantizedToFloat(
[gathered_w, gathered_scale_bias], out)
elif version == 'fused_uint8rowwise':
gathered_w = net.Gather([self.w, in_indices], 'gathered_w')
return net.Fused8BitRowwiseQuantizedToFloat(gathered_w, out)
else:
            raise ValueError(
                "Unsupported version of operators in SparseLookup "
                "layer: {0}".format(version))
def _sparse_lengths_weighted_reducer(
self, in_indices, weights, reducer,
net, version, grad_on_weights=0):
op_input = [
self.w,
weights,
in_indices,
self.input_record.lengths()
]
layer_name = 'SparseLengths' + reducer
if version in ['fp32', 'fp16']:
# SparseLengths* Ops will accept either fp16 or fp32 embedding
# matrix and output fp32 pooled embedding
net.__getattr__(layer_name)(
op_input,
self.output_schema.field_blobs(),
grad_on_weights=grad_on_weights,
)
elif version == 'uint8rowwise':
op_input.insert(len(op_input), self.scale_bias)
net.__getattr__(layer_name + '8BitsRowwise')(
op_input, self.output_schema.field_blobs())
elif version == 'fused_uint8rowwise':
net.__getattr__(layer_name + 'Fused8BitRowwise')(
op_input, self.output_schema.field_blobs())
else:
            raise ValueError(
                "Unsupported version of operator in SparseLookUp "
                "layer: {0}".format(version))
# deal with sparse features of id_list type
def _add_ops_id_list(self, net, version):
assert self.reducer in self._id_list_supported_reducers, (
"Unsupported reducer: {} for ID_LIST".format(self.reducer)
)
if self.reducer in ['Sum', 'Mean', 'WeightedSum', 'WeightedMean']:
op_input = [self.w,
self.input_record.items(),
self.input_record.lengths()]
# For id list features, the behaviors of 'Sum' and
# 'WeightedSum' are identical, since we can regard the weight on each
# id as 1. Similarly, for 'Mean' and 'WeightedMean'.
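            # Illustration (added note, not part of the original comment): for ids
            # [3, 7] with implicit weights [1.0, 1.0], WeightedSum would compute
            # 1.0*w[3] + 1.0*w[7], which equals Sum's w[3] + w[7], so the reducer
            # is simply rewritten below.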
if self.reducer == 'WeightedSum':
self.reducer = 'Sum'
elif self.reducer == 'WeightedMean':
self.reducer = 'Mean'
layer_name = 'SparseLengths' + self.reducer
if version in ['fp32', 'fp16']:
# SparseLengths* Ops will accept either fp16 or fp32 embedding
# matrix and output fp32 pooled embedding
net.__getattr__(layer_name)(
op_input,
self.output_schema.field_blobs(),
)
elif version == 'uint8rowwise':
op_input.insert(len(op_input), self.scale_bias)
net.__getattr__(layer_name + '8BitsRowwise')(
op_input, self.output_schema.field_blobs())
elif version == 'fused_uint8rowwise':
net.__getattr__(layer_name + 'Fused8BitRowwise')(
op_input, self.output_schema.field_blobs())
else:
                raise ValueError(
                    "Unsupported version of operator in SparseLookUp "
                    "layer: {0}".format(version))
elif self.reducer == 'Sqrt':
sqrt_weight = net.LengthsToWeights(
[self.input_record.lengths()],
[net.NextScopedBlob('lengths_sqrt')],
power=0.5,
)
self._sparse_lengths_weighted_reducer(
self.input_record.items(),
sqrt_weight,
'WeightedSum', net, version)
elif self.reducer == 'None':
# Gather operator will gather the embedding for each id of
# each IdList.
self._gather_wrapper(net, version, self.input_record.items(),
self.output_schema.field_blobs())
else:
table_rows = self._gather_wrapper(
net, version, self.input_record.items(), 'table_rows')
segment_ids = net.LengthsToSegmentIds(
self.input_record.lengths(),
self.input_record.lengths() + '_sid')
net.__getattr__('SortedSegmentRange' + self.reducer)(
[table_rows, segment_ids],
self.output_schema.field_blobs(),
)
# deal with sparse features of id_score_list type
def _add_ops_id_score_list(self, net, version):
assert self.reducer in self._id_score_list_supported_reducers, (
"Unsupported reducer: {} for ID_SCORE_LIST".format(self.reducer)
)
if self.reducer in ['WeightedSum', 'WeightedMean']:
self._sparse_lengths_weighted_reducer(
self.input_record.keys(),
self.input_record.values(),
self.reducer, net, version)
elif self.reducer in ['Sum', 'Mean']:
op_input = [self.w,
self.input_record.keys(),
self.input_record.lengths()]
layer_name = 'SparseLengths' + self.reducer
if version in ['fp32', 'fp16']:
net.__getattr__(layer_name)(
op_input,
self.output_schema.field_blobs(),
)
elif version == 'uint8rowwise':
net.__getattr__(layer_name + '8BitsRowwise')(
op_input, self.output_schema.field_blobs())
elif version == 'fused_uint8rowwise':
net.__getattr__(layer_name + 'Fused8BitRowwise')(
op_input, self.output_schema.field_blobs())
else:
                raise ValueError(
                    "Unsupported version of operator in SparseLookUp "
                    "layer: {0}".format(version))
elif self.reducer == 'PositionWeighted':
self._sparse_lengths_weighted_reducer(
self.input_record.keys(),
self.external_weights,
'WeightedSum', net, version, grad_on_weights=1)
elif self.reducer == 'None':
# Gather operator will gather the embedding for each id of
# each IdList.
self._gather_wrapper(net, version, self.input_record.keys(),
self.output_schema.field_blobs())
else:
            raise ValueError(
                "Only Sum, Mean, None are supported for IdScoreList input. "
                "Trying to create with {}".format(self.reducer))
def add_ops(self, net):
cur_scope = get_current_scope()
version = get_sparse_lookup_predictor_version(
**cur_scope.get(get_sparse_lookup_predictor_version.__name__,
{'version': 'fp32'}))
# TODO(amalevich): Layer should not be responsible for decision about
# quantization.
if not self.support_8bit() and version in {'uint8rowwise',
'fused_uint8rowwise'}:
version = 'fp32'
if _is_id_list(self.input_record):
self._add_ops_id_list(net, version=version)
elif _is_id_score_list(self.input_record):
self._add_ops_id_score_list(net, version=version)
else:
            raise ValueError("Unsupported input type {0}".format(self.input_record))
| apache-2.0 | 2,902,705,643,870,155,000 | 38.846821 | 81 | 0.565895 | false |
odahoda/noisicaa | noisicaa/builtin_nodes/instrument/model.py | 1 | 2817 | #!/usr/bin/python3
# @begin:license
#
# Copyright (c) 2015-2019, Benjamin Niemann <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @end:license
import logging
from typing import Any, Optional, Callable
from noisicaa import core
from noisicaa import audioproc
from noisicaa import node_db
from noisicaa import instrument_db
from noisicaa.music import node_connector
from . import node_description
from . import processor_messages
from . import _model
logger = logging.getLogger(__name__)
class Connector(node_connector.NodeConnector):
_node = None # type: Instrument
def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
self.__node_id = self._node.pipeline_node_id
self.__listeners = core.ListenerMap[str]()
self.add_cleanup_function(self.__listeners.cleanup)
def _init_internal(self) -> None:
self.__change_instrument(self._node.instrument_uri)
self.__listeners['instrument_uri'] = self._node.instrument_uri_changed.add(
lambda change: self.__change_instrument(change.new_value))
def __change_instrument(self, instrument_uri: str) -> None:
try:
instrument_spec = instrument_db.create_instrument_spec(instrument_uri)
except instrument_db.InvalidInstrumentURI as exc:
logger.error("Invalid instrument URI '%s': %s", instrument_uri, exc)
return
self._emit_message(processor_messages.change_instrument(
self.__node_id, instrument_spec))
class Instrument(_model.Instrument):
def create(self, *, instrument_uri: Optional[str] = None, **kwargs: Any) -> None:
super().create(**kwargs)
self.instrument_uri = instrument_uri
def create_node_connector(
self, message_cb: Callable[[audioproc.ProcessorMessage], None],
audioproc_client: audioproc.AbstractAudioProcClient,
) -> Connector:
return Connector(
node=self, message_cb=message_cb, audioproc_client=audioproc_client)
@property
def description(self) -> node_db.NodeDescription:
return node_description.InstrumentDescription
| gpl-2.0 | 1,434,338,962,164,650,500 | 34.2125 | 85 | 0.699681 | false |
groovehunter/xmlflat2db | test/test_McApp.py | 1 | 1973 | from TestBase import TestBase
class TestMcApp(TestBase):
    """ should test the basic workflows of the app
    """
    def setUp(self):
        """ instantiate; load sources; init db. """
TestBase.setUp(self)
self.mc.scan_source_dirs_all()
self.mc.scan_sources()
self.mc.source_load_first()
    def test_source_exist(self):
        """ should have a source attribute before ...? """
self.assertTrue( hasattr(self.mc, 'source') )
    def test_src_dir(self):
        """ scanning the source dir should start with one or more files """
self.assertIsNot( len(self.mc.ls), 0)
    def test_if_src_loaded_if_valid_file(self):
        """ decide on xml and check whether the file is okay or not """
self.assertIn( self.mc.source_cur_checkvalid(), [True,False] )
# def test_ValueError_if_wrong_src(self):
# self.assertRaises(ValueError, self.mc.source_cur_checkvalid)
'''
def test_DSkorrektVerarbeitet(self):
ds = create_test_ds()
self.mc.store.write(ds)
uid = ds.get_uid()
ds_pruef = self.mc.store.read(uid)
self.failUnless( ds_pruef, ds )
'''
    def test_xmldateien_mit_problemen(self):
        """ xml files with problems during processing should be remembered,
        e.g. moved to a subfolder 'problem' or similar """
self.assertTrue( self.mc.report_manual_todo() )
def test_TotaleNrDSgeschriebenBzwVerarbeitet(self):
self.mc.work()
self.assertEqual( self.mc.num_ds_written, self.mc.num_ds_given )
''' FINAL
    def test_alle_src_files_werden_verarbeitet(self):
        """ in a single run all files must be processed,
        i.e. moved to the archives """
self.failUnless( self.mc.no_files_left() )
'''
def test_load_config(self):
self.mc.load_config()
self.assertEqual( self.mc.config['test'], 'test' )
| gpl-2.0 | -7,792,078,210,384,689,000 | 27.185714 | 74 | 0.608718 | false |
yuval-harpaz/MNE4D | pyScripts/yoni.py | 1 | 1301 | __author__ = 'yuval'
import mne
from os import chdir
#from IPython.display import Image
#from mayavi import mlab
# mne_bti2fiff.py -p xc,hb,lf,9_34c,rfhp0.1Hz -o idan_raw.fif
chdir("/home/yuval/wsMNE/")
raw=mne.io.bti.read_raw_bti('xc,hb,lf_c,rfDC')
raw.save('idanTest-raw.fif')
raw=mne.io.Raw('idanTest-raw.fif')
subjects_dir='/usr/local/freesurfer/subjects'
mne.viz.plot_bem(subject='aliceIdan',subjects_dir='/usr/local/freesurfer/subjects', orientation='coronal')
raw=mne.io.read_raw_fif('idan_raw.fif')
info = raw.info
mne.gui.coregistration()
mri="/home/yuval/Copy/MEGdata/alice/idan/MNE/idan_raw-trans.fif"
fig = mne.viz.plot_trans(info, mri, subject='aliceIdan', dig=False, subjects_dir=subjects_dir);
mlab.savefig('coreg.jpg')
# Image(filename='coreg.jpg', width=500)
bem_model = mne.make_bem_model(subject='aliceIdan', subjects_dir=subjects_dir, conductivity=(0.3,))
bem_solution = mne.make_bem_solution(bem_model)
# /usr/local/freesurfer/subjects/aliceIdan/bem
mne.write_bem_solution('/usr/local/freesurfer/subjects/aliceIdan/bem/aliceIdan-bem.fif',bem_solution)
src_fsaverage = mne.setup_source_space('fsaverage', spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True)
src_sub = mne.morph_source_spaces(src_fsaverage, 'aliceIdan', subjects_dir=subjects_dir)
#
| gpl-2.0 | 3,468,862,391,317,658,000 | 35.138889 | 126 | 0.749424 | false |
cisalhante/matricula-o-matic | test_bench_disciplina.py | 1 | 1134 | __author__ = "William Batista Aguiar Motta"
__email__ = "[email protected]"
__license__ = "GPL"
import matricula_web_web_crawler as MT
import sys
if len(sys.argv)>1:
for a in sys.argv[1:]:
# print('\n')
D = MT.Disciplina(a)
print(D.codigo)
print(D.nome)
print(D.creditos)
print(D.departamento)
print(D.tipo)
print(D.periodo)
print(D.requisitos)
for t in D.turmas: # BUG
print(t.dias)
print(t.professores)
print(t.cod_disciplina)
print(t.id)
print(t.vagas)
print(t.ocupadas)
print(t.disponiveis)
print(t.locais)
else:
D = MT.Disciplina(167657)
print(D.codigo)
print(D.nome)
print(D.creditos)
print(D.departamento)
print(D.tipo)
print(D.periodo)
print(D.requisitos)
for t in D.turmas:
print(t.dias)
print(t.professores)
print(t.cod_disciplina)
print(t.id)
print(t.vagas)
print(t.ocupadas)
print(t.disponiveis)
print(t.locais)
| gpl-3.0 | -4,359,458,083,531,792,400 | 23.652174 | 44 | 0.538801 | false |
ptressel/sahana-eden-madpub | modules/s3/s3validators.py | 1 | 20338 | # -*- coding: utf-8 -*-
""" Custom Validators
@requires: U{B{I{gluon}} <http://web2py.com>}
@author: Fran Boon <fran[at]aidiq.com>
@author: Dominic König <dominic[at]aidiq.com>
@author: sunneach
@copyright: (c) 2010-2011 Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ["IS_LAT",
"IS_LON",
"IS_HTML_COLOUR",
"THIS_NOT_IN_DB",
"IS_UTC_OFFSET",
"IS_UTC_DATETIME",
"IS_ONE_OF",
"IS_ONE_OF_EMPTY",
"IS_NOT_ONE_OF",
"IS_ACL"]
import time
import uuid
import re
from datetime import datetime, timedelta
from gluon.validators import Validator, IS_MATCH, IS_NOT_IN_DB, IS_IN_SET
def options_sorter(x, y):
return (str(x[1]).upper() > str(y[1]).upper() and 1) or -1
class IS_LAT(object):
"""
example:
INPUT(_type="text", _name="name", requires=IS_LAT())
latitude has to be in degrees between -90 & 90
"""
def __init__(self,
error_message = "Latitude/Northing should be between -90 & 90!"):
self.minimum = -90
self.maximum = 90
self.error_message = error_message
def __call__(self, value):
try:
value = float(value)
if self.minimum <= value <= self.maximum:
return (value, None)
except ValueError:
pass
return (value, self.error_message)
class IS_LON(object):
"""
example:
INPUT(_type="text", _name="name" ,requires=IS_LON())
longitude has to be in degrees between -180 & 180
"""
def __init__(self,
error_message = "Longitude/Easting should be between -180 & 180!"):
self.minimum = -180
self.maximum = 180
self.error_message = error_message
def __call__(self, value):
try:
value = float(value)
if self.minimum <= value <= self.maximum:
return (value, None)
except ValueError:
pass
return (value, self.error_message)
# -----------------------------------------------------------------------------
class IS_HTML_COLOUR(IS_MATCH):
"""
example::
INPUT(_type="text", _name="name", requires=IS_HTML_COLOUR())
"""
def __init__(self, error_message="must be a 6 digit hex code!"):
IS_MATCH.__init__(self, "^[0-9a-fA-F]{6}$", error_message)
# -----------------------------------------------------------------------------
class THIS_NOT_IN_DB(object):
"""
Unused currently since doesn't quite work.
See: http://groups.google.com/group/web2py/browse_thread/thread/27b14433976c0540
"""
def __init__(self, dbset, field, this,
error_message = "value already in database!"):
if hasattr(dbset, "define_table"):
self.dbset = dbset()
else:
self.dbset = dbset
self.field = field
self.value = this
self.error_message = error_message
self.record_id = 0
def set_self_id(self, id):
self.record_id = id
def __call__(self, value):
tablename, fieldname = str(self.field).split(".")
field = self.dbset._db[tablename][fieldname]
rows = self.dbset(field == self.value).select(limitby=(0, 1))
if len(rows)>0 and str(rows[0].id) != str(self.record_id):
return (self.value, self.error_message)
return (value, None)
regex1 = re.compile("[\w_]+\.[\w_]+")
regex2 = re.compile("%\((?P<name>[^\)]+)\)s")
# IS_ONE_OF_EMPTY -------------------------------------------------------------------
# by sunneach 2010-02-03
# copy of nursix's IS_ONE_OF with removed 'options' method
class IS_ONE_OF_EMPTY(Validator):
"""
Filtered version of IS_IN_DB():
validates a given value as key of another table, filtered by the 'filterby'
field for one of the 'filter_opts' options (=a selective IS_IN_DB())
NB Filtering isn't active in GQL.
For the dropdown representation:
'label' can be a string template for the record, or a set of field
names of the fields to be used as option labels, or a function or lambda
to create an option label from the respective record (which has to return
a string, of course). The function will take the record as an argument
        No 'options' method, since this validator is meant to be used next to an
        Autocomplete field, so a large dropdown isn't downloaded unnecessarily.
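        Usage sketch (the table and field names below are hypothetical, they are
        not defined in this module):
            requires = IS_ONE_OF_EMPTY(db, "pr_person.id",
                                       label="%(first_name)s %(last_name)s",
                                       filterby="missing", filter_opts=(False,))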
"""
def __init__(
self,
dbset,
field,
label=None,
filterby=None,
filter_opts=None,
error_message="invalid value!",
orderby=None,
groupby=None,
cache=None,
multiple=False,
zero="",
sort=False,
_and=None,
):
if hasattr(dbset, "define_table"):
self.dbset = dbset()
else:
self.dbset = dbset
self.field = field
(ktable, kfield) = str(self.field).split(".")
if not label:
label = "%%(%s)s" % kfield
if isinstance(label, str):
if regex1.match(str(label)):
label = "%%(%s)s" % str(label).split(".")[-1]
ks = regex2.findall(label)
if not kfield in ks:
ks += [kfield]
fields = ["%s.%s" % (ktable, k) for k in ks]
else:
ks = [kfield]
fields =[str(f) for f in self.dbset._db[ktable]]
self.fields = fields
self.label = label
self.ktable = ktable
if not kfield or not len(kfield):
self.kfield = "id"
else:
self.kfield = kfield
self.ks = ks
self.error_message = error_message
self.theset = None
self.orderby = orderby
self.groupby = groupby
self.cache = cache
self.multiple = multiple
self.zero = zero
self.sort = sort
self._and = _and
self.filterby = filterby
self.filter_opts = filter_opts
def set_self_id(self, id):
if self._and:
self._and.record_id = id
def build_set(self):
if self.ktable in self.dbset._db:
_table = self.dbset._db[self.ktable]
if self.dbset._db._dbname != "gql":
orderby = self.orderby or ", ".join(self.fields)
groupby = self.groupby
dd = dict(orderby=orderby, groupby=groupby, cache=self.cache)
if "deleted" in _table:
query = (_table["deleted"] == False)
else:
query = (_table["id"] > 0)
if self.filterby and self.filterby in _table:
if self.filter_opts:
query = query & (_table[self.filterby].belongs(self.filter_opts))
if not self.orderby:
dd.update(orderby=_table[self.filterby])
records = self.dbset(query).select(*self.fields, **dd)
else:
import contrib.gql
orderby = self.orderby\
or contrib.gql.SQLXorable("|".join([k for k in self.ks
if k != "id"]))
dd = dict(orderby=orderby, cache=self.cache)
records = \
self.dbset.select(self.dbset._db[self.ktable].ALL, **dd)
self.theset = [str(r[self.kfield]) for r in records]
#labels = []
label = self.label
try:
labels = map(label, records)
except TypeError:
if isinstance(label, str):
labels = map(lambda r: label % dict(r), records)
elif isinstance(label, (list, tuple)):
labels = map(lambda r: \
" ".join([r[l] for l in label if l in r]),
records)
elif hasattr(label, '__call__'):
# Is a function
labels = map(label, records)
elif "name" in _table:
labels = map(lambda r: r.name, records)
else:
labels = map(lambda r: r[self.kfield], records)
self.labels = labels
else:
self.theset = None
self.labels = None
#def options(self):
# "Removed as we don't want any options downloaded unnecessarily"
def __call__(self, value):
try:
_table = self.dbset._db[self.ktable]
deleted_q = ("deleted" in _table) and (_table["deleted"] == False) or False
filter_opts_q = False
if self.filterby and self.filterby in _table:
if self.filter_opts:
filter_opts_q = _table[self.filterby].belongs(self.filter_opts)
if self.multiple:
if isinstance(value, list):
values = value
elif isinstance(value, basestring) and \
value[0] == "|" and value[-1] == "|":
values = value[1:-1].split("|")
elif value:
values = [value]
else:
values = []
if self.theset:
if not [x for x in values if not x in self.theset]:
return ("|%s|" % "|".join(values), None)
else:
return (value, self.error_message)
else:
                    query = None
                    for v in values:
q = (_table[self.kfield] == v)
query = query is not None and query | q or q
if filter_opts_q != False:
query = query is not None and \
(filter_opts_q & (query)) or filter_opts_q
if deleted_q != False:
query = query is not None and \
(deleted_q & (query)) or deleted_q
if self.dbset(query).count() < 1:
return (value, self.error_message)
return ("|%s|" % "|".join(values), None)
elif self.theset:
if value in self.theset:
if self._and:
return self._and(value)
else:
return (value, None)
else:
values = [value]
query = None
for v in values:
q = (_table[self.kfield] == v)
query = query is not None and query | q or q
if filter_opts_q != False:
query = query is not None and \
(filter_opts_q & (query)) or filter_opts_q
if deleted_q != False:
query = query is not None and \
(deleted_q & (query)) or deleted_q
if self.dbset(query).count():
if self._and:
return self._and(value)
else:
return (value, None)
except:
pass
return (value, self.error_message)
# IS_ONE_OF -------------------------------------------------------------------
# added 2009-08-23 by nursix
# converted to subclass 2010-02-03 by sunneach: NO CHANGES in the method bodies
class IS_ONE_OF(IS_ONE_OF_EMPTY):
"""
Extends IS_ONE_OF_EMPTY by restoring the 'options' method.
"""
def options(self):
self.build_set()
items = [(k, self.labels[i]) for (i, k) in enumerate(self.theset)]
if self.sort:
items.sort(options_sorter)
if self.zero != None and not self.multiple:
items.insert(0,("", self.zero))
return items
# -----------------------------------------------------------------------------
class IS_NOT_ONE_OF(IS_NOT_IN_DB):
"""
Filtered version of IS_NOT_IN_DB()
- understands the 'deleted' field.
- makes the field unique (amongst non-deleted field)
Example:
- INPUT(_type="text", _name="name", requires=IS_NOT_ONE_OF(db, db.table))
"""
def __call__(self, value):
if value in self.allowed_override:
return (value, None)
(tablename, fieldname) = str(self.field).split(".")
_table = self.dbset._db[tablename]
field = _table[fieldname]
query = (field == value)
if "deleted" in _table:
query = (_table["deleted"] == False) & query
rows = self.dbset(query).select(limitby=(0, 1))
if len(rows) > 0 and str(rows[0].id) != str(self.record_id):
return (value, self.error_message)
return (value, None)
# -----------------------------------------------------------------------------
class IS_UTC_OFFSET(Validator):
"""
Validates a given string value as UTC offset in the format +/-HHMM
@author: nursix
@param error_message: the error message to be returned
@note:
all leading parts of the string (before the trailing offset specification)
will be ignored and replaced by 'UTC ' in the return value, if the string
passes through.
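        Illustrative behaviour (sample values, not taken from a test suite):
        "+0100" validates to ("UTC +0100", None), "UTC +0530" validates to
        ("UTC +0530", None), and an unparseable string is returned unchanged
        together with the error message.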
"""
def __init__(self,
error_message="invalid UTC offset!"
):
self.error_message = error_message
@staticmethod
def get_offset_value(offset_str):
if offset_str and len(offset_str) >= 5 and \
(offset_str[-5] == "+" or offset_str[-5] == "-") and \
offset_str[-4:].isdigit():
offset_hrs = int(offset_str[-5] + offset_str[-4:-2])
offset_min = int(offset_str[-5] + offset_str[-2:])
offset = 3600*offset_hrs + 60*offset_min
return offset
else:
return None
def __call__(self,value):
if value and isinstance(value, str):
_offset_str = value.strip()
offset = self.get_offset_value(_offset_str)
if offset is not None and offset>-86340 and offset <86340:
# Add a leading 'UTC ',
# otherwise leading '+' and '0' will be stripped away by web2py
return ("UTC " + _offset_str[-5:], None)
return (value, self.error_message)
# -----------------------------------------------------------------------------
#
class IS_UTC_DATETIME(Validator):
"""
Validates a given value as datetime string and returns the corresponding
UTC datetime.
Example:
- INPUT(_type="text", _name="name", requires=IS_UTC_DATETIME())
@author: nursix
@param format: strptime/strftime format template string, for
directives refer to your strptime implementation
@param error_message: dict of error messages to be returned
@param utc_offset: offset to UTC in seconds, if not specified, the value
is considered to be UTC
@param allow_future: whether future date/times are allowed or not, if
set to False, all date/times beyond now+max_future
will fail
@type allow_future: boolean
@param max_future: the maximum acceptable future time interval in
seconds from now for unsynchronized local clocks
@note:
        datetime has to be in the ISO 8601 format YYYY-MM-DD hh:mm:ss, with an
optional trailing UTC offset specified as +/-HHMM (+ for eastern, - for
western timezones)
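        Illustrative example (values assumed): with utc_offset="+0100" and
        allow_future=False, a past timestamp such as "2011-03-01 12:00:00" is
        accepted and shifted to UTC, i.e. it validates to
        datetime(2011, 3, 1, 11, 0, 0).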
"""
isodatetime = "%Y-%m-%d %H:%M:%S"
def __init__(self,
format=None,
error_message=None,
utc_offset=None,
allow_future=True,
max_future=900):
self.format = format or self.isodatetime
self.error_message = dict(
format = "Required format: YYYY-MM-DD HH:MM:SS!",
offset = "Invalid UTC offset!",
future = "Future times not allowed!")
if error_message and isinstance(error_message, dict):
self.error_message["format"] = error_message.get("format", None) or self.error_message["format"]
self.error_message["offset"] = error_message.get("offset", None) or self.error_message["offset"]
self.error_message["future"] = error_message.get("future", None) or self.error_message["future"]
elif error_message:
self.error_message["format"] = error_message
validate = IS_UTC_OFFSET()
offset, error = validate(utc_offset)
if error:
self.utc_offset = "UTC +0000" # fallback to UTC
else:
self.utc_offset = offset
self.allow_future = allow_future
self.max_future = max_future
def __call__(self, value):
_dtstr = value.strip()
if len(_dtstr) > 6 and \
(_dtstr[-6:-4] == " +" or _dtstr[-6:-4] == " -") and \
_dtstr[-4:].isdigit():
# UTC offset specified in dtstr
dtstr = _dtstr[0:-6]
_offset_str = _dtstr[-5:]
else:
# use default UTC offset
dtstr = _dtstr
_offset_str = self.utc_offset
offset_hrs = int(_offset_str[-5] + _offset_str[-4:-2])
offset_min = int(_offset_str[-5] + _offset_str[-2:])
offset = 3600 * offset_hrs + 60 * offset_min
# Offset must be in range -1439 to +1439 minutes
if offset < -86340 or offset > 86340:
            return (value, self.error_message["offset"])
try:
(y, m, d, hh, mm, ss, t0, t1, t2) = time.strptime(dtstr, str(self.format))
dt = datetime(y, m, d, hh, mm, ss)
except:
try:
(y, m, d, hh, mm, ss, t0, t1, t2) = time.strptime(dtstr+":00", str(self.format))
dt = datetime(y, m, d, hh, mm, ss)
except:
return(value, self.error_message["format"])
if self.allow_future:
return (dt, None)
else:
latest = datetime.utcnow() + timedelta(seconds=self.max_future)
dt_utc = dt - timedelta(seconds=offset)
if dt_utc > latest:
return (dt_utc, self.error_message["future"])
else:
return (dt_utc, None)
def formatter(self, value):
# Always format with trailing UTC offset
return value.strftime(str(self.format)) + " +0000"
# -----------------------------------------------------------------------------
class IS_ACL(IS_IN_SET):
"""
Validator for ACLs
@attention: Incomplete! Does not validate yet, but just convert.
@author: Dominic König <[email protected]>
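        Illustration (hypothetical flag values): IS_ACL((1, 2, 4, 8))([1, 4])
        returns (5, None), i.e. the individual flags are OR-ed into one bitmask.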
"""
def __call__(self, value):
"""
Validation
@param value: the value to validate
"""
if not isinstance(value, (list, tuple)):
value = [value]
acl = 0x0000
for v in value:
try:
flag = int(v)
except (ValueError, TypeError):
flag = 0x0000
else:
acl |= flag
return (acl, None)
# -----------------------------------------------------------------------------
| mit | 7,582,023,155,462,627,000 | 33.526316 | 132 | 0.511703 | false |
fakusb/FiVES-Nao-Visualisation | WebClient/resources/models/v11/modify.py | 1 | 21395 | # vim: fenc=utf-8 foldmethod=marker
# call this script with nao-dummy.html as argument.
import os
import sys
import math
from subprocess import call, Popen, PIPE
if not len(sys.argv) > 1:
print("No file argument given.")
sys.exit()
infile = sys.argv[1]
if not os.path.isfile(infile):
print("No valid file argument given.")
sys.exit()
vals = {}
HeadYaw = 42
HeadPitch = -23
RShoulderPitch = -50
RShoulderRoll = -50
RElbowRoll = 60
RElbowYaw = 45
RWristYaw = 68
RHand = 0
LShoulderPitch = 0
LShoulderRoll = 0
LElbowYaw = 0
LElbowRoll = -40
LWristYaw = 50
LHand = 0
RHipYawPitch = -65
RHipPitch = -19
RHipRoll = 13
RKneePitch = 55
RAnklePitch = -16
RAnkleRoll = 0
LHipYawPitch = -65
LHipPitch = 0
LHipRoll = 23
LKneePitch = 0
LAnklePitch = 13
LAnkleRoll = -23
fmtstr = "{:.6f}"
# chest & head {{{
vals['chest_1_1'] = '0.010000'
vals['chest_1_2'] = '0.000000'
vals['chest_1_3'] = '0.000000'
vals['chest_2_1'] = '0.000000'
vals['chest_2_2'] = '0.010000'
vals['chest_2_3'] = '0.000000'
vals['chest_3_1'] = '0.000000'
vals['chest_3_2'] = '0.000000'
vals['chest_3_3'] = '0.010000'
vals['neck_1_1'] = fmtstr.format(math.cos(math.radians(-HeadYaw)))#'1.000000'
vals['neck_1_2'] = fmtstr.format(-math.sin(math.radians(-HeadYaw)))#'0.000000'
vals['neck_1_3'] = '0.000000'
vals['neck_2_1'] = fmtstr.format(math.sin(math.radians(-HeadYaw)))#'0.000000'
vals['neck_2_2'] = fmtstr.format(math.cos(math.radians(-HeadYaw)))#'1.000000'
vals['neck_2_3'] = '0.000000'
vals['neck_3_1'] = '0.000000'
vals['neck_3_2'] = '0.000000'
vals['neck_3_3'] = '1.000000'
vals['head_1_1'] = fmtstr.format(math.cos(math.radians(-HeadPitch)))#'1.000000'
vals['head_1_2'] = '0.000000'
vals['head_1_3'] = fmtstr.format(math.sin(math.radians(-HeadPitch)))#'0.000000'
vals['head_2_1'] = '0.000000'
vals['head_2_2'] = '1.000000'
vals['head_2_3'] = '0.000000'
vals['head_3_1'] = fmtstr.format(-math.sin(math.radians(-HeadPitch)))#'0.000000'
vals['head_3_2'] = '0.000000'
vals['head_3_3'] = fmtstr.format(math.cos(math.radians(-HeadPitch)))#'1.000000'
# }}}
# right arm {{{
vals['rshoulder_1_1'] = fmtstr.format(math.cos(math.radians(-RShoulderPitch)))#'1.000000'
vals['rshoulder_1_2'] = '0.000000'
vals['rshoulder_1_3'] = fmtstr.format(math.sin(math.radians(-RShoulderPitch)))#'0.000000'
vals['rshoulder_2_1'] = '0.000000'
vals['rshoulder_2_2'] = '1.000000'
vals['rshoulder_2_3'] = '0.000000'
vals['rshoulder_3_1'] = fmtstr.format(-math.sin(math.radians(-RShoulderPitch)))#'0.000000'
vals['rshoulder_3_2'] = '0.000000'
vals['rshoulder_3_3'] = fmtstr.format(math.cos(math.radians(-RShoulderPitch)))#'1.000000'
vals['rbiceps_1_1'] = fmtstr.format(math.cos(math.radians(-RShoulderRoll)))#'1.000000'
vals['rbiceps_1_2'] = fmtstr.format(-math.sin(math.radians(-RShoulderRoll)))#'0.000000'
vals['rbiceps_1_3'] = '0.000000'
vals['rbiceps_2_1'] = fmtstr.format(math.sin(math.radians(-RShoulderRoll)))#'0.000000'
vals['rbiceps_2_2'] = fmtstr.format(math.cos(math.radians(-RShoulderRoll)))#'1.000000'
vals['rbiceps_2_3'] = '0.000000'
vals['rbiceps_3_1'] = '0.000000'
vals['rbiceps_3_2'] = '0.000000'
vals['rbiceps_3_3'] = '1.000000'
rym11 = 1.0
rym12 = 0.0
rym13 = 0.0
rym21 = 0.0
rym22 = math.cos(math.radians(-RElbowYaw))
rym23 = -math.sin(math.radians(-RElbowYaw))
rym31 = 0.0
rym32 = math.sin(math.radians(-RElbowYaw))
rym33 = math.cos(math.radians(-RElbowYaw))
rrm11 = math.cos(math.radians(-RElbowRoll))
rrm12 = -math.sin(math.radians(-RElbowRoll))
rrm13 = 0.0
rrm21 = math.sin(math.radians(-RElbowRoll))
rrm22 = math.cos(math.radians(-RElbowRoll))
rrm23 = 0.0
rrm31 = 0.0
rrm32 = 0.0
rrm33 = 1.0
# first yaw, then roll
vals['rforearm_1_1'] = fmtstr.format(rrm11*rym11+rrm12*rym21+rrm13*rym31)###'1.000000'
vals['rforearm_1_2'] = fmtstr.format(rrm11*rym12+rrm12*rym22+rrm13*rym32)###'0.000000'
vals['rforearm_1_3'] = fmtstr.format(rrm11*rym13+rrm12*rym23+rrm13*rym33)###'0.000000'
vals['rforearm_2_1'] = fmtstr.format(rrm21*rym11+rrm22*rym21+rrm23*rym31)###'0.000000'
vals['rforearm_2_2'] = fmtstr.format(rrm21*rym12+rrm22*rym22+rrm23*rym32)###'1.000000'
vals['rforearm_2_3'] = fmtstr.format(rrm21*rym13+rrm22*rym23+rrm23*rym33)###'0.000000'
vals['rforearm_3_1'] = fmtstr.format(rrm31*rym11+rrm32*rym21+rrm33*rym31)###'0.000000'
vals['rforearm_3_2'] = fmtstr.format(rrm31*rym12+rrm32*rym22+rrm33*rym32)###'0.000000'
vals['rforearm_3_3'] = fmtstr.format(rrm31*rym13+rrm32*rym23+rrm33*rym33)###'1.000000'
vals['rhand_1_1'] = '1.000000'
vals['rhand_1_2'] = '0.000000'
vals['rhand_1_3'] = '0.000000'
vals['rhand_2_1'] = '0.000000'
vals['rhand_2_2'] = fmtstr.format(math.cos(math.radians(-RWristYaw)))#'1.000000'
vals['rhand_2_3'] = fmtstr.format(-math.sin(math.radians(-RWristYaw)))#'0.000000'
vals['rhand_3_1'] = '0.000000'
vals['rhand_3_2'] = fmtstr.format(math.sin(math.radians(-RWristYaw)))#'0.000000'
vals['rhand_3_3'] = fmtstr.format(math.cos(math.radians(-RWristYaw)))#'1.000000'
vals['rphalanx7_1_1'] = '1.000000'
vals['rphalanx7_1_2'] = '0.000000'
vals['rphalanx7_1_3'] = '0.000000'
vals['rphalanx7_2_1'] = '0.000000'
vals['rphalanx7_2_2'] = '1.000000'
vals['rphalanx7_2_3'] = '0.000000'
vals['rphalanx7_3_1'] = '0.000000'
vals['rphalanx7_3_2'] = '0.000000'
vals['rphalanx7_3_3'] = '1.000000'
vals['rphalanx8_1_1'] = '1.000000'
vals['rphalanx8_1_2'] = '0.000000'
vals['rphalanx8_1_3'] = '0.000000'
vals['rphalanx8_2_1'] = '0.000000'
vals['rphalanx8_2_2'] = '1.000000'
vals['rphalanx8_2_3'] = '0.000000'
vals['rphalanx8_3_1'] = '0.000000'
vals['rphalanx8_3_2'] = '0.000000'
vals['rphalanx8_3_3'] = '1.000000'
vals['rphalanx4_1_1'] = '1.000000'
vals['rphalanx4_1_2'] = '0.000000'
vals['rphalanx4_1_3'] = '0.000000'
vals['rphalanx4_2_1'] = '0.000000'
vals['rphalanx4_2_2'] = '1.000000'
vals['rphalanx4_2_3'] = '0.000000'
vals['rphalanx4_3_1'] = '0.000000'
vals['rphalanx4_3_2'] = '0.000000'
vals['rphalanx4_3_3'] = '1.000000'
vals['rphalanx5_1_1'] = '1.000000'
vals['rphalanx5_1_2'] = '0.000000'
vals['rphalanx5_1_3'] = '0.000000'
vals['rphalanx5_2_1'] = '0.000000'
vals['rphalanx5_2_2'] = '1.000000'
vals['rphalanx5_2_3'] = '0.000000'
vals['rphalanx5_3_1'] = '0.000000'
vals['rphalanx5_3_2'] = '0.000000'
vals['rphalanx5_3_3'] = '1.000000'
vals['rphalanx6_1_1'] = '1.000000'
vals['rphalanx6_1_2'] = '0.000000'
vals['rphalanx6_1_3'] = '0.000000'
vals['rphalanx6_2_1'] = '0.000000'
vals['rphalanx6_2_2'] = '1.000000'
vals['rphalanx6_2_3'] = '0.000000'
vals['rphalanx6_3_1'] = '0.000000'
vals['rphalanx6_3_2'] = '0.000000'
vals['rphalanx6_3_3'] = '1.000000'
vals['rphalanx1_1_1'] = '1.000000'
vals['rphalanx1_1_2'] = '0.000000'
vals['rphalanx1_1_3'] = '0.000000'
vals['rphalanx1_2_1'] = '0.000000'
vals['rphalanx1_2_2'] = '1.000000'
vals['rphalanx1_2_3'] = '0.000000'
vals['rphalanx1_3_1'] = '0.000000'
vals['rphalanx1_3_2'] = '0.000000'
vals['rphalanx1_3_3'] = '1.000000'
vals['rphalanx2_1_1'] = '1.000000'
vals['rphalanx2_1_2'] = '0.000000'
vals['rphalanx2_1_3'] = '0.000000'
vals['rphalanx2_2_1'] = '0.000000'
vals['rphalanx2_2_2'] = '1.000000'
vals['rphalanx2_2_3'] = '0.000000'
vals['rphalanx2_3_1'] = '0.000000'
vals['rphalanx2_3_2'] = '0.000000'
vals['rphalanx2_3_3'] = '1.000000'
vals['rphalanx3_1_1'] = '1.000000'
vals['rphalanx3_1_2'] = '0.000000'
vals['rphalanx3_1_3'] = '0.000000'
vals['rphalanx3_2_1'] = '0.000000'
vals['rphalanx3_2_2'] = '1.000000'
vals['rphalanx3_2_3'] = '0.000000'
vals['rphalanx3_3_1'] = '0.000000'
vals['rphalanx3_3_2'] = '0.000000'
vals['rphalanx3_3_3'] = '1.000000'
# }}}
# left arm {{{
vals['lshoulder_1_1'] = fmtstr.format(math.cos(math.radians(-LShoulderPitch)))#'1.000000'
vals['lshoulder_1_2'] = '0.000000'
vals['lshoulder_1_3'] = fmtstr.format(math.sin(math.radians(-LShoulderPitch)))#'0.000000'
vals['lshoulder_2_1'] = '0.000000'
vals['lshoulder_2_2'] = '1.000000'
vals['lshoulder_2_3'] = '0.000000'
vals['lshoulder_3_1'] = fmtstr.format(-math.sin(math.radians(-LShoulderPitch)))#'0.000000'
vals['lshoulder_3_2'] = '0.000000'
vals['lshoulder_3_3'] = fmtstr.format(math.cos(math.radians(-LShoulderPitch)))#'1.000000'
vals['lbiceps_1_1'] = fmtstr.format(math.cos(math.radians(-LShoulderRoll)))#'1.000000'
vals['lbiceps_1_2'] = fmtstr.format(-math.sin(math.radians(-LShoulderRoll)))#'0.000000'
vals['lbiceps_1_3'] = '0.000000'
vals['lbiceps_2_1'] = fmtstr.format(math.sin(math.radians(-LShoulderRoll)))#'0.000000'
vals['lbiceps_2_2'] = fmtstr.format(math.cos(math.radians(-LShoulderRoll)))#'1.000000'
vals['lbiceps_2_3'] = '0.000000'
vals['lbiceps_3_1'] = '0.000000'
vals['lbiceps_3_2'] = '0.000000'
vals['lbiceps_3_3'] = '1.000000'
lym11 = 1.0
lym12 = 0.0
lym13 = 0.0
lym21 = 0.0
lym22 = math.cos(math.radians(-LElbowYaw))
lym23 = -math.sin(math.radians(-LElbowYaw))
lym31 = 0.0
lym32 = math.sin(math.radians(-LElbowYaw))
lym33 = math.cos(math.radians(-LElbowYaw))
lrm11 = math.cos(math.radians(-LElbowRoll))
lrm12 = -math.sin(math.radians(-LElbowRoll))
lrm13 = 0.0
lrm21 = math.sin(math.radians(-LElbowRoll))
lrm22 = math.cos(math.radians(-LElbowRoll))
lrm23 = 0.0
lrm31 = 0.0
lrm32 = 0.0
lrm33 = 1.0
# first yaw, then roll
vals['lforearm_1_1'] = fmtstr.format(lrm11*lym11+lrm12*lym21+lrm13*lym31)###'1.000000'
vals['lforearm_1_2'] = fmtstr.format(lrm11*lym12+lrm12*lym22+lrm13*lym32)###'0.000000'
vals['lforearm_1_3'] = fmtstr.format(lrm11*lym13+lrm12*lym23+lrm13*lym33)###'0.000000'
vals['lforearm_2_1'] = fmtstr.format(lrm21*lym11+lrm22*lym21+lrm23*lym31)###'0.000000'
vals['lforearm_2_2'] = fmtstr.format(lrm21*lym12+lrm22*lym22+lrm23*lym32)###'1.000000'
vals['lforearm_2_3'] = fmtstr.format(lrm21*lym13+lrm22*lym23+lrm23*lym33)###'0.000000'
vals['lforearm_3_1'] = fmtstr.format(lrm31*lym11+lrm32*lym21+lrm33*lym31)###'0.000000'
vals['lforearm_3_2'] = fmtstr.format(lrm31*lym12+lrm32*lym22+lrm33*lym32)###'0.000000'
vals['lforearm_3_3'] = fmtstr.format(lrm31*lym13+lrm32*lym23+lrm33*lym33)###'1.000000'
vals['lhand_1_1'] = '1.000000'
vals['lhand_1_2'] = '0.000000'
vals['lhand_1_3'] = '0.000000'
vals['lhand_2_1'] = '0.000000'
vals['lhand_2_2'] = fmtstr.format(math.cos(math.radians(-LWristYaw)))#'1.000000'
vals['lhand_2_3'] = fmtstr.format(-math.sin(math.radians(-LWristYaw)))#'0.000000'
vals['lhand_3_1'] = '0.000000'
vals['lhand_3_2'] = fmtstr.format(math.sin(math.radians(-LWristYaw)))#'0.000000'
vals['lhand_3_3'] = fmtstr.format(math.cos(math.radians(-LWristYaw)))#'1.000000'
vals['lphalanx7_1_1'] = '1.000000'
vals['lphalanx7_1_2'] = '0.000000'
vals['lphalanx7_1_3'] = '0.000000'
vals['lphalanx7_2_1'] = '0.000000'
vals['lphalanx7_2_2'] = '1.000000'
vals['lphalanx7_2_3'] = '0.000000'
vals['lphalanx7_3_1'] = '0.000000'
vals['lphalanx7_3_2'] = '0.000000'
vals['lphalanx7_3_3'] = '1.000000'
vals['lphalanx8_1_1'] = '1.000000'
vals['lphalanx8_1_2'] = '0.000000'
vals['lphalanx8_1_3'] = '0.000000'
vals['lphalanx8_2_1'] = '0.000000'
vals['lphalanx8_2_2'] = '1.000000'
vals['lphalanx8_2_3'] = '0.000000'
vals['lphalanx8_3_1'] = '0.000000'
vals['lphalanx8_3_2'] = '0.000000'
vals['lphalanx8_3_3'] = '1.000000'
vals['lphalanx4_1_1'] = '1.000000'
vals['lphalanx4_1_2'] = '0.000000'
vals['lphalanx4_1_3'] = '0.000000'
vals['lphalanx4_2_1'] = '0.000000'
vals['lphalanx4_2_2'] = '1.000000'
vals['lphalanx4_2_3'] = '0.000000'
vals['lphalanx4_3_1'] = '0.000000'
vals['lphalanx4_3_2'] = '0.000000'
vals['lphalanx4_3_3'] = '1.000000'
vals['lphalanx5_1_1'] = '1.000000'
vals['lphalanx5_1_2'] = '0.000000'
vals['lphalanx5_1_3'] = '0.000000'
vals['lphalanx5_2_1'] = '0.000000'
vals['lphalanx5_2_2'] = '1.000000'
vals['lphalanx5_2_3'] = '0.000000'
vals['lphalanx5_3_1'] = '0.000000'
vals['lphalanx5_3_2'] = '0.000000'
vals['lphalanx5_3_3'] = '1.000000'
vals['lphalanx6_1_1'] = '1.000000'
vals['lphalanx6_1_2'] = '0.000000'
vals['lphalanx6_1_3'] = '0.000000'
vals['lphalanx6_2_1'] = '0.000000'
vals['lphalanx6_2_2'] = '1.000000'
vals['lphalanx6_2_3'] = '0.000000'
vals['lphalanx6_3_1'] = '0.000000'
vals['lphalanx6_3_2'] = '0.000000'
vals['lphalanx6_3_3'] = '1.000000'
vals['lphalanx1_1_1'] = '1.000000'
vals['lphalanx1_1_2'] = '0.000000'
vals['lphalanx1_1_3'] = '0.000000'
vals['lphalanx1_2_1'] = '0.000000'
vals['lphalanx1_2_2'] = '1.000000'
vals['lphalanx1_2_3'] = '0.000000'
vals['lphalanx1_3_1'] = '0.000000'
vals['lphalanx1_3_2'] = '0.000000'
vals['lphalanx1_3_3'] = '1.000000'
vals['lphalanx2_1_1'] = '1.000000'
vals['lphalanx2_1_2'] = '0.000000'
vals['lphalanx2_1_3'] = '0.000000'
vals['lphalanx2_2_1'] = '0.000000'
vals['lphalanx2_2_2'] = '1.000000'
vals['lphalanx2_2_3'] = '0.000000'
vals['lphalanx2_3_1'] = '0.000000'
vals['lphalanx2_3_2'] = '0.000000'
vals['lphalanx2_3_3'] = '1.000000'
vals['lphalanx3_1_1'] = '1.000000'
vals['lphalanx3_1_2'] = '0.000000'
vals['lphalanx3_1_3'] = '0.000000'
vals['lphalanx3_2_1'] = '0.000000'
vals['lphalanx3_2_2'] = '1.000000'
vals['lphalanx3_2_3'] = '0.000000'
vals['lphalanx3_3_1'] = '0.000000'
vals['lphalanx3_3_2'] = '0.000000'
vals['lphalanx3_3_3'] = '1.000000'
# }}}
# right leg {{{
rhux = 0
rhuy = -1/math.sqrt(2)
rhuz = -1/math.sqrt(2)
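# Note (added interpretation): the rhl*/rhr* terms below appear to assemble a
# Rodrigues axis-angle rotation about the unit axis (rhux, rhuy, rhuz):
# rhl* is cos(a)*I + sin(a)*[u]x, rhr* is (1-cos(a))*u*u^T, and the
# vals['rhip_i_j'] entries below are their sum.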
rhl11 = math.cos(math.radians(RHipYawPitch))
# no - here!
rhl12 = math.sin(math.radians(RHipYawPitch)) * (-rhuz)
rhl13 = math.sin(math.radians(RHipYawPitch)) * (rhuy)
rhl21 = math.sin(math.radians(RHipYawPitch)) * (rhuz)
rhl22 = math.cos(math.radians(RHipYawPitch))
rhl23 = math.sin(math.radians(RHipYawPitch)) * (-rhux)
rhl31 = math.sin(math.radians(RHipYawPitch)) * (-rhuy)
rhl32 = math.sin(math.radians(RHipYawPitch)) * (rhux)
rhl33 = math.cos(math.radians(RHipYawPitch))
rhr11 = (1 - math.cos(math.radians(RHipYawPitch))) * rhux * rhux
rhr12 = (1 - math.cos(math.radians(RHipYawPitch))) * rhux * rhuy
rhr13 = (1 - math.cos(math.radians(RHipYawPitch))) * rhux * rhuz
rhr21 = (1 - math.cos(math.radians(RHipYawPitch))) * rhuy * rhux
rhr22 = (1 - math.cos(math.radians(RHipYawPitch))) * rhuy * rhuy
rhr23 = (1 - math.cos(math.radians(RHipYawPitch))) * rhuy * rhuz
rhr31 = (1 - math.cos(math.radians(RHipYawPitch))) * rhuz * rhux
rhr32 = (1 - math.cos(math.radians(RHipYawPitch))) * rhuz * rhuy
rhr33 = (1 - math.cos(math.radians(RHipYawPitch))) * rhuz * rhuz
vals['rhip_1_1'] = fmtstr.format(rhl11 + rhr11)#'1.000000'
vals['rhip_1_2'] = fmtstr.format(rhl12 + rhr12)#'0.000000'
vals['rhip_1_3'] = fmtstr.format(rhl13 + rhr13)#'0.000000'
vals['rhip_2_1'] = fmtstr.format(rhl21 + rhr21)#'0.000000'
vals['rhip_2_2'] = fmtstr.format(rhl22 + rhr22)#'1.000000'
vals['rhip_2_3'] = fmtstr.format(rhl23 + rhr23)#'0.000000'
vals['rhip_3_1'] = fmtstr.format(rhl31 + rhr31)#'0.000000'
vals['rhip_3_2'] = fmtstr.format(rhl32 + rhr32)#'0.000000'
vals['rhip_3_3'] = fmtstr.format(rhl33 + rhr33)#'1.000000'
vals['rupperthigh_1_1'] = '1.000000'
vals['rupperthigh_1_2'] = '0.000000'
vals['rupperthigh_1_3'] = '0.000000'
vals['rupperthigh_2_1'] = '0.000000'
vals['rupperthigh_2_2'] = fmtstr.format(math.cos(math.radians(-RHipRoll)))#'1.000000'
vals['rupperthigh_2_3'] = fmtstr.format(-math.sin(math.radians(-RHipRoll)))#'0.000000'
vals['rupperthigh_3_1'] = '0.000000'
vals['rupperthigh_3_2'] = fmtstr.format(math.sin(math.radians(-RHipRoll)))#'0.000000'
vals['rupperthigh_3_3'] = fmtstr.format(math.cos(math.radians(-RHipRoll)))#'1.000000'
vals['rthigh_1_1'] = fmtstr.format(math.cos(math.radians(-RHipPitch)))#'1.000000'
vals['rthigh_1_2'] = '0.000000'
vals['rthigh_1_3'] = fmtstr.format(math.sin(math.radians(-RHipPitch)))#'0.000000'
vals['rthigh_2_1'] = '0.000000'
vals['rthigh_2_2'] = '1.000000'
vals['rthigh_2_3'] = '0.000000'
vals['rthigh_3_1'] = fmtstr.format(-math.sin(math.radians(-RHipPitch)))#'0.000000'
vals['rthigh_3_2'] = '0.000000'
vals['rthigh_3_3'] = fmtstr.format(math.cos(math.radians(-RHipPitch)))#'1.000000'
vals['rshinebone_1_1'] = fmtstr.format(math.cos(math.radians(-RKneePitch)))#'1.000000'
vals['rshinebone_1_2'] = '0.000000'
vals['rshinebone_1_3'] = fmtstr.format(math.sin(math.radians(-RKneePitch)))#'0.000000'
vals['rshinebone_2_1'] = '0.000000'
vals['rshinebone_2_2'] = '1.000000'
vals['rshinebone_2_3'] = '0.000000'
vals['rshinebone_3_1'] = fmtstr.format(-math.sin(math.radians(-RKneePitch)))#'0.000000'
vals['rshinebone_3_2'] = '0.000000'
vals['rshinebone_3_3'] = fmtstr.format(math.cos(math.radians(-RKneePitch)))#'1.000000'
vals['rankle_1_1'] = fmtstr.format(math.cos(math.radians(-RAnklePitch)))#'1.000000'
vals['rankle_1_2'] = '0.000000'
vals['rankle_1_3'] = fmtstr.format(math.sin(math.radians(-RAnklePitch)))#'0.000000'
vals['rankle_2_1'] = '0.000000'
vals['rankle_2_2'] = '1.000000'
vals['rankle_2_3'] = '0.000000'
vals['rankle_3_1'] = fmtstr.format(-math.sin(math.radians(-RAnklePitch)))#'0.000000'
vals['rankle_3_2'] = '0.000000'
vals['rankle_3_3'] = fmtstr.format(math.cos(math.radians(-RAnklePitch)))#'1.000000'
vals['rfoot_1_1'] = '1.000000'
vals['rfoot_1_2'] = '0.000000'
vals['rfoot_1_3'] = '0.000000'
vals['rfoot_2_1'] = '0.000000'
vals['rfoot_2_2'] = fmtstr.format(math.cos(math.radians(-RAnkleRoll)))#'1.000000'
vals['rfoot_2_3'] = fmtstr.format(-math.sin(math.radians(-RAnkleRoll)))#'0.000000'
vals['rfoot_3_1'] = '0.000000'
vals['rfoot_3_2'] = fmtstr.format(math.sin(math.radians(-RAnkleRoll)))#'0.000000'
vals['rfoot_3_3'] = fmtstr.format(math.cos(math.radians(-RAnkleRoll)))#'1.000000'
# }}}
# left leg {{{
lhux = 0
lhuy = 1/math.sqrt(2)
lhuz = -1/math.sqrt(2)
lhl11 = math.cos(math.radians(-LHipYawPitch))
lhl12 = math.sin(math.radians(-LHipYawPitch)) * (-lhuz)
lhl13 = math.sin(math.radians(-LHipYawPitch)) * (lhuy)
lhl21 = math.sin(math.radians(-LHipYawPitch)) * (lhuz)
lhl22 = math.cos(math.radians(-LHipYawPitch))
lhl23 = math.sin(math.radians(-LHipYawPitch)) * (-lhux)
lhl31 = math.sin(math.radians(-LHipYawPitch)) * (-lhuy)
lhl32 = math.sin(math.radians(-LHipYawPitch)) * (lhux)
lhl33 = math.cos(math.radians(-LHipYawPitch))
lhr11 = (1 - math.cos(math.radians(-LHipYawPitch))) * lhux * lhux
lhr12 = (1 - math.cos(math.radians(-LHipYawPitch))) * lhux * lhuy
lhr13 = (1 - math.cos(math.radians(-LHipYawPitch))) * lhux * lhuz
lhr21 = (1 - math.cos(math.radians(-LHipYawPitch))) * lhuy * lhux
lhr22 = (1 - math.cos(math.radians(-LHipYawPitch))) * lhuy * lhuy
lhr23 = (1 - math.cos(math.radians(-LHipYawPitch))) * lhuy * lhuz
lhr31 = (1 - math.cos(math.radians(-LHipYawPitch))) * lhuz * lhux
lhr32 = (1 - math.cos(math.radians(-LHipYawPitch))) * lhuz * lhuy
lhr33 = (1 - math.cos(math.radians(-LHipYawPitch))) * lhuz * lhuz
vals['lhip_1_1'] = fmtstr.format(lhl11 + lhr11)#'1.000000'
vals['lhip_1_2'] = fmtstr.format(lhl12 + lhr12)#'0.000000'
vals['lhip_1_3'] = fmtstr.format(lhl13 + lhr13)#'0.000000'
vals['lhip_2_1'] = fmtstr.format(lhl21 + lhr21)#'0.000000'
vals['lhip_2_2'] = fmtstr.format(lhl22 + lhr22)#'1.000000'
vals['lhip_2_3'] = fmtstr.format(lhl23 + lhr23)#'0.000000'
vals['lhip_3_1'] = fmtstr.format(lhl31 + lhr31)#'0.000000'
vals['lhip_3_2'] = fmtstr.format(lhl32 + lhr32)#'0.000000'
vals['lhip_3_3'] = fmtstr.format(lhl33 + lhr33)#'1.000000'
vals['lupperthigh_1_1'] = '1.000000'
vals['lupperthigh_1_2'] = '0.000000'
vals['lupperthigh_1_3'] = '0.000000'
vals['lupperthigh_2_1'] = '0.000000'
vals['lupperthigh_2_2'] = fmtstr.format(math.cos(math.radians(-LHipRoll)))#'1.000000'
vals['lupperthigh_2_3'] = fmtstr.format(-math.sin(math.radians(-LHipRoll)))#'0.000000'
vals['lupperthigh_3_1'] = '0.000000'
vals['lupperthigh_3_2'] = fmtstr.format(math.sin(math.radians(-LHipRoll)))#'0.000000'
vals['lupperthigh_3_3'] = fmtstr.format(math.cos(math.radians(-LHipRoll)))#'1.000000'
vals['lthigh_1_1'] = fmtstr.format(math.cos(math.radians(-LHipPitch)))#'1.000000'
vals['lthigh_1_2'] = '0.000000'
vals['lthigh_1_3'] = fmtstr.format(math.sin(math.radians(-LHipPitch)))#'0.000000'
vals['lthigh_2_1'] = '0.000000'
vals['lthigh_2_2'] = '1.000000'
vals['lthigh_2_3'] = '0.000000'
vals['lthigh_3_1'] = fmtstr.format(-math.sin(math.radians(-LHipPitch)))#'0.000000'
vals['lthigh_3_2'] = '0.000000'
vals['lthigh_3_3'] = fmtstr.format(math.cos(math.radians(-LHipPitch)))#'1.000000'
vals['lshinebone_1_1'] = fmtstr.format(math.cos(math.radians(-LKneePitch)))#'1.000000'
vals['lshinebone_1_2'] = '0.000000'
vals['lshinebone_1_3'] = fmtstr.format(math.sin(math.radians(-LKneePitch)))#'0.000000'
vals['lshinebone_2_1'] = '0.000000'
vals['lshinebone_2_2'] = '1.000000'
vals['lshinebone_2_3'] = '0.000000'
vals['lshinebone_3_1'] = fmtstr.format(-math.sin(math.radians(-LKneePitch)))#'0.000000'
vals['lshinebone_3_2'] = '0.000000'
vals['lshinebone_3_3'] = fmtstr.format(math.cos(math.radians(-LKneePitch)))#'1.000000'
vals['lankle_1_1'] = fmtstr.format(math.cos(math.radians(-LAnklePitch)))#'1.000000'
vals['lankle_1_2'] = '0.000000'
vals['lankle_1_3'] = fmtstr.format(math.sin(math.radians(-LAnklePitch)))#'0.000000'
vals['lankle_2_1'] = '0.000000'
vals['lankle_2_2'] = '1.000000'
vals['lankle_2_3'] = '0.000000'
vals['lankle_3_1'] = fmtstr.format(-math.sin(math.radians(-LAnklePitch)))#'0.000000'
vals['lankle_3_2'] = '0.000000'
vals['lankle_3_3'] = fmtstr.format(math.cos(math.radians(-LAnklePitch)))#'1.000000'
vals['lfoot_1_1'] = '1.000000'
vals['lfoot_1_2'] = '0.000000'
vals['lfoot_1_3'] = '0.000000'
vals['lfoot_2_1'] = '0.000000'
vals['lfoot_2_2'] = fmtstr.format(math.cos(math.radians(-LAnkleRoll)))#'1.000000'
vals['lfoot_2_3'] = fmtstr.format(-math.sin(math.radians(-LAnkleRoll)))#'0.000000'
vals['lfoot_3_1'] = '0.000000'
vals['lfoot_3_2'] = fmtstr.format(math.sin(math.radians(-LAnkleRoll)))#'0.000000'
vals['lfoot_3_3'] = fmtstr.format(math.cos(math.radians(-LAnkleRoll)))#'1.000000'
# }}}
s = "{"
for key in vals:
s += "s/_"+key+"_/"+vals[key]+"/ "+os.linesep
s += "}"
# print(s)
with open(os.path.splitext(infile)[0]+".mod.html",'w') as f:
p = call(['sed', '-e', s, infile], stdout=f)
| lgpl-3.0 | 1,122,742,542,264,320,800 | 37.273703 | 90 | 0.665483 | false |
jadhavhninad/-CSE_515_MWD_Analytics- | Phase 1/Project Code/phase1_code/print_actor_vector.py | 1 | 4823 | from mysqlConn import DbConnect
import argparse
import operator
from math import log
import pprint
#DB connector and curosor
db = DbConnect()
db_conn = db.get_connection()
cur2 = db_conn.cursor();
#Argument parser
parser = argparse.ArgumentParser()
parser.add_argument("ACTOR_ID")
parser.add_argument("MODEL")
args = parser.parse_args()
#print args.ACTOR_ID
#TF MODEL
#Subtask:1 - Get tags and movieRank weight for an actor id
#a. A dictionary to store the returned data.
data_dictionary_tf = {}
data_dictionary_tf_idf = {}
#Get sum of the rank_weights for calculating the idf value (sum of all rankweights/ sum of rank_weights for a specific tag)
cur2.execute("SELECT SUM(rank_wt_norm) FROM `movie-actor`")
result0 = cur2.fetchone()
total_rank_weight = result0[0]
total_tag_newness_weight = 0
#Get total movie-actor count for idf calculation. Here every movie-actor row value is a document, i.e. a combination
#for which a particular tag occurs.
cur2.execute("SELECT COUNT(distinct movieid,actorid) FROM `movie-actor`")
result0 = cur2.fetchone()
total_documents = float(result0[0])
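# In short (added summary of the computation below, not part of the original script):
# tf(tag) = sum over the actor's movies of (tag newness weight * movie rank weight),
# normalised by the total of those products for this actor;
# idf(tag) = ln(total_documents / movie-actor pairs carrying the tag), with the pair
# count read from the precomputed genome-tags.total_wt_movie_actor_count column;
# tfidf(tag) = tf(tag) * idf(tag).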
#print total_rank_weight
cur2.execute("SELECT movieid,rank_wt_norm FROM `movie-actor` where actorid = %s",[args.ACTOR_ID])
result1 = cur2.fetchall()
for data1 in result1:
#print data1
act_movie_id = data1[0]
act_movie_rank_wt = data1[1]
actor_tag_id=""
final_tag_wt=""
#Select distint tagIDs for the movieID
cur2.execute("SELECT tagid,newness_wt_norm_nolog FROM mltags WHERE movieid = %s",[act_movie_id])
result2 = cur2.fetchall()
for data2 in result2:
#_size_count = _size_count + 1
actor_tag_id = data2[0]
actor_tag_newness = data2[1]
#Get the tag_name for the tagID. For each tag weight, add the rank_weight as well.
cur2.execute("SELECT tag FROM `genome-tags` WHERE tagID = %s", [actor_tag_id])
result2_sub = cur2.fetchall()
tagName = result2_sub[0]
#tagWeight = round(((float(actor_tag_newness)/ float(total_tag_newness_weight)) * float(act_movie_rank_wt)),10)
tagWeight = round((float(actor_tag_newness) * float(act_movie_rank_wt)), 10)
total_tag_newness_weight = total_tag_newness_weight + tagWeight
if tagName in data_dictionary_tf:
data_dictionary_tf[tagName] = round((data_dictionary_tf[tagName] + tagWeight), 10)
else:
data_dictionary_tf[tagName] = tagWeight
#Set the weight of all other tags to zero and, for the tags already present, calculate the TF by dividing by total_tag_newness_weight
cur2.execute("SELECT tag FROM `genome-tags`")
tagName = cur2.fetchall()
for key in tagName:
if key in data_dictionary_tf:
#print 'curval',key
data_dictionary_tf[key] = round((float(data_dictionary_tf[key]) / float(total_tag_newness_weight)),10)
else:
data_dictionary_tf[key] = 0
actor_model_value_tf = sorted(data_dictionary_tf.items(), key=operator.itemgetter(1), reverse=True)
#IDF CALCULATION.
if args.MODEL == "tf":
pprint.pprint(actor_model_value_tf)
else:
#TF-IDF CALCULATION
cur2.execute("SELECT movieid FROM `movie-actor` where actorid = %s", [args.ACTOR_ID])
result3 = cur2.fetchall()
for data1 in result3:
# print data1
act_movie_id = data1[0]
# Select tagIDs for the movieID. we choose distinct since the total_weighted_movie_actor_count is already precomputed.
cur2.execute("SELECT distinct(tagid) FROM mltags WHERE movieid = %s", [act_movie_id])
result4 = cur2.fetchall()
for data2 in result4:
actor_tag_id = data2[0]
cur2.execute("SELECT tag,total_wt_movie_actor_count FROM `genome-tags` WHERE tagID = %s", [actor_tag_id])
result2_sub = cur2.fetchone()
tagName = result2_sub[0]
tag_movie_actor_count = result2_sub[1]
if tagName in data_dictionary_tf_idf:
continue
else:
data_dictionary_tf_idf[tagName] = float(tag_movie_actor_count)
#Once all the tag data has been recorded, calculate the idf and tfidf for each tag.
#Make weight of other tags to zero.
cur2.execute("SELECT tag FROM `genome-tags`")
tgName = cur2.fetchall()
for key in tgName:
keyval = key[0]
if keyval in data_dictionary_tf_idf:
data_dictionary_tf_idf[keyval] = round((float(log((total_documents / data_dictionary_tf_idf[keyval]), 2.71828))),10)
data_dictionary_tf_idf[keyval] = round(float(float(data_dictionary_tf[key]) * float(data_dictionary_tf_idf[keyval])), 10)
else:
data_dictionary_tf_idf[keyval] = 0
actor_model_value_tf_idf = sorted(data_dictionary_tf_idf.items(), key=operator.itemgetter(1), reverse=True)
pprint.pprint(actor_model_value_tf_idf)
| gpl-3.0 | -3,575,886,717,172,771,000 | 33.697842 | 133 | 0.671574 | false |
philippj/python-burningseries | notifier.py | 1 | 3494 | #!/usr/local/bin/python
# -*- coding: utf-8 -*-
#python-burningseries Copyright (C) 2015 Philipp "freaK"
'''
YOWSUP 2.0
https://github.com/tgalal/yowsup
'''
'''
python-burningseries
# This sends notifications to an email address or a WhatsApp number using yowsup-cli
'''
#Example for notify params
'''
# Yowsup
{
'phones': ['12309123892139'],
'configFile': '/home/mayowsupconfigfile.config' #must be absolute
}
# Email
{
'addresses': ['[email protected]'],
'senderAddress': '[email protected]',
'sendmail_location': '/usr/sbin/sendmail'
}
#others will come
'''
import os
import time
import threading
import smtplib
from base import BurningSeriesBase
class BurningSeriesNotifier(threading.Thread):
def __init__(self, notifyType='email', notifyParams={'addresses': ['[email protected]'], 'senderAddress': '[email protected]', 'sendmail_location': '/usr/sbin/sendmail'}):
threading.Thread.__init__(self)
bsBase = BurningSeriesBase(True, False)
self.notifyType = notifyType
self.notifyParams = notifyParams
bsBase.callbacks['newEpisode'] = self.newEpisode
bsBase.callbacks['newSeason'] = self.newSeason
bsBase.callbacks['newEpisodeInLanguage'] = self.newEpisodeInLanguage
self.notify = None
#not using the direct yowsup library, only the console commands to keep it simple
if notifyType == 'yowsup':
if not os.path.isfile(notifyParams['configFile']):
notifyType = 'email'
else:
self.notify = self.sendYowsupCommand
elif notifyType == 'email':
self.notify = self.sendEmailCommand
def newEpisode(self, series, config, params):
string = config['title'] + ' | Season ' + str(params['season']) + ' | Episode ' + str(params['episode']) + ' is now available! Titles: (DE) ' + config['episodes'][params['season']][params['episode']]['titles']['german'] + ' / (EN) ' + config['episodes'][params['season']][params['episode']]['titles']['english']
self.notify(string)
def newSeason(self, series, config, params):
string = config['title'] + ' | Season ' + str(params['newSeason']) + ' is now available!'
self.notify(string)
def newEpisodeInLanguage(self, series, config, params):
string = config['title'] + ' | An episode is now available in - ' + params['language'] + ' - Title: ' + params['title']
self.notify(string)
def sendEmailCommand(self, message):
#self.smtplibServer.sendmail(self.notifyParams['senderAddress'], self.notifyParams['addresses'], final_message)
p = os.popen("%s -t" % self.notifyParams['sendmail_location'], "w")
p.write("From: %s\n" % self.notifyParams['senderAddress'])
p.write("To: %s\n" % ",".join(self.notifyParams['addresses']))
p.write("Subject: BurningSeriesNotifier\n")
p.write("\n")
p.write(message)
p.close()
def sendYowsupCommand(self, message):
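# Shells out to yowsup-cli once per configured phone number, building a command of
# the form: yowsup-cli demos -s <phone> "<message>" -c <configFile>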
for number in self.notifyParams['phones']:
print os.system('yowsup-cli demos -s ' + number + ' "' + message + '" -c ' + self.notifyParams['configFile'])
x = BurningSeriesNotifier()
while True:
try:
time.sleep(1)
except:
raise SystemExit
| gpl-2.0 | -1,798,651,883,595,741,000 | 32.932039 | 321 | 0.59731 | false |
ipa-led/airbus_coop | airbus_docgen/src/airbus_docgen/docgen/pkg/__init__.py | 1 | 7020 | #!/usr/bin/env python
#
# Copyright 2015 Airbus
# Copyright 2017 Fraunhofer Institute for Manufacturing Engineering and Automation (IPA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
from airbus_docgen import env
from airbus_docgen.common import html
from airbus_docgen.common.html import HtmlElement, HtmlElementTree
from airbus_docgen.docgen.pkg.summary import PackageSummary
from airbus_docgen.docgen.pkg.description import PackageDescription
from airbus_docgen.docgen.pkg.dependencies import PackageDependencies
from airbus_docgen.docgen.pkg.generations import PackageGenerations
from airbus_docgen.docgen.pkg.node import RosNode
class AgiDoc(HtmlElement):
def __init__(self):
HtmlElement.__init__(self,
tag=html.Sections.section,
attrib={"class":"nodes"})
def read(self, pkgdir, agi_xml, index):
index_node=0
for node_xml in agi_xml.iter('node'):
index_node+=1
title = HtmlElement(html.Sections.h3)
node_name = node_xml.attrib['name']
title.text = "%i.%i. %s"%(index, index_node, node_name)
self.append(title)
try:
ros_node = RosNode()
if ros_node.read(node_name, node_xml, index, index_node) is True:
self.append(ros_node)
except Exception as ex:
html.HTMLException(ex, self)
if index_node == 0:
return False
else:
return True
class RosPackage(HtmlElement):
def __init__(self, pkgdir):
HtmlElement.__init__(self,
tag=html.Sections.section,
attrib={"class":"package"})
self._h2_index = 0
self._dep_pkg = None
pkg_xml = None
# Load and read package.xml ressource
pkg_xml_dir = pkgdir+'/package.xml'
if os.access(pkg_xml_dir, os.R_OK):
pkg_xml = html.loadHtml(pkg_xml_dir)
self._read_pkg_xml(pkgdir, pkg_xml)
else:
html.HTMLException("Cannot found %s !"%pkg_xml_dir, self)
# Load and read CMakeLists.txt ressource
cmakelists_dir = pkgdir+'/CMakeLists.txt'
if os.access(cmakelists_dir, os.R_OK):
with open(cmakelists_dir) as fp:
cmakelists = fp.read()
self._read_cmakelists(pkgdir, cmakelists)
else:
html.HTMLException("Cannot found %s !"%cmakelists_dir, self)
if pkg_xml is not None:
self._read_agi_doc_xml(pkgdir, pkg_xml)
def _read_pkg_xml(self, pkgdir, pkg_xml):
pkg_name = HtmlElement(html.Sections.h1)
pkg_name.text = pkg_xml.find("./name").text
self.append(pkg_name)
p = HtmlElement(html.Grouping.p)
p.set("align","center")
img = HtmlElement(html.EmbeddedContent.img)
img.set("src","../dot/gen/%s.png"%pkg_xml.find("./name").text)
p.append(img)
self.append(p)
pkg_summary_title = HtmlElement(html.Sections.h2)
pkg_summary_title.text = "%i. Package Summary"%self.index_h2()
self.append(pkg_summary_title)
try:
self.append(PackageSummary(pkgdir, pkg_xml))
except Exception as ex:
html.HTMLException(ex, self)
pkg_desc_title = HtmlElement(html.Sections.h2)
pkg_desc_title.text = "%i. Package description"%self.index_h2()
self.append(pkg_desc_title)
try:
self.append(PackageDescription(pkgdir, pkg_xml))
except Exception as ex:
html.HTMLException(ex, self)
pkg_dep_title = HtmlElement(html.Sections.h2)
pkg_dep_title.text = "%i. Package dependencies"%self.index_h2()
self.append(pkg_dep_title)
try:
self._dep_pkg = PackageDependencies(pkgdir, pkg_xml)
self.append(self._dep_pkg)
except Exception as ex:
html.HTMLException(ex, self)
def _read_cmakelists(self, pkgdir, cmakefile):
try:
pkg = PackageGenerations()
dep_list = self._dep_pkg.get_dependencies_lists()
if pkg.read(pkgdir, cmakefile, dep_list) is True:
pkg_build_title = HtmlElement(html.Sections.h2)
pkg_build_title.text = "%i. Package generation(s)"%self.index_h2()
self.append(pkg_build_title)
self.append(pkg)
except Exception as ex:
html.HTMLException(ex, self)
def _read_agi_doc_xml(self, pkgdir, pkg_xml):
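# Looks for an export snippet in package.xml of the (assumed) form:
#   <export><agidoc src="relative/path/to/doc.xml"/></export>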
agidoc_elem = pkg_xml.find("./export/agidoc")
if agidoc_elem is not None:
if 'src' in agidoc_elem.attrib:
fdoc = os.path.join(pkgdir, agidoc_elem.attrib['src'])
if os.path.isfile(fdoc):
agi = AgiDoc()
if agi.read(pkgdir, html.loadHtml(fdoc), self._h2_index+1) is True:
title = HtmlElement(html.Sections.h2)
title.text = "%i. More description"%self.index_h2()
self.append(title)
self.append(agi)
else:
html.HTMLException("Cannot open agidoc '%s'"%fdoc, self)
else:
html.HTMLException("AGI documentation not found !", self)
def index_h2(self):
self._h2_index+=1
return self._h2_index
class HtmlPkgFileGenerator(HtmlElementTree):
def __init__(self, index, pkg_dir, pkg_name):
HtmlElementTree.__init__(self, index.getroot())
self._pkg_name = pkg_name
div = self.getroot().find("./body/div")
try:
pkg = RosPackage(pkg_dir)
div.append(pkg)
except Exception as ex:
html.HTMLException(ex, div)
def save(self):
html.indent(self.getroot())
#print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!",os.path.join(env.ROSDOC_ROOT, "%s.html"%self._pkg_name)
self.write(os.path.join(env.ROSDOC_GEN, "%s.html"%self._pkg_name),
encoding="utf8",
method="xml")
def __str__(self):
html.indent(self.getroot())
return html.tostring(self.getroot())
| apache-2.0 | 3,991,856,374,120,932,000 | 35.753927 | 135 | 0.562963 | false |
chrys87/fenrir | src/fenrirscreenreader/commands/commands/review_next_line.py | 1 | 1627 | #!/bin/python
# -*- coding: utf-8 -*-
# Fenrir TTY screen reader
# By Chrys, Storm Dragon, and contributors.
from fenrirscreenreader.core import debug
from fenrirscreenreader.utils import line_utils
class command():
def __init__(self):
pass
def initialize(self, environment):
self.env = environment
def shutdown(self):
pass
def getDescription(self):
return _('moves review to the next line ')
def run(self):
self.env['screen']['oldCursorReview'] = self.env['screen']['newCursorReview']
if not self.env['screen']['newCursorReview']:
self.env['screen']['newCursorReview'] = self.env['screen']['newCursor'].copy()
self.env['screen']['newCursorReview']['x'], self.env['screen']['newCursorReview']['y'], nextLine, endOfScreen = \
line_utils.getNextLine(self.env['screen']['newCursorReview']['x'], self.env['screen']['newCursorReview']['y'], self.env['screen']['newContentText'])
if nextLine.isspace():
self.env['runtime']['outputManager'].presentText(_("blank"), soundIcon='EmptyLine', interrupt=True, flush=False)
else:
self.env['runtime']['outputManager'].presentText(nextLine, interrupt=True, flush=False)
if endOfScreen:
if self.env['runtime']['settingsManager'].getSettingAsBool('review', 'endOfScreen'):
self.env['runtime']['outputManager'].presentText(_('end of screen'), interrupt=True, soundIcon='EndOfScreen')
def setCallback(self, callback):
pass
| lgpl-3.0 | 8,339,499,634,889,436,000 | 43.194444 | 158 | 0.610326 | false |
lipis/life-line | main/control/user.py | 1 | 12057 | # coding: utf-8
import copy
from flask.ext import login
from flask.ext import wtf
from flask.ext.babel import gettext as __
from flask.ext.babel import lazy_gettext as _
from google.appengine.ext import ndb
import flask
import wtforms
import auth
import cache
import config
import i18n
import model
import task
import util
from main import app
###############################################################################
# User List
###############################################################################
@app.route('/admin/user/')
@auth.admin_required
def user_list():
user_dbs, cursors = model.User.get_dbs(
email=util.param('email'), prev_cursor=True,
)
permissions = list(UserUpdateForm._permission_choices)
permissions += util.param('permissions', list) or []
return flask.render_template(
'user/user_list.html',
html_class='user-list',
title=_('User List'),
user_dbs=user_dbs,
next_url=util.generate_next_url(cursors['next']),
prev_url=util.generate_next_url(cursors['prev']),
api_url=flask.url_for('api.user.list'),
permissions=sorted(set(permissions)),
)
###############################################################################
# User Update
###############################################################################
class UserUpdateForm(i18n.Form):
username = wtforms.StringField(
model.User.username._verbose_name,
[wtforms.validators.required(), wtforms.validators.length(min=2)],
filters=[util.email_filter],
)
name = wtforms.StringField(
model.User.name._verbose_name,
[wtforms.validators.required()], filters=[util.strip_filter],
)
email = wtforms.StringField(
model.User.email._verbose_name,
[wtforms.validators.optional(), wtforms.validators.email()],
filters=[util.email_filter],
)
locale = wtforms.SelectField(
model.User.locale._verbose_name,
choices=config.LOCALE_SORTED, filters=[util.strip_filter],
)
admin = wtforms.BooleanField(model.User.admin._verbose_name)
active = wtforms.BooleanField(model.User.active._verbose_name)
verified = wtforms.BooleanField(model.User.verified._verbose_name)
permissions = wtforms.SelectMultipleField(
model.User.permissions._verbose_name,
filters=[util.sort_filter],
)
_permission_choices = set()
def __init__(self, *args, **kwds):
super(UserUpdateForm, self).__init__(*args, **kwds)
self.permissions.choices = [
(p, p) for p in sorted(UserUpdateForm._permission_choices)
]
@auth.permission_registered.connect
def _permission_registered_callback(sender, permission):
UserUpdateForm._permission_choices.add(permission)
@app.route('/admin/user/create/', methods=['GET', 'POST'])
@app.route('/admin/user/<int:user_id>/update/', methods=['GET', 'POST'])
@auth.admin_required
def user_update(user_id=0):
if user_id:
user_db = model.User.get_by_id(user_id)
else:
user_db = model.User(name='', username='')
if not user_db:
flask.abort(404)
form = UserUpdateForm(obj=user_db)
for permission in user_db.permissions:
form.permissions.choices.append((permission, permission))
form.permissions.choices = sorted(set(form.permissions.choices))
if form.validate_on_submit():
if not util.is_valid_username(form.username.data):
form.username.errors.append(_('This username is invalid.'))
elif not model.User.is_username_available(form.username.data, user_db.key):
form.username.errors.append(_('This username is already taken.'))
else:
form.populate_obj(user_db)
if auth.current_user_key() == user_db.key:
user_db.admin = True
user_db.active = True
user_db.put()
return flask.redirect(flask.url_for(
'user_list', order='-modified', active=user_db.active,
))
return flask.render_template(
'user/user_update.html',
title=user_db.name or _('New User'),
html_class='user-update',
form=form,
user_db=user_db,
api_url=flask.url_for('api.user', user_key=user_db.key.urlsafe()) if user_db.key else ''
)
###############################################################################
# User Verify
###############################################################################
@app.route('/user/verify/<token>/')
@auth.login_required
def user_verify(token):
user_db = auth.current_user_db()
if user_db.token != token:
flask.flash(__('That link is either invalid or expired.'), category='danger')
return flask.redirect(flask.url_for('profile'))
user_db.verified = True
user_db.token = util.uuid()
user_db.put()
flask.flash(__('Hooray! Your email is now verified.'), category='success')
return flask.redirect(flask.url_for('profile'))
###############################################################################
# User Forgot
###############################################################################
class UserForgotForm(i18n.Form):
email = wtforms.StringField(
'Email',
[wtforms.validators.required(), wtforms.validators.email()],
filters=[util.email_filter],
)
recaptcha = wtf.RecaptchaField()
@app.route('/user/forgot/', methods=['GET', 'POST'])
def user_forgot(token=None):
if not config.CONFIG_DB.has_email_authentication:
flask.abort(418)
form = auth.form_with_recaptcha(UserForgotForm(obj=auth.current_user_db()))
if form.validate_on_submit():
cache.bump_auth_attempt()
email = form.email.data
user_dbs, cursors = util.get_dbs(
model.User.query(), email=email, active=True, limit=2,
)
count = len(user_dbs)
if count == 1:
task.reset_password_notification(user_dbs[0])
return flask.redirect(flask.url_for('welcome'))
elif count == 0:
form.email.errors.append('This email was not found')
elif count == 2:
task.email_conflict_notification(email)
form.email.errors.append(
'''We are sorry but it looks like there is a conflict with your
account. Our support team is already informed and we will get back to
you as soon as possible.'''
)
if form.errors:
cache.bump_auth_attempt()
return flask.render_template(
'user/user_forgot.html',
title=_('Forgot Password?'),
html_class='user-forgot',
form=form,
)
###############################################################################
# User Reset
###############################################################################
class UserResetForm(i18n.Form):
new_password = wtforms.StringField(
_('New Password'),
[wtforms.validators.required(), wtforms.validators.length(min=6)],
)
@app.route('/user/reset/<token>/', methods=['GET', 'POST'])
@app.route('/user/reset/')
def user_reset(token=None):
user_db = model.User.get_by('token', token)
if not user_db:
flask.flash(__('That link is either invalid or expired.'), category='danger')
return flask.redirect(flask.url_for('welcome'))
if auth.is_logged_in():
login.logout_user()
return flask.redirect(flask.request.path)
form = UserResetForm()
if form.validate_on_submit():
user_db.password_hash = util.password_hash(user_db, form.new_password.data)
user_db.token = util.uuid()
user_db.verified = True
user_db.put()
flask.flash(__('Your password was changed successfully.'), category='success')
return auth.signin_user_db(user_db)
return flask.render_template(
'user/user_reset.html',
title='Reset Password',
html_class='user-reset',
form=form,
user_db=user_db,
)
###############################################################################
# User Activate
###############################################################################
class UserActivateForm(i18n.Form):
name = wtforms.StringField(
model.User.name._verbose_name,
[wtforms.validators.required()], filters=[util.strip_filter],
)
password = wtforms.StringField(
_('Password'),
[wtforms.validators.required(), wtforms.validators.length(min=6)],
)
@app.route('/user/activate/<token>/', methods=['GET', 'POST'])
def user_activate(token):
if auth.is_logged_in():
login.logout_user()
return flask.redirect(flask.request.path)
user_db = model.User.get_by('token', token)
if not user_db:
flask.flash(__('That link is either invalid or expired.'), category='danger')
return flask.redirect(flask.url_for('welcome'))
form = UserActivateForm(obj=user_db)
if form.validate_on_submit():
form.populate_obj(user_db)
user_db.password_hash = util.password_hash(user_db, form.password.data)
user_db.token = util.uuid()
user_db.verified = True
user_db.put()
return auth.signin_user_db(user_db)
return flask.render_template(
'user/user_activate.html',
title='Activate Account',
html_class='user-activate',
user_db=user_db,
form=form,
)
###############################################################################
# User Merge
###############################################################################
class UserMergeForm(i18n.Form):
user_key = wtforms.HiddenField('User Key', [wtforms.validators.required()])
user_keys = wtforms.HiddenField('User Keys', [wtforms.validators.required()])
username = wtforms.StringField(_('Username'), [wtforms.validators.optional()])
name = wtforms.StringField(
_('Name (merged)'),
[wtforms.validators.required()], filters=[util.strip_filter],
)
email = wtforms.StringField(
_('Email (merged)'),
[wtforms.validators.optional(), wtforms.validators.email()],
filters=[util.email_filter],
)
@app.route('/admin/user/merge/', methods=['GET', 'POST'])
@auth.admin_required
def user_merge():
user_keys = util.param('user_keys', list)
if not user_keys:
flask.abort(400)
user_db_keys = [ndb.Key(urlsafe=k) for k in user_keys]
user_dbs = ndb.get_multi(user_db_keys)
if len(user_dbs) < 2:
flask.abort(400)
user_dbs.sort(key=lambda user_db: user_db.created)
merged_user_db = user_dbs[0]
auth_ids = []
permissions = []
is_admin = False
is_active = False
for user_db in user_dbs:
auth_ids.extend(user_db.auth_ids)
permissions.extend(user_db.permissions)
is_admin = is_admin or user_db.admin
is_active = is_active or user_db.active
if user_db.key.urlsafe() == util.param('user_key'):
merged_user_db = user_db
auth_ids = sorted(list(set(auth_ids)))
permissions = sorted(list(set(permissions)))
merged_user_db.permissions = permissions
merged_user_db.admin = is_admin
merged_user_db.active = is_active
merged_user_db.verified = False
form_obj = copy.deepcopy(merged_user_db)
form_obj.user_key = merged_user_db.key.urlsafe()
form_obj.user_keys = ','.join(user_keys)
form = UserMergeForm(obj=form_obj)
if form.validate_on_submit():
form.populate_obj(merged_user_db)
merged_user_db.auth_ids = auth_ids
merged_user_db.put()
deprecated_keys = [k for k in user_db_keys if k != merged_user_db.key]
merge_user_dbs(merged_user_db, deprecated_keys)
return flask.redirect(
flask.url_for('user_update', user_id=merged_user_db.key.id()),
)
return flask.render_template(
'user/user_merge.html',
title=_('Merge Users'),
html_class='user-merge',
user_dbs=user_dbs,
merged_user_db=merged_user_db,
form=form,
auth_ids=auth_ids,
api_url=flask.url_for('api.user.list', user_keys=','.join(user_keys)),
)
@ndb.transactional(xg=True)
def merge_user_dbs(user_db, deprecated_keys):
# TODO: Merge possible user data before handling deprecated users
deprecated_dbs = ndb.get_multi(deprecated_keys)
for deprecated_db in deprecated_dbs:
deprecated_db.auth_ids = []
deprecated_db.active = False
deprecated_db.verified = False
if not deprecated_db.username.startswith('_'):
deprecated_db.username = '_%s' % deprecated_db.username
ndb.put_multi(deprecated_dbs)
| mit | -6,274,276,460,085,191,000 | 31.763587 | 94 | 0.606121 | false |
jschornick/i2c_device | setup.py | 1 | 1721 | import os
from glob import glob
from setuptools import setup, find_packages
# Setup flags and parameters
pkg_name = 'i2c_device' # top-level package name
# Cache readme contents for use as long_description
readme = open('readme.md').read()
# Call setup()
setup(
name=pkg_name,
version='0.1',
description='I2C device configuration library',
long_description=readme,
url='https://github.com/jschornick/i2c_device',
author='Jeff Schornick',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
data_files = [ ('configs', glob("configs/*")) ],
scripts = glob("examples/*"),
# NOTE: This module has been most thoroughly tested using the python-smbus
# library, which is NOT available via PyPI. Install separately
# via your favorite package manager or from the source:
# http://www.lm-sensors.org/browser/i2c-tools/trunk/py-smbus/
#
# Alternately, try using smbus-cffi below, which just might work, but
# is definitely slower.
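# On Debian/Raspbian, for example, the smbus bindings can typically be
# installed with: sudo apt-get install python-smbus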
install_requires=[
#'smbus-cffi',
'PyYAML'
],
test_suite=(pkg_name + '.tests'),
tests_require=['mock'],
platforms='any',
keywords='i2c device abstraction development utilities tools',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
])
| mit | -3,040,005,242,851,160,000 | 32.096154 | 77 | 0.677513 | false |
alexandrosstergiou/The-Drivers-Assistant-Traffic-Sign-Recognition | show.py | 1 | 1041 | import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import ImageGrid
import numpy as np
from os import listdir, getcwd
from os import chdir
from PIL import Image
import matplotlib.gridspec as gridspec
import matplotlib.image as mimage
from matplotlib.backends.backend_pdf import PdfPages
files = listdir('CNN_run2/Visualisations_w_folders/max_pooling_3')
chdir('CNN_run2/Visualisations_w_folders/max_pooling_3')
images = [Image.open(f).convert('LA') for f in files]
"""
fig = plt.figure()
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols = (2, 5), # creates 2x2 grid of axes
axes_pad=0.1, # pad between axes in inch.
)
"""
num_rows = 1
num_cols = 128
fig = plt.figure()
gs = gridspec.GridSpec(num_rows, num_cols, wspace=0.0)
i = 0
for g in gs:
ax = plt.subplot(g)
ax.imshow(images[i])
ax.set_xticks([])
ax.set_yticks([])
i = i + 1
# ax.set_aspect('auto')
plt.axis('off')
plt.show()
| mit | 2,028,056,922,246,869,200 | 21.148936 | 66 | 0.691643 | false |
andyneff/python-plyfile | examples/plot.py | 1 | 1337 | '''
Example script illustrating plotting of PLY data using Mayavi. Mayavi
is not a dependency of plyfile, but you will need to install it in order
to run this script. Failing to do so will immediately result in
ImportError.
'''
from argparse import ArgumentParser
import numpy
from mayavi import mlab
from plyfile import PlyData
def main():
parser = ArgumentParser()
parser.add_argument('ply_filename')
args = parser.parse_args()
plot(PlyData.read(args.ply_filename))
mlab.show()
def plot(ply):
'''
Plot vertices and triangles from a PlyData instance. Assumptions:
`ply' has a 'vertex' element with 'x', 'y', and 'z'
properties;
`ply' has a 'face' element with an integral list property
'vertex_indices', all of whose elements have length 3.
'''
vertex = ply['vertex']
(x, y, z) = (vertex[t] for t in ('x', 'y', 'z'))
mlab.points3d(x, y, z, color=(1, 1, 1), mode='point')
if 'face' in ply:
tri_idx = ply['face']['vertex_indices']
idx_dtype = tri_idx[0].dtype
triangles = numpy.fromiter(tri_idx, [('data', idx_dtype, (3,))],
count=len(tri_idx))['data']
mlab.triangular_mesh(x, y, z, triangles,
color=(1, 0, 0.4), opacity=0.5)
main()
| gpl-3.0 | -5,918,219,094,796,757,000 | 23.759259 | 72 | 0.59985 | false |
Akuli/porcupine | tests/test_filetypes_plugin.py | 1 | 3057 | import logging
import pathlib
import sys
from tkinter import filedialog
import pytest
from porcupine import dirs, filedialog_kwargs, get_main_window
from porcupine.plugins import filetypes
@pytest.fixture
def custom_filetypes():
# We don't overwrite the user's file because porcupine.dirs is monkeypatched
assert not dirs.user_config_dir.startswith(str(pathlib.Path.home()))
user_filetypes = pathlib.Path(dirs.user_config_dir) / "filetypes.toml"
user_filetypes.write_text(
"""
['Mako template']
filename_patterns = ["mako-templates/*.html"]
pygments_lexer = 'pygments.lexers.MakoHtmlLexer'
"""
)
filetypes.load_filetypes()
filetypes.set_filedialog_kwargs()
yield
user_filetypes.unlink()
filetypes.filetypes.clear()
filetypes.load_filetypes()
filetypes.set_filedialog_kwargs()
def test_filedialog_patterns_got_stripped():
python_patterns = dict(filedialog_kwargs["filetypes"])["Python"]
assert "*.py" not in python_patterns
assert ".py" in python_patterns
@pytest.mark.skipif(sys.platform != "linux", reason="don't know how filedialog works on non-Linux")
def test_actually_running_filedialog(custom_filetypes):
# Wait and then press Esc. That's done as Tcl code because the Tk widget
# representing the dialog can't be used with tkinter.
root = get_main_window().nametowidget(".")
root.after(1000, root.eval, "event generate [focus] <Escape>")
# If filedialog_kwargs are wrong, then this errors.
filedialog.askopenfilename(**filedialog_kwargs)
def test_bad_filetype_on_command_line(run_porcupine):
output = run_porcupine(["-n", "FooBar"], 2)
assert "no filetype named 'FooBar'" in output
def test_unknown_filetype(filetab, tmp_path):
# pygments does not know graphviz, see how it gets handled
filetab.textwidget.insert(
"end",
"""\
digraph G {
Hello->World;
}
""",
)
filetab.path = tmp_path / "graphviz-hello-world.gvz"
filetab.save()
lexer_class_name = filetypes.get_filetype_for_tab(filetab)["pygments_lexer"]
assert lexer_class_name.endswith(".TextLexer")
def test_slash_in_filename_patterns(custom_filetypes, caplog, tmp_path):
def lexer_name(path):
return filetypes.guess_filetype_from_path(path)["pygments_lexer"]
assert lexer_name(tmp_path / "foo" / "bar.html") == "pygments.lexers.HtmlLexer"
assert lexer_name(tmp_path / "lol-mako-templates" / "bar.html") == "pygments.lexers.HtmlLexer"
with caplog.at_level(logging.WARNING):
assert (
lexer_name(tmp_path / "mako-templates" / "bar.html") == "pygments.lexers.MakoHtmlLexer"
)
assert len(caplog.records) == 1
assert "2 file types match" in caplog.records[0].message
assert str(tmp_path) in caplog.records[0].message
assert "HTML, Mako template" in caplog.records[0].message
# filedialog doesn't support slashes in patterns
for filetype_name, patterns in filedialog_kwargs["filetypes"]:
for pattern in patterns:
assert "/" not in pattern
| mit | -3,899,674,040,829,050,400 | 32.228261 | 99 | 0.697089 | false |
GNOME/dots | dots/docdocument.py | 1 | 1733 | # Dots - A braille translation program.
#
# Copyright (C) 2010 Consorcio Fernando de los Rios
# Author: Fernando Herrera <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import subprocess
from document import Document
from translator import Translator
def get_antiword():
for path in os.environ["PATH"].split(os.pathsep):
f = os.path.join(path, "antiword")
if os.path.exists(f) and os.access(f, os.X_OK):
return f
return None
antiword = get_antiword()
if antiword is None:
raise NameError('Antiword not found')
class DocDocument(Document):
def _get_text(self, file):
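# antiword -x db dumps the .doc contents as DocBook XML on stdout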
text = subprocess.check_output([antiword, "-x", "db", file])
return text
def translate(self, config):
config['outputFormat']['inputTextEncoding'] = "UTF8"
self.translator = Translator(config)
result = self._get_text (self.input_file)
self.braille_text = self.translator.translate_string (result)
if __name__ == "__main__":
import sys
if len(sys.argv) > 1:
document = DocDocument(sys.argv[1])
print document._get_text(sys.argv[1])
| gpl-3.0 | 1,013,742,404,977,871,100 | 31.092593 | 71 | 0.699365 | false |
shashi28/nuts | port scanner/ui_portScanner.py | 1 | 4932 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'portScanner.ui'
#
# Created: Tue Apr 29 18:10:30 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_portScanner(object):
def setupUi(self, portScanner):
portScanner.setObjectName(_fromUtf8("portScanner"))
portScanner.resize(372, 389)
portScanner.setMinimumSize(QtCore.QSize(372, 389))
portScanner.setMaximumSize(QtCore.QSize(372, 389))
self.hostLabel = QtGui.QLabel(portScanner)
self.hostLabel.setGeometry(QtCore.QRect(20, 30, 61, 16))
self.hostLabel.setObjectName(_fromUtf8("hostLabel"))
self.hostLineEdit = QtGui.QLineEdit(portScanner)
self.hostLineEdit.setGeometry(QtCore.QRect(80, 30, 171, 20))
self.hostLineEdit.setObjectName(_fromUtf8("hostLineEdit"))
self.portFromSpinBox = QtGui.QSpinBox(portScanner)
self.portFromSpinBox.setGeometry(QtCore.QRect(110, 70, 42, 22))
self.portFromSpinBox.setMinimum(20)
self.portFromSpinBox.setMaximum(65535)
self.portFromSpinBox.setObjectName(_fromUtf8("portFromSpinBox"))
self.portToSpinBox = QtGui.QSpinBox(portScanner)
self.portToSpinBox.setGeometry(QtCore.QRect(210, 70, 42, 22))
self.portToSpinBox.setMinimum(21)
self.portToSpinBox.setMaximum(65536)
self.portToSpinBox.setObjectName(_fromUtf8("portToSpinBox"))
self.fromLabel = QtGui.QLabel(portScanner)
self.fromLabel.setGeometry(QtCore.QRect(20, 70, 81, 16))
self.fromLabel.setObjectName(_fromUtf8("fromLabel"))
self.toLabel = QtGui.QLabel(portScanner)
self.toLabel.setGeometry(QtCore.QRect(170, 70, 31, 16))
self.toLabel.setObjectName(_fromUtf8("toLabel"))
self.scanPushButton = QtGui.QPushButton(portScanner)
self.scanPushButton.setGeometry(QtCore.QRect(290, 30, 75, 23))
self.scanPushButton.setObjectName(_fromUtf8("scanPushButton"))
self.resultTable = QtGui.QTableWidget(portScanner)
self.resultTable.setGeometry(QtCore.QRect(10, 110, 351, 271))
self.resultTable.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.resultTable.setTabKeyNavigation(False)
self.resultTable.setProperty("showDropIndicator", False)
self.resultTable.setDragDropOverwriteMode(False)
self.resultTable.setAlternatingRowColors(True)
self.resultTable.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
self.resultTable.setObjectName(_fromUtf8("resultTable"))
self.resultTable.setColumnCount(2)
self.resultTable.setRowCount(0)
item = QtGui.QTableWidgetItem()
self.resultTable.setHorizontalHeaderItem(0, item)
item = QtGui.QTableWidgetItem()
self.resultTable.setHorizontalHeaderItem(1, item)
self.resultTable.horizontalHeader().setStretchLastSection(True)
self.resultTable.verticalHeader().setVisible(False)
self.stopPushButton = QtGui.QPushButton(portScanner)
self.stopPushButton.setGeometry(QtCore.QRect(290, 60, 75, 23))
self.stopPushButton.setObjectName(_fromUtf8("stopPushButton"))
self.statusLabel = QtGui.QLabel(portScanner)
self.statusLabel.setGeometry(QtCore.QRect(265, 90, 91, 20))
self.statusLabel.setText(_fromUtf8(""))
self.statusLabel.setObjectName(_fromUtf8("statusLabel"))
self.hostLabel.setBuddy(self.hostLineEdit)
self.retranslateUi(portScanner)
QtCore.QMetaObject.connectSlotsByName(portScanner)
def retranslateUi(self, portScanner):
portScanner.setWindowTitle(_translate("portScanner", "Port Scanner - Nuts and Bolts", None))
self.hostLabel.setText(_translate("portScanner", "&Host / IP :", None))
self.hostLineEdit.setPlaceholderText(_translate("portScanner", "Enter Hostname or IP Address", None))
self.fromLabel.setText(_translate("portScanner", "Port No from :", None))
self.toLabel.setText(_translate("portScanner", "to :", None))
self.scanPushButton.setText(_translate("portScanner", "Scan", None))
item = self.resultTable.horizontalHeaderItem(0)
item.setText(_translate("portScanner", "Port No", None))
item = self.resultTable.horizontalHeaderItem(1)
item.setText(_translate("portScanner", "Status", None))
self.stopPushButton.setText(_translate("portScanner", "Stop", None))
| mit | -6,696,726,251,446,786,000 | 49.326531 | 109 | 0.708232 | false |
ewejeen/2017sejongAI | week 12/2-1.py | 1 | 2186 |
from nltk.corpus import movie_reviews
from nltk.classify import NaiveBayesClassifier
from nltk.classify.util import accuracy as nltk_accuracy
def extract_features(words):
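# Presence-only bag-of-words features: every word maps to True, which is the
# feature-dict form expected by nltk's NaiveBayesClassifier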
return dict([(word, True) for word in words])
if __name__=='__main__':
fileids_pos = movie_reviews.fileids('pos')
fileids_neg = movie_reviews.fileids('neg')
features_pos = [(extract_features(movie_reviews.words(
fileids=[f])), 'Positive') for f in fileids_pos]
features_neg = [(extract_features(movie_reviews.words(
fileids=[f])), 'Negative') for f in fileids_neg]
threshold = 0.8
num_pos = int(threshold * len(features_pos))
num_neg = int(threshold * len(features_neg))
features_train = features_pos[:num_pos] + features_neg[:num_neg]
features_test = features_pos[num_pos:] + features_neg[num_neg:]
print('\nNumber of training datapoints:', len(features_train))
print('Number of test datapoints:', len(features_test))
classifier = NaiveBayesClassifier.train(features_train)
print('\nAccuracy of the classifier:', nltk_accuracy(classifier, features_test))
N = 15
print('\nTop ' + str(N) + ' most informative words:')
for i, item in enumerate(classifier.most_informative_features()):
print(str(i+1) + '. ' + item[0])
if i == N - 1:
break
input_reviews = [
"Everything about this movie is outstanding -- the performances, the way the true events are handled, the cinematography. In this day of digital news, this movie makes us stand back and realize what we may lose in the way of investigative journalism as we slowly kill off print media. The focus remains the child abuse scandal in the archdiocese in Boston. That reflects the conflict the characters face and deal with when events make them rethink the focus of their article. The movie is riveting, though we know the outcome."
]
print("\nMovie review predictions:")
for review in input_reviews:
print("\nReview:", review)
probabilities = classifier.prob_classify(extract_features(review.split()))
predicted_sentiment = probabilities.max()
print("Predicted sentiment:", predicted_sentiment)
print("Probability:", round(probabilities.prob(predicted_sentiment), 2))
| gpl-3.0 | 2,249,432,951,698,209,800 | 40.245283 | 531 | 0.723696 | false |
default1406/PhyLab | PythonExperimentDataHandle/phylab.py | 1 | 4356 | # -*- coding: utf-8 -*-
from math import sqrt
# Keep b decimal places (rounded) for every value in the 2D list x
def RoundTwo(x,b):
for i in range(len(x)):
for j in range(len(x[i])):
x[i][j] = round(x[i][j],b)
if b == 0:
x[i][j] = ("%d" %x[i][j])
elif b == 1:
x[i][j] = ("%.1f" %x[i][j])
elif b == 2:
x[i][j] = ("%.2f" %x[i][j])
elif b == 3:
x[i][j] = ("%.3f" %x[i][j])
elif b == 4:
x[i][j] = ("%.4f" %x[i][j])
elif b == 5:
x[i][j] = ("%.5f" %x[i][j])
elif b == 6:
x[i][j] = ("%.6f" %x[i][j])
# Keep b decimal places (rounded) for every value in the 1D list x
def RoundOne(x,b):
for i in range(len(x)):
x[i] = round(x[i],b)
if b == 0:
x[i] = ("%d" %x[i])
elif b == 1:
x[i] = ("%.1f" %x[i])
elif b == 2:
x[i] = ("%.2f" %x[i])
elif b == 3:
x[i] = ("%.3f" %x[i])
elif b == 4:
x[i] = ("%.4f" %x[i])
elif b == 5:
x[i] = ("%.5f" %x[i])
elif b == 6:
x[i] = ("%.6f" %x[i])
# Compute the type-A uncertainty: x is a list, aver is the mean of x, k is the number
# of data points (not necessarily len(x), because the mean may have been appended to x)
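# u_A = sqrt( sum_i (x[i] - aver)**2 / (k * (k - 1)) )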
def Ua(x, aver, k) :
sumx = 0
for i in range(k):
sumx += (x[i] - aver)**2
return sqrt(sumx/(k*(k-1)))
# Format the final result as (f ± u_f):
# given the computed final value and its uncertainty, return the formatted result string
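# e.g. BitAdapt(3.14159, 0.022) returns "(3.14\pm0.02)"; very large or very small
# values are additionally wrapped with a "{\times}10^{n}" factor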
def BitAdapt(x,u_x) :
ten = 0
ften = 0
if (u_x >= 10):
temp = x
while(temp >= 10):
temp = temp/10
ten += 1
x = float(x)/10**ten
u_x = float(u_x)/10**ten
elif (x < 0.001):
temp = x
ften = 0
while(temp < 1):
temp = temp*10
ften += 1
x = float(x) * 10**ften
u_x = float(u_x) * 10**ften
Tempbit = 0
bit = 0
while (1):
i = 0
while(1):
temp = float(u_x)*(10**i)
if(temp >= 1):
bit = i
break
else :
i+=1
u_x = round(float(u_x),bit)
x = round(float(x),bit)
u_x = ("%.*f"%(bit, u_x))
x = ("%.*f"%(bit, x))
# if bit == 0:
# u_x = ("%d" % u_x)
# x = ("%d" % x)
# elif bit == 1:
# u_x = ("%.1f" % u_x)
# x = ("%.1f" % x)
# elif bit == 2:
# u_x = ("%.2f" % u_x)
# x = ("%.2f" % x)
# elif bit == 3:
# u_x = ("%.3f" % u_x)
# x = ("%.3f" % x)
# elif bit == 4:
# u_x = ("%.4f" % u_x)
# x = ("%.4f" % x)
# elif bit == 5:
# u_x = ("%.5f" % u_x)
# x = ("%.5f" % x)
# elif bit == 6:
# u_x = ("%.6f" % u_x)
# x = ("%.6f" % x)
# elif bit == 7:
# u_x = ("%.7f" % u_x)
# x = ("%.7f" % x)
# elif bit == 8:
# u_x = ("%.8f" % u_x)
# x = ("%.8f" % x)
i = 0
while(1):
temp = float(u_x)*(10**i)
if(temp >= 1):
Tempbit = i
break
else :
i+=1
if Tempbit == bit:
break
if ten > 0:
x = "(" + str(x) + "\\pm"
u_x = str(u_x) + "){\\times}10^{" + str(ten) + "}"
elif ften > 0:
x = "(" + str(x) + "\\pm"
u_x = str(u_x) + "){\\times}10^{-" + str(ften) + "}"
else:
x = "(" + str(x) + "\\pm"
u_x = str(u_x) + ")"
return x + u_x
# Convert a number to scientific notation (LaTeX style)
def ToScience(number):
Tempstr = format(number,'.4g')
# If Tempstr contains an 'e', it is already in scientific notation
if 'e' in Tempstr:
index_str = Tempstr.split('e')
if index_str[0] == '1':
return '10^{'+str(int(index_str[1]))+'}'
else:
return index_str[0]+'{\\times}10^{'+str(int(index_str[1]))+'}'
else:
return Tempstr
# Simple (one-variable) linear regression on the 1D lists x and y: y = a + bx
# Returns the list [b, r]
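# Note: x and y are expected to already contain their mean as the last element;
# the slope uses the least-squares form b = (mean_x*mean_y - mean_xy)/(mean_x**2 - mean_x2)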
def ULR(x,y):
size = len(x)-1
x_2 = []
y_2 = []
xy = []
for i in range(size):
x_2.append(x[i]**2)
y_2.append(y[i]**2)
xy.append(x[i] * y[i])
x_2.append(sum(x_2)/size)
y_2.append(sum(y_2)/size)
xy.append(sum(xy)/size)
b = (x[size]*y[size]-xy[size])/(pow(x[size],2)-x_2[size])
r = (xy[size] - x[size]*y[size]) / sqrt((x_2[size] - pow(x[size],2))*(y_2[size]-pow(y[size],2)))
res = [b,r]
return res
# Compute the instrument error limit
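# Appears to sum the per-decade tolerances of a resistance box (coefficients assumed
# from the lab manual): 5% of the sub-unit dials, 0.5% of the units, 0.2% of the tens,
# 0.1% of the rest, plus a fixed 0.02 offset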
def DELTA_R(R):
res = 0.02 + R%1*5/100.0
R = R - R%1
res = res + R%10*5/1000.0
R = R - R%10
res = res + R%100*2/1000.0
R = R - R%100
res = res + R/1000.0
return res
# Compute by the method of successive differences
def DWM(x):
res = []
size = len(x)/2
for i in range(size):
temp = abs(x[i]-x[i+size])
res.append(temp)
return res
# Error tolerance used in tests: return 1 if y is within the allowed range of x, otherwise 0
def Mistake(x,y):
x = abs(x)
y = abs(y)
r1 = x+x/100
r2 = x-x/100
if (y > r1) | (y <r2):
return 0
else:
return 1
| gpl-2.0 | -669,399,115,657,459,200 | 18.959391 | 97 | 0.453204 | false |
vaibhawvipul/Python-Politics-Game | trailblazers.py | 1 | 20603 | import sys
import math
import time
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vEnter Your name"
name = raw_input("> ")
"""This will display only first name"""
f_name = name.split()
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vWelcome %r! Be a \n\n\n\n\t\t\t...TRAILBLAZER..." %f_name[0]
print"\n Demo version 2"
print "\v\v\v\v1.Play"
print "\n2.About"
print "\n3.Exit"
print "\nCOPYRIGHTS RESERVED"
a = int(raw_input("\n\nEnter your choice - "))
if a == 3:
sys.exit(0)
elif a == 2:
print "\nThis game was concieved by Vipul Vaibhaw. It was build by very creative team of Fergusson College students"
print "\nWe are very collaborative team. We have an out of the box idea ready almost everytime."
print "\nThis game was build using Python."
print "\nWant to contact us, drop an e-mail to [email protected]"
elif a == 1:
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vWelcome President %r to your office." %f_name[0]
print "\n\nHere is a message from intelligence. Press ENTER to see the message"
raw_input("")
print "A Terror outfit has grown very strong"
time.sleep(3)
print "They are constantly attacking Kamuri. Kamuri is a small nation which shares boundary with us. It also has religious importance for a minority group in your country."
time.sleep(5)
print"Kamuri and your Country has ancestral tie-ups"
time.sleep(2)
print "Our espionage have reported that it may soon try to overthrow government of Kamuri"
time.sleep(3)
print "\nPress ENTER to continue..."
raw_input("")
print "\n\v\v\v\v\v\v\v\v\v\v\v\v\vPresident of a Superpower nations has invited you over dinner."
print "\nIt could be benificial to your country. You could sort out issue like economic relations, weapon treaties or nuclear deal etc."
print "\nElse you can stay in our own country and solve internal affairs first."
print "\n\n1.You accept the invitation."
print "\n2.You decline the invitation."
b = int(raw_input("\n> "))
if b == 1:
print "\n\v\v\vGreat thought! It would not have been a good step to decline the invitation from a Superpower."
time.sleep(3)
print "\n\n\n'President Mark will meet you anytime from now. Sorry for inconvinience President %r' says Secretary " %f_name[0]
time.sleep(5)
print "\n\n\n\v\v\vPresident Mark is here!"
time.sleep(3)
print "\n\n\nPresident %r, Nice to meet you" %f_name[0]
time.sleep(3)
print "\nIt is good to know that your country is quite concerned about small countries neighbouring you."
time.sleep(4)
print "\nBut sometimes it is better to detach yourself from weak ones..."
time.sleep(2)
print "...and attach youself to more powerful nations."
time.sleep(3)
print "\n\nPress ENTER to continue..."
raw_input("")
print "\v\v\v\v\v'So here is a deal...'"
print "\n\n1. If you and your ally are ready to let us make army bases in you country, we may support you at war."
print "\n2. If you allow, while your ally deny We 'will' support you at war. Our soldiers will lead from front."
print "\n3. If you both deny, Your enemy will be showered with our benevolence."
print "\n\n\v\v1. You allow them."
print "2. You deny them"
c = int(raw_input("\n> "))
if c == 1:
print "\v\v\v'Great! Now let's see what Your ally has to say'"
time.sleep(3)
print "\nYour ally supported you in this decision. President Mark has built armybase in your country."
time.sleep(3)
print "\nPresident of 'Kamuri' has sent you a message. Press ENTER to read it."
raw_input("")
print "\n\n\v\v\vPresident we need help. Terrorists have attacked us. Help us!!"
print "\n\n1. You send army"
print "2. You ask Mark to help"
print "3. You ignore the problem and do not send Army."
d = int(raw_input("\n> "))
if d == 2:
print "Mark denies help. He had said that he 'may' help you at war."
time.sleep(3)
print "\n\nWhat will you do now?"
print "\n1. You send army"
print "2. You ignore the problem and do not send Army."
e = int(raw_input("> "))
if e == 1:
print "That's like an good ally!"
time.sleep(2)
print "Your army is ready to leave for Kamuri"
time.sleep(3)
print "ALERT!"
time.sleep(1)
print "ALERT!!"
time.sleep(1)
print "ALERT!!!"
time.sleep(2)
print "\n\nThere is massive flood in your country! Lots of lives are in danger!"
print "\nMessage from Cheif Minister of that flood-struck state. Press ENTER to see Message"
raw_input("")
print "\n\n\vPresident! We need Army support. Only trained personnels like Army men can help us"
print "\nHundreds of people are trapped here. Army needed immediately!"
print "\v\v\v\v1. You send your army to Kamuri."
print "2. You re-direct your army to rescue people from flood-struck state."
f = int(raw_input(""))
if f == 1:
print "\n\nInternational relations matters President %r! But your citizens are feeling unsafe in your country." %f_name[0]
time.sleep(2)
print "\nMisiters withdraw support and your government falls..."
time.sleep(2)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU LOSE!!"
else:
print "\n\nGood decision to send army to rescue your people first."
print "\nArmy did fantastic job and saved hundreds of lives."
time.sleep(3)
print "\nYou become peoples favorite President!"
time.sleep(3)
print "\n\nBut Kamuri problem is unsolved yet!"
time.sleep(3)
print "Government is about to collapse. It would be a big threat to your country's security as well."
time.sleep(4)
print "\n1. Should we plan to offer an Armed force help?"
print "2. Or Negotitate with Terrorists."
time.sleep(3)
print "\nTerrorists want to contact you."
time.sleep(2)
print "\nThey have send you a message"
print "\nPress ENTER to see the message..."
raw_input("")
print "\v\v\nPresident %r if you ignore to help Kamuri, We will support you in next elections." %f_name[0]
print "People of our religion will support you. Secondly, we may ignore your country from our HIT LIST as well!!"
time.sleep(1)
print "\nYour options are:\n1. Should we plan to offer an Armed force help?"
print "2. Or Negotitate with Terrorists."
g = int(raw_input("\nTake your decision \n>"))
if g == 2:
print "\nPresident %r day by day conditions in Kamuri got worse." %f_name[0]
time.sleep(2)
print "\nKamuri Government was overthrown by Terrorists"
time.sleep(2)
print "\nYou even lost some reputation in World! News spread that you took this decision as you disbelieved your army!"
time.sleep(3)
print "You lost trust amongsts citizen and they voted against you!"
time.sleep(5)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU LOSE!!"
elif g == 1:
print "\nYou saved Kamuri. But back to back floods and warfare has made your economy weak"
time.sleep(5)
print "\nPresident Mark has come up with another deal"
time.sleep(3)
h = int(raw_input("\n\n1. You agree to meet him. \n2. You deny \n>"))
if h == 2:
print "\n\nSuperpower nation is upset now. He breaks offs economic ties and your economy crashes"
time.sleep(4)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU LOSE!!"
else :
print "\v\v\v\v\vSo Here is the deal!"
print "\n\n1. If you allow us to make more armybases in your country. We WILL help you at any cost!"
print "2. If you deny, we break economic ties with you and your economy may crash!"
raw_input("\nPress ENTER to continue... ")
print "\n\nHere is a message from Minister of Scientific development"
time.sleep(4)
print "\n\n\nWe have developed special kind of rice, which is new to the world market."
print "\nWe may sell it to world market to stabalize our economy."
time.sleep(7)
print "\nBut..."
time.sleep(3)
print "\nWe are not sure about its success."
time.sleep(4)
i = int(raw_input("Take your decision - "))
if i == 2:
print "\n\nSuperPower got upset but our rice was successful invention!"
print "\nYou managed to survive..."
time.sleep(5)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU WIN!!"
else:
print "\nThis time you caught MARK! He had to help your country now because of 'will' which he had said in deal."
time.sleep(5)
print "\nAlso your rice got successful and Mark needed that rice to help his country"
time.sleep(4)
print "\nYou sold that rice to Mark with a deal that from now any of his Army movement won't be allowed without your consesus."
time.sleep(5)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU WIN!!!!!"
else:
print "Being Diplomatic!"
time.sleep(3)
print "Riots!!!!"
time.sleep(2)
print "Religious Minority in your country got upset and their protest have turned into riots. You LOSE!!"
elif d == 1:
print "That's like an good ally!"
time.sleep(2)
print "Your army is ready to leave for Kamuri"
time.sleep(3)
print "ALERT!"
time.sleep(1)
print "ALERT!!"
time.sleep(1)
print "ALERT!!!"
time.sleep(2)
print "\n\nThere is massive flood in your country! Lots of lives are in danger!"
print "\nMessage from Cheif Minister of that flood-struck state. Press ENTER to see Message"
raw_input("")
print "\n\n\vPresident! We need Army support. Only trained personnels like Army men can help us"
print "\nHundreds of people are trapped here. Army needed immediately!"
print "\v\v\v\v1. You send your army to Kamuri."
print "2. You re-direct your army to rescue people from flood-struck state."
f = int(raw_input("\n>"))
if f == 1:
print "\n\nInternational relations matters President %r! But your citizens are feeling unsafe in your country." %f_name[0]
time.sleep(2)
print "\nMisiters withdraw support and your government falls..."
time.sleep(2)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU LOSE!!"
elif f == 2:
print "\n\nGood decision to send army to rescue your people first."
print "\nArmy did fantastic job and saved hundreds of lives."
time.sleep(3)
print "\nYou become peoples favorite President!"
time.sleep(3)
print "\n\nBut Kamuri problem is unsolved yet!"
time.sleep(3)
print "Government is about to collapse. It would be a big threat to your country's security as well."
time.sleep(4)
print "\n1. Should we plan to offer an Armed force help?"
print "2. Or Negotitate with Terrorists."
time.sleep(3)
print "\nTerrorists want to contact you."
time.sleep(2)
print "\nThey have send you a message"
print "\nPress ENTER to see the message..."
raw_input("")
print "\v\v\nPresident %r if you ignore to help Kamuri, We will support you in next elections." %f_name[0]
print "People of our religion will support you. Secondly, we may ignore your country from our HIT LIST as well!!"
g = int(raw_input("\nTake your decision \n>"))
if g == 2:
print "\nPresident %r day by day conditions in Kamuri got worse." %f_name[0]
time.sleep(2)
print "\nKamuri Government was overthrown by Terrorists"
time.sleep(2)
print "\nYou even lost some reputation in World! News spread that you took this decision as you disbelieved your army!"
time.sleep(3)
print "You lost trust amongsts citizen and they voted against you!"
time.sleep(5)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU LOSE!!"
elif g == 1:
print "\nYou saved Kamuri. But back to back floods and warfare has made your economy weak"
time.sleep(5)
print "\nPresident Mark has come up with another deal"
time.sleep(3)
h = int(raw_input("\n\n1. You agree to meet him. \n2. You deny>"))
if h == 2:
print "\n\nSuperpower nation is upset now. He breaks offs economic ties and your economy crashes"
time.sleep(4)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU LOSE!!"
else :
print "\v\v\v\v\vSo Here is the deal!"
print "\n\n1. If you allow us to make more armybases in your country. We WILL help you at any cost!"
print "2. If you deny, we break economic ties with you and your economy may crash!"
raw_input("\nPress ENTER to continue... ")
print "\n\nHere is a message from Minister of Scientific development"
time.sleep(4)
print "\n\n\nWe have developed special kind of rice, which is new to the world market."
print "\nWe may sell it to world market to stabalize our economy."
time.sleep(7)
print "\nBut..."
time.sleep(3)
print "\nWe are not sure about its success."
time.sleep(4)
i = int(raw_input("Take your decision - "))
if i == 2:
print "\n\nSuperPower got upset but our rice was successful invention!"
print "\nYou managed to survive..."
time.sleep(5)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU WIN!!"
else:
print "\nThis time you caught MARK! He had to help your country now because of 'will' which he had said in deal."
time.sleep(5)
print "\nAlso your rice got successful and Mark needed that rice to help his country"
time.sleep(4)
print "\nYou sold that rice to Mark with a deal that from now any of his Army movement won't be allowed without your consesus."
time.sleep(5)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU WIN!!!!!"
else :
print "Bye!"
else:
print "Being Diplomatic!"
time.sleep(3)
print "Riots!!!!"
time.sleep(2)
print "Religious Minority in your country got upset and their protest have turned into riots"
else :
print "'Ok President %r, Hope this decision won't cost you much!'" %f_name[0]
else :
print "Not a good decision to decline invitation from a superpower!"
print "\nPresident of 'Kamuri' has sent you a message. Press ENTER to read it."
raw_input("")
print "\n\n\v\v\vPresident we need help. Terrorists have attacked us. Help us!!"
print "\n\n1. You send army"
print "2. You ignore the problem and do not send Army."
d = int(raw_input("\n> "))
if d == 2:
print "Mark denies help. He had said that he 'may' help you at war."
time.sleep(3)
print "\n\nWhat will you do now?"
print "\n1. You send army"
print "2. You ignore the problem and do not send Army."
e = int(raw_input("> "))
if e == 1:
print "That's like an good ally!"
time.sleep(2)
print "Your army is ready to leave for Kamuri"
time.sleep(3)
print "ALERT!"
time.sleep(1)
print "ALERT!!"
time.sleep(1)
print "ALERT!!!"
time.sleep(2)
print "\n\nThere is massive flood in your country! Lots of lives are in danger!"
print "\nMessage from Cheif Minister of that flood-struck state. Press ENTER to see Message"
raw_input("")
print "\n\n\vPresident! We need Army support. Only trained personnels like Army men can help us"
print "\nHundreds of people are trapped here. Army needed immediately!"
print "\v\v\v\v1. You send your army to Kamuri."
print "2. You re-direct your army to rescue people from flood-struck state."
f = int(raw_input(""))
if f == 1:
print "\n\nInternational relations matters President %r! But your citizens are feeling unsafe in your country." %f_name[0]
time.sleep(2)
print "\nMisiters withdraw support and your government falls..."
time.sleep(2)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU LOSE!!"
else:
print "\n\nGood decision to send army to rescue your people first."
print "\nArmy did fantastic job and saved hundreds of lives."
time.sleep(3)
print "\nYou become peoples favorite President!"
time.sleep(3)
print "\n\nBut Kamuri problem is unsolved yet!"
time.sleep(3)
print "Government is about to collapse. It would be a big threat to your country's security as well."
time.sleep(4)
print "\n1. Should we plan to offer an Armed force help?"
print "2. Or Negotitate with Terrorists."
time.sleep(3)
print "\nTerrorists want to contact you."
time.sleep(2)
print "\nThey have send you a message"
print "\nPress ENTER to see the message..."
raw_input("")
print "\v\v\nPresident %r if you ignore to help Kamuri, We will support you in next elections." %f_name[0]
print "People of our religion will support you. Secondly, we may ignore your country from our HIT LIST as well!!"
g = int(raw_input("\nTake your decision \n>"))
if g == 2:
print "\nNegotitation with terrorists wasn't a good idea President %r" %f_name[0]
time.sleep(2)
print "\nKamuri Government was overthrown by Terrorists"
time.sleep(2)
print "\nCitizen felt that their security was at threat and voted against you!"
time.sleep(3)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU LOSE!!"
else:
print "\nYou saved Kamuri. Your country emerged as a Superpower"
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU WON!!!!!!!!"
else:
print "Being Diplomatic!"
time.sleep(3)
print "Riots!!!!"
time.sleep(2)
print "Religious Minority in your country got upset and their protest have turned into riots"
elif d == 1:
print "That's like an good ally!"
time.sleep(2)
print "Your army is ready to leave for Kamuri"
time.sleep(3)
print "ALERT!"
time.sleep(1)
print "ALERT!!"
time.sleep(1)
print "ALERT!!!"
time.sleep(2)
print "\n\nThere is massive flood in your country! Lots of lives are in danger!"
print "\nMessage from Cheif Minister of that flood-struck state. Press ENTER to see Message"
raw_input("")
print "\n\n\vPresident! We need Army support. Only trained personnels like Army men can help us"
print "\nHundreds of people are trapped here. Army needed immediately!"
print "\v\v\v\v1. You send your army to Kamuri."
print "2. You re-direct your army to rescue people from flood-struck state."
f = int(raw_input("\n>"))
if f == 1:
print "\n\nInternational relations matters President %r! But your citizens are feeling unsafe in your country." %f_name[0]
time.sleep(2)
print "\nMisiters withdraw support and your government falls..."
time.sleep(2)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU LOSE!!"
else:
print "\n\nGood decision to send army to rescue your people first."
print "\nArmy did fantastic job and saved hundreds of lives."
time.sleep(3)
print "\nYou become peoples favorite President!"
time.sleep(3)
print "\n\nBut Kamuri problem is unsolved yet!"
time.sleep(3)
print "Government is about to collapse. It would be a big threat to your country's security as well."
time.sleep(4)
print "\n1. Should we plan to offer an Armed force help?"
print "2. Or Negotitate with Terrorists."
time.sleep(3)
print "\nTerrorists want to contact you."
time.sleep(2)
print "\nThey have send you a message"
print "\nPress ENTER to see the message..."
raw_input("")
print "\v\v\nPresident %r if you ignore to help Kamuri, We will support you in next elections." %f_name[0]
print "People of our religion will support you. Secondly, we may ignore your country from our HIT LIST as well!!"
g = int(raw_input("\nTake your decision \n>"))
if g == 2:
print "\nPresident %r day by day conditions in Kamuri got worse." %f_name[0]
time.sleep(2)
print "\nKamuri Government was overthrown by Terrorists"
time.sleep(2)
print "\nYou even lost some reputation in World! But terrorists ignored to attack your country!"
time.sleep(3)
print "This decision of yours gave some time to recover your country from Financial crisis"
time.sleep(5)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU SURVIVED!!"
elif g == 1:
print "\nYou saved Kamuri. But back to back floods and warfare has made your economy weak"
time.sleep(5)
print "\nPresident Mark has also cut off economic ties with your country"
time.sleep(5)
print "\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\v\vYOU LOSE!!"
else :
print "Bye!"
else:
print "Being Diplomatic!"
time.sleep(3)
print "Riots!!!!"
time.sleep(2)
print "Religious Minority in your country got upset and their protest have turned into riots"
| apache-2.0 | 1,313,378,677,649,439,200 | 45.718821 | 173 | 0.653885 | false |
fniephaus/alfred-dropbox | src/dropbox_filter.py | 1 | 5886 | import os
import sys
import time
from email.utils import parsedate
import config
from helpers import get_resource, get_hash, get_account_info, uid_exists
from dropbox import client
from workflow import Workflow, PasswordNotFound, ICON_TRASH
from workflow.background import run_in_background
def main(wf):
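    # Build the Alfred feedback items: update notice, account authorization,
    # account removal, per-account file browsing, or the account overview.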
if wf.update_available:
wf.add_item("An update is available!",
autocomplete='workflow:update', valid=False)
user_input = wf.args[0]
command = query = ''
if len(user_input) > 0:
command = user_input.split()[0]
query = user_input[len(command) + 1:]
try:
wf.get_password('dropbox_access_tokens')
accounts = wf.cached_data(
'dropbox_accounts', data_func=get_account_info, max_age=360)
except PasswordNotFound:
accounts = None
if command == 'auth':
if query == '':
wf.add_item(
'Please enter your authorization code',
'If you don\'t have one, simply press enter.',
arg='url %s' % get_auth_url(), valid=True)
else:
wf.add_item(
'Authorize with "%s"' % query, 'Press enter to proceed',
arg='auth %s' % query, valid=True)
elif accounts is not None and command == 'remove':
for account in accounts:
wf.add_item(get_title(account), account[
'email'], arg='remove %s' % account['uid'], valid=True)
elif (accounts is not None and len(user_input) > 0 and
uid_exists(command, accounts)):
file_or_folder = get_file_or_folder(command, query)
if isinstance(file_or_folder, dict): # file
wf.add_item(
'Share', 'Copy link to clipboard',
arg='share %s %s' % (command, file_or_folder['path']),
icon='icons/folder_public.png', valid=True)
wf.add_item(
'Save to Downloads',
arg='download %s %s' % (command, file_or_folder['path']),
icon='icons/download.png', valid=True)
wf.add_item(
'Save to Desktop',
arg='desktop %s %s' % (command, file_or_folder['path']),
icon='icons/desktop.png', valid=True)
wf.add_item(
'Delete',
arg='delete %s %s' % (command, file_or_folder['path']),
icon=ICON_TRASH, valid=True)
elif isinstance(file_or_folder, list) and file_or_folder: # folder
if query and query != '/':
path = file_or_folder[0]['path'].split('/')
path = '/'.join(path[:-2])
wf.add_item(
'..', 'Change to parent directory',
icon='icons/folder.png',
autocomplete='%s %s/' % (command, path), valid=False)
for f in file_or_folder:
title = os.path.basename(f['path'])
subtitle = 'Modified: %s' % time.strftime(
'%Y-%m-%d %H:%M:%S', parsedate(f['modified']))
icon = 'icons/%s.png' % f['icon']
if not os.path.isfile(icon):
icon = 'icons/page_white.png'
if f['is_dir']:
title += '/'
wf.add_item(
title, subtitle, icon=icon,
autocomplete='%s %s/' % (command, f['path']),
valid=False)
else:
title += ' (%s)' % f['size']
wf.add_item(
title, subtitle, icon=icon,
autocomplete='%s %s' % (command, f['path']),
valid=False)
else:
wf.add_item(
'No files were found', 'Try a different request.', valid=False)
else:
if accounts is not None:
for account in accounts:
wf.add_item(get_title(account),
account['email'],
autocomplete='%s ' % account['uid'],
valid=False)
wf.add_item('Add another Dropbox account',
'', autocomplete='auth ', valid=False)
if accounts is not None and len(accounts) > 0:
wf.add_item('Remove an existing Dropbox account',
'', autocomplete='remove', valid=False)
wf.send_feedback()
def prefetch(wf, uid, path):
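    # Kick off a background job that prefetches this path so later
    # queries can be answered from the cache without blocking Alfred.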
job_name = 'dropbox_prefetch_%s' % get_hash(uid, path)
cmd = ['/usr/bin/python', wf.workflowfile('dropbox_prefetch.py'), uid, path]
run_in_background(job_name, cmd)
def get_file_or_folder(uid, query):
path = '/' if query == '' else query
if len(path) > 1 and path[-1] == '/':
path = path[:-1]
prefetch(wf, uid, path)
def wrapper():
return get_resource(uid, path)
return wf.cached_data(get_hash(uid, path), wrapper, max_age=120)
def get_auth_url():
flow = client.DropboxOAuth2FlowNoRedirect(
config.APP_KEY, config.APP_SECRET)
return flow.start()
def get_title(account):
normal_use = account['quota_info']['normal']
shared_use = account['quota_info']['shared']
total_quota = account['quota_info']['quota']
total_used = round(100.0 * (normal_use + shared_use) / total_quota, 2)
return '%s (%s%% of %s used)' % (
account['display_name'], total_used,
sizeof(account['quota_info']['quota']))
def sizeof(num):
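    # Render a byte count as a human-readable string, e.g. "3.1 GB".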
for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
if num < 1024.0:
return "%3.1f %s" % (num, x)
num /= 1024.0
if __name__ == '__main__':
wf = Workflow(
update_settings={'github_slug': 'fniephaus/alfred-dropbox'},
help_url='https://github.com/fniephaus/alfred-dropbox/issues'
)
log = wf.logger
sys.exit(wf.run(main))
| mit | -1,506,039,148,886,385,700 | 34.672727 | 80 | 0.51546 | false |
jakevdp/altair | altair/vegalite/v3/schema/mixins.py | 1 | 47188 | # -*- coding: utf-8 -*-
#
# The contents of this file are automatically written by
# tools/generate_schema_wrapper.py. Do not modify directly.
from . import core
from altair.utils import use_signature
from altair.utils.schemapi import Undefined
class MarkMethodMixin(object):
"""A mixin class that defines mark methods"""
def mark_area(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined, shape=Undefined,
size=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
strokeOpacity=Undefined, strokeWidth=Undefined, style=Undefined, tension=Undefined,
text=Undefined, theta=Undefined, thickness=Undefined, tooltip=Undefined,
width=Undefined, x=Undefined, x2=Undefined, x2Offset=Undefined, xOffset=Undefined,
y=Undefined, y2=Undefined, y2Offset=Undefined, yOffset=Undefined, **kwds):
"""Set the chart's mark to 'area'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="area", **kwds)
else:
copy.mark = "area"
return copy
def mark_bar(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined, shape=Undefined,
size=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
strokeOpacity=Undefined, strokeWidth=Undefined, style=Undefined, tension=Undefined,
text=Undefined, theta=Undefined, thickness=Undefined, tooltip=Undefined,
width=Undefined, x=Undefined, x2=Undefined, x2Offset=Undefined, xOffset=Undefined,
y=Undefined, y2=Undefined, y2Offset=Undefined, yOffset=Undefined, **kwds):
"""Set the chart's mark to 'bar'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="bar", **kwds)
else:
copy.mark = "bar"
return copy
def mark_line(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined, shape=Undefined,
size=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
strokeOpacity=Undefined, strokeWidth=Undefined, style=Undefined, tension=Undefined,
text=Undefined, theta=Undefined, thickness=Undefined, tooltip=Undefined,
width=Undefined, x=Undefined, x2=Undefined, x2Offset=Undefined, xOffset=Undefined,
y=Undefined, y2=Undefined, y2Offset=Undefined, yOffset=Undefined, **kwds):
"""Set the chart's mark to 'line'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="line", **kwds)
else:
copy.mark = "line"
return copy
def mark_trail(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined,
shape=Undefined, size=Undefined, stroke=Undefined, strokeCap=Undefined,
strokeDash=Undefined, strokeDashOffset=Undefined, strokeJoin=Undefined,
strokeMiterLimit=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
style=Undefined, tension=Undefined, text=Undefined, theta=Undefined,
thickness=Undefined, tooltip=Undefined, width=Undefined, x=Undefined, x2=Undefined,
x2Offset=Undefined, xOffset=Undefined, y=Undefined, y2=Undefined, y2Offset=Undefined,
yOffset=Undefined, **kwds):
"""Set the chart's mark to 'trail'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="trail", **kwds)
else:
copy.mark = "trail"
return copy
def mark_point(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined,
shape=Undefined, size=Undefined, stroke=Undefined, strokeCap=Undefined,
strokeDash=Undefined, strokeDashOffset=Undefined, strokeJoin=Undefined,
strokeMiterLimit=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
style=Undefined, tension=Undefined, text=Undefined, theta=Undefined,
thickness=Undefined, tooltip=Undefined, width=Undefined, x=Undefined, x2=Undefined,
x2Offset=Undefined, xOffset=Undefined, y=Undefined, y2=Undefined, y2Offset=Undefined,
yOffset=Undefined, **kwds):
"""Set the chart's mark to 'point'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="point", **kwds)
else:
copy.mark = "point"
return copy
def mark_text(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined, shape=Undefined,
size=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
strokeOpacity=Undefined, strokeWidth=Undefined, style=Undefined, tension=Undefined,
text=Undefined, theta=Undefined, thickness=Undefined, tooltip=Undefined,
width=Undefined, x=Undefined, x2=Undefined, x2Offset=Undefined, xOffset=Undefined,
y=Undefined, y2=Undefined, y2Offset=Undefined, yOffset=Undefined, **kwds):
"""Set the chart's mark to 'text'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="text", **kwds)
else:
copy.mark = "text"
return copy
def mark_tick(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined, shape=Undefined,
size=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
strokeOpacity=Undefined, strokeWidth=Undefined, style=Undefined, tension=Undefined,
text=Undefined, theta=Undefined, thickness=Undefined, tooltip=Undefined,
width=Undefined, x=Undefined, x2=Undefined, x2Offset=Undefined, xOffset=Undefined,
y=Undefined, y2=Undefined, y2Offset=Undefined, yOffset=Undefined, **kwds):
"""Set the chart's mark to 'tick'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="tick", **kwds)
else:
copy.mark = "tick"
return copy
def mark_rect(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined, shape=Undefined,
size=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
strokeOpacity=Undefined, strokeWidth=Undefined, style=Undefined, tension=Undefined,
text=Undefined, theta=Undefined, thickness=Undefined, tooltip=Undefined,
width=Undefined, x=Undefined, x2=Undefined, x2Offset=Undefined, xOffset=Undefined,
y=Undefined, y2=Undefined, y2Offset=Undefined, yOffset=Undefined, **kwds):
"""Set the chart's mark to 'rect'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="rect", **kwds)
else:
copy.mark = "rect"
return copy
def mark_rule(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined, shape=Undefined,
size=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
strokeOpacity=Undefined, strokeWidth=Undefined, style=Undefined, tension=Undefined,
text=Undefined, theta=Undefined, thickness=Undefined, tooltip=Undefined,
width=Undefined, x=Undefined, x2=Undefined, x2Offset=Undefined, xOffset=Undefined,
y=Undefined, y2=Undefined, y2Offset=Undefined, yOffset=Undefined, **kwds):
"""Set the chart's mark to 'rule'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="rule", **kwds)
else:
copy.mark = "rule"
return copy
def mark_circle(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined,
shape=Undefined, size=Undefined, stroke=Undefined, strokeCap=Undefined,
strokeDash=Undefined, strokeDashOffset=Undefined, strokeJoin=Undefined,
strokeMiterLimit=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
style=Undefined, tension=Undefined, text=Undefined, theta=Undefined,
thickness=Undefined, tooltip=Undefined, width=Undefined, x=Undefined, x2=Undefined,
x2Offset=Undefined, xOffset=Undefined, y=Undefined, y2=Undefined,
y2Offset=Undefined, yOffset=Undefined, **kwds):
"""Set the chart's mark to 'circle'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="circle", **kwds)
else:
copy.mark = "circle"
return copy
def mark_square(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined,
shape=Undefined, size=Undefined, stroke=Undefined, strokeCap=Undefined,
strokeDash=Undefined, strokeDashOffset=Undefined, strokeJoin=Undefined,
strokeMiterLimit=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
style=Undefined, tension=Undefined, text=Undefined, theta=Undefined,
thickness=Undefined, tooltip=Undefined, width=Undefined, x=Undefined, x2=Undefined,
x2Offset=Undefined, xOffset=Undefined, y=Undefined, y2=Undefined,
y2Offset=Undefined, yOffset=Undefined, **kwds):
"""Set the chart's mark to 'square'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="square", **kwds)
else:
copy.mark = "square"
return copy
def mark_geoshape(self, align=Undefined, angle=Undefined, baseline=Undefined, binSpacing=Undefined,
clip=Undefined, color=Undefined, cornerRadius=Undefined, cursor=Undefined,
dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
interpolate=Undefined, limit=Undefined, line=Undefined, opacity=Undefined,
order=Undefined, orient=Undefined, point=Undefined, radius=Undefined,
shape=Undefined, size=Undefined, stroke=Undefined, strokeCap=Undefined,
strokeDash=Undefined, strokeDashOffset=Undefined, strokeJoin=Undefined,
strokeMiterLimit=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
style=Undefined, tension=Undefined, text=Undefined, theta=Undefined,
thickness=Undefined, tooltip=Undefined, width=Undefined, x=Undefined,
x2=Undefined, x2Offset=Undefined, xOffset=Undefined, y=Undefined, y2=Undefined,
y2Offset=Undefined, yOffset=Undefined, **kwds):
"""Set the chart's mark to 'geoshape'
For information on additional arguments, see :class:`MarkDef`
"""
kwds = dict(align=align, angle=angle, baseline=baseline, binSpacing=binSpacing, clip=clip,
color=color, cornerRadius=cornerRadius, cursor=cursor, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, fill=fill, fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight, height=height,
href=href, interpolate=interpolate, limit=limit, line=line, opacity=opacity,
order=order, orient=orient, point=point, radius=radius, shape=shape, size=size,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOpacity=strokeOpacity,
strokeWidth=strokeWidth, style=style, tension=tension, text=text, theta=theta,
thickness=thickness, tooltip=tooltip, width=width, x=x, x2=x2, x2Offset=x2Offset,
xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.MarkDef(type="geoshape", **kwds)
else:
copy.mark = "geoshape"
return copy
def mark_boxplot(self, box=Undefined, clip=Undefined, color=Undefined, extent=Undefined,
median=Undefined, opacity=Undefined, orient=Undefined, outliers=Undefined,
rule=Undefined, size=Undefined, ticks=Undefined, **kwds):
"""Set the chart's mark to 'boxplot'
For information on additional arguments, see :class:`BoxPlotDef`
"""
kwds = dict(box=box, clip=clip, color=color, extent=extent, median=median, opacity=opacity,
orient=orient, outliers=outliers, rule=rule, size=size, ticks=ticks, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.BoxPlotDef(type="boxplot", **kwds)
else:
copy.mark = "boxplot"
return copy
def mark_errorbar(self, clip=Undefined, color=Undefined, extent=Undefined, opacity=Undefined,
orient=Undefined, rule=Undefined, ticks=Undefined, **kwds):
"""Set the chart's mark to 'errorbar'
For information on additional arguments, see :class:`ErrorBarDef`
"""
kwds = dict(clip=clip, color=color, extent=extent, opacity=opacity, orient=orient, rule=rule,
ticks=ticks, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.ErrorBarDef(type="errorbar", **kwds)
else:
copy.mark = "errorbar"
return copy
def mark_errorband(self, band=Undefined, borders=Undefined, clip=Undefined, color=Undefined,
extent=Undefined, interpolate=Undefined, opacity=Undefined, orient=Undefined,
tension=Undefined, **kwds):
"""Set the chart's mark to 'errorband'
For information on additional arguments, see :class:`ErrorBandDef`
"""
kwds = dict(band=band, borders=borders, clip=clip, color=color, extent=extent,
interpolate=interpolate, opacity=opacity, orient=orient, tension=tension, **kwds)
copy = self.copy(deep=False)
if any(val is not Undefined for val in kwds.values()):
copy.mark = core.ErrorBandDef(type="errorband", **kwds)
else:
copy.mark = "errorband"
return copy
class ConfigMethodMixin(object):
"""A mixin class that defines config methods"""
@use_signature(core.Config)
def configure(self, *args, **kwargs):
copy = self.copy(deep=False)
copy.config = core.Config(*args, **kwargs)
return copy
@use_signature(core.AreaConfig)
def configure_area(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["area"] = core.AreaConfig(*args, **kwargs)
return copy
@use_signature(core.AxisConfig)
def configure_axis(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["axis"] = core.AxisConfig(*args, **kwargs)
return copy
@use_signature(core.AxisConfig)
def configure_axisBand(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["axisBand"] = core.AxisConfig(*args, **kwargs)
return copy
@use_signature(core.AxisConfig)
def configure_axisBottom(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["axisBottom"] = core.AxisConfig(*args, **kwargs)
return copy
@use_signature(core.AxisConfig)
def configure_axisLeft(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["axisLeft"] = core.AxisConfig(*args, **kwargs)
return copy
@use_signature(core.AxisConfig)
def configure_axisRight(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["axisRight"] = core.AxisConfig(*args, **kwargs)
return copy
@use_signature(core.AxisConfig)
def configure_axisTop(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["axisTop"] = core.AxisConfig(*args, **kwargs)
return copy
@use_signature(core.AxisConfig)
def configure_axisX(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["axisX"] = core.AxisConfig(*args, **kwargs)
return copy
@use_signature(core.AxisConfig)
def configure_axisY(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["axisY"] = core.AxisConfig(*args, **kwargs)
return copy
@use_signature(core.RectConfig)
def configure_bar(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["bar"] = core.RectConfig(*args, **kwargs)
return copy
@use_signature(core.BoxPlotConfig)
def configure_boxplot(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["boxplot"] = core.BoxPlotConfig(*args, **kwargs)
return copy
@use_signature(core.MarkConfig)
def configure_circle(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["circle"] = core.MarkConfig(*args, **kwargs)
return copy
@use_signature(core.CompositionConfig)
def configure_concat(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["concat"] = core.CompositionConfig(*args, **kwargs)
return copy
@use_signature(core.ErrorBandConfig)
def configure_errorband(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["errorband"] = core.ErrorBandConfig(*args, **kwargs)
return copy
@use_signature(core.ErrorBarConfig)
def configure_errorbar(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["errorbar"] = core.ErrorBarConfig(*args, **kwargs)
return copy
@use_signature(core.CompositionConfig)
def configure_facet(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["facet"] = core.CompositionConfig(*args, **kwargs)
return copy
@use_signature(core.MarkConfig)
def configure_geoshape(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["geoshape"] = core.MarkConfig(*args, **kwargs)
return copy
@use_signature(core.HeaderConfig)
def configure_header(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["header"] = core.HeaderConfig(*args, **kwargs)
return copy
@use_signature(core.HeaderConfig)
def configure_headerColumn(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["headerColumn"] = core.HeaderConfig(*args, **kwargs)
return copy
@use_signature(core.HeaderConfig)
def configure_headerFacet(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["headerFacet"] = core.HeaderConfig(*args, **kwargs)
return copy
@use_signature(core.HeaderConfig)
def configure_headerRow(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["headerRow"] = core.HeaderConfig(*args, **kwargs)
return copy
@use_signature(core.LegendConfig)
def configure_legend(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["legend"] = core.LegendConfig(*args, **kwargs)
return copy
@use_signature(core.LineConfig)
def configure_line(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["line"] = core.LineConfig(*args, **kwargs)
return copy
@use_signature(core.MarkConfig)
def configure_mark(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["mark"] = core.MarkConfig(*args, **kwargs)
return copy
@use_signature(core.MarkConfig)
def configure_point(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["point"] = core.MarkConfig(*args, **kwargs)
return copy
@use_signature(core.ProjectionConfig)
def configure_projection(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["projection"] = core.ProjectionConfig(*args, **kwargs)
return copy
@use_signature(core.RangeConfig)
def configure_range(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["range"] = core.RangeConfig(*args, **kwargs)
return copy
@use_signature(core.RectConfig)
def configure_rect(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["rect"] = core.RectConfig(*args, **kwargs)
return copy
@use_signature(core.CompositionConfig)
def configure_repeat(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["repeat"] = core.CompositionConfig(*args, **kwargs)
return copy
@use_signature(core.MarkConfig)
def configure_rule(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["rule"] = core.MarkConfig(*args, **kwargs)
return copy
@use_signature(core.ScaleConfig)
def configure_scale(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["scale"] = core.ScaleConfig(*args, **kwargs)
return copy
@use_signature(core.SelectionConfig)
def configure_selection(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["selection"] = core.SelectionConfig(*args, **kwargs)
return copy
@use_signature(core.MarkConfig)
def configure_square(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["square"] = core.MarkConfig(*args, **kwargs)
return copy
@use_signature(core.TextConfig)
def configure_text(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["text"] = core.TextConfig(*args, **kwargs)
return copy
@use_signature(core.TickConfig)
def configure_tick(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["tick"] = core.TickConfig(*args, **kwargs)
return copy
@use_signature(core.TitleConfig)
def configure_title(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["title"] = core.TitleConfig(*args, **kwargs)
return copy
@use_signature(core.LineConfig)
def configure_trail(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["trail"] = core.LineConfig(*args, **kwargs)
return copy
@use_signature(core.ViewConfig)
def configure_view(self, *args, **kwargs):
copy = self.copy(deep=['config'])
if copy.config is Undefined:
copy.config = core.Config()
copy.config["view"] = core.ViewConfig(*args, **kwargs)
        return copy
| bsd-3-clause | -2,069,377,104,403,368,400 | 57.330037 | 104 | 0.635946 | false |
alejo8591/maker | core/api/managers.py | 1 | 2413 | # encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of maker.
# License www.tree.io/license
from django.db import models
from django.contrib.auth.models import User
from maker.core.conf import settings
KEY_SIZE = 18
SECRET_SIZE = 32
CONSUMER_DB = getattr(settings, 'MAKER_API_CONSUMER_DB', 'default')
class KeyManager(models.Manager):
"""
Add support for random key/secret generation
"""
def generate_random_codes(self):
key = User.objects.make_random_password(length=KEY_SIZE)
secret = User.objects.make_random_password(length=SECRET_SIZE)
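		# Re-generate the secret until the key/secret pair is not already in use.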
while self.filter(key__exact=key, secret__exact=secret).count():
secret = User.objects.make_random_password(length=SECRET_SIZE)
return key, secret
class ConsumerManager(KeyManager):
def create_consumer(self, name, description=None, user=None, using=CONSUMER_DB):
"""
Shortcut to create a consumer with random key/secret.
"""
consumer, created = self.using(using).get_or_create(name=name)
if user:
consumer.user = user
if description:
consumer.description = description
if created:
consumer.key, consumer.secret = self.generate_random_codes()
consumer.save()
return consumer
_default_consumer = None
class ResourceManager(models.Manager):
_default_resource = None
def get_default_resource(self, name):
"""
		Return the default resource for the given name, caching it after the first lookup.
"""
if not self._default_resource:
self._default_resource = self.get(name=name)
return self._default_resource
class TokenManager(KeyManager):
def create_token(self, consumer_id, token_type, timestamp, user=None, using=None):
"""
Shortcut to create a token with random key/secret.
"""
if using:
manager = self.using(using)
else:
manager = self
token, created = manager.get_or_create(consumer_id=consumer_id,
token_type=token_type,
timestamp=timestamp,
user=user)
if created:
token.key, token.secret = self.generate_random_codes()
token.save()
return token
| mit | 2,176,478,604,376,307,200 | 28.426829 | 86 | 0.594281 | false |
wengzhiwen/Your-Vehicle-Status | main.py | 1 | 1119 | # coding=utf-8
import os
import helper
from google.appengine.ext.webapp import template
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.api import users
class MainHandler(webapp.RequestHandler):
def get(self):
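        # Render the main page for signed-in users; otherwise fall back to the error page.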
user = users.get_current_user()
if user:
nickName = user.nickname()
template_values = {
'nick_name': nickName,
}
path = os.path.join(os.path.dirname(__file__), 'main.html')
self.response.out.write(template.render(path, template_values))
else:
template_values = {
                'message': 'Unexpected critical error.',
}
path = os.path.join(os.path.dirname(__file__), 'error.html')
self.response.out.write(template.render(path, template_values))
def main():
application = webapp.WSGIApplication([('/main/', MainHandler)],
debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
| mit | 2,130,439,821,703,919,400 | 28.756757 | 75 | 0.563124 | false |
SushiTee/teerace | teerace/accounts/forms.py | 1 | 4377 | from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from accounts.models import UserProfile
from annoying.functions import get_config
from recaptcha_works.fields import RecaptchaField
class RegisterForm(forms.Form):
username = forms.RegexField(label="Username", regex=r'^\w+$', min_length=2,
max_length=30)
password1 = forms.CharField(label="Password", min_length=4,
widget=forms.PasswordInput(render_value=False),
help_text="At least 4 chars long")
password2 = forms.CharField(label="Password (again)", min_length=4,
widget=forms.PasswordInput(render_value=False))
email1 = forms.EmailField(label="E-mail address",
help_text="We won't share this to any 3rd-parties!")
email2 = forms.EmailField(label="E-mail address (again)")
if get_config('ENABLE_CAPTCHA', False):
if not (get_config('RECAPTCHA_PUBLIC_KEY', False) and
get_config('RECAPTCHA_PRIVATE_KEY', False)):
raise ImproperlyConfigured("You must define the RECAPTCHA_PUBLIC_KEY"
" and/or RECAPTCHA_PRIVATE_KEY setting in order to use reCAPTCHA.")
recaptcha = RecaptchaField(label="Human test", required=True)
def clean_username(self):
username = self.cleaned_data.get('username')
try:
user = User.objects.get(username__iexact=username)
del user
raise forms.ValidationError("Username is already taken")
except User.DoesNotExist:
pass
return username
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 != password2:
raise forms.ValidationError(
"You must type the same password each time")
return password2
def clean_email2(self):
email1 = self.cleaned_data.get('email1')
email2 = self.cleaned_data.get('email2')
if email1 != email2:
raise forms.ValidationError(
"You must type the same e-mail address each time")
return email2
def save(self):
return User.objects.create_user(self.cleaned_data.get('username'),
self.cleaned_data.get('email1'), self.cleaned_data.get('password1'))
class LoginForm(forms.Form):
username = forms.CharField(label="Username")
password = forms.CharField(label="Password",
widget=forms.PasswordInput(render_value=False))
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.user = None
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
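		# Missing fields are already reported by field-level validation; skip the auth check.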
if not username or not password:
return self.cleaned_data
self.user = authenticate(username=username, password=password)
if self.user == None:
raise forms.ValidationError("Invalid username and/or password")
if not self.user.is_active:
raise forms.ValidationError("Your account has been disabled")
return self.cleaned_data
class SettingsUserForm(forms.ModelForm):
class Meta:
model = User
fields = ('first_name', 'last_name')
class SettingsProfileForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ('gender', 'country',)
class PasswordChangeForm(forms.Form):
old_password = forms.CharField(label="Old password",
widget=forms.PasswordInput(render_value=False))
new_password1 = forms.CharField(label="New password", min_length=4,
widget=forms.PasswordInput(render_value=False))
new_password2 = forms.CharField(label="New password (again)", min_length=4,
widget=forms.PasswordInput(render_value=False))
def __init__(self, *args, **kwargs):
self.current_user = kwargs.pop('current_user', None)
if self.current_user is None:
raise AttributeError("current_user missing")
super(PasswordChangeForm, self).__init__(*args, **kwargs)
def clean_old_password(self):
old_password = self.cleaned_data.get('old_password')
if not self.current_user.check_password(old_password):
raise forms.ValidationError(
"You have to type your old password correctly.")
return old_password
def clean_new_password2(self):
new_password1 = self.cleaned_data.get('new_password1')
new_password2 = self.cleaned_data.get('new_password2')
if new_password1 != new_password2:
raise forms.ValidationError(
"You must type the same password each time")
return new_password2
def save(self):
self.current_user.set_password(self.cleaned_data.get('new_password1'))
self.current_user.save() | bsd-3-clause | -4,667,468,854,348,954,000 | 33.203125 | 76 | 0.739319 | false |
bparsons/bootalert | bootalert.py | 1 | 2055 | #!/usr/bin/python2
"""
bootalert
Sends email with hostname and IP address
Brian Parsons <[email protected]>
"""
import ConfigParser
import datetime
import re
import smtplib
import socket
import sys
import urllib2
# Get Hostname
hostname = socket.gethostname()
# Get current IP
try:
ipsite = urllib2.urlopen('http://ip.brian.is')
response = ipsite.read()
ips = re.findall("(?:\d{1,3}\.){3}\d{1,3}", response)
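    # Keep the last IPv4-looking match from the response as the current address.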
if type(ips) in [list, tuple, set]:
for record in ips:
newip = record
except IOError as e:
print('Connection error getting IP address: %s' % e.reason)
newip = 'Fetching IP address failed with: ' + e.reason[1]
try:
newip
except NameError:
print('Unable to find IP address in response from ip.brian.is.')
newip = 'Fetching IP address failed - no IP found in response from ip.brian.is'
print('Current IP: %s' % newip)
# Parse Config File
config = ConfigParser.ConfigParser()
config.read("/etc/conf.d/bootalert")
try:
confmailto = config.get("bootalert", "mailto")
confmailfrom = config.get("bootalert", "mailfrom")
except ConfigParser.NoSectionError:
print("Config file /etc/conf.d/bootalert not found")
# Send Message
# Get mail to address from conf file or default to root
try:
mailto = confmailto
except NameError:
mailto = "root"
# Get mail from address from conf file or default to root
try:
mailfrom = confmailfrom
except NameError:
mailfrom = "root"
now = datetime.datetime.now()
print("Sending mail from " + mailfrom + " to " + mailto + ".")
# compose boot email
messageheader = "From: Boot Alert <" + mailfrom + ">\n"
messageheader += "To: " + mailto + "\n"
messageheader += "Subject: " + hostname + "\n\n"
message = messageheader + hostname + " booted " + now.strftime("%a %b %d %H:%M:%S %Z %Y") + " with IP: " + newip + ".\n\n"
# send boot email
try:
smtpObj = smtplib.SMTP('localhost')
smtpObj.sendmail(mailfrom, mailto, message)
except:
print("Error: unable to send boot alert email. Mail server running?")
sys.exit(1)
| mit | 3,794,377,055,659,140,600 | 24.6875 | 122 | 0.672019 | false |
papaiking/faceChecker_device | app/lib/search.py | 1 | 3775 | """
@Author: Thuc VX<[email protected]>
@ORG: labsofthings.com
@date: 27 May 2017
Purpose: This package is for searching for a user in a captured image.
It does the following processing:
    - Request to get a Linkedface token,
    - Post the search image to Linkedface,
    - Search for the user appearing in the image
"""
import time
import requests
import json
from init import Log
class Search:
def __init__(self, options, config):
self.options = options
self.config = config
self.linkedface_token = self.getLinkedface_token()
# Call to FaceChecker server to get Linkedface access token
def getLinkedface_token(self):
# Get and check token URL
token_url = self.config['Server'].get('FaceChecker_GET_LINKEDFACE_TOKEN')
if not( token_url ):
Log.error('Configuration: FaceChecker_GET_LINKEDFACE_TOKEN for URL to get Linkedface token is invalid')
return None
# Get and check server token
server_token = self.config['Server'].get('FaceChecker_TOKEN')
if not( server_token ):
Log.error('Configuration: FaceChecker_TOKEN to access server APIs is invalid')
return None
headers = {'x-access-token': server_token}
# Request to get Linkedface token
ret = requests.get(token_url, headers=headers)
if not(ret) or (ret.status_code != 200):
Log.error('Cannot request to server to get Linkedface token')
return None
data = ret.json()
# return Linkedface access token
return data.get('token')
    # Check if the image contains any face
def _validateFaceImage(self, captured_img):
# TODO here
return True
# Upload image for searching
def _uploadImage(self, captured_img):
imgFile = open(captured_img, 'rb')
if imgFile is None:
Log.error('Cannot open image file: ' + captured_img)
return None
upload_url = self.config['Linkedface'].get('LINKEDAFCE_postimg')
# Log.info('Post url: ' + upload_url + ', file: ' + captured_img)
files = { 'faceImage': imgFile }
res = requests.post( upload_url, files=files )
if res is not None and res.status_code==200:
#Log.info('Uploaded file: ' + res.text)
return res.json()
else:
Log.error('Error in uploading image for searching')
return None
"""
    This function is for searching for users that have a similar face to the one in the image.
    It does the following steps:
    - Check if this image is valid and contains a face
- Upload image for searching
- Search and return result
"""
def searchUser(self, captured_img):
# Upload image for searching
uploaded_img = self._uploadImage(captured_img)
# Log.info('Uploaded image: ' + json.dumps(uploaded_img))
if uploaded_img is not None:
# Search for user in image
search_url = self.config['Linkedface'].get('LINKEDAFCE_search')
            if search_url is None:
                Log.error('Error in configuration for parameter: Linkedface.LINKEDAFCE_search')
                return None
            search_url = search_url + uploaded_img.get('id')
headers = {'Authorization':'BEARER ' + self.linkedface_token}
#Log.info('Search URL: ' + search_url + ', header: ' + json.dumps(headers))
# Request for searching
res = requests.get(search_url, headers=headers)
if res.status_code == 200:
#Log.info('Search response: ' + res.text)
return res.json()
else:
Log.info('Error in searching user: ' + res.text)
return None
else:
return None
| mit | -2,242,247,548,680,306,700 | 33.009009 | 115 | 0.600795 | false |
networkdynamics/zenlib | src/zen/drawing/ubigraph.py | 1 | 12030 | """
The ``zen.drawing.ubigraph`` module provides support for rendering Zen graphs in the `Ubigraph visualization environment <http://ubietylab.net/ubigraph/>`_. The renderer will update the visualization in real time as changes are made to the underlying graph. Furthermore, edges and nodes can be visually highlighted.
The functionality of this module falls into two areas: rendering the topology of the graph and highlighting nodes and edges. All this functionality is
available through the :py:class:`zen.UbigraphRenderer` class.
Rendering a graph
=================
In order to render a graph, first construct the `UbigraphRenderer` and connect it to an Ubigraph server.
A simple use case involving a connection to a local Ubigraph server would look something like::
G = DiGraph()
ur = UbigraphRenderer('http://localhost:20738/RPC2')
ur.default_node_color = '#00ff00' # all nodes will be green
ur.graph = G
G.add_edge(1,2)
G.add_edge(2,3)
In this example, the graph is empty at first. Because the renderer registers as a graph event listener, the Ubigraph view
will be updated as nodes and edges are added.
Note that it is possible to change the way that nodes and edges will be rendered by default. Currently the following attributes
are supported:
* ``default_node_color``
* ``default_node_shape``
* ``default_edge_color``
* ``default_edge_width``
All these attributes assume values dictated by the `Ubigraph API <http://ubietylab.net/ubigraph/content/Docs/index.html>`_. Both undirected and directed graphs are
supported. Directed graphs will be rendered with directed edges - everything else is the same.
Node/Edge Highlighting
======================
Nodes and edges can be highlighted using the methods :py:meth:`zen.UbigraphRenderer.highlight_nodes`/:py:meth:`zen.UbigraphRenderer.highlight_nodes_` and :py:meth:`zen.UbigraphRenderer.highlight_edges`/:py:meth:`zen.UbigraphRenderer.highlight_edges_`. As always, the underscore allows use of either node/edge indices (with the underscore) or node/edge objects (without the underscore).
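For example, to flag a few nodes and edges once they are part of the graph (a minimal sketch that reuses the graph ``G`` and renderer ``ur`` constructed above)::
	ur.highlight_edges([(1,2),(2,3)])
	ur.highlight_nodes([1])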
The UbigraphRenderer class
==========================
.. autoclass:: zen.UbigraphRenderer()
"""
import logging
import time
import xmlrpclib
from zen.graph import Graph
from zen.digraph import DiGraph
from zen.exceptions import ZenException
logger = logging.getLogger(__name__)
class UbigraphRenderer(object):
"""
The UbigraphRenderer is constructed with a URL to the Ubigraph server it will connect to. Following this, the graph can be set using the ``.graph`` attribute.
"""
def __init__(self,url,**kwargs):
"""
Create an UbigraphRenderer instance that will render graph events to the server indicated in ``url``.
**Keyword Args**:
* ``graph [=None]`` (:py:class:`Graph` or :py:class:`DiGraph`): the graph that will be rendered. This can also be set using
the ``UbigraphRenderer.graph`` property.
* ``event_delay [=0]`` (float): the number of seconds that each event update call should wait. This is one way of
making the graph render more slowly. Of course, this also slows down the graph construction code itself. Use with care.
"""
graph = kwargs.pop('graph',None)
self._event_delay = kwargs.pop('event_delay',0)
if len(kwargs) > 0:
raise ZenException, 'Unexpected remaining arguments: %s' % kwargs.keys()
logger.debug('connecting to ubigraph server: %s' % url)
self.server = xmlrpclib.Server(url)
self.server_graph = self.server.ubigraph
self.highlighted_node_style = self.server_graph.new_vertex_style(0)
self.highlighted_edge_style = self.server_graph.new_edge_style(0)
self.default_node_color = '#0000bb'
self.default_node_shape = 'sphere'
self.default_edge_color = '#ffffff'
self.default_edge_width = '1.0'
self.highlighted_node_color = '#bb0000'
self.highlighted_node_shape = 'sphere'
self.highlighted_edge_color = '#ffff00'
self.highlighted_edge_width = '6.0'
# now that everything is setup, if a graph was provided, apply it!
self.graph = graph
def __graph(self,graph=None):
if graph is None:
return self._graph
else:
self.server_graph.clear()
####
# reapply defaults to the server
# set the default styles
self.default_node_color = self._default_node_color
self.default_node_shape = self._default_node_shape
self.default_edge_color = self._default_edge_color
self.default_edge_width = self._default_edge_width
if type(graph) == DiGraph:
self.server_graph.set_edge_style_attribute(0, 'arrow', 'true')
# create and set the highlighted styles
self.highlighted_node_style = self.server_graph.new_vertex_style(0)
self.highlighted_edge_style = self.server_graph.new_edge_style(0)
self.highlighted_node_color = self._hlight_node_color
self.highlighted_node_shape = self._hlight_node_shape
self.highlighted_edge_color = self._hlight_edge_color
self.highlighted_edge_width = self._hlight_edge_width
# zero out highlighted anything
self._highlighted_edges = set()
self._highlighted_nodes = set()
####
# initialize graph stuff
self._graph = graph
self.node_map = {}
self.edge_map = {}
self._graph.add_listener(self)
#####
# build up the graph as it currently exists
# briefly suspend the event delay
actual_event_delay = self._event_delay
self._event_delay = 0
for nidx,nobj,data in self._graph.nodes_iter_(obj=True,data=True):
self.node_added(nidx,nobj,data)
for eidx,data,weight in self._graph.edges_iter_(data=True,weight=True):
uidx,vidx = self._graph.endpoints_(eidx)
self.edge_added(eidx,uidx,vidx,data,weight)
# put the event delay back in place
self._event_delay = actual_event_delay
graph = property( __graph, __graph)
def __inner_default_node_color(self,color=None):
"""
If a color is given, the default node color is changed. Otherwise, the default color is returned.
"""
if color is not None:
self.server_graph.set_vertex_style_attribute(0, 'color', color)
self._default_node_color = color
else:
return self._default_node_color
def __inner_default_node_shape(self,shape=None):
"""
If a shape is given, the default node shape is changed. Otherwise, the default shape is returned.
"""
logger.debug('entering inner default node shape with %s' % shape)
if shape is not None:
self.server_graph.set_vertex_style_attribute(0, 'shape', shape)
self._default_node_shape = shape
else:
return self._default_node_shape
def __inner_default_edge_color(self,color=None):
"""
If a shape is given, the default edge color is changed. Otherwise, the default color is returned.
"""
if color is not None:
self.server_graph.set_edge_style_attribute(0, 'color', color)
self._default_edge_color = color
else:
return self._default_edge_color
def __inner_default_edge_width(self,width=None):
"""
If a width (string) is given, the default edge width is changed. Otherwise, the default width is returned.
"""
if width is not None:
self.server_graph.set_edge_style_attribute(0, 'width', width)
self._default_edge_width = width
else:
return self._default_edge_width
default_node_color = property(__inner_default_node_color, __inner_default_node_color)
default_node_shape = property(__inner_default_node_shape, __inner_default_node_shape)
default_edge_color = property(__inner_default_edge_color, __inner_default_edge_color)
default_edge_width = property(__inner_default_edge_width, __inner_default_edge_width)
def __inner_hlight_node_color(self,color=None):
"""
If a color is given, the highlighted node color is changed. Otherwise, the highlighted color is returned.
"""
if color is not None:
self.server_graph.set_vertex_style_attribute(self.highlighted_node_style, 'color', color)
self._hlight_node_color = color
else:
return self._hlight_node_color
def __inner_hlight_node_shape(self,shape=None):
"""
If a shape is given, the hlight node shape is changed. Otherwise, the hlight shape is returned.
"""
logger.debug('entering inner hlight node shape with %s' % shape)
if shape is not None:
self.server_graph.set_vertex_style_attribute(self.highlighted_node_style, 'shape', shape)
self._hlight_node_shape = shape
else:
return self._hlight_node_shape
def __inner_hlight_edge_color(self,color=None):
"""
If a shape is given, the hlight edge color is changed. Otherwise, the hlight color is returned.
"""
if color is not None:
self.server_graph.set_edge_style_attribute(self.highlighted_edge_style, 'color', color)
self._hlight_edge_color = color
else:
return self._hlight_edge_color
def __inner_hlight_edge_width(self,width=None):
"""
If a width (string) is given, the hlight edge width is changed. Otherwise, the hlight width is returned.
"""
if width is not None:
self.server_graph.set_edge_style_attribute(self.highlighted_edge_style, 'width', width)
self._hlight_edge_width = width
else:
return self._hlight_edge_width
highlighted_node_color = property(__inner_hlight_node_color, __inner_hlight_node_color)
highlighted_node_shape = property(__inner_hlight_node_shape, __inner_hlight_node_shape)
highlighted_edge_color = property(__inner_hlight_edge_color, __inner_hlight_edge_color)
highlighted_edge_width = property(__inner_hlight_edge_width, __inner_hlight_edge_width)
def node_added(self,nidx,nobj,data):
# skip nodes that have already been seen
if nidx in self.node_map:
logger.warn('node %d cannot be added. A mapping already exists.' % nidx)
return
logger.debug('registering node %d with the server' % nidx)
self.node_map[nidx] = self.server_graph.new_vertex()
time.sleep(self._event_delay)
return
def node_removed(self,nidx,nobj):
if nidx in self.node_map:
logger.debug('removing node %d from the server.' % nidx)
self.server_graph.remove_vertex(self.node_map[nidx])
del self.node_map[nidx]
time.sleep(self._event_delay)
else:
logger.warn('node %d cannot be removed. No mapping exists.' % nidx)
def edge_added(self,eidx,uidx,vidx,data,weight):
# skip nodes that have already been seen
if eidx in self.edge_map:
logger.warn('edge %d cannot be added. A mapping already exists.' % eidx)
return
logger.debug('registering edge %d with the server' % eidx)
self.edge_map[eidx] = self.server_graph.new_edge(self.node_map[uidx],self.node_map[vidx])
time.sleep(self._event_delay)
return
def edge_removed(self,eidx,uidx,vidx):
if eidx in self.edge_map:
logger.debug('removing edge %d from the server.' % eidx)
self.server_graph.remove_edge(self.edge_map[eidx])
del self.edge_map[eidx]
time.sleep(self._event_delay)
else:
logger.warn('edge %d cannot be removed. No mapping exists.' % eidx)
def highlight_edges_(self,edges):
for eidx in edges:
if eidx not in self._highlighted_edges:
self.server_graph.change_edge_style(self.edge_map[eidx], self.highlighted_edge_style)
self._highlighted_edges.add(eidx)
return
def highlight_nodes_(self,nodes):
for nidx in nodes:
if nidx not in self._highlighted_nodes:
self.server_graph.change_vertex_style(self.node_map[nidx], self.highlighted_node_style)
self._highlighted_nodes.add(nidx)
return
def highlight_edges(self,edges):
self.highlight_edges_(map(lambda x: self._graph.edge_idx(*x),edges))
def highlight_nodes(self,nodes):
self.highlight_nodes_(map(lambda x: self._graph.node_idx(x),nodes))
if __name__ == '__main__':
import zen
import time
logging.basicConfig(level=logging.DEBUG)
G = zen.DiGraph()
ur = UbigraphRenderer('http://localhost:20738/RPC2')
ur.default_node_shape = 'sphere'
ur.default_node_color = '#1100dd'
ur.graph = G
e1 = G.add_edge(1,2)
time.sleep(1)
e2 = G.add_edge(2,3)
time.sleep(1)
e3 = G.add_edge(3,4)
time.sleep(1)
e4 = G.add_edge(1,4)
ur.highlight_edges([(1,2),(2,3)])
ur.highlight_nodes([1])
| bsd-3-clause | -3,296,160,238,034,257,400 | 34.385294 | 386 | 0.705653 | false |
cjbe/artiqDrivers | artiqDrivers/devices/coherentDds/driver.py | 1 | 6338 | import logging
import serial
import math
import time
logger = logging.getLogger(__name__)
class CoherentDds:
ser = None;
lsbAmp = 1.0 / 16383 # 0x3fff is maximum amplitude
lsbPhase = 360.0 / 65536 # Degrees per LSB.
def __init__(self, addr, clockFreq, baudrate=115200, internal_clock=False,
incoherent_channels=[False, False, False, False]):
# addr : serial port name
# clockFreq : clock frequency in Hz
# internal_clock: if true, use internal 1 GHz clock
        # incoherent_channels: array of booleans selecting the channels for which coherence is disabled
self.ser = serial.Serial(addr, baudrate=baudrate)
self.lsbFreq = clockFreq / (2**32);
self.clockFreq = clockFreq
self.disableCoherenceMode(*incoherent_channels)
# Write a trivial pulse shape to /disable/ pulse shaping (the VGA is always at max)
self.setPulseShape(0, [1])
self.setPulseShape(1, [1])
self.setPulseShape(2, [1])
self.setPulseShape(3, [1])
if internal_clock:
self.setClockSource(clock_internal=True)
def read_spi_word(self):
self.send("getSpiWord?\n")
line = self.ser.readline().decode().strip()
return int(line, 16)
def get_lsb_freq(self):
return self.lsbFreq
def send(self, data):
self.ser.write(data.encode())
def identity(self):
"""Returns a string representing the firmware name and version"""
self.send('idn?\n')
return self.ser.readline().decode().strip()
def resetPhase(self):
self.send('resetPhase\n');
def setProfile(self, channel, profile, freq, phase=0.0, amp=1.0):
"""Sets a DDS profile frequency (Hz), phase (degrees), and amplitude (full-scale).
phase defaults to 0 and amplitude defaults to 1"""
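        # For example (hypothetical values -- any channel 0-3 and profile 0-7 work):
        #   dds.setProfile(0, 0, 200e6, phase=90.0, amp=0.5)
        # programs channel 0, profile 0 to 200 MHz at half amplitude with a 90 degree phase offset.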
if amp < 0 or amp > 1:
raise ValueError("DDS amplitude must be between 0 and 1")
if freq < 0 or freq > 450e6: # This should be dependant on the clock frequency
raise ValueError("DDS frequency must be between 0 and 450 MHz")
ampWord = int(round( amp * 0x3fff ))
phaseWord = int(round( (phase % 360) / 360.0 * 0xffff ))
freqWord = int(round( freq / self.lsbFreq ))
self.setProfileWords(channel, profile, freqWord, phaseWord, ampWord)
def setProfileWords(self, channel, profile, freq, phase, amp): # Freq, phase, amp are all in units of lsb
profile = int(profile) # have to do this, because artiq uses a special artiq.integer
if channel < 0 or channel > 3 or not isinstance(channel, int):
raise ValueError("DDS channel should be an integer between 0 and 3")
if profile < 0 or profile > 7 or not isinstance(profile, int):
raise ValueError("DDS profile should be an integer between 0 and 7")
if amp > 0x3fff or amp < 0 or not isinstance(amp, int):
raise ValueError("DDS amplitude word should be an integer between 0 and 0x3fff")
if phase > 0xffff or phase < 0 or not isinstance(phase, int):
raise ValueError("DDS phase word should be an integer between 0 and 0xffff")
if freq < 0 or freq > 0xffffffff or not isinstance(freq, int):
raise ValueError("DDS frequency word should be an integer between 0 and 0xffffffff")
self.send('setProfile {} {} {} {} {}\n'.format( channel, profile, freq, phase, amp) );
def reset(self):
self.send('reset\n');
time.sleep(50e-3);
def disableCoherenceMode(self, ch0=False, ch1=False, ch2=False, ch3=False):
self.send('setDisableCoherence {:d} {:d} {:d} {:d}\n'.\
format(ch0,ch1,ch2,ch3))
self.ser.readline()
def setPulseShape(self, shapeChannel, shapeVec):
if shapeChannel < 0 or shapeChannel > 3 or not isinstance(shapeChannel, int):
raise ValueError("DDS pulse shape channel should be an integer between 0 and 3")
if len(shapeVec) < 1 or len(shapeVec) > 2048:
raise ValueError("DDS pulse shape array length should be between 1 and 2048")
quantisedShapeVec = []
for el in shapeVec:
quantisedEl = round(el*0x3fff)
if quantisedEl < 0 or quantisedEl > 0x3fff:
raise ValueError("DDS pulse shape points should all be between 0.0 and 1.0")
quantisedShapeVec.append(quantisedEl)
self.send('setPulseShape {}\n'.format(shapeChannel))
for i in range(len(quantisedShapeVec)):
self.send('%d' % quantisedShapeVec[i]);
if i != len(quantisedShapeVec)-1:
self.send(',');
self.send('\n');
def setSensiblePulseShape(self, duration, shapeChannel=0):
"""Sets a sensible looking pulse shape with total duration 'duration' seconds. The duration must be between 0 and 10us"""
if duration > 10e-6 or duration < 0.2e-6:
raise ValueError("DDS pulse shape duration must be between 0.2us and 10us")
shapeVec = []
i_max = round(duration*250e6/2) # /2 because clock used is divided by 2, 250MHz is DDS sync clk
for i in range(i_max):
y = 0.209*math.log10( (math.sin((1+i)/float(i_max+1)*math.pi/2))**4 ) + 1
if y < 0:
y = 0
shapeVec.append(y)
self.setPulseShape(shapeChannel, shapeVec)
def setClockSource(self, clock_internal=False):
"""Choose between external clock (default) and internal 1 GHz source"""
self.send('setClockSource {:d}\n'.format(clock_internal))
self.ser.readline()
self.ser.readline()
def ping(self):
return True
class CoherentDdsSim:
def __init__(self):
pass
def identity(self):
return "coherentdds simulation"
def resetPhase(self):
logger.warning("Resetting phase")
pass
def setProfile(self, channel, profile, freq, phase=0.0, amp=1.0):
logger.warning("Setting ch:p {}:{} to freq={}, phase={}, amp={}".format(channel,profile,freq,phase,amp))
pass
def setProfileWords(self, channel, profile, freq, phase, amp): # Freq, phase, amp are all in units of lsb
pass
def reset(self):
pass
def setPulseShape(self, shapeChannel, shapeVec):
pass
def ping(self):
return True
| gpl-3.0 | -1,789,794,188,135,591,000 | 38.36646 | 129 | 0.622436 | false |
enixdark/im-r-e-d-i-s | flask-cook/my_app/catalog/forms.py | 1 | 2193 | from flask_wtf import Form
from wtforms import TextField,DecimalField,SelectField
from decimal import Decimal
from wtforms.validators import InputRequired,NumberRange,Optional
from models import Category,Product
from wtforms.validators import ValidationError
from wtforms.widgets import html_params,Select, HTMLString
from wtforms import FileField
def check_duplicate_category(case_sensitive=True):
    def _check_duplicate(form,field):
        if case_sensitive:
res = Category.query.filter(
Category.name.like('%' + field.data + '%')
).first()
else:
res = Category.query.filter(
Category.name.ilike('%' + field.data + '%')
).first()
if res:
raise ValidationError('Category named %s already exists' % field.data)
return _check_duplicate
class CustomCategoryInput(Select):
def __call__(self,field,**kwargs):
kwargs.setdefault('id',field.id)
html = []
for val, label, selected in field.iter_choices():
html.append(
'<input type="radio" %s> %s' % (html_params(name=field.name,value=val,checked=selected,**kwargs)
,label)
)
return HTMLString(''.join(html))
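        # With hypothetical data, one iteration emits markup roughly like:
        #   <input type="radio" id="category" name="category" value="1"> Books
        # i.e. one radio button per (value, label, selected) choice.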
class CategoryField(SelectField):
"""docstring for CategoryField"""
widget = CustomCategoryInput()
def iter_choices(self):
categories = [(c.id,c.name) for c in Category.query.all()]
for value,label in categories:
yield (value,label,self.coerce(value) == self.data)
def pre_validate(self,form):
for (v,_) in [(c.id,c.name) for c in Category.query.all()]:
if self.data == v:
break
else:
raise ValueError(self.gettext('Not a valid choice'))
return super(CategoryField,self).pre_validate(form)
class NameForm(Form):
name = TextField('Name', validators=[InputRequired()])
class ProductForm(NameForm):
price = DecimalField('Price',validators=[
InputRequired(),NumberRange(min=Decimal('0.0'))
])
category = CategoryField('Category',validators=[InputRequired()],coerce=int)
company = SelectField('Company',validators=[Optional()])
# company = SelectField('Company')
image_path = FileField('Product image')
class CategoryForm(NameForm):
name = TextField('Name', validators=[
InputRequired(),check_duplicate_category()
])
| mit | -5,309,016,394,700,146,000 | 30.782609 | 100 | 0.720474 | false |
arokem/MRS-old | MRS/qc.py | 1 | 2437 | """
quality control for MRS data
"""
import os
import os.path as op
import nibabel as nib
import numpy as np
import nipype.pipeline.engine as pe
from nipype.interfaces import fsl
def motioncheck(ref_file, end_file, out_path=None, thres=5.0):
"""
Checks motion between structural scans of the same modality.
Ideally obtained at the beginning and end of a scanning session.
Parameters
----------
ref_file: nifti file
Nifti file of first localizer acquired at the beginning of the session
end_file: nifti
nifti file of the localizer acquired at the end of the session
thres: float
threshold in mm of maximum allowed motion. Default 5mm
Returns
-------
rms : float
root mean square of xyz translation
passed: boolean
indicates if motion passed threshold: 1 if passed, 0 if failed.
"""
ref = nib.load(ref_file)
end = nib.load(end_file)
ref_data = ref.get_data()
end_data = end.get_data()
# Check if same affine space. modality must be the same to use realign,
# and prescription must be the same to deduce motion
ref_aff=ref.get_affine()
end_aff=end.get_affine()
if np.array_equal(ref_aff, end_aff):
        print('affines match')
else:
raise ValueError("Affines of start and end images do not match")
# save only axials
refax = ref_data[:, :, :, 0, np.newaxis]
endax = end_data[:, :, :, 0, np.newaxis]
    if out_path is None:
        out_path = os.path.dirname(ref_file)
refax_img = nib.Nifti1Image(refax, ref_aff)
nib.save(refax_img, op.join(out_path, 'refax.nii.gz'))
endax_img = nib.Nifti1Image(endax, ref_aff)
nib.save(endax_img, op.join(out_path, 'endax.nii.gz'))
# realignment
ref_file = op.join(out_path, 'refax.nii.gz')
in_file = op.join(out_path, 'endax.nii.gz')
mat_file = op.join(out_path, 'mat.nii.gz')
mcflt = fsl.MCFLIRT(in_file=in_file, ref_file=ref_file, save_mats=True,
cost='mutualinfo')
res = mcflt.run()
print('realignment affine matrix saved in mat_file: %s'
%res.outputs.mat_file)
aff_file=res.outputs.mat_file
aff = np.loadtxt(aff_file, dtype=float)
# compute RMS as indicator of motion
rel=aff[0:3, 3]
rms = np.sqrt(np.mean(rel**2))
if rms>=thres:
passed=False
else:
passed=True
return rms, passed
| mit | -6,128,460,252,175,409,000 | 26.382022 | 78 | 0.629873 | false |
3liz/QuickOSM | QuickOSM/ui/base_overpass_panel.py | 1 | 9703 | """Panel OSM Queries based on Overpass base class."""
import io
from qgis.core import (
Qgis,
QgsCoordinateReferenceSystem,
QgsCoordinateTransform,
QgsGeometry,
QgsProject,
QgsRectangle,
QgsVectorLayer,
)
from qgis.PyQt.QtWidgets import QCompleter, QDialog
from QuickOSM.core.exceptions import MissingLayerUI, NoSelectedFeatures
from QuickOSM.core.utilities.tools import nominatim_file
from QuickOSM.definitions.gui import Panels
from QuickOSM.definitions.osm import QueryLanguage, QueryType
from QuickOSM.qgis_plugin_tools.tools.i18n import tr
from QuickOSM.ui.base_processing_panel import BaseProcessingPanel
__copyright__ = 'Copyright 2019, 3Liz'
__license__ = 'GPL version 3'
__email__ = '[email protected]'
class BaseOverpassPanel(BaseProcessingPanel):
"""Panel OSM Processing base class.
    This panel will have a run button.
This is a kind of virtual class.
"""
def __init__(self, dialog: QDialog):
super().__init__(dialog)
self.last_places = []
def setup_panel(self):
"""Function to set custom UI for some panels."""
super().setup_panel()
self.dialog.advanced_panels[self.panel].setSaveCollapsedState(False)
self.dialog.advanced_panels[self.panel].setCollapsed(True)
self.dialog.action_oql[self.panel].setEnabled(False)
def query_language_xml(self):
self.dialog.query_language[self.panel] = QueryLanguage.XML
self.dialog.action_oql[self.panel].setEnabled(True)
self.dialog.action_xml[self.panel].setEnabled(False)
def query_language_oql(self):
self.dialog.query_language[self.panel] = QueryLanguage.OQL
self.dialog.action_xml[self.panel].setEnabled(True)
self.dialog.action_oql[self.panel].setEnabled(False)
def query_language_updated(self):
if self.dialog.query_language[Panels.Query] != self.dialog.query_language[Panels.QuickQuery]:
self.dialog.query_language[Panels.Query] = self.dialog.query_language[Panels.QuickQuery]
if self.dialog.query_language[Panels.Query] == QueryLanguage.OQL:
self.dialog.action_xml[Panels.Query].setEnabled(True)
self.dialog.action_oql[Panels.Query].setEnabled(False)
elif self.dialog.query_language[Panels.Query] == QueryLanguage.XML:
self.dialog.action_oql[Panels.Query].setEnabled(True)
self.dialog.action_xml[Panels.Query].setEnabled(False)
def init_nominatim_autofill(self):
"""Open the nominatim file and start setting up the auto-completion."""
# Useful to avoid duplicate if we add a new completer.
for line_edit in self.dialog.places_edits.values():
line_edit.setCompleter(None)
user_file = nominatim_file()
with io.open(user_file, 'r', encoding='utf8') as f:
self.last_places = []
for line in f:
self.last_places.append(line.rstrip('\n'))
nominatim_completer = QCompleter(self.last_places)
for line_edit in self.dialog.places_edits.values():
line_edit.setCompleter(nominatim_completer)
line_edit.completer().setCompletionMode(
QCompleter.PopupCompletion)
@staticmethod
def sort_nominatim_places(existing_places: list, place: str) -> list:
"""Helper to sort and limit results of saved nominatim places."""
if place in existing_places:
existing_places.pop(existing_places.index(place))
existing_places.insert(0, place)
existing_places = list(dict.fromkeys(existing_places))
return existing_places[:10]
def write_nominatim_file(self, panel: Panels):
"""Write new nominatim value in the file.
:param panel: The panel to use so as to fetch the nominatim value.
:type panel: Panels
"""
value = self.dialog.places_edits[panel].text()
new_list = self.sort_nominatim_places(self.last_places, value)
user_file = nominatim_file()
try:
with io.open(user_file, 'w', encoding='utf8') as f:
for item in new_list:
if item:
f.write('{}\n'.format(item))
except UnicodeDecodeError:
# The file is corrupted ?
# Remove all old places
with io.open(user_file, 'w', encoding='utf8') as f:
f.write('\n')
self.init_nominatim_autofill()
def _core_query_type_updated(self, combo_query_type, widget, spinbox=None, checkbox=None):
"""Enable/disable the extent/layer widget."""
current = combo_query_type.currentData()
if combo_query_type.count() == 2:
# Query tab, widget is the layer selector
if current == 'layer':
widget.setVisible(True)
layer = self.dialog.layers_buttons[self.panel].currentLayer()
if isinstance(layer, QgsVectorLayer):
checkbox.setVisible(True)
else:
checkbox.setVisible(False)
checkbox.setChecked(False)
else:
widget.setVisible(False)
checkbox.setVisible(False)
checkbox.setChecked(False)
else:
# Quick query tab, widget is the stacked widget
if current in ['in', 'around']:
widget.setCurrentIndex(0)
spinbox.setVisible(current == 'around')
elif current in ['layer']:
widget.setCurrentIndex(1)
layer = self.dialog.layers_buttons[self.panel].currentLayer()
if isinstance(layer, QgsVectorLayer):
checkbox.setVisible(True)
else:
checkbox.setVisible(False)
checkbox.setChecked(False)
elif current in ['canvas', 'attributes']:
widget.setCurrentIndex(2)
# TODO remove
def _start_process(self):
"""Make some stuff before launching the process."""
self.dialog.button_show_query.setDisabled(True)
self.dialog.button_generate_query.setDisabled(True)
super()._start_process()
# TODO remove
def _end_process(self):
"""Make some stuff after the process."""
self.dialog.button_show_query.setDisabled(False)
self.dialog.button_generate_query.setDisabled(False)
super()._end_process()
def end_query(self, num_layers):
"""Display the message at the end of the query.
:param num_layers: Number of layers which have been loaded.
:rtype num_layers: int
"""
if num_layers:
text = tr(
'Successful query, {} layer(s) has been loaded.').format(
num_layers)
self.dialog.set_progress_text(text)
self.dialog.display_message_bar(text, level=Qgis.Success, duration=5)
else:
self.dialog.set_progress_text(tr('No result'))
self.dialog.display_message_bar(
tr('Successful query, but no result.'),
level=Qgis.Warning, duration=7)
def gather_values(self):
properties = super().gather_values()
place = self.dialog.places_edits[self.panel].text()
if place == '':
place = None
properties['place'] = place
query_type = self.dialog.query_type_buttons[self.panel].currentData()
if query_type in ['in', 'around']:
place = self.dialog.places_edits[self.panel].text()
properties['place'] = place
properties['bbox'] = None
elif query_type in ['canvas', 'layer']:
if query_type == 'canvas':
geom_extent = self.dialog.iface.mapCanvas().extent()
source_crs = self.dialog.iface.mapCanvas().mapSettings().destinationCrs()
elif query_type == 'layer':
# Else if a layer is checked
layer = self.dialog.layers_buttons[self.panel].currentLayer()
if not layer:
raise MissingLayerUI
if self.dialog.selection_features[self.panel].isChecked() \
and isinstance(layer, QgsVectorLayer):
geom_extent = layer.boundingBoxOfSelected()
if geom_extent == QgsRectangle(0, 0, 0, 0):
raise NoSelectedFeatures
else:
geom_extent = layer.extent()
source_crs = layer.crs()
else:
raise NotImplementedError
# noinspection PyArgumentList
geom_extent = QgsGeometry.fromRect(geom_extent)
epsg_4326 = QgsCoordinateReferenceSystem('EPSG:4326')
# noinspection PyArgumentList
crs_transform = QgsCoordinateTransform(
source_crs, epsg_4326, QgsProject.instance())
geom_extent.transform(crs_transform)
properties['bbox'] = geom_extent.boundingBox()
properties['place'] = None
else:
properties['bbox'] = None
if query_type == 'in':
properties['query_type'] = QueryType.InArea
elif query_type == 'around':
properties['query_type'] = QueryType.AroundArea
elif query_type == 'canvas':
properties['query_type'] = QueryType.BBox
elif query_type == 'layer':
properties['query_type'] = QueryType.BBox
elif query_type == 'attributes':
properties['query_type'] = QueryType.NotSpatial
else:
raise NotImplementedError
return properties
| gpl-2.0 | 7,361,262,418,288,470,000 | 38.283401 | 101 | 0.601154 | false |
maybelinot/clicktrack | setup.py | 1 | 2323 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Eduard Trott
# @Date: 2015-09-07 14:59:52
# @Email: [email protected]
# @Last modified by: etrott
# @Last Modified time: 2015-10-05 11:03:49
from setuptools import setup
VERSION_FILE = "clicktrack/_version.py"
VERSION_EXEC = ''.join(open(VERSION_FILE).readlines())
__version__ = ''
exec(VERSION_EXEC) # update __version__
if not __version__:
raise RuntimeError("Unable to find version string in %s." % VERSION_FILE)
# acceptable version schema: major.minor[.patch][-sub[ab]]
__pkg__ = 'clicktrack'
__pkgdir__ = {'clicktrack': 'clicktrack'}
__pkgs__ = ['clicktrack', ]
__provides__ = ['clicktrack']
__desc__ = 'Get statistics from URL-shortener.'
__irequires__ = [
# CORE DEPENDENCIES
'functioncache==0.92',
'requests==2.7.0',
'requests-kerberos==0.7.0',
'pyyaml==3.11',
'bitly_api'
]
__xrequires__ = {
'tests': [
'pytest==2.7.2',
'instructions',
'pytest-pep8==1.0.6', # run with `py.test --pep8 ...`
],
# 'docs': ['sphinx==1.3.1', ],
# 'github': ['PyGithub==1.25.2', ],
# 'invoke': ['invoke==0.10.1', ],
}
pip_src = 'https://pypi.python.org/packages/src'
__deplinks__ = []
# README is in the parent directory
readme_pth = 'README.rst'
with open(readme_pth) as _file:
readme = _file.read()
github = 'https://github.com/etrott/gfreespace'
download_url = '%s/archive/master.zip' % github
default_setup = dict(
url=github,
license='GPLv3',
author='Eduard Trott',
author_email='[email protected]',
maintainer='Chris Ward',
maintainer_email='[email protected]',
download_url=download_url,
long_description=readme,
data_files=[],
classifiers=[
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Topic :: Office/Business',
'Topic :: Utilities',
],
keywords=['information'],
dependency_links=__deplinks__,
description=__desc__,
install_requires=__irequires__,
extras_require=__xrequires__,
name=__pkg__,
package_dir=__pkgdir__,
packages=__pkgs__,
provides=__provides__,
version=__version__,
zip_safe=False, # we reference __file__; see [1]
)
setup(**default_setup)
| gpl-3.0 | 8,813,746,658,269,782,000 | 26.654762 | 77 | 0.605682 | false |
hzlf/openbroadcast | website/apps/alibrary/migrations/0099_auto__add_distributor.py | 1 | 56270 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Distributor'
db.create_table('alibrary_distributor', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('legacy_id', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('migrated', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('uuid', self.gf('django.db.models.fields.CharField')(max_length=36, blank=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=400)),
('slug', self.gf('django_extensions.db.fields.AutoSlugField')(allow_duplicates=False, max_length=50, separator=u'-', blank=True, populate_from='name', overwrite=True)),
('code', self.gf('django.db.models.fields.CharField')(max_length=50)),
('country', self.gf('django_countries.fields.CountryField')(max_length=2, null=True, blank=True)),
('address', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('email_main', self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True)),
('description', self.gf('lib.fields.extra.MarkdownTextField')(null=True, blank=True)),
('first_placeholder', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['cms.Placeholder'], null=True)),
('created', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
('updated', self.gf('django.db.models.fields.DateField')(auto_now=True, blank=True)),
('parent', self.gf('mptt.fields.TreeForeignKey')(blank=True, related_name='label_children', null=True, to=orm['alibrary.Distributor'])),
('owner', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='distributors_owner', null=True, on_delete=models.SET_NULL, to=orm['auth.User'])),
('creator', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='distributors_creator', null=True, on_delete=models.SET_NULL, to=orm['auth.User'])),
('publisher', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='distributors_publisher', null=True, on_delete=models.SET_NULL, to=orm['auth.User'])),
('type', self.gf('django.db.models.fields.CharField')(default='unknown', max_length=12)),
('d_tags', self.gf('tagging.fields.TagField')(null=True)),
('description_html', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('lft', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
('rght', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
('tree_id', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
('level', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
))
db.send_create_signal('alibrary', ['Distributor'])
def backwards(self, orm):
# Deleting model 'Distributor'
db.delete_table('alibrary_distributor')
models = {
'actstream.action': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'alibrary.apilookup': {
'Meta': {'ordering': "('created',)", 'object_name': 'APILookup'},
'api_data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'processed': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '2'}),
'provider': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50'}),
'ressource_id': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'alibrary.artist': {
'Meta': {'ordering': "('name',)", 'object_name': 'Artist'},
'aliases': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'aliases_rel_+'", 'null': 'True', 'to': "orm['alibrary.Artist']"}),
'biography': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artists_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'null': 'True'}),
'date_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'disable_editing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'disable_link': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'disambiguation': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'enable_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_folder'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'listed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['alibrary.Artist']", 'through': "orm['alibrary.ArtistMembership']", 'symmetrical': 'False'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artists_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'placeholder_1': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'professions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['alibrary.Profession']", 'through': "orm['alibrary.ArtistProfessions']", 'symmetrical': 'False'}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artists_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.artistmembership': {
'Meta': {'object_name': 'ArtistMembership'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'artist_child'", 'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'artist_parent'", 'to': "orm['alibrary.Artist']"}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_membership_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"})
},
'alibrary.artistplugin': {
'Meta': {'object_name': 'ArtistPlugin', 'db_table': "'cmsplugin_artistplugin'", '_ormbases': ['cms.CMSPlugin']},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Artist']"}),
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'})
},
'alibrary.artistprofessions': {
'Meta': {'object_name': 'ArtistProfessions'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Profession']"})
},
'alibrary.daypart': {
'Meta': {'ordering': "('day', 'time_start')", 'object_name': 'Daypart'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'day': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '1', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time_end': ('django.db.models.fields.TimeField', [], {}),
'time_start': ('django.db.models.fields.TimeField', [], {})
},
'alibrary.distributor': {
'Meta': {'ordering': "('name',)", 'object_name': 'Distributor'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'distributors_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'null': 'True'}),
'description': ('lib.fields.extra.MarkdownTextField', [], {'null': 'True', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_main': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'first_placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'distributors_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'label_children'", 'null': 'True', 'to': "orm['alibrary.Distributor']"}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'distributors_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '12'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.format': {
'Meta': {'ordering': "('format', 'version')", 'object_name': 'Format'},
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'default_price': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'format': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'base'", 'max_length': '10'})
},
'alibrary.label': {
'Meta': {'ordering': "('name',)", 'object_name': 'Label'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'labels_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'null': 'True'}),
'description': ('lib.fields.extra.MarkdownTextField', [], {'null': 'True', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disable_editing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'disable_link': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_main': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'first_placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'label_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'labelcode': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'listed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'labels_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'label_children'", 'null': 'True', 'to': "orm['alibrary.Label']"}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'labels_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '12'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.license': {
'Meta': {'ordering': "('name',)", 'object_name': 'License'},
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '36', 'null': 'True', 'blank': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'license_children'", 'null': 'True', 'to': "orm['alibrary.License']"}),
'restricted': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'c3cb9777-e3ff-4e41-8cf2-0b19c0e7b258'", 'max_length': '36'})
},
'alibrary.licensetranslation': {
'Meta': {'ordering': "('language_code',)", 'unique_together': "(('language_code', 'master'),)", 'object_name': 'LicenseTranslation', 'db_table': "'alibrary_license_translation'"},
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '15', 'blank': 'True'}),
'license_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'to': "orm['alibrary.License']"}),
'name_translated': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'alibrary.media': {
'Meta': {'ordering': "('tracknumber',)", 'object_name': 'Media'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_artist'", 'null': 'True', 'to': "orm['alibrary.Artist']"}),
'base_bitrate': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'base_duration': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'base_filesize': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'base_format': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'base_samplerate': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'conversion_status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '2'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'echoprint_status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '2'}),
'extra_artists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['alibrary.Artist']", 'null': 'True', 'through': "orm['alibrary.MediaExtraartists']", 'blank': 'True'}),
'folder': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isrc': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_license'", 'null': 'True', 'to': "orm['alibrary.License']"}),
'lock': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '1'}),
'master': ('django.db.models.fields.files.FileField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'master_sha1': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'mediatype': ('django.db.models.fields.CharField', [], {'default': "'track'", 'max_length': '12'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'processed': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '2'}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_release'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['alibrary.Release']"}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'tracknumber': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.mediaextraartists': {
'Meta': {'ordering': "('profession__name', 'artist__name')", 'object_name': 'MediaExtraartists'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'extraartist_artist'", 'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'media': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'extraartist_media'", 'to': "orm['alibrary.Media']"}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_extraartist_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"})
},
'alibrary.mediaformat': {
'Meta': {'ordering': "('name',)", 'object_name': 'Mediaformat'},
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_listing': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'alibrary.mediaplugin': {
'Meta': {'object_name': 'MediaPlugin', 'db_table': "'cmsplugin_mediaplugin'", '_ormbases': ['cms.CMSPlugin']},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'headline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'media': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Media']"})
},
'alibrary.playlist': {
'Meta': {'ordering': "('-updated',)", 'object_name': 'Playlist'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'd_tags': ('tagging.fields.TagField', [], {'null': 'True'}),
'dayparts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'daypart_plalists'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['alibrary.Daypart']"}),
'description': ('lib.fields.extra.MarkdownTextField', [], {'null': 'True', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duration': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '12', 'null': 'True'}),
'edit_mode': ('django.db.models.fields.PositiveIntegerField', [], {'default': '2'}),
'enable_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['alibrary.PlaylistItem']", 'null': 'True', 'through': "orm['alibrary.PlaylistItemPlaylist']", 'blank': 'True'}),
'main_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'seasons': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'season_plalists'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['alibrary.Season']"}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'target_duration': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'other'", 'max_length': '12', 'null': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'weather': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'weather_plalists'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['alibrary.Weather']"})
},
'alibrary.playlistitem': {
'Meta': {'object_name': 'PlaylistItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.playlistitemplaylist': {
'Meta': {'object_name': 'PlaylistItemPlaylist'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'cue_in': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'cue_out': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_cross': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_in': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_out': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.PlaylistItem']"}),
'playlist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Playlist']"}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.playlistmedia': {
'Meta': {'object_name': 'PlaylistMedia'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'cue_in': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'cue_out': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_cross': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_in': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_out': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'media': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Media']"}),
'playlist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Playlist']"}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.profession': {
'Meta': {'ordering': "('name',)", 'object_name': 'Profession'},
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_listing': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'})
},
'alibrary.relation': {
'Meta': {'ordering': "('url',)", 'object_name': 'Relation'},
'action': ('django.db.models.fields.CharField', [], {'default': "'information'", 'max_length': '50'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'service': ('django.db.models.fields.CharField', [], {'default': "'generic'", 'max_length': '50'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '512'})
},
'alibrary.release': {
'Meta': {'ordering': "('-created',)", 'object_name': 'Release'},
'asin': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
'catalognumber': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'cover_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_cover_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'releases_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'null': 'True'}),
'description': ('lib.fields.extra.MarkdownTextField', [], {'null': 'True', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'enable_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'extra_artists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['alibrary.Artist']", 'null': 'True', 'through': "orm['alibrary.ReleaseExtraartists']", 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_folder'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_label'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['alibrary.Label']"}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_license'", 'null': 'True', 'to': "orm['alibrary.License']"}),
'main_format': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Mediaformat']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'media': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'releases'", 'to': "orm['alibrary.Media']", 'through': "orm['alibrary.ReleaseMedia']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'releases_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'placeholder_1': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'pressings': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'releases_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'release_country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'releasedate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'releasedate_approx': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'releasestatus': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}),
'releasetype': ('django.db.models.fields.CharField', [], {'default': "'other'", 'max_length': '12'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'totaltracks': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.releaseextraartists': {
'Meta': {'object_name': 'ReleaseExtraartists'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_extraartist_artist'", 'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_extraartist_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_extraartist_release'", 'to': "orm['alibrary.Release']"})
},
'alibrary.releasemedia': {
'Meta': {'object_name': 'ReleaseMedia'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'media': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Media']"}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Release']"})
},
'alibrary.releaseplugin': {
'Meta': {'object_name': 'ReleasePlugin', 'db_table': "'cmsplugin_releaseplugin'", '_ormbases': ['cms.CMSPlugin']},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Release']"})
},
'alibrary.releaserelations': {
'Meta': {'object_name': 'ReleaseRelations'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'relation': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_relation_relation'", 'to': "orm['alibrary.Relation']"}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_relation_release'", 'to': "orm['alibrary.Release']"})
},
'alibrary.season': {
'Meta': {'ordering': "('-name',)", 'object_name': 'Season'},
'date_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name_de': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'alibrary.weather': {
'Meta': {'ordering': "('-name',)", 'object_name': 'Weather'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name_de': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'arating.vote': {
'Meta': {'unique_together': "(('user', 'content_type', 'object_id'),)", 'object_name': 'Vote'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': "orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': "orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['alibrary'] | gpl-3.0 | -5,774,735,863,854,991,000 | 98.243386 | 240 | 0.560459 | false |
craigbruce/awacs | awacs/elasticache.py | 1 | 2765 | # Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from aws import Action
service_name = 'AWS ElastiCache'
prefix = 'elasticache'
AddTagsToResource = Action(prefix, 'AddTagsToResource')
AuthorizeCacheSecurityGroupIngress = \
Action(prefix, 'AuthorizeCacheSecurityGroupIngress')
CopySnapshot = Action(prefix, 'CopySnapshot')
CreateCacheCluster = Action(prefix, 'CreateCacheCluster')
CreateCacheParameterGroup = Action(prefix, 'CreateCacheParameterGroup')
CreateCacheSecurityGroup = Action(prefix, 'CreateCacheSecurityGroup')
CreateCacheSubnetGroup = Action(prefix, 'CreateCacheSubnetGroup')
CreateReplicationGroup = Action(prefix, 'CreateReplicationGroup')
CreateSnapshot = Action(prefix, 'CreateSnapshot')
DeleteCacheCluster = Action(prefix, 'DeleteCacheCluster')
DeleteCacheParameterGroup = Action(prefix, 'DeleteCacheParameterGroup')
DeleteCacheSecurityGroup = Action(prefix, 'DeleteCacheSecurityGroup')
DeleteCacheSubnetGroup = Action(prefix, 'DeleteCacheSubnetGroup')
DeleteReplicationGroup = Action(prefix, 'DeleteReplicationGroup')
DeleteSnapshot = Action(prefix, 'DeleteSnapshot')
DescribeCacheClusters = Action(prefix, 'DescribeCacheClusters')
DescribeCacheEngineVersions = \
Action(prefix, 'DescribeCacheEngineVersions')
DescribeCacheParameterGroups = \
Action(prefix, 'DescribeCacheParameterGroups')
DescribeCacheParameters = Action(prefix, 'DescribeCacheParameters')
DescribeCacheSecurityGroups = \
Action(prefix, 'DescribeCacheSecurityGroups')
DescribeCacheSubnetGroups = \
Action(prefix, 'DescribeCacheSubnetGroups')
DescribeEngineDefaultParameters = \
Action(prefix, 'DescribeEngineDefaultParameters')
DescribeEvents = Action(prefix, 'DescribeEvents')
DescribeReplicationGroups = Action(prefix, 'DescribeReplicationGroups')
DescribeReservedCacheNodes = \
Action(prefix, 'DescribeReservedCacheNodes')
DescribeReservedCacheNodesOfferings = \
Action(prefix, 'DescribeReservedCacheNodesOfferings')
DescribeSnapshots = Action(prefix, 'DescribeSnapshots')
ListTagsForResource = Action(prefix, 'ListTagsForResource')
ModifyCacheCluster = Action(prefix, 'ModifyCacheCluster')
ModifyCacheParameterGroup = Action(prefix, 'ModifyCacheParameterGroup')
ModifyCacheSubnetGroup = Action(prefix, 'ModifyCacheSubnetGroup')
ModifyReplicationGroup = Action(prefix, 'ModifyReplicationGroup')
PurchaseReservedCacheNodesOffering = \
Action(prefix, 'PurchaseReservedCacheNodesOffering')
RebootCacheCluster = Action(prefix, 'RebootCacheCluster')
RemoveTagsFromResource = Action(prefix, 'RemoveTagsFromResource')
ResetCacheParameterGroup = Action(prefix, 'ResetCacheParameterGroup')
RevokeCacheSecurityGroupIngress = \
Action(prefix, 'RevokeCacheSecurityGroupIngress')
| bsd-2-clause | 2,653,114,337,146,885,000 | 47.508772 | 71 | 0.824955 | false |
christabor/MoAL | MOAL/languages/formal_language_theory/grammars/context_sensitive.py | 1 | 4856 | # -*- coding: utf-8 -*-
__author__ = """Chris Tabor ([email protected])"""
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from MOAL.helpers.display import Section
from MOAL.helpers.display import prnt
from random import choice
from MOAL.languages.formal_language_theory.grammars.context_free \
import ContextFreeGrammar
DEBUG = True if __name__ == '__main__' else False
class ContextSensitiveGrammar(ContextFreeGrammar):
DEBUG = True
def __init__(self):
self.rule_divider = ':'
super(ContextSensitiveGrammar, self).__init__()
self.DEBUG = ContextSensitiveGrammar.DEBUG
@staticmethod
def get_substr_match(rule, string):
"""Return the index of the last matching item of the two strings.
e.g. index = 1 for '[ab]cd' and '[ab]zd'. If the index is the
same as the length then the strings simply match.
"""
        if not len(rule) <= len(string):
raise Exception('Invalid string.')
# Degenerate case
if rule == string:
return len(rule)
rule = list(rule)
string = list(string)
index = 0
for k, letter in enumerate(rule):
if rule[k] != string[k]:
return index
else:
index += 1
return index
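    # Illustrative usage (matching the assertions exercised in the DEBUG
    # block at the bottom of this module):
    #
    #   ContextSensitiveGrammar.get_substr_match('a b', 'a b c')   # -> 3
    #   ContextSensitiveGrammar.get_substr_match('a X', 'a b c')   # -> 2 (mismatch at 'X')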
@staticmethod
def simple_rule(rule, string=''):
_rule = list(rule)
_string = list(string)
if ContextSensitiveGrammar.DEBUG:
print('simple rule: {} and string: {}'.format(_rule, _string))
# We only replace tokens that match a rule.
# The rest remain unchanged.
for k, char in enumerate(string):
if char in _rule:
_string[k] = ''.join(string).replace(' ', '')
# Replace the token with the rules' string
_string[k] = ContextSensitiveGrammar.simple_rule(
rule, string=_string)
ret = ''.join(_string)
if ContextSensitiveGrammar.DEBUG:
prnt('simple rule retval: ', ret)
return ret
def _evaluate(self, groups, evaluation=''):
for group in groups:
left, right = group
evaluation += ''.join(right)
return evaluation
def evaluate(self, tokens=None, evaluation=''):
"""A basic parser for a custom attribute grammar.
One thing to note is that ambiguous grammars need to be iterated over,
since the duplicate rules can't be mapped via dictionary key.
Unambiguous grammars are therefore more performant,
because the lookup is O(1) vs. O(N).
"""
if tokens is None:
if hasattr(self, 'tokens'):
tokens = self.tokens
else:
raise ContextFreeGrammar.InvalidTokenSet
expressions = [r[0] for r in self.rules]
tokens = [r[1] for r in self.rules]
groups = [[
expressions[k],
tokens[k].split(' ')] for k, _ in enumerate(tokens)
]
prnt('Groups', groups)
evaluation = self._evaluate(groups, evaluation='')
new_tokens = list(evaluation)
for token in new_tokens:
for expression in expressions:
if token in list(expression):
token = self._evaluate(groups, evaluation=evaluation)
if ContextSensitiveGrammar.DEBUG:
print('Final evaluation in `evaluate`: {}'.format(
evaluation, ''.join(new_tokens)))
return evaluation
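# For illustration: rules such as 'S => a b c' and 'S => a S B c' share the
# left-hand side 'S' (an ambiguous grammar), so they cannot be keyed uniquely
# in a dict; hence evaluate() walks self.rules as a list of
# (expression, token-string) pairs, trading the O(1) lookup for O(N).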
if DEBUG:
with Section('Grammar parser - basic'):
"""https://en.wikipedia.org/wiki/Context-sensitive_grammar#Examples"""
_csg = ContextSensitiveGrammar
csg_rules = [
'S => a b c',
'S => a S B c',
'c B => W B',
'W B => W X',
'W X => B X',
'B X => B c',
'b B => b b',
]
csg = ContextSensitiveGrammar()
csg.set_rules(csg_rules)
tokens = [choice(
['S', 'S', 'c B', 'W B', 'W X', 'B X', 'b B']) for _ in range(4)]
prnt('Tokens:', tokens)
csg.evaluate(tokens=tokens)
csg.evaluate(tokens=['S'])
# Testing/staticmethods
_csg.simple_rule('S', 'aSaSbb$')
_csg.simple_rule('X', 'aBcXaa')
csg.evaluate(tokens=['S', 'B', 'B X'])
assert len('a b c') == _csg.get_substr_match('a b c', 'a b c')
assert len('a C d') == _csg.get_substr_match('a C d', 'a C d EE')
assert len('a C') == _csg.get_substr_match('a C', 'a C d EE')
assert len('a C d E') == _csg.get_substr_match('a C d E', 'a C d EE')
assert not len('a C d') == _csg.get_substr_match('a C d E', 'a C d EE')
assert not len('a C d') == _csg.get_substr_match('a c d', 'a C d')
| apache-2.0 | 4,727,653,786,450,231,000 | 33.935252 | 79 | 0.545923 | false |
equalitie/EchoChamber | echochamber/tests/test_messaging.py | 1 | 6514 | """
Test client connections to an XMPP chat room
"""
import math
import time
import bisect
import logging
import random
from threading import Thread
import pytest
from echochamber.utils import create_client_connections, establish_channel, find_available_port
from echochamber.proxy import ProxyServer
def read_messages(clients, counters, timeout):
def run(client):
now = time.time()
end = now + timeout
while now < end:
try:
client.read_message(end - now)
counters[client.username] += 1
now = time.time()
except Exception:
break
threads = []
for client in clients:
t = Thread(target=run, args=(client,))
t.start()
threads.append(t)
for t in threads:
t.join()
def read_rest_of_messages(clients, counters, total):
def run(client):
while counters[client.username] < total:
try:
client.read_message(5*60)
counters[client.username] += 1
except Exception:
break
threads = []
for client in clients:
username = client.username
t = Thread(target=run, args=(client,))
t.start()
threads.append((t, username))
success = True
for t, username in threads:
logging.info("Joining %s", username)
t.join()
messages_read = counters[username]
if messages_read != total:
success = False
logging.info("Client %s read only %d out of %d messages",
username, messages_read, total)
assert success
def connect_and_send_messages(client_factory, debug, num_clients, server_port=None):
total_time = 200 # Time period for sending all messages
frequency_high = 0.60 # 6 messages every 10 seconds
frequency_low = 0.10 # 1 message every 10 seconds
# threshhold = 300
percentage_high_users = 0.1
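    # Rough arithmetic for these defaults, for illustration: a high frequency
    # user queues int(200 * 0.60) = 120 messages, a low frequency user
    # int(200 * 0.10) = 20, and ceil(num_clients * 0.1) clients form the
    # high frequency group.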
# Join all clients to the room
clients = create_client_connections(client_factory, num_clients, proxy_port=server_port)
establish_channel(clients)
logging.info("All clients have been invited to the channel, sending message tests")
num_high_users = int(math.ceil(num_clients * percentage_high_users))
high_users, low_users = clients[0:num_high_users], clients[num_high_users:]
logging.info("Chose %d high frequency messaging users and %d low frequency users.",
len(high_users), len(low_users))
message_queue = []
for client in clients:
if client in high_users:
msg_freq = frequency_high
else:
msg_freq = frequency_low
num_messages = int(total_time * msg_freq)
# Schedule each message to be sent by this client
for i in range(num_messages):
# Pick a random time in the total_time range to send the message
# bisect.insort will queue messages in the list ordered by scheduled time
queued_time = random.uniform(0, total_time)
bisect.insort_right(message_queue, (queued_time, client))
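            # bisect.insort_right keeps message_queue ordered by the first
            # tuple element (the scheduled send time); e.g. inserting
            # (2.0, b) into [(1.0, a), (3.0, c)] gives
            # [(1.0, a), (2.0, b), (3.0, c)].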
# Run a loop and send all queued messages at the schedule times
start_time = time.time()
message_id = 0
total_messages = len(message_queue)
ids = {}
recv_count = {}
for client in clients:
ids[client.username] = 0
recv_count[client.username] = 0
while message_queue:
# Check if first message is ready to be sent (we have reached the scheduled send time)
elapsed = time.time() - start_time
send_at = message_queue[0][0]
if elapsed >= send_at:
queued_time, client = message_queue.pop(0)
message_id += 1
logging.info("Sending message %d for %s queued at %0.2f",
message_id, client.username, queued_time)
client.send_message("{message_id} {time:0.2f} {username} {mid}".format(
message_id=message_id,
time=queued_time,
username=client.username,
mid=ids[client.username])
)
ids[client.username] += 1
else:
# Interestingly, using `time.sleep(send_at - elapsed)` here
# instead of read_messages will make the 10 node test pass
# on our Xeon test server while when read_messages is used
# the test fails. But when it fails and the log of each client
# is inspected it can be seen that all messages are actually
# received by jabberites. On the other hand, the 25 node test
            # fails in both cases (when either time.sleep or read_messages is
# used) but when read_messages is used, it can again be shown
# from the logs that all messages are actually received by
# jabberites but are lost somewhere in the pexpect library.
read_messages(clients, recv_count, send_at - elapsed)
# time.sleep(send_at - elapsed)
logging.info("Finished sending %d messages", total_messages)
# Wait for all messages to arrive
# NOTE: Reading from all clients at once seems to increase chances
# of receiving all the messages from pexpect.
read_rest_of_messages(clients, recv_count, total_messages)
logging.info("All clients received all sent messages")
@pytest.mark.parametrize("num_clients", [
10,
pytest.mark.skipif("os.environ.get('CI', None)")(25),
])
def test_messaging(client_factory, debug, num_clients):
"""
Test that clients connect and can send varying number of messages
"""
connect_and_send_messages(client_factory, debug, num_clients)
@pytest.mark.parametrize("num_clients", [
10,
pytest.mark.skipif("os.environ.get('CI', None)")(25),
])
def test_messaging_high_latency(xmpp_server, client_factory, debug, num_clients):
"""
Connect all clients via the latency proxy server
"""
latency_mean = 0.2
latency_variance = 0.025
proxy_port = find_available_port()
proxy = ProxyServer(("127.0.0.1", proxy_port), ("127.0.0.1", xmpp_server.c2s_port),
latency_mean, latency_variance)
logging.info("Proxy listening on port {} with latency mean {}s and variance {}s".
format(proxy_port, latency_mean, latency_variance))
# Join all clients to the room via a high-latency proxy
connect_and_send_messages(client_factory, debug, num_clients, server_port=proxy_port)
proxy.stop()
| gpl-3.0 | 100,258,749,795,892,370 | 33.284211 | 95 | 0.620663 | false |
pypyr/pypyr-cli | pypyr/parser/jsonfile.py | 1 | 1285 | """Context parser that returns a dictionary from a local json file."""
from collections.abc import Mapping
import logging
import json
# use pypyr logger to ensure loglevel is set correctly
logger = logging.getLogger(__name__)
def get_parsed_context(args):
"""Parse args as path to a json file and returns context as dictionary."""
logger.debug("starting")
if not args:
raise AssertionError(
"pipeline must be invoked with context arg set. For "
"this json parser you're looking for something like:\n"
"pypyr pipelinename ./myjsonfile.json")
path = ' '.join(args)
# open the json file on disk so that you can initialize the dictionary
logger.debug("attempting to open file: %s", path)
with open(path) as json_file:
payload = json.load(json_file)
if not isinstance(payload, Mapping):
raise TypeError("json input should describe an object at the top "
"level. You should have something like\n"
"{\n\"key1\":\"value1\",\n\"key2\":\"value2\"\n}\n"
"at the json top-level, not an [array] or literal.")
logger.debug("json file loaded into context. Count: %d", len(payload))
logger.debug("done")
return payload
| apache-2.0 | -6,151,299,585,859,462,000 | 37.939394 | 78 | 0.638132 | false |
SnowWalkerJ/quantlib | quant/data/wind/tables/sindexperformance.py | 1 | 3151 | from ....common.db.sql import VARCHAR, Numeric as NUMBER, DateTime as DATETIME, Column, BaseModel, CLOB, DATE
VARCHAR2 = VARCHAR
class SIndexPerformance(BaseModel):
"""
    4.89 China Stock Index Performance
    Attributes
    ----------
    object_id: VARCHAR2(100)
        Object ID
    s_info_windcode: VARCHAR2(40)
        Wind code
    trade_dt: VARCHAR2(8)
        Trade date
    pct_chg_recent1m: NUMBER(20,6)
        Percentage change over the last 1 month
    pct_chg_recent3m: NUMBER(20,6)
        Percentage change over the last 3 months
    pct_chg_recent6m: NUMBER(20,6)
        Percentage change over the last 6 months
    pct_chg_recent1y: NUMBER(20,6)
        Percentage change over the last 1 year
    pct_chg_recent2y: NUMBER(20,6)
        Percentage change over the last 2 years
    pct_chg_recent3y: NUMBER(20,6)
        Percentage change over the last 3 years
    pct_chg_recent4y: NUMBER(20,6)
        Percentage change over the last 4 years
    pct_chg_recent5y: NUMBER(20,6)
        Percentage change over the last 5 years
    pct_chg_recent6y: NUMBER(20,6)
        Percentage change over the last 6 years
    pct_chg_thisweek: NUMBER(20,6)
        Week-to-date percentage change
    pct_chg_thismonth: NUMBER(20,6)
        Month-to-date percentage change
    pct_chg_thisquarter: NUMBER(20,6)
        Quarter-to-date percentage change
    pct_chg_thisyear: NUMBER(20,6)
        Year-to-date percentage change
    si_pct_chg: NUMBER(20,6)
        Percentage change since the index was launched
    annualyeild: NUMBER(20,6)
        Annualized return
    std_dev_6m: NUMBER(20,6)
        6-month standard deviation
    std_dev_1y: NUMBER(20,6)
        1-year standard deviation
    std_dev_2y: NUMBER(20,6)
        2-year standard deviation
    std_dev_3y: NUMBER(20,6)
        3-year standard deviation
    sharpratio_6m: NUMBER(20,6)
        6-month Sharpe ratio
    sharpratio_1y: NUMBER(20,6)
        1-year Sharpe ratio
    sharpratio_2y: NUMBER(20,6)
        2-year Sharpe ratio
    sharpratio_3y: NUMBER(20,6)
        3-year Sharpe ratio
    opdate: DATETIME
        opdate
    opmode: VARCHAR(1)
        opmode
"""
__tablename__ = "SIndexPerformance"
object_id = Column(VARCHAR2(100), primary_key=True)
s_info_windcode = Column(VARCHAR2(40))
trade_dt = Column(VARCHAR2(8))
pct_chg_recent1m = Column(NUMBER(20,6))
pct_chg_recent3m = Column(NUMBER(20,6))
pct_chg_recent6m = Column(NUMBER(20,6))
pct_chg_recent1y = Column(NUMBER(20,6))
pct_chg_recent2y = Column(NUMBER(20,6))
pct_chg_recent3y = Column(NUMBER(20,6))
pct_chg_recent4y = Column(NUMBER(20,6))
pct_chg_recent5y = Column(NUMBER(20,6))
pct_chg_recent6y = Column(NUMBER(20,6))
pct_chg_thisweek = Column(NUMBER(20,6))
pct_chg_thismonth = Column(NUMBER(20,6))
pct_chg_thisquarter = Column(NUMBER(20,6))
pct_chg_thisyear = Column(NUMBER(20,6))
si_pct_chg = Column(NUMBER(20,6))
annualyeild = Column(NUMBER(20,6))
std_dev_6m = Column(NUMBER(20,6))
std_dev_1y = Column(NUMBER(20,6))
std_dev_2y = Column(NUMBER(20,6))
std_dev_3y = Column(NUMBER(20,6))
sharpratio_6m = Column(NUMBER(20,6))
sharpratio_1y = Column(NUMBER(20,6))
sharpratio_2y = Column(NUMBER(20,6))
sharpratio_3y = Column(NUMBER(20,6))
opdate = Column(DATETIME)
opmode = Column(VARCHAR(1))
| gpl-3.0 | 941,308,491,787,101,700 | 28.091837 | 109 | 0.593476 | false |
joeyoung658/A-Level_2016-18 | Challenges/Hangman/Everyones/Samuel/hangman.py | 1 | 2594 | """ Hangman Game (v1.0)
Name: samuel armstrong
Date:
"""
import random
count=0
def load_file(filename):
""" Function to return a word list from a plain text file;
Note: You will need to open the file, read and append
each line to an array (or list), close the file and
then return the array.
"""
word_list = []
with open(filename, "r") as file:
for line in file:
word_list.append(line.replace("\n", "").lower())
return word_list
def select_word(word_list):
""" Function to return a random word from an array of words;
Note: You will need to import the random module and use
random.randint() to select a random index in the array.
"""
rand = random.randint(0, len(word_list)-1)
word = (word_list[rand])
return word
def find_character(char, word,):
global count
""" Function to return the position(s) of a character in word;
Note: This should return the index of the character as an integer,
if the character is not found, it should return a value of -1
"""
index = 0
while index < len(word):
index = word.find(char, index)
if index == -1:
print ("letter not found within the word")
break
index = index + 1
        print('your character was found at position: ', index)
count = count + 1
## cant append to list correctly
## indices=[]
## indices.append(word.find(char, index))
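## A possible fix for the commented-out lines above (sketch): collect every
## position of the guessed letter in one pass, e.g.
##   indices = [i for i, letter in enumerate(word) if letter == char]
##   if indices:
##       print("your character was found at position(s):", indices)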
def main():
global count
""" Note: This is your main function and should contain your game loop.
"""
i= input ("would you like to play a game y/n: ").lower()
while i== "y":
fudge = "word_list.txt"
attempts_remaining = 10
word_list = load_file("word_list.txt")
word = select_word (word_list)
print ("the word is" , len(word), " letters long")
while attempts_remaining !=0:
char = input ("letter: ")
char_pos = find_character (char, word,)
attempts_remaining = attempts_remaining -1
print ("attempts remaining: ",attempts_remaining)
if count == len(word):
print ("well done you have got all the letter the word was",word)
i= input ("would you like to play a game y/n: ").lower()
break
print ("game over")
if __name__ == "__main__":
main()
| gpl-3.0 | -9,055,671,597,323,212,000 | 29.880952 | 81 | 0.543177 | false |
websafe/slpkg | slpkg/sbo/read.py | 1 | 1623 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# read.py file is part of slpkg.
# Copyright 2014-2017 Dimitris Zlatanidis <[email protected]>
# All rights reserved.
# Slpkg is a user-friendly package manager for Slackware installations
# https://github.com/dslackw/slpkg
# Slpkg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from slpkg.url_read import URL
class ReadSBo(object):
"""Read SBo files from urls
"""
def __init__(self, sbo_url):
self.sbo_url = sbo_url
def readme(self, sbo_readme):
"""Read SlackBuild README file
"""
return URL(self.sbo_url + sbo_readme).reading()
def info(self, name, sbo_file):
"""Read info file
"""
return URL(self.sbo_url + name + sbo_file).reading()
def slackbuild(self, name, sbo_file):
"""Read SlackBuild file
"""
return URL(self.sbo_url + name + sbo_file).reading()
def doinst(self, doinst_sh):
"""Read SlackBuild doinst.sh
"""
return URL(self.sbo_url + doinst_sh).reading()
| gpl-3.0 | -4,101,729,055,160,015,000 | 30.211538 | 70 | 0.67098 | false |
peterdv/pyreststore | pyreststore/tests/test_bckt/test_bckt_model.py | 1 | 5830 | # -*- coding: utf-8; mode: Python; -*-
from __future__ import unicode_literals
import json
from django.test import TestCase
from bckt.models import Bckt
from django.contrib.auth.models import User
from django.contrib.auth.hashers import make_password
from tests.test_bckt.utils import create_bckt
class BcktTest(TestCase):
'''
Unit tests for the Bckt Django model
'''
def setUp(self):
self.user = User.objects.create_user('BcktTestUser',
password=make_password('test'))
def tearDown(self):
self.user.delete()
def test_bckt_creation(self):
'''
Bckt model: Creation of a Bckt instance.
'''
title = 'Title: Created by test_bckt_creation()'
contents = 'This is a two line content\nJust to check a newline'
b = Bckt()
b = create_bckt(user=self.user, title=title, contents=contents)
self.assertTrue(isinstance(b, Bckt))
self.assertEqual(b.title, title)
self.assertEqual(b.code, contents)
def test_bckt_creation_da(self):
'''
Bckt model: Use Danish national characters.
'''
title = 'Title: Created by test_bckt_creation() æøå ÆØÅ'
contents = '''This is a 3 line content
Just to check a newline
and Danish national characters æøåÆØÅ'''
b = create_bckt(user=self.user, title=title, contents=contents)
self.assertTrue(isinstance(b, Bckt))
self.assertEqual(b.title, title)
self.assertEqual(b.code, contents)
def test_assert_jsonLoads_improperjson(self):
'''
Bckt model: Assert that json.loads() reports illegal json string.
'''
# Illegal json syntax, control characters *within* a string literal
# in the json data should be escaped.
c_err = '''{"code": "Notes\n\nover 3 lines"}'''
c = '''{"code": "Notes\\n\\nover 3 lines"}'''
self.assertRaises(ValueError, json.loads, c_err)
def test_assert_jsonLoads(self):
'''
Bckt model: Assert that json.loads() works as expected if strict=True
'''
# Illegal json syntax, control characters *within* a string literal
# in the json data should be escaped as in:
# c = 'Notes\\n\\nover 3 lines'
c = 'Notes\n\nover 3 lines'
b1_py = {
'title': 'Assert json.loads() test data',
'language': 'Text',
'code': c,
}
b1_json = '''{
"title": "Assert json.loads() test data",
"code": "Notes\\n\\nover 3 lines",
"language": "Text"
}
'''
print(b1_json)
b1 = json.loads(b1_json)
self.assertEqual(b1['title'], b1_py['title'])
self.assertEqual(b1['language'], b1_py['language'])
self.assertEqual(b1['code'], c)
def test_assert_jsonLoads_relaxed(self):
'''
Bckt model: Assert that json.loads() works as expected if strict=False
'''
# Illegal json syntax, control characters *within* a string literal
# in the json data should be escaped as in:
# c = 'Notes\\n\\nover 3 lines'
c = 'Notes\n\nover 3 lines'
b1_py = {
'title': 'Assert json.loads() test data',
'language': 'Text',
'code': c,
}
b1_json = '''{
"title": "Assert json.loads() test data",
"code": "Notes
over 3 lines",
"language": "Text"
}
'''
b1 = json.loads(b1_json, strict=False)
self.assertEqual(b1['title'], b1_py['title'])
self.assertEqual(b1['language'], b1_py['language'])
self.assertEqual(b1['code'], c)
def test_bckt_creation_from_json(self):
'''
Bckt model: Creation of a Bckt instance from a json string.
'''
b1_json = '''
{
"title": "Title: Created by test_bckt_creation_from_json()",
"code":
"Danish national characters \\n such as æøåÆØÅ\\n might pose a problem",
"language": "Text"
}
'''
print(b1_json)
b1 = json.loads(b1_json, strict=True)
b = create_bckt(user=self.user, jsonBckt=b1_json)
print(type(b))
self.assertTrue(isinstance(b, Bckt))
self.assertEqual(b.title, b1['title'])
self.assertEqual(b.language, b1['language'])
self.assertEqual(b.code, b1['code'])
def test_bckt_creation_max_bckts_limit(self):
'''
Bckt model: Enforcement of PYRESTSTORE_MAX_BCKTS on creation.
'''
from django.conf import settings
def mktitle(n, n_max):
s = 'Test bucket number {:d} of {:d}'.format(n, n_max)
return s
n_max = settings.PYRESTSTORE_MAX_BCKTS
print('PYRESTSTORE_MAX_BCKTS = {!s}'.format(n_max))
# Overshoot by 2
for n in xrange(0, n_max + 2):
title = mktitle(n, n_max)
code = ''.join([
'We should *never* see more than ',
' {:d} \n'.format(n_max),
'concurrent buckets persisted in the database. \n\n',
'When this limit is reached, creation of the ',
'next bucket\n',
'is expected to result in the deletion of the oldest\n',
'\n\nThis is bucket number {:d}'.format(n)
])
data = {'title': title, 'code': code, 'language': 'Text'}
b = create_bckt(user=self.user, jsonBckt=json.dumps(data))
# Get the current Bckt instances in the database
bckts = Bckt.objects.all()
self.assertEqual(len(bckts), settings.PYRESTSTORE_MAX_BCKTS)
for n in xrange(0, n_max):
e_title = mktitle(n + 2, n_max)
print('Expect title ', e_title)
self.assertEqual(e_title, bckts[n].title)
| bsd-3-clause | -6,327,749,684,678,481,000 | 31.651685 | 78 | 0.564006 | false |
Mnk3y/plugin.video.lastship | lastship.py | 1 | 10683 | # -*- coding: UTF-8 -*-
"""
Lastship Add-on (C) 2017
Credits to Placenta and Covenant; our thanks go to their creators
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Addon Name: Lastship
# Addon id: plugin.video.lastship
# Addon Provider: LastShip
import urlparse,sys,urllib
from resources.lib.modules import control
from resources.lib.modules import cache
from resources.lib.modules import views
from resources.lib.modules import playcount
from resources.lib.modules import trailer
from resources.lib.modules import trakt
from resources.lib.modules import sources
from resources.lib.modules import downloader
from resources.lib.modules import libtools
from resources.lib.indexers import navigator
from resources.lib.indexers import movies
from resources.lib.indexers import channels
from resources.lib.indexers import tvshows
from resources.lib.indexers import episodes
import xbmcgui
params = dict(urlparse.parse_qsl(sys.argv[2].replace('?','')))
action = params.get('action')
name = params.get('name')
title = params.get('title')
year = params.get('year')
imdb = params.get('imdb')
tvdb = params.get('tvdb')
tmdb = params.get('tmdb')
season = params.get('season')
episode = params.get('episode')
tvshowtitle = params.get('tvshowtitle')
premiered = params.get('premiered')
url = params.get('url')
image = params.get('image')
meta = params.get('meta')
select = params.get('select')
query = params.get('query')
source = params.get('source')
content = params.get('content')
windowedtrailer = params.get('windowedtrailer')
windowedtrailer = int(windowedtrailer) if windowedtrailer in ("0","1") else 0
if action == None:
cache.cache_version_check()
navigator.navigator().root()
elif action == 'newsNavigator':
navigator.navigator().news()
elif action == 'movieNavigator':
navigator.navigator().movies()
elif action == 'movieliteNavigator':
navigator.navigator().movies(lite=True)
elif action == 'mymovieNavigator':
navigator.navigator().mymovies()
elif action == 'mymovieliteNavigator':
navigator.navigator().mymovies(lite=True)
elif action == 'tvNavigator':
navigator.navigator().tvshows()
elif action == 'tvliteNavigator':
navigator.navigator().tvshows(lite=True)
elif action == 'mytvNavigator':
navigator.navigator().mytvshows()
elif action == 'mytvliteNavigator':
navigator.navigator().mytvshows(lite=True)
elif action == 'downloadNavigator':
navigator.navigator().downloads()
elif action == 'libraryNavigator':
navigator.navigator().library()
elif action == 'toolNavigator':
navigator.navigator().tools()
elif action == 'searchNavigator':
if not control.setting('search.quick') == '0':
searchSelect = xbmcgui.Dialog().select(control.lang(32010).encode('utf-8'),
[
control.lang(32001).encode('utf-8'),
control.lang(32002).encode('utf-8'),
control.lang(32029).encode('utf-8'),
control.lang(32030).encode('utf-8')
])
if searchSelect == 0:
movies.movies().search()
movies.movies().search_new()
elif searchSelect == 1:
tvshows.tvshows().search()
tvshows.tvshows().search_new()
elif searchSelect == 2:
movies.movies().person()
elif searchSelect == 3:
tvshows.tvshows().person()
else:
pass
else:
navigator.navigator().search()
elif action == 'viewsNavigator':
navigator.navigator().views()
elif action == 'clearCache':
navigator.navigator().clearCache()
elif action == 'clearCacheSearch':
navigator.navigator().clearCacheSearch()
elif action == 'clearCacheAll':
navigator.navigator().clearCacheAll()
elif action == 'clearCacheMeta':
navigator.navigator().clearCacheMeta()
elif action == 'infoCheck':
navigator.navigator().infoCheck('')
elif action == 'movies':
movies.movies().get(url)
elif action == 'moviePage':
movies.movies().get(url)
elif action == 'movieWidget':
movies.movies().widget()
elif action == 'movieSearch':
movies.movies().search()
elif action == 'movieSearchnew':
movies.movies().search_new()
elif action == 'movieSearchterm':
movies.movies().search_term(name)
elif action == 'moviePerson':
movies.movies().person()
elif action == 'movieGenres':
movies.movies().genres()
elif action == 'movieLanguages':
movies.movies().languages()
elif action == 'movieCertificates':
movies.movies().certifications()
elif action == 'movieYears':
movies.movies().years()
elif action == 'moviePersons':
movies.movies().persons(url)
elif action == 'movieUserlists':
movies.movies().userlists()
elif action == 'channels':
channels.channels().get()
elif action == 'tvshows':
tvshows.tvshows().get(url)
elif action == 'tvshowPage':
tvshows.tvshows().get(url)
elif action == 'tvSearch':
tvshows.tvshows().search()
elif action == 'tvSearchnew':
tvshows.tvshows().search_new()
elif action == 'tvSearchterm':
tvshows.tvshows().search_term(name)
elif action == 'tvPerson':
tvshows.tvshows().person()
elif action == 'tvGenres':
tvshows.tvshows().genres()
elif action == 'tvNetworks':
tvshows.tvshows().networks()
elif action == 'tvLanguages':
tvshows.tvshows().languages()
elif action == 'tvCertificates':
tvshows.tvshows().certifications()
elif action == 'tvPersons':
tvshows.tvshows().persons(url)
elif action == 'tvUserlists':
tvshows.tvshows().userlists()
elif action == 'seasons':
episodes.seasons().get(tvshowtitle, year, imdb, tvdb)
elif action == 'episodes':
episodes.episodes().get(tvshowtitle, year, imdb, tvdb, season, episode)
elif action == 'calendar':
episodes.episodes().calendar(url)
elif action == 'tvWidget':
episodes.episodes().widget()
elif action == 'calendars':
episodes.episodes().calendars()
elif action == 'episodeUserlists':
episodes.episodes().userlists()
elif action == 'refresh':
control.refresh()
elif action == 'queueItem':
control.queueItem()
elif action == 'openSettings':
control.openSettings(query)
elif action == 'artwork':
control.artwork()
elif action == 'addView':
views.addView(content)
elif action == 'moviePlaycount':
playcount.movies(imdb, query)
elif action == 'episodePlaycount':
playcount.episodes(imdb, tvdb, season, episode, query)
elif action == 'tvPlaycount':
playcount.tvshows(name, imdb, tvdb, season, query)
elif action == 'trailer':
trailer.trailer().play(name, url, windowedtrailer)
elif action == 'traktManager':
trakt.manager(name, imdb, tvdb, content)
elif action == 'authTrakt':
trakt.authTrakt()
elif action == 'urlResolver':
try: import urlresolver
except: pass
urlresolver.display_settings()
elif action == 'download':
import json
try: downloader.download(name, image, sources.sources().sourcesResolve(json.loads(source)[0], True))
except: pass
elif action == 'play':
sources.sources().play(title, year, imdb, tvdb, season, episode, tvshowtitle, premiered, meta, select)
elif action == 'addItem':
sources.sources().addItem(title)
elif action == 'playItem':
sources.sources().playItem(title, source)
elif action == 'alterSources':
sources.sources().alterSources(url, meta)
elif action == 'clearSources':
sources.sources().clearSources()
elif action == 'random':
rtype = params.get('rtype')
if rtype == 'movie':
rlist = movies.movies().get(url, create_directory=False)
r = sys.argv[0]+"?action=play"
elif rtype == 'episode':
rlist = episodes.episodes().get(tvshowtitle, year, imdb, tvdb, season, create_directory=False)
r = sys.argv[0]+"?action=play"
elif rtype == 'season':
rlist = episodes.seasons().get(tvshowtitle, year, imdb, tvdb, create_directory=False)
r = sys.argv[0]+"?action=random&rtype=episode"
elif rtype == 'show':
rlist = tvshows.tvshows().get(url, create_directory=False)
r = sys.argv[0]+"?action=random&rtype=season"
from random import randint
import json
try:
rand = randint(1,len(rlist))-1
for p in ['title','year','imdb','tvdb','season','episode','tvshowtitle','premiered','select']:
if rtype == "show" and p == "tvshowtitle":
try: r += '&'+p+'='+urllib.quote_plus(rlist[rand]['title'])
except: pass
else:
try: r += '&'+p+'='+urllib.quote_plus(rlist[rand][p])
except: pass
try: r += '&meta='+urllib.quote_plus(json.dumps(rlist[rand]))
except: r += '&meta='+urllib.quote_plus("{}")
if rtype == "movie":
try: control.infoDialog(rlist[rand]['title'], control.lang(32536).encode('utf-8'), time=30000)
except: pass
elif rtype == "episode":
try: control.infoDialog(rlist[rand]['tvshowtitle']+" - Season "+rlist[rand]['season']+" - "+rlist[rand]['title'], control.lang(32536).encode('utf-8'), time=30000)
except: pass
control.execute('RunPlugin(%s)' % r)
except:
control.infoDialog(control.lang(32537).encode('utf-8'), time=8000)
elif action == 'movieToLibrary':
libtools.libmovies().add(name, title, year, imdb, tmdb)
elif action == 'moviesToLibrary':
libtools.libmovies().range(url)
elif action == 'moviesToLibrarySilent':
libtools.libmovies().silent(url)
elif action == 'tvshowToLibrary':
libtools.libtvshows().add(tvshowtitle, year, imdb, tvdb)
elif action == 'tvshowsToLibrary':
libtools.libtvshows().range(url)
elif action == 'tvshowsToLibrarySilent':
libtools.libtvshows().silent(url)
elif action == 'updateLibrary':
libtools.libepisodes().update(query)
elif action == 'service':
libtools.libepisodes().service()
| gpl-3.0 | -6,966,446,947,023,699,000 | 26.748052 | 174 | 0.65403 | false |
Freeseer/freeseer | src/freeseer/plugins/output/videopreview/widget.py | 1 | 1906 | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
freeseer - vga/presentation capture software
Copyright (C) 2013 Free and Open Source Software Learning Centre
http://fosslc.org
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
For support, questions, suggestions or any other inquiries, visit:
http://wiki.github.com/Freeseer/freeseer/
@author: Thanh Ha
'''
from PyQt4.QtGui import QComboBox
from PyQt4.QtGui import QFormLayout
from PyQt4.QtGui import QLabel
from PyQt4.QtGui import QWidget
class ConfigWidget(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
layout = QFormLayout()
self.setLayout(layout)
# Preview
self.previewLabel = QLabel("Preview")
self.previewComboBox = QComboBox()
self.previewComboBox.addItem("autovideosink")
self.previewComboBox.addItem("ximagesink")
self.previewComboBox.addItem("xvimagesink")
self.previewComboBox.addItem("gconfvideosink")
layout.addRow(self.previewLabel, self.previewComboBox)
# Leaky Queue
        # Allows the user to make the video queue leaky - required for the RTMP streaming plugin to work
self.leakyQueueLabel = QLabel("Leaky Queue")
self.leakyQueueComboBox = QComboBox()
layout.addRow(self.leakyQueueLabel, self.leakyQueueComboBox)
| gpl-3.0 | 1,840,719,251,519,555,600 | 31.862069 | 101 | 0.730325 | false |
bstroebl/xplanplugin | HandleDb.py | 1 | 2613 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
XPlan
A QGIS plugin
 XPlan module (Fachschale) for XPlanung
-------------------
begin : 2011-03-08
copyright : (C) 2011 by Bernhard Stroebl, KIJ/DV
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from builtins import object
from qgis.PyQt import QtCore, QtSql
from qgis.gui import *
from qgis.core import *
class DbHandler(object):
'''class to handle a QtSql.QSqlDatabase connnection to a PostgreSQL server'''
def __init__(self, iface, tools):
self.iface = iface
self.tools = tools
self.db = None
def dbConnect(self, thisPassword = None):
s = QtCore.QSettings( "XPlanung", "XPlanung-Erweiterung" )
service = ( s.value( "service", "" ) )
host = ( s.value( "host", "" ) )
port = ( s.value( "port", "5432" ) )
database = ( s.value( "dbname", "" ) )
authcfg = s.value( "authcfg", "" )
username, passwd, authcfg = self.tools.getAuthUserNamePassword(authcfg)
if authcfg == None:
username = ( s.value( "uid", "" ) )
passwd = ( s.value( "pwd", "" ) )
if thisPassword:
passwd = thisPassword
# connect to DB
db = QtSql.QSqlDatabase.addDatabase ("QPSQL", "XPlanung")
db.setHostName(host)
db.setPort(int(port))
db.setDatabaseName(database)
db.setUserName(username)
db.setPassword(passwd)
        db.authcfg = authcfg # for DDIM
ok2 = db.open()
if not ok2:
self.iface.messageBar().pushMessage("Fehler", \
u"Konnte keine Verbindung mit der Datenbank aufbauen", \
level=Qgis.Critical)
return None
else:
return db
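    # Illustrative usage sketch (added commentary, not part of the original
    # plugin): a caller would typically do
    #     db = self.dbConnect()
    #     if db is not None:
    #         ...  # run QtSql queries against the XPlanung schema
    #         self.dbDisconnect(db)
    # dbConnect() returns None and pushes a message-bar error when the
    # connection could not be opened.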
def dbDisconnect(self, db):
db.close()
db = None
| gpl-2.0 | 514,960,653,646,335,300 | 34.767123 | 81 | 0.458445 | false |
ipfire/ddns | src/ddns/system.py | 1 | 12221 | #!/usr/bin/python3
###############################################################################
# #
# ddns - A dynamic DNS client for IPFire #
# Copyright (C) 2012 IPFire development team #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
import base64
import re
import ssl
import socket
import urllib.request
import urllib.parse
import urllib.error
from .__version__ import CLIENT_VERSION
from .errors import *
from .i18n import _
# Initialize the logger.
import logging
logger = logging.getLogger("ddns.system")
logger.propagate = 1
class DDNSSystem(object):
"""
The DDNSSystem class adds a layer of abstraction
between the ddns software and the system.
"""
# The default useragent.
USER_AGENT = "IPFireDDNSUpdater/%s" % CLIENT_VERSION
def __init__(self, core):
# Connection to the core of the program.
self.core = core
# Address cache.
self.__addresses = {}
# Find out on which distribution we are running.
self.distro = self._get_distro_identifier()
logger.debug(_("Running on distribution: %s") % self.distro)
@property
def proxy(self):
proxy = self.core.settings.get("proxy")
# Strip http:// at the beginning.
if proxy and proxy.startswith("http://"):
proxy = proxy[7:]
return proxy
def get_local_ip_address(self, proto):
ip_address = self._get_local_ip_address(proto)
# Check if the IP address is usable and only return it then
if self._is_usable_ip_address(proto, ip_address):
return ip_address
def _get_local_ip_address(self, proto):
# Legacy code for IPFire 2.
if self.distro == "ipfire-2" and proto == "ipv4":
try:
with open("/var/ipfire/red/local-ipaddress") as f:
return f.readline()
except IOError as e:
# File not found
if e.errno == 2:
return
raise
# XXX TODO
raise NotImplementedError
def _guess_external_ip_address(self, url, timeout=10):
"""
Sends a request to an external web server
to determine the current default IP address.
"""
try:
response = self.send_request(url, timeout=timeout)
# If the server could not be reached, we will return nothing.
except DDNSNetworkError:
return
if not response.code == 200:
return
match = re.search(b"^Your IP address is: (.*)$", response.read())
if match is None:
return
return match.group(1).decode()
def guess_external_ip_address(self, family, **kwargs):
if family == "ipv6":
url = "https://checkip6.dns.lightningwirelabs.com"
elif family == "ipv4":
url = "https://checkip4.dns.lightningwirelabs.com"
else:
raise ValueError("unknown address family")
return self._guess_external_ip_address(url, **kwargs)
def send_request(self, url, method="GET", data=None, username=None, password=None, timeout=30):
assert method in ("GET", "POST")
# Add all arguments in the data dict to the URL and escape them properly.
if method == "GET" and data:
query_args = self._format_query_args(data)
data = None
if "?" in url:
url = "%s&%s" % (url, query_args)
else:
url = "%s?%s" % (url, query_args)
logger.debug("Sending request (%s): %s" % (method, url))
if data:
logger.debug(" data: %s" % data)
req = urllib.request.Request(url, data=data)
if username and password:
basic_auth_header = self._make_basic_auth_header(username, password)
req.add_header("Authorization", "Basic %s" % basic_auth_header.decode())
# Set the user agent.
req.add_header("User-Agent", self.USER_AGENT)
# All requests should not be cached anywhere.
req.add_header("Pragma", "no-cache")
# Set the upstream proxy if needed.
if self.proxy:
logger.debug("Using proxy: %s" % self.proxy)
# Configure the proxy for this request.
req.set_proxy(self.proxy, "http")
assert req.get_method() == method
logger.debug(_("Request header:"))
for k, v in req.headers.items():
logger.debug(" %s: %s" % (k, v))
try:
resp = urllib.request.urlopen(req, timeout=timeout)
# Log response header.
logger.debug(_("Response header (Status Code %s):") % resp.code)
for k, v in resp.info().items():
logger.debug(" %s: %s" % (k, v))
# Return the entire response object.
return resp
except urllib.error.HTTPError as e:
# Log response header.
logger.debug(_("Response header (Status Code %s):") % e.code)
for k, v in e.hdrs.items():
logger.debug(" %s: %s" % (k, v))
# 400 - Bad request
if e.code == 400:
raise DDNSRequestError(e.reason)
# 401 - Authorization Required
# 403 - Forbidden
elif e.code in (401, 403):
raise DDNSAuthenticationError(e.reason)
# 404 - Not found
# Either the provider has changed the API, or
# there is an error on the server
elif e.code == 404:
raise DDNSNotFound(e.reason)
# 429 - Too Many Requests
elif e.code == 429:
raise DDNSTooManyRequests(e.reason)
# 500 - Internal Server Error
elif e.code == 500:
raise DDNSInternalServerError(e.reason)
# 503 - Service Unavailable
elif e.code == 503:
raise DDNSServiceUnavailableError(e.reason)
# Raise all other unhandled exceptions.
raise
except urllib.error.URLError as e:
if e.reason:
# Handle SSL errors
if isinstance(e.reason, ssl.SSLError):
e = e.reason
if e.reason == "CERTIFICATE_VERIFY_FAILED":
raise DDNSCertificateError
# Raise all other SSL errors
raise DDNSSSLError(e.reason)
# Name or service not known
if e.reason.errno == -2:
raise DDNSResolveError
# Network Unreachable (e.g. no IPv6 access)
if e.reason.errno == 101:
raise DDNSNetworkUnreachableError
# Connection Refused
elif e.reason.errno == 111:
raise DDNSConnectionRefusedError
# No route to host
elif e.reason.errno == 113:
raise DDNSNoRouteToHostError(req.host)
# Raise all other unhandled exceptions.
raise
except socket.timeout as e:
logger.debug(_("Connection timeout"))
raise DDNSConnectionTimeoutError
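    # Illustrative usage sketch (added commentary, not part of the upstream
    # module; the URL and credentials below are made-up examples): with a
    # configured DDNSSystem instance `system`, a GET update request with
    # query arguments could look like
    #     response = system.send_request(
    #         "https://updates.example.com/nic/update",
    #         data={"hostname": "host.example.com", "myip": "192.0.2.1"},
    #         username="user", password="secret")
    #     if response.code == 200:
    #         body = response.read()
    # Failures surface as the DDNS* exceptions mapped above, e.g.
    # DDNSAuthenticationError for HTTP 401/403.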
def _format_query_args(self, data):
args = []
for k, v in data.items():
arg = "%s=%s" % (k, urllib.parse.quote(v))
args.append(arg)
return "&".join(args)
def _make_basic_auth_header(self, username, password):
authstring = "%s:%s" % (username, password)
# Encode authorization data in base64.
authstring = base64.b64encode(authstring.encode())
return authstring
def get_address(self, proto):
"""
Returns the current IP address for
the given IP protocol.
"""
try:
return self.__addresses[proto]
# IP is currently unknown and needs to be retrieved.
except KeyError:
self.__addresses[proto] = address = \
self._get_address(proto)
return address
def _get_address(self, proto):
assert proto in ("ipv6", "ipv4")
# IPFire 2 does not support IPv6.
if self.distro == "ipfire-2" and proto == "ipv6":
return
# Check if the external IP address should be guessed from
# a remote server.
guess_ip = self.core.settings.get("guess_external_ip", "true")
guess_ip = guess_ip in ("true", "yes", "1")
# Get the local IP address.
local_ip_address = None
if not guess_ip:
try:
local_ip_address = self.get_local_ip_address(proto)
except NotImplementedError:
logger.warning(_("Falling back to check the IP address with help of a public server"))
# If no local IP address could be determined, we will fall back to the guess
# it with help of an external server...
if not local_ip_address:
local_ip_address = self.guess_external_ip_address(proto)
return local_ip_address
def _is_usable_ip_address(self, proto, address):
"""
Returns True is the local IP address is usable
for dynamic DNS (i.e. is not a RFC1918 address or similar).
"""
if proto == "ipv4":
# This is not the most perfect solution to match
# these addresses, but instead of pulling in an entire
# library to handle the IP addresses better, we match
# with regular expressions instead.
matches = (
# RFC1918 address space
r"^10\.\d+\.\d+\.\d+$",
r"^192\.168\.\d+\.\d+$",
r"^172\.(1[6-9]|2[0-9]|31)\.\d+\.\d+$",
# Dual Stack Lite address space
r"^100\.(6[4-9]|[7-9][0-9]|1[01][0-9]|12[0-7])\.\d+\.\d+$",
)
for match in matches:
m = re.match(match, address)
if m is None:
continue
# Found a match. IP address is not usable.
return False
# In all other cases, return OK.
return True
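    # Behaviour sketch (commentary only; `system` is assumed to be a
    # DDNSSystem instance): the patterns above reject private and DS-Lite
    # ranges and accept globally routable addresses, e.g.
    #     system._is_usable_ip_address("ipv4", "192.168.1.10")  # -> False
    #     system._is_usable_ip_address("ipv4", "203.0.113.7")   # -> True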
def resolve(self, hostname, proto=None):
addresses = []
if proto is None:
family = 0
elif proto == "ipv6":
family = socket.AF_INET6
elif proto == "ipv4":
family = socket.AF_INET
else:
raise ValueError("Protocol not supported: %s" % proto)
# Resolve the host address.
try:
response = socket.getaddrinfo(hostname, None, family)
except socket.gaierror as e:
# Name or service not known
if e.errno == -2:
return []
# Temporary failure in name resolution
elif e.errno == -3:
raise DDNSResolveError(hostname)
# No record for requested family available (e.g. no AAAA)
elif e.errno == -5:
return []
raise
# Handle responses.
for family, socktype, proto, canonname, sockaddr in response:
# IPv6
if family == socket.AF_INET6:
address, port, flow_info, scope_id = sockaddr
# Only use the global scope.
if not scope_id == 0:
continue
# IPv4
elif family == socket.AF_INET:
address, port = sockaddr
# Ignore everything else...
else:
continue
			# Add to response list if not already in there.
if address not in addresses:
addresses.append(address)
return addresses
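    # Illustrative sketch (commentary only; host name and addresses are just
    # examples): resolving a dual-stacked host without a protocol filter
    # returns both address families, e.g.
    #     system.resolve("www.example.com")
    #     # -> ["93.184.216.34", "2606:2800:220:1:248:1893:25c8:1946"]
    # while proto="ipv4" or proto="ipv6" restricts the result to one family.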
def _get_distro_identifier(self):
"""
Returns a unique identifier for the distribution
we are running on.
"""
os_release = self.__parse_os_release()
if os_release:
return os_release
system_release = self.__parse_system_release()
if system_release:
return system_release
# If nothing else could be found, we return
# just "unknown".
return "unknown"
def __parse_os_release(self):
"""
Tries to parse /etc/os-release and
returns a unique distribution identifier
if the file exists.
"""
try:
f = open("/etc/os-release", "r")
except IOError as e:
# File not found
if e.errno == 2:
return
raise
os_release = {}
with f:
for line in f.readlines():
m = re.match(r"^([A-Z\_]+)=(.*)$", line)
if m is None:
continue
os_release[m.group(1)] = m.group(2)
try:
return "%(ID)s-%(VERSION_ID)s" % os_release
except KeyError:
return
def __parse_system_release(self):
"""
Tries to parse /etc/system-release and
returns a unique distribution identifier
if the file exists.
"""
try:
f = open("/etc/system-release", "r")
except IOError as e:
# File not found
if e.errno == 2:
return
raise
with f:
# Read first line
line = f.readline()
# Check for IPFire systems
m = re.match(r"^IPFire (\d).(\d+)", line)
if m:
return "ipfire-%s" % m.group(1)
| gpl-3.0 | -9,210,014,487,743,441,000 | 25.452381 | 96 | 0.617544 | false |
google-research/google-research | uflow/uflow_flags.py | 1 | 9220 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Flags used by uflow training and evaluation."""
from absl import flags
FLAGS = flags.FLAGS
# General flags.
flags.DEFINE_bool(
'no_tf_function', False, 'If True, run without'
' tf functions. This incurs a performance hit, but can'
' make debugging easier.')
flags.DEFINE_string('train_on', '',
'"format0:path0;format1:path1", e.g. "kitti:/usr/..."')
flags.DEFINE_string('eval_on', '',
'"format0:path0;format1:path1", e.g. "kitti:/usr/..."')
flags.DEFINE_string('plot_dir', '', 'Path to directory where plots are saved.')
flags.DEFINE_string('checkpoint_dir', '',
'Path to directory for saving and restoring checkpoints.')
flags.DEFINE_string('init_checkpoint_dir', '',
'Path to directory for initializing from a checkpoint.')
flags.DEFINE_bool(
'plot_debug_info', False,
'Flag to indicate whether to plot debug info during training.')
flags.DEFINE_bool(
'use_tensorboard', False, 'Toggles logging to tensorboard.')
flags.DEFINE_string(
'tensorboard_logdir', '', 'Where to log tensorboard summaries.')
flags.DEFINE_bool(
'frozen_teacher', False, 'Whether or not to freeze the '
'teacher model during distillation.')
flags.DEFINE_bool(
'reset_global_step', True, 'Reset global step to 0 after '
'loading from init_checkpoint')
flags.DEFINE_bool(
'reset_optimizer', True, 'Reset optimizer internals after '
'loading from init_checkpoint')
# Training flags.
flags.DEFINE_bool('evaluate_during_train', False,
'Whether or not to have the GPU train job perform evaluation '
'between epochs.')
flags.DEFINE_bool('from_scratch', False,
'Train from scratch. Do not restore the last checkpoint.')
flags.DEFINE_bool('no_checkpointing', False,
'Do not save model checkpoints during training.')
flags.DEFINE_integer('epoch_length', 1000,
'Number of gradient steps per epoch.')
flags.DEFINE_integer('num_train_steps', int(1e6),
'Number of gradient steps to train for.')
flags.DEFINE_integer('selfsup_after_num_steps', int(5e5),
'Number of gradient steps before self-supervision.')
flags.DEFINE_integer('selfsup_ramp_up_steps', int(1e5),
'Number of gradient steps for ramping up self-sup.')
flags.DEFINE_integer(
'selfsup_step_cycle', int(1e10),
'Number steps until the step counter for self-supervsion is reset.')
flags.DEFINE_integer('shuffle_buffer_size', 1024,
'Shuffle buffer size for training.')
flags.DEFINE_integer('height', 640, 'Image height for training and evaluation.')
flags.DEFINE_integer('width', 640, 'Image width for training and evaluation.')
flags.DEFINE_bool('crop_instead_of_resize', False, 'Crops images for training '
'instead of resizing the images.')
flags.DEFINE_integer('seq_len', 2, 'Sequence length for training flow.')
flags.DEFINE_integer('batch_size', 1, 'Batch size for training flow on '
'gpu.')
flags.DEFINE_string('optimizer', 'adam', 'One of "adam", "sgd"')
flags.DEFINE_float('gpu_learning_rate', 1e-4, 'Learning rate for training '
'UFlow on GPU.')
flags.DEFINE_integer('lr_decay_after_num_steps', 0, '')
flags.DEFINE_integer('lr_decay_steps', 0, '')
flags.DEFINE_string('lr_decay_type', 'none',
'One of ["none", "exponential", "linear", "gaussian"]')
flags.DEFINE_bool(
'stop_gradient_mask', True, 'Whether or not to stop the '
'gradient propagation through the occlusion mask.')
flags.DEFINE_integer('num_occlusion_iterations', 1,
'If occlusion estimation is "iterative"')
flags.DEFINE_bool('only_forward', False, '')
# Data augmentation (-> now gin configurable)
flags.DEFINE_string('teacher_image_version', 'original',
'one of original, augmented')
flags.DEFINE_float(
'channel_multiplier', 1.,
'Globally multiply the number of model convolution channels'
    ' by this factor.')
flags.DEFINE_integer('num_levels', 5, 'The number of feature pyramid levels to '
'use.')
flags.DEFINE_bool('use_cost_volume', True, 'Whether or not to compute the '
'cost volume.')
flags.DEFINE_bool(
'use_feature_warp', True, 'Whether or not to warp the '
'model features when computing flow.')
flags.DEFINE_bool(
'accumulate_flow', True, 'Whether or not to predict a flow '
'adjustment on each feature pyramid level.')
flags.DEFINE_integer('level1_num_layers', 3, '')
flags.DEFINE_integer('level1_num_filters', 32, '')
flags.DEFINE_integer('level1_num_1x1', 0, '')
flags.DEFINE_float('dropout_rate', 0.1, 'Amount of level dropout.')
flags.DEFINE_bool('normalize_before_cost_volume', True, '')
flags.DEFINE_bool('original_layer_sizes', False, '')
flags.DEFINE_bool('shared_flow_decoder', False, '')
flags.DEFINE_bool('resize_selfsup', True, '')
flags.DEFINE_integer(
'selfsup_crop_height', 64,
'Number of pixels removed from the image at top and bottom'
    ' for self-supervision.')
flags.DEFINE_integer(
'selfsup_crop_width', 64,
'Number of pixels removed from the image left and right'
    ' for self-supervision.')
flags.DEFINE_integer(
'selfsup_max_shift', 0,
'Number of pixels removed from the image at top and bottom, left and right'
    ' for self-supervision.')
flags.DEFINE_float(
'fb_sigma_teacher', 0.003,
'Forward-backward consistency scaling constant used for self-supervision.')
flags.DEFINE_float(
'fb_sigma_student', 0.03,
'Forward-backward consistency scaling constant used for self-supervision.')
flags.DEFINE_string('selfsup_mask', 'gaussian',
'One of [gaussian, ddflow, advection]')
flags.DEFINE_float('weight_photo', 0.0, 'Weight for photometric loss.')
flags.DEFINE_float('weight_ssim', 0.0, 'Weight for SSIM loss.')
flags.DEFINE_float('weight_census', 1.0, 'Weight for census loss.')
flags.DEFINE_float('weight_smooth1', 0.0, 'Weight for smoothness loss.')
flags.DEFINE_float('weight_smooth2', 2.0, 'Weight for smoothness loss.')
flags.DEFINE_float('smoothness_edge_constant', 150.,
'Edge constant for smoothness loss.')
flags.DEFINE_string('smoothness_edge_weighting', 'exponential',
'One of: gaussian, exponential')
flags.DEFINE_integer('smoothness_at_level', 2, '')
flags.DEFINE_float('weight_selfsup', 0.6, 'Weight for self-supervision loss.')
flags.DEFINE_float('weight_transl_consist', 0.0,
'Weight for loss enforcing uniform source usage.')
# Occlusion estimation parameters
flags.DEFINE_string('occlusion_estimation', 'wang',
'One of: none, brox, wang, uflow')
flags.DEFINE_integer('occ_after_num_steps_brox', 0, '')
flags.DEFINE_integer('occ_after_num_steps_wang', 0, '')
flags.DEFINE_integer('occ_after_num_steps_fb_abs', 0, '')
flags.DEFINE_integer('occ_after_num_steps_forward_collision', 0, '')
flags.DEFINE_integer('occ_after_num_steps_backward_zero', 0, '')
flags.DEFINE_float('occ_weights_fb_abs', 1000.0, '')
flags.DEFINE_float('occ_weights_forward_collision', 1000.0, '')
flags.DEFINE_float('occ_weights_backward_zero', 1000.0, '')
flags.DEFINE_float('occ_thresholds_fb_abs', 1.5, '')
flags.DEFINE_float('occ_thresholds_forward_collision', 0.4, '')
flags.DEFINE_float('occ_thresholds_backward_zero', 0.25, '')
flags.DEFINE_float('occ_clip_max_fb_abs', 10.0, '')
flags.DEFINE_float('occ_clip_max_forward_collision', 5.0, '')
flags.DEFINE_string(
'distance_census', 'ddflow', 'Which type of distance '
'metric to use when computing loss.')
flags.DEFINE_string(
'distance_photo', 'robust_l1', 'Which type of distance '
'metric to use when computing loss.')
flags.DEFINE_bool('use_supervision', False, 'Whether or not to train with '
'a supervised loss.')
flags.DEFINE_bool('resize_gt_flow_supervision', True, 'Whether or not to '
'resize ground truth flow for the supervised loss.')
flags.DEFINE_bool('use_gt_occlusions', False, 'Whether or not to train with '
                  'a ground truth occlusion')
# Gin params are used to specify which augmentations to perform.
flags.DEFINE_multi_string(
'config_file', None,
'Path to a Gin config file. Can be specified multiple times. '
'Order matters, later config files override former ones.')
flags.DEFINE_multi_string(
'gin_bindings', None,
'Newline separated list of Gin parameter bindings. Can be specified '
'multiple times. Overrides config from --config_file.')
| apache-2.0 | 2,380,989,639,241,699,000 | 47.020833 | 80 | 0.679935 | false |
mandeepjadon/python-game | battleship.py | 1 | 1203 | from random import randint
board = []
for x in range(5):
board.append(["O"] * 5)
def print_board(board):
for row in board:
print " ".join(row)
print "Let's play Battleship!"
print_board(board)
def random_row(board):
return randint(0, len(board) - 1)
def random_col(board):
return randint(0, len(board[0]) - 1)
ship_row = random_row(board)
ship_col = random_col(board)
# Everything from here on should go in your for loop!
# Be sure to indent four spaces!
for turn in range(4):
guess_row = int(raw_input("Guess Row:"))
guess_col = int(raw_input("Guess Col:"))
if guess_row == ship_row and guess_col == ship_col:
print "Congratulations! You sunk my battleship!"
break
else:
if (guess_row < 0 or guess_row > 4) or (guess_col < 0 or guess_col > 4):
print "Oops, that's not even in the ocean."
elif(board[guess_row][guess_col] == "X"):
print "You guessed that one already."
else:
print "You missed my battleship!"
board[guess_row][guess_col] = "X"
print_board(board)
if turn==3:
print "Game Over !!"
else :
print "turn number ",turn+1
| mit | -3,335,248,240,708,133,000 | 25.733333 | 80 | 0.594347 | false |
dc3-plaso/dfvfs | dfvfs/resolver/gzip_resolver_helper.py | 1 | 1202 | # -*- coding: utf-8 -*-
"""The gzip file path specification resolver helper implementation."""
# This is necessary to prevent a circular import.
import dfvfs.file_io.gzip_file_io
import dfvfs.vfs.gzip_file_system
from dfvfs.lib import definitions
from dfvfs.resolver import resolver
from dfvfs.resolver import resolver_helper
class GzipResolverHelper(resolver_helper.ResolverHelper):
"""Class that implements the gzip file resolver helper."""
TYPE_INDICATOR = definitions.TYPE_INDICATOR_GZIP
def NewFileObject(self, resolver_context):
"""Creates a new file-like object.
Args:
resolver_context: the resolver context (instance of resolver.Context).
Returns:
The file-like object (instance of file_io.FileIO).
"""
return dfvfs.file_io.gzip_file_io.GzipFile(resolver_context)
def NewFileSystem(self, resolver_context):
"""Creates a new file system object.
Args:
resolver_context: the resolver context (instance of resolver.Context).
Returns:
The file system object (instance of vfs.FileSystem).
"""
return dfvfs.vfs.gzip_file_system.GzipFileSystem(resolver_context)
resolver.Resolver.RegisterHelper(GzipResolverHelper())
| apache-2.0 | 1,747,874,747,060,713,000 | 28.317073 | 76 | 0.739601 | false |
TheAlgorithms/Python | bit_manipulation/binary_twos_complement.py | 1 | 1121 | # Information on 2's complement: https://en.wikipedia.org/wiki/Two%27s_complement
def twos_complement(number: int) -> str:
"""
Take in a negative integer 'number'.
Return the two's complement representation of 'number'.
>>> twos_complement(0)
'0b0'
>>> twos_complement(-1)
'0b11'
>>> twos_complement(-5)
'0b1011'
>>> twos_complement(-17)
'0b101111'
>>> twos_complement(-207)
'0b100110001'
>>> twos_complement(1)
Traceback (most recent call last):
...
ValueError: input must be a negative integer
"""
if number > 0:
raise ValueError("input must be a negative integer")
binary_number_length = len(bin(number)[3:])
twos_complement_number = bin(abs(number) - (1 << binary_number_length))[3:]
twos_complement_number = (
(
"1"
+ "0" * (binary_number_length - len(twos_complement_number))
+ twos_complement_number
)
if number < 0
else "0"
)
return "0b" + twos_complement_number
if __name__ == "__main__":
import doctest
doctest.testmod()
| mit | 2,760,411,776,246,601,700 | 25.069767 | 81 | 0.580731 | false |
jobiols/odoo-argentina | l10n_ar_account/models/res_company.py | 1 | 1884 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
from openerp.addons.account_document.models.res_company import ResCompany
localizations = ResCompany._localization_selection
new_selection = localizations.append(('argentina', 'Argentina'))
ResCompany._localization_selection = new_selection
class ResCompany(models.Model):
_inherit = "res.company"
gross_income_number = fields.Char(
related='partner_id.gross_income_number',
string='Gross Income'
)
gross_income_type = fields.Selection(
related='partner_id.gross_income_type',
string='Gross Income'
)
gross_income_jurisdiction_ids = fields.Many2many(
related='partner_id.gross_income_jurisdiction_ids',
)
start_date = fields.Date(
related='partner_id.start_date',
)
afip_responsability_type_id = fields.Many2one(
related='partner_id.afip_responsability_type_id',
)
company_requires_vat = fields.Boolean(
related='afip_responsability_type_id.company_requires_vat',
readonly=True,
)
# use globally as default so that if child companies are created they
# also use this as default
tax_calculation_rounding_method = fields.Selection(
default='round_globally',
)
@api.onchange('localization')
def change_localization(self):
if self.localization == 'argentina' and not self.country_id:
self.country_id = self.env.ref('base.ar')
# TODO ver si lo movemos a account_document
# journal_ids = fields.One2many(
# 'account.journal',
# 'company_id',
# 'Journals'
# )
| agpl-3.0 | 8,990,268,929,781,817,000 | 34.54717 | 78 | 0.612527 | false |
CiscoSystems/nova | nova/compute/utils.py | 1 | 19238 | # Copyright (c) 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Compute-related Utilities and helpers."""
import itertools
import re
import string
import traceback
from oslo.config import cfg
from nova import block_device
from nova.compute import flavors
from nova.compute import power_state
from nova.compute import task_states
from nova import exception
from nova.network import model as network_model
from nova import notifications
from nova.objects import instance as instance_obj
from nova.objects import instance_fault as instance_fault_obj
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log
from nova.openstack.common import timeutils
from nova import rpc
from nova import utils
from nova.virt import driver
CONF = cfg.CONF
CONF.import_opt('host', 'nova.netconf')
LOG = log.getLogger(__name__)
def exception_to_dict(fault):
"""Converts exceptions to a dict for use in notifications."""
#TODO(johngarbutt) move to nova/exception.py to share with wrap_exception
code = 500
if hasattr(fault, "kwargs"):
code = fault.kwargs.get('code', 500)
# get the message from the exception that was thrown
# if that does not exist, use the name of the exception class itself
try:
message = fault.format_message()
# These exception handlers are broad so we don't fail to log the fault
# just because there is an unexpected error retrieving the message
except Exception:
try:
message = unicode(fault)
except Exception:
message = None
if not message:
message = fault.__class__.__name__
# NOTE(dripton) The message field in the database is limited to 255 chars.
# MySQL silently truncates overly long messages, but PostgreSQL throws an
# error if we don't truncate it.
u_message = unicode(message)[:255]
fault_dict = dict(exception=fault)
fault_dict["message"] = u_message
fault_dict["code"] = code
return fault_dict
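    # Illustrative example (added commentary, not part of the original
    # module): for a hypothetical fault such as
    # exception.InvalidDevicePath(path='/dev/bad') the returned dict would
    # look roughly like
    #     {'exception': <fault>,
    #      'message': u'The supplied device path (/dev/bad) is invalid.',
    #      'code': 400}
    # with the message truncated to 255 characters if necessary.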
def _get_fault_details(exc_info, error_code):
details = ''
if exc_info and error_code == 500:
tb = exc_info[2]
if tb:
details = ''.join(traceback.format_tb(tb))
return unicode(details)
def add_instance_fault_from_exc(context, instance, fault, exc_info=None):
"""Adds the specified fault to the database."""
fault_obj = instance_fault_obj.InstanceFault(context=context)
fault_obj.host = CONF.host
fault_obj.instance_uuid = instance['uuid']
fault_obj.update(exception_to_dict(fault))
code = fault_obj.code
fault_obj.details = _get_fault_details(exc_info, code)
fault_obj.create()
def pack_action_start(context, instance_uuid, action_name):
values = {'action': action_name,
'instance_uuid': instance_uuid,
'request_id': context.request_id,
'user_id': context.user_id,
'project_id': context.project_id,
'start_time': context.timestamp}
return values
def pack_action_finish(context, instance_uuid):
values = {'instance_uuid': instance_uuid,
'request_id': context.request_id,
'finish_time': timeutils.utcnow()}
return values
def pack_action_event_start(context, instance_uuid, event_name):
values = {'event': event_name,
'instance_uuid': instance_uuid,
'request_id': context.request_id,
'start_time': timeutils.utcnow()}
return values
def pack_action_event_finish(context, instance_uuid, event_name, exc_val=None,
exc_tb=None):
values = {'event': event_name,
'instance_uuid': instance_uuid,
'request_id': context.request_id,
'finish_time': timeutils.utcnow()}
if exc_tb is None:
values['result'] = 'Success'
else:
values['result'] = 'Error'
values['message'] = str(exc_val)
values['traceback'] = ''.join(traceback.format_tb(exc_tb))
return values
def get_device_name_for_instance(context, instance, bdms, device):
"""Validates (or generates) a device name for instance.
This method is a wrapper for get_next_device_name that gets the list
of used devices and the root device from a block device mapping.
"""
mappings = block_device.instance_block_mapping(instance, bdms)
return get_next_device_name(instance, mappings.values(),
mappings['root'], device)
def default_device_names_for_instance(instance, root_device_name,
*block_device_lists):
"""Generate missing device names for an instance."""
dev_list = [bdm.device_name
for bdm in itertools.chain(*block_device_lists)
if bdm.device_name]
if root_device_name not in dev_list:
dev_list.append(root_device_name)
for bdm in itertools.chain(*block_device_lists):
dev = bdm.device_name
if not dev:
dev = get_next_device_name(instance, dev_list,
root_device_name)
bdm.device_name = dev
bdm.save()
dev_list.append(dev)
def get_next_device_name(instance, device_name_list,
root_device_name=None, device=None):
"""Validates (or generates) a device name for instance.
If device is not set, it will generate a unique device appropriate
for the instance. It uses the root_device_name (if provided) and
the list of used devices to find valid device names. If the device
name is valid but applicable to a different backend (for example
/dev/vdc is specified but the backend uses /dev/xvdc), the device
name will be converted to the appropriate format.
"""
req_prefix = None
req_letter = None
if device:
try:
req_prefix, req_letter = block_device.match_device(device)
except (TypeError, AttributeError, ValueError):
raise exception.InvalidDevicePath(path=device)
if not root_device_name:
root_device_name = block_device.DEFAULT_ROOT_DEV_NAME
try:
prefix = block_device.match_device(root_device_name)[0]
except (TypeError, AttributeError, ValueError):
raise exception.InvalidDevicePath(path=root_device_name)
# NOTE(vish): remove this when xenapi is setting default_root_device
if driver.compute_driver_matches('xenapi.XenAPIDriver'):
prefix = '/dev/xvd'
if req_prefix != prefix:
LOG.debug("Using %(prefix)s instead of %(req_prefix)s",
{'prefix': prefix, 'req_prefix': req_prefix})
used_letters = set()
for device_path in device_name_list:
letter = block_device.strip_prefix(device_path)
# NOTE(vish): delete numbers in case we have something like
# /dev/sda1
        letter = re.sub(r"\d+", "", letter)
used_letters.add(letter)
# NOTE(vish): remove this when xenapi is properly setting
# default_ephemeral_device and default_swap_device
if driver.compute_driver_matches('xenapi.XenAPIDriver'):
flavor = flavors.extract_flavor(instance)
if flavor['ephemeral_gb']:
used_letters.add('b')
if flavor['swap']:
used_letters.add('c')
if not req_letter:
req_letter = _get_unused_letter(used_letters)
if req_letter in used_letters:
raise exception.DevicePathInUse(path=device)
return prefix + req_letter
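    # Usage sketch (added commentary; values are hypothetical): for an
    # instance whose root device is '/dev/vda' and whose used device list is
    # ['/dev/vda', '/dev/vdb'],
    #     get_next_device_name(instance, ['/dev/vda', '/dev/vdb'], '/dev/vda')
    # returns '/dev/vdc', while passing device='/dev/vdb' raises
    # DevicePathInUse because that name is already taken.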
def _get_unused_letter(used_letters):
doubles = [first + second for second in string.ascii_lowercase
for first in string.ascii_lowercase]
all_letters = set(list(string.ascii_lowercase) + doubles)
letters = list(all_letters - used_letters)
# NOTE(vish): prepend ` so all shorter sequences sort first
letters.sort(key=lambda x: x.rjust(2, '`'))
return letters[0]
def get_image_metadata(context, image_service, image_id, instance):
# If the base image is still available, get its metadata
try:
image = image_service.show(context, image_id)
except Exception as e:
LOG.warning(_("Can't access image %(image_id)s: %(error)s"),
{"image_id": image_id, "error": e}, instance=instance)
image_system_meta = {}
else:
flavor = flavors.extract_flavor(instance)
image_system_meta = utils.get_system_metadata_from_image(image, flavor)
# Get the system metadata from the instance
system_meta = utils.instance_sys_meta(instance)
# Merge the metadata from the instance with the image's, if any
system_meta.update(image_system_meta)
# Convert the system metadata to image metadata
return utils.get_image_from_system_metadata(system_meta)
def notify_usage_exists(notifier, context, instance_ref, current_period=False,
ignore_missing_network_data=True,
system_metadata=None, extra_usage_info=None):
"""Generates 'exists' notification for an instance for usage auditing
purposes.
:param notifier: a messaging.Notifier
:param current_period: if True, this will generate a usage for the
current usage period; if False, this will generate a usage for the
previous audit period.
:param ignore_missing_network_data: if True, log any exceptions generated
while getting network info; if False, raise the exception.
:param system_metadata: system_metadata DB entries for the instance,
if not None. *NOTE*: Currently unused here in trunk, but needed for
potential custom modifications.
:param extra_usage_info: Dictionary containing extra values to add or
override in the notification if not None.
"""
audit_start, audit_end = notifications.audit_period_bounds(current_period)
bw = notifications.bandwidth_usage(instance_ref, audit_start,
ignore_missing_network_data)
if system_metadata is None:
system_metadata = utils.instance_sys_meta(instance_ref)
# add image metadata to the notification:
image_meta = notifications.image_meta(system_metadata)
extra_info = dict(audit_period_beginning=str(audit_start),
audit_period_ending=str(audit_end),
bandwidth=bw, image_meta=image_meta)
if extra_usage_info:
extra_info.update(extra_usage_info)
notify_about_instance_usage(notifier, context, instance_ref, 'exists',
system_metadata=system_metadata, extra_usage_info=extra_info)
def notify_about_instance_usage(notifier, context, instance, event_suffix,
network_info=None, system_metadata=None,
extra_usage_info=None, fault=None):
"""Send a notification about an instance.
:param notifier: a messaging.Notifier
:param event_suffix: Event type like "delete.start" or "exists"
:param network_info: Networking information, if provided.
:param system_metadata: system_metadata DB entries for the instance,
if provided.
:param extra_usage_info: Dictionary containing extra values to add or
override in the notification.
"""
if not extra_usage_info:
extra_usage_info = {}
usage_info = notifications.info_from_instance(context, instance,
network_info, system_metadata, **extra_usage_info)
if fault:
# NOTE(johngarbutt) mirrors the format in wrap_exception
fault_payload = exception_to_dict(fault)
LOG.debug(fault_payload["message"], instance=instance,
exc_info=True)
usage_info.update(fault_payload)
if event_suffix.endswith("error"):
method = notifier.error
else:
method = notifier.info
method(context, 'compute.instance.%s' % event_suffix, usage_info)
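    # Usage sketch (added commentary; notifier, context and instance are
    # assumed to exist): emitting the start notification of a delete
    # operation would look like
    #     notify_about_instance_usage(notifier, context, instance,
    #                                 'delete.start')
    # which publishes the event type 'compute.instance.delete.start'.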
def notify_about_aggregate_update(context, event_suffix, aggregate_payload):
"""Send a notification about aggregate update.
:param event_suffix: Event type like "create.start" or "create.end"
:param aggregate_payload: payload for aggregate update
"""
aggregate_identifier = aggregate_payload.get('aggregate_id', None)
if not aggregate_identifier:
aggregate_identifier = aggregate_payload.get('name', None)
if not aggregate_identifier:
LOG.debug("No aggregate id or name specified for this "
"notification and it will be ignored")
return
notifier = rpc.get_notifier(service='aggregate',
host=aggregate_identifier)
notifier.info(context, 'aggregate.%s' % event_suffix, aggregate_payload)
def notify_about_host_update(context, event_suffix, host_payload):
"""Send a notification about host update.
:param event_suffix: Event type like "create.start" or "create.end"
:param host_payload: payload for host update. It is a dict and there
should be at least the 'host_name' key in this
dict.
"""
host_identifier = host_payload.get('host_name')
if not host_identifier:
LOG.warn(_("No host name specified for the notification of "
"HostAPI.%s and it will be ignored"), event_suffix)
return
notifier = rpc.get_notifier(service='api', host=host_identifier)
notifier.info(context, 'HostAPI.%s' % event_suffix, host_payload)
def get_nw_info_for_instance(instance):
if isinstance(instance, instance_obj.Instance):
if instance.info_cache is None:
return network_model.NetworkInfo.hydrate([])
return instance.info_cache.network_info
# FIXME(comstud): Transitional while we convert to objects.
info_cache = instance['info_cache'] or {}
nw_info = info_cache.get('network_info') or []
if not isinstance(nw_info, network_model.NetworkInfo):
nw_info = network_model.NetworkInfo.hydrate(nw_info)
return nw_info
def has_audit_been_run(context, conductor, host, timestamp=None):
begin, end = utils.last_completed_audit_period(before=timestamp)
task_log = conductor.task_log_get(context, "instance_usage_audit",
begin, end, host)
if task_log:
return True
else:
return False
def start_instance_usage_audit(context, conductor, begin, end, host,
num_instances):
conductor.task_log_begin_task(context, "instance_usage_audit", begin,
end, host, num_instances,
"Instance usage audit started...")
def finish_instance_usage_audit(context, conductor, begin, end, host, errors,
message):
conductor.task_log_end_task(context, "instance_usage_audit", begin, end,
host, errors, message)
def usage_volume_info(vol_usage):
def null_safe_str(s):
return str(s) if s else ''
tot_refreshed = vol_usage['tot_last_refreshed']
curr_refreshed = vol_usage['curr_last_refreshed']
if tot_refreshed and curr_refreshed:
last_refreshed_time = max(tot_refreshed, curr_refreshed)
elif tot_refreshed:
last_refreshed_time = tot_refreshed
else:
# curr_refreshed must be set
last_refreshed_time = curr_refreshed
usage_info = dict(
volume_id=vol_usage['volume_id'],
tenant_id=vol_usage['project_id'],
user_id=vol_usage['user_id'],
availability_zone=vol_usage['availability_zone'],
instance_id=vol_usage['instance_uuid'],
last_refreshed=null_safe_str(last_refreshed_time),
reads=vol_usage['tot_reads'] + vol_usage['curr_reads'],
read_bytes=vol_usage['tot_read_bytes'] +
vol_usage['curr_read_bytes'],
writes=vol_usage['tot_writes'] + vol_usage['curr_writes'],
write_bytes=vol_usage['tot_write_bytes'] +
vol_usage['curr_write_bytes'])
return usage_info
def get_reboot_type(task_state, current_power_state):
"""Checks if the current instance state requires a HARD reboot."""
if current_power_state != power_state.RUNNING:
return 'HARD'
soft_types = [task_states.REBOOT_STARTED, task_states.REBOOT_PENDING,
task_states.REBOOTING]
reboot_type = 'SOFT' if task_state in soft_types else 'HARD'
return reboot_type
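    # Examples (commentary only): a soft reboot request is escalated to HARD
    # whenever the instance is not running, e.g.
    #     get_reboot_type(task_states.REBOOTING, power_state.SHUTDOWN)  # 'HARD'
    #     get_reboot_type(task_states.REBOOTING, power_state.RUNNING)   # 'SOFT'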
class EventReporter(object):
"""Context manager to report instance action events."""
def __init__(self, context, conductor, event_name, *instance_uuids):
self.context = context
self.conductor = conductor
self.event_name = event_name
self.instance_uuids = instance_uuids
def __enter__(self):
for uuid in self.instance_uuids:
event = pack_action_event_start(self.context, uuid,
self.event_name)
self.conductor.action_event_start(self.context, event)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
for uuid in self.instance_uuids:
event = pack_action_event_finish(self.context, uuid,
self.event_name, exc_val, exc_tb)
self.conductor.action_event_finish(self.context, event)
return False
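    # Usage sketch (added commentary; the event name and uuid are made up):
    # the context manager records start/finish action events around a block
    # of work, e.g.
    #     with EventReporter(context, conductor, 'compute_reboot_instance',
    #                        instance.uuid):
    #         do_reboot(instance)
    # On an exception the finish event is recorded with result 'Error' plus
    # the traceback, and the exception propagates (__exit__ returns False).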
def periodic_task_spacing_warn(config_option_name):
"""Decorator to warn about an upcoming breaking change in methods which
use the @periodic_task decorator.
Some methods using the @periodic_task decorator specify spacing=0 or
None to mean "do not call this method", but the decorator itself uses
0/None to mean "call at the default rate".
Starting with the K release the Nova methods will be changed to conform
to the Oslo decorator. This decorator should be present wherever a
spacing value from user-supplied config is passed to @periodic_task, and
there is also a check to skip the method if the value is zero. It will
log a warning if the spacing value from config is 0/None.
"""
# TODO(gilliard) remove this decorator, its usages and the early returns
# near them after the K release.
def wrapper(f):
if (hasattr(f, "_periodic_spacing") and
(f._periodic_spacing == 0 or f._periodic_spacing is None)):
LOG.warning(_("Value of 0 or None specified for %s."
" This behaviour will change in meaning in the K release, to"
" mean 'call at the default rate' rather than 'do not call'."
" To keep the 'do not call' behaviour, use a negative value."),
config_option_name)
return f
return wrapper
| apache-2.0 | 2,790,141,694,222,280,700 | 36.870079 | 79 | 0.647001 | false |
fabiocorneti/xlpo | tests/readers/test_xlsx_reader.py | 1 | 3567 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from xlpo.readers import XLSXTranslationsReader
from tests.base import BaseTestCase
import os
import unittest
class TestXLSXTranslationsReader(BaseTestCase):
def test_valid_files(self):
xlsx_file = os.path.join(self.FILES_DIR, 'en_it__noheaders.xlsx')
reader = XLSXTranslationsReader(xlsx_file)
self.assertEqual(len(reader), 2)
self.assertEqual(reader[0].message, 'Hello')
self.assertEqual(reader[0].translation, 'Ciao')
self.assertEqual(reader[1].message, 'Yes')
self.assertEqual(reader[1].translation, 'Sì')
def test_invalid_files(self):
invalid_file = os.path.join(self.FILES_DIR, 'not_an_xlsx.xlsx')
reader = XLSXTranslationsReader(invalid_file)
self.assertRaises(IOError, lambda: reader.read())
invalid_file = os.path.join(self.FILES_DIR, 'not_an_xlsx.txt')
reader = XLSXTranslationsReader(invalid_file)
self.assertRaises(IOError, lambda: reader.read())
def test_duplicate_messages(self):
xlsx_file = os.path.join(self.FILES_DIR, 'duplicates.xlsx')
reader = XLSXTranslationsReader(xlsx_file)
self.assertRaises(Exception, lambda: reader.read())
def test_invalid_filenames(self):
self.assertRaises(Exception, lambda: XLSXTranslationsReader(None))
reader = XLSXTranslationsReader('not_here')
self.assertRaises(IOError, lambda: reader.read())
reader = XLSXTranslationsReader(self.FILES_DIR)
self.assertRaises(IOError, lambda: reader.read())
def test_invalid_indexes(self):
xlsx_file = os.path.join(self.FILES_DIR, 'en_it__noheaders.xlsx')
self.assertRaises(Exception,
lambda: XLSXTranslationsReader(xlsx_file, sheet=-1))
self.assertRaises(Exception,
lambda: XLSXTranslationsReader(xlsx_file, sheet='a'))
reader = XLSXTranslationsReader(xlsx_file, sheet=1)
self.assertRaises(IOError, lambda: reader.read())
self.assertRaises(Exception,
lambda: XLSXTranslationsReader(xlsx_file,
messages_col=-1))
self.assertRaises(Exception,
lambda: XLSXTranslationsReader(xlsx_file,
messages_col='a'))
reader = XLSXTranslationsReader(xlsx_file, messages_col=3)
self.assertRaises(IOError, lambda: reader.read())
self.assertRaises(Exception,
lambda: XLSXTranslationsReader(
xlsx_file,
translations_col=-1))
self.assertRaises(Exception,
lambda: XLSXTranslationsReader(xlsx_file,
translations_col='a'))
reader = XLSXTranslationsReader(xlsx_file, translations_col=3)
self.assertRaises(IOError, lambda: reader.read())
def test_caching(self):
xlsx_file = os.path.join(self.FILES_DIR, 'en_it__noheaders.xlsx')
reader = XLSXTranslationsReader(xlsx_file)
reader.read()
self.assertTrue(reader._translations is not None)
t1 = reader._translations
self.assertEqual(len(t1), 2)
reader.read()
t2 = reader._translations
self.assertTrue(t1 is t2)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 4,593,761,699,565,382,700 | 36.93617 | 79 | 0.606282 | false |
googlefonts/color-fonts | config/more_samples-glyf_colr_1.py | 1 | 6716 | """Compile samples that are infeasible or difficult by svg compilation.
"""
import datetime
from pathlib import Path
from fontTools import fontBuilder
from fontTools import ttLib
from fontTools.colorLib import builder as colorBuilder
from fontTools.pens.ttGlyphPen import TTGlyphPen
from fontTools.ttLib.tables._g_l_y_f import Glyph
import sys
from typing import Any, Mapping, NamedTuple, Optional
from fontTools.ttLib.tables import otTables as ot
from nanoemoji.colors import css_colors, Color
from fontTools.misc.transform import Transform
_UPEM = 1000
_ASCENT = 950
_DESCENT = 250
_FAMILY = "More COLR v1 Samples"
_STYLE = "Regular"
_PALETTE = {} # <3 mutable globals
class SampleGlyph(NamedTuple):
glyph_name: str
accessor: str
advance: int
glyph: Glyph
colr: Optional[Mapping[str, Any]] = None
def _cpal(color_str):
color = Color.fromstring(color_str).to_ufo_color()
if color not in _PALETTE:
_PALETTE[color] = len(_PALETTE)
return _PALETTE[color]
def _sample_sweep():
glyph_name = "sweep"
pen = TTGlyphPen(None)
pen.moveTo((100, 500))
pen.qCurveTo((500, 1000), (900, 500))
pen.qCurveTo((500, 0), (100, 500))
pen.closePath()
colr = {
"Format": ot.PaintFormat.PaintGlyph,
"Glyph": glyph_name,
"Paint": {
"Format": ot.PaintFormat.PaintSweepGradient,
"ColorLine": {
"ColorStop": [
(0.0, _cpal("red")),
(0.5, _cpal("yellow")),
(1.0, _cpal("red")),
]
},
"centerX": 500,
"centerY": 500,
"startAngle": 0,
"endAngle": 360,
},
}
return SampleGlyph(
glyph_name=glyph_name, accessor="c", advance=_UPEM, glyph=pen.glyph(), colr=colr
)
def _sample_colr_glyph():
glyph_name = "transformed_sweep"
# Paint the sweep shifted and rotated
colr = {
"Format": ot.PaintFormat.PaintTranslate,
"dx": 250,
"dy": 0,
"Paint": {
"Format": ot.PaintFormat.PaintRotate,
"centerX": _UPEM / 2,
"centerY": _UPEM / 2,
"angle": 60,
"Paint": {
"Format": ot.PaintFormat.PaintColrGlyph,
"Glyph": "sweep",
},
},
}
pen = TTGlyphPen(None)
pen.moveTo((0, 0))
pen.lineTo((_UPEM, _UPEM))
pen.endPath()
return SampleGlyph(
glyph_name=glyph_name, accessor="t", advance=_UPEM, glyph=pen.glyph(), colr=colr
)
def _sample_composite_colr_glyph():
glyph_name = "composite_colr_glyph"
# Scale down the sweep and use it to cut a hole in the sweep
# Transforms combine f(g(x)); build up backwards
t = Transform(dx=-500, dy=-500) # move to origin
t = Transform(xx=0.75, yy=0.75).transform(t)
t = Transform(dx=500, dy=500).transform(t)
t = tuple(t)
colr = {
"Format": ot.PaintFormat.PaintComposite,
"CompositeMode": "SRC_OUT",
"SourcePaint": {
"Format": ot.PaintFormat.PaintColrGlyph,
"Glyph": "sweep",
},
"BackdropPaint": {
"Format": ot.PaintFormat.PaintTransform,
"Paint": {
"Format": ot.PaintFormat.PaintColrGlyph,
"Glyph": "sweep",
},
"Transform": t,
},
}
pen = TTGlyphPen(None)
pen.moveTo((0, 0))
pen.lineTo((_UPEM, _UPEM))
pen.endPath()
return SampleGlyph(
glyph_name=glyph_name, accessor="o", advance=_UPEM, glyph=pen.glyph(), colr=colr
)
def _gradient_stops_repeat(first_stop, second_stop, accessor_char):
glyph_name = f"linear_repeat_{first_stop}_{second_stop}"
pen = TTGlyphPen(None)
pen.moveTo((100, 250))
pen.lineTo((100, 950))
pen.lineTo((900, 950))
pen.lineTo((900, 250))
pen.closePath()
colr = {
"Format": ot.PaintFormat.PaintGlyph,
"Glyph": glyph_name,
"Paint": {
"Format": ot.PaintFormat.PaintLinearGradient,
"ColorLine": {
"ColorStop": [
(first_stop, _cpal("red")),
(second_stop, _cpal("blue")),
],
"Extend": ot.ExtendMode.REPEAT,
},
"x0": 100,
"y0": 250,
"x1": 900,
"y1": 250,
"x2": 100,
"y2": 300,
},
}
return SampleGlyph(
glyph_name=glyph_name,
accessor=accessor_char,
advance=_UPEM,
glyph=pen.glyph(),
colr=colr,
)
def main():
assert len(sys.argv) == 2
build_dir = Path(sys.argv[1])
build_dir.mkdir(exist_ok=True)
out_file = (build_dir / _FAMILY.replace(" ", "")).with_suffix(".ttf")
version = datetime.datetime.now().isoformat()
names = {
"familyName": _FAMILY,
"styleName": _STYLE,
"uniqueFontIdentifier": " ".join((_FAMILY, version)),
"fullName": " ".join((_FAMILY, _STYLE)),
"version": version,
"psName": "-".join((_FAMILY.replace(" ", ""), _STYLE)),
}
glyphs = [
SampleGlyph(glyph_name=".notdef", accessor="", advance=600, glyph=Glyph()),
SampleGlyph(glyph_name=".null", accessor="", advance=0, glyph=Glyph()),
_sample_sweep(),
_sample_colr_glyph(),
_sample_composite_colr_glyph(),
_gradient_stops_repeat(0, 1, "p"),
_gradient_stops_repeat(0.2, 0.8, "q"),
_gradient_stops_repeat(0, 1.5, "r"),
_gradient_stops_repeat(0.5, 1.5, "s"),
]
fb = fontBuilder.FontBuilder(_UPEM)
fb.setupGlyphOrder([g.glyph_name for g in glyphs])
fb.setupCharacterMap(
{ord(g.accessor): g.glyph_name for g in glyphs if len(g.accessor) == 1}
)
fb.setupGlyf({g.glyph_name: g.glyph for g in glyphs})
fb.setupHorizontalMetrics({g.glyph_name: (_UPEM, g.glyph.xMin) for g in glyphs})
fb.setupHorizontalHeader(ascent=_ASCENT, descent=-_DESCENT)
fb.setupOS2(sTypoAscender=_ASCENT, usWinAscent=_ASCENT, usWinDescent=_DESCENT)
fb.setupNameTable(names)
fb.setupPost()
fb.font["head"].xMin = 0
fb.font["head"].yMin = -_DESCENT
fb.font["head"].xMax = _UPEM
fb.font["head"].yMax = _ASCENT
fb.font["OS/2"].fsType = 0
fb.font["OS/2"].version = 4
fb.font["OS/2"].fsSelection |= 1 << 7
fb.font["hhea"].advanceWidthMax = _UPEM
fb.font["COLR"] = colorBuilder.buildCOLR(
{g.glyph_name: g.colr for g in glyphs if g.colr}
)
fb.font["CPAL"] = colorBuilder.buildCPAL([list(_PALETTE)])
fb.save(out_file)
print(f"Wrote {out_file}")
if __name__ == "__main__":
main()
| apache-2.0 | -2,336,679,535,853,925,400 | 26.983333 | 88 | 0.558368 | false |
donspaulding/adspygoogle | examples/adspygoogle/adwords/v201302/optimization/get_placement_ideas.py | 1 | 2587 | #!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example retrieves urls that have content keywords related to a given
website.
Tags: TargetingIdeaService.get
"""
__author__ = '[email protected] (Kevin Winter)'
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import AdWordsClient
PAGE_SIZE = 100
def main(client):
# Initialize appropriate service.
targeting_idea_service = client.GetTargetingIdeaService(version='v201302')
# Construct selector object and retrieve related placements.
offset = 0
url = 'http://mars.google.com'
selector = {
'searchParameters': [{
'xsi_type': 'RelatedToUrlSearchParameter',
'urls': [url],
'includeSubUrls': 'false'
}],
'ideaType': 'PLACEMENT',
'requestType': 'IDEAS',
'requestedAttributeTypes': ['SAMPLE_URL'],
'paging': {
'startIndex': str(offset),
'numberResults': str(PAGE_SIZE)
}
}
more_pages = True
while more_pages:
page = targeting_idea_service.Get(selector)[0]
# Display results.
if 'entries' in page:
for result in page['entries']:
result = result['data'][0]['value']
print ('Related content keywords were found at \'%s\' url.'
% result['value'])
print
print ('Total urls found with content keywords related to keywords at '
'\'%s\': %s' % (url, page['totalNumEntries']))
else:
print 'No content keywords were found at \'%s\'.' % url
offset += PAGE_SIZE
selector['paging']['startIndex'] = str(offset)
more_pages = offset < int(page['totalNumEntries'])
print
print ('Usage: %s units, %s operations' % (client.GetUnits(),
client.GetOperations()))
if __name__ == '__main__':
# Initialize client object.
client = AdWordsClient(path=os.path.join('..', '..', '..', '..', '..'))
main(client)
| apache-2.0 | 1,005,778,109,689,792,100 | 29.435294 | 77 | 0.637031 | false |
mganeva/mantid | scripts/test/AbinsIOmoduleTest.py | 1 | 4979 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.simpleapi import logger
import numpy as np
from AbinsModules import IOmodule, AbinsTestHelpers
class AbinsIOmoduleTest(unittest.TestCase):
def tearDown(self):
AbinsTestHelpers.remove_output_files(list_of_names=["Cars", "temphgfrt"])
# noinspection PyMethodMayBeStatic
def _save_stuff(self):
saver = IOmodule(input_filename="Cars.foo", group_name="Volksvagen")
# add some attributes
saver.add_attribute("Fuel", 100)
saver.add_attribute("Speed", 200)
# add some datasets
saver.add_data("Passengers", np.array([4]))
saver.add_data("FireExtinguishers", np.array([2]))
        # add some more complex data sets
wheels = [{"Winter": False, "Punctured": False, "Brand": "Mercedes", "Age": 2},
{"Winter": False, "Punctured": False, "Brand": "Mercedes", "Age": 3},
{"Winter": False, "Punctured": False, "Brand": "Mercedes", "Age": 5},
{"Winter": False, "Punctured": True, "Brand": "Mercedes", "Age": 7}]
chairs = {"AdjustableHeadrests": True, "ExtraPadding": True}
saver.add_data("wheels", wheels)
saver.add_data("chairs", chairs)
# save attributes and datasets
saver.save()
def _save_wrong_attribute(self):
poor_saver = IOmodule(input_filename="BadCars.foo", group_name="Volksvagen")
poor_saver.add_attribute("BadPassengers", np.array([4]))
self.assertRaises(ValueError, poor_saver.save)
def _save_wrong_dataset(self):
poor_saver = IOmodule(input_filename="BadCars.foo", group_name="Volksvagen")
poor_saver.add_data("BadPassengers", 4)
self.assertRaises(ValueError, poor_saver.save)
def _wrong_filename(self):
self.assertRaises(ValueError, IOmodule, input_filename=1, group_name="goodgroup")
def _wrong_groupname(self):
self.assertRaises(ValueError, IOmodule, input_filename="goodfile", group_name=1)
def _wrong_file(self):
poor_loader = IOmodule(input_filename="bumCars", group_name="nice_group")
self.assertRaises(IOError, poor_loader.load, list_of_attributes="one_attribute")
def _loading_attributes(self):
data = self.loader.load(list_of_attributes=["Fuel", "Speed"])
attr_data = data["attributes"]
self.assertEqual(100, attr_data["Fuel"])
self.assertEqual(200, attr_data["Speed"])
self.assertRaises(ValueError, self.loader.load, list_of_attributes=["NiceFuel"])
self.assertRaises(ValueError, self.loader.load, list_of_attributes=1)
self.assertRaises(ValueError, self.loader.load, list_of_attributes=[1, "Speed"])
def _loading_datasets(self):
data = self.loader.load(list_of_datasets=["Passengers", "FireExtinguishers"])
self.assertEqual(np.array([4]), data["datasets"]["Passengers"])
self.assertEqual(np.array([2]), data["datasets"]["FireExtinguishers"])
self.assertRaises(ValueError, self.loader.load, list_of_datasets=["NicePassengers"])
self.assertRaises(ValueError, self.loader.load, list_of_datasets=1)
self.assertRaises(ValueError, self.loader.load, list_of_datasets=[1, "Passengers"])
def _loading_structured_datasets(self):
"""
Loads more complicated data from the hdf file.
"""
data = self.loader.load(list_of_datasets=["wheels", "chairs"])
self.assertEqual([{"Winter": False, "Punctured": False, "Brand": "Mercedes", "Age": 2},
{"Winter": False, "Punctured": False, "Brand": "Mercedes", "Age": 3},
{"Winter": False, "Punctured": False, "Brand": "Mercedes", "Age": 5},
{"Winter": False, "Punctured": True, "Brand": "Mercedes", "Age": 7}],
data["datasets"]["wheels"])
self.assertEqual({"AdjustableHeadrests": True, "ExtraPadding": True},
data["datasets"]["chairs"])
self.assertRaises(ValueError, self.loader.load, list_of_datasets=["WrongDataSet"])
self.assertRaises(ValueError, self.loader.load, list_of_datasets=1)
def runTest(self):
self._save_stuff()
self._save_wrong_attribute()
self._save_wrong_dataset()
self.loader = IOmodule(input_filename="Cars.foo", group_name="Volksvagen")
self._wrong_filename()
self._wrong_groupname()
self._wrong_file()
self._loading_attributes()
self._loading_datasets()
self._loading_structured_datasets()
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -7,953,543,048,989,678,000 | 40.14876 | 97 | 0.630247 | false |
danakj/chromium | mojo/public/tools/bindings/generators/mojom_cpp_generator.py | 1 | 20410 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates C++ source files from a mojom.Module."""
import mojom.generate.generator as generator
import mojom.generate.module as mojom
import mojom.generate.pack as pack
from mojom.generate.template_expander import UseJinja
_kind_to_cpp_type = {
mojom.BOOL: "bool",
mojom.INT8: "int8_t",
mojom.UINT8: "uint8_t",
mojom.INT16: "int16_t",
mojom.UINT16: "uint16_t",
mojom.INT32: "int32_t",
mojom.UINT32: "uint32_t",
mojom.FLOAT: "float",
mojom.INT64: "int64_t",
mojom.UINT64: "uint64_t",
mojom.DOUBLE: "double",
}
_kind_to_cpp_literal_suffix = {
mojom.UINT8: "U",
mojom.UINT16: "U",
mojom.UINT32: "U",
mojom.FLOAT: "f",
mojom.UINT64: "ULL",
}
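# Added definition: size in bytes of a serialized mojo struct header
# (num_bytes + version). The "struct_size" filter below references it; the
# value 8 matches mojo's packing rules but should be treated as an assumption.
_HEADER_SIZE = 8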
# TODO(rockot): Get rid of these globals. This requires some refactoring of the
# generator library code so that filters can use the generator as context.
_current_typemap = {}
_for_blink = False
_use_new_wrapper_types = False
# TODO(rockot, yzshen): The variant handling is kind of a hack currently. Make
# it right.
_variant = None
class _NameFormatter(object):
"""A formatter for the names of kinds or values."""
def __init__(self, token, variant):
self._token = token
self._variant = variant
def Format(self, separator, prefixed=False, internal=False,
include_variant=False, add_same_module_namespaces=False):
parts = []
if self._ShouldIncludeNamespace(add_same_module_namespaces):
if prefixed:
parts.append("")
parts.extend(self._GetNamespace())
if include_variant and self._variant:
parts.append(self._variant)
parts.extend(self._GetName(internal))
return separator.join(parts)
def FormatForCpp(self, add_same_module_namespaces=False, internal=False):
return self.Format(
"::", prefixed=True,
add_same_module_namespaces=add_same_module_namespaces,
internal=internal, include_variant=True)
def FormatForMojom(self):
return self.Format(".", add_same_module_namespaces=True)
def _MapKindName(self, token, internal):
if not internal:
return token.name
if (mojom.IsStructKind(token) or mojom.IsUnionKind(token) or
mojom.IsInterfaceKind(token) or mojom.IsEnumKind(token)):
return token.name + "_Data"
return token.name
def _GetName(self, internal):
name = []
if internal:
name.append("internal")
if self._token.parent_kind:
name.append(self._MapKindName(self._token.parent_kind, internal))
# Both variable and enum constants are constructed like:
# Namespace::Struct::CONSTANT_NAME
# For enums, CONSTANT_NAME is EnumName::ENUM_VALUE.
if isinstance(self._token, mojom.EnumValue):
name.extend([self._token.enum.name, self._token.name])
else:
name.append(self._MapKindName(self._token, internal))
return name
def _ShouldIncludeNamespace(self, add_same_module_namespaces):
return add_same_module_namespaces or self._token.imported_from
def _GetNamespace(self):
if self._token.imported_from:
return NamespaceToArray(self._token.imported_from["namespace"])
elif hasattr(self._token, "module"):
return NamespaceToArray(self._token.module.namespace)
return []
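# Illustrative examples (added as a sketch, not taken from the generator's
# tests): for a struct Foo defined in mojom module my.module,
# _NameFormatter(foo, None).FormatForMojom() yields "my.module.Foo", while
# FormatForCpp(add_same_module_namespaces=True) yields "::my::module::Foo"
# (with the variant name inserted before "Foo" when a variant is set).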
def ConstantValue(constant):
return ExpressionToText(constant.value, kind=constant.kind)
# TODO(yzshen): Revisit the default value feature. It was designed prior to
# custom type mapping.
def DefaultValue(field):
if field.default:
if mojom.IsStructKind(field.kind):
assert field.default == "default"
if not IsTypemappedKind(field.kind):
return "%s::New()" % GetNameForKind(field.kind)
return ExpressionToText(field.default, kind=field.kind)
if not _use_new_wrapper_types:
if mojom.IsArrayKind(field.kind) or mojom.IsMapKind(field.kind):
return "nullptr";
if mojom.IsStringKind(field.kind):
return "" if _for_blink else "nullptr"
return ""
def NamespaceToArray(namespace):
return namespace.split(".") if namespace else []
def GetNameForKind(kind, internal=False):
return _NameFormatter(kind, _variant).FormatForCpp(internal=internal)
def GetQualifiedNameForKind(kind, internal=False):
return _NameFormatter(kind, _variant).FormatForCpp(
internal=internal, add_same_module_namespaces=True)
def GetFullMojomNameForKind(kind):
return _NameFormatter(kind, _variant).FormatForMojom()
def IsTypemappedKind(kind):
return hasattr(kind, "name") and \
GetFullMojomNameForKind(kind) in _current_typemap
def IsNativeOnlyKind(kind):
return (mojom.IsStructKind(kind) or mojom.IsEnumKind(kind)) and \
kind.native_only
def GetNativeTypeName(typemapped_kind):
return _current_typemap[GetFullMojomNameForKind(typemapped_kind)]["typename"]
def GetCppPodType(kind):
if mojom.IsStringKind(kind):
return "char*"
return _kind_to_cpp_type[kind]
def GetCppWrapperType(kind):
def _AddOptional(type_name):
pattern = "WTF::Optional<%s>" if _for_blink else "base::Optional<%s>"
return pattern % type_name
if IsTypemappedKind(kind):
type_name = GetNativeTypeName(kind)
if (mojom.IsNullableKind(kind) and
not _current_typemap[GetFullMojomNameForKind(kind)][
"nullable_is_same_type"]):
type_name = _AddOptional(type_name)
return type_name
if mojom.IsEnumKind(kind):
return GetNameForKind(kind)
if mojom.IsStructKind(kind) or mojom.IsUnionKind(kind):
return "%sPtr" % GetNameForKind(kind)
if mojom.IsArrayKind(kind):
pattern = None
if _use_new_wrapper_types:
pattern = "WTF::Vector<%s>" if _for_blink else "std::vector<%s>"
if mojom.IsNullableKind(kind):
pattern = _AddOptional(pattern)
else:
pattern = "mojo::WTFArray<%s>" if _for_blink else "mojo::Array<%s>"
return pattern % GetCppWrapperType(kind.kind)
if mojom.IsMapKind(kind):
pattern = None
if _use_new_wrapper_types:
pattern = ("WTF::HashMap<%s, %s>" if _for_blink else
"std::unordered_map<%s, %s>")
if mojom.IsNullableKind(kind):
pattern = _AddOptional(pattern)
else:
pattern = "mojo::WTFMap<%s, %s>" if _for_blink else "mojo::Map<%s, %s>"
return pattern % (GetCppWrapperType(kind.key_kind),
GetCppWrapperType(kind.value_kind))
if mojom.IsInterfaceKind(kind):
return "%sPtr" % GetNameForKind(kind)
if mojom.IsInterfaceRequestKind(kind):
return "%sRequest" % GetNameForKind(kind.kind)
if mojom.IsAssociatedInterfaceKind(kind):
return "%sAssociatedPtrInfo" % GetNameForKind(kind.kind)
if mojom.IsAssociatedInterfaceRequestKind(kind):
return "%sAssociatedRequest" % GetNameForKind(kind.kind)
if mojom.IsStringKind(kind):
if _for_blink:
return "WTF::String"
if not _use_new_wrapper_types:
return "mojo::String"
type_name = "std::string"
return _AddOptional(type_name) if mojom.IsNullableKind(kind) else type_name
if mojom.IsGenericHandleKind(kind):
return "mojo::ScopedHandle"
if mojom.IsDataPipeConsumerKind(kind):
return "mojo::ScopedDataPipeConsumerHandle"
if mojom.IsDataPipeProducerKind(kind):
return "mojo::ScopedDataPipeProducerHandle"
if mojom.IsMessagePipeKind(kind):
return "mojo::ScopedMessagePipeHandle"
if mojom.IsSharedBufferKind(kind):
return "mojo::ScopedSharedBufferHandle"
if not kind in _kind_to_cpp_type:
raise Exception("Unrecognized kind %s" % kind.spec)
return _kind_to_cpp_type[kind]
def IsMoveOnlyKind(kind):
if IsTypemappedKind(kind):
if mojom.IsEnumKind(kind):
return False
return _current_typemap[GetFullMojomNameForKind(kind)]["move_only"]
if mojom.IsStructKind(kind) or mojom.IsUnionKind(kind):
return True
if mojom.IsArrayKind(kind):
return IsMoveOnlyKind(kind.kind) if _use_new_wrapper_types else True
if mojom.IsMapKind(kind):
return IsMoveOnlyKind(kind.value_kind) if _use_new_wrapper_types else True
if mojom.IsAnyHandleOrInterfaceKind(kind):
return True
return False
def IsCopyablePassByValue(kind):
if not IsTypemappedKind(kind):
return False
return _current_typemap[GetFullMojomNameForKind(kind)][
"copyable_pass_by_value"]
def ShouldPassParamByValue(kind):
return ((not mojom.IsReferenceKind(kind)) or IsMoveOnlyKind(kind) or
IsCopyablePassByValue(kind))
def GetCppWrapperParamType(kind):
cpp_wrapper_type = GetCppWrapperType(kind)
return (cpp_wrapper_type if ShouldPassParamByValue(kind)
else "const %s&" % cpp_wrapper_type)
def GetCppDataViewType(kind):
if mojom.IsEnumKind(kind):
return GetNameForKind(kind)
if mojom.IsStructKind(kind) or mojom.IsUnionKind(kind):
return "%sDataView" % GetNameForKind(kind)
if mojom.IsArrayKind(kind):
return "mojo::ArrayDataView<%s>" % GetCppDataViewType(kind.kind)
if mojom.IsMapKind(kind):
return ("mojo::MapDataView<%s, %s>" % (GetCppDataViewType(kind.key_kind),
GetCppDataViewType(kind.value_kind)))
if mojom.IsStringKind(kind):
return "mojo::StringDataView"
return GetCppWrapperType(kind)
def GetCppFieldType(kind):
if mojom.IsStructKind(kind):
return ("mojo::internal::Pointer<%s>" %
GetNameForKind(kind, internal=True))
if mojom.IsUnionKind(kind):
return "%s" % GetNameForKind(kind, internal=True)
if mojom.IsArrayKind(kind):
return ("mojo::internal::Pointer<mojo::internal::Array_Data<%s>>" %
GetCppFieldType(kind.kind))
if mojom.IsMapKind(kind):
return ("mojo::internal::Pointer<mojo::internal::Map_Data<%s, %s>>" %
(GetCppFieldType(kind.key_kind), GetCppFieldType(kind.value_kind)))
if mojom.IsInterfaceKind(kind):
return "mojo::internal::Interface_Data"
if mojom.IsInterfaceRequestKind(kind):
return "mojo::internal::Handle_Data"
if mojom.IsAssociatedInterfaceKind(kind):
return "mojo::internal::AssociatedInterface_Data"
if mojom.IsAssociatedInterfaceRequestKind(kind):
return "mojo::internal::AssociatedInterfaceRequest_Data"
if mojom.IsEnumKind(kind):
return "int32_t"
if mojom.IsStringKind(kind):
return "mojo::internal::Pointer<mojo::internal::String_Data>"
if mojom.IsAnyHandleKind(kind):
return "mojo::internal::Handle_Data"
return _kind_to_cpp_type[kind]
def GetCppUnionFieldType(kind):
if mojom.IsUnionKind(kind):
return ("mojo::internal::Pointer<%s>" % GetNameForKind(kind, internal=True))
return GetCppFieldType(kind)
def GetUnionGetterReturnType(kind):
if mojom.IsReferenceKind(kind):
return "%s&" % GetCppWrapperType(kind)
return GetCppWrapperType(kind)
def GetUnmappedTypeForSerializer(kind):
if mojom.IsEnumKind(kind):
return GetQualifiedNameForKind(kind)
if mojom.IsStructKind(kind) or mojom.IsUnionKind(kind):
return "%sPtr" % GetQualifiedNameForKind(kind)
if mojom.IsArrayKind(kind):
return "mojo::Array<%s>" % GetUnmappedTypeForSerializer(kind.kind)
if mojom.IsMapKind(kind):
return "mojo::Map<%s, %s>" % (
GetUnmappedTypeForSerializer(kind.key_kind),
GetUnmappedTypeForSerializer(kind.value_kind))
if mojom.IsInterfaceKind(kind):
return "%sPtr" % GetQualifiedNameForKind(kind)
if mojom.IsInterfaceRequestKind(kind):
return "%sRequest" % GetQualifiedNameForKind(kind.kind)
if mojom.IsAssociatedInterfaceKind(kind):
return "%sAssociatedPtrInfo" % GetQualifiedNameForKind(kind.kind)
if mojom.IsAssociatedInterfaceRequestKind(kind):
return "%sAssociatedRequest" % GetQualifiedNameForKind(kind.kind)
if mojom.IsStringKind(kind):
return "mojo::String"
if mojom.IsGenericHandleKind(kind):
return "mojo::ScopedHandle"
if mojom.IsDataPipeConsumerKind(kind):
return "mojo::ScopedDataPipeConsumerHandle"
if mojom.IsDataPipeProducerKind(kind):
return "mojo::ScopedDataPipeProducerHandle"
if mojom.IsMessagePipeKind(kind):
return "mojo::ScopedMessagePipeHandle"
if mojom.IsSharedBufferKind(kind):
return "mojo::ScopedSharedBufferHandle"
return _kind_to_cpp_type[kind]
def TranslateConstants(token, kind):
if isinstance(token, mojom.NamedValue):
return _NameFormatter(token, _variant).FormatForCpp()
if isinstance(token, mojom.BuiltinValue):
if token.value == "double.INFINITY" or token.value == "float.INFINITY":
return "INFINITY";
if token.value == "double.NEGATIVE_INFINITY" or \
token.value == "float.NEGATIVE_INFINITY":
return "-INFINITY";
if token.value == "double.NAN" or token.value == "float.NAN":
return "NAN";
if (kind is not None and mojom.IsFloatKind(kind)):
return token if token.isdigit() else token + "f";
# Per C++11, 2.14.2, the type of an integer literal is the first of the
# corresponding list in Table 6 in which its value can be represented. In this
# case, the list for decimal constants with no suffix is:
# int, long int, long long int
# The standard considers a program ill-formed if it contains an integer
# literal that cannot be represented by any of the allowed types.
#
# As it turns out, MSVC doesn't bother trying to fall back to long long int,
# so the integral constant -2147483648 causes it grief: it decides to
# represent 2147483648 as an unsigned integer, and then warns that the unary
# minus operator doesn't make sense on unsigned types. Doh!
if kind == mojom.INT32 and token == "-2147483648":
return "(-%d - 1) /* %s */" % (
2**31 - 1, "Workaround for MSVC bug; see https://crbug.com/445618")
return "%s%s" % (token, _kind_to_cpp_literal_suffix.get(kind, ""))
def ExpressionToText(value, kind=None):
return TranslateConstants(value, kind)
def RequiresContextForDataView(kind):
for field in kind.fields:
if mojom.IsReferenceKind(field.kind):
return True
return False
def ShouldInlineStruct(struct):
# TODO(darin): Base this on the size of the wrapper class.
if len(struct.fields) > 4:
return False
for field in struct.fields:
if mojom.IsReferenceKind(field.kind) and not mojom.IsStringKind(field.kind):
return False
return True
def ShouldInlineUnion(union):
return not any(
mojom.IsReferenceKind(field.kind) and not mojom.IsStringKind(field.kind)
for field in union.fields)
def GetContainerValidateParamsCtorArgs(kind):
if mojom.IsStringKind(kind):
expected_num_elements = 0
element_is_nullable = False
key_validate_params = "nullptr"
element_validate_params = "nullptr"
enum_validate_func = "nullptr"
elif mojom.IsMapKind(kind):
expected_num_elements = 0
element_is_nullable = False
key_validate_params = GetNewContainerValidateParams(mojom.Array(
kind=kind.key_kind))
element_validate_params = GetNewContainerValidateParams(mojom.Array(
kind=kind.value_kind))
enum_validate_func = "nullptr"
else: # mojom.IsArrayKind(kind)
expected_num_elements = generator.ExpectedArraySize(kind) or 0
element_is_nullable = mojom.IsNullableKind(kind.kind)
key_validate_params = "nullptr"
element_validate_params = GetNewContainerValidateParams(kind.kind)
if mojom.IsEnumKind(kind.kind):
enum_validate_func = ("%s::Validate" %
GetQualifiedNameForKind(kind.kind, internal=True))
else:
enum_validate_func = "nullptr"
if enum_validate_func == "nullptr":
if key_validate_params == "nullptr":
return "%d, %s, %s" % (expected_num_elements,
"true" if element_is_nullable else "false",
element_validate_params)
else:
return "%s, %s" % (key_validate_params, element_validate_params)
else:
return "%d, %s" % (expected_num_elements, enum_validate_func)
def GetNewContainerValidateParams(kind):
if (not mojom.IsArrayKind(kind) and not mojom.IsMapKind(kind) and
not mojom.IsStringKind(kind)):
return "nullptr"
return "new mojo::internal::ContainerValidateParams(%s)" % (
GetContainerValidateParamsCtorArgs(kind))
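# Illustrative note (added): for a plain mojom string field this produces
# "new mojo::internal::ContainerValidateParams(0, false, nullptr)", i.e. no
# expected element count, non-nullable elements and no nested validation.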
class Generator(generator.Generator):
cpp_filters = {
"constant_value": ConstantValue,
"cpp_wrapper_param_type": GetCppWrapperParamType,
"cpp_data_view_type": GetCppDataViewType,
"cpp_field_type": GetCppFieldType,
"cpp_union_field_type": GetCppUnionFieldType,
"cpp_pod_type": GetCppPodType,
"cpp_union_getter_return_type": GetUnionGetterReturnType,
"cpp_wrapper_type": GetCppWrapperType,
"default_value": DefaultValue,
"expression_to_text": ExpressionToText,
"get_container_validate_params_ctor_args":
GetContainerValidateParamsCtorArgs,
"get_name_for_kind": GetNameForKind,
"get_pad": pack.GetPad,
"get_qualified_name_for_kind": GetQualifiedNameForKind,
"has_callbacks": mojom.HasCallbacks,
"has_sync_methods": mojom.HasSyncMethods,
"requires_context_for_data_view": RequiresContextForDataView,
"should_inline": ShouldInlineStruct,
"should_inline_union": ShouldInlineUnion,
"is_array_kind": mojom.IsArrayKind,
"is_enum_kind": mojom.IsEnumKind,
"is_integral_kind": mojom.IsIntegralKind,
"is_native_only_kind": IsNativeOnlyKind,
"is_any_handle_or_interface_kind": mojom.IsAnyHandleOrInterfaceKind,
"is_associated_kind": mojom.IsAssociatedKind,
"is_map_kind": mojom.IsMapKind,
"is_nullable_kind": mojom.IsNullableKind,
"is_object_kind": mojom.IsObjectKind,
"is_string_kind": mojom.IsStringKind,
"is_struct_kind": mojom.IsStructKind,
"is_typemapped_kind": IsTypemappedKind,
"is_union_kind": mojom.IsUnionKind,
"passes_associated_kinds": mojom.PassesAssociatedKinds,
"struct_size": lambda ps: ps.GetTotalSize() + _HEADER_SIZE,
"stylize_method": generator.StudlyCapsToCamel,
"under_to_camel": generator.UnderToCamel,
"unmapped_type_for_serializer": GetUnmappedTypeForSerializer,
}
def GetExtraTraitsHeaders(self):
extra_headers = set()
for entry in self.typemap.itervalues():
extra_headers.update(entry.get("traits_headers", []))
return list(extra_headers)
def GetExtraPublicHeaders(self):
extra_headers = set()
for entry in self.typemap.itervalues():
extra_headers.update(entry.get("public_headers", []))
return list(extra_headers)
def GetJinjaExports(self):
return {
"module": self.module,
"namespace": self.module.namespace,
"namespaces_as_array": NamespaceToArray(self.module.namespace),
"imports": self.module.imports,
"kinds": self.module.kinds,
"enums": self.module.enums,
"structs": self.GetStructs(),
"unions": self.GetUnions(),
"interfaces": self.GetInterfaces(),
"variant": self.variant,
"extra_traits_headers": self.GetExtraTraitsHeaders(),
"extra_public_headers": self.GetExtraPublicHeaders(),
"for_blink": self.for_blink,
"use_new_wrapper_types": self.use_new_wrapper_types,
"export_attribute": self.export_attribute,
"export_header": self.export_header,
}
@staticmethod
def GetTemplatePrefix():
return "cpp_templates"
@classmethod
def GetFilters(cls):
return cls.cpp_filters
@UseJinja("module.h.tmpl")
def GenerateModuleHeader(self):
return self.GetJinjaExports()
@UseJinja("module-internal.h.tmpl")
def GenerateModuleInternalHeader(self):
return self.GetJinjaExports()
@UseJinja("module.cc.tmpl")
def GenerateModuleSource(self):
return self.GetJinjaExports()
def GenerateFiles(self, args):
global _current_typemap
_current_typemap = self.typemap
global _for_blink
_for_blink = self.for_blink
global _use_new_wrapper_types
_use_new_wrapper_types = self.use_new_wrapper_types
global _variant
_variant = self.variant
suffix = "-%s" % self.variant if self.variant else ""
self.Write(self.GenerateModuleHeader(),
self.MatchMojomFilePath("%s%s.h" % (self.module.name, suffix)))
self.Write(self.GenerateModuleInternalHeader(),
self.MatchMojomFilePath("%s%s-internal.h" % (self.module.name, suffix)))
self.Write(self.GenerateModuleSource(),
self.MatchMojomFilePath("%s%s.cc" % (self.module.name, suffix)))
| bsd-3-clause | -230,045,905,145,959,070 | 36.449541 | 80 | 0.69706 | false |
Damian9449/Python | lab10_abstrakcyjneTypyDanych/10.2.py | 1 | 1396 | #!/usr/bin/python
class Stack:
def __init__(self, size=10):
self.items = size * [None] # utworzenie tablicy
self.n = 0 # liczba elementow na stosie
self.size = size
def is_empty(self):
return self.n == 0
def is_full(self):
return self.size == self.n
def push(self, data):
if self.is_full() == False:
self.items[self.n] = data
self.n = self.n + 1
else:
raise Exception("Stack is full !!!")
def pop(self):
if self.is_empty() == False:
self.n = self.n - 1
data = self.items[self.n]
self.items[self.n] = None # usuwam referencje
return data
else:
raise Exception("Stack is empty !!!")
import unittest
class TestStack(unittest.TestCase):
def setUp(self):
self.st1 = Stack(1)
self.st2 = Stack(0)
self.st3 = Stack(2)
def test_pop(self):
self.assertRaises(Exception, self.st1.pop)
    def test_push_full(self):
        # st2 was created with size 0, so pushing anything must raise
        # the "Stack is full !!!" exception.
        self.assertRaises(Exception, self.st2.push, 1)
def test_isFull(self):
self.st3.push(1)
self.st3.push(2)
self.assertTrue(self.st3.is_full())
def test_isEmpty(self):
self.assertTrue(self.st3.is_empty())
if __name__ == '__main__':
unittest.main() # uruchamia wszystkie testy
| mit | 2,741,167,808,983,571,000 | 22.661017 | 68 | 0.536533 | false |
kittiu/sale-workflow | sale_sourced_by_line/tests/test_sale_is_delivered.py | 1 | 3494 | # -*- coding: utf-8 -*-
# Copyright 2014 Camptocamp SA - Yannick Vaucher
# Copyright 2017 Eficent Business and IT Consulting Services S.L.
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo.tests.common import TransactionCase
class TestSaleIsDelivered(TransactionCase):
"""Check the _get_shipped method of Sale Order. """
def test_sale_no_proc(self):
"""False when no procurement on both sale.order.line"""
self.assertFalse(self.sale.shipped)
def test_sale_no_proc_one_service(self):
"""False when, no procurement on both line but one is service"""
self.sale_line1.product_id = self.service_product
self.assertFalse(self.sale.shipped)
def test_sale_no_proc_all_services(self):
"""True when, no procurement on both lines but both are services"""
self.sale_line1.product_id = self.service_product
self.sale_line2.product_id = self.service_product
self.assertTrue(self.sale.shipped)
def test_sale_not_all_proc(self):
"""False, when one line with and one without procurement done"""
self.sale_line1.procurement_group_id = self.proc_group1
self.proc1.state = 'done'
self.assertFalse(self.sale.shipped)
def test_sale_proc_and_service(self):
"""True when, one line with procurement done and one line for service
"""
self.sale_line1.procurement_group_id = self.proc_group1
self.proc1.state = 'done'
self.sale_line2.product_id = self.service_product
self.assertTrue(self.sale.shipped)
def test_sale_partially_delivered(self):
"""False when, all lines with procurement, one is partially delivered
"""
self.sale_line1.procurement_group_id = self.proc_group1
self.sale_line2.procurement_group_id = self.proc_group2
self.proc1.state = 'done'
self.proc2.state = 'running'
self.assertFalse(self.sale.shipped)
def test_sale_is_delivered(self):
"""True, when both line have a done procurement"""
self.sale_line1.procurement_group_id = self.proc_group1
self.sale_line2.procurement_group_id = self.proc_group2
self.proc1.state = 'done'
self.proc2.state = 'done'
self.assertTrue(self.sale.shipped)
def setUp(self):
"""Setup a Sale Order with 2 lines.
And prepare procurements
I use Model.new to get a model instance that is not saved to the
database, but has working methods.
"""
super(TestSaleIsDelivered, self).setUp()
so = self.env['sale.order']
sol = self.env['sale.order.line']
product = self.env['product.product']
procurement = self.env['procurement.order']
procurement_group = self.env['procurement.group']
self.sale = so.new()
self.sale_line1 = sol.new()
self.sale_line2 = sol.new()
self.sale_line1.order_id = self.sale
self.sale_line2.order_id = self.sale
self.sale.order_line = sol.browse([self.sale_line1.id,
self.sale_line2.id])
self.proc1 = procurement.new()
self.proc_group1 = procurement_group.new()
self.proc_group1.procurement_ids = self.proc1
self.proc2 = procurement.new()
self.proc_group2 = procurement_group.new()
self.proc_group2.procurement_ids = self.proc2
self.service_product = product.new({'type': 'service'})
| agpl-3.0 | -9,047,940,310,241,662,000 | 36.978261 | 77 | 0.641671 | false |
scorpionis/docklet | src/env.py | 1 | 2460 | import os
def getenv(key):
if key == "CLUSTER_NAME":
return os.environ.get("CLUSTER_NAME", "docklet-vc")
elif key == "FS_PREFIX":
return os.environ.get("FS_PREFIX", "/opt/docklet")
elif key == "CLUSTER_SIZE":
return int(os.environ.get("CLUSTER_SIZE", 1))
elif key == "CLUSTER_NET":
return os.environ.get("CLUSTER_NET", "172.16.0.1/16")
elif key == "CONTAINER_CPU":
return int(os.environ.get("CONTAINER_CPU", 100000))
elif key == "CONTAINER_DISK":
return int(os.environ.get("CONTAINER_DISK", 1000))
elif key == "CONTAINER_MEMORY":
return int(os.environ.get("CONTAINER_MEMORY", 1000))
elif key == "DISKPOOL_SIZE":
return int(os.environ.get("DISKPOOL_SIZE", 5000))
elif key == "ETCD":
return os.environ.get("ETCD", "localhost:2379")
elif key == "NETWORK_DEVICE":
return os.environ.get("NETWORK_DEVICE", "eth0")
elif key == "MASTER_IP":
return os.environ.get("MASTER_IP", "0.0.0.0")
elif key == "MASTER_PORT":
return int(os.environ.get("MASTER_PORT", 9000))
elif key == "WORKER_PORT":
return int(os.environ.get("WORKER_PORT", 9001))
elif key == "PROXY_PORT":
return int(os.environ.get("PROXY_PORT", 8000))
elif key == "PROXY_API_PORT":
return int(os.environ.get("PROXY_API_PORT", 8001))
elif key == "WEB_PORT":
return int(os.environ.get("WEB_PORT", 8888))
elif key == "PORTAL_URL":
return os.environ.get("PORTAL_URL",
"http://"+getenv("MASTER_IP") + ":" + str(getenv("PROXY_PORT")))
elif key == "LOG_LEVEL":
return os.environ.get("LOG_LEVEL", "DEBUG")
elif key == "LOG_LIFE":
return int(os.environ.get("LOG_LIFE", 10))
elif key == "WEB_LOG_LEVEL":
return os.environ.get("WEB_LOG_LEVEL", "DEBUG")
elif key == "STORAGE":
return os.environ.get("STORAGE", "file")
elif key =="EXTERNAL_LOGIN":
return os.environ.get("EXTERNAL_LOGIN", "False")
elif key =="EMAIL_FROM_ADDRESS":
return os.environ.get("EMAIL_FROM_ADDRESS", "")
elif key =="ADMIN_EMAIL_ADDRESS":
return os.environ.get("ADMIN_EMAIL_ADDRESS", "")
elif key =="DATA_QUOTA":
return os.environ.get("DATA_QUOTA", "False")
elif key =="DATA_QUOTA_CMD":
return os.environ.get("DATA_QUOTA_CMD", "gluster volume quota docklet-volume limit-usage %s %s")
else:
return os.environ[key]
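# Example (added for illustration): with no overriding environment variables,
# getenv("PORTAL_URL") evaluates to "http://0.0.0.0:8000", assembled from the
# MASTER_IP and PROXY_PORT defaults above.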
| bsd-3-clause | 8,593,028,398,002,000,000 | 41.413793 | 104 | 0.595122 | false |
martinzlocha/mad | mad/settings.py | 1 | 3790 | """
Django settings for mad project.
Generated by 'django-admin startproject' using Django 1.10.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import config
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.SECRET_KEY
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config.DEBUG
ALLOWED_HOSTS = config.ALLOWED_HOSTS
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'bootstrap3',
'portal',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mad.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mad.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
if 'RDS_DB_NAME' in os.environ:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': os.environ['RDS_DB_NAME'],
'USER': os.environ['RDS_USERNAME'],
'PASSWORD': os.environ['RDS_PASSWORD'],
'HOST': os.environ['RDS_HOSTNAME'],
'PORT': os.environ['RDS_PORT'],
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
if config.DEBUG:
# Allows static files to be outside of an app
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
'static/',
]
else:
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
| mit | 7,744,071,093,783,556,000 | 25.319444 | 91 | 0.656992 | false |
tombstone/models | research/object_detection/predictors/heads/keras_class_head_tf2_test.py | 1 | 7624 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.predictors.heads.class_head."""
import unittest
import tensorflow.compat.v1 as tf
from google.protobuf import text_format
from object_detection.builders import hyperparams_builder
from object_detection.predictors.heads import keras_class_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
from object_detection.utils import tf_version
@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class ConvolutionalKerasClassPredictorTest(test_case.TestCase):
def _build_conv_hyperparams(self):
conv_hyperparams = hyperparams_pb2.Hyperparams()
conv_hyperparams_text_proto = """
activation: NONE
regularizer {
l2_regularizer {
}
}
initializer {
truncated_normal_initializer {
}
}
"""
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
def test_prediction_size_depthwise_false(self):
conv_hyperparams = self._build_conv_hyperparams()
class_prediction_head = keras_class_head.ConvolutionalClassHead(
is_training=True,
num_class_slots=20,
use_dropout=True,
dropout_keep_prob=0.5,
kernel_size=3,
conv_hyperparams=conv_hyperparams,
freeze_batchnorm=False,
num_predictions_per_location=1,
use_depthwise=False)
def graph_fn():
image_feature = tf.random_uniform(
[64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
class_predictions = class_prediction_head(image_feature,)
return class_predictions
class_predictions = self.execute(graph_fn, [])
self.assertAllEqual([64, 323, 20], class_predictions.shape)
def test_prediction_size_depthwise_true(self):
conv_hyperparams = self._build_conv_hyperparams()
class_prediction_head = keras_class_head.ConvolutionalClassHead(
is_training=True,
num_class_slots=20,
use_dropout=True,
dropout_keep_prob=0.5,
kernel_size=3,
conv_hyperparams=conv_hyperparams,
freeze_batchnorm=False,
num_predictions_per_location=1,
use_depthwise=True)
def graph_fn():
image_feature = tf.random_uniform(
[64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
class_predictions = class_prediction_head(image_feature,)
return class_predictions
class_predictions = self.execute(graph_fn, [])
self.assertAllEqual([64, 323, 20], class_predictions.shape)
@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class MaskRCNNClassHeadTest(test_case.TestCase):
def _build_fc_hyperparams(self,
op_type=hyperparams_pb2.Hyperparams.FC):
hyperparams = hyperparams_pb2.Hyperparams()
hyperparams_text_proto = """
activation: NONE
regularizer {
l2_regularizer {
}
}
initializer {
truncated_normal_initializer {
}
}
"""
text_format.Merge(hyperparams_text_proto, hyperparams)
hyperparams.op = op_type
return hyperparams_builder.KerasLayerHyperparams(hyperparams)
def test_prediction_size(self):
class_prediction_head = keras_class_head.MaskRCNNClassHead(
is_training=False,
num_class_slots=20,
fc_hyperparams=self._build_fc_hyperparams(),
freeze_batchnorm=False,
use_dropout=True,
dropout_keep_prob=0.5)
def graph_fn():
roi_pooled_features = tf.random_uniform(
[64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
prediction = class_prediction_head(roi_pooled_features)
return prediction
prediction = self.execute(graph_fn, [])
self.assertAllEqual([64, 1, 20], prediction.shape)
@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class WeightSharedConvolutionalKerasClassPredictorTest(test_case.TestCase):
def _build_conv_hyperparams(self):
conv_hyperparams = hyperparams_pb2.Hyperparams()
conv_hyperparams_text_proto = """
activation: NONE
regularizer {
l2_regularizer {
}
}
initializer {
truncated_normal_initializer {
}
}
"""
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
def test_prediction_size_depthwise_false(self):
conv_hyperparams = self._build_conv_hyperparams()
class_prediction_head = keras_class_head.WeightSharedConvolutionalClassHead(
num_class_slots=20,
conv_hyperparams=conv_hyperparams,
num_predictions_per_location=1,
use_depthwise=False)
def graph_fn():
image_feature = tf.random_uniform(
[64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
class_predictions = class_prediction_head(image_feature)
return class_predictions
class_predictions = self.execute(graph_fn, [])
self.assertAllEqual([64, 323, 20], class_predictions.shape)
def test_prediction_size_depthwise_true(self):
conv_hyperparams = self._build_conv_hyperparams()
class_prediction_head = keras_class_head.WeightSharedConvolutionalClassHead(
num_class_slots=20,
conv_hyperparams=conv_hyperparams,
num_predictions_per_location=1,
use_depthwise=True)
def graph_fn():
image_feature = tf.random_uniform(
[64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
class_predictions = class_prediction_head(image_feature)
return class_predictions
class_predictions = self.execute(graph_fn, [])
self.assertAllEqual([64, 323, 20], class_predictions.shape)
def test_variable_count_depth_wise_true(self):
conv_hyperparams = self._build_conv_hyperparams()
class_prediction_head = (
keras_class_head.WeightSharedConvolutionalClassHead(
num_class_slots=20,
conv_hyperparams=conv_hyperparams,
num_predictions_per_location=1,
use_depthwise=True))
image_feature = tf.random_uniform(
[64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
class_prediction_head(image_feature)
self.assertEqual(len(class_prediction_head.variables), 3)
def test_variable_count_depth_wise_False(self):
conv_hyperparams = self._build_conv_hyperparams()
class_prediction_head = (
keras_class_head.WeightSharedConvolutionalClassHead(
num_class_slots=20,
conv_hyperparams=conv_hyperparams,
num_predictions_per_location=1,
use_depthwise=False))
image_feature = tf.random_uniform(
[64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
class_prediction_head(image_feature)
self.assertEqual(len(class_prediction_head.variables), 2)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | 5,731,678,658,689,163,000 | 36.55665 | 80 | 0.67458 | false |
JohnGriffiths/dipy | dipy/reconst/tests/test_dki.py | 1 | 20608 | """ Testing DKI """
from __future__ import division, print_function, absolute_import
import numpy as np
import random
import dipy.reconst.dki as dki
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
assert_almost_equal)
from nose.tools import assert_raises
from dipy.sims.voxel import multi_tensor_dki
from dipy.io.gradients import read_bvals_bvecs
from dipy.core.gradients import gradient_table
from dipy.data import get_data
from dipy.reconst.dti import (from_lower_triangular, decompose_tensor)
from dipy.reconst.dki import (mean_kurtosis, carlson_rf, carlson_rd,
axial_kurtosis, radial_kurtosis, _positive_evals)
from dipy.core.sphere import Sphere
from dipy.core.geometry import perpendicular_directions
fimg, fbvals, fbvecs = get_data('small_64D')
bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
gtab = gradient_table(bvals, bvecs)
# 2 shells for techniques that require multishell data
bvals_2s = np.concatenate((bvals, bvals * 2), axis=0)
bvecs_2s = np.concatenate((bvecs, bvecs), axis=0)
gtab_2s = gradient_table(bvals_2s, bvecs_2s)
# Simulation 1. Signals of two crossing fibers are simulated
mevals_cross = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087],
[0.00099, 0, 0], [0.00226, 0.00087, 0.00087]])
angles_cross = [(80, 10), (80, 10), (20, 30), (20, 30)]
fie = 0.49
frac_cross = [fie*50, (1-fie) * 50, fie*50, (1-fie) * 50]
# Noise-free simulations
signal_cross, dt_cross, kt_cross = multi_tensor_dki(gtab_2s, mevals_cross,
S0=100,
angles=angles_cross,
fractions=frac_cross,
snr=None)
evals_cross, evecs_cross = decompose_tensor(from_lower_triangular(dt_cross))
crossing_ref = np.concatenate((evals_cross, evecs_cross[0], evecs_cross[1],
evecs_cross[2], kt_cross), axis=0)
# Simulation 2. Spherical kurtosis tensor - for white matter this can be a
# biologically implausible scenario; however, this simulation is useful for
# testing the estimation of directional apparent kurtosis and the mean
# kurtosis, since its directional and mean kurtosis ground truth are a constant
# which can be easily calculated mathematically.
Di = 0.00099
De = 0.00226
mevals_sph = np.array([[Di, Di, Di], [De, De, De]])
frac_sph = [50, 50]
signal_sph, dt_sph, kt_sph = multi_tensor_dki(gtab_2s, mevals_sph, S0=100,
fractions=frac_sph,
snr=None)
evals_sph, evecs_sph = decompose_tensor(from_lower_triangular(dt_sph))
params_sph = np.concatenate((evals_sph, evecs_sph[0], evecs_sph[1],
evecs_sph[2], kt_sph), axis=0)
# Compute ground truth - since KT is spherical, the apparent kurtosis
# coefficients for all gradient directions and the mean kurtosis have to be
# equal to Kref_sphere.
f = 0.5
Dg = f*Di + (1-f)*De
Kref_sphere = 3 * f * (1-f) * ((Di-De) / Dg) ** 2
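# (Added note) With f = 0.5, Di = 0.00099 and De = 0.00226 this gives
# Dg = 0.001625 and Kref_sphere ~= 0.458.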
# Simulation 3. Multi-voxel simulations - a dataset of four voxels is simulated.
# Since the objective of this simulation is to check that procedures are able
# to work with multi-dimensional data, all voxels contain the same crossing
# signal produced in simulation 1.
DWI = np.zeros((2, 2, 1, len(gtab_2s.bvals)))
DWI[0, 0, 0] = DWI[0, 1, 0] = DWI[1, 0, 0] = DWI[1, 1, 0] = signal_cross
multi_params = np.zeros((2, 2, 1, 27))
multi_params[0, 0, 0] = multi_params[0, 1, 0] = crossing_ref
multi_params[1, 0, 0] = multi_params[1, 1, 0] = crossing_ref
def test_positive_evals():
# Tested evals
L1 = np.array([[1e-3, 1e-3, 2e-3], [0, 1e-3, 0]])
L2 = np.array([[3e-3, 0, 2e-3], [1e-3, 1e-3, 0]])
L3 = np.array([[4e-3, 1e-4, 0], [0, 1e-3, 0]])
# only the first voxels have all eigenvalues larger than zero, thus:
expected_ind = np.array([[True, False, False], [False, True, False]],
dtype=bool)
# test function _positive_evals
ind = _positive_evals(L1, L2, L3)
assert_array_equal(ind, expected_ind)
def test_split_dki_param():
dkiM = dki.DiffusionKurtosisModel(gtab_2s, fit_method="OLS")
dkiF = dkiM.fit(DWI)
evals, evecs, kt = dki.split_dki_param(dkiF.model_params)
assert_array_almost_equal(evals, dkiF.evals)
assert_array_almost_equal(evecs, dkiF.evecs)
assert_array_almost_equal(kt, dkiF.kt)
def test_dki_fits():
""" DKI fits are tested on noise free crossing fiber simulates """
# OLS fitting
dkiM = dki.DiffusionKurtosisModel(gtab_2s, fit_method="OLS")
dkiF = dkiM.fit(signal_cross)
assert_array_almost_equal(dkiF.model_params, crossing_ref)
# WLS fitting
dki_wlsM = dki.DiffusionKurtosisModel(gtab_2s, fit_method="WLS")
dki_wlsF = dki_wlsM.fit(signal_cross)
assert_array_almost_equal(dki_wlsF.model_params, crossing_ref)
# testing multi-voxels
dkiF_multi = dkiM.fit(DWI)
assert_array_almost_equal(dkiF_multi.model_params, multi_params)
dkiF_multi = dki_wlsM.fit(DWI)
assert_array_almost_equal(dkiF_multi.model_params, multi_params)
def test_apparent_kurtosis_coef():
""" Apparent kurtosis coeficients are tested for a spherical kurtosis
tensor """
sph = Sphere(xyz=gtab.bvecs[gtab.bvals > 0])
AKC = dki.apparent_kurtosis_coef(params_sph, sph)
# check all direction
for d in range(len(gtab.bvecs[gtab.bvals > 0])):
assert_array_almost_equal(AKC[d], Kref_sphere)
def test_dki_predict():
dkiM = dki.DiffusionKurtosisModel(gtab_2s)
pred = dkiM.predict(crossing_ref, S0=100)
assert_array_almost_equal(pred, signal_cross)
# just to check that it works with more than one voxel:
pred_multi = dkiM.predict(multi_params, S0=100)
assert_array_almost_equal(pred_multi, DWI)
# check the function predict of the DiffusionKurtosisFit object
dkiF = dkiM.fit(DWI)
pred_multi = dkiF.predict(gtab_2s, S0=100)
assert_array_almost_equal(pred_multi, DWI)
dkiF = dkiM.fit(pred_multi)
pred_from_fit = dkiF.predict(dkiM.gtab, S0=100)
assert_array_almost_equal(pred_from_fit, DWI)
def test_carlson_rf():
# Define inputs that we know the outputs from:
# Carlson, B.C., 1994. Numerical computation of real or complex
# elliptic integrals. arXiv:math/9409227 [math.CA]
# Real values (test in 2D format)
x = np.array([[1.0, 0.5], [2.0, 2.0]])
y = np.array([[2.0, 1.0], [3.0, 3.0]])
z = np.array([[0.0, 0.0], [4.0, 4.0]])
    # Define reference outputs
RF_ref = np.array([[1.3110287771461, 1.8540746773014],
[0.58408284167715, 0.58408284167715]])
# Compute integrals
RF = carlson_rf(x, y, z)
# Compare
assert_array_almost_equal(RF, RF_ref)
# Complex values
x = np.array([1j, 1j - 1, 1j, 1j - 1])
y = np.array([-1j, 1j, -1j, 1j])
z = np.array([0.0, 0.0, 2, 1 - 1j])
    # Define reference outputs
RF_ref = np.array([1.8540746773014, 0.79612586584234 - 1.2138566698365j,
1.0441445654064, 0.93912050218619 - 0.53296252018635j])
# Compute integrals
RF = carlson_rf(x, y, z, errtol=3e-5)
# Compare
assert_array_almost_equal(RF, RF_ref)
def test_carlson_rd():
# Define inputs that we know the outputs from:
# Carlson, B.C., 1994. Numerical computation of real or complex
# elliptic integrals. arXiv:math/9409227 [math.CA]
# Real values
x = np.array([0.0, 2.0])
y = np.array([2.0, 3.0])
z = np.array([1.0, 4.0])
    # Define reference outputs
RD_ref = np.array([1.7972103521034, 0.16510527294261])
# Compute integrals
RD = carlson_rd(x, y, z, errtol=1e-5)
# Compare
assert_array_almost_equal(RD, RD_ref)
# Complex values (testing in 2D format)
x = np.array([[1j, 0.0], [0.0, -2 - 1j]])
y = np.array([[-1j, 1j], [1j-1, -1j]])
z = np.array([[2.0, -1j], [1j, -1 + 1j]])
    # Define reference outputs
RD_ref = np.array([[0.65933854154220, 1.2708196271910 + 2.7811120159521j],
[-1.8577235439239 - 0.96193450888839j,
1.8249027393704 - 1.2218475784827j]])
# Compute integrals
RD = carlson_rd(x, y, z, errtol=1e-5)
# Compare
assert_array_almost_equal(RD, RD_ref)
def test_Wrotate_single_fiber():
    # Rotate the kurtosis tensor of a single-fiber simulation to the diffusion
    # tensor diagonal and check that it is equal to the kurtosis tensor of the
    # same single fiber simulated directly along the x-axis
# Define single fiber simulate
mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087]])
fie = 0.49
frac = [fie*100, (1 - fie)*100]
# simulate single fiber not aligned to the x-axis
theta = random.uniform(0, 180)
phi = random.uniform(0, 320)
angles = [(theta, phi), (theta, phi)]
signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles,
fractions=frac, snr=None)
evals, evecs = decompose_tensor(from_lower_triangular(dt))
kt_rotated = dki.Wrotate(kt, evecs)
# Now coordinate system has the DT diagonal aligned to the x-axis
# Reference simulation in which DT diagonal is directly aligned to the
# x-axis
angles = (90, 0), (90, 0)
signal, dt_ref, kt_ref = multi_tensor_dki(gtab_2s, mevals, angles=angles,
fractions=frac, snr=None)
assert_array_almost_equal(kt_rotated, kt_ref)
def test_Wrotate_crossing_fibers():
# Test 2 - simulate crossing fibers intersecting at 70 degrees.
# In this case, diffusion tensor principal eigenvector will be aligned in
# the middle of the crossing fibers. Thus, after rotating the kurtosis
    # tensor, this will be equal to a kurtosis tensor simulation of crossing
    # fibers both deviating 35 degrees from the x-axis. Moreover, we know that
    # crossing fibers will be aligned to the x-y plane, because the smaller
    # diffusion eigenvalue, perpendicular to both crossing fibers, will be
# aligned to the z-axis.
# Simulate the crossing fiber
angles = [(90, 30), (90, 30), (20, 30), (20, 30)]
fie = 0.49
frac = [fie*50, (1-fie) * 50, fie*50, (1-fie) * 50]
mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087],
[0.00099, 0, 0], [0.00226, 0.00087, 0.00087]])
signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles,
fractions=frac, snr=None)
evals, evecs = decompose_tensor(from_lower_triangular(dt))
kt_rotated = dki.Wrotate(kt, evecs)
# Now coordinate system has diffusion tensor diagonal aligned to the x-axis
# Simulate the reference kurtosis tensor
angles = [(90, 35), (90, 35), (90, -35), (90, -35)]
signal, dt, kt_ref = multi_tensor_dki(gtab_2s, mevals, angles=angles,
fractions=frac, snr=None)
# Compare rotated with the reference
assert_array_almost_equal(kt_rotated, kt_ref)
def test_Wcons():
    # Construct the 4D kurtosis tensor manually from the crossing fiber kt
    # simulation
Wfit = np.zeros([3, 3, 3, 3])
# Wxxxx
Wfit[0, 0, 0, 0] = kt_cross[0]
# Wyyyy
Wfit[1, 1, 1, 1] = kt_cross[1]
# Wzzzz
Wfit[2, 2, 2, 2] = kt_cross[2]
# Wxxxy
Wfit[0, 0, 0, 1] = Wfit[0, 0, 1, 0] = Wfit[0, 1, 0, 0] = kt_cross[3]
Wfit[1, 0, 0, 0] = kt_cross[3]
# Wxxxz
Wfit[0, 0, 0, 2] = Wfit[0, 0, 2, 0] = Wfit[0, 2, 0, 0] = kt_cross[4]
Wfit[2, 0, 0, 0] = kt_cross[4]
# Wxyyy
Wfit[0, 1, 1, 1] = Wfit[1, 0, 1, 1] = Wfit[1, 1, 1, 0] = kt_cross[5]
Wfit[1, 1, 0, 1] = kt_cross[5]
    # Wyyyz
Wfit[1, 1, 1, 2] = Wfit[1, 2, 1, 1] = Wfit[2, 1, 1, 1] = kt_cross[6]
Wfit[1, 1, 2, 1] = kt_cross[6]
# Wxzzz
Wfit[0, 2, 2, 2] = Wfit[2, 2, 2, 0] = Wfit[2, 0, 2, 2] = kt_cross[7]
Wfit[2, 2, 0, 2] = kt_cross[7]
# Wyzzz
Wfit[1, 2, 2, 2] = Wfit[2, 2, 2, 1] = Wfit[2, 1, 2, 2] = kt_cross[8]
Wfit[2, 2, 1, 2] = kt_cross[8]
# Wxxyy
Wfit[0, 0, 1, 1] = Wfit[0, 1, 0, 1] = Wfit[0, 1, 1, 0] = kt_cross[9]
Wfit[1, 0, 0, 1] = Wfit[1, 0, 1, 0] = Wfit[1, 1, 0, 0] = kt_cross[9]
# Wxxzz
Wfit[0, 0, 2, 2] = Wfit[0, 2, 0, 2] = Wfit[0, 2, 2, 0] = kt_cross[10]
Wfit[2, 0, 0, 2] = Wfit[2, 0, 2, 0] = Wfit[2, 2, 0, 0] = kt_cross[10]
# Wyyzz
Wfit[1, 1, 2, 2] = Wfit[1, 2, 1, 2] = Wfit[1, 2, 2, 1] = kt_cross[11]
Wfit[2, 1, 1, 2] = Wfit[2, 2, 1, 1] = Wfit[2, 1, 2, 1] = kt_cross[11]
# Wxxyz
Wfit[0, 0, 1, 2] = Wfit[0, 0, 2, 1] = Wfit[0, 1, 0, 2] = kt_cross[12]
Wfit[0, 1, 2, 0] = Wfit[0, 2, 0, 1] = Wfit[0, 2, 1, 0] = kt_cross[12]
Wfit[1, 0, 0, 2] = Wfit[1, 0, 2, 0] = Wfit[1, 2, 0, 0] = kt_cross[12]
Wfit[2, 0, 0, 1] = Wfit[2, 0, 1, 0] = Wfit[2, 1, 0, 0] = kt_cross[12]
# Wxyyz
Wfit[0, 1, 1, 2] = Wfit[0, 1, 2, 1] = Wfit[0, 2, 1, 1] = kt_cross[13]
Wfit[1, 0, 1, 2] = Wfit[1, 1, 0, 2] = Wfit[1, 1, 2, 0] = kt_cross[13]
Wfit[1, 2, 0, 1] = Wfit[1, 2, 1, 0] = Wfit[2, 0, 1, 1] = kt_cross[13]
Wfit[2, 1, 0, 1] = Wfit[2, 1, 1, 0] = Wfit[1, 0, 2, 1] = kt_cross[13]
# Wxyzz
Wfit[0, 1, 2, 2] = Wfit[0, 2, 1, 2] = Wfit[0, 2, 2, 1] = kt_cross[14]
Wfit[1, 0, 2, 2] = Wfit[1, 2, 0, 2] = Wfit[1, 2, 2, 0] = kt_cross[14]
Wfit[2, 0, 1, 2] = Wfit[2, 0, 2, 1] = Wfit[2, 1, 0, 2] = kt_cross[14]
Wfit[2, 1, 2, 0] = Wfit[2, 2, 0, 1] = Wfit[2, 2, 1, 0] = kt_cross[14]
# Function to be tested
W4D = dki.Wcons(kt_cross)
Wfit = Wfit.reshape(-1)
W4D = W4D.reshape(-1)
assert_array_almost_equal(W4D, Wfit)
def test_spherical_dki_statistics():
# tests if MK, AK and RK are equal to expected values of a spherical
# kurtosis tensor
# Define multi voxel spherical kurtosis simulations
MParam = np.zeros((2, 2, 2, 27))
MParam[0, 0, 0] = MParam[0, 0, 1] = MParam[0, 1, 0] = params_sph
MParam[0, 1, 1] = MParam[1, 1, 0] = params_sph
# MParam[1, 1, 1], MParam[1, 0, 0], and MParam[1, 0, 1] remains zero
MRef = np.zeros((2, 2, 2))
MRef[0, 0, 0] = MRef[0, 0, 1] = MRef[0, 1, 0] = Kref_sphere
MRef[0, 1, 1] = MRef[1, 1, 0] = Kref_sphere
MRef[1, 1, 1] = MRef[1, 0, 0] = MRef[1, 0, 1] = 0
# Mean kurtosis analytical solution
MK_multi = mean_kurtosis(MParam)
assert_array_almost_equal(MK_multi, MRef)
# radial kurtosis analytical solution
RK_multi = radial_kurtosis(MParam)
assert_array_almost_equal(RK_multi, MRef)
# axial kurtosis analytical solution
AK_multi = axial_kurtosis(MParam)
assert_array_almost_equal(AK_multi, MRef)
def test_compare_MK_method():
# tests if analytical solution of MK is equal to the average of directional
# kurtosis sampled from a sphere
# DKI Model fitting
dkiM = dki.DiffusionKurtosisModel(gtab_2s)
dkiF = dkiM.fit(signal_cross)
# MK analytical solution
MK_as = dkiF.mk()
# MK numerical method
sph = Sphere(xyz=gtab.bvecs[gtab.bvals > 0])
MK_nm = np.mean(dki.apparent_kurtosis_coef(dkiF.model_params, sph),
axis=-1)
assert_array_almost_equal(MK_as, MK_nm, decimal=1)
def test_single_voxel_DKI_stats():
    # tests if AK and RK are equal to expected values for a single fiber
    # simulated with a random orientation
ADi = 0.00099
ADe = 0.00226
RDi = 0
RDe = 0.00087
# Reference values
AD = fie*ADi + (1-fie)*ADe
AK = 3 * fie * (1-fie) * ((ADi-ADe) / AD) ** 2
RD = fie*RDi + (1-fie)*RDe
RK = 3 * fie * (1-fie) * ((RDi-RDe) / RD) ** 2
ref_vals = np.array([AD, AK, RD, RK])
# simulate fiber randomly oriented
theta = random.uniform(0, 180)
phi = random.uniform(0, 320)
angles = [(theta, phi), (theta, phi)]
mevals = np.array([[ADi, RDi, RDi], [ADe, RDe, RDe]])
frac = [fie*100, (1-fie)*100]
signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, S0=100, angles=angles,
fractions=frac, snr=None)
evals, evecs = decompose_tensor(from_lower_triangular(dt))
dki_par = np.concatenate((evals, evecs[0], evecs[1], evecs[2], kt), axis=0)
# Estimates using dki functions
ADe1 = dki.axial_diffusivity(evals)
RDe1 = dki.radial_diffusivity(evals)
AKe1 = axial_kurtosis(dki_par)
RKe1 = radial_kurtosis(dki_par)
e1_vals = np.array([ADe1, AKe1, RDe1, RKe1])
assert_array_almost_equal(e1_vals, ref_vals)
# Estimates using the kurtosis class object
dkiM = dki.DiffusionKurtosisModel(gtab_2s)
dkiF = dkiM.fit(signal)
e2_vals = np.array([dkiF.ad, dkiF.ak(), dkiF.rd, dkiF.rk()])
assert_array_almost_equal(e2_vals, ref_vals)
    # test MK (note this test corresponds to the MK singularity L2==L3)
MK_as = dkiF.mk()
sph = Sphere(xyz=gtab.bvecs[gtab.bvals > 0])
MK_nm = np.mean(dkiF.akc(sph))
assert_array_almost_equal(MK_as, MK_nm, decimal=1)
def test_compare_RK_methods():
# tests if analytical solution of RK is equal to the perpendicular kurtosis
# relative to the first diffusion axis
# DKI Model fitting
dkiM = dki.DiffusionKurtosisModel(gtab_2s)
dkiF = dkiM.fit(signal_cross)
# MK analytical solution
RK_as = dkiF.rk()
# MK numerical method
evecs = dkiF.evecs
p_dir = perpendicular_directions(evecs[:, 0], num=30, half=True)
ver = Sphere(xyz=p_dir)
RK_nm = np.mean(dki.apparent_kurtosis_coef(dkiF.model_params, ver),
axis=-1)
assert_array_almost_equal(RK_as, RK_nm)
def test_MK_singularities():
# To test MK in case that analytical solution was a singularity not covered
# by other tests
dkiM = dki.DiffusionKurtosisModel(gtab_2s)
# test singularity L1 == L2 - this is the case of a prolate diffusion
# tensor for crossing fibers at 90 degrees
angles_all = np.array([[(90, 0), (90, 0), (0, 0), (0, 0)],
[(89.9, 0), (89.9, 0), (0, 0), (0, 0)]])
for angles_90 in angles_all:
s_90, dt_90, kt_90 = multi_tensor_dki(gtab_2s, mevals_cross, S0=100,
angles=angles_90,
fractions=frac_cross, snr=None)
dkiF = dkiM.fit(s_90)
MK = dkiF.mk()
sph = Sphere(xyz=gtab.bvecs[gtab.bvals > 0])
MK_nm = np.mean(dkiF.akc(sph))
assert_almost_equal(MK, MK_nm, decimal=2)
# test singularity L1 == L3 and L1 != L2
    # since L1 is defined as the largest eigenvalue and L3 the smallest
    # eigenvalue, this singularity theoretically will never be called,
    # because if L1 == L3 then L2 also has to be equal to L1 and L3.
    # Nevertheless, I decided to include this test since this singularity
    # is relevant for cases where the eigenvalues are not ordered
# artificially revert the eigenvalue and eigenvector order
dki_params = dkiF.model_params.copy()
dki_params[1] = dkiF.model_params[2]
dki_params[2] = dkiF.model_params[1]
dki_params[4] = dkiF.model_params[5]
dki_params[5] = dkiF.model_params[4]
dki_params[7] = dkiF.model_params[8]
dki_params[8] = dkiF.model_params[7]
dki_params[10] = dkiF.model_params[11]
dki_params[11] = dkiF.model_params[10]
MK = dki.mean_kurtosis(dki_params)
MK_nm = np.mean(dki.apparent_kurtosis_coef(dki_params, sph))
assert_almost_equal(MK, MK_nm, decimal=2)
def test_dki_errors():
# first error of DKI module is if a unknown fit method is given
assert_raises(ValueError, dki.DiffusionKurtosisModel, gtab_2s,
fit_method="JOANA")
# second error of DKI module is if a min_signal is defined as negative
assert_raises(ValueError, dki.DiffusionKurtosisModel, gtab_2s,
min_signal=-1)
# try case with correct min_signal
dkiM = dki.DiffusionKurtosisModel(gtab_2s, min_signal=1)
dkiF = dkiM.fit(DWI)
assert_array_almost_equal(dkiF.model_params, multi_params)
# third error is if a given mask do not have same shape as data
dkiM = dki.DiffusionKurtosisModel(gtab_2s)
# test a correct mask
dkiF = dkiM.fit(DWI)
mask_correct = dkiF.fa > 0
mask_correct[1, 1] = False
multi_params[1, 1] = np.zeros(27)
mask_not_correct = np.array([[True, True, False], [True, False, False]])
dkiF = dkiM.fit(DWI, mask=mask_correct)
assert_array_almost_equal(dkiF.model_params, multi_params)
# test a incorrect mask
assert_raises(ValueError, dkiM.fit, DWI, mask=mask_not_correct)
| bsd-3-clause | -1,048,268,427,265,407,500 | 35.603908 | 79 | 0.60724 | false |
HybridF5/hybrid-jacket | nova_jacket/virt/jacket/vcloud/vcloud_client.py | 1 | 17538 | import time
import eventlet
import subprocess
from oslo.config import cfg
from oslo.utils import units
from nova import exception
from nova.compute import power_state
from nova.openstack.common import log as logging
from nova.virt.jacket.vcloud import constants
from nova.virt.jacket.vcloud.vcloud import exceptions
from nova.virt.jacket.vcloud.vcloud import RetryDecorator
from nova.virt.jacket.vcloud.vcloud import VCLOUD_STATUS
from nova.virt.jacket.vcloud.vcloud import VCloudAPISession
from nova.virt.jacket.statuscache.vcloudsynchronizer import HCVCS
from nova.virt.jacket.statuscache.jacketcache import JacketStatusCache
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
status_dict_vapp_to_instance = {
VCLOUD_STATUS.FAILED_CREATION: power_state.CRASHED,
VCLOUD_STATUS.UNRESOLVED: power_state.NOSTATE,
VCLOUD_STATUS.RESOLVED: power_state.NOSTATE,
VCLOUD_STATUS.DEPLOYED: power_state.NOSTATE,
VCLOUD_STATUS.SUSPENDED: power_state.SUSPENDED,
VCLOUD_STATUS.POWERED_ON: power_state.RUNNING,
VCLOUD_STATUS.WAITING_FOR_INPUT: power_state.NOSTATE,
VCLOUD_STATUS.UNKNOWN: power_state.NOSTATE,
VCLOUD_STATUS.UNRECOGNIZED: power_state.NOSTATE,
VCLOUD_STATUS.POWERED_OFF: power_state.SHUTDOWN,
VCLOUD_STATUS.INCONSISTENT_STATE: power_state.NOSTATE,
VCLOUD_STATUS.MIXED: power_state.NOSTATE,
VCLOUD_STATUS.DESCRIPTOR_PENDING: power_state.NOSTATE,
VCLOUD_STATUS.COPYING_CONTENTS: power_state.NOSTATE,
VCLOUD_STATUS.DISK_CONTENTS_PENDING: power_state.NOSTATE,
VCLOUD_STATUS.QUARANTINED: power_state.NOSTATE,
VCLOUD_STATUS.QUARANTINE_EXPIRED: power_state.NOSTATE,
VCLOUD_STATUS.REJECTED: power_state.NOSTATE,
VCLOUD_STATUS.TRANSFER_TIMEOUT: power_state.NOSTATE,
VCLOUD_STATUS.VAPP_UNDEPLOYED: power_state.NOSTATE,
VCLOUD_STATUS.VAPP_PARTIALLY_DEPLOYED: power_state.NOSTATE,
}
class VCloudClient(object):
def __init__(self, scheme):
self._metadata_iso_catalog = CONF.vcloud.metadata_iso_catalog
self._session = VCloudAPISession(
host_ip=CONF.vcloud.vcloud_host_ip,
host_port=CONF.vcloud.vcloud_host_port,
server_username=CONF.vcloud.vcloud_host_username,
server_password=CONF.vcloud.vcloud_host_password,
org=CONF.vcloud.vcloud_org,
vdc=CONF.vcloud.vcloud_vdc,
version=CONF.vcloud.vcloud_version,
service=CONF.vcloud.vcloud_service,
verify=CONF.vcloud.vcloud_verify,
service_type=CONF.vcloud.vcloud_service_type,
retry_count=CONF.vcloud.vcloud_api_retry_count,
create_session=True,
scheme=scheme)
# add for status cache
hcvcs = HCVCS(
host=CONF.vcloud.vcloud_host_ip,
username=CONF.vcloud.vcloud_host_username,
org=CONF.vcloud.vcloud_org,
password=CONF.vcloud.vcloud_host_password,
scheme="https")
self.cache = JacketStatusCache(hcvcs)
# add end for status cache
@property
def org(self):
return self._session.org
@property
def username(self):
return self._session.username
@property
def password(self):
return self._session.password
@property
def vdc(self):
return self._session.vdc
@property
def host_ip(self):
return self._session.host_ip
def _get_vcloud_vdc(self):
return self._invoke_api("get_vdc", self._session.vdc)
def _get_vcloud_vapp(self, vapp_name):
the_vapp = self._invoke_api("get_vapp",
self._get_vcloud_vdc(),
vapp_name)
if not the_vapp:
LOG.info("can't find the vapp %s" % vapp_name)
return None
else:
return the_vapp
def _invoke_api(self, method_name, *args, **kwargs):
res = self._session.invoke_api(self._session.vca,
method_name,
*args, **kwargs)
LOG.info("_invoke_api (%s, %s, %s) = %s" %
(method_name, args, kwargs, res))
return res
def _invoke_vapp_api(self, the_vapp, method_name, *args, **kwargs):
res = self._session.invoke_api(the_vapp, method_name, *args, **kwargs)
LOG.info("_invoke_vapp_api (%s, %s, %s) = %s" %
(method_name, args, kwargs, res))
return res
def get_disk_ref(self, disk_name):
disk_refs = self._invoke_api('get_diskRefs',
self._get_vcloud_vdc())
link = filter(lambda link: link.get_name() == disk_name, disk_refs)
if len(link) == 1:
return True, link[0]
elif len(link) == 0:
return False, 'disk not found'
elif len(link) > 1:
return False, 'more than one disks found with that name.'
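    # Usage sketch (names and values below are illustrative only):
    #   found, ref = client.get_disk_ref('my-volume')
    #   if found:
    #       client.attach_disk_to_vm('my-vapp', ref)
    #   else:
    #       LOG.error(ref)  # on failure, ref carries the error message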
def power_off_vapp(self, vapp_name):
@RetryDecorator(max_retry_count=60,
exceptions=exceptions.NoneException)
def _power_off(vapp_name):
expected_vapp_status = constants.VM_POWER_OFF_STATUS
the_vapp = self._get_vcloud_vapp(vapp_name)
vapp_status = self._get_status_first_vm(the_vapp)
if vapp_status == expected_vapp_status:
return the_vapp
task_stop = self._invoke_vapp_api(the_vapp, "undeploy")
if not task_stop:
raise exceptions.NoneException(
"power off vapp failed, task")
self._session.wait_for_task(task_stop)
retry_times = 60
while vapp_status != expected_vapp_status and retry_times > 0:
eventlet.greenthread.sleep(3)
the_vapp = self._get_vcloud_vapp(vapp_name)
vapp_status = self._get_status_first_vm(the_vapp)
LOG.debug('During power off vapp_name: %s, %s' % (vapp_name, vapp_status))
retry_times -= 1
return the_vapp
return _power_off(vapp_name)
def _get_status_first_vm(self, the_vapp):
children = the_vapp.me.get_Children()
if children:
vms = children.get_Vm()
for vm in vms:
return vm.get_status()
return None
def power_on_vapp(self, vapp_name):
@RetryDecorator(max_retry_count=60,
exceptions=exceptions.NoneException)
def _power_on(vapp_name):
the_vapp = self._get_vcloud_vapp(vapp_name)
vapp_status = self._get_status_first_vm(the_vapp)
expected_vapp_status = constants.VM_POWER_ON_STATUS
if vapp_status == expected_vapp_status:
return the_vapp
task = self._invoke_vapp_api(the_vapp, "poweron")
if not task:
raise exceptions.NoneException("power on vapp failed, task")
self._session.wait_for_task(task)
retry_times = 60
while vapp_status != expected_vapp_status and retry_times > 0:
eventlet.greenthread.sleep(3)
the_vapp = self._get_vcloud_vapp(vapp_name)
vapp_status = self._get_status_first_vm(the_vapp)
LOG.debug('During power on vapp_name: %s, %s' %
(vapp_name, vapp_status))
retry_times -= 1
return the_vapp
return _power_on(vapp_name)
def create_vapp(self, vapp_name, template_name, network_configs, root_gb=None):
result, task = self._session.invoke_api(self._session.vca,
"create_vapp",
self.vdc, vapp_name,
template_name, network_configs=network_configs, root_gb=root_gb)
# check the task is success or not
if not result:
raise exceptions.VCloudDriverException(
"Create_vapp error, task:" +
task)
self._session.wait_for_task(task)
the_vdc = self._session.invoke_api(self._session.vca, "get_vdc", self.vdc)
return self._session.invoke_api(self._session.vca, "get_vapp", the_vdc, vapp_name)
def delete_vapp(self, vapp_name):
the_vapp = self._get_vcloud_vapp(vapp_name)
task = self._invoke_vapp_api(the_vapp, "delete")
if not task:
raise exception.NovaException(
"delete vapp failed, task: %s" % task)
self._session.wait_for_task(task)
def reboot_vapp(self, vapp_name):
the_vapp = self._get_vcloud_vapp(vapp_name)
task = self._invoke_vapp_api(the_vapp, "reboot")
if not task:
raise exception.NovaException(
"reboot vapp failed, task: %s" % task)
self._session.wait_for_task(task)
def get_vapp_ip(self, vapp_name):
the_vapp = self._get_vcloud_vapp(vapp_name)
vms_network_infos = self._invoke_vapp_api(the_vapp, "get_vms_network_info")
for vm_network_infos in vms_network_infos:
for vm_network_info in vm_network_infos:
if vm_network_info['ip'] and \
(vm_network_info['network_name'] == CONF.vcloud.provider_tunnel_network_name):
return vm_network_info['ip']
return None
def create_volume(self, disk_name, disk_size):
result, resp = self._session.invoke_api(self._session.vca, "add_disk", self.vdc, disk_name, int(disk_size) * units.Gi)
if result:
self._session.wait_for_task(resp.get_Tasks().get_Task()[0])
LOG.info('Created volume : %s sucess', disk_name)
else:
err_msg = 'Unable to create volume, reason: %s' % resp
LOG.error(err_msg)
raise exception.NovaException(err_msg)
def delete_volume(self, disk_name):
result, resp = self._session.invoke_api(self._session.vca, "delete_disk", self.vdc, disk_name)
if result:
self._session.wait_for_task(resp)
LOG.info('delete volume : %s success', disk_name)
else:
if resp == 'disk not found':
LOG.warning('delete volume: unable to find volume %(name)s', {'name': disk_name})
else:
raise exception.NovaException("Unable to delete volume %s" % disk_name)
def attach_disk_to_vm(self, vapp_name, disk_ref):
@RetryDecorator(max_retry_count=60,
exceptions=exceptions.NoneException)
def _attach_disk(vapp_name, disk_ref):
the_vapp = self._get_vcloud_vapp(vapp_name)
task = the_vapp.attach_disk_to_vm(disk_ref)
if not task:
raise exceptions.NoneException(
"Unable to attach disk to vm %s" % vapp_name)
else:
self._session.wait_for_task(task)
return True
return _attach_disk(vapp_name, disk_ref)
def detach_disk_from_vm(self, vapp_name, disk_ref):
@RetryDecorator(max_retry_count=60,
exceptions=exceptions.NoneException)
def _detach_disk(vapp_name, disk_ref):
the_vapp = self._get_vcloud_vapp(vapp_name)
task = the_vapp.detach_disk_from_vm(disk_ref)
if not task:
raise exceptions.NoneException(
"Unable to detach disk from vm %s" % vapp_name)
else:
self._session.wait_for_task(task)
return True
return _detach_disk(vapp_name, disk_ref)
def get_network_configs(self, network_names):
return self._session.invoke_api(self._session.vca, "get_network_configs", self.vdc, network_names)
def get_network_connections(self, vapp, network_names):
return self._session.invoke_api(vapp, "get_network_connections", network_names)
def update_vms_connections(self, vapp, network_connections):
result, task = self._session.invoke_api(vapp, "update_vms_connections", network_connections)
# check the task is success or not
if not result:
raise exceptions.VCloudDriverException(
"Update_vms_connections error, task:" +
task)
self._session.wait_for_task(task)
def get_disk_attached_vapp(self, disk_name):
vapps = self._invoke_api('get_disk_attached_vapp', self.vdc, disk_name)
if len(vapps) == 1:
return vapps[0]
else:
return None
def modify_vm_cpu(self, vapp, cpus):
result, task = self._session.invoke_api(vapp, "modify_vm_cpu", cpus)
# check the task is success or not
if not result:
raise exceptions.VCloudDriverException(
"Modify_vm_cpu error, task:" +
task)
self._session.wait_for_task(task)
def modify_vm_memory(self, vapp, new_size):
result, task = self._session.invoke_api(vapp, "modify_vm_memory", new_size)
# check the task is success or not
if not result:
raise exceptions.VCloudDriverException(
"Modify_vm_memory error, task:" +
task)
self._session.wait_for_task(task)
def get_vcloud_vapp_status(self, vapp_name):
return self._get_vcloud_vapp(vapp_name).me.status
def query_vmdk_url(self, vapp_name):
# 0. shut down the app first
try:
the_vapp = self.power_off_vapp(vapp_name)
        except Exception:
            LOG.error('power off failed')
            # fall back to looking the vapp up directly so the download
            # steps below still have a valid reference
            the_vapp = self._get_vcloud_vapp(vapp_name)
# 1.enable download.
task = self._invoke_vapp_api(the_vapp, 'enableDownload')
if not task:
raise exception.NovaException(
"enable vmdk file download failed, task:")
self._session.wait_for_task(task)
# 2.get vapp info and ovf descriptor
the_vapp = self._get_vcloud_vapp(vapp_name)
ovf = self._invoke_vapp_api(the_vapp, 'get_ovf_descriptor')
# 3.get referenced file url
referenced_file_url = self._invoke_vapp_api(the_vapp,
'get_referenced_file_url',
ovf)
if not referenced_file_url:
raise exception.NovaException(
"get vmdk file url failed")
return referenced_file_url
def insert_media(self, vapp_name, iso_file):
the_vapp = self._get_vcloud_vapp(vapp_name)
task = the_vapp.vm_media(iso_file, 'insert')
if not task:
raise exception.NovaException(
"Unable to insert media vm %s" % vapp_name)
else:
self._session.wait_for_task(task)
return True
def upload_vm(self, ovf_name, vapp_name, api_net, tun_net):
cmd = ('ovftool --net:"vmnetwork-0=%s"'
' --net:"vmnetwork-1=%s"'
' %s "vcloud://%s:%s@%s?org=%s&vdc=%s&vapp=%s"' %
(api_net,
tun_net,
ovf_name,
self.username,
self.password,
self.host_ip,
self.org,
self.vdc,
vapp_name))
LOG.debug("begin run create vapp command '%s'." % cmd)
cmd_result = subprocess.call(cmd, shell=True)
LOG.debug("end run create vapp command '%s'." % cmd)
if cmd_result != 0:
raise exception.NovaException(
"Unable to upload vm %s" % vapp_name)
def _upload_metadata_iso(self, iso_file, media_name, overwrite=False):
overw = ''
if overwrite:
overw = '--overwrite'
cmd = ('ovftool %s --sourceType="ISO" '
' --vCloudTemplate="false"'
' "%s" "vcloud://%s:%s@%s?org=%s&vdc=%s&media=%s'
'&catalog=%s"' %
(overw,
iso_file,
self.username,
self.password,
self.host_ip,
self.org,
self.vdc,
media_name,
self._metadata_iso_catalog))
LOG.debug("begin run upload iso command '%s'." % cmd)
cmd_result = subprocess.call(cmd, shell=True)
LOG.debug("end run upload iso command '%s'." % cmd)
return cmd_result
def upload_metadata_iso(self, iso_file, vapp_name):
media_name = "metadata_%s.iso" % vapp_name
try:
cmd_result = self._upload_metadata_iso(iso_file, media_name)
except Exception as e:
cmd_result = 1
LOG.error('upload meta-data failed without overwrite %s.' % (e))
if cmd_result != 0:
cmd_result = self._upload_metadata_iso(iso_file, media_name, True)
if cmd_result != 0:
raise exception.NovaException(
"Unable to upload meta-data iso file %s" % vapp_name)
return self._invoke_api("get_media",
self._metadata_iso_catalog,
media_name)
def delete_metadata_iso(self, vapp_name):
media_name = "metadata_%s.iso" % vapp_name
        # does not work for pyvcloud10 but does for pyvcloud14
result = self._invoke_api("delete_catalog_item",
self._metadata_iso_catalog,
media_name)
if not result:
raise exception.NovaException(
"delete metadata iso failed vapp_name:%s" % vapp_name)
| apache-2.0 | 8,852,122,167,758,338,000 | 38.32287 | 126 | 0.571217 | false |
xuru/pyvisdk | pyvisdk/do/vm_disk_file_query_filter.py | 1 | 1060 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def VmDiskFileQueryFilter(vim, *args, **kwargs):
'''The filter for the virtual disk primary file.'''
obj = vim.client.factory.create('ns0:VmDiskFileQueryFilter')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'controllerType', 'diskType', 'matchHardwareVersion', 'thin',
'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
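# Usage sketch (assumes an already connected pyvisdk `vim` object; the
# argument values are illustrative only):
#   disk_filter = VmDiskFileQueryFilter(vim, diskType=['vmdk'], thin=True)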
| mit | 141,172,380,356,876,660 | 30.205882 | 124 | 0.595283 | false |
fdroidtravis/fdroidserver | fdroidserver/scanner.py | 1 | 20098 | #!/usr/bin/env python3
#
# scanner.py - part of the FDroid server tools
# Copyright (C) 2010-13, Ciaran Gultnieks, [email protected]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import imghdr
import json
import os
import re
import sys
import traceback
from argparse import ArgumentParser
import logging
import itertools
from . import _
from . import common
from . import metadata
from .exception import BuildException, VCSException
config = None
options = None
DEFAULT_JSON_PER_BUILD = {'errors': [], 'warnings': [], 'infos': []} # type: ignore
json_per_build = DEFAULT_JSON_PER_BUILD
MAVEN_URL_REGEX = re.compile(r"""\smaven\s*{.*?(?:setUrl|url)\s*=?\s*(?:uri)?\(?\s*["']?([^\s"']+)["']?[^}]*}""",
re.DOTALL)
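# The pattern above is meant to pull the URL out of Gradle repository blocks
# such as (sketch):
#   maven { url "https://jitpack.io" }
#   maven { setUrl("https://plugins.gradle.org/m2") }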
CODE_SIGNATURES = {
# The `apkanalyzer dex packages` output looks like this:
# M d 1 1 93 <packagename> <other stuff>
# The first column has P/C/M/F for package, class, method or field
# The second column has x/k/r/d for removed, kept, referenced and defined.
# We already filter for defined only in the apkanalyzer call. 'r' will be
# for things referenced but not distributed in the apk.
exp: re.compile(r'.[\s]*d[\s]*[0-9]*[\s]*[0-9*][\s]*[0-9]*[\s]*' + exp, re.IGNORECASE) for exp in [
r'(com\.google\.firebase[^\s]*)',
r'(com\.google\.android\.gms[^\s]*)',
r'(com\.google\.android\.play\.core[^\s]*)',
r'(com\.google\.tagmanager[^\s]*)',
r'(com\.google\.analytics[^\s]*)',
r'(com\.android\.billing[^\s]*)',
]
}
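# For reference, a line of `apkanalyzer dex packages --defined-only` output
# that the generated patterns are meant to match looks roughly like (sketch):
#   P d 1   1   93  com.google.firebase.analytics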
# Common known non-free blobs (always lower case):
NON_FREE_GRADLE_LINES = {
exp: re.compile(r'.*' + exp, re.IGNORECASE) for exp in [
r'flurryagent',
r'paypal.*mpl',
r'admob.*sdk.*android',
r'google.*ad.*view',
r'google.*admob',
r'google.*play.*services',
r'com.google.android.play:core.*',
r'androidx.work:work-gcm',
r'crittercism',
r'heyzap',
r'jpct.*ae',
r'youtube.*android.*player.*api',
r'bugsense',
r'crashlytics',
r'ouya.*sdk',
r'libspen23',
r'firebase',
r'''["']com.facebook.android['":]''',
r'cloudrail',
r'com.tencent.bugly',
r'appcenter-push',
]
}
def get_gradle_compile_commands(build):
compileCommands = ['compile',
'provided',
'apk',
'implementation',
'api',
'compileOnly',
'runtimeOnly']
buildTypes = ['', 'release']
flavors = ['']
if build.gradle and build.gradle != ['yes']:
flavors += build.gradle
commands = [''.join(c) for c in itertools.product(flavors, buildTypes, compileCommands)]
return [re.compile(r'\s*' + c, re.IGNORECASE) for c in commands]
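# For a build with build.gradle = ['free'] this yields case-insensitive
# matchers for configuration names such as 'compile', 'releaseCompile',
# 'freeImplementation' or 'freeReleaseApi' (sketch, not an exhaustive list).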
def scan_binary(apkfile):
"""Scan output of apkanalyzer for known non-free classes
apkanalyzer produces useful output when it can run, but it does
not support all recent JDK versions, and also some DEX versions,
so this cannot count on it to always produce useful output or even
to run without exiting with an error.
"""
logging.info(_('Scanning APK with apkanalyzer for known non-free classes.'))
result = common.SdkToolsPopen(["apkanalyzer", "dex", "packages", "--defined-only", apkfile], output=False)
if result.returncode != 0:
        logging.warning(_('scanner could not cleanly run apkanalyzer: %s') % result.output)
problems = 0
for suspect, regexp in CODE_SIGNATURES.items():
matches = regexp.findall(result.output)
if matches:
for m in set(matches):
logging.debug("Found class '%s'" % m)
problems += 1
if problems:
logging.critical("Found problems in %s" % apkfile)
return problems
def scan_source(build_dir, build=metadata.Build()):
"""Scan the source code in the given directory (and all subdirectories)
and return the number of fatal problems encountered
"""
count = 0
whitelisted = [
'firebase-jobdispatcher', # https://github.com/firebase/firebase-jobdispatcher-android/blob/master/LICENSE
'com.firebaseui', # https://github.com/firebase/FirebaseUI-Android/blob/master/LICENSE
'geofire-android' # https://github.com/firebase/geofire-java/blob/master/LICENSE
]
def is_whitelisted(s):
return any(wl in s for wl in whitelisted)
def suspects_found(s):
for n, r in NON_FREE_GRADLE_LINES.items():
if r.match(s) and not is_whitelisted(s):
yield n
allowed_repos = [re.compile(r'^https://' + re.escape(repo) + r'/*') for repo in [
'repo1.maven.org/maven2', # mavenCentral()
'jcenter.bintray.com', # jcenter()
'jitpack.io',
'www.jitpack.io',
'repo.maven.apache.org/maven2',
'oss.jfrog.org/artifactory/oss-snapshot-local',
'oss.sonatype.org/content/repositories/snapshots',
'oss.sonatype.org/content/repositories/releases',
'oss.sonatype.org/content/groups/public',
'clojars.org/repo', # Clojure free software libs
's3.amazonaws.com/repo.commonsware.com', # CommonsWare
'plugins.gradle.org/m2', # Gradle plugin repo
'maven.google.com', # Google Maven Repo, https://developer.android.com/studio/build/dependencies.html#google-maven
]
] + [re.compile(r'^file://' + re.escape(repo) + r'/*') for repo in [
'/usr/share/maven-repo', # local repo on Debian installs
]
]
scanignore = common.getpaths_map(build_dir, build.scanignore)
scandelete = common.getpaths_map(build_dir, build.scandelete)
scanignore_worked = set()
scandelete_worked = set()
def toignore(path_in_build_dir):
for k, paths in scanignore.items():
for p in paths:
if path_in_build_dir.startswith(p):
scanignore_worked.add(k)
return True
return False
def todelete(path_in_build_dir):
for k, paths in scandelete.items():
for p in paths:
if path_in_build_dir.startswith(p):
scandelete_worked.add(k)
return True
return False
def ignoreproblem(what, path_in_build_dir):
"""
:param what: string describing the problem, will be printed in log messages
:param path_in_build_dir: path to the file relative to `build`-dir
"returns: 0 as we explicitly ignore the file, so don't count an error
"""
msg = ('Ignoring %s at %s' % (what, path_in_build_dir))
logging.info(msg)
if json_per_build is not None:
json_per_build['infos'].append([msg, path_in_build_dir])
return 0
def removeproblem(what, path_in_build_dir, filepath):
"""
:param what: string describing the problem, will be printed in log messages
:param path_in_build_dir: path to the file relative to `build`-dir
:param filepath: Path (relative to our current path) to the file
"returns: 0 as we deleted the offending file
"""
msg = ('Removing %s at %s' % (what, path_in_build_dir))
logging.info(msg)
if json_per_build is not None:
json_per_build['infos'].append([msg, path_in_build_dir])
try:
os.remove(filepath)
except FileNotFoundError:
# File is already gone, nothing to do.
            # This can happen if we find multiple problems in one file that is set up for scandelete
            # I.e. build.gradle files containing multiple unknown maven repos.
pass
return 0
def warnproblem(what, path_in_build_dir):
"""
:param what: string describing the problem, will be printed in log messages
:param path_in_build_dir: path to the file relative to `build`-dir
:returns: 0, as warnings don't count as errors
"""
if toignore(path_in_build_dir):
return 0
logging.warning('Found %s at %s' % (what, path_in_build_dir))
if json_per_build is not None:
json_per_build['warnings'].append([what, path_in_build_dir])
return 0
def handleproblem(what, path_in_build_dir, filepath):
"""Dispatches to problem handlers (ignore, delete, warn) or returns 1
for increasing the error count
:param what: string describing the problem, will be printed in log messages
:param path_in_build_dir: path to the file relative to `build`-dir
:param filepath: Path (relative to our current path) to the file
:returns: 0 if the problem was ignored/deleted/is only a warning, 1 otherwise
"""
if toignore(path_in_build_dir):
return ignoreproblem(what, path_in_build_dir)
if todelete(path_in_build_dir):
return removeproblem(what, path_in_build_dir, filepath)
if 'src/test' in path_in_build_dir or '/test/' in path_in_build_dir:
return warnproblem(what, path_in_build_dir)
if options and 'json' in vars(options) and options.json:
json_per_build['errors'].append([what, path_in_build_dir])
if options and (options.verbose or not ('json' in vars(options) and options.json)):
logging.error('Found %s at %s' % (what, path_in_build_dir))
return 1
def is_executable(path):
return os.path.exists(path) and os.access(path, os.X_OK)
textchars = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f})
def is_binary(path):
d = None
with open(path, 'rb') as f:
d = f.read(1024)
return bool(d.translate(None, textchars))
# False positives patterns for files that are binary and executable.
safe_paths = [re.compile(r) for r in [
r".*/drawable[^/]*/.*\.png$", # png drawables
r".*/mipmap[^/]*/.*\.png$", # png mipmaps
]
]
def is_image_file(path):
if imghdr.what(path) is not None:
return True
def safe_path(path_in_build_dir):
for sp in safe_paths:
if sp.match(path_in_build_dir):
return True
return False
gradle_compile_commands = get_gradle_compile_commands(build)
def is_used_by_gradle(line):
return any(command.match(line) for command in gradle_compile_commands)
# Iterate through all files in the source code
for root, dirs, files in os.walk(build_dir, topdown=True):
# It's topdown, so checking the basename is enough
for ignoredir in ('.hg', '.git', '.svn', '.bzr'):
if ignoredir in dirs:
dirs.remove(ignoredir)
for curfile in files:
if curfile in ['.DS_Store']:
continue
# Path (relative) to the file
filepath = os.path.join(root, curfile)
if os.path.islink(filepath):
continue
path_in_build_dir = os.path.relpath(filepath, build_dir)
extension = os.path.splitext(path_in_build_dir)[1]
if curfile in ('gradle-wrapper.jar', 'gradlew', 'gradlew.bat'):
removeproblem(curfile, path_in_build_dir, filepath)
elif extension == '.apk':
removeproblem(_('Android APK file'), path_in_build_dir, filepath)
elif extension == '.a':
count += handleproblem(_('static library'), path_in_build_dir, filepath)
elif extension == '.aar':
count += handleproblem(_('Android AAR library'), path_in_build_dir, filepath)
elif extension == '.class':
count += handleproblem(_('Java compiled class'), path_in_build_dir, filepath)
elif extension == '.dex':
count += handleproblem(_('Android DEX code'), path_in_build_dir, filepath)
elif extension == '.gz':
count += handleproblem(_('gzip file archive'), path_in_build_dir, filepath)
elif extension == '.so':
count += handleproblem(_('shared library'), path_in_build_dir, filepath)
elif extension == '.zip':
count += handleproblem(_('ZIP file archive'), path_in_build_dir, filepath)
elif extension == '.jar':
for name in suspects_found(curfile):
count += handleproblem('usual suspect \'%s\'' % name, path_in_build_dir, filepath)
count += handleproblem(_('Java JAR file'), path_in_build_dir, filepath)
elif extension == '.java':
if not os.path.isfile(filepath):
continue
with open(filepath, 'r', errors='replace') as f:
for line in f:
if 'DexClassLoader' in line:
count += handleproblem('DexClassLoader', path_in_build_dir, filepath)
break
elif extension == '.gradle':
if not os.path.isfile(filepath):
continue
with open(filepath, 'r', errors='replace') as f:
lines = f.readlines()
for i, line in enumerate(lines):
if is_used_by_gradle(line):
for name in suspects_found(line):
count += handleproblem("usual suspect \'%s\'" % (name),
path_in_build_dir, filepath)
noncomment_lines = [line for line in lines if not common.gradle_comment.match(line)]
no_comments = re.sub(r'/\*.*?\*/', '', ''.join(noncomment_lines), flags=re.DOTALL)
for url in MAVEN_URL_REGEX.findall(no_comments):
if not any(r.match(url) for r in allowed_repos):
count += handleproblem('unknown maven repo \'%s\'' % url, path_in_build_dir, filepath)
elif extension in ['', '.bin', '.out', '.exe']:
if is_binary(filepath):
count += handleproblem('binary', path_in_build_dir, filepath)
elif is_executable(filepath):
if is_binary(filepath) and not (safe_path(path_in_build_dir) or is_image_file(filepath)):
warnproblem(_('executable binary, possibly code'), path_in_build_dir)
for p in scanignore:
if p not in scanignore_worked:
logging.error(_('Unused scanignore path: %s') % p)
count += 1
for p in scandelete:
if p not in scandelete_worked:
logging.error(_('Unused scandelete path: %s') % p)
count += 1
return count
def main():
global config, options, json_per_build
# Parse command line...
parser = ArgumentParser(usage="%(prog)s [options] [APPID[:VERCODE] [APPID[:VERCODE] ...]]")
common.setup_global_opts(parser)
parser.add_argument("appid", nargs='*', help=_("application ID with optional versionCode in the form APPID[:VERCODE]"))
parser.add_argument("-f", "--force", action="store_true", default=False,
help=_("Force scan of disabled apps and builds."))
parser.add_argument("--json", action="store_true", default=False,
help=_("Output JSON to stdout."))
metadata.add_metadata_arguments(parser)
options = parser.parse_args()
metadata.warnings_action = options.W
json_output = dict()
if options.json:
if options.verbose:
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
else:
logging.getLogger().setLevel(logging.ERROR)
config = common.read_config(options)
# Read all app and srclib metadata
allapps = metadata.read_metadata()
apps = common.read_app_args(options.appid, allapps, True)
probcount = 0
build_dir = 'build'
if not os.path.isdir(build_dir):
logging.info("Creating build directory")
os.makedirs(build_dir)
srclib_dir = os.path.join(build_dir, 'srclib')
extlib_dir = os.path.join(build_dir, 'extlib')
for appid, app in apps.items():
json_per_appid = dict()
if app.Disabled and not options.force:
logging.info(_("Skipping {appid}: disabled").format(appid=appid))
            json_per_build['infos'].append('Skipping: disabled')
            json_per_appid['disabled'] = json_per_build
continue
try:
if app.RepoType == 'srclib':
build_dir = os.path.join('build', 'srclib', app.Repo)
else:
build_dir = os.path.join('build', appid)
if app.get('Builds'):
logging.info(_("Processing {appid}").format(appid=appid))
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
else:
logging.info(_("{appid}: no builds specified, running on current source state")
.format(appid=appid))
json_per_build = DEFAULT_JSON_PER_BUILD
json_per_appid['current-source-state'] = json_per_build
count = scan_source(build_dir)
if count > 0:
logging.warning(_('Scanner found {count} problems in {appid}:')
.format(count=count, appid=appid))
probcount += count
app['Builds'] = []
for build in app.get('Builds', []):
json_per_build = DEFAULT_JSON_PER_BUILD
json_per_appid[build.versionCode] = json_per_build
if build.disable and not options.force:
logging.info("...skipping version %s - %s" % (
build.versionName, build.get('disable', build.commit[1:])))
continue
logging.info("...scanning version " + build.versionName)
# Prepare the source code...
common.prepare_source(vcs, app, build,
build_dir, srclib_dir,
extlib_dir, False)
count = scan_source(build_dir, build)
if count > 0:
logging.warning(_('Scanner found {count} problems in {appid}:{versionCode}:')
.format(count=count, appid=appid, versionCode=build.versionCode))
probcount += count
except BuildException as be:
logging.warning('Could not scan app %s due to BuildException: %s' % (
appid, be))
probcount += 1
except VCSException as vcse:
logging.warning('VCS error while scanning app %s: %s' % (appid, vcse))
probcount += 1
except Exception:
logging.warning('Could not scan app %s due to unknown error: %s' % (
appid, traceback.format_exc()))
probcount += 1
for k, v in json_per_appid.items():
if len(v['errors']) or len(v['warnings']) or len(v['infos']):
json_output[appid] = json_per_appid
break
logging.info(_("Finished"))
if options.json:
print(json.dumps(json_output))
else:
print(_("%d problems found") % probcount)
if __name__ == "__main__":
main()
| agpl-3.0 | 577,908,906,297,143,800 | 39.276553 | 123 | 0.574535 | false |
redtoad/python-amazon-product-api | amazonproduct/contrib/retry.py | 1 | 1626 |
import socket
import time
import sys
# support Python 2 and Python 3 without conversion
try:
from urllib.request import URLError
except ImportError:
from urllib2 import URLError
from amazonproduct.api import API
class RetryAPI (API):
"""
API which will try up to ``TRIES`` times to fetch a result from Amazon
should it run into a timeout. For the time being this will remain in
:mod:`amazonproduct.contrib` but its functionality may be merged into the
main API at a later date.
Based on work by Jerry Ji
"""
#: Max number of tries before giving up
TRIES = 5
#: Delay between tries in seconds
DELAY = 3
#: Between each try the delay will be lengthened by this backoff multiplier
BACKOFF = 1
def _fetch(self, url):
"""
Retrieves XML response from Amazon. In case of a timeout, it will try
:const:`~RetryAPI.TRIES`` times before raising an error.
"""
attempts = 0
delay = self.DELAY
while True:
try:
attempts += 1
return API._fetch(self, url)
except URLError:
e = sys.exc_info()[1] # Python 2/3 compatible
# if a timeout occurred
# wait for some time before trying again
reason = getattr(e, 'reason', None)
if isinstance(reason, socket.timeout) and attempts < self.TRIES:
time.sleep(delay)
delay *= self.BACKOFF
continue
# otherwise reraise the original error
raise
| bsd-3-clause | -2,704,879,142,169,400,000 | 26.559322 | 80 | 0.589176 | false |
mapr/sahara | sahara/plugins/mapr/plugin.py | 1 | 2831 | # Copyright (c) 2015, MapR Technologies
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sahara.plugins.mapr.versions.version_handler_factory as vhf
import sahara.plugins.provisioning as p
class MapRPlugin(p.ProvisioningPluginBase):
title = 'MapR Hadoop Distribution'
description = ('The MapR Distribution provides a full Hadoop stack that'
' includes the MapR File System (MapR-FS), MapReduce,'
' a complete Hadoop ecosystem, and the MapR Control System'
' user interface')
user = 'mapr'
def _get_handler(self, hadoop_version):
return vhf.VersionHandlerFactory.get().get_handler(hadoop_version)
def get_title(self):
return MapRPlugin.title
def get_description(self):
return MapRPlugin.description
def get_versions(self):
return vhf.VersionHandlerFactory.get().get_versions()
def get_node_processes(self, hadoop_version):
return self._get_handler(hadoop_version).get_np_dict()
def get_configs(self, hadoop_version):
return self._get_handler(hadoop_version).get_configs()
def configure_cluster(self, cluster):
self._get_handler(cluster.hadoop_version).configure_cluster(cluster)
def start_cluster(self, cluster):
self._get_handler(cluster.hadoop_version).start_cluster(cluster)
def validate(self, cluster):
self._get_handler(cluster.hadoop_version).validate(cluster)
def validate_scaling(self, cluster, existing, additional):
v_handler = self._get_handler(cluster.hadoop_version)
v_handler.validate_scaling(cluster, existing, additional)
def scale_cluster(self, cluster, instances):
v_handler = self._get_handler(cluster.hadoop_version)
v_handler.scale_cluster(cluster, instances)
def decommission_nodes(self, cluster, instances):
v_handler = self._get_handler(cluster.hadoop_version)
v_handler.decommission_nodes(cluster, instances)
def get_edp_engine(self, cluster, job_type):
v_handler = self._get_handler(cluster.hadoop_version)
return v_handler.get_edp_engine(cluster, job_type)
def get_open_ports(self, node_group):
v_handler = self._get_handler(node_group.cluster.hadoop_version)
return v_handler.get_open_ports(node_group)
| apache-2.0 | -8,880,976,792,963,895,000 | 37.780822 | 78 | 0.702932 | false |
jkettleb/iris | lib/iris/tests/unit/experimental/um/test_Field.py | 1 | 6019 | # (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Unit tests for :class:`iris.experimental.um.Field`.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests
import mock
import numpy as np
from iris.experimental.um import Field
class Test_int_headers(tests.IrisTest):
def test(self):
field = Field(np.arange(45), list(range(19)), None)
self.assertArrayEqual(field.int_headers, np.arange(45))
class Test_real_headers(tests.IrisTest):
def test(self):
field = Field(list(range(45)), np.arange(19), None)
self.assertArrayEqual(field.real_headers, np.arange(19))
class Test___eq__(tests.IrisTest):
def test_equal(self):
field1 = Field(list(range(45)), list(range(19)), None)
field2 = Field(np.arange(45), np.arange(19), None)
self.assertTrue(field1.__eq__(field2))
def test_not_equal_ints(self):
field1 = Field(list(range(45)), list(range(19)), None)
field2 = Field(np.arange(45, 90), np.arange(19), None)
self.assertFalse(field1.__eq__(field2))
def test_not_equal_reals(self):
field1 = Field(list(range(45)), list(range(19)), None)
field2 = Field(np.arange(45), np.arange(19, 38), None)
self.assertFalse(field1.__eq__(field2))
def test_not_equal_data(self):
field1 = Field(list(range(45)), list(range(19)), None)
field2 = Field(np.arange(45), np.arange(19), np.zeros(3))
self.assertFalse(field1.__eq__(field2))
def test_invalid(self):
field1 = Field(list(range(45)), list(range(19)), None)
self.assertIs(field1.__eq__('foo'), NotImplemented)
class Test___ne__(tests.IrisTest):
def test_equal(self):
field1 = Field(list(range(45)), list(range(19)), None)
field2 = Field(np.arange(45), np.arange(19), None)
self.assertFalse(field1.__ne__(field2))
def test_not_equal_ints(self):
field1 = Field(list(range(45)), list(range(19)), None)
field2 = Field(np.arange(45, 90), np.arange(19), None)
self.assertTrue(field1.__ne__(field2))
def test_not_equal_reals(self):
field1 = Field(list(range(45)), list(range(19)), None)
field2 = Field(np.arange(45), np.arange(19, 38), None)
self.assertTrue(field1.__ne__(field2))
def test_not_equal_data(self):
field1 = Field(list(range(45)), list(range(19)), None)
field2 = Field(np.arange(45), np.arange(19), np.zeros(3))
self.assertTrue(field1.__ne__(field2))
def test_invalid(self):
field1 = Field(list(range(45)), list(range(19)), None)
self.assertIs(field1.__ne__('foo'), NotImplemented)
class Test_num_values(tests.IrisTest):
def test_64(self):
field = Field(list(range(45)), list(range(19)), None)
self.assertEqual(field.num_values(), 64)
def test_128(self):
field = Field(list(range(45)), list(range(83)), None)
self.assertEqual(field.num_values(), 128)
class Test_get_data(tests.IrisTest):
def test_None(self):
field = Field([], [], None)
self.assertIsNone(field.get_data())
def test_ndarray(self):
data = np.arange(12).reshape(3, 4)
field = Field([], [], data)
self.assertIs(field.get_data(), data)
def test_provider(self):
provider = mock.Mock(read_data=lambda: mock.sentinel.DATA)
field = Field([], [], provider)
self.assertIs(field.get_data(), mock.sentinel.DATA)
class Test_set_data(tests.IrisTest):
def test_None(self):
data = np.arange(12).reshape(3, 4)
field = Field([], [], data)
field.set_data(None)
self.assertIsNone(field.get_data())
def test_ndarray(self):
field = Field([], [], None)
data = np.arange(12).reshape(3, 4)
field.set_data(data)
self.assertArrayEqual(field.get_data(), data)
def test_provider(self):
provider = mock.Mock(read_data=lambda: mock.sentinel.DATA)
field = Field([], [], None)
field.set_data(provider)
self.assertIs(field.get_data(), mock.sentinel.DATA)
class Test__can_copy_deferred_data(tests.IrisTest):
def _check_formats(self,
old_lbpack, new_lbpack,
old_bacc=-6, new_bacc=-6,
absent_provider=False):
lookup_entry = mock.Mock(lbpack=old_lbpack, bacc=old_bacc)
provider = mock.Mock(lookup_entry=lookup_entry)
if absent_provider:
# Replace the provider with a simple array.
provider = np.zeros(2)
field = Field(list(range(45)), list(range(19)), provider)
return field._can_copy_deferred_data(new_lbpack, new_bacc)
def test_okay_simple(self):
self.assertTrue(self._check_formats(1234, 1234))
def test_fail_different_lbpack(self):
self.assertFalse(self._check_formats(1234, 1238))
def test_fail_nodata(self):
self.assertFalse(self._check_formats(1234, 1234, absent_provider=True))
def test_fail_different_bacc(self):
self.assertFalse(self._check_formats(1234, 1234, new_bacc=-8))
if __name__ == '__main__':
tests.main()
| lgpl-3.0 | 4,416,094,728,972,679,700 | 33.994186 | 79 | 0.633328 | false |
pfwangthu/Convolutional-Neural-Networks | display.py | 1 | 7128 | import matplotlib
#Force matplotlib to not use any Xwindows backend.
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import scipy.io as sio
import os
import sys
import numpy
import theano
import theano.tensor as T
import gzip
import cPickle
from convolutional_mlp import LeNetConvPoolLayer
from logistic_sgd import LogisticRegression
from mlp import HiddenLayer
def display(params, digit, epoch, mode = 'mat', size = (56, 56)):
#epoch contains a list of numbers to show
#for example, epoch = [0, 2, 4] can show epoch 0 (original stage) and epoch 2 4
#after running the CNN, params can be used directly, and can also use numpy.load('params.npy') to get
#digit is a single digit of image set, for example, digit = train_set_x.get_value()[number]
nkerns=[20, 50]
rng = numpy.random.RandomState(23455)
#show original digit
if os.path.exists('digit') == 0:
os.mkdir('digit')
if mode == 'png':
plt.figure(1)
plt.gray()
plt.axis('off')
plt.imshow(digit.reshape(size))
plt.savefig('digit/activity of layer0 (original digit).png')
digit = digit.reshape(1, 1, size[0], size[1])
inputdigit = T.tensor4()
#building CNN with exactly the same parameters
print '...building layer1'
layer0_input = inputdigit
layer0 = LeNetConvPoolLayer(rng, input=layer0_input,
image_shape=(1, 1, size[0], size[1]),
filter_shape=(nkerns[0], 1, 5, 5), poolsize=(2, 2))
print '...building layer2'
layer1 = LeNetConvPoolLayer(rng, input=layer0.output,
image_shape=(1, nkerns[0], (size[0] - 4) / 2, (size[1] - 4) / 2),
filter_shape=(nkerns[1], nkerns[0], 5, 5), poolsize=(2, 2))
print '...building layer3'
layer2_input = layer1.output.flatten(2)
layer2 = HiddenLayer(rng, input=layer2_input, n_in=nkerns[1] * (size[0] / 4 - 3) * (size[1] / 4 - 3),
n_out=500, activation=T.tanh)
print '...building layer4'
layer3 = LogisticRegression(input=layer2.output, n_in=500, n_out=10)
f = theano.function(inputs = [inputdigit], outputs = [layer0.conv_out, layer0.output, layer1.conv_out, layer1.output, layer2.output, layer3.p_y_given_x, layer3.y_pred])
#export filters and activity in different epochs
for num in epoch:
print '...epoch ' + str(num)
layer3.W.set_value(params[num][0])
layer3.b.set_value(params[num][1])
layer2.W.set_value(params[num][2])
layer2.b.set_value(params[num][3])
layer1.W.set_value(params[num][4])
layer1.b.set_value(params[num][5])
layer0.W.set_value(params[num][6])
layer0.b.set_value(params[num][7])
[conv0, output0, conv1, output1, output2, output3, y] = f(digit)
if mode == 'png':
plt.figure(2)
plt.gray()
for i in range(nkerns[0]):
plt.subplot(4, 5, i + 1)
plt.axis('off')
plt.imshow(layer0.W.get_value()[i, 0])
plt.savefig('digit/filter of layer1 in epoch ' + str(num) + '.png')
plt.figure(3)
plt.gray()
for i in range(nkerns[1]):
plt.subplot(5, 10, i + 1)
plt.axis('off')
plt.imshow(layer1.W.get_value()[i, 0])
plt.savefig('digit/filter of layer2 in epoch ' + str(num) + '.png')
plt.figure(4)
plt.gray()
plt.axis('off')
plt.imshow(layer2.W.get_value())
plt.savefig('digit/filter of layer3 in epoch ' + str(num) + '.png')
plt.figure(5)
plt.gray()
plt.axis('off')
plt.imshow(layer3.W.get_value())
plt.savefig('digit/filter of layer4 in epoch ' + str(num) + '.png')
plt.figure(6)
plt.gray()
for i in range(nkerns[0]):
plt.subplot(4, 5, i + 1)
plt.axis('off')
plt.imshow(output0[0, i])
plt.savefig('digit/activity of layer1 after downsampling in epoch ' + str(num) + '.png')
plt.figure(7)
plt.gray()
plt.axis('off')
for i in range(nkerns[1]):
plt.subplot(5, 10, i + 1)
plt.axis('off')
plt.imshow(conv1[0, i])
plt.savefig('digit/activity of layer2 before downsampling in epoch ' + str(num) + '.png')
plt.figure(8)
plt.gray()
plt.axis('off')
for i in range(nkerns[0]):
plt.subplot(4, 5, i + 1)
plt.axis('off')
plt.imshow(conv0[0, i])
plt.savefig('digit/activity of layer1 before downsampling in epoch ' + str(num) + '.png')
plt.figure(9)
plt.gray()
for i in range(nkerns[1]):
plt.subplot(5, 10, i + 1)
plt.axis('off')
plt.imshow(output1[0, i])
plt.savefig('digit/activity of layer2 after downsampling in epoch ' + str(num) + '.png')
plt.figure(10)
plt.gray()
plt.axis('off')
plt.imshow(numpy.tile(output2, (10, 1)))
plt.savefig('digit/activity of layer3 in epoch ' + str(num) + '.png')
plt.figure(11)
plt.gray()
plt.axis('off')
plt.imshow(numpy.tile(output3, (10, 1)))
plt.savefig('digit/activity of layer4 in epoch ' + str(num) + '.png')
if mode == 'mat':
sio.savemat('digit in epoch ' + str(num) + '.mat', {'ActivityOfLayer0' : digit.reshape(size),
'ActivityOfLayer1before' : conv0[0],
'ActivityOfLayer1after' : output0[0],
'ActivityOfLayer2before' : conv1[0],
'ActivityOfLayer2after' : output1[0],
'ActivityOfLayer3' : output2,
'ActivityOfLayer4' : output3,
'FilterOfLayer1' : layer0.W.get_value()[:, 0, :, :],
'FilterOfLayer2' : layer1.W.get_value()[:, 0, :, :],
'FilterOfLayer3' : layer2.W.get_value(),
'FilterOfLayer4' : layer3.W.get_value(),
'y_predict' : y})
return y
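# Usage sketch (file names are illustrative; `params.npy` is produced by the
# training script referred to in the comments above):
#   params = numpy.load('params.npy')
#   digit = numpy.load('digit.npy')   # or train_set_x.get_value()[n]
#   y = display(params, digit, epoch=[0, 2, 4], mode='png', size=(56, 56))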
if __name__ == '__main__':
#when using shell, the first parameter is name of digit as .npy format
#the second and other parameters are the epochs to export
params = numpy.load('params.npy')
if sys.argv[1].find('.npy') != -1:
digit = numpy.load(sys.argv[1])
elif sys.argv[1].find('.txt') != -1:
digit = numpy.loadtxt(sys.argv[1])
size = [int(sys.argv[3]), int(sys.argv[4])]
epoch = []
for i in sys.argv[5:]:
epoch.append(int(i))
    y = display(params, digit, epoch, sys.argv[2], size)
print 'classification result of ' + sys.argv[1] + ' is ' + str(y) | mpl-2.0 | -2,081,077,153,515,145,500 | 36.128342 | 172 | 0.525814 | false |
umich-brcf-bioinf/Jacquard | jacquard/utils/command_validator.py | 1 | 7653 | """Validates command preconditions.
Specifically checks that the command, arguments, and environment
(e.g. input/output directories or files) are consistent and plausible.
Each validation function evaluates a specific precondition.
Each function is allowed to:
* change the environment (e.g. create a dir)
* change the args (replace the original output dir with a new temp output dir)
* add arguments which may be required for sub-commands
* delegate to/interact with a sub-command
* raise a UsageException if things look problematic
"""
from __future__ import print_function, absolute_import, division
import errno
import glob
import os
import time
import jacquard.utils.utils as utils
_TEMP_WORKING_DIR_FORMAT = "jacquard.{}.{}.tmp"
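# e.g. _TEMP_WORKING_DIR_FORMAT.format(4242, 1500000000000000) gives
# 'jacquard.4242.1500000000000000.tmp' (pid and microseconds since epoch;
# values illustrative)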
def _actual_type(path):
if os.path.isdir(path):
return "directory"
else:
return "file"
def _build_collision_message(command, collisions):
total_collisions = len(collisions)
if total_collisions == 1:
return ("The {} command would "
"overwrite the existing file [{}]; review "
"command/output dir to avoid overwriting or "
"use the flag '--force'.").format(command,
collisions[0])
cutoff = 5
collision_list = ", ".join(collisions[0:min(cutoff, total_collisions)])
if total_collisions > cutoff:
omitted = total_collisions - cutoff
collision_list += ", ...({} file(s) omitted)".format(omitted)
return ("The {} command would "
"overwrite {} existing files [{}]; review "
"command/output dir to avoid overwriting or "
"use the flag '--force'.").format(command,
total_collisions,
collision_list)
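# For example (sketch), _build_collision_message('translate', ['a.vcf', 'b.vcf'])
# would return: "The translate command would overwrite 2 existing files
# [a.vcf, b.vcf]; review command/output dir to avoid overwriting or use the
# flag '--force'."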
def _check_input_correct_type(dummy, args):
module_name = args.subparser_name
input_path = args.input
required_type = args.required_input_type
actual_type = _actual_type(input_path)
if required_type != actual_type:
raise utils.UsageError(("The {} command requires a {} as "
"input, but the specified input [{}] is a {}. "
"Review inputs and try again.") \
.format(module_name,
required_type,
input_path,
actual_type))
def _check_input_exists(dummy, args):
if not os.path.exists(args.input):
raise utils.UsageError(("Specified input [{}] does not exist. Review "\
"inputs and try again.").format(args.input))
def _check_input_readable(dummy, args):
try:
if os.path.isdir(args.input):
os.listdir(args.input)
else:
open(args.input, "r").close()
except (OSError, IOError):
raise utils.UsageError(("Specified input [{}] cannot be read. Review "
"inputs and try again.").format(args.input))
def _check_output_correct_type(module_name, output_path, required_type):
actual_type = _actual_type(output_path)
if required_type != actual_type:
raise utils.UsageError(("The {} command outputs a {}, but the "
"specified output [{}] is a {}. "
"Review inputs and try again.")\
.format(module_name,
required_type,
output_path,
actual_type))
def _check_output_exists(dummy, args):
if os.path.exists(args.output_path):
_check_output_correct_type(args.subparser_name,
args.output_path,
args.required_output_type)
def _check_overwrite_existing_files(module, args):
output = args.output_path
if not os.path.isdir(output):
output = os.path.dirname(output)
existing_output_paths = sorted(glob.glob(os.path.join(output, "*")))
existing_output = set([os.path.basename(i) for i in existing_output_paths])
predicted_output = module.report_prediction(args)
collisions = sorted(list(existing_output.intersection(predicted_output)))
if collisions and not args.force:
message = _build_collision_message(args.subparser_name, collisions)
raise utils.UsageError(message)
def _check_there_will_be_output(module, args):
predicted_output = module.report_prediction(args)
if not predicted_output:
message = ("Executing the {} command with the input [{}] would not "
"create any output files. Review inputs and try again.")\
.format(args.subparser_name, args.input)
raise utils.UsageError(message)
def _check_valid_args(module, args):
module.validate_args(args)
def _create_temp_working_dir(dummy, args):
try:
_makepath(args.temp_working_dir)
if args.required_output_type == "directory":
_makepath(args.output)
except OSError:
parent_dir = os.path.dirname(args.temp_working_dir)
raise utils.UsageError(("Jacquard cannot write to output directory "
"[{}]. Review inputs and try again.")\
.format(parent_dir))
def _set_temp_working_dir(dummy, args):
original_output = args.original_output
required_output = args.required_output_type
abs_original_output = os.path.abspath(original_output)
pid = os.getpid()
microseconds_since_epoch = int(time.time() * 1000 * 1000)
dir_name = _TEMP_WORKING_DIR_FORMAT.format(str(pid),
str(microseconds_since_epoch))
place_temp_inside_output = True
if required_output == "file":
place_temp_inside_output = False
elif required_output == "directory" and not os.path.isdir(original_output):
place_temp_inside_output = False
if place_temp_inside_output:
base_dir = abs_original_output
temp_working_dir = os.path.join(base_dir, dir_name)
new_output = temp_working_dir
else:
base_dir = os.path.dirname(abs_original_output)
temp_working_dir = os.path.join(base_dir, dir_name)
new_output = os.path.join(temp_working_dir,
os.path.basename(abs_original_output))
args.temp_working_dir = temp_working_dir
args.output = new_output
def _makepath(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else: raise
def _set_required_types(module, args):
(args.required_input_type,
args.required_output_type) = module.get_required_input_output_types()
def _set_output_paths(dummy, args):
args.original_output = args.output
args.output_path = os.path.abspath(args.original_output)
_VALIDATION_TASKS = [_set_output_paths,
_set_required_types,
_set_temp_working_dir,
_check_input_exists,
_check_input_readable,
_check_input_correct_type,
_check_output_exists,
_create_temp_working_dir,
_check_there_will_be_output,
_check_overwrite_existing_files,
_check_valid_args]
def preflight(command, args):
for validate in _VALIDATION_TASKS:
validate(command, args)
| apache-2.0 | 766,562,585,724,645,900 | 38.448454 | 79 | 0.580818 | false |
lptorres/noah-inasafe | web_api/third_party/simplejson/__init__.py | 1 | 22985 | r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print(json.dumps("\"foo\bar"))
"\"foo\bar"
>>> print(json.dumps(u'\u1234'))
"\u1234"
>>> print(json.dumps('\\'))
"\\"
>>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True))
{"a": 0, "b": 0, "c": 0}
>>> from simplejson.compat import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> obj = [1,2,3,{'4': 5, '6': 7}]
>>> json.dumps(obj, separators=(',',':'), sort_keys=True)
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' '))
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from simplejson.compat import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
...     raise TypeError(repr(obj) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 3 (char 2)
"""
from __future__ import absolute_import
__version__ = '3.3.0'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
'OrderedDict', 'simple_first',
]
__author__ = 'Bob Ippolito <[email protected]>'
from decimal import Decimal
from .scanner import JSONDecodeError
from .decoder import JSONDecoder
from .encoder import JSONEncoder, JSONEncoderForHTML
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
from . import ordered_dict
return ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
def _import_c_make_encoder():
try:
from ._speedups import make_encoder
return make_encoder
except ImportError:
return None
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
use_decimal=True,
namedtuple_as_object=True,
tuple_as_array=True,
bigint_as_string=False,
item_sort_key=None,
for_json=False,
ignore_nan=False,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
bigint_as_string=False, sort_keys=False, item_sort_key=None,
for_json=False, ignore_nan=False, **kw):
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
If *skipkeys* is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If *ensure_ascii* is false, then the some chunks written to ``fp``
may be ``unicode`` instances, subject to normal Python ``str`` to
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
to cause an error.
If *check_circular* is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If *allow_nan* is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
in strict compliance with the original JSON specification, instead of using
the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). See
*ignore_nan* for ECMA-262 compliant behavior.
If *indent* is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If specified, *separators* should be an
``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
compact JSON representation, you should specify ``(',', ':')`` to eliminate
whitespace.
*encoding* is the character encoding for str instances, default is UTF-8.
*default(obj)* is a function that should return a serializable version
of obj or raise ``TypeError``. The default simply raises ``TypeError``.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
If *bigint_as_string* is true (default: ``False``), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
rounding that happens in Javascript otherwise. Note that this is still a
lossy operation that will not round-trip correctly and should be used
sparingly.
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
will be sorted by item.
If *for_json* is true (default: ``False``), objects with a ``for_json()``
method will use the return value of that method for encoding as JSON
instead of the object.
If *ignore_nan* is true (default: ``False``), then out of range
:class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as
``null`` in compliance with the ECMA-262 specification. If true, this will
override *allow_nan*.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg. NOTE: You should use *default* or *for_json* instead
of subclassing whenever possible.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array
and not bigint_as_string and not item_sort_key
and not for_json and not ignore_nan and not kw):
iterable = _default_encoder.iterencode(obj)
else:
if cls is None:
cls = JSONEncoder
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding,
default=default, use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
bigint_as_string=bigint_as_string,
sort_keys=sort_keys,
item_sort_key=item_sort_key,
for_json=for_json,
ignore_nan=ignore_nan,
**kw).iterencode(obj)
# could accelerate with writelines in some versions of Python, at
# a debuggability cost
for chunk in iterable:
fp.write(chunk)
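# Illustrative usage sketch (not part of the library): streaming a small
# object to an in-memory buffer with ``dump``.
#
#     from simplejson.compat import StringIO
#     buf = StringIO()
#     dump({'b': 1, 'a': 2}, buf, indent='  ', sort_keys=True)
#     buf.getvalue()   # '{\n  "a": 2,\n  "b": 1\n}'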
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
bigint_as_string=False, sort_keys=False, item_sort_key=None,
for_json=False, ignore_nan=False, **kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is false then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance with the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If specified, ``separators`` should be an
``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
compact JSON representation, you should specify ``(',', ':')`` to eliminate
whitespace.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
If *bigint_as_string* is true (not the default), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
rounding that happens in Javascript otherwise.
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
will be sorted by item.
If *for_json* is true (default: ``False``), objects with a ``for_json()``
method will use the return value of that method for encoding as JSON
instead of the object.
If *ignore_nan* is true (default: ``False``), then out of range
:class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as
``null`` in compliance with the ECMA-262 specification. If true, this will
override *allow_nan*.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg. NOTE: You should use *default* instead of subclassing
whenever possible.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array
and not bigint_as_string and not sort_keys
and not item_sort_key and not for_json
and not ignore_nan and not kw):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding, default=default,
use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
bigint_as_string=bigint_as_string,
sort_keys=sort_keys,
item_sort_key=item_sort_key,
for_json=for_json,
ignore_nan=ignore_nan,
**kw).encode(obj)
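# Illustrative usage sketch (not part of the library): a couple of the less
# common ``dumps`` options documented above.
#
#     dumps(2**53 + 1, bigint_as_string=True)          # '"9007199254740993"'
#     dumps({'b': 1, 'a': 2}, item_sort_key=lambda kv: kv[0])
#     # '{"a": 2, "b": 1}'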
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, namedtuple_as_object=True, tuple_as_array=True,
**kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
of subclassing whenever possible.
"""
return loads(fp.read(),
encoding=encoding, cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
use_decimal=use_decimal, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
of subclassing whenever possible.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None
and not use_decimal and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if object_pairs_hook is not None:
kw['object_pairs_hook'] = object_pairs_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
if use_decimal:
if parse_float is not None:
raise TypeError("use_decimal=True implies parse_float=Decimal")
kw['parse_float'] = Decimal
return cls(encoding=encoding, **kw).decode(s)
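# Illustrative usage sketch (not part of the library): decoding with the
# hooks described above.
#
#     from decimal import Decimal
#     loads('1.1', use_decimal=True) == Decimal('1.1')          # True
#     loads('{"a": 1, "b": 2}', object_pairs_hook=OrderedDict)
#     # OrderedDict([('a', 1), ('b', 2)])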
def _toggle_speedups(enabled):
from . import decoder as dec
from . import encoder as enc
from . import scanner as scan
c_make_encoder = _import_c_make_encoder()
if enabled:
dec.scanstring = dec.c_scanstring or dec.py_scanstring
enc.c_make_encoder = c_make_encoder
enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
enc.py_encode_basestring_ascii)
scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
else:
dec.scanstring = dec.py_scanstring
enc.c_make_encoder = None
enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
scan.make_scanner = scan.py_make_scanner
dec.make_scanner = scan.make_scanner
global _default_decoder
_default_decoder = JSONDecoder(
encoding=None,
object_hook=None,
object_pairs_hook=None,
)
global _default_encoder
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
)
def simple_first(kv):
"""Helper function to pass to item_sort_key to sort simple
elements to the top, then container elements.
"""
return (isinstance(kv[1], (list, dict, tuple)), kv[0])
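# Illustrative usage sketch (not part of the library): sorting simple values
# before containers when encoding.
#
#     dumps({'b': [1, 2], 'a': 3}, item_sort_key=simple_first)
#     # '{"a": 3, "b": [1, 2]}'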
| gpl-3.0 | 5,512,813,960,546,909,000 | 40.02011 | 79 | 0.641157 | false |
cmacro/flaskblog | app/main/views.py | 1 | 9124 | from flask import render_template, redirect, url_for, abort, flash, request, current_app, make_response
from flask_login import login_required, current_user
from . import main
from ..models import User, Role, Permission, Post, Comment
from ..decorators import admin_required, permission_required
from .. import db
from .forms import EditProfileForm, EditProfileAdminForm, PostForm, CommentForm
@main.route('/', methods = ['GET', 'POST'])
def index():
form = PostForm()
if current_user.can(Permission.WRITE_ARTICLES) and \
form.validate_on_submit():
post = Post(body=form.body.data, author=current_user._get_current_object())
db.session.add(post)
return redirect(url_for('.index'))
show_followed = False
if current_user.is_authenticated:
show_followed = bool(request.cookies.get('show_followed', ''))
if show_followed:
query = current_user.followed_posts
else:
query = Post.query
# Pagination handling
page = request.args.get('page', 1, type=int)
pagination = query.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts= pagination.items
cfgtag=current_app.config['SQLALCHEMY_DATABASE_URI']
return render_template('index.html', form=form, posts=posts,
show_followed=show_followed, pagination=pagination, cfgtag=cfgtag)
def show_index_resp(followed):
resp = make_response(redirect(url_for('.index')))
resp.set_cookie('show_followed', followed, max_age=30*24*60*60)
return resp
@main.route('/all')
@login_required
def show_all():
return show_index_resp('')
@main.route('/followed')
@login_required
def show_followed():
return show_index_resp('1')
@main.route('/user/<username>')
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
pagination = user.posts.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('user.html', user=user, posts=posts,
pagination=pagination)
@main.route('/edit-profile', methods = ['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.about_me = form.about_me.data
db.session.add(current_user)
flash('Your profile has been updated.')
return redirect(url_for('.user', username=current_user.username))
form.name.data = current_user.name
form.location.data = current_user.location
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', form = form)
@main.route('/edit-profile/<int:id>', methods = ['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
user = User.query.get_or_404(id)
form = EditProfileAdminForm(user=user)
if form.validate_on_submit():
user.email = form.email.data
user.username = form.username.data
user.confirmed = form.confirmed.data
user.role = Role.query.get(form.role.data)
user.name = form.name.data
user.location = form.location.data
user.about_me = form.about_me.data
db.session.add(user)
flash('The profile has been updated.')
return redirect(url_for('.user', username=user.username))
form.email.data = user.email
form.username.data = user.username
form.confirmed.data = user.confirmed
form.role.data = user.role_id
form.name.data = user.name
form.location.data = user.location
form.about_me.data = user.about_me
return render_template('edit_profile.html', form=form, user=user)
@main.route('/post/<int:id>', methods=['GET', 'POST'])
def post(id):
post = Post.query.get_or_404(id)
form = CommentForm()
if form.validate_on_submit():
comment = Comment(body=form.body.data,
post=post,
author=current_user._get_current_object())
db.session.add(comment)
flash('Your comment has been published.')
return redirect(url_for('.post', id=post.id, page=-1))
page = request.args.get('page', 1, type=int)
if page == -1:
page = (post.comments.count() - 1) / current_app.config['FLASKY_COMMENTS_PER_PAGE'] + 1
pagination = post.comments.order_by(Comment.timestamp.asc()).paginate(
page, per_page=current_app.config['FLASKY_COMMENTS_PER_PAGE'],
error_out=False)
comments = pagination.items
return render_template('post.html', posts=[post], form=form,
comments=comments, pagination=pagination)
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit(id):
post = Post.query.get_or_404(id)
if current_user != post.author and \
not current_user.can(Permission.ADMINISTER):
abort(403)
form = PostForm()
if form.validate_on_submit():
post.body = form.body.data
db.session.add(post)
flash('The post has been updated.')
return redirect(url_for('.post', id=post.id))
form.body.data = post.body
return render_template('edit_post.html', form=form)
@main.route('/follow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def follow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
if current_user.is_following(user):
flash('You are already following this user.')
return redirect(url_for('.user', username=username))
current_user.follow(user)
flash('You are now following %s.' % username)
return redirect(url_for('.user', username=username))
@main.route('/unfollow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def unfollow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
if not current_user.is_following(user):
flash('You are not following this user.')
return redirect(url_for('.user', username=username))
current_user.unfollow(user)
flash('You are not following %s anymore.' % username)
return redirect(url_for('.user', username=username))
@main.route('/followers/<username>')
def followers(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination= user.followers.paginate(
page, per_page=current_app.config['FLASKY_FOLLOWERS_PER_PAGE'],
error_out=False)
follows = [{'user': item.follower, 'timestamp': item.timestamp}
for item in pagination.items]
return render_template('followers.html', user=user, title='Followers of',
endpoint='.followers', pagination=pagination, follows=follows)
@main.route('/followed-by/<username>')
def followed_by(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = user.followed.paginate(
page, per_page=current_app.config['FLASKY_FOLLOWERS_PER_PAGE'],
error_out=False)
follows = [{'user': item.followed, 'timestamp': item.timestamp}
for item in pagination.items]
return render_template('followers.html', user=user, title="Followed by",
endpoint='.followed_by', pagination=pagination,
follows=follows)
@main.route('/moderate')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate():
page = request.args.get('page', 1, type=int)
pagination = Comment.query.order_by(Comment.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_COMMENTS_PER_PAGE'],
error_out=False)
comments = pagination.items
return render_template('moderate.html', comments=comments, pagination=pagination, page=page)
@main.route('/moderate/enable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_enable(id):
comment = Comment.query.get_or_404(id)
comment.disabled = False
db.session.add(comment)
return redirect(url_for('.moderate', page=request.args.get('page', 1, type=int)))
@main.route('/moderate/disable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_disable(id):
comment = Comment.query.get_or_404(id)
comment.disabled = True
db.session.add(comment)
return redirect(url_for('.moderate', page=request.args.get('page', 1, type=int)))
| mit | -2,767,377,335,529,196,500 | 36.208163 | 103 | 0.658183 | false |
nagyistoce/geokey | geokey/contributions/serializers.py | 1 | 21039 | import requests
import tempfile
from django.core import files
from django.core.exceptions import PermissionDenied, ValidationError
from easy_thumbnails.files import get_thumbnailer
from rest_framework import serializers
from rest_framework_gis import serializers as geoserializers
from rest_framework.serializers import BaseSerializer
from geokey.categories.serializer import CategorySerializer
from geokey.categories.models import Category
from geokey.users.serializers import UserSerializer
from .models import Observation, Comment
from .models import Location
from .models import MediaFile, ImageFile, VideoFile
class LocationSerializer(geoserializers.GeoFeatureModelSerializer):
"""
Serialiser for geokey.contributions.models.Location
"""
class Meta:
model = Location
geo_field = 'geometry'
fields = ('id', 'name', 'description', 'status', 'created_at')
write_only_fields = ('status',)
class LocationContributionSerializer(serializers.ModelSerializer):
"""
Serialiser for `Location`; to be used within `ContributionSerializer`.
"""
class Meta:
model = Location
fields = ('id', 'name', 'description', 'status', 'created_at',
'geometry', 'private', 'private_for_project')
write_only_fields = ('status', 'private', 'private_for_project')
def create(self, validated_data):
"""
Creates a new contribution from `validated_data`
Parameter
---------
validated_data : dict
Input data after validation
Returns
-------
Location
"""
validated_data['creator'] = self.context.get('user')
return super(
LocationContributionSerializer,
self
).create(validated_data)
class ContributionSerializer(BaseSerializer):
"""
Serialiser for geokey.contributions.models.Observations. This is a custom
serialiser, not a standard ModelSerializer
"""
@classmethod
def many_init(cls, *args, **kwargs):
"""
Is called when many=True property is set when instantiating the
serialiser.
"""
kwargs['context']['many'] = True
return super(ContributionSerializer, cls).many_init(*args, **kwargs)
def validate_category(self, project, category_id):
"""
Validates if the category can be used with the project
Parameters
----------
project : geokey.projects.models.Project
Project that the category is used for
category_id : int
identifies the category in the database
Returns
-------
geokey.categories.models.Category
The valid category
"""
errors = []
category = None
try:
category = project.categories.get(pk=category_id)
if category.status == 'inactive':
errors.append('The category can not be used because it is '
'inactive.')
else:
self._validated_data['meta']['category'] = category
except Category.DoesNotExist:
errors.append('The category can not be used with the project '
'or does not exist.')
if errors:
self._errors['category'] = errors
return category
def replace_null(self, properties):
"""
Replaces all empty str or unicode values with None and returns the
properies dict
Parameter
---------
properties : dict
Contribution properties
Returns
-------
dict
Contribution properties with replaced null values
"""
for key, value in properties.iteritems():
if isinstance(value, (str, unicode)) and len(value) == 0:
properties[key] = None
return properties
def validate_properties(self, properties, category=None, status=None):
"""
Validates the properties and adds error messages to self._errors
Parameter
---------
properties : dict
Contribution properties
category : geokey.categories.models.Category
Category the properties are validated against
status : str
Status for the contribution
"""
errors = []
if self.instance:
status = status or self.instance.status
if self.instance.properties:
update = self.instance.properties.copy()
update.update(properties)
properties = update
else:
status = status or category.default_status
properties = self.replace_null(properties)
try:
if status == 'draft':
Observation.validate_partial(category, properties)
else:
Observation.validate_full(category, properties)
except ValidationError, e:
errors.append(e)
self._validated_data['properties'] = properties
self._validated_data['meta']['status'] = status
if errors:
self._errors['properties'] = errors
def validate_location(self, project, location_id):
"""
Validates if the location can be used with the project
Parameters
----------
project : geokey.projects.models.Project
Project that the category is used for
location_id : int
identifies the location in the database
"""
errors = []
self.location = None
try:
if location_id is not None:
self.location = Location.objects.get_single(
self.context.get('user'),
project.id,
location_id
)
except PermissionDenied, error:
errors.append(error)
except Location.DoesNotExist, error:
errors.append(error)
if errors:
self._errors['location'] = errors
def is_valid(self, raise_exception=False):
"""
Checks if the contribution that is deserialised is valid. Validates
location, category and properties.
Parameter
---------
raise_exception : Boolean
indicates if an exception should be raised if the data is invalid.
If set to false, this method will return False if the data is
invalid.
Returns
-------
Boolean
indicating if data is valid
Raises
------
ValidationError
If data is invalid. Exception is raised when raise_exception is set
to True.
"""
self._errors = {}
self._validated_data = self.initial_data
project = self.context.get('project')
meta = self.initial_data.get('meta')
if meta is None:
self._validated_data['meta'] = dict()
# Validate location
location_id = None
if self.initial_data.get('location') is not None:
location_id = self.initial_data.get('location').get('id')
self.validate_location(project, location_id)
# Validate category
category = None
if self.instance is None and meta is not None:
category = self.validate_category(project, meta.get('category'))
else:
category = self.instance.category
self._validated_data['meta']['category'] = category
# Validate properties
properties = self.initial_data.get('properties') or {}
status = None
if meta is not None:
status = meta.get('status', None)
if properties is not None and category is not None:
self.validate_properties(
properties,
category=category,
status=status
)
# raise the exception
if self._errors and raise_exception:
raise ValidationError(self._errors)
return not bool(self._errors)
def create(self, validated_data):
"""
Creates a new observation and returns the instance.
Parameter
---------
validated_data : dict
the data dict after validation
Returns
-------
geokey.contributions.models.Observation
The instance created
"""
project = self.context.get('project')
meta = validated_data.pop('meta')
location_serializer = LocationContributionSerializer(
self.location,
data=validated_data.pop('location', None),
context=self.context
)
if location_serializer.is_valid():
location_serializer.save()
self.instance = Observation.create(
properties=validated_data.get('properties'),
creator=self.context.get('user'),
location=location_serializer.instance,
project=project,
category=meta.get('category'),
status=meta.pop('status', None)
)
return self.instance
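# Illustrative sketch (not part of the original module): from a view, the
# serializer would typically be driven roughly like this; ``project``,
# ``request`` and ``payload`` are assumed to exist.
#
#     serializer = ContributionSerializer(
#         data=payload,
#         context={'project': project, 'user': request.user}
#     )
#     if serializer.is_valid(raise_exception=True):
#         contribution = serializer.save()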
def update(self, instance, validated_data):
"""
Updates an existing observation and returns the instance.
Parameter
---------
instance : geokey.contributions.models.Observation
the instance to be updated
validated_data : dict
the data dict after validation
Returns
-------
geokey.contributions.models.Observation
The instance update
"""
meta = validated_data.get('meta')
status = None
if meta is not None:
status = meta.get('status', None)
location_serializer = LocationContributionSerializer(
instance.location,
data=validated_data.pop('location', {}),
context=self.context,
partial=True
)
if location_serializer.is_valid():
location_serializer.save()
return instance.update(
properties=validated_data.get('properties'),
updator=self.context.get('user'),
status=status
)
def get_display_field(self, obj):
"""
Returns a native representation of the display_field property.
Parameter
---------
obj : geokey.contributions.models.Observation
The instance that is serialised
Returns
-------
dict
serialised display_field; e.g.
{
'key': 'field_key',
'value': 'The value of the field'
}
"""
if obj.display_field is not None:
display_field = obj.display_field.split(':', 1)
value = display_field[1] if display_field[1] != 'None' else None
return {
'key': display_field[0],
'value': value
}
else:
return None
def get_search_result(self, obj, q):
"""
Returns all fields whose values matched the search query
Parameter
---------
obj : geokey.contributions.models.Observation
The instance that is serialised
q : str
The query string of the search
Return
------
dict
the field that matched the query, e.g.
{
'field_key_1': 'value 1',
'field_key_2': 'value 2',
}
"""
search_matches = {}
matcher = obj.search_matches.split('#####')
for field in matcher:
match = field.split(':', 1)
if q.lower() in match[1].lower():
search_matches[match[0]] = match[1]
return search_matches
def to_representation(self, obj):
"""
Returns the native representation of a contribution
Parameter
---------
obj : geokey.contributions.models.Observation
The instance that is serialised
Returns
-------
dict
Native representation of the Contribution
"""
location = obj.location
isowner = False
if not self.context.get('user').is_anonymous():
isowner = obj.creator == self.context.get('user')
updator = None
if obj.updator is not None:
updator = {
'id': obj.updator.id,
'display_name': obj.updator.display_name
}
feature = {
'id': obj.id,
'properties': obj.properties,
'display_field': self.get_display_field(obj),
'meta': {
'status': obj.status,
'creator': {
'id': obj.creator.id,
'display_name': obj.creator.display_name
},
'updator': updator,
'created_at': str(obj.created_at),
'updated_at': str(obj.updated_at),
'version': obj.version,
'isowner': isowner,
'num_media': obj.num_media,
'num_comments': obj.num_comments
},
'location': {
'id': location.id,
'name': location.name,
'description': location.description,
'geometry': location.geometry.geojson
}
}
if self.context.get('many'):
cat = obj.category
feature['meta']['category'] = {
'id': cat.id,
'name': cat.name,
'description': cat.description,
'symbol': cat.symbol.url if cat.symbol else None,
'colour': cat.colour
}
q = self.context.get('search')
if q is not None:
feature['search_matches'] = self.get_search_result(obj, q)
else:
category_serializer = CategorySerializer(
obj.category, context=self.context)
feature['meta']['category'] = category_serializer.data
comment_serializer = CommentSerializer(
obj.comments.filter(respondsto=None),
many=True,
context=self.context
)
feature['comments'] = comment_serializer.data
review_serializer = CommentSerializer(
obj.comments.filter(review_status='open'),
many=True,
context=self.context
)
feature['review_comments'] = review_serializer.data
file_serializer = FileSerializer(
obj.files_attached.all(),
many=True,
context=self.context
)
feature['media'] = file_serializer.data
return feature
class CommentSerializer(serializers.ModelSerializer):
"""
Serialiser for geokey.contributions.models.Comment
"""
creator = UserSerializer(fields=('id', 'display_name'), read_only=True)
isowner = serializers.SerializerMethodField()
class Meta:
model = Comment
fields = ('id', 'respondsto', 'created_at', 'text', 'isowner',
'creator', 'review_status')
read_only = ('id', 'respondsto', 'created_at')
def to_representation(self, obj):
"""
Returns the native representation of the Comment. Adds responses to the comment
Parameter
---------
obj : geokey.contributions.models.Comment
The instance that is serialised
Returns
-------
dict
Native representation of the Comment
"""
native = super(CommentSerializer, self).to_representation(obj)
native['responses'] = CommentSerializer(
obj.responses.all(),
many=True,
context=self.context
).data
return native
def get_isowner(self, comment):
"""
Returns True if the user serialising the Comment has created the
comment
Parameter
---------
comment : geokey.contributions.models.Comment
The instance that is serialised
Returns
-------
Boolean
indicating of user is creator of comment
"""
if not self.context.get('user').is_anonymous():
return comment.creator == self.context.get('user')
else:
return False
class FileSerializer(serializers.ModelSerializer):
"""
Serialiser for geokey.contributions.models.MediaFile instances
"""
creator = UserSerializer(fields=('id', 'display_name'))
isowner = serializers.SerializerMethodField()
url = serializers.SerializerMethodField()
file_type = serializers.SerializerMethodField()
thumbnail_url = serializers.SerializerMethodField()
class Meta:
model = MediaFile
fields = (
'id', 'name', 'description', 'created_at', 'creator', 'isowner',
'url', 'thumbnail_url', 'file_type'
)
def get_file_type(self, obj):
"""
Returns the type of the MediaFile
Parameter
---------
obj : geokey.contributions.models.MediaFile
The instance that is serialised
Returns
-------
str
The type of the, e.g. 'ImageFile'
"""
return obj.type_name
def get_isowner(self, obj):
"""
Returns `True` if the user provided in the serializer context is the
creator of this file
Parameter
---------
obj : geokey.contributions.models.MediaFile
The instance that is serialised
Returns
-------
Boolean
indicating if user created the file
"""
if not self.context.get('user').is_anonymous():
return obj.creator == self.context.get('user')
else:
return False
def get_url(self, obj):
"""
Return the url to access this file based on its file type
Parameter
---------
obj : geokey.contributions.models.MediaFile
The instance that is serialised
Returns
-------
str
The URL to embed the file on client side
"""
if isinstance(obj, ImageFile):
return obj.image.url
elif isinstance(obj, VideoFile):
return obj.youtube_link
def _get_thumb(self, image, size=(300, 300)):
"""
Returns the thumbnail of the media file based on the size provided
Parameter
---------
image : Image
The image to be thumbnailed
size : tuple
width and height of the thumbnail, defaults to 300 by 300
Returns
-------
Image
The thumbnail
"""
thumbnailer = get_thumbnailer(image)
thumb = thumbnailer.get_thumbnail({
'crop': True,
'size': size
})
return thumb
def get_thumbnail_url(self, obj):
"""
Creates and returns a thumbnail for the MediaFile object
Parameter
---------
obj : geokey.contributions.models.MediaFile
The instance that is serialised
Returns
-------
str
The url to embed thumbnails on client side
"""
if isinstance(obj, ImageFile):
# Some of the imported image files in the original community maps
# seem to be broken. The error thrown when the image can not be
# read is caught here.
try:
return self._get_thumb(obj.image).url
except IOError:
return ''
elif isinstance(obj, VideoFile):
if obj.thumbnail:
# thumbnail has been downloaded, return the link
return self._get_thumb(obj.thumbnail).url
request = requests.get(
'http://img.youtube.com/vi/%s/0.jpg' % obj.youtube_id,
stream=True
)
if request.status_code != requests.codes.ok:
# Image not found, return placeholder thumbnail
return '/static/img/play.png'
lf = tempfile.NamedTemporaryFile()
# Read the streamed image in sections
for block in request.iter_content(1024 * 8):
# If no more file then stop
if not block:
break
# Write image block to temporary file
lf.write(block)
file_name = obj.youtube_id + '.jpg'
obj.thumbnail.save(file_name, files.File(lf))
from PIL import Image
w, h = Image.open(obj.thumbnail).size
thumb = self._get_thumb(obj.thumbnail, size=(h, h))
obj.thumbnail.save(file_name, thumb)
return self._get_thumb(obj.thumbnail).url
| apache-2.0 | -2,704,248,957,482,316,000 | 28.884943 | 79 | 0.547032 | false |
elec-otago/agbase | testing/selenium-tests/test_farm_delete_member.py | 1 | 1298 | from end_to_end_test import EndToEndTest
import test_config as config
#=============================================================
# This tests that an account with permission to manage a farm
# can delete another account with farm management
# permissions.
# Requires test farm, test farmer, and test farm manager to
# be created, and for test farmer and test manager to have farm
# management permissions
# TODO this test fails even though it should pass
#=============================================================
class UserDeleteFarmPermissionTest(EndToEndTest):
def test(self):
print "\nTEST user delete farm permission"
# Login as test farmer
self.login_user(config.test_farmer_email, config.test_password)
# Navigate to the test farm page
self.navigation.click_farms_dropdown_farm(config.test_farm)
# Select farm members tab
self.farm_page.click_members_tab()
# Delete farm manager
self.farm_page.delete_member(config.test_manager_first, config.test_manager_last)
deleted_user = self.farm_page.is_member_in_table(
config.test_manager_first,
config.test_manager_last)
self.assertFalse(deleted_user)
| mpl-2.0 | -3,059,780,765,964,965,000 | 37.176471 | 89 | 0.613251 | false |
davehunt/selenium | py/selenium/webdriver/ie/webdriver.py | 1 | 3912 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import warnings
from selenium.webdriver.common import utils
from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver
from .service import Service
from .options import Options
DEFAULT_TIMEOUT = 30
DEFAULT_PORT = 0
DEFAULT_HOST = None
DEFAULT_LOG_LEVEL = None
DEFAULT_SERVICE_LOG_PATH = None
class WebDriver(RemoteWebDriver):
""" Controls the IEServerDriver and allows you to drive Internet Explorer """
def __init__(self, executable_path='IEDriverServer.exe', capabilities=None,
port=DEFAULT_PORT, timeout=DEFAULT_TIMEOUT, host=DEFAULT_HOST,
log_level=DEFAULT_LOG_LEVEL, service_log_path=DEFAULT_SERVICE_LOG_PATH, options=None,
ie_options=None, desired_capabilities=None, log_file=None):
"""
Creates a new instance of the IE driver.
Starts the service and then creates a new instance of the IE driver.
:Args:
- executable_path - path to the executable. If the default is used it assumes the executable is in the $PATH
- capabilities: capabilities Dictionary object
- port - port you would like the service to run, if left as 0, a free port will be found.
- log_level - log level you would like the service to run.
- service_log_path - target of logging of service, may be "stdout", "stderr" or file path.
- options: IE Options instance, providing additional IE options
- desired_capabilities: alias of capabilities; this will make the signature consistent with RemoteWebDriver.
"""
if log_file:
warnings.warn('use service_log_path instead of log_file', DeprecationWarning)
service_log_path = log_file
if ie_options:
warnings.warn('use options instead of ie_options', DeprecationWarning)
options = ie_options
self.port = port
if self.port == 0:
self.port = utils.free_port()
self.host = host
# If both capabilities and desired capabilities are set, ignore desired capabilities.
if capabilities is None and desired_capabilities:
capabilities = desired_capabilities
if options is None:
if capabilities is None:
capabilities = self.create_options().to_capabilities()
else:
if capabilities is None:
capabilities = options.to_capabilities()
else:
# desired_capabilities stays as passed in
capabilities.update(options.to_capabilities())
self.iedriver = Service(
executable_path,
port=self.port,
host=self.host,
log_level=log_level,
log_file=service_log_path)
self.iedriver.start()
RemoteWebDriver.__init__(
self,
command_executor='http://localhost:%d' % self.port,
desired_capabilities=capabilities)
self._is_remote = False
def quit(self):
RemoteWebDriver.quit(self)
self.iedriver.stop()
def create_options(self):
return Options()
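# Illustrative usage sketch (not part of the original module): starting an IE
# session; the executable path below is an assumption.
#
#     from selenium.webdriver.ie.options import Options
#     opts = Options()
#     driver = WebDriver(executable_path=r'C:\drivers\IEDriverServer.exe',
#                        options=opts)
#     driver.get('http://example.com')
#     driver.quit()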
| apache-2.0 | -8,441,446,402,448,930,000 | 39.329897 | 117 | 0.665644 | false |
YRSNorwich/ProjectBlock | classicmaterials.py | 1 | 6273 | from materials import MCMaterials
classicMaterials = MCMaterials(defaultName = "Not present in Classic");
classicMaterials.name = "Classic"
cm = classicMaterials
cm.Air = cm.Block(0,
name="Air",
texture=(0x80,0xB0),
)
cm.Rock = cm.Block(1,
name="Rock",
texture=(0x10,0x00),
)
cm.Grass = cm.Block(2,
name="Grass",
texture=((0x30,0x00), (0x30,0x00), (0x00,0x00), (0x20,0x00), (0x30,0x00), (0x30,0x00)),
)
cm.Dirt = cm.Block(3,
name="Dirt",
texture=(0x20,0x00),
)
cm.Cobblestone = cm.Block(4,
name="Cobblestone",
texture=(0x00,0x10),
)
cm.WoodPlanks = cm.Block(5,
name="Wood Planks",
texture=(0x40,0x00),
)
cm.Sapling = cm.Block(6,
name="Sapling",
texture=(0xF0,0x00),
)
cm.Adminium = cm.Block(7,
name="Adminium",
texture=(0x10,0x10),
)
cm.WaterActive = cm.Block(8,
name="Water (active)",
texture=(0xE0,0x00),
)
cm.WaterStill = cm.Block(9,
name="Water (still)",
texture=(0xE0,0x00),
)
cm.LavaActive = cm.Block(10,
name="Lava (active)",
texture=(0xE0,0x10),
)
cm.LavaStill = cm.Block(11,
name="Lava (still)",
texture=(0xE0,0x10),
)
cm.Sand = cm.Block(12,
name="Sand",
texture=(0x20,0x10),
)
cm.Gravel = cm.Block(13,
name="Gravel",
texture=(0x30,0x10),
)
cm.GoldOre = cm.Block(14,
name="Gold Ore",
texture=(0x00,0x20),
)
cm.IronOre = cm.Block(15,
name="Iron Ore",
texture=(0x10,0x20),
)
cm.CoalOre = cm.Block(16,
name="Coal Ore",
texture=(0x20,0x20),
)
cm.Wood = cm.Block(17,
name="Wood",
texture=((0x40,0x10), (0x40,0x10), (0x50,0x10), (0x50,0x10), (0x40,0x10), (0x40,0x10)),
)
cm.Leaves = cm.Block(18,
name="Leaves",
texture=(0x50,0x30),
)
cm.Sponge = cm.Block(19,
name="Sponge",
texture=(0x00,0x30),
)
cm.Glass = cm.Block(20,
name="Glass",
texture=(0x10,0x30),
)
cm.RedCloth = cm.Block(21,
name="Red Cloth",
texture=(0x00,0x40),
)
cm.OrangeCloth = cm.Block(22,
name="Orange Cloth",
texture=(0x10,0x40),
)
cm.YellowCloth = cm.Block(23,
name="Yellow Cloth",
texture=(0x20,0x40),
)
cm.LightGreenCloth = cm.Block(24,
name="Light Green Cloth",
texture=(0x30,0x40),
)
cm.GreenCloth = cm.Block(25,
name="Green Cloth",
texture=(0x40,0x40),
)
cm.AquaCloth = cm.Block(26,
name="Aqua Cloth",
texture=(0x50,0x40),
)
cm.CyanCloth = cm.Block(27,
name="Cyan Cloth",
texture=(0x60,0x40),
)
cm.BlueCloth = cm.Block(28,
name="Blue Cloth",
texture=(0x70,0x40),
)
cm.PurpleCloth = cm.Block(29,
name="Purple Cloth",
texture=(0x80,0x40),
)
cm.IndigoCloth = cm.Block(30,
name="Indigo Cloth",
texture=(0x90,0x40),
)
cm.VioletCloth = cm.Block(31,
name="Violet Cloth",
texture=(0xA0,0x40),
)
cm.MagentaCloth = cm.Block(32,
name="Magenta Cloth",
texture=(0xB0,0x40),
)
cm.PinkCloth = cm.Block(33,
name="Pink Cloth",
texture=(0xC0,0x40),
)
cm.BlackCloth = cm.Block(34,
name="Black Cloth",
texture=(0xD0,0x40),
)
cm.GrayCloth = cm.Block(35,
name="Gray Cloth",
texture=(0xE0,0x40),
)
cm.WhiteCloth = cm.Block(36,
name="White Cloth",
texture=(0xF0,0x40),
)
cm.Flower = cm.Block(37,
name="Flower",
texture=(0xD0,0x00),
)
cm.Rose = cm.Block(38,
name="Rose",
texture=(0xC0,0x00),
)
cm.BrownMushroom = cm.Block(39,
name="Brown Mushroom",
texture=(0xD0,0x10),
)
cm.RedMushroom = cm.Block(40,
name="Red Mushroom",
texture=(0xC0,0x10),
)
cm.BlockOfGold = cm.Block(41,
name="Block of Gold",
texture=((0x70,0x20), (0x70,0x20), (0x70,0x10), (0x70,0x30), (0x70,0x20), (0x70,0x20)),
)
cm.BlockOfIron = cm.Block(42,
name="Block of Iron",
texture=((0x60,0x20), (0x60,0x20), (0x60,0x10), (0x60,0x30), (0x60,0x20), (0x60,0x20)),
)
cm.DoubleStoneSlab = cm.Block(43,
name="Double Stone Slab",
texture=((0x50,0x00), (0x50,0x00), (0x60,0x00), (0x60,0x00), (0x50,0x00), (0x50,0x00)),
)
cm.SingleStoneSlab = cm.Block(44,
name="Stone Slab",
texture=((0x50,0x00), (0x50,0x00), (0x60,0x00), (0x60,0x00), (0x50,0x00), (0x50,0x00)),
)
cm.Brick = cm.Block(45,
name="Brick",
texture=(0x70,0x00),
)
cm.TNT = cm.Block(46,
name="TNT",
texture=((0x80,0x00), (0x80,0x00), (0x90,0x00), (0xA0,0x00), (0x80,0x00), (0x80,0x00)),
)
cm.Bookshelf = cm.Block(47,
name="Bookshelf",
texture=((0x30,0x20), (0x30,0x20), (0x40,0x00), (0x40,0x00), (0x30,0x20), (0x30,0x20)),
)
cm.MossStone = cm.Block(48,
name="Moss Stone",
texture=(0x40,0x20),
)
cm.Obsidian = cm.Block(49,
name="Obsidian",
texture=(0x50,0x20),
)
cm.Torch = cm.Block(50,
name="Torch",
texture=(0x00,0x50),
)
cm.Fire = cm.Block(51,
name="Fire",
texture=(0xF0,0x30),
)
cm.InfiniteWaterSource = cm.Block(52,
name="Infinite water source",
texture=(0xE0,0x00),
)
cm.InfiniteLavaSource = cm.Block(53,
name="Infinite lava source",
texture=(0xE0,0x10),
)
cm.Chest = cm.Block(54,
name="Chest",
texture=((0xA0,0x10), (0xA0,0x10), (0xA0,0x10), (0xB0,0x10), (0x90,0x10), (0x90,0x10)),
)
cm.Cog = cm.Block(55,
name="Cog",
texture=(0xF0,0x30),
)
cm.DiamondOre = cm.Block(56,
name="Diamond Ore",
texture=(0x20,0x30),
)
cm.BlockOfDiamond = cm.Block(57,
name="Block Of Diamond",
texture=((0x80,0x20), (0x80,0x20), (0x80,0x10), (0x80,0x30), (0x80,0x20), (0x80,0x20)),
)
cm.CraftingTable = cm.Block(58,
name="Crafting Table",
texture=((0xB0,0x30), (0xB0,0x30), (0xB0,0x20), (0x40,0x10), (0xC0,0x30), (0xC0,0x30)),
)
cm.Crops = cm.Block(59,
name="Crops",
texture=(0xF0,0x50),
)
cm.Farmland = cm.Block(60,
name="Farmland",
texture=(0x60,0x50),
)
cm.Furnace = cm.Block(61,
name="Furnace",
texture=((0xD0,0x20), (0xD0,0x20), (0x10,0x00), (0x10,0x00), (0xC0,0x20), (0xC0,0x20)),
)
cm.LitFurnace = cm.Block(62,
name="Lit Furnace",
texture=((0xD0,0x20), (0xD0,0x20), (0x10,0x00), (0x10,0x00), (0xD0,0x30), (0xD0,0x30)),
)
del cm
| isc | 4,731,822,456,268,912,000 | 18.481366 | 91 | 0.57883 | false |
ioam/holoviews | holoviews/core/data/grid.py | 1 | 28568 | from __future__ import absolute_import
import sys
import datetime as dt
from collections import OrderedDict, defaultdict, Iterable
try:
import itertools.izip as zip
except ImportError:
pass
import numpy as np
from .dictionary import DictInterface
from .interface import Interface, DataError
from ..dimension import dimension_name
from ..element import Element
from ..dimension import OrderedDict as cyODict
from ..ndmapping import NdMapping, item_check, sorted_context
from .. import util
from .interface import is_dask, dask_array_module, get_array_types
class GridInterface(DictInterface):
"""
Interface for simple dictionary-based dataset format using a
compressed representation that uses the cartesian product between
key dimensions. As with DictInterface, the dictionary keys correspond
to the column (i.e dimension) names and the values are NumPy arrays
representing the values in that column.
To use this compressed format, the key dimensions must be orthogonal
to one another with each key dimension specifying an axis of the
multidimensional space occupied by the value dimension data. For
instance, given temperature recordings sampled regularly across
the earth surface, a list of N unique latitudes and M unique
longitudes can specify the position of NxM temperature samples.
"""
types = (dict, OrderedDict, cyODict)
datatype = 'grid'
gridded = True
@classmethod
def init(cls, eltype, data, kdims, vdims):
if kdims is None:
kdims = eltype.kdims
if vdims is None:
vdims = eltype.vdims
if not vdims:
raise ValueError('GridInterface interface requires at least '
'one value dimension.')
ndims = len(kdims)
dimensions = [dimension_name(d) for d in kdims+vdims]
if isinstance(data, tuple):
data = {d: v for d, v in zip(dimensions, data)}
elif isinstance(data, list) and data == []:
data = OrderedDict([(d, []) for d in dimensions])
elif not any(isinstance(data, tuple(t for t in interface.types if t is not None))
for interface in cls.interfaces.values()):
data = {k: v for k, v in zip(dimensions, zip(*data))}
elif isinstance(data, np.ndarray):
if data.ndim == 1:
if eltype._auto_indexable_1d and len(kdims)+len(vdims)>1:
data = np.column_stack([np.arange(len(data)), data])
else:
data = np.atleast_2d(data).T
data = {k: data[:,i] for i,k in enumerate(dimensions)}
elif isinstance(data, list) and data == []:
data = {d: np.array([]) for d in dimensions[:ndims]}
data.update({d: np.empty((0,) * ndims) for d in dimensions[ndims:]})
elif not isinstance(data, dict):
raise TypeError('GridInterface must be instantiated as a '
'dictionary or tuple')
for dim in kdims+vdims:
name = dimension_name(dim)
if name not in data:
raise ValueError("Values for dimension %s not found" % dim)
if not isinstance(data[name], get_array_types()):
data[name] = np.array(data[name])
kdim_names = [dimension_name(d) for d in kdims]
vdim_names = [dimension_name(d) for d in vdims]
expected = tuple([len(data[kd]) for kd in kdim_names])
irregular_shape = data[kdim_names[0]].shape if kdim_names else ()
valid_shape = irregular_shape if len(irregular_shape) > 1 else expected[::-1]
shapes = tuple([data[kd].shape for kd in kdim_names])
for vdim in vdim_names:
shape = data[vdim].shape
error = DataError if len(shape) > 1 else ValueError
if (not expected and shape == (1,)) or (len(set((shape,)+shapes)) == 1 and len(shape) > 1):
# If empty or an irregular mesh
pass
elif len(shape) != len(expected):
raise error('The shape of the %s value array does not '
'match the expected dimensionality indicated '
'by the key dimensions. Expected %d-D array, '
'found %d-D array.' % (vdim, len(expected), len(shape)))
elif any((s!=e and (s+1)!=e) for s, e in zip(shape, valid_shape)):
raise error('Key dimension values and value array %s '
'shapes do not match. Expected shape %s, '
'actual shape: %s' % (vdim, valid_shape, shape), cls)
return data, {'kdims':kdims, 'vdims':vdims}, {}
@classmethod
def concat(cls, datasets, dimensions, vdims):
from . import Dataset
with sorted_context(False):
datasets = NdMapping(datasets, kdims=dimensions)
datasets = datasets.clone([(k, v.data if isinstance(v, Dataset) else v)
for k, v in datasets.data.items()])
if len(datasets.kdims) > 1:
items = datasets.groupby(datasets.kdims[:-1]).data.items()
return cls.concat([(k, cls.concat(v, v.kdims, vdims=vdims)) for k, v in items],
datasets.kdims[:-1], vdims)
return cls.concat_dim(datasets, datasets.kdims[0], vdims)
@classmethod
def concat_dim(cls, datasets, dim, vdims):
values, grids = zip(*datasets.items())
new_data = {k: v for k, v in grids[0].items() if k not in vdims}
new_data[dim.name] = np.array(values)
for vdim in vdims:
arrays = [grid[vdim.name] for grid in grids]
shapes = set(arr.shape for arr in arrays)
if len(shapes) > 1:
raise DataError('When concatenating gridded data the shape '
'of arrays must match. %s found that arrays '
'along the %s dimension do not match.' %
(cls.__name__, vdim.name))
stack = dask_array_module().stack if any(is_dask(arr) for arr in arrays) else np.stack
new_data[vdim.name] = stack(arrays, -1)
return new_data
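    # Illustrative sketch (added for clarity; the grids and the 'time' dimension
    # below are hypothetical, not from the original source): concat_dim stacks the
    # value arrays of each grid along a new key dimension, e.g.
    #   datasets = {0: {'x': xs, 'y': ys, 'z': z0}, 1: {'x': xs, 'y': ys, 'z': z1}}
    #   new_data['time'] = np.array([0, 1])
    #   new_data['z'] = np.stack([z0, z1], -1)   # z0/z1 of shape (ny, nx) -> (ny, nx, 2)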
@classmethod
def irregular(cls, dataset, dim):
return dataset.data[dimension_name(dim)].ndim > 1
@classmethod
def isscalar(cls, dataset, dim):
values = cls.values(dataset, dim, expanded=False)
return values.shape in ((), (1,)) or len(np.unique(values)) == 1
@classmethod
def validate(cls, dataset, vdims=True):
Interface.validate(dataset, vdims)
@classmethod
def dimension_type(cls, dataset, dim):
if dim in dataset.dimensions():
arr = cls.values(dataset, dim, False, False)
else:
return None
return arr.dtype.type
@classmethod
def shape(cls, dataset, gridded=False):
shape = dataset.data[dataset.vdims[0].name].shape
if gridded:
return shape
else:
return (np.product(shape, dtype=np.intp), len(dataset.dimensions()))
@classmethod
def length(cls, dataset):
return cls.shape(dataset)[0]
@classmethod
def _infer_interval_breaks(cls, coord, axis=0):
"""
>>> GridInterface._infer_interval_breaks(np.arange(5))
array([-0.5, 0.5, 1.5, 2.5, 3.5, 4.5])
>>> GridInterface._infer_interval_breaks([[0, 1], [3, 4]], axis=1)
array([[-0.5, 0.5, 1.5],
[ 2.5, 3.5, 4.5]])
"""
coord = np.asarray(coord)
if sys.version_info.major == 2 and len(coord) and isinstance(coord[0], (dt.datetime, dt.date)):
# np.diff does not work on datetimes in python 2
coord = coord.astype('datetime64')
deltas = 0.5 * np.diff(coord, axis=axis)
first = np.take(coord, [0], axis=axis) - np.take(deltas, [0], axis=axis)
last = np.take(coord, [-1], axis=axis) + np.take(deltas, [-1], axis=axis)
trim_last = tuple(slice(None, -1) if n == axis else slice(None)
for n in range(coord.ndim))
return np.concatenate([first, coord[trim_last] + deltas, last], axis=axis)
@classmethod
def coords(cls, dataset, dim, ordered=False, expanded=False, edges=False):
"""
Returns the coordinates along a dimension. Ordered ensures
coordinates are in ascending order and expanded creates
ND-array matching the dimensionality of the dataset.
"""
dim = dataset.get_dimension(dim, strict=True)
irregular = cls.irregular(dataset, dim)
if irregular or expanded:
if irregular:
data = dataset.data[dim.name]
else:
data = util.expand_grid_coords(dataset, dim)
if edges and data.shape == dataset.data[dataset.vdims[0].name].shape:
data = cls._infer_interval_breaks(data, axis=1)
data = cls._infer_interval_breaks(data, axis=0)
return data
data = dataset.data[dim.name]
if ordered and np.all(data[1:] < data[:-1]):
data = data[::-1]
shape = cls.shape(dataset, True)
if dim in dataset.kdims:
idx = dataset.get_dimension_index(dim)
isedges = (dim in dataset.kdims and len(shape) == dataset.ndims
and len(data) == (shape[dataset.ndims-idx-1]+1))
else:
isedges = False
if edges and not isedges:
data = cls._infer_interval_breaks(data)
elif not edges and isedges:
data = data[:-1] + np.diff(data)/2.
return data
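    # Hedged usage sketch (not in the original file; 'ds' and the dimension names
    # are assumptions): for a regular grid with kdims ['x', 'y'],
    #   GridInterface.coords(ds, 'x')                 # 1D array of x coordinates
    #   GridInterface.coords(ds, 'x', expanded=True)  # ND array matching the grid shape
    #   GridInterface.coords(ds, 'x', edges=True)     # bin edges (inferred when centers were stored)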
@classmethod
def canonicalize(cls, dataset, data, data_coords=None, virtual_coords=[]):
"""
Canonicalize takes an array of values as input and reorients
and transposes it to match the canonical format expected by
plotting functions. In certain cases the dimensions defined
via the kdims of an Element may not match the dimensions of
the underlying data. A set of data_coords may be passed in to
define the dimensionality of the data, which can then be used
to np.squeeze the data to remove any constant dimensions. If
the data is also irregular, i.e. contains multi-dimensional
coordinates, a set of virtual_coords can be supplied, required
by some interfaces (e.g. xarray) to index irregular datasets
with a virtual integer index. This ensures these coordinates
are not simply dropped.
"""
if data_coords is None:
data_coords = dataset.dimensions('key', label='name')[::-1]
# Transpose data
dims = [name for name in data_coords
if isinstance(cls.coords(dataset, name), get_array_types())]
dropped = [dims.index(d) for d in dims
if d not in dataset.kdims+virtual_coords]
if dropped:
data = np.squeeze(data, axis=tuple(dropped))
if not any(cls.irregular(dataset, d) for d in dataset.kdims):
inds = [dims.index(kd.name) for kd in dataset.kdims]
inds = [i - sum([1 for d in dropped if i>=d]) for i in inds]
if inds:
data = data.transpose(inds[::-1])
# Reorient data
invert = False
slices = []
for d in dataset.kdims[::-1]:
coords = cls.coords(dataset, d)
if np.all(coords[1:] < coords[:-1]) and not coords.ndim > 1:
slices.append(slice(None, None, -1))
invert = True
else:
slices.append(slice(None))
data = data[tuple(slices)] if invert else data
# Allow lower dimensional views into data
if len(dataset.kdims) < 2:
data = data.flatten()
return data
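    # Minimal sketch of the effect of canonicalize (illustrative only; 'ds' is a
    # hypothetical regular grid with kdims ['x', 'y'] whose x coordinates were
    # stored in descending order):
    #   raw = np.arange(6).reshape(2, 3)           # value array as stored
    #   out = GridInterface.canonicalize(ds, raw)  # squeezed, transposed and x-axis flipped
    # so plotting code always receives data in the canonical orientation.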
@classmethod
def invert_index(cls, index, length):
if np.isscalar(index):
return length - index
elif isinstance(index, slice):
start, stop = index.start, index.stop
new_start, new_stop = None, None
if start is not None:
new_stop = length - start
if stop is not None:
new_start = length - stop
            return slice(new_start-1 if new_start is not None else None,
                         new_stop-1 if new_stop is not None else None)
elif isinstance(index, Iterable):
new_index = []
for ind in index:
new_index.append(length-ind)
return new_index
@classmethod
def ndloc(cls, dataset, indices):
selected = {}
adjusted_inds = []
all_scalar = True
for i, (kd, ind) in enumerate(zip(dataset.kdims[::-1], indices)):
coords = cls.coords(dataset, kd.name, True)
if np.isscalar(ind):
ind = [ind]
else:
all_scalar = False
selected[kd.name] = coords[ind]
adjusted_inds.append(ind)
for kd in dataset.kdims:
if kd.name not in selected:
coords = cls.coords(dataset, kd.name)
selected[kd.name] = coords
all_scalar = False
for d in dataset.dimensions():
if d in dataset.kdims and not cls.irregular(dataset, d):
continue
arr = cls.values(dataset, d, flat=False, compute=False)
if all_scalar and len(dataset.vdims) == 1:
return arr[tuple(ind[0] for ind in adjusted_inds)]
selected[d.name] = arr[tuple(adjusted_inds)]
return tuple(selected[d.name] for d in dataset.dimensions())
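    # Illustrative example (names assumed, not part of the original source): ndloc
    # performs integer/array indexing with indices given against the reversed kdims,
    # i.e. in (row, column) order for a 2D grid, e.g.
    #   GridInterface.ndloc(ds, (0, slice(0, 2)))   # first row, first two columns
    # returning the sliced coordinate arrays together with the indexed value arrays.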
@classmethod
def values(cls, dataset, dim, expanded=True, flat=True, compute=True):
dim = dataset.get_dimension(dim, strict=True)
if dim in dataset.vdims or dataset.data[dim.name].ndim > 1:
data = dataset.data[dim.name]
data = cls.canonicalize(dataset, data)
da = dask_array_module()
if compute and da and isinstance(data, da.Array):
data = data.compute()
return data.T.flatten() if flat else data
elif expanded:
data = cls.coords(dataset, dim.name, expanded=True)
return data.T.flatten() if flat else data
else:
return cls.coords(dataset, dim.name, ordered=True)
@classmethod
def groupby(cls, dataset, dim_names, container_type, group_type, **kwargs):
# Get dimensions information
dimensions = [dataset.get_dimension(d, strict=True) for d in dim_names]
if 'kdims' in kwargs:
kdims = kwargs['kdims']
else:
kdims = [kdim for kdim in dataset.kdims if kdim not in dimensions]
kwargs['kdims'] = kdims
invalid = [d for d in dimensions if dataset.data[d.name].ndim > 1]
if invalid:
if len(invalid) == 1: invalid = "'%s'" % invalid[0]
raise ValueError("Cannot groupby irregularly sampled dimension(s) %s."
% invalid)
# Update the kwargs appropriately for Element group types
group_kwargs = {}
group_type = dict if group_type == 'raw' else group_type
if issubclass(group_type, Element):
group_kwargs.update(util.get_param_values(dataset))
else:
kwargs.pop('kdims')
group_kwargs.update(kwargs)
drop_dim = any(d not in group_kwargs['kdims'] for d in kdims)
# Find all the keys along supplied dimensions
keys = [cls.coords(dataset, d.name) for d in dimensions]
transpose = [dataset.ndims-dataset.kdims.index(kd)-1 for kd in kdims]
transpose += [i for i in range(dataset.ndims) if i not in transpose]
# Iterate over the unique entries applying selection masks
grouped_data = []
for unique_key in zip(*util.cartesian_product(keys)):
select = dict(zip(dim_names, unique_key))
if drop_dim:
group_data = dataset.select(**select)
group_data = group_data if np.isscalar(group_data) else group_data.columns()
else:
group_data = cls.select(dataset, **select)
if np.isscalar(group_data) or (isinstance(group_data, get_array_types()) and group_data.shape == ()):
group_data = {dataset.vdims[0].name: np.atleast_1d(group_data)}
for dim, v in zip(dim_names, unique_key):
group_data[dim] = np.atleast_1d(v)
elif not drop_dim:
if isinstance(group_data, get_array_types()):
group_data = {dataset.vdims[0].name: group_data}
for vdim in dataset.vdims:
data = group_data[vdim.name]
data = data.transpose(transpose[::-1])
group_data[vdim.name] = np.squeeze(data)
group_data = group_type(group_data, **group_kwargs)
grouped_data.append((tuple(unique_key), group_data))
if issubclass(container_type, NdMapping):
with item_check(False):
return container_type(grouped_data, kdims=dimensions)
else:
return container_type(grouped_data)
@classmethod
def key_select_mask(cls, dataset, values, ind):
if isinstance(ind, tuple):
ind = slice(*ind)
if isinstance(ind, get_array_types()):
mask = ind
elif isinstance(ind, slice):
mask = True
if ind.start is not None:
mask &= ind.start <= values
if ind.stop is not None:
mask &= values < ind.stop
# Expand empty mask
if mask is True:
mask = np.ones(values.shape, dtype=np.bool)
elif isinstance(ind, (set, list)):
iter_slcs = []
for ik in ind:
iter_slcs.append(values == ik)
mask = np.logical_or.reduce(iter_slcs)
elif callable(ind):
mask = ind(values)
elif ind is None:
mask = None
else:
index_mask = values == ind
if (dataset.ndims == 1 or dataset._binned) and np.sum(index_mask) == 0:
data_index = np.argmin(np.abs(values - ind))
mask = np.zeros(len(values), dtype=np.bool)
mask[data_index] = True
else:
mask = index_mask
if mask is None:
mask = np.ones(values.shape, dtype=bool)
return mask
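    # Hedged examples of the index types handled above (the 'values' array and the
    # dataset 'ds' are hypothetical):
    #   values = np.array([0, 1, 2, 3])
    #   key_select_mask(ds, values, (1, 3))           # tuple -> slice, 1 <= v < 3
    #   key_select_mask(ds, values, [0, 2])           # membership test
    #   key_select_mask(ds, values, lambda v: v > 1)  # callable predicate
    #   key_select_mask(ds, values, 2)                # exact match (nearest value for binned/1D data)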
@classmethod
def select(cls, dataset, selection_mask=None, **selection):
dimensions = dataset.kdims
val_dims = [vdim for vdim in dataset.vdims if vdim in selection]
if val_dims:
raise IndexError('Cannot slice value dimensions in compressed format, '
'convert to expanded format before slicing.')
indexed = cls.indexed(dataset, selection)
full_selection = [(d, selection.get(d.name, selection.get(d.label)))
for d in dimensions]
data = {}
value_select = []
for i, (dim, ind) in enumerate(full_selection):
irregular = cls.irregular(dataset, dim)
values = cls.coords(dataset, dim, irregular)
mask = cls.key_select_mask(dataset, values, ind)
if irregular:
if np.isscalar(ind) or isinstance(ind, (set, list)):
raise IndexError("Indexing not supported for irregularly "
"sampled data. %s value along %s dimension."
"must be a slice or 2D boolean mask."
% (ind, dim))
mask = mask.max(axis=i)
elif dataset._binned:
edges = cls.coords(dataset, dim, False, edges=True)
inds = np.argwhere(mask)
if np.isscalar(ind):
emin, emax = edges.min(), edges.max()
if ind < emin:
raise IndexError("Index %s less than lower bound "
"of %s for %s dimension." % (ind, emin, dim))
elif ind >= emax:
raise IndexError("Index %s more than or equal to upper bound "
"of %s for %s dimension." % (ind, emax, dim))
idx = max([np.digitize([ind], edges)[0]-1, 0])
mask = np.zeros(len(values), dtype=np.bool)
mask[idx] = True
values = edges[idx:idx+2]
elif len(inds):
values = edges[inds.min(): inds.max()+2]
else:
values = edges[0:0]
else:
values = values[mask]
values, mask = np.asarray(values), np.asarray(mask)
value_select.append(mask)
data[dim.name] = np.array([values]) if np.isscalar(values) else values
int_inds = [np.argwhere(v) for v in value_select][::-1]
index = np.ix_(*[np.atleast_1d(np.squeeze(ind)) if ind.ndim > 1 else np.atleast_1d(ind)
for ind in int_inds])
for kdim in dataset.kdims:
            if cls.irregular(dataset, kdim):
da = dask_array_module()
if da and isinstance(dataset.data[kdim.name], da.Array):
data[kdim.name] = dataset.data[kdim.name].vindex[index]
else:
data[kdim.name] = np.asarray(data[kdim.name])[index]
for vdim in dataset.vdims:
da = dask_array_module()
if da and isinstance(dataset.data[vdim.name], da.Array):
data[vdim.name] = dataset.data[vdim.name].vindex[index]
else:
data[vdim.name] = np.asarray(dataset.data[vdim.name])[index]
if indexed:
if len(dataset.vdims) == 1:
da = dask_array_module()
arr = np.squeeze(data[dataset.vdims[0].name])
if da and isinstance(arr, da.Array):
arr = arr.compute()
return arr if np.isscalar(arr) else arr[()]
else:
return np.array([np.squeeze(data[vd.name])
for vd in dataset.vdims])
return data
@classmethod
def sample(cls, dataset, samples=[]):
"""
Samples the gridded data into dataset of samples.
"""
ndims = dataset.ndims
dimensions = dataset.dimensions(label='name')
arrays = [dataset.data[vdim.name] for vdim in dataset.vdims]
data = defaultdict(list)
for sample in samples:
if np.isscalar(sample): sample = [sample]
if len(sample) != ndims:
sample = [sample[i] if i < len(sample) else None
for i in range(ndims)]
sampled, int_inds = [], []
for d, ind in zip(dimensions, sample):
cdata = dataset.data[d]
mask = cls.key_select_mask(dataset, cdata, ind)
inds = np.arange(len(cdata)) if mask is None else np.argwhere(mask)
int_inds.append(inds)
sampled.append(cdata[mask])
for d, arr in zip(dimensions, np.meshgrid(*sampled)):
data[d].append(arr)
for vdim, array in zip(dataset.vdims, arrays):
da = dask_array_module()
flat_index = np.ravel_multi_index(tuple(int_inds)[::-1], array.shape)
if da and isinstance(array, da.Array):
data[vdim.name].append(array.flatten().vindex[tuple(flat_index)])
else:
data[vdim.name].append(array.flat[flat_index])
concatenated = {d: np.concatenate(arrays).flatten() for d, arrays in data.items()}
return concatenated
@classmethod
def aggregate(cls, dataset, kdims, function, **kwargs):
kdims = [dimension_name(kd) for kd in kdims]
data = {kdim: dataset.data[kdim] for kdim in kdims}
axes = tuple(dataset.ndims-dataset.get_dimension_index(kdim)-1
for kdim in dataset.kdims if kdim not in kdims)
da = dask_array_module()
dropped = []
for vdim in dataset.vdims:
values = dataset.data[vdim.name]
atleast_1d = da.atleast_1d if is_dask(values) else np.atleast_1d
try:
data[vdim.name] = atleast_1d(function(values, axis=axes, **kwargs))
except TypeError:
dropped.append(vdim)
return data, dropped
@classmethod
def reindex(cls, dataset, kdims, vdims):
dropped_kdims = [kd for kd in dataset.kdims if kd not in kdims]
dropped_vdims = ([vdim for vdim in dataset.vdims
if vdim not in vdims] if vdims else [])
constant = {}
for kd in dropped_kdims:
vals = cls.values(dataset, kd.name, expanded=False)
if len(vals) == 1:
constant[kd.name] = vals[0]
data = {k: values for k, values in dataset.data.items()
if k not in dropped_kdims+dropped_vdims}
if len(constant) == len(dropped_kdims):
joined_dims = kdims+dropped_kdims
axes = tuple(dataset.ndims-dataset.kdims.index(d)-1
for d in joined_dims)
dropped_axes = tuple(dataset.ndims-joined_dims.index(d)-1
for d in dropped_kdims)
for vdim in vdims:
vdata = data[vdim.name]
if len(axes) > 1:
vdata = vdata.transpose(axes[::-1])
if dropped_axes:
vdata = np.squeeze(vdata, axis=dropped_axes)
data[vdim.name] = vdata
return data
elif dropped_kdims:
return tuple(dataset.columns(kdims+vdims).values())
return data
@classmethod
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
if not vdim:
raise Exception("Cannot add key dimension to a dense representation.")
dim = dimension_name(dimension)
return dict(dataset.data, **{dim: values})
@classmethod
def sort(cls, dataset, by=[], reverse=False):
if not by or by in [dataset.kdims, dataset.dimensions()]:
return dataset.data
else:
raise Exception('Compressed format cannot be sorted, either instantiate '
'in the desired order or use the expanded format.')
@classmethod
def iloc(cls, dataset, index):
rows, cols = index
scalar = False
if np.isscalar(cols):
scalar = np.isscalar(rows)
cols = [dataset.get_dimension(cols, strict=True)]
elif isinstance(cols, slice):
cols = dataset.dimensions()[cols]
else:
cols = [dataset.get_dimension(d, strict=True) for d in cols]
if np.isscalar(rows):
rows = [rows]
new_data = []
for d in cols:
new_data.append(cls.values(dataset, d, compute=False)[rows])
if scalar:
da = dask_array_module()
if new_data and (da and isinstance(new_data[0], da.Array)):
return new_data[0].compute()[0]
return new_data[0][0]
return tuple(new_data)
@classmethod
def range(cls, dataset, dimension):
if dataset._binned and dimension in dataset.kdims:
expanded = cls.irregular(dataset, dimension)
column = cls.coords(dataset, dimension, expanded=expanded, edges=True)
else:
column = cls.values(dataset, dimension, expanded=False, flat=False)
da = dask_array_module()
if column.dtype.kind == 'M':
dmin, dmax = column.min(), column.max()
if da and isinstance(column, da.Array):
return da.compute(dmin, dmax)
return dmin, dmax
elif len(column) == 0:
return np.NaN, np.NaN
else:
try:
dmin, dmax = (np.nanmin(column), np.nanmax(column))
if da and isinstance(column, da.Array):
return da.compute(dmin, dmax)
return dmin, dmax
except TypeError:
column.sort()
return column[0], column[-1]
Interface.register(GridInterface)
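# Illustrative example of the compressed grid format this interface accepts (the
# element type and dimension names are assumptions, not part of the original file):
#   xs, ys = np.arange(3), np.arange(2)
#   zs = np.random.rand(2, 3)                  # value array of shape (len(ys), len(xs))
#   data = {'x': xs, 'y': ys, 'z': zs}
#   # e.g. a gridded element such as hv.Image(data, kdims=['x', 'y'], vdims=['z'])
#   # would validate and store this dictionary via GridInterface.init above.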
| bsd-3-clause | 5,546,692,618,131,833,000 | 40.402899 | 113 | 0.555027 | false |
whiteclover/dbpy | tests/pymysqlt.py | 1 | 12224 | #!/usr/bin/env python
# Copyright (C) 2014-2015 Thomas Huang
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import db
import logging
global config
config = {
'passwd': 'test',
'user': 'test',
'host': 'localhost',
'db': 'test'
}
def _create():
db.execute('DROP TABLE IF EXISTS `users`')
db.execute("""CREATE TABLE `users` (
`uid` int(10) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(20),
PRIMARY KEY (`uid`))""")
class TestDBBase(unittest.TestCase):
def setUp(self):
setattr(db, '__db', {})
def test_dup_key(self):
db.setup(config,adapter='pymysql')
f = lambda: db.setup(config,adapter='pymysql')
self.assertRaises(db.DBError, f)
def test_invalid_key(self):
f = lambda: db.setup(config, key='dd.xx')
self.assertRaises(TypeError, f)
def test_database(self):
db.setup(config,adapter='pymysql')
self.assertEqual(db.database(), db.database('default', slave=True))
conns = getattr(db, '__db', [])
self.assertEqual(len(conns['default.slave']), 1)
db.setup(config, slave=True)
self.assertNotEqual(db.database(), db.database('default', slave=True))
conns = getattr(db, '__db', [])
self.assertEqual(len(conns['default.slave']), 1)
db.setup(config, slave=True)
conns = getattr(db, '__db', [])
self.assertEqual(len(conns['default.slave']), 2)
class TestBase(unittest.TestCase):
def setUp(self):
setattr(db, '__db', {})
db.setup(config,adapter='pymysql')
db.execute('DROP TABLE IF EXISTS `users`')
db.execute("""CREATE TABLE `users` (
`uid` int(10) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(20) NOT NULL,
PRIMARY KEY (`uid`))""")
def test_query(self):
self.assertEqual(1, db.query('SELECT 1')[0][0])
self.assertEqual(0, len(db.query('SELECT * FROM users')))
def test_execute(self):
res = db.execute('INSERT INTO users VALUES(%s, %s)', [(10, 'execute_test'), (9, 'execute_test')])
self.assertTrue(res)
res = db.execute('DELETE FROM users WHERE name=%s', ('execute_test',))
self.assertEqual(res, 2)
def test_pool(self):
import threading
def q(n):
for i in range(10):
res = db.query('select count(*) from users')
self.assertEqual(0, res[0][0])
n = 50
ts = []
for i in range(n):
t = threading.Thread(target=q, args=(i,))
ts.append(t)
for t in ts:
t.start()
for t in ts:
t.join()
class TestMultiDB(unittest.TestCase):
def setUp(self):
setattr(db, '__db', {})
db.setup(config, key='test')
db.setup(config, key='test', slave=True)
db.execute('DROP TABLE IF EXISTS `users`', key='test')
db.execute("""CREATE TABLE `users` (
`uid` int(10) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(20) NOT NULL,
PRIMARY KEY (`uid`))""", key='test')
rows = []
for _ in range(1, 10):
rows.append('(%d , "name_%d")' % (_, _))
db.execute('INSERT INTO users VALUES ' + ', '.join(rows), key='test')
def tearDown(self):
db.execute('DELETE FROM users', key='test')
    def test_execute(self):
        res = db.execute('insert into users values(%s, %s)', [(10, 'thomas'), (11, 'animer')], key='test')
res = db.query('SELECT count(*) FROM users WHERE uid>=10', key='test')
self.assertEqual(2, res[0][0])
def test_query(self):
res = db.query('select name from users limit 5', key='test')
self.assertEqual(len(res), 5)
res = db.query('select name from users limit %s', (100,), many=20, key='test')
rows = []
for r in res:
rows.append(r)
self.assertTrue(10, len(rows))
class TestSelectQuery(unittest.TestCase):
def setUp(self):
setattr(db, '__db', {})
db.setup(config,adapter='pymysql')
_create()
users = []
for i in range(1, 5):
users.append((i, 'user_' + str(i)))
users.append((5, None))
db.execute('INSERT INTO users VALUES(%s, %s)', users)
self.select = db.select('users')
def tearDown(self):
db.execute('delete from users')
def test_select_all(self):
self.assertEquals(len(self.select
.execute()), 5)
def test_select_as_dict(self):
res = self.select.condition('uid', 1).execute(as_dict=True)
self.assertEqual(len(res), 1)
self.assertEqual(type(res[0]), dict)
self.assertEqual(res[0]['uid'], 1)
def test_select_many(self):
res = (self.select.fields('*')
.execute(many=2))
rows = []
for row in res:
rows.append(row)
self.assertEquals(len(rows), 5)
def test_select_condition(self):
res = (self.select
.condition('name', 'user_1')
.condition('uid', 1)
.execute())
self.assertEquals(res[0][1], 'user_1')
def test_select_or_condition(self):
from db import or_
or_con = or_()
or_con.condition('name', 'user_1')
or_con.condition('name', 'user_2')
res = (self.select
.condition(or_con)
.execute())
self.assertEquals(res[0][1], 'user_1')
def test_select_like(self):
res = (self.select
.condition('name', 'user_%', 'like')
.execute())
self.assertEquals(len(res), 4)
def test_select_in(self):
res = (self.select.fields('*')
.condition('name', ['user_1', 'user_2'])
.execute())
self.assertEquals(res[0][1], 'user_1')
self.assertEquals(res[1][1], 'user_2')
def test_select_group_by(self):
self.assertEquals(len(self.select
.group_by('name', 'uid')
.execute()), 5)
def test_select_order_by_ASC(self):
self.assertEquals(len(self.select
.order_by('name')
.execute()), 5)
def test_select_order_by_DESC(self):
self.assertEquals(len(self.select
.order_by('name', 'DESC')
.execute()), 5)
def test_select_limit(self):
self.assertEquals(len(self.select.limit(2).execute()), 2)
def test_table_dot_condition(self):
res = self.select.condition('users.uid', 5).execute()
self.assertEqual(res[0][0], 5)
def test_is_null(self):
res = self.select.is_null('name').condition('uid', 5).execute()
self.assertEqual(res[0][0], 5)
def test_is_not_null(self):
self.assertEqual(len(self.select.is_not_null('uid').execute()), 5)
def test_expr(self):
from db import expr
res = self.select.fields(expr('count(*)')).execute()
self.assertEqual(res[0][0], 5)
res = db.select('users').fields(expr('count(uid)', 'total')).execute()
self.assertEqual(res[0][0], 5)
class TestUpdateQuery(unittest.TestCase):
def setUp(self):
setattr(db, '__db', {})
db.setup(config,adapter='pymysql')
_create()
users = []
for i in range(1, 6):
users.append((i, 'user_' + str(i)))
db.execute('delete from users')
db.execute('INSERT INTO users VALUES(%s, %s)', users)
self.update = db.update('users')
def tearDown(self):
db.execute('delete from users')
def test_update_on_name(self):
res = (self.update.
mset({'name':'update_test'})
.condition('name','user_1')
.execute())
self.assertEquals(res, 1)
def test_update_on_name_and_uid(self):
res = (self.update.
set('name', 'update_test')
.condition('name', 'user_2')
.condition('uid', 2)
.execute())
self.assertEquals(res, 1)
def test_update_not_exists(self):
res = (self.update.
mset({'name':'update', 'uid': 10})
.condition('name', 'not_exists')
.execute())
self.assertEquals(res, 0)
class TestInsertQuery(unittest.TestCase):
def setUp(self):
setattr(db, '__db', {})
db.setup(config,adapter='pymysql')
_create()
users = []
for i in range(1, 6):
users.append((i, 'user_' + str(i)))
db.execute('delete from users')
db.execute('INSERT INTO users VALUES(%s, %s)', users)
self.insert = db.insert('users')
self.select = db.select('users')
def tearDown(self):
db.execute('delete from users')
def test_insert(self):
res = self.insert.values((10, 'test_insert')).execute()
res = self.select.condition('name', 'test_insert').execute()
self.assertEqual(res[0][1], 'test_insert')
def test_insert_dict_values(self):
self.insert.fields('name').values({'name': 'insert_1'}).values(('insert_2',)).execute()
res = self.select.condition('name', ['insert_1', 'insert_2']).execute()
self.assertEqual(len(res), 2)
class TestDeleteQuery(unittest.TestCase):
def setUp(self):
setattr(db, '__db', {})
db.setup(config,adapter='pymysql')
_create()
users = []
for i in range(1, 6):
users.append((i, 'user_' + str(i)))
db.execute('INSERT INTO users VALUES(%s, %s)', users)
self.delete = db.delete('users')
def tearDown(self):
db.execute('delete from users')
def test_delete_by_uid(self):
res = self.delete.condition('uid', 1).execute()
self.assertEqual(res, 1)
def test_delete_by_condtions(self):
res = self.delete.condition('uid', 2).condition('name', 'user_2').execute()
self.assertEqual(res, 1)
def test_delete_or_condtions(self):
from db import or_
or_con = or_().condition('name', 'user_1').condition('name', 'user_2')
res = self.delete.condition(or_con).execute()
self.assertEqual(res, 2)
class TestTransaction(unittest.TestCase):
def setUp(self):
setattr(db, '__db', {})
db.setup(config,adapter='pymysql')
_create()
users = []
for i in range(1, 6):
users.append((i, 'user_' + str(i)))
db.execute('INSERT INTO users VALUES(%s, %s)', users)
def tearDown(self):
db.execute('delete from users')
def test_with(self):
with db.transaction() as t:
t.delete('users').condition('uid', 1).execute()
res = db.select('users').condition('uid', 1).execute()
self.assertEqual(len(res), 1)
res = db.select('users').condition('uid', 1).execute()
self.assertEqual(len(res), 0)
def test_begin_commit(self):
t = db.transaction()
t.begin()
t.delete('users').condition('uid', 1).execute()
res = db.select('users').condition('uid', 1).execute()
self.assertEqual(len(res), 1)
t.commit()
res = db.select('users').condition('uid', 1).execute()
self.assertEqual(len(res), 0)
if __name__ == '__main__':
debug = True
level = logging.DEBUG if debug else logging.INFO
logging.basicConfig(level=level,
format='%(asctime)s %(levelname)-8s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S', filemode='a+')
unittest.main(verbosity=2 if debug else 0) | gpl-2.0 | 4,985,435,720,029,420,000 | 30.589147 | 108 | 0.551865 | false |
akx/shoop | _misc/ensure_license_headers.py | 1 | 4075 | #!/usr/bin/env python3
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
"""
License header updater.
"""
from __future__ import unicode_literals
import argparse
import os
import sys
import sanity_utils
HEADER = """
This file is part of Shoop.
Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
This source code is licensed under the AGPLv3 license found in the
LICENSE file in the root directory of this source tree.
""".strip()
PY_HEADER = '\n'.join(('# ' + line).strip() for line in HEADER.splitlines())
JS_HEADER = (
'/**\n' +
'\n'.join((' * ' + line).rstrip() for line in HEADER.splitlines()) +
'\n */')
PY_HEADER_LINES = PY_HEADER.encode('utf-8').splitlines()
JS_HEADER_LINES = JS_HEADER.encode('utf-8').splitlines()
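# For illustration (not part of the original script), the generated headers look
# roughly like this -- a '# '-prefixed block for Python files:
#   # This file is part of Shoop.
#   #
#   # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#   ...
# and the same text wrapped in a '/** ... */' block for JavaScript files.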
def get_adders():
return {
'.py': add_header_to_python_file,
'.js': add_header_to_javascript_file
}
def main():
ap = argparse.ArgumentParser()
ap.add_argument("root", nargs="+", help="Directory roots to recurse through")
ap.add_argument("-w", "--write", help="Actually write changes", action="store_true")
ap.add_argument("-s", "--exit-status", help="Exit with error status when missing headers", action="store_true")
ap.add_argument("-v", "--verbose", help="Log OK files too", action="store_true")
args = ap.parse_args()
adders = get_adders()
paths = find_files(roots=args.root, extensions=set(adders.keys()))
missing = process_files(paths, adders, verbose=args.verbose, write=args.write)
if args.exit_status and missing:
return 1
return 0
def process_files(paths, adders, verbose, write):
    width = max((len(s) for s in paths), default=0)
missing = set()
for path in sorted(paths):
if os.stat(path).st_size == 0:
if verbose:
print('[+]:%-*s: File is empty' % (width, path))
elif not has_header(path):
missing.add(path)
if write:
adder = adders[os.path.splitext(path)[1]]
adder(path)
print('[!]:%-*s: Modified' % (width, path))
else:
print('[!]:%-*s: Requires license header' % (width, path))
else:
if verbose:
print('[+]:%-*s: File has license header' % (width, path))
return missing
def find_files(roots, extensions):
paths = set()
generated_resources = set()
for root in roots:
for file in sanity_utils.find_files(
root,
generated_resources=generated_resources,
allowed_extensions=extensions,
ignored_dirs=sanity_utils.IGNORED_DIRS + ["migrations"]
):
if not is_file_ignored(file):
paths.add(file)
paths -= generated_resources
return paths
def is_file_ignored(filepath):
filepath = filepath.replace(os.sep, "/")
return (
('vendor' in filepath) or
('doc/_ext/djangodocs.py' in filepath)
)
def has_header(path):
with open(path, 'rb') as fp:
return b"This file is part of Shoop." in fp.read(256)
def add_header_to_python_file(path):
lines = get_lines(path)
if lines:
i = 0
if lines[i].startswith(b'#!'):
i += 1
if i < len(lines) and b'coding' in lines[i]:
i += 1
new_lines = lines[:i] + PY_HEADER_LINES + lines[i:]
write_lines(path, new_lines)
def add_header_to_javascript_file(path):
lines = get_lines(path)
if lines:
new_lines = JS_HEADER_LINES + lines
write_lines(path, new_lines)
def get_lines(path):
with open(path, 'rb') as fp:
contents = fp.read()
if not contents.strip():
return []
return contents.splitlines()
def write_lines(path, new_lines):
with open(path, 'wb') as fp:
for line in new_lines:
fp.write(line + b'\n')
if __name__ == '__main__':
sys.exit(main())
| agpl-3.0 | 5,779,910,031,184,858,000 | 26.910959 | 115 | 0.593374 | false |
GNOME/gegl | tests/test-runner.py | 1 | 10509 | #!/usr/bin/env python3
#
# Copyright John Marshall 2020
#
# Flatten function is from a comment by Jordan Callicoat on
# http://code.activestate.com/recipes/363051-flatten/
#
from __future__ import print_function
import os
import sys
import argparse
import errno
import subprocess
class Args():
def __init__(self):
parser = argparse.ArgumentParser()
parser.add_argument(
'--verbose',
action='store_true'
)
parser.add_argument(
'--test-name',
required=True,
metavar='TEST_NAME',
help='test name'
)
parser.add_argument(
'--build-root',
metavar='BUILD_ROOT',
help='root directory for build'
)
parser.add_argument(
'--output-dir',
default='output',
metavar='OUTPUT_DIR',
help='directory for output files'
)
parser.add_argument(
'--reference-path',
metavar='REF_PATH',
help='reference file or directory'
)
parser.add_argument(
'--input-file',
required=True,
metavar='INPUT_FILE',
help='input file for processing'
)
parser.add_argument(
'--gegl-exe',
metavar='GEGL',
help='gegl program'
)
parser.add_argument(
'--gegl-scale',
metavar='GEGL_SCALE',
help='gegl --scale value'
)
parser.add_argument(
'--gegl-ops',
nargs='*',
metavar='OPS',
help='gegl operations'
)
parser.add_argument(
'--imgcmp-exe',
metavar='IMGCMP',
help='imgcmp program'
)
parser.add_argument(
'--imgcmp-args',
nargs='*',
metavar='ARGS',
help='imgcmp runtime arguments'
)
parser.add_argument(
'--with-opencl',
action='store_true',
help='enable OpenCL'
)
parser.add_argument(
'--detect-opencl-exe',
metavar='DETECT_OPENCL',
help='OpenCL enabled check program'
)
parser.add_argument(
'--generate-reference',
action='store_true',
help='generate non OpenCL reference for OpenCL test'
)
# verbose
self.verbose = parser.parse_args().verbose
# test name
self.test_name = parser.parse_args().test_name
# set source dir from this file
self.source_dir = os.path.realpath(
os.path.join(os.path.dirname(__file__))
)
# get build directory from parameter
if parser.parse_args().build_root:
self.build_root = os.path.realpath(
parser.parse_args().build_root
)
else:
self.build_root = os.environ.get('ABS_TOP_BUILDDIR')
if self.verbose: print('build root: %s' % self.build_root)
# get output directory from parameter
self.output_dir = os.path.realpath(
os.path.join(parser.parse_args().output_dir)
)
if self.verbose: print('output dir: %s' % self.output_dir)
# get reference directory from parameter
if parser.parse_args().reference_path:
self.reference_path = os.path.realpath(
parser.parse_args().reference_path
)
else:
self.reference_path = os.path.realpath(
os.path.join(self.source_dir, 'reference')
)
if self.verbose: print('ref path: %s' % self.reference_path)
# input file from parameter
if parser.parse_args().input_file:
self.input_file = os.path.realpath(
parser.parse_args().input_file
)
else:
self.input_file = None
if self.verbose: print('input files: %s' % self.input_file)
# gegl args
if parser.parse_args().gegl_scale:
self.gegl_args = ['-s', parser.parse_args().gegl_scale]
else:
self.gegl_args = None
# gegl operations
self.gegl_ops = parser.parse_args().gegl_ops
# gegl-imgcmp arguments
self.imgcmp_args = parser.parse_args().imgcmp_args
# with OpenCL
self.with_opencl = parser.parse_args().with_opencl
# generate refrerence
self.generate_ref = parser.parse_args().generate_reference
if self.generate_ref and not self.with_opencl:
self.generate_ref = False
print('--generate-reference only valid with --with-opencl '
+ '- option ignored'
)
# executables
if sys.platform == 'win32':
exe_ext = '.exe'
else:
exe_ext = ''
# gegl
if parser.parse_args().gegl_exe:
self.gegl_exe = parser.parse_args().gegl_exe
else:
self.gegl_exe = os.path.join(
self.build_root, 'bin', 'gegl' + exe_ext
)
self.gegl_exe = os.path.realpath(self.gegl_exe)
if self.verbose: print('gegl exe: %s' % self.gegl_exe)
# imgcmp
if parser.parse_args().imgcmp_exe:
self.imgcmp_exe = parser.parse_args().imgcmp_exe
else:
self.imgcmp_exe = os.path.join(
self.build_root, 'tools', 'gegl-imgcmp' + exe_ext
)
self.imgcmp_exe = os.path.realpath(self.imgcmp_exe)
if self.verbose: print('imgcmp exe: %s' % self.imgcmp_exe)
# detect opencl
if parser.parse_args().detect_opencl_exe:
self.detect_ocl_exe = parser.parse_args().detect_opencl_exe
else:
self.detect_ocl_exe = os.path.realpath(os.path.join(
self.build_root, 'tools', 'detect_opencl' + exe_ext
)
)
self.detect_ocl_exe = os.path.realpath(self.detect_ocl_exe)
if self.verbose: print(
'detect OpenCL exe: %s' % self.detect_ocl_exe
)
def flatten(l, ltypes=(list, tuple)):
ltype = type(l)
l = list(l)
i = 0
while i < len(l):
while isinstance(l[i], ltypes):
if not l[i]:
l.pop(i)
i -= 1
break
else:
l[i:i + 1] = l[i]
i += 1
return ltype(l)
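# Doctest-style illustration of flatten (added for clarity, not in the original):
#   >>> flatten([1, [2, [3, 4]], (5,)])
#   [1, 2, 3, 4, 5]
# Nested lists/tuples are expanded in place and the outermost sequence type is kept.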
def main():
args = Args()
# set test environment
test_env = os.environ.copy()
if args.with_opencl:
try:
subprocess.check_output(args.detect_ocl_exe, env=test_env)
except:
print('Skipping - OpenCL not available')
sys.exit(77)
if args.verbose: print('Running with OpenCL')
if not os.path.exists(args.input_file):
print('Skipping - cannot find input file: %s' % args.input_file)
sys.exit(77)
if not os.path.exists(args.gegl_exe):
print('Skipping - cannot find gegl: %s' % args.gegl_exe)
sys.exit(77)
if not os.path.exists(args.imgcmp_exe):
print('Skipping - cannot find imgcmp: %s' % args.imgcmp_exe)
sys.exit(77)
# find reference file for comparison
if args.generate_ref:
file_ext = '.png'
if not os.path.exists(args.reference_path):
try:
os.makedirs(args.reference_path, 0o700)
except OSError as err:
if err.errno != errno.EEXIST:
raise
if args.reference_path == args.output_dir:
ref_file = os.path.join(
args.reference_path, args.test_name + '_ref' + file_ext
)
else:
ref_file = os.path.join(
args.reference_path, args.test_name + file_ext
)
else:
if os.path.isfile(args.reference_path):
ref_file = args.reference_path
file_ext = os.path.splitext(ref_file)[1]
elif os.path.isdir(args.reference_path):
# find reference file matching test name in ref dir
for file_ext in ['.png', '.hdr', '.gegl']:
ref_file = os.path.join(
args.reference_path, args.test_name + file_ext
)
if os.path.exists(ref_file):
break
else:
print('Skipping - cannot find test reference file')
sys.exit(77)
else:
print('Skipping - cannot find test reference file')
sys.exit(77)
if args.verbose: print('reference file: %s' % ref_file)
output_file = os.path.join(
args.output_dir, args.test_name + file_ext
)
if args.verbose: print('output file: %s' % output_file)
if not os.path.exists(args.output_dir):
try:
os.makedirs(args.output_dir, 0o700)
except OSError as err:
if err.errno != errno.EEXIST:
raise
if args.generate_ref:
ref_cmd = [args.gegl_exe, args.input_file, '-o', ref_file]
if args.gegl_args: ref_cmd += args.gegl_args
if args.gegl_ops: ref_cmd += ['--', args.gegl_ops]
ref_cmd = flatten(ref_cmd)
gegl_cmd = [args.gegl_exe, args.input_file, '-o', output_file]
if args.gegl_args: gegl_cmd += args.gegl_args
if args.gegl_ops: gegl_cmd += ['--', args.gegl_ops]
gegl_cmd = flatten(gegl_cmd)
imgcmp_cmd = [args.imgcmp_exe, ref_file, output_file]
if args.imgcmp_args: imgcmp_cmd += args.imgcmp_args
imgcmp_cmd = flatten(imgcmp_cmd)
no_ocl_env = test_env
no_ocl_env["GEGL_USE_OPENCL"] = "no"
if args.verbose:
if args.generate_ref:
print('ref cmd: %s' % ' '.join(ref_cmd))
print('gegl cmd: %s' % ' '.join(gegl_cmd))
print('imgcmp cmd: %s' % ' '.join(imgcmp_cmd))
sys.stdout.flush()
try:
# run gegl to produce reference image
if args.generate_ref:
subprocess.check_call(ref_cmd, env=no_ocl_env)
# run gegl to produce test image
subprocess.check_call(gegl_cmd, env=test_env)
# run imgcmp to compare output against reference
subprocess.check_call(imgcmp_cmd, env=no_ocl_env)
except KeyboardInterrupt:
raise
except subprocess.CalledProcessError as err:
sys.stdout.flush()
sys.exit(err.returncode)
sys.stdout.flush()
sys.exit(0)
if __name__ == '__main__':
main() | lgpl-3.0 | 8,229,793,823,248,587,000 | 30.186944 | 72 | 0.530307 | false |