| repo_name (string, 5 to 92 chars) | path (string, 4 to 232 chars) | copies (string, 19 distinct values) | size (string, 4 to 7 chars) | content (string, 721 to 1.04M chars) | license (string, 15 distinct values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51 to 99.9) | line_max (int64, 15 to 997) | alpha_frac (float64, 0.25 to 0.97) | autogenerated (bool, 1 distinct value) |
|---|---|---|---|---|---|---|---|---|---|---|
kata198/VirtualEnvOnDemand | VirtualEnvOnDemand/utils.py | 1 | 4791 | # Copyright (c) 2015, 2016 Timothy Savannah under terms of LGPLv3. You should have received a copy of this with this distribution as "LICENSE"
'''
utils - Some general-purpose utility functions
'''
import re
__all__ = ('cmp_version', )
# Yes, cmp is DA BOMB. What a huge mistake removing it from the language!!
try:
cmp
except NameError:
def cmp(a, b):
if a < b:
return -1
elif a > b:
return 1
return 0
# ALPHA_OR_NUM_RE - groups of letters OR numbers
ALPHA_OR_NUM_RE = re.compile('([a-zA-Z]+)|([0-9]+)')
# The following method is slightly-modified from my Public Domain project, cmp_version
# https://pypi.python.org/pypi/cmp_version
# This is copied so as to retain the "all you need is virtualenv and this module" promise.
def cmp_version(version1, version2):
'''
cmp_version - Compare version1 and version2.
    Returns cmp-style (C-style), i.e. < 0 if lefthand (version1) is less, 0 if equal, > 0 if lefthand (version1) is greater.
@param version1 <str> - String of a version
@param version2 <str> - String of a version
@return <int> -
-1 if version1 is < version2
0 if version1 is = version2
1 if version1 is > version2
'''
version1 = str(version1)
version2 = str(version2)
if version1 == version2:
return 0
# Pad left or right if we have empty blocks
if version1.startswith('.'):
version1 = '0' + version1
if version1.endswith('.'):
version1 = version1 + '0'
if version2.startswith('.'):
version2 = '0' + version2
if version2.endswith('.'):
version2 = version2 + '0'
# Consider dots as separating "blocks", i.e. major/minor/patch/subpatch/monkey-finger, whatever.
version1Split = version1.split('.')
version2Split = version2.split('.')
version1Len = len(version1Split)
version2Len = len(version2Split)
# Ensure we have the same number of blocks in both versions, by padding '0' blocks on the end of the shorter.
    # This ensures that "1.2" is equal to "1.2.0", and greatly simplifies the comparison loop below.
while version1Len < version2Len:
version1Split += ['0']
version1Len += 1
while version2Len < version1Len:
version2Split += ['0']
version2Len += 1
# See if the padding has made these equal
if version1Split == version2Split:
return 0
# Go through each block (they have same len at this point)
for i in range(version1Len):
try:
# Try to compare this block as an integer. If both are integers, but different,
# we have our answer.
cmpRes = cmp(int(version1Split[i]), int(version2Split[i]))
if cmpRes != 0:
return cmpRes
except ValueError:
# Some sort of letter in here
# So split up the sub-blocks of letters OR numbers for comparison
# Note, we don't try to pad here.
# i.e. "1.2a" < "1.2a0".
            # This is subjective. I personally think this is correct the way it is.
try1 = ALPHA_OR_NUM_RE.findall(version1Split[i])
try1Len = len(try1)
try2 = ALPHA_OR_NUM_RE.findall(version2Split[i])
try2Len = len(try2)
# Go block-by-block. Each block is a set of contiguous numbers or letters.
# Letters are greater than numbers.
for j in range(len(try1)):
if j >= try2Len:
return 1
testSet1 = try1[j]
testSet2 = try2[j]
res1 = cmp(testSet1[0], testSet2[0])
if res1 != 0:
return res1
res2 = 0
if testSet1[1].isdigit():
if testSet2[1].isdigit():
res2 = cmp(int(testSet1[1]), int(testSet2[1]))
else:
return 1
else:
if testSet2[1].isdigit():
return 1
if res2 != 0:
return res2
if try2Len > try1Len:
return -1
# Equal !
return 0
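# Illustrative behaviour, assumed from the rules above (not taken from the
# original cmp_version project):
#   cmp_version('1.2', '1.2.0')  ->  0   (missing blocks are padded with '0')
#   cmp_version('1.2', '1.10')   -> -1   (numeric blocks compare numerically)
#   cmp_version('1.2a', '1.2a0') -> -1   (fewer sub-blocks sorts first, per the note above)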
def writeStrToFile(filename, contents):
'''
writeStrToFile - Writes some data to a provided filename.
@param filename <str> - A path to a file
@param contents <str> - The contents to write to the file, replacing any previous contents.
@return <None/Exception> - None if all goes well, otherwise the Exception raised
'''
try:
with open(filename, 'wt') as f:
f.write(contents)
except Exception as e:
return e
return None
| lgpl-3.0 | -1,650,266,796,145,774,600 | 31.371622 | 142 | 0.567314 | false |
domain51/d51.django.apps.logger | d51/django/apps/logger/tests/views.py | 1 | 1154 | import datetime
from django.test import TestCase
from django.test.client import Client
from ..models import Hit
from .utils import build_hit_url, random_url
class TestOfHitView(TestCase):
def test_logs_hit(self):
url = random_url()
c = Client()
response = c.get(build_hit_url(url))
hit = Hit.objects.get(url=url)
def test_stores_current_time(self):
url = random_url()
response = Client().get(build_hit_url(url))
hit = Hit.objects.get(url=url)
self.assert_(isinstance(hit.created_on, datetime.datetime))
self.assert_((datetime.datetime.now() - hit.created_on).seconds < 1,
"Check creation time, might fail on slow machines/network connections.")
def test_redirects_to_url(self):
url = random_url()
response = Client().get(build_hit_url(url))
self.assertEquals(response.status_code, 302)
# TODO: refactor this - we can't use assertRedirect() because it
# tries to load crap, but this test should be simplified
self.assertEquals(response._headers['location'][1], url, "ensure redirection took place")
| gpl-3.0 | -987,452,888,625,049,100 | 36.225806 | 97 | 0.652513 | false |
dls-controls/scanpointgenerator | tests/test_core/test_get_points_performance.py | 1 | 5693 | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import unittest
import time
from test_util import ScanPointGeneratorTest
from scanpointgenerator import CompoundGenerator
from scanpointgenerator import LineGenerator, StaticPointGenerator, LissajousGenerator, SpiralGenerator
from scanpointgenerator import CircularROI, RectangularROI, ROIExcluder
from scanpointgenerator import RandomOffsetMutator
# Jython gets 3x as long
TIMELIMIT = 3 if os.name == "java" else 1
# Goal is 10kHz, i.e. 1s per 10,000 (1e4) points
class GetPointsPerformanceTest(ScanPointGeneratorTest):
def test_90_thousand_time_constraint(self):
z = LineGenerator("z", "mm", 0, 1, 300, True)
w = LineGenerator("w", "mm", 0, 1, 300, True)
g = CompoundGenerator([z, w], [], [])
g.prepare() # 9e4 points
start_time = time.time()
C = g.get_points(0, 90000)
end_time = time.time()
self.assertLess(end_time - start_time, TIMELIMIT*9)
def test_30_thousand_time_constraint_with_outer(self):
a = StaticPointGenerator(1)
z = LineGenerator("z", "mm", 0, 1, 300, True)
w = LineGenerator("w", "mm", 0, 1, 300, True)
g = CompoundGenerator([a, z, w], [], [])
g.prepare() # 9e4 points
start_time = time.time()
C = g.get_points(0, 30000)
end_time = time.time()
self.assertLess(end_time - start_time, TIMELIMIT*3)
def test_30_thousand_time_constraint_with_inner(self):
a = StaticPointGenerator(1)
z = LineGenerator("z", "mm", 0, 1, 300, True)
w = LineGenerator("w", "mm", 0, 1, 300, True)
g = CompoundGenerator([z, w, a], [], [])
g.prepare() # 9e4 points
start_time = time.time()
C = g.get_points(0, 30000)
end_time = time.time()
self.assertLess(end_time - start_time, TIMELIMIT*3)
@unittest.skip("Unsuitable")
def test_1_million_time_constraint_complex(self):
a = LineGenerator("a", "mm", 0, 1, 10, True)
b = LineGenerator("b", "mm", 0, 1, 10)
c = LineGenerator("c", "mm", 0, 1, 10)
d = LineGenerator("d", "mm", 0, 1, 10)
e = LineGenerator("e", "mm", 0, 1, 10)
f = LineGenerator("f", "mm", 0, 1, 10)
g = CompoundGenerator([b, c, d, e, f, a], [], [])
g.prepare() # 1e6 points
start_time = time.time()
C = g.get_points(0, 1000000)
end_time = time.time()
# if this test becomes problematic then we'll just have to remove it
self.assertLess(end_time - start_time, TIMELIMIT*100)
@unittest.skip("Unsuitable")
def test_small_time_constraint_complex(self):
a = LineGenerator("a", "mm", 0, 1, 10, True)
b = LineGenerator("b", "mm", 0, 1, 10)
c = LineGenerator("c", "mm", 0, 1, 10)
d = LineGenerator("d", "mm", 0, 1, 10)
e = LineGenerator("e", "mm", 0, 1, 10)
f = LineGenerator("f", "mm", 0, 1, 10)
g = CompoundGenerator([b, c, d, e, f, a], [], [])
g.prepare() # 1e6 points
start_time = time.time()
C = g.get_points(0, 1)
end_time = time.time()
# if this test becomes problematic then we'll just have to remove it
self.assertLess(end_time - start_time, TIMELIMIT*1e-4)
start_time = time.time()
C = g.get_points(0, 10)
end_time = time.time()
# if this test becomes problematic then we'll just have to remove it
self.assertLess(end_time - start_time, TIMELIMIT*1e-3)
start_time = time.time()
C = g.get_points(0, 100)
end_time = time.time()
# if this test becomes problematic then we'll just have to remove it
self.assertLess(end_time - start_time, TIMELIMIT*1e-2)
def test_roi_time_constraint(self):
a = LineGenerator("a", "mm", 0, 1, 300, True)
b = LineGenerator("b", "mm", 0, 1, 300)
r1 = CircularROI([0.25, 0.33], 0.1)
e1 = ROIExcluder([r1], ["a", "b"])
g = CompoundGenerator([b, a], [e1], [])
g.prepare() # ~2,800 points
start_time = time.time()
C = g.get_points(0, 1000)
end_time = time.time()
# if this test becomes problematic then we'll just have to remove it
self.assertLess(end_time - start_time, TIMELIMIT*0.1)
start_time = time.time()
C = g.get_points(0, 2800)
end_time = time.time()
# if this test becomes problematic then we'll just have to remove it
self.assertLess(end_time - start_time, TIMELIMIT*0.28)
@unittest.skip("Unsuitable")
def test_time_constraint_complex(self):
a = LineGenerator("a", "eV", 0, 1, 10)
b = LineGenerator("b", "rad", 0, 1, 10)
c = LissajousGenerator(["c", "d"], ["mm", "cm"], [0, 0], [5, 5], 3, 10)
d = SpiralGenerator(["e", "f"], ["mm", "s"], [10, 5], 7, 0.6)
e = LineGenerator("g", "mm", 0, 1, 10)
f = LineGenerator("h", "mm", 0, 5, 20)
r1 = CircularROI([0.2, 0.2], 0.1)
r2 = CircularROI([0.4, 0.2], 0.1)
r3 = CircularROI([0.6, 0.2], 0.1)
e1 = ROIExcluder([r1, r2, r3], ["a", "b"])
m1 = RandomOffsetMutator(12, ["a"], [0.1])
m2 = RandomOffsetMutator(200, ["c", "f"], [0.01, 0.5])
g = CompoundGenerator([c, d, e, f, b, a], [e1], [m1, m2])
g.prepare() # 1e6 points
for i in [1, 2, 3, 4, 5, 6]:
p = 10**i
start_time = time.time()
g.get_points(0, p)
end_time = time.time()
self.assertLess(end_time - start_time, TIMELIMIT*p/1e4)
if __name__ == "__main__":
unittest.main(verbosity=2)
| apache-2.0 | -2,565,975,008,624,779,000 | 40.253623 | 103 | 0.560864 | false |
roam/machete | machete/endpoints.py | 1 | 25618 | # -*- coding: utf-8 -*-
from __future__ import (unicode_literals, print_function, division,
absolute_import)
import sys
import hashlib
from contextlib import contextmanager
from django.views.decorators.csrf import csrf_exempt
from django.db import transaction, models
from django.views.generic import View
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.http import HttpResponse, Http404
from django.utils.http import quote_etag, parse_etags
from .serializers import serialize
from .urls import create_resource_view_name
from .exceptions import (JsonApiError, MissingRequestBody, InvalidDataFormat,
IdMismatch, FormValidationError)
from .utils import (RequestContext, RequestWithResourceContext, pluck_ids,
RequestPayloadDescriptor)
from . import compat, json
@contextmanager
def not_atomic(using=None):
yield
class GetEndpoint(View):
"""
Extends a generic View to provide support for retrieving resources.
Some methods might seem convoluted, but they're mostly built that
way to provide useful points of extension/override. Methods are
rarely passed all information, but request-method methods
(get, post,...) should provide a context object containing the
necessary information under ``self.context``.
"""
context = None
content_type = 'application/json' # Default to this for now; works better in browsers
methods = ['get']
pks_url_key = 'pks'
pk_field = 'pk'
queryset = None
model = None
form_class = None
filter_class = None
include_link_to_self = False
etag_attribute = None
def __init__(self, *args, **kwargs):
super(GetEndpoint, self).__init__(*args, **kwargs)
# Django uses http_method_names to know which methods are
# supported, we always add options on top which will advertise
# the actual methods we support.
self.http_method_names = self.get_methods() + ['options']
@classmethod
def endpoint(cls, **initkwargs):
return csrf_exempt(cls.as_view(**initkwargs))
def dispatch(self, request, *args, **kwargs):
# Override dispatch to enable the handling or errors we can
# handle.
# Because Django 1.4 only sets the request parameters in
# dispatch we'll set them right now ourselves.
self.request = request
self.args = args
self.kwargs = kwargs
manager, m_args, m_kwargs = self.context_manager()
try:
with manager(*m_args, **m_kwargs):
return super(GetEndpoint, self).dispatch(request, *args, **kwargs)
except Exception as error:
et, ei, tb = sys.exc_info()
return self.handle_error(error, tb)
def options(self, request, *args, **kwargs):
# From the JSON API FAQ:
# http://jsonapi.org/faq/#how-to-discover-resource-possible-actions
self.context = self.create_get_context(request)
actions = self.possible_actions()
return HttpResponse(','.join(a.upper() for a in actions))
def possible_actions(self):
"""
Returns a list of allowed methods for this endpoint.
You can use the context (a GET context) to determine what's
possible. By default this simply returns all allowed methods.
"""
return self.get_methods()
def get(self, request, *args, **kwargs):
self.context = self.create_get_context(request)
if not self.has_etag_changed():
content_type = self.get_content_type()
return HttpResponse(status=304, content_type=content_type)
collection = False
if self.context.requested_single_resource:
data = self.get_resource()
else:
data = self.get_resources()
collection = True
return self.create_http_response(data, collection=collection, compound=True)
def has_etag_changed(self):
if not self.etag_attribute:
return True
etag = self.generate_etag()
if not etag:
return True
match = self.request.META.get('HTTP_IF_NONE_MATCH')
if match:
values = parse_etags(match)
for value in values:
# Django appends ";gzip" when gzip is enabled
clean_value = value.split(';')[0]
if clean_value == '*' or clean_value == etag:
return False
return True
def generate_etag(self):
if not self.etag_attribute:
return None
qs = self.get_filtered_queryset()
values = qs.values_list(self.etag_attribute, flat=True)
etag = ','.join('%s' % value for value in values)
return hashlib.md5(etag).hexdigest()
def create_http_response(self, data, collection=False, compound=False):
"""
Creates a HTTP response from the data.
The data might be an (a) HttpResponse object, (b) dict or (c)
object that can be serialized.
HttpResponse objects will simply be returned without further
processing, dicts will be turned into JSON and returned as a
response using the status attribute of the context. Other
objects will be serialized using ``serialize`` method.
"""
if isinstance(data, HttpResponse):
# No more processing necessary
return data
if isinstance(data, dict):
# How nice. Use it!
response_data = data
else:
# Everything else: run it through the serialization process
response_data = self.serialize(data, collection=collection, compound=compound)
json_data = self.create_json(response_data, indent=2)
status = self.context.status
content_type = self.get_content_type()
response = HttpResponse(json_data, content_type=content_type, status=status)
return self.postprocess_response(response, data, response_data, collection)
def serialize(self, data, collection=False, compound=False):
"""
Serializes the data.
Note that a serializer must have been registered with the name
of this resource or relationship, depending on the request type.
"""
name = self.get_resource_type()
context = self.context.__dict__
self_link = self.include_link_to_self
fields = self.context.resource_descriptor.fields
only = fields if fields else None
return serialize(name, data, many=collection, compound=compound, context=context, self_link=self_link, only=only)
def get_resource_type(self):
return self.resource_name
def handle_error(self, error, traceback=None):
# TODO Improve error reporting
error_object = {}
if isinstance(error, FormValidationError):
errors = []
for field, itemized_errors in error.form.errors.items():
composite = field == '__all__'
for e in itemized_errors:
detail = {'detail': '%s' % e}
if not composite:
detail['member'] = field
detail['member_label'] = '%s' % error.form.fields.get(field).label
errors.append(detail)
return HttpResponse(self.create_json({'errors': errors}), status=400)
if isinstance(error, Http404):
error_object['message'] = '%s' % error
return HttpResponse(self.create_json({'errors': [error_object]}), status=404)
if isinstance(error, JsonApiError):
error_object['message'] = '%s' % error
return HttpResponse(self.create_json({'errors': [error_object]}), status=500)
raise error.__class__, error, traceback
def postprocess_response(self, response, data, response_data, collection):
"""
If you need to do any further processing of the HttpResponse
objects, this is the place to do it.
"""
etag = self.generate_etag()
if etag:
response['ETag'] = quote_etag(etag)
response['Cache-Control'] = 'private, max-age=0'
return response
def get_resource(self):
"""
Grabs the resource for a resource request.
Maps to ``GET /posts/1``.
"""
filter = {self.get_pk_field(): self.context.pk}
return self.get_filtered_queryset().get(**filter)
def get_resources(self):
"""
Grabs the resources for a collection request.
Maps to ``GET /posts/1,2,3`` or ``GET /posts``.
"""
qs = self.get_filtered_queryset()
if self.context.pks:
filter = {'%s__in' % self.get_pk_field(): self.context.pks}
qs = qs.filter(**filter)
if self.context.pks and not qs.exists():
raise Http404()
return qs
def get_filtered_queryset(self):
qs = self.get_queryset()
if self.filter_class:
return self.filter_class(self.request.GET, queryset=qs).qs
return qs
def is_changed_besides(self, resource, model):
# TODO Perform simple diff of serialized model with resource
return False
def get_pk_field(self):
"""
Determines the name of the primary key field of the model.
Either set the ``pk_field`` on the class or override this method
when your model's primary key points to another field than the
default.
"""
return self.pk_field
def get_queryset(self):
"""
Get the list of items for this main resource.
This must be an iterable, and may be a queryset
(in which qs-specific behavior will be enabled).
"""
if self.queryset is not None:
queryset = self.queryset
if hasattr(queryset, '_clone'):
queryset = queryset._clone()
elif self.model is not None:
queryset = self.model._default_manager.all()
else:
raise ImproperlyConfigured("'%s' must define 'queryset' or 'model'"
% self.__class__.__name__)
return queryset
def get_content_type(self):
"""
Determines the content type of responses.
Override this method or set ``content_type`` on the class.
"""
return self.content_type
def create_get_context(self, request):
"""Creates the context for a GET request."""
pks = self.kwargs.get(self.pks_url_key, '')
pks = pks.split(',') if pks else []
fields = request.GET.get('fields')
fields = None if not fields else fields.split(',')
resource_descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks, fields=fields)
context = RequestContext(request, resource_descriptor)
context.update_mode('GET')
return context
def extract_resources(self, request):
"""
Extracts resources from the request body.
This should probably be moved elsewhere since it doesn't make
sense in a GET request. But still.
"""
body = request.body
if not body:
raise MissingRequestBody()
resource_name = self.resource_name
try:
data = self.parse_json(body)
if not resource_name in data:
raise InvalidDataFormat('Missing %s as key' % resource_name)
obj = data[resource_name]
if isinstance(obj, list):
resource = None
resources = obj
else:
resource = obj
resources = [obj]
return RequestPayloadDescriptor(resource_name, resources, resource)
except ValueError:
raise InvalidDataFormat()
def parse_json(self, data):
return json.loads(data)
def create_json(self, data, *args, **kwargs):
return json.dumps(data, *args, **kwargs)
def get_methods(self):
return self.methods
def context_manager(self):
if self.request.method in ['POST', 'PUT', 'DELETE', 'PATCH']:
return (transaction.atomic, [], {})
return (not_atomic, [], {})
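# A minimal sketch of a read-only endpoint built on GetEndpoint (Article is a
# hypothetical model used only for illustration; a serializer must also be
# registered for the chosen resource_name, as serialize() above expects):
#
#     class ArticleEndpoint(GetEndpoint):
#         resource_name = 'articles'
#         model = Article
#
# GET requests for the collection or for specific pks are then answered through
# get_resources() / get_resource(); routing depends on the project's urls module.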
class GetLinkedEndpoint(GetEndpoint):
relationship_name = None
relationship_pks_url_keys = None
relationship_pk_fields = None
@classmethod
def endpoint(cls, relationship_name=None, **initkwargs):
initkwargs['relationship_name'] = relationship_name
return csrf_exempt(cls.as_view(**initkwargs))
def dispatch(self, request, *args, **kwargs):
if not self.relationship_name:
self.relationship_name = kwargs.get('relationship')
return super(GetLinkedEndpoint, self).dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
self.context = self.create_get_context(request)
collection = False
# We're dealing with a request for a related resource
if self.context.requested_single_related_resource or not self.context.to_many:
# Either a single relationship id was passed in or the
# relationship is a to-one
data = self.get_related_resource()
else:
# Multiple relationship ids or a to-many relationship
data = self.get_related_resources()
collection = True
return self.create_http_response(data, collection=collection)
def get_related_resource(self):
"""
Handles the retrieval of a related resource.
This will be called when either a single relationship instance
was requested or the relationship is to-one.
"""
qs = self.get_related_queryset()
if not self.context.to_many:
# Since it's not a to-many, we can simply return the value
return qs
pk_field = self.get_relationship_pk_field()
filter = {pk_field: self.context.relationship_pk}
return qs.get(**filter)
def get_related_resources(self):
"""
Handles the retrieval of multiple related resources.
This will be called when either a multiple relationship
instances were requested or no ids were supplied.
"""
qs = self.get_related_queryset().all()
if self.context.relationship_pks:
pk_field = self.get_relationship_pk_field()
filter = {'%s__in' % pk_field: self.context.relationship_pks}
qs = qs.filter(**filter)
if not qs.exists():
raise Http404()
return qs
def get_related_queryset(self):
field_name = self.get_related_field_name()
resource = self.get_resource()
return getattr(resource, field_name)
def get_resource_type(self):
return self.relationship_name
def create_get_context(self, request):
"""Creates the context for a GET request."""
pks = self.kwargs.get(self.pks_url_key, '')
pks = pks.split(',') if pks else []
rel_pks_url_key = self.get_relationship_pks_url_key()
rel_pks = self.kwargs.get(rel_pks_url_key, '')
rel_pks = rel_pks.split(',') if rel_pks else []
many = self.is_to_many_relationship()
rel_descriptor = RequestContext.create_relationship_descriptor(self.relationship_name, rel_pks, many)
resource_descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks, rel_descriptor)
context = RequestContext(request, resource_descriptor)
context.update_mode('GET')
return context
def get_related_field_name(self):
# TODO Use serializer to find correct name by default
return self.relationship_name
def get_relationship_pks_url_key(self):
rel_name = self.get_related_field_name()
keys = self.relationship_pks_url_keys
keys = keys if keys else {}
return keys.get(rel_name, 'rel_pks')
def get_relationship_pk_field(self):
rel_name = self.get_related_field_name()
fields = self.relationship_pk_fields
fields = fields if fields else {}
return fields.get(rel_name, 'pk')
def is_to_many_relationship(self):
rel_name = self.get_related_field_name()
if self.model:
model = self.model
elif self.queryset:
model = self.queryset.model
else:
model = self.get_queryset().model
meta = model._meta
field_object, model, direct, m2m = compat.get_field_by_name(meta, rel_name)
if direct:
return m2m
return field_object.field.rel.multiple
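# Sketch of a linked endpoint (Post and its 'comments' relation are hypothetical
# names, not defined in this project):
#
#     class PostLinksEndpoint(GetLinkedEndpoint):
#         resource_name = 'posts'
#         model = Post
#         relationship_name = 'comments'
#
# Requests for a post's comments are then served via get_related_resource(s),
# with to-one/to-many handling decided by is_to_many_relationship() above.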
class WithFormMixin(object):
"""
Mixin supporting create and update of resources with a model form.
Note that it relies on some methods made available by the
GetEndpoint.
"""
form_class = None
def get_form_kwargs(self, **kwargs):
return kwargs
def get_form_class(self):
return self.form_class
def form_valid(self, form):
return form.save()
def form_invalid(self, form):
raise FormValidationError('', form=form)
def get_form(self, resource, instance=None):
"""Constructs a new form instance with the supplied data."""
data = self.prepare_form_data(resource, instance)
form_kwargs = {'data': data, 'instance': instance}
form_kwargs = self.get_form_kwargs(**form_kwargs)
form_class = self.get_form_class()
if not form_class:
raise ImproperlyConfigured('Missing form_class')
return form_class(**form_kwargs)
def prepare_form_data(self, resource, instance=None):
"""Last chance to tweak the data being passed to the form."""
if instance:
# The instance is converted to JSON and then loaded to ensure
# special encodings (like timezone-conversion) are performed
as_json = self.create_json(self.serialize(instance, compound=False))
original = json.loads(as_json)
original = original[self.resource_name]
merged = dict(original.items() + original.get('links', {}).items())
data = dict(resource.items() + resource.get('links', {}).items())
for field, value in data.items():
if value is None:
merged[field] = None
else:
merged[field] = value
return merged
return dict(resource.items() + resource.get('links', {}).items())
class PostMixin(object):
"""
Provides support for POST requests on resources.
The ``create_resource`` method must be implemented to actually do
something.
"""
def get_methods(self):
return super(PostMixin, self).get_methods() + ['post']
def post(self, request, *args, **kwargs):
self.context = self.create_post_context(request)
collection = False
payload = self.context.payload
if payload.many:
data = self.create_resources(payload.resources)
collection = True
else:
data = self.create_resource(payload.resource)
return self.create_http_response(data, collection=collection)
def create_post_context(self, request):
payload = self.extract_resources(request)
descriptor = RequestContext.create_resource_descriptor(self.resource_name)
context = RequestWithResourceContext(request, descriptor, payload, status=201)
context.update_mode('POST')
return context
def create_resources(self, resources):
return [self.create_resource(r) for r in resources]
def create_resource(self, resource):
"""Create the resource and return the corresponding model."""
pass
def postprocess_response(self, response, data, response_data, collection):
response = super(PostMixin, self).postprocess_response(response, data, response_data, collection)
if self.context.status != 201:
return response
pks = ','.join(pluck_ids(response_data, self.resource_name))
location = self.create_resource_url(pks)
response['Location'] = location
return response
def create_resource_url(self, pks):
kwargs = {self.pks_url_key: pks}
return reverse(self.get_url_name(), kwargs=kwargs)
def get_url_name(self):
return create_resource_view_name(self.resource_name)
class PostWithFormMixin(PostMixin, WithFormMixin):
"""
Provides an implementation of ``create_resource`` using a form.
"""
def create_resource(self, resource):
form = self.get_form(resource)
if form.is_valid():
return self.form_valid(form)
return self.form_invalid(form)
class PutMixin(object):
"""
Provides support for PUT requests on resources.
This supports both full and partial updates, on single and multiple
resources.
Requires ``update_resource`` to be implemented.
"""
def get_methods(self):
return super(PutMixin, self).get_methods() + ['put']
def put(self, request, *args, **kwargs):
self.context = self.create_put_context(request)
collection = False
payload = self.context.payload
if payload.many:
changed_more, data = self.update_resources(payload.resources)
collection = True
else:
changed_more, data = self.update_resource(payload.resource)
if not changed_more:
# > A server MUST return a 204 No Content status code if an update
# > is successful and the client's current attributes remain up to
# > date. This applies to PUT requests as well as POST and DELETE
# > requests that modify links without affecting other attributes
# > of a resource.
return HttpResponse(status=204)
return self.create_http_response(data, collection=collection)
def create_put_context(self, request):
pks = self.kwargs.get(self.pks_url_key, '')
pks = pks.split(',') if pks else []
payload = self.extract_resources(request)
descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks)
context = RequestWithResourceContext(request, descriptor, payload, status=200)
context.update_mode('PUT')
return context
def update_resources(self, resources):
updated = []
changed = []
for res in resources:
changed_more, result = self.update_resource(res)
updated.append(result)
changed.append(changed_more)
return any(changed), updated
def update_resource(self, resource):
pass
class PutWithFormMixin(PutMixin, WithFormMixin):
"""
Provides an implementation of ``update_resource`` using a form.
"""
def update_resource(self, resource):
resource_id = resource['id']
if resource_id not in self.context.pks:
message = 'Id %s in request body but not in URL' % resource_id
raise IdMismatch(message)
filter = {self.get_pk_field(): resource_id}
instance = self.get_queryset().get(**filter)
form = self.get_form(resource, instance)
if form.is_valid():
model = self.form_valid(form)
return self.is_changed_besides(resource, model), model
return self.form_invalid(form)
class DeleteMixin(object):
"""
Provides support for DELETE request on single + multiple resources.
"""
def get_methods(self):
return super(DeleteMixin, self).get_methods() + ['delete']
def delete(self, request, *args, **kwargs):
self.context = self.create_delete_context(request)
if not self.context.pks:
raise Http404('Missing ids')
        # Although the default implementation defers DELETE requests for
        # both single and multiple resources to the ``perform_delete``
        # method, we still split based on whether a single resource or a
        # collection was requested so subclasses can override each case.
if self.context.requested_single_resource:
not_deleted = self.delete_resource()
else:
not_deleted = self.delete_resources()
if not_deleted:
raise Http404('Resources %s not found' % ','.join(not_deleted))
return HttpResponse(status=204)
def create_delete_context(self, request):
pks = self.kwargs.get(self.pks_url_key, '')
pks = pks.split(',') if pks else []
descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks)
context = RequestContext(request, descriptor)
context.update_mode('DELETE')
return context
def delete_resources(self):
return self.perform_delete(self.context.pks)
def delete_resource(self):
return self.perform_delete(self.context.pks)
def perform_delete(self, pks):
not_deleted = pks[:]
filter = {'%s__in' % self.get_pk_field(): pks}
for item in self.get_queryset().filter(**filter).iterator():
# Fetch each item separately to actually trigger any logic
# performed in the delete method (like implicit deletes)
not_deleted.remove('%s' % item.pk)
item.delete()
return not_deleted
class Endpoint(PostWithFormMixin, PutWithFormMixin, DeleteMixin, GetEndpoint):
"""
Ties everything together.
Use this base class when you need to support GET, POST, PUT and
DELETE and want to use a form to process incoming data.
"""
pass
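# Sketch of a full CRUD endpoint using the combined class (Article and
# ArticleForm are hypothetical; ArticleForm would be a regular Django ModelForm):
#
#     class ArticleEndpoint(Endpoint):
#         resource_name = 'articles'
#         model = Article
#         form_class = ArticleForm
#
# POST and PUT bodies are validated through ArticleForm via the WithFormMixin
# hooks, and DELETE is handled by DeleteMixin.perform_delete().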
| bsd-2-clause | 7,528,123,250,925,826,000 | 35.183616 | 121 | 0.61851 | false |
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/application_gateway_path_rule_py3.py | 1 | 3100 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class ApplicationGatewayPathRule(SubResource):
"""Path rule of URL path map of an application gateway.
:param id: Resource ID.
:type id: str
:param paths: Path rules of URL path map.
:type paths: list[str]
:param backend_address_pool: Backend address pool resource of URL path map
path rule.
:type backend_address_pool:
~azure.mgmt.network.v2017_08_01.models.SubResource
:param backend_http_settings: Backend http settings resource of URL path
map path rule.
:type backend_http_settings:
~azure.mgmt.network.v2017_08_01.models.SubResource
:param redirect_configuration: Redirect configuration resource of URL path
map path rule.
:type redirect_configuration:
~azure.mgmt.network.v2017_08_01.models.SubResource
:param provisioning_state: Path rule of URL path map resource. Possible
values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param name: Name of the resource that is unique within a resource group.
This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
:param type: Type of the resource.
:type type: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'paths': {'key': 'properties.paths', 'type': '[str]'},
'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'},
'backend_http_settings': {'key': 'properties.backendHttpSettings', 'type': 'SubResource'},
'redirect_configuration': {'key': 'properties.redirectConfiguration', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(self, *, id: str=None, paths=None, backend_address_pool=None, backend_http_settings=None, redirect_configuration=None, provisioning_state: str=None, name: str=None, etag: str=None, type: str=None, **kwargs) -> None:
super(ApplicationGatewayPathRule, self).__init__(id=id, **kwargs)
self.paths = paths
self.backend_address_pool = backend_address_pool
self.backend_http_settings = backend_http_settings
self.redirect_configuration = redirect_configuration
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
self.type = type
| mit | 1,708,336,736,314,929,700 | 44.588235 | 232 | 0.638387 | false |
Zhang-O/small | tensor__cpu/http/spyser_liyou.py | 1 | 5473 | import urllib.request
from bs4 import BeautifulSoup
import re
import urllib.parse
import xlsxwriter
import pandas as pd
import numpy as np
from urllib import request, parse
from urllib.error import URLError
import json
import multiprocessing
import time
# The URLs of the detail pages are stored here
urls_of_detail = []
total_pages = 0
# The fields to be scraped are stored, in order, in the lists below
_1 = []
_2 = []
_3 = []
_4 = []
_5 = []
issue_date_sum = []
project_address_sum = []
project_sector_sum = []
project_content_sum = []
company_name_sum = []
company_staff_sum = []
company_phone_sum = []
# Top-level (index) URL
url = 'http://www.stc.gov.cn/ZWGK/TZGG/GGSB/'
# page indicates which results page to fetch (0 is the first page)
def get_urls(url,page):
    # Build the form data
# postdata = urllib.parse.urlencode({'currDistrict': '', 'pageNo': page,'hpjgName_hidden':'','keyWordName':''})
# postdata = postdata.encode('utf-8')
#
    # Send the request
# response = urllib.request.urlopen(url, data=postdata)
# html_cont = response.read()
if page == 0:
url = url + 'index.htm'
else:
url = url + 'index_' + str(page) + '.htm'
req = request.Request(url=url)
res_data = request.urlopen(req)
# print(res_data)
html_cont = res_data.read()
    # Parse the document tree
soup = BeautifulSoup(html_cont, 'html.parser', from_encoding='utf-8')
#
    # Use a regular expression to find the <a> elements that hold the second-level URLs
trs = soup.find_all('a', href=re.compile(r"^./201"))
    # Store the second-level URLs in urls_of_detail
for i in trs:
# print(i['href'][2:])
urls_of_detail.append(i['href'][2:])
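# For reference (derived from the branch above): page 0 fetches .../index.htm and
# page N (N > 0) fetches .../index_N.htm, e.g. get_urls(url, 3) requests
# http://www.stc.gov.cn/ZWGK/TZGG/GGSB/index_3.htm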
def get_info(url,second_url):
# s = urllib.request.urlopen(urls_of_detail[0])
    # Request the document
second_url = url + second_url
s = urllib.request.urlopen(second_url)
    # Parse the document
soup = BeautifulSoup(s, 'html.parser', from_encoding='utf-8')
    # The target fields live in td elements with no unique identifiers; find all tds and work out the index of each field of interest in the resulting list
div = soup.find_all('div', class_=re.compile(r"TRS_Editor"))
trs = div[0].find_all('tr')
trs = trs[1:]
# print(trs[0])
print('trs num',len(trs))
for tr in trs:
tds = tr.find_all('td')
if len(tds[0].find_all('font')) > 0 :
if tds[3].find_all('font')[0].string == None:
print(second_url)
_1.append(tds[0].find_all('font')[0].string)
_2.append(tds[1].find_all('font')[0].string)
_3.append(tds[2].find_all('font')[0].string)
_4.append(tds[3].find_all('font')[0].string)
if len(tds) == 5:
_5.append(tds[4].find_all('font')[0].string)
else:
_5.append('null')
elif len(tds[0].find_all('p')) > 0 :
# if tds[3].find_all('p')[0].string == None:
# print(second_url)
_1.append(tds[0].find_all('p')[0].string)
_2.append(tds[1].find_all('p')[0].string)
_3.append(tds[2].find_all('p')[0].string)
if len(tds[3].find_all('p')) > 0:
_4.append(tds[3].find_all('p')[0].string)
else:
_4.append(tds[3].string)
if len(tds) == 5:
_5.append(tds[4])
else:
_5.append('null')
else:
if tds[3].string == None:
print(second_url)
_1.append(tds[0].string)
_2.append(tds[1].string)
if len(tds[2].find_all('span'))>0 and tds[2].find_all('span')[0].string == None:
_3.append(tds[2].string)
else:
_3.append(tds[2].string)
_4.append(tds[3].string)
if len(tds) == 5:
_5.append(tds[4].string)
else:
_5.append('null')
# elif len(tds[0].find_all('td'))
# print(len(tds))
# print(tds[0].string)
# print(tds[1].string)
# print(tds[2].string)
# print(tds[3].string)
# print(response.read().decode('utf-8','ignore'))
# The site reports a total of 1036 pages
num0 =0
for page in range(0,7):
num0 += 1
# print(num0)
get_urls(url, page)
# Save all the second-level URLs to a text file
with open('urls_all_liyou','w') as f:
f.write(str(urls_of_detail))
# print(len(urls_of_detail))
# print(len(set(urls_of_detail)))
print('urls num :' , len(urls_of_detail))
num = 0  # Mainly for debugging: if crawling fails, this shows which URL caused the error
for second_url in urls_of_detail:
num += 1
print('page num : ', num)
if num in [15,42]:
continue
if num > 54:
break
get_info(url, second_url)
print('end ----------')
print(len(_1))
workbook = xlsxwriter.Workbook('./liyou.xlsx')
# 1. ------------------ Create a worksheet to store the scraped data -------------------------------
ws = workbook.add_worksheet('liyou')
# Set column widths
ws.set_column('A:A', 25)
ws.set_column('B:B', 25)
ws.set_column('C:C', 15)
ws.set_column('D:D', 15)
ws.set_column('E:E', 15)
# Write the header row
ws.write(0, 0, '序号')
ws.write(0, 1, '区域')
ws.write(0, 2, '类型')
ws.write(0, 3, '设置地点')
ws.write(0, 4, '方向')
number = len(_1)
for i in range(number):
ws.write(i + 1, 0, str(_1[i]))
ws.write(i + 1, 1, str(_2[i]))
ws.write(i + 1, 2, str(_3[i]))
ws.write(i + 1, 3, str(_4[i]))
ws.write(i + 1, 4, str(_5[i]))
workbook.close()
| mit | -3,506,324,734,389,136,400 | 22.686916 | 115 | 0.533241 | false |
DStauffman/dstauffman | dstauffman/numba/optimized.py | 1 | 6619 | r"""
Replacement utilities that are optimized for speed using numba but not numpy.
Notes
-----
#. Written by David C. Stauffer in July 2020.
#. Moved into a submodule by David C. Stauffer in February 2021.
"""
#%% Imports
from __future__ import annotations
import doctest
import math
from typing import Sequence
import unittest
from dstauffman.numba.passthrough import fake_jit, HAVE_NUMBA, ncjit, TARGET
if HAVE_NUMBA:
from numba import float32, float64, int32, int64, vectorize # type: ignore[attr-defined]
else:
float32 = float64 = int32 = int64 = vectorize = fake_jit
#%% np_any
@ncjit
def np_any(x: Sequence, /) -> bool:
r"""
Returns true if anything in the vector is true.
Parameters
----------
x : array_like
Input array
Notes
-----
#. Replacement for np.any with short-circuiting.
It is faster if something is likely True, but slower if it has to check the entire array.
#. Written by David C. Stauffer in July 2020.
Examples
--------
>>> from dstauffman.numba import np_any
>>> import numpy as np
>>> x = np.zeros(1000, dtype=bool)
>>> print(np_any(x))
False
>>> x[333] = True
>>> print(np_any(x))
True
"""
for i in range(len(x)):
if x[i]:
return True
return False
#%% np_all
@ncjit
def np_all(x: Sequence, /) -> bool:
r"""
Returns true if everything in the vector is true.
Parameters
----------
x : array_like
Input array
Notes
-----
#. Replacement for np.all with short-circuiting.
It is faster if something is likely False, but slower if it has to check the entire array.
#. Written by David C. Stauffer in July 2020.
Examples
--------
>>> from dstauffman.numba import np_all
>>> import numpy as np
>>> x = np.ones(1000, dtype=bool)
>>> print(np_all(x))
True
>>> x[333] = False
>>> print(np_all(x))
False
"""
for i in range(len(x)):
if not x[i]:
return False
return True
#%% issorted_opt
@ncjit
def issorted_opt(x: Sequence, /, descend: bool = False) -> bool:
r"""
Tells whether the given array is sorted or not.
Parameters
----------
x : array_like
Input array
descend : bool, optional, default is False
Whether to check that the array is sorted in descending order
Notes
-----
#. Written by David C. Stauffer in July 2020.
Examples
--------
>>> from dstauffman.numba import issorted_opt
>>> import numpy as np
>>> x = np.array([1, 3, 3, 5, 7])
>>> print(issorted_opt(x))
True
>>> y = np.array([3, 5, 1, 7])
>>> print(issorted_opt(y))
False
"""
if descend:
for i in range(len(x)-1):
if x[i+1] > x[i]:
return False
else:
for i in range(len(x)-1):
if x[i+1] < x[i] :
return False
return True
#%% Functions - prob_to_rate_opt
@vectorize([float64(float64, float64)], nopython=True, target=TARGET, cache=True)
def prob_to_rate_opt(prob: float, time: float) -> float:
r"""
Convert a given probability and time to a rate.
Parameters
----------
prob : numpy.ndarray
Probability of event happening over the given time
time : float
Time for the given probability in years
Returns
-------
rate : numpy.ndarray
Equivalent annual rate for the given probability and time
Notes
-----
#. Written by David C. Stauffer in January 2016.
Examples
--------
>>> from dstauffman.numba import HAVE_NUMBA, prob_to_rate_opt
>>> import numpy as np
>>> prob = np.array([0, 0.1, 1])
>>> time = 3
>>> rate = prob_to_rate_opt(prob, time) if HAVE_NUMBA else [prob_to_rate_opt(p, time) for p in prob]
>>> print(np.array_str(np.asanyarray(rate), precision=8)) # doctest: +NORMALIZE_WHITESPACE
[0. 0.03512017 inf]
"""
# check ranges
if prob < 0:
raise ValueError('Probability must be >= 0')
if prob > 1:
raise ValueError('Probability must be <= 1')
# calculate rate
if prob == 1:
return math.inf
if prob == 0:
return prob
return -math.log(1 - prob) / time
#%% Functions - rate_to_prob_opt
@vectorize([float64(float64, float64)], nopython=True, target=TARGET, cache=True)
def rate_to_prob_opt(rate: float, time: float) -> float:
r"""
Convert a given rate and time to a probability.
Parameters
----------
rate : float
Annual rate for the given time
time : float
Time period for the desired probability to be calculated from, in years
Returns
-------
float
Equivalent probability of event happening over the given time
Notes
-----
#. Written by David C. Stauffer in January 2016.
#. Converted to numba version by David C. Stauffer in November 2020.
Examples
--------
>>> from dstauffman.numba import HAVE_NUMBA, rate_to_prob_opt
>>> import numpy as np
>>> rate = np.array([0, 0.1, 1, 100, np.inf])
>>> time = 1./12
>>> prob = rate_to_prob_opt(rate, time) if HAVE_NUMBA else [rate_to_prob_opt(r, time) for r in rate]
>>> print(np.array_str(np.asanyarray(prob), precision=8)) # doctest: +NORMALIZE_WHITESPACE
[0. 0.00829871 0.07995559 0.99975963 1. ]
"""
# check ranges
if rate < 0:
raise ValueError('Rate must be >= 0')
# calculate probability
return 1 - math.exp(-rate * time)
#%% Functions - zero_divide
@vectorize([float64(float64, float64), float32(float32, float32), float32(int32, int32), \
float64(int64, int64)], nopython=True, target=TARGET, cache=True)
def zero_divide(num: float, den: float) -> float:
r"""
Numba compatible version of np.divide(num, den, out=np.zeros_like(num), where=den!=0).
Parameters
----------
num : float
Numerator
den : float
Denominator
Returns
-------
float
result of divison, except return zero for anything divided by zero, including 0/0
Notes
-----
#. Written by David C. Stauffer in February 2021.
Examples
--------
>>> from dstauffman.numba import zero_divide
>>> print(zero_divide(1., .2))
5.0
>>> print(zero_divide(3.14, 0.))
0.0
>>> print(zero_divide(0., 0.))
0.0
"""
if den == 0.:
return 0.
return num / den
#%% Unit test
if __name__ == '__main__':
unittest.main(module='dstauffman.tests.test_numba_optimized', exit=False)
doctest.testmod(verbose=False)
| lgpl-3.0 | 5,076,988,683,992,885,000 | 24.263359 | 104 | 0.591026 | false |
Nik0las1984/mudpyl | mudpyl/net/nvt.py | 1 | 5604 | """This module contains tools for emulating a network virtual terminal. See
RFC 854 for details of the NVT commands, and VT100 documentation for the
colour codes.
"""
from mudpyl.metaline import Metaline, RunLengthList
from mudpyl.colours import NORMAL_CODES, fg_code, bg_code, WHITE, BLACK
import re
ALL_RESET = '0'
BOLDON = '1'
BOLDOFF = '22'
FG_FLAG = '3'
BG_FLAG = '4'
GROUND_RESET = '8'
colour_pattern = re.compile( "\x1b" + #ESC
r"\[" #open square bracket
r"(\d+" #open group, initial digits
r"(?:;\d{1,2})*" #following digits
r")" #close the group
"m" #just an 'm'
)
toremove = set('\000' #NUL
'\007' #BEL
'\013' #VT
'\014') #FF
BS = '\010'
HT = '\011' #AKA '\t' and tab.
HT_replacement = '    ' #four spaces
def make_string_sane(string):
"""Process (in most cases, this means 'ignore') the NVT characters in the
input string.
"""
#simple characters don't need any special machinery.
for char in toremove:
string = string.replace(char, '')
#do it backspace by backspace because otherwise, if there were multiple
#backspaces in a row, it gets confused and backspaces over backspaces.
while BS in string:
#take off leading backspaces so that the following regex doesn't get
#confused.
string = string.lstrip(BS)
string = re.sub('.' + BS, '', string, 1)
#swap tabs for four whitespaces.
string = string.replace(HT, HT_replacement)
return string
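# Illustrative behaviour, assumed from the rules above: NUL/BEL/VT/FF are dropped,
# each backspace eats the character before it, and tabs become HT_replacement:
#   make_string_sane('ab\010c\td')  ->  'ac    d'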
class ColourCodeParser(object):
"""A stateful colour code parser."""
def __init__(self):
self.fore = WHITE
self.back = BLACK
self.bold = False
def _parseline(self, line):
"""Feed it lines of VT100-infested text, and it splits it all up.
This returns a threeple: a string, the foreground colours, and the
background colours. The string is simple enough. The background list
is a list of integers corresponding to WHITE, GREEN, etc. The
foreground list is made up of two-ples: the first is the integer
colour, and the second is whether bold is on or off.
The lists of fore and back changes isn't redundant -- there are no
changes that could be removed without losing colour information.
"""
#this is a performance hotspot, so minimise the number of attribute
#lookups and modifications
fore = self.fore
bold = self.bold
back = self.back
backs = [(0, back)]
fores = [(0, (fore, bold))]
text = ''
prev_end = 0
for match in colour_pattern.finditer(line):
text += line[prev_end:match.start()]
prev_end = match.end()
codes = match.group(1)
for code in codes.split(';'):
code = code.lstrip('0') #normalisation.
if not code:
#leading zeroes been stripped from ALL_RESET
if fore != WHITE or bold:
fore = WHITE
bold = False
fores.append((len(text), (fore, bold)))
if back != BLACK:
back = BLACK
backs.append((len(text), back))
elif code == BOLDON and not bold:
bold = True
fores.append((len(text), (fore, bold)))
elif code == BOLDOFF and bold:
bold = False
fores.append((len(text), (fore, bold)))
elif code.startswith(FG_FLAG):
code = code[1:]
if code == GROUND_RESET:
code = WHITE
if code in NORMAL_CODES and code != fore:
fore = code
fores.append((len(text), (fore, bold)))
elif code.startswith(BG_FLAG):
code = code[1:]
if code == GROUND_RESET:
code = BLACK
if code in NORMAL_CODES and code != back:
back = code
backs.append((len(text), back))
#We don't really care about chopped colour codes. This class is
#actually going to be tossed whole lines (ie, \r\n or similar
#terminated), and any escape code of the form "\x1b[\r\n30m" or
#similar is broken anyway. I'll probably be proved wrong somehow
#on this one...
if len(line) - 1 > prev_end:
text += line[prev_end:]
self.fore = fore
self.back = back
self.bold = bold
return (fores, backs, text)
def parseline(self, line):
"""Interpret the VT100 codes in line and returns a Metaline, replete
with RunLengthLists, that splits the text, foreground and background
into three separate channels.
"""
fores, backs, cleanline = self._parseline(line)
rlfores = RunLengthList(((length, fg_code(colour, bold))
for (length, (colour, bold)) in fores),
_normalised = True)
rlbacks = RunLengthList(((length, bg_code(colour))
for (length, colour) in backs),
_normalised = True)
return Metaline(cleanline, rlfores, rlbacks)
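# Example of the intended use (illustrative only; colour constants come from
# mudpyl.colours):
#
#     parser = ColourCodeParser()
#     metaline = parser.parseline('\x1b[1;31mAlert!\x1b[0m rest of the line')
#
# This yields a Metaline whose foreground RunLengthList switches to bold red at
# offset 0 and back to non-bold white at offset 6 (the length of 'Alert!'),
# while the parser carries its colour state over to the next line it is fed.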
| gpl-2.0 | -6,692,970,020,253,625,000 | 35.868421 | 77 | 0.526588 | false |
lloeki/python-dcpu_16 | test.py | 1 | 8924 | import unittest
import random
import dcpu_16
from dcpu_16 import CPU
class TestInstructions(unittest.TestCase):
"""Instruction set"""
def setUp(self):
pass
def test_SET(self):
dcpu_16.SET.opcode = (0x1,)
c = CPU()
c.a = 0x0
c.b = 0x42
dcpu_16.SET(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x42)
self.assertEqual(c.b, 0x42)
def test_ADD(self):
        dcpu_16.ADD.opcode = (0x2,)
c = CPU()
c.a = 0x17
c.b = 0x42
dcpu_16.ADD(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x17+0x42)
self.assertEqual(c.b, 0x42)
self.assertEqual(c.o, 0x0)
def test_SUB(self):
        dcpu_16.SUB.opcode = (0x3,)
c = CPU()
c.a = 0x42
c.b = 0x17
dcpu_16.SUB(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x42-0x17)
self.assertEqual(c.b, 0x17)
self.assertEqual(c.o, 0x0)
def test_MUL(self):
        dcpu_16.MUL.opcode = (0x4,)
c = CPU()
c.a = 0x17
c.b = 0x42
dcpu_16.MUL(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x17*0x42)
self.assertEqual(c.b, 0x42)
self.assertEqual(c.o, 0x0)
def test_DIV(self):
        dcpu_16.DIV.opcode = (0x5,)
c = CPU()
c.a = 0x17
c.b = 0x42
dcpu_16.DIV(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x17/0x42)
self.assertEqual(c.b, 0x42)
self.assertEqual(c.o, ((0x17<<16)/0x42)&0xFFFF)
def test_MOD(self):
        dcpu_16.MOD.opcode = (0x6,)
c = CPU()
c.a = 0x17
c.b = 0x42
dcpu_16.MOD(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x17%0x42)
self.assertEqual(c.b, 0x42)
self.assertEqual(c.o, 0x0)
def test_SHL(self):
        dcpu_16.SHL.opcode = (0x7,)
c = CPU()
c.a = 0x17
c.b = 0x4
dcpu_16.SHL(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x17<<0x4 & dcpu_16.wmask)
self.assertEqual(c.b, 0x4)
self.assertEqual(c.o, 0x0)
def test_SHR(self):
        dcpu_16.SHR.opcode = (0x8,)
c = CPU()
c.a = 0x17
c.b = 0x42
dcpu_16.SHR(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x17>>0x42)
self.assertEqual(c.b, 0x42)
self.assertEqual(c.o, 0x0)
def test_AND(self):
        dcpu_16.AND.opcode = (0x9,)
c = CPU()
c.a = 0x17
c.b = 0x42
dcpu_16.AND(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x17&0x42)
self.assertEqual(c.b, 0x42)
self.assertEqual(c.o, 0x0)
def test_BOR(self):
        dcpu_16.BOR.opcode = (0xA,)
c = CPU()
c.a = 0x17
c.b = 0x42
dcpu_16.BOR(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x17|0x42)
self.assertEqual(c.b, 0x42)
self.assertEqual(c.o, 0x0)
def test_XOR(self):
        dcpu_16.XOR.opcode = (0xB,)
c = CPU()
c.a = 0x17
c.b = 0x42
dcpu_16.XOR(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.a, 0x17^0x42)
self.assertEqual(c.b, 0x42)
self.assertEqual(c.o, 0x0)
def test_IFE(self):
        dcpu_16.IFE.opcode = (0xC,)
c = CPU()
c.a = 0x17
c.b = 0x42
dcpu_16.IFE(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.skip, True)
c.a = 0x42
c.b = 0x42
dcpu_16.IFE(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.skip, False)
def test_IFN(self):
        dcpu_16.IFN.opcode = (0xD,)
c = CPU()
c.a = 0x17
c.b = 0x42
dcpu_16.IFN(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.skip, False)
c.a = 0x42
c.b = 0x42
dcpu_16.IFN(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.skip, True)
def test_IFG(self):
        dcpu_16.IFG.opcode = (0xE,)
c = CPU()
c.a = 0x41
c.b = 0x42
dcpu_16.IFG(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.skip, True)
c.a = 0x42
c.b = 0x42
dcpu_16.IFG(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.skip, True)
c.a = 0x42
c.b = 0x41
dcpu_16.IFG(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.skip, False)
def test_IFB(self):
        dcpu_16.IFB.opcode = (0xF,)
c = CPU()
c.a = 0xF0F0
c.b = 0x0F0F
dcpu_16.IFB(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.skip, True)
c.a = 0xF1F0
c.b = 0x0F0F
dcpu_16.IFB(c, c._pointer(0x0), c._pointer(0x1))
self.assertEqual(c.skip, False)
def test_JSR(self):
dcpu_16.JSR.opcode = (0x0, 0x1)
c = CPU()
c.a = 0xDEAD
c.pc = 0xBEEF
dcpu_16.JSR(c, c._pointer(0x0))
self.assertEqual(c.pc, 0xDEAD)
self.assertEqual(c.sp, 0xFFFF)
self.assertEqual(c.m[0xFFFF], 0xBEEF)
class TestCPU(unittest.TestCase):
"""CPU behavior"""
def setUp(self):
pass
def test_initial_state(self):
"""Initial state shall be all zero"""
c = CPU()
for r in c.r:
self.assertEqual(r, 0)
self.assertEqual(c.pc, 0)
self.assertEqual(c.o, 0)
self.assertEqual(c.sp, 0)
self.assertEqual(c.skip, False)
self.assertEqual(c.pc, 0)
for r in c.m:
self.assertEqual(r, 0)
def test_reset(self):
"""Reset shall bring CPU register state to initial"""
c = CPU()
for i in xrange(0x8):
c.r[i] = random.randrange(0x10000)
c.reset()
for r in c.r:
self.assertEqual(r, 0)
self.assertEqual(c.pc, 0)
self.assertEqual(c.o, 0)
self.assertEqual(c.sp, 0)
self.assertEqual(c.skip, False)
self.assertEqual(c.pc, 0)
def test_clear(self):
"""Clear shall zero memory"""
c = CPU()
for i in xrange(0x10000):
c.m[i] = random.randrange(0x10000)
c.clear()
for r in c.m:
self.assertEqual(r, 0)
class TestCPUWithPrograms(unittest.TestCase):
def setUP(self):
pass
def test_spec_demo(self):
c = CPU()
data = [
0x7c01, 0x0030, 0x7de1, 0x1000, 0x0020, 0x7803, 0x1000, 0xc00d,
0x7dc1, 0x001a, 0xa861, 0x7c01, 0x2000, 0x2161, 0x2000, 0x8463,
0x806d, 0x7dc1, 0x000d, 0x9031, 0x7c10, 0x0018, 0x7dc1, 0x001a,
0x9037, 0x61c1, 0x7dc1, 0x001a, 0x0000, 0x0000, 0x0000, 0x0000,
]
c.load_m(data=data)
self.assertEqual(c.pc, 0)
c.step() # SET A, 0x30
self.assertEqual(c.a, 0x30)
self.assertEqual(c.pc, 2)
c.step() # SET [0x1000], 0x20
self.assertEqual(c.m[0x1000], 0x20)
self.assertEqual(c.pc, 5)
c.step() # SUB A, [0x1000]
self.assertEqual(c.a, 0x10)
self.assertEqual(c.pc, 7)
c.step() # IFN A, 0x10
self.assertEqual(c.pc, 8)
self.assertEqual(c.skip, True)
c.step() # skip SET PC, crash
self.assertEqual(c.skip, False)
self.assertEqual(c.pc, 10)
c.step() # SET I, 10
self.assertEqual(c.i, 0x0A)
self.assertEqual(c.pc, 11)
c.step() # SET A, 0x2000
self.assertEqual(c.a, 0x2000)
for i in range(10, 0, -1):
self.assertEqual(c.pc, 13)
c.step() # SET [0x2000+I], [A]
self.assertEqual(c.m[0x2000+i], 0x0)
self.assertEqual(c.pc, 15)
c.step() # SUB I, 1
self.assertEqual(c.i, i-1)
self.assertEqual(c.pc, 16)
c.step() # IFN I, 0
self.assertEqual(c.skip, i-1==0)
self.assertEqual(c.pc, 17)
c.step() # SET PC, loop (with skip if c.i==0)
self.assertEqual(c.pc, 19)
c.step() # SET X, 0x4
self.assertEqual(c.x, 0x4)
self.assertEqual(c.pc, 20)
c.step() # JSR testsub
self.assertEqual(c.sp, 0xFFFF)
self.assertEqual(c.m[0xFFFF], 22)
self.assertEqual(c.pc, 24)
c.step() # SHL X, 4
self.assertEqual(c.x, 0x40)
self.assertEqual(c.pc, 25)
c.step() # SET PC, POP
self.assertEqual(c.pc, 22)
c.step() # SET PC, crash
self.assertEqual(c.pc, 26)
c.step() # SET PC, crash
self.assertEqual(c.pc, 26)
# endless loop
if __name__ == '__main__':
cases = [
'test.TestInstructions',
'test.TestCPU',
'test.TestCPUWithPrograms'
]
suite = unittest.TestLoader().loadTestsFromNames(cases)
unittest.TextTestRunner(verbosity=2).run(suite)
| bsd-3-clause | 4,863,031,636,498,932,000 | 26.29052 | 75 | 0.520506 | false |
RCAD/ringling-render-tools | src/rrt/maya/ui/submit.py | 1 | 13088 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\src\rrt\maya\ui\submit.ui'
#
# Created: Wed Oct 24 16:19:16 2012
# by: PyQt4 UI code generator 4.7.7
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_SubmitMainWindow(object):
def setupUi(self, SubmitMainWindow):
SubmitMainWindow.setObjectName(_fromUtf8("SubmitMainWindow"))
SubmitMainWindow.setEnabled(True)
SubmitMainWindow.resize(445, 283)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(SubmitMainWindow.sizePolicy().hasHeightForWidth())
SubmitMainWindow.setSizePolicy(sizePolicy)
SubmitMainWindow.setMinimumSize(QtCore.QSize(445, 283))
SubmitMainWindow.setWindowTitle(_fromUtf8("hpc-submit-maya"))
self.verticalLayout = QtGui.QVBoxLayout(SubmitMainWindow)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.formLayout = QtGui.QFormLayout()
self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout.setLabelAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.formLayout.setHorizontalSpacing(6)
self.formLayout.setVerticalSpacing(8)
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.head_node_label = QtGui.QLabel(SubmitMainWindow)
self.head_node_label.setObjectName(_fromUtf8("head_node_label"))
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.head_node_label)
self.head_node_field = QtGui.QComboBox(SubmitMainWindow)
self.head_node_field.setObjectName(_fromUtf8("head_node_field"))
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.head_node_field)
self.title_label = QtGui.QLabel(SubmitMainWindow)
self.title_label.setLayoutDirection(QtCore.Qt.LeftToRight)
self.title_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.title_label.setObjectName(_fromUtf8("title_label"))
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.title_label)
self.project_label = QtGui.QLabel(SubmitMainWindow)
self.project_label.setMinimumSize(QtCore.QSize(0, 0))
self.project_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.project_label.setObjectName(_fromUtf8("project_label"))
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.project_label)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setSpacing(6)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.project_field = QtGui.QLineEdit(SubmitMainWindow)
self.project_field.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.project_field.sizePolicy().hasHeightForWidth())
self.project_field.setSizePolicy(sizePolicy)
self.project_field.setMinimumSize(QtCore.QSize(161, 26))
self.project_field.setReadOnly(True)
self.project_field.setObjectName(_fromUtf8("project_field"))
self.horizontalLayout.addWidget(self.project_field)
self.browse_button = QtGui.QPushButton(SubmitMainWindow)
self.browse_button.setMinimumSize(QtCore.QSize(85, 27))
self.browse_button.setObjectName(_fromUtf8("browse_button"))
self.horizontalLayout.addWidget(self.browse_button)
self.formLayout.setLayout(2, QtGui.QFormLayout.FieldRole, self.horizontalLayout)
self.scene_label = QtGui.QLabel(SubmitMainWindow)
self.scene_label.setObjectName(_fromUtf8("scene_label"))
self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.scene_label)
self.horizontalLayout1 = QtGui.QHBoxLayout()
self.horizontalLayout1.setSpacing(6)
self.horizontalLayout1.setObjectName(_fromUtf8("horizontalLayout1"))
self.scene_field = QtGui.QLineEdit(SubmitMainWindow)
self.scene_field.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.scene_field.sizePolicy().hasHeightForWidth())
self.scene_field.setSizePolicy(sizePolicy)
self.scene_field.setMinimumSize(QtCore.QSize(161, 26))
self.scene_field.setReadOnly(True)
self.scene_field.setObjectName(_fromUtf8("scene_field"))
self.horizontalLayout1.addWidget(self.scene_field)
self.scene_button = QtGui.QPushButton(SubmitMainWindow)
self.scene_button.setMinimumSize(QtCore.QSize(85, 27))
self.scene_button.setObjectName(_fromUtf8("scene_button"))
self.horizontalLayout1.addWidget(self.scene_button)
self.formLayout.setLayout(3, QtGui.QFormLayout.FieldRole, self.horizontalLayout1)
self.start_label = QtGui.QLabel(SubmitMainWindow)
self.start_label.setObjectName(_fromUtf8("start_label"))
self.formLayout.setWidget(4, QtGui.QFormLayout.LabelRole, self.start_label)
self.start_field = QtGui.QSpinBox(SubmitMainWindow)
self.start_field.setMinimum(1)
self.start_field.setMaximum(999999999)
self.start_field.setObjectName(_fromUtf8("start_field"))
self.formLayout.setWidget(4, QtGui.QFormLayout.FieldRole, self.start_field)
self.end_label = QtGui.QLabel(SubmitMainWindow)
self.end_label.setObjectName(_fromUtf8("end_label"))
self.formLayout.setWidget(5, QtGui.QFormLayout.LabelRole, self.end_label)
self.end_field = QtGui.QSpinBox(SubmitMainWindow)
self.end_field.setMinimum(1)
self.end_field.setMaximum(999999999)
self.end_field.setObjectName(_fromUtf8("end_field"))
self.formLayout.setWidget(5, QtGui.QFormLayout.FieldRole, self.end_field)
self.step_label = QtGui.QLabel(SubmitMainWindow)
self.step_label.setObjectName(_fromUtf8("step_label"))
self.formLayout.setWidget(6, QtGui.QFormLayout.LabelRole, self.step_label)
self.horizontalLayout_11 = QtGui.QHBoxLayout()
self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
self.step_field = QtGui.QSpinBox(SubmitMainWindow)
self.step_field.setMinimum(1)
self.step_field.setMaximum(999999999)
self.step_field.setObjectName(_fromUtf8("step_field"))
self.horizontalLayout_11.addWidget(self.step_field)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.horizontalLayout_11.addItem(spacerItem)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.horizontalLayout_11.addItem(spacerItem1)
spacerItem2 = QtGui.QSpacerItem(50, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.horizontalLayout_11.addItem(spacerItem2)
self.render_label = QtGui.QLabel(SubmitMainWindow)
self.render_label.setObjectName(_fromUtf8("render_label"))
self.horizontalLayout_11.addWidget(self.render_label)
self.render_field = QtGui.QComboBox(SubmitMainWindow)
self.render_field.setObjectName(_fromUtf8("render_field"))
self.horizontalLayout_11.addWidget(self.render_field)
spacerItem3 = QtGui.QSpacerItem(10, 10, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.horizontalLayout_11.addItem(spacerItem3)
self.formLayout.setLayout(6, QtGui.QFormLayout.FieldRole, self.horizontalLayout_11)
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.rrt_debug = QtGui.QCheckBox(SubmitMainWindow)
self.rrt_debug.setLayoutDirection(QtCore.Qt.LeftToRight)
self.rrt_debug.setObjectName(_fromUtf8("rrt_debug"))
self.horizontalLayout_5.addWidget(self.rrt_debug)
self.pause = QtGui.QCheckBox(SubmitMainWindow)
self.pause.setLayoutDirection(QtCore.Qt.LeftToRight)
self.pause.setObjectName(_fromUtf8("pause"))
self.horizontalLayout_5.addWidget(self.pause)
self.formLayout.setLayout(7, QtGui.QFormLayout.FieldRole, self.horizontalLayout_5)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.title_field = QtGui.QLineEdit(SubmitMainWindow)
self.title_field.setObjectName(_fromUtf8("title_field"))
self.horizontalLayout_4.addWidget(self.title_field)
self.formLayout.setLayout(1, QtGui.QFormLayout.FieldRole, self.horizontalLayout_4)
self.verticalLayout.addLayout(self.formLayout)
self.line = QtGui.QFrame(SubmitMainWindow)
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.verticalLayout.addWidget(self.line)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setSpacing(6)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
spacerItem4 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem4)
self.submit_button = QtGui.QPushButton(SubmitMainWindow)
self.submit_button.setObjectName(_fromUtf8("submit_button"))
self.horizontalLayout_2.addWidget(self.submit_button)
self.cancel_button = QtGui.QPushButton(SubmitMainWindow)
self.cancel_button.setObjectName(_fromUtf8("cancel_button"))
self.horizontalLayout_2.addWidget(self.cancel_button)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.retranslateUi(SubmitMainWindow)
QtCore.QObject.connect(self.browse_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.browse)
QtCore.QObject.connect(self.cancel_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.quit)
QtCore.QObject.connect(self.submit_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.submit_job)
QtCore.QObject.connect(self.scene_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.scene)
QtCore.QMetaObject.connectSlotsByName(SubmitMainWindow)
def retranslateUi(self, SubmitMainWindow):
self.head_node_label.setToolTip(QtGui.QApplication.translate("SubmitMainWindow", "which cluster to use", None, QtGui.QApplication.UnicodeUTF8))
self.head_node_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Head Node", None, QtGui.QApplication.UnicodeUTF8))
self.head_node_field.setToolTip(QtGui.QApplication.translate("SubmitMainWindow", "Which cluster to submit to", None, QtGui.QApplication.UnicodeUTF8))
self.title_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Job Title", None, QtGui.QApplication.UnicodeUTF8))
self.project_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Project Folder", None, QtGui.QApplication.UnicodeUTF8))
self.browse_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Set", None, QtGui.QApplication.UnicodeUTF8))
self.scene_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Maya Scene File", None, QtGui.QApplication.UnicodeUTF8))
self.scene_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Browse", None, QtGui.QApplication.UnicodeUTF8))
self.start_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Start Frame", None, QtGui.QApplication.UnicodeUTF8))
self.end_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "End Frame", None, QtGui.QApplication.UnicodeUTF8))
self.step_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Frame Step", None, QtGui.QApplication.UnicodeUTF8))
self.render_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Renderer", None, QtGui.QApplication.UnicodeUTF8))
self.rrt_debug.setText(QtGui.QApplication.translate("SubmitMainWindow", "Show Debug Messages", None, QtGui.QApplication.UnicodeUTF8))
self.pause.setText(QtGui.QApplication.translate("SubmitMainWindow", "Pause before exit", None, QtGui.QApplication.UnicodeUTF8))
self.submit_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Submit Job", None, QtGui.QApplication.UnicodeUTF8))
self.cancel_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Cancel", None, QtGui.QApplication.UnicodeUTF8))
| mit | 7,755,343,654,843,894,000 | 66.117949 | 157 | 0.737164 | false |
kunalarya/simple-sat-solver | satsolver/solver.py | 1 | 9011 | from __future__ import print_function
import argparse
import logging
from collections import namedtuple
import satsolver.parser as parser
from satsolver.util import Success, Failure
from satsolver.state import Instance
class Node(object):
def __init__(self, lit, asg, level):
assert lit > 0
self.lit = lit
self.asg = asg
self.level = level
def __repr__(self):
return '<x_{} = {} @ {}>'.format(
self.lit, self.asg, self.level)
class ImplicationGraph(object):
"""Implication Graph"""
def __init__(self):
self.nodes = set()
self.lits = set() # set of literals in nodes
# map lit -> nodes w/assignments
# dict[int, list[Node]]
self.nodes_by_lit = {}
self.fwd_edges = {}
# maps (x -> y) tuple edge to clause
self.edge_annot = {}
def add_node(self, node):
self.nodes.add(node)
self.lits.add(node.lit)
self.fwd_edges[node] = []
self.nodes_by_lit[node.lit] = node
def del_node(self, node):
self.lits.remove(node.lit)
self.nodes.remove(node)
del self.fwd_edges[node]
del self.nodes_by_lit[node.lit]
def add_edge(self, src, dst, reason):
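        # 'reason' records the clause that caused this implication edge.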
self.fwd_edges[src].append(dst)
self.edge_annot[src, dst] = reason
Decision = namedtuple('Decision', ['level', 'lit', 'value'])
Implication = namedtuple('Implication', ['clause', 'lit', 'value'])
class Solver(object):
"""Main Solver"""
def __init__(self, instance, recipe=None):
self.instance = instance
# Pick variables in this order, if given.
self.recipe = recipe
self.recipe_index = 0
# def new_var(self):
# pass
# def add_clause(self, lits):
# pass
# def simplify_db(self):
# pass
def solve(self):
result = self.decide([], 1)
return result
def determine_next_var(self):
"""Choose the next variable to assign.
It will run the recipe if given, otherwise select a random unassigned
variable.
Returns:
tuple(variable, value)
"""
if self.recipe is not None:
if len(self.recipe) > 0:
next_var_and_value = self.recipe[0]
self.recipe = self.recipe[1:]
return next_var_and_value
# Otherwise, choose a variable randomly.
next_var = next(iter(self.instance.unasg_vars))
return next_var, 1
def bcp(self, decision_level, igraph):
"""Boolean Constrain Propagation
Returns:
Success | Failure
Success result:
{lit: Implication}
Failure means UNSAT
"""
any_unit = True
implications = {} # Keyed on int
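        # Keep sweeping the clause list; each pass can turn more clauses into
        # unit clauses, so iterate until a full pass adds no new implications.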
while any_unit:
any_unit = False
for clause_index, clause in enumerate(self.instance.clauses):
r = self.instance.is_unit(clause)
if not r.success: return r
is_unit, implied = r.result
if is_unit:
lit = abs(implied)
if implied > 0:
r = self.instance.set_lit(lit, 1)
if not r.success: return r
implications[lit] = Implication(clause_index, lit, 1)
value = 1
else:
r = self.instance.set_lit(lit, 0)
if not r.success: return r
implications[lit] = Implication(clause_index, lit, 0)
value = 0
logging.debug('implied=%d -> %d', lit, value)
# Create a node in the ImplicationGraph if it doesn't yet exist.
                    if lit not in igraph.nodes_by_lit:
lit_node = Node(lit, value, decision_level)
igraph.add_node(lit_node)
# Create any edges
for implicating_lit in clause:
implicating_pair = self.instance.get_value(implicating_lit)
implicating_lit, implicating_value = implicating_pair
if implicating_lit != lit:
# create the implicating lit if needed
if implicating_lit not in igraph.lits:
inode = Node(implicating_lit, implicating_value,
decision_level)
igraph.add_node(inode)
else:
inode = igraph.nodes_by_lit[implicating_lit]
# create an edge for this node
lit_node = igraph.nodes_by_lit[lit]
igraph.add_edge(inode, lit_node, clause)
logging.debug('add edge %s->%s because of %s',
inode, lit_node, clause)
any_unit = True
return Success(implications)
def decide(self, decisions, level):
"""
Args:
decisions (list[Decision]):
level (int):
Returns:
Success | Failure
"""
# choose a variable to decide
print('.', end='')
logging.debug('______________________________')
logging.debug('[level: %d]', level)
# Choose a variable to set.
next_var, next_value = self.determine_next_var()
# Create a new copy of the decisions.
decisions = list(decisions)
decisions.append(Decision(level, next_var, next_value))
logging.debug('try_assignment(level=%d, %d->%d)', level, next_var,
next_value)
result = self.try_assignment(level, decisions, next_var, next_value)
if not result.success:
logging.debug('caused unsat: try_assignment(level=%d, %d->%d)',
level, next_var, next_value)
# try the other branch
inverted_value = 1 - next_value
# remove last decision
decisions = decisions[:-1]
# add new decision
decisions.append(Decision(level, next_var, inverted_value))
r = self.try_assignment(level, decisions, next_var, inverted_value)
# If we reached UNSAT here, then there's no solution here, so propagate
# this issue up.
if not r.success:
return r
else:
# If all variables have been assigned, store this as a solution.
if len(self.instance.unasg_vars) == 0:
if self.instance.verify():
self.instance.save_solution()
print('satisfied!')
else:
raise ValueError('All variables assigned, but UNSAT')
return Success()
def try_assignment(self, level, decisions, lit, value):
logging.debug('try_assignment: lit = %d -- setting to %d', lit, value)
# assign it True
r = self.instance.set_lit(lit, value)
if not r.success:
return r
igraph = ImplicationGraph()
# build the graph
for decision in decisions:
# create a node for each decision
node = Node(decision.lit, decision.value, decision.level)
igraph.add_node(node)
logging.debug('adding node %s', node)
logging.debug('running bcp...')
r = self.bcp(level, igraph)
if not r.success: # Meaning UNSAT:
logging.debug('decision led to UNSAT. unsetting')
self.instance.unset_lit(lit)
# If it's UNSAT, we need to backtrack
return Failure('Unsat!')
# Otherwise it was a Success
implications = r.result
if len(self.instance.unasg_vars) > 0:
# increase the decision level
r = self.decide(decisions, level+1)
self.instance.unset_lit(lit)
return r
# otherwise, return igraph
return Success(result=(igraph, None))
def solve(instance):
"""
Args:
instance (Instance): parsed SAT instance
Returns:
Success | Failure
"""
solver = Solver(instance)
result = solver.solve()
if not result.success:
print('Unsatisfiable')
return result
def main():
cmdline_parser = argparse.ArgumentParser()
cmdline_parser.add_argument('filename', action='store', type=str)
args = cmdline_parser.parse_args()
file_parser = parser.CNFFileParser(args.filename)
inst = Instance(var_count=file_parser.var_count, clauses=file_parser.clauses)
result = solve(inst)
if result.success:
# Print the solutions
print('Satisfying solutions:')
for solution in inst.solutions:
print(solution)
if __name__ == '__main__':
main()
| apache-2.0 | -7,440,824,354,011,734,000 | 29.545763 | 84 | 0.52669 | false |
luyijun/evennia_worldloader | worldloader/example_tutorial_world/worlddata/migrations/0001_initial.py | 1 | 5070 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='personal_objects',
fields=[
('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
('name', models.CharField(max_length=255)),
('alias', models.CharField(max_length=255, blank=True)),
('typeclass', models.CharField(max_length=255)),
('desc', models.TextField(blank=True)),
('location', models.CharField(max_length=255, blank=True)),
('home', models.CharField(max_length=255, blank=True)),
('lock', models.CharField(max_length=255, blank=True)),
('attributes', models.TextField(blank=True)),
('tutorial_info', models.TextField(blank=True)),
('destination', models.CharField(max_length=255, blank=True)),
],
options={
'verbose_name': 'Personal Object List',
'verbose_name_plural': 'Personal Object List',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='world_details',
fields=[
('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
('name', models.CharField(max_length=255)),
('desc', models.TextField(blank=True)),
('location', models.CharField(max_length=255, blank=True)),
],
options={
'verbose_name': 'World Detail List',
'verbose_name_plural': 'World Detail List',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='world_exits',
fields=[
('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
('name', models.CharField(max_length=255)),
('alias', models.CharField(max_length=255, blank=True)),
('typeclass', models.CharField(max_length=255)),
('desc', models.TextField(blank=True)),
('location', models.CharField(max_length=255, blank=True)),
('home', models.CharField(max_length=255, blank=True)),
('lock', models.CharField(max_length=255, blank=True)),
('attributes', models.TextField(blank=True)),
('tutorial_info', models.TextField(blank=True)),
('destination', models.CharField(max_length=255, blank=True)),
],
options={
'verbose_name': 'World Exit List',
'verbose_name_plural': 'World Exit List',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='world_objects',
fields=[
('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
('name', models.CharField(max_length=255)),
('alias', models.CharField(max_length=255, blank=True)),
('typeclass', models.CharField(max_length=255)),
('desc', models.TextField(blank=True)),
('location', models.CharField(max_length=255, blank=True)),
('home', models.CharField(max_length=255, blank=True)),
('lock', models.CharField(max_length=255, blank=True)),
('attributes', models.TextField(blank=True)),
('tutorial_info', models.TextField(blank=True)),
('destination', models.CharField(max_length=255, blank=True)),
],
options={
'verbose_name': 'World Object List',
'verbose_name_plural': 'World Object List',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='world_rooms',
fields=[
('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
('name', models.CharField(max_length=255)),
('alias', models.CharField(max_length=255, blank=True)),
('typeclass', models.CharField(max_length=255)),
('desc', models.TextField(blank=True)),
('location', models.CharField(max_length=255, blank=True)),
('home', models.CharField(max_length=255, blank=True)),
('lock', models.CharField(max_length=255, blank=True)),
('attributes', models.TextField(blank=True)),
('tutorial_info', models.TextField(blank=True)),
('destination', models.CharField(max_length=255, blank=True)),
],
options={
'verbose_name': 'World Room List',
'verbose_name_plural': 'World Room List',
},
bases=(models.Model,),
),
]
| bsd-3-clause | 3,046,071,758,788,312,000 | 44.675676 | 93 | 0.522288 | false |
CMUSV-VisTrails/WorkflowRecommendation | vistrails/packages/analytics/__init__.py | 1 | 1938 | ###############################################################################
##
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected]
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
name = "VisTrails Analytics"
identifier = "edu.utah.sci.vistrails.analytics"
version = "0.0.1"
| bsd-3-clause | -8,424,415,816,666,833,000 | 51.378378 | 79 | 0.691434 | false |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyQt4/QtGui/QAbstractSpinBox.py | 1 | 9176 | # encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python3/dist-packages/PyQt4/QtGui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
from .QWidget import QWidget
class QAbstractSpinBox(QWidget):
""" QAbstractSpinBox(QWidget parent=None) """
def alignment(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.alignment() -> Qt.Alignment """
pass
def buttonSymbols(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.buttonSymbols() -> QAbstractSpinBox.ButtonSymbols """
pass
def changeEvent(self, QEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.changeEvent(QEvent) """
pass
def clear(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.clear() """
pass
def closeEvent(self, QCloseEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.closeEvent(QCloseEvent) """
pass
def contextMenuEvent(self, QContextMenuEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.contextMenuEvent(QContextMenuEvent) """
pass
def correctionMode(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.correctionMode() -> QAbstractSpinBox.CorrectionMode """
pass
def editingFinished(self, *args, **kwargs): # real signature unknown
""" QAbstractSpinBox.editingFinished [signal] """
pass
def event(self, QEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.event(QEvent) -> bool """
return False
def fixup(self, p_str): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.fixup(str) -> str """
return ""
def focusInEvent(self, QFocusEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.focusInEvent(QFocusEvent) """
pass
def focusOutEvent(self, QFocusEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.focusOutEvent(QFocusEvent) """
pass
def hasAcceptableInput(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.hasAcceptableInput() -> bool """
return False
def hasFrame(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.hasFrame() -> bool """
return False
def hideEvent(self, QHideEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.hideEvent(QHideEvent) """
pass
def initStyleOption(self, QStyleOptionSpinBox): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.initStyleOption(QStyleOptionSpinBox) """
pass
def inputMethodQuery(self, Qt_InputMethodQuery): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.inputMethodQuery(Qt.InputMethodQuery) -> object """
return object()
def interpretText(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.interpretText() """
pass
def isAccelerated(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.isAccelerated() -> bool """
return False
def isReadOnly(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.isReadOnly() -> bool """
return False
def keyboardTracking(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.keyboardTracking() -> bool """
return False
def keyPressEvent(self, QKeyEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.keyPressEvent(QKeyEvent) """
pass
def keyReleaseEvent(self, QKeyEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.keyReleaseEvent(QKeyEvent) """
pass
def lineEdit(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.lineEdit() -> QLineEdit """
return QLineEdit
def minimumSizeHint(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.minimumSizeHint() -> QSize """
pass
def mouseMoveEvent(self, QMouseEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.mouseMoveEvent(QMouseEvent) """
pass
def mousePressEvent(self, QMouseEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.mousePressEvent(QMouseEvent) """
pass
def mouseReleaseEvent(self, QMouseEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.mouseReleaseEvent(QMouseEvent) """
pass
def paintEvent(self, QPaintEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.paintEvent(QPaintEvent) """
pass
def resizeEvent(self, QResizeEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.resizeEvent(QResizeEvent) """
pass
def selectAll(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.selectAll() """
pass
def setAccelerated(self, bool): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setAccelerated(bool) """
pass
def setAlignment(self, Qt_Alignment): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setAlignment(Qt.Alignment) """
pass
def setButtonSymbols(self, QAbstractSpinBox_ButtonSymbols): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setButtonSymbols(QAbstractSpinBox.ButtonSymbols) """
pass
def setCorrectionMode(self, QAbstractSpinBox_CorrectionMode): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setCorrectionMode(QAbstractSpinBox.CorrectionMode) """
pass
def setFrame(self, bool): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setFrame(bool) """
pass
def setKeyboardTracking(self, bool): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setKeyboardTracking(bool) """
pass
def setLineEdit(self, QLineEdit): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setLineEdit(QLineEdit) """
pass
def setReadOnly(self, bool): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setReadOnly(bool) """
pass
def setSpecialValueText(self, p_str): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setSpecialValueText(str) """
pass
def setWrapping(self, bool): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setWrapping(bool) """
pass
def showEvent(self, QShowEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.showEvent(QShowEvent) """
pass
def sizeHint(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.sizeHint() -> QSize """
pass
def specialValueText(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.specialValueText() -> str """
return ""
def stepBy(self, p_int): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.stepBy(int) """
pass
def stepDown(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.stepDown() """
pass
def stepEnabled(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.stepEnabled() -> QAbstractSpinBox.StepEnabled """
pass
def stepUp(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.stepUp() """
pass
def text(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.text() -> str """
return ""
def timerEvent(self, QTimerEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.timerEvent(QTimerEvent) """
pass
def validate(self, p_str, p_int): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.validate(str, int) -> (QValidator.State, str, int) """
pass
def wheelEvent(self, QWheelEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.wheelEvent(QWheelEvent) """
pass
def wrapping(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.wrapping() -> bool """
return False
def __init__(self, QWidget_parent=None): # real signature unknown; restored from __doc__
pass
ButtonSymbols = None # (!) real value is ''
CorrectionMode = None # (!) real value is ''
CorrectToNearestValue = 1
CorrectToPreviousValue = 0
NoButtons = 2
PlusMinus = 1
StepDownEnabled = 2
StepEnabled = None # (!) real value is ''
StepEnabledFlag = None # (!) real value is ''
StepNone = 0
StepUpEnabled = 1
UpDownArrows = 0
| gpl-2.0 | 770,578,344,612,015,000 | 36.761317 | 113 | 0.650065 | false |
spencerpomme/coconuts-on-fire | person.py | 1 | 1237 | from classtools import AttrDisplay
class Person(AttrDisplay):
'''
Create and process person records
'''
def __init__(self, name, job=None, pay=0):
self.name = name
self.job = job
self.pay = pay
def lastName(self):
return self.name.split()[-1]
def giveRaise(self, percent):
self.pay = int(self.pay *(1 + percent))
class Manager(Person):
def __init__(self, name, pay):
Person.__init__(self, name, 'mgr', pay)
def giveRaise(self, percent, bonus=.10):
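        # Add the manager bonus (10% by default) on top of the requested raise.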
Person.giveRaise(self, percent+bonus)
class Department:
def __init__(self, *args):
self.members = list(args)
def addMember(self, person):
self.members.append(person)
def giveRaise(self, percent):
for person in self.members:
person.giveRaise(percent)
def showAll(self):
for person in self.members:
print(person)
if __name__ == '__main__':
bob = Person('Bob Smith')
sue = Person('Sue Jones', job='dev', pay=100000)
tom = Manager('Tom Jones', pay=50000)
development = Department(bob, sue)
development.addMember(tom)
development.giveRaise(.10)
development.showAll()
| apache-2.0 | -7,894,379,898,848,833,000 | 21.089286 | 52 | 0.587712 | false |
mauzeh/formation-flight | runs/singlehub/z/run.py | 1 | 1259 | #!/usr/bin/env python
"""Simulation bootstrapper"""
from formation_flight.formation import handlers as formation_handlers
from formation_flight.aircraft import handlers as aircraft_handlers
from formation_flight.aircraft import generators
from formation_flight.hub import builders
from formation_flight.hub import allocators
from lib import sim, debug, sink
from lib.debug import print_line as p
from formation_flight import statistics
import config
import os
import numpy as np
config.sink_dir = '%s/sink' % os.path.dirname(__file__)
def init():
sink.init(config.sink_dir)
def execute():
init()
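    # Sweep Z across [0, 1] in 250 steps, running a full simulation for each value.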
for z in np.linspace(0, 1, 250):
config.Z = z
single_run()
def single_run():
sim.init()
aircraft_handlers.init()
formation_handlers.init()
statistics.init()
# Construct flight list
planes = generators.get_via_stdin()
# Find hubs
config.hubs = builders.build_hubs(planes, config.count_hubs, config.Z)
# Allocate hubs to flights
allocators.allocate(planes, config.hubs)
for flight in planes:
sim.events.append(sim.Event('aircraft-init', flight, 0))
sim.run()
sink.push(statistics.vars)
debug.print_dictionary(statistics.vars)
| mit | -6,382,942,506,317,856,000 | 21.482143 | 74 | 0.693407 | false |
octopicorn/cloudbrain | cloudbrain/connectors/MockConnector.py | 1 | 1165 | import time
import random
from cloudbrain.connectors.ConnectorInterface import Connector
from cloudbrain.utils.metadata_info import get_num_channels
class MockConnector(Connector):
def __init__(self, publishers, buffer_size, device_name, device_port='mock_port', device_mac=None):
"""
:return:
"""
super(MockConnector, self).__init__(publishers, buffer_size, device_name, device_port, device_mac)
self.data_generators = [self.data_generator_factory(metric, get_num_channels(self.device_name, metric)) for metric in self.metrics]
def connect_device(self):
"""
        Mock connector: there is no real device to connect to, so do nothing.
"""
pass
def start(self):
while 1:
for data_generator in self.data_generators:
data_generator()
time.sleep(1)
def data_generator_factory(self, metric_name, num_channels):
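        # Build a closure that writes one random sample per channel (plus a
        # microsecond timestamp) into this metric's buffer each time it is called.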
def data_generator():
message = {"channel_%s" % i: random.random() * 10 for i in xrange(num_channels)}
message['timestamp'] = int(time.time() * 1000000) # micro seconds
print message
self.buffers[metric_name].write(message)
return data_generator
| agpl-3.0 | -3,348,609,461,343,070,700 | 22.77551 | 135 | 0.678112 | false |
insequent/quark | quark/cache/security_groups_client.py | 1 | 8108 | # Copyright 2014 Openstack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import json
import netaddr
from oslo_log import log as logging
from quark.cache import redis_base
from quark import exceptions as q_exc
from quark import protocols
from quark import utils
LOG = logging.getLogger(__name__)
SECURITY_GROUP_RULE_KEY = "rules"
SECURITY_GROUP_HASH_ATTR = "security group rules"
SECURITY_GROUP_ACK = "security group ack"
ALL_V4 = netaddr.IPNetwork("::ffff:0.0.0.0/96")
ALL_V6 = netaddr.IPNetwork("::/0")
class SecurityGroupsClient(redis_base.ClientBase):
def _convert_remote_network(self, remote_ip_prefix):
# NOTE(mdietz): RM11364 - While a /0 is valid and should be supported,
# it breaks OVS to apply a /0 as the source or
# destination network.
net = netaddr.IPNetwork(remote_ip_prefix).ipv6()
if net.cidr == ALL_V4 or net.cidr == ALL_V6:
return ''
return str(net)
def serialize_rules(self, rules):
"""Creates a payload for the redis server."""
# TODO(mdietz): If/when we support other rule types, this comment
# will have to be revised.
# Action and direction are static, for now. The implementation may
# support 'deny' and 'egress' respectively in the future. We allow
# the direction to be set to something else, technically, but current
# plugin level call actually raises. It's supported here for unit
# test purposes at this time
serialized = []
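        # Flatten each rule into one payload dict for the redis server.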
for rule in rules:
direction = rule["direction"]
source = ''
destination = ''
if rule.get("remote_ip_prefix"):
prefix = rule["remote_ip_prefix"]
if direction == "ingress":
source = self._convert_remote_network(prefix)
else:
destination = self._convert_remote_network(prefix)
optional_fields = {}
# NOTE(mdietz): this will expand as we add more protocols
protocol_map = protocols.PROTOCOL_MAP[rule["ethertype"]]
if rule["protocol"] == protocol_map["icmp"]:
optional_fields["icmp type"] = rule["port_range_min"]
optional_fields["icmp code"] = rule["port_range_max"]
else:
optional_fields["port start"] = rule["port_range_min"]
optional_fields["port end"] = rule["port_range_max"]
payload = {"ethertype": rule["ethertype"],
"protocol": rule["protocol"],
"source network": source,
"destination network": destination,
"action": "allow",
"direction": direction}
payload.update(optional_fields)
serialized.append(payload)
return serialized
def serialize_groups(self, groups):
"""Creates a payload for the redis server
The rule schema is the following:
REDIS KEY - port_device_id.port_mac_address/sg
REDIS VALUE - A JSON dump of the following:
port_mac_address must be lower-cased and stripped of non-alphanumeric
characters
{"id": "<arbitrary uuid>",
"rules": [
{"ethertype": <hexademical integer>,
"protocol": <integer>,
"port start": <integer>, # optional
"port end": <integer>, # optional
"icmp type": <integer>, # optional
"icmp code": <integer>, # optional
"source network": <string>,
"destination network": <string>,
"action": <string>,
"direction": <string>},
],
"security groups ack": <boolean>
}
Example:
{"id": "004c6369-9f3d-4d33-b8f5-9416bf3567dd",
"rules": [
{"ethertype": 0x800,
"protocol": "tcp",
"port start": 1000,
"port end": 1999,
"source network": "10.10.10.0/24",
"destination network": "",
"action": "allow",
"direction": "ingress"},
],
"security groups ack": "true"
}
port start/end and icmp type/code are mutually exclusive pairs.
"""
rules = []
for group in groups:
rules.extend(self.serialize_rules(group.rules))
return rules
def get_rules_for_port(self, device_id, mac_address):
rules = self.get_field(
self.vif_key(device_id, mac_address), SECURITY_GROUP_HASH_ATTR)
if rules:
return json.loads(rules)
def apply_rules(self, device_id, mac_address, rules):
"""Writes a series of security group rules to a redis server."""
LOG.info("Applying security group rules for device %s with MAC %s" %
(device_id, mac_address))
if not self._use_master:
raise q_exc.RedisSlaveWritesForbidden()
rule_dict = {SECURITY_GROUP_RULE_KEY: rules}
redis_key = self.vif_key(device_id, mac_address)
# TODO(mdietz): Pipeline these. Requires some rewriting
self.set_field(redis_key, SECURITY_GROUP_HASH_ATTR, rule_dict)
self.set_field_raw(redis_key, SECURITY_GROUP_ACK, False)
def delete_vif_rules(self, device_id, mac_address):
# Redis HDEL command will ignore key safely if it doesn't exist
self.delete_field(self.vif_key(device_id, mac_address),
SECURITY_GROUP_HASH_ATTR)
self.delete_field(self.vif_key(device_id, mac_address),
SECURITY_GROUP_ACK)
def delete_vif(self, device_id, mac_address):
# Redis DEL command will ignore key safely if it doesn't exist
self.delete_key(self.vif_key(device_id, mac_address))
@utils.retry_loop(3)
def get_security_group_states(self, interfaces):
"""Gets security groups for interfaces from Redis
Returns a dictionary of xapi.VIFs with values of the current
acknowledged status in Redis.
States not explicitly handled:
* ack key, no rules - This is the same as just tagging the VIF,
the instance will be inaccessible
* rules key, no ack - Nothing will happen, the VIF will
not be tagged.
"""
LOG.debug("Getting security groups from Redis for {0}".format(
interfaces))
interfaces = tuple(interfaces)
vif_keys = [self.vif_key(vif.device_id, vif.mac_address)
for vif in interfaces]
security_groups = self.get_fields(vif_keys, SECURITY_GROUP_ACK)
ret = {}
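        # VIFs without a recognizable ack value are left out of the returned mapping.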
for vif, security_group_ack in zip(interfaces, security_groups):
if security_group_ack:
security_group_ack = security_group_ack.lower()
if "true" in security_group_ack:
ret[vif] = True
elif "false" in security_group_ack:
ret[vif] = False
else:
LOG.debug("Skipping bad ack value %s" % security_group_ack)
return ret
@utils.retry_loop(3)
def update_group_states_for_vifs(self, vifs, ack):
"""Updates security groups by setting the ack field"""
if not self._use_master:
raise q_exc.RedisSlaveWritesForbidden()
vif_keys = [self.vif_key(vif.device_id, vif.mac_address)
for vif in vifs]
self.set_fields(vif_keys, SECURITY_GROUP_ACK, ack)
| apache-2.0 | 9,148,292,089,688,650,000 | 37.980769 | 79 | 0.585101 | false |
harisbal/pandas | pandas/tests/test_panel.py | 1 | 95658 | # -*- coding: utf-8 -*-
# pylint: disable=W0612,E1101
from warnings import catch_warnings, simplefilter
from datetime import datetime
import operator
import pytest
import numpy as np
from pandas.core.dtypes.common import is_float_dtype
from pandas import (Series, DataFrame, Index, date_range, isna, notna,
MultiIndex)
from pandas.core.nanops import nanall, nanany
from pandas.core.panel import Panel
from pandas.io.formats.printing import pprint_thing
from pandas import compat
from pandas.compat import range, lrange, StringIO, OrderedDict, signature
from pandas.tseries.offsets import BDay, MonthEnd
from pandas.util.testing import (assert_panel_equal, assert_frame_equal,
assert_series_equal, assert_almost_equal,
ensure_clean, makeMixedDataFrame,
makeCustomDataframe as mkdf)
import pandas.core.panel as panelm
import pandas.util.testing as tm
import pandas.util._test_decorators as td
def make_test_panel():
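    # Construct the shared test Panel, silencing the FutureWarning raised for Panel usage.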
with catch_warnings(record=True):
simplefilter("ignore", FutureWarning)
_panel = tm.makePanel()
tm.add_nans(_panel)
_panel = _panel.copy()
return _panel
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class PanelTests(object):
panel = None
def test_pickle(self):
unpickled = tm.round_trip_pickle(self.panel)
assert_frame_equal(unpickled['ItemA'], self.panel['ItemA'])
def test_rank(self):
pytest.raises(NotImplementedError, lambda: self.panel.rank())
def test_cumsum(self):
cumsum = self.panel.cumsum()
assert_frame_equal(cumsum['ItemA'], self.panel['ItemA'].cumsum())
def not_hashable(self):
c_empty = Panel()
c = Panel(Panel([[[1]]]))
pytest.raises(TypeError, hash, c_empty)
pytest.raises(TypeError, hash, c)
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class SafeForLongAndSparse(object):
def test_repr(self):
repr(self.panel)
def test_copy_names(self):
for attr in ('major_axis', 'minor_axis'):
getattr(self.panel, attr).name = None
cp = self.panel.copy()
getattr(cp, attr).name = 'foo'
assert getattr(self.panel, attr).name is None
def test_iter(self):
tm.equalContents(list(self.panel), self.panel.items)
def test_count(self):
f = lambda s: notna(s).sum()
self._check_stat_op('count', f, obj=self.panel, has_skipna=False)
def test_sum(self):
self._check_stat_op('sum', np.sum, skipna_alternative=np.nansum)
def test_mean(self):
self._check_stat_op('mean', np.mean)
@td.skip_if_no("numpy", min_version="1.10.0")
def test_prod(self):
self._check_stat_op('prod', np.prod, skipna_alternative=np.nanprod)
@pytest.mark.filterwarnings("ignore:Invalid value:RuntimeWarning")
@pytest.mark.filterwarnings("ignore:All-NaN:RuntimeWarning")
def test_median(self):
def wrapper(x):
if isna(x).any():
return np.nan
return np.median(x)
self._check_stat_op('median', wrapper)
@pytest.mark.filterwarnings("ignore:Invalid value:RuntimeWarning")
def test_min(self):
self._check_stat_op('min', np.min)
@pytest.mark.filterwarnings("ignore:Invalid value:RuntimeWarning")
def test_max(self):
self._check_stat_op('max', np.max)
@td.skip_if_no_scipy
def test_skew(self):
from scipy.stats import skew
def this_skew(x):
if len(x) < 3:
return np.nan
return skew(x, bias=False)
self._check_stat_op('skew', this_skew)
def test_var(self):
def alt(x):
if len(x) < 2:
return np.nan
return np.var(x, ddof=1)
self._check_stat_op('var', alt)
def test_std(self):
def alt(x):
if len(x) < 2:
return np.nan
return np.std(x, ddof=1)
self._check_stat_op('std', alt)
def test_sem(self):
def alt(x):
if len(x) < 2:
return np.nan
return np.std(x, ddof=1) / np.sqrt(len(x))
self._check_stat_op('sem', alt)
def _check_stat_op(self, name, alternative, obj=None, has_skipna=True,
skipna_alternative=None):
if obj is None:
obj = self.panel
# # set some NAs
# obj.loc[5:10] = np.nan
# obj.loc[15:20, -2:] = np.nan
f = getattr(obj, name)
if has_skipna:
skipna_wrapper = tm._make_skipna_wrapper(alternative,
skipna_alternative)
def wrapper(x):
return alternative(np.asarray(x))
for i in range(obj.ndim):
result = f(axis=i, skipna=False)
assert_frame_equal(result, obj.apply(wrapper, axis=i))
else:
skipna_wrapper = alternative
wrapper = alternative
for i in range(obj.ndim):
result = f(axis=i)
if name in ['sum', 'prod']:
assert_frame_equal(result, obj.apply(skipna_wrapper, axis=i))
pytest.raises(Exception, f, axis=obj.ndim)
# Unimplemented numeric_only parameter.
if 'numeric_only' in signature(f).args:
tm.assert_raises_regex(NotImplementedError, name, f,
numeric_only=True)
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class SafeForSparse(object):
def test_get_axis(self):
assert (self.panel._get_axis(0) is self.panel.items)
assert (self.panel._get_axis(1) is self.panel.major_axis)
assert (self.panel._get_axis(2) is self.panel.minor_axis)
def test_set_axis(self):
new_items = Index(np.arange(len(self.panel.items)))
new_major = Index(np.arange(len(self.panel.major_axis)))
new_minor = Index(np.arange(len(self.panel.minor_axis)))
# ensure propagate to potentially prior-cached items too
item = self.panel['ItemA']
self.panel.items = new_items
if hasattr(self.panel, '_item_cache'):
assert 'ItemA' not in self.panel._item_cache
assert self.panel.items is new_items
# TODO: unused?
item = self.panel[0] # noqa
self.panel.major_axis = new_major
assert self.panel[0].index is new_major
assert self.panel.major_axis is new_major
# TODO: unused?
item = self.panel[0] # noqa
self.panel.minor_axis = new_minor
assert self.panel[0].columns is new_minor
assert self.panel.minor_axis is new_minor
def test_get_axis_number(self):
assert self.panel._get_axis_number('items') == 0
assert self.panel._get_axis_number('major') == 1
assert self.panel._get_axis_number('minor') == 2
with tm.assert_raises_regex(ValueError, "No axis named foo"):
self.panel._get_axis_number('foo')
with tm.assert_raises_regex(ValueError, "No axis named foo"):
self.panel.__ge__(self.panel, axis='foo')
def test_get_axis_name(self):
assert self.panel._get_axis_name(0) == 'items'
assert self.panel._get_axis_name(1) == 'major_axis'
assert self.panel._get_axis_name(2) == 'minor_axis'
def test_get_plane_axes(self):
# what to do here?
index, columns = self.panel._get_plane_axes('items')
index, columns = self.panel._get_plane_axes('major_axis')
index, columns = self.panel._get_plane_axes('minor_axis')
index, columns = self.panel._get_plane_axes(0)
def test_truncate(self):
dates = self.panel.major_axis
start, end = dates[1], dates[5]
trunced = self.panel.truncate(start, end, axis='major')
expected = self.panel['ItemA'].truncate(start, end)
assert_frame_equal(trunced['ItemA'], expected)
trunced = self.panel.truncate(before=start, axis='major')
expected = self.panel['ItemA'].truncate(before=start)
assert_frame_equal(trunced['ItemA'], expected)
trunced = self.panel.truncate(after=end, axis='major')
expected = self.panel['ItemA'].truncate(after=end)
assert_frame_equal(trunced['ItemA'], expected)
def test_arith(self):
self._test_op(self.panel, operator.add)
self._test_op(self.panel, operator.sub)
self._test_op(self.panel, operator.mul)
self._test_op(self.panel, operator.truediv)
self._test_op(self.panel, operator.floordiv)
self._test_op(self.panel, operator.pow)
self._test_op(self.panel, lambda x, y: y + x)
self._test_op(self.panel, lambda x, y: y - x)
self._test_op(self.panel, lambda x, y: y * x)
self._test_op(self.panel, lambda x, y: y / x)
self._test_op(self.panel, lambda x, y: y ** x)
self._test_op(self.panel, lambda x, y: x + y) # panel + 1
self._test_op(self.panel, lambda x, y: x - y) # panel - 1
self._test_op(self.panel, lambda x, y: x * y) # panel * 1
self._test_op(self.panel, lambda x, y: x / y) # panel / 1
self._test_op(self.panel, lambda x, y: x ** y) # panel ** 1
pytest.raises(Exception, self.panel.__add__,
self.panel['ItemA'])
@staticmethod
def _test_op(panel, op):
result = op(panel, 1)
assert_frame_equal(result['ItemA'], op(panel['ItemA'], 1))
def test_keys(self):
tm.equalContents(list(self.panel.keys()), self.panel.items)
def test_iteritems(self):
# Test panel.iteritems(), aka panel.iteritems()
# just test that it works
for k, v in self.panel.iteritems():
pass
assert len(list(self.panel.iteritems())) == len(self.panel.items)
def test_combineFrame(self):
def check_op(op, name):
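            # Exercise the operator against DataFrame slices along each of the panel's three axes.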
# items
df = self.panel['ItemA']
func = getattr(self.panel, name)
result = func(df, axis='items')
assert_frame_equal(
result['ItemB'], op(self.panel['ItemB'], df))
# major
xs = self.panel.major_xs(self.panel.major_axis[0])
result = func(xs, axis='major')
idx = self.panel.major_axis[1]
assert_frame_equal(result.major_xs(idx),
op(self.panel.major_xs(idx), xs))
# minor
xs = self.panel.minor_xs(self.panel.minor_axis[0])
result = func(xs, axis='minor')
idx = self.panel.minor_axis[1]
assert_frame_equal(result.minor_xs(idx),
op(self.panel.minor_xs(idx), xs))
ops = ['add', 'sub', 'mul', 'truediv', 'floordiv', 'pow', 'mod']
if not compat.PY3:
ops.append('div')
for op in ops:
try:
check_op(getattr(operator, op), op)
            except Exception:
pprint_thing("Failing operation: %r" % op)
raise
if compat.PY3:
try:
check_op(operator.truediv, 'div')
            except Exception:
pprint_thing("Failing operation: %r" % 'div')
raise
def test_combinePanel(self):
result = self.panel.add(self.panel)
assert_panel_equal(result, self.panel * 2)
def test_neg(self):
assert_panel_equal(-self.panel, self.panel * -1)
# issue 7692
def test_raise_when_not_implemented(self):
p = Panel(np.arange(3 * 4 * 5).reshape(3, 4, 5),
items=['ItemA', 'ItemB', 'ItemC'],
major_axis=date_range('20130101', periods=4),
minor_axis=list('ABCDE'))
d = p.sum(axis=1).iloc[0]
ops = ['add', 'sub', 'mul', 'truediv',
'floordiv', 'div', 'mod', 'pow']
for op in ops:
with pytest.raises(NotImplementedError):
getattr(p, op)(d, axis=0)
def test_select(self):
p = self.panel
# select items
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
result = p.select(lambda x: x in ('ItemA', 'ItemC'), axis='items')
expected = p.reindex(items=['ItemA', 'ItemC'])
assert_panel_equal(result, expected)
# select major_axis
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
result = p.select(lambda x: x >= datetime(
2000, 1, 15), axis='major')
new_major = p.major_axis[p.major_axis >= datetime(2000, 1, 15)]
expected = p.reindex(major=new_major)
assert_panel_equal(result, expected)
# select minor_axis
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
result = p.select(lambda x: x in ('D', 'A'), axis=2)
expected = p.reindex(minor=['A', 'D'])
assert_panel_equal(result, expected)
# corner case, empty thing
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
result = p.select(lambda x: x in ('foo', ), axis='items')
assert_panel_equal(result, p.reindex(items=[]))
def test_get_value(self):
for item in self.panel.items:
for mjr in self.panel.major_axis[::2]:
for mnr in self.panel.minor_axis:
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = self.panel.get_value(item, mjr, mnr)
expected = self.panel[item][mnr][mjr]
assert_almost_equal(result, expected)
def test_abs(self):
result = self.panel.abs()
result2 = abs(self.panel)
expected = np.abs(self.panel)
assert_panel_equal(result, expected)
assert_panel_equal(result2, expected)
df = self.panel['ItemA']
result = df.abs()
result2 = abs(df)
expected = np.abs(df)
assert_frame_equal(result, expected)
assert_frame_equal(result2, expected)
s = df['A']
result = s.abs()
result2 = abs(s)
expected = np.abs(s)
assert_series_equal(result, expected)
assert_series_equal(result2, expected)
assert result.name == 'A'
assert result2.name == 'A'
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class CheckIndexing(object):
def test_getitem(self):
pytest.raises(Exception, self.panel.__getitem__, 'ItemQ')
def test_delitem_and_pop(self):
expected = self.panel['ItemA']
result = self.panel.pop('ItemA')
assert_frame_equal(expected, result)
assert 'ItemA' not in self.panel.items
del self.panel['ItemB']
assert 'ItemB' not in self.panel.items
pytest.raises(Exception, self.panel.__delitem__, 'ItemB')
values = np.empty((3, 3, 3))
values[0] = 0
values[1] = 1
values[2] = 2
panel = Panel(values, lrange(3), lrange(3), lrange(3))
# did we delete the right row?
panelc = panel.copy()
del panelc[0]
tm.assert_frame_equal(panelc[1], panel[1])
tm.assert_frame_equal(panelc[2], panel[2])
panelc = panel.copy()
del panelc[1]
tm.assert_frame_equal(panelc[0], panel[0])
tm.assert_frame_equal(panelc[2], panel[2])
panelc = panel.copy()
del panelc[2]
tm.assert_frame_equal(panelc[1], panel[1])
tm.assert_frame_equal(panelc[0], panel[0])
def test_setitem(self):
lp = self.panel.filter(['ItemA', 'ItemB']).to_frame()
with pytest.raises(ValueError):
self.panel['ItemE'] = lp
# DataFrame
df = self.panel['ItemA'][2:].filter(items=['A', 'B'])
self.panel['ItemF'] = df
self.panel['ItemE'] = df
df2 = self.panel['ItemF']
assert_frame_equal(df, df2.reindex(
index=df.index, columns=df.columns))
# scalar
self.panel['ItemG'] = 1
self.panel['ItemE'] = True
assert self.panel['ItemG'].values.dtype == np.int64
assert self.panel['ItemE'].values.dtype == np.bool_
# object dtype
self.panel['ItemQ'] = 'foo'
assert self.panel['ItemQ'].values.dtype == np.object_
# boolean dtype
self.panel['ItemP'] = self.panel['ItemA'] > 0
assert self.panel['ItemP'].values.dtype == np.bool_
pytest.raises(TypeError, self.panel.__setitem__, 'foo',
self.panel.loc[['ItemP']])
# bad shape
p = Panel(np.random.randn(4, 3, 2))
with tm.assert_raises_regex(ValueError,
r"shape of value must be "
r"\(3, 2\), shape of given "
r"object was \(4, 2\)"):
p[0] = np.random.randn(4, 2)
def test_setitem_ndarray(self):
timeidx = date_range(start=datetime(2009, 1, 1),
end=datetime(2009, 12, 31),
freq=MonthEnd())
lons_coarse = np.linspace(-177.5, 177.5, 72)
lats_coarse = np.linspace(-87.5, 87.5, 36)
P = Panel(items=timeidx, major_axis=lons_coarse,
minor_axis=lats_coarse)
data = np.random.randn(72 * 36).reshape((72, 36))
key = datetime(2009, 2, 28)
P[key] = data
assert_almost_equal(P[key].values, data)
def test_set_minor_major(self):
# GH 11014
df1 = DataFrame(['a', 'a', 'a', np.nan, 'a', np.nan])
df2 = DataFrame([1.0, np.nan, 1.0, np.nan, 1.0, 1.0])
panel = Panel({'Item1': df1, 'Item2': df2})
newminor = notna(panel.iloc[:, :, 0])
panel.loc[:, :, 'NewMinor'] = newminor
assert_frame_equal(panel.loc[:, :, 'NewMinor'],
newminor.astype(object))
newmajor = notna(panel.iloc[:, 0, :])
panel.loc[:, 'NewMajor', :] = newmajor
assert_frame_equal(panel.loc[:, 'NewMajor', :],
newmajor.astype(object))
def test_major_xs(self):
ref = self.panel['ItemA']
idx = self.panel.major_axis[5]
xs = self.panel.major_xs(idx)
result = xs['ItemA']
assert_series_equal(result, ref.xs(idx), check_names=False)
assert result.name == 'ItemA'
# not contained
idx = self.panel.major_axis[0] - BDay()
pytest.raises(Exception, self.panel.major_xs, idx)
def test_major_xs_mixed(self):
self.panel['ItemD'] = 'foo'
xs = self.panel.major_xs(self.panel.major_axis[0])
assert xs['ItemA'].dtype == np.float64
assert xs['ItemD'].dtype == np.object_
def test_minor_xs(self):
ref = self.panel['ItemA']
idx = self.panel.minor_axis[1]
xs = self.panel.minor_xs(idx)
assert_series_equal(xs['ItemA'], ref[idx], check_names=False)
# not contained
pytest.raises(Exception, self.panel.minor_xs, 'E')
def test_minor_xs_mixed(self):
self.panel['ItemD'] = 'foo'
xs = self.panel.minor_xs('D')
assert xs['ItemA'].dtype == np.float64
assert xs['ItemD'].dtype == np.object_
def test_xs(self):
itemA = self.panel.xs('ItemA', axis=0)
expected = self.panel['ItemA']
tm.assert_frame_equal(itemA, expected)
# Get a view by default.
itemA_view = self.panel.xs('ItemA', axis=0)
itemA_view.values[:] = np.nan
assert np.isnan(self.panel['ItemA'].values).all()
# Mixed-type yields a copy.
self.panel['strings'] = 'foo'
result = self.panel.xs('D', axis=2)
assert result._is_copy is not None
def test_getitem_fancy_labels(self):
p = self.panel
items = p.items[[1, 0]]
dates = p.major_axis[::2]
cols = ['D', 'C', 'F']
# all 3 specified
with catch_warnings():
simplefilter("ignore", FutureWarning)
# XXX: warning in _validate_read_indexer
assert_panel_equal(p.loc[items, dates, cols],
p.reindex(items=items, major=dates, minor=cols))
# 2 specified
assert_panel_equal(p.loc[:, dates, cols],
p.reindex(major=dates, minor=cols))
assert_panel_equal(p.loc[items, :, cols],
p.reindex(items=items, minor=cols))
assert_panel_equal(p.loc[items, dates, :],
p.reindex(items=items, major=dates))
# only 1
assert_panel_equal(p.loc[items, :, :], p.reindex(items=items))
assert_panel_equal(p.loc[:, dates, :], p.reindex(major=dates))
assert_panel_equal(p.loc[:, :, cols], p.reindex(minor=cols))
def test_getitem_fancy_slice(self):
pass
def test_getitem_fancy_ints(self):
p = self.panel
# #1603
result = p.iloc[:, -1, :]
expected = p.loc[:, p.major_axis[-1], :]
assert_frame_equal(result, expected)
def test_getitem_fancy_xs(self):
p = self.panel
item = 'ItemB'
date = p.major_axis[5]
col = 'C'
# get DataFrame
# item
assert_frame_equal(p.loc[item], p[item])
assert_frame_equal(p.loc[item, :], p[item])
assert_frame_equal(p.loc[item, :, :], p[item])
# major axis, axis=1
assert_frame_equal(p.loc[:, date], p.major_xs(date))
assert_frame_equal(p.loc[:, date, :], p.major_xs(date))
# minor axis, axis=2
assert_frame_equal(p.loc[:, :, 'C'], p.minor_xs('C'))
# get Series
assert_series_equal(p.loc[item, date], p[item].loc[date])
assert_series_equal(p.loc[item, date, :], p[item].loc[date])
assert_series_equal(p.loc[item, :, col], p[item][col])
assert_series_equal(p.loc[:, date, col], p.major_xs(date).loc[col])
def test_getitem_fancy_xs_check_view(self):
item = 'ItemB'
date = self.panel.major_axis[5]
# make sure it's always a view
NS = slice(None, None)
# DataFrames
comp = assert_frame_equal
self._check_view(item, comp)
self._check_view((item, NS), comp)
self._check_view((item, NS, NS), comp)
self._check_view((NS, date), comp)
self._check_view((NS, date, NS), comp)
self._check_view((NS, NS, 'C'), comp)
# Series
comp = assert_series_equal
self._check_view((item, date), comp)
self._check_view((item, date, NS), comp)
self._check_view((item, NS, 'C'), comp)
self._check_view((NS, date, 'C'), comp)
def test_getitem_callable(self):
p = self.panel
# GH 12533
assert_frame_equal(p[lambda x: 'ItemB'], p.loc['ItemB'])
assert_panel_equal(p[lambda x: ['ItemB', 'ItemC']],
p.loc[['ItemB', 'ItemC']])
def test_ix_setitem_slice_dataframe(self):
a = Panel(items=[1, 2, 3], major_axis=[11, 22, 33],
minor_axis=[111, 222, 333])
b = DataFrame(np.random.randn(2, 3), index=[111, 333],
columns=[1, 2, 3])
a.loc[:, 22, [111, 333]] = b
assert_frame_equal(a.loc[:, 22, [111, 333]], b)
def test_ix_align(self):
from pandas import Series
b = Series(np.random.randn(10), name=0)
        b = b.sort_values()
df_orig = Panel(np.random.randn(3, 10, 2))
df = df_orig.copy()
df.loc[0, :, 0] = b
assert_series_equal(df.loc[0, :, 0].reindex(b.index), b)
df = df_orig.swapaxes(0, 1)
df.loc[:, 0, 0] = b
assert_series_equal(df.loc[:, 0, 0].reindex(b.index), b)
df = df_orig.swapaxes(1, 2)
df.loc[0, 0, :] = b
assert_series_equal(df.loc[0, 0, :].reindex(b.index), b)
def test_ix_frame_align(self):
p_orig = tm.makePanel()
df = p_orig.iloc[0].copy()
assert_frame_equal(p_orig['ItemA'], df)
p = p_orig.copy()
p.iloc[0, :, :] = df
assert_panel_equal(p, p_orig)
p = p_orig.copy()
p.iloc[0] = df
assert_panel_equal(p, p_orig)
p = p_orig.copy()
p.iloc[0, :, :] = df
assert_panel_equal(p, p_orig)
p = p_orig.copy()
p.iloc[0] = df
assert_panel_equal(p, p_orig)
p = p_orig.copy()
p.loc['ItemA'] = df
assert_panel_equal(p, p_orig)
p = p_orig.copy()
p.loc['ItemA', :, :] = df
assert_panel_equal(p, p_orig)
p = p_orig.copy()
p['ItemA'] = df
assert_panel_equal(p, p_orig)
p = p_orig.copy()
p.iloc[0, [0, 1, 3, 5], -2:] = df
out = p.iloc[0, [0, 1, 3, 5], -2:]
assert_frame_equal(out, df.iloc[[0, 1, 3, 5], [2, 3]])
        # GH 3830, panel assignment by values/frame
for dtype in ['float64', 'int64']:
panel = Panel(np.arange(40).reshape((2, 4, 5)),
items=['a1', 'a2'], dtype=dtype)
df1 = panel.iloc[0]
df2 = panel.iloc[1]
tm.assert_frame_equal(panel.loc['a1'], df1)
tm.assert_frame_equal(panel.loc['a2'], df2)
# Assignment by Value Passes for 'a2'
panel.loc['a2'] = df1.values
tm.assert_frame_equal(panel.loc['a1'], df1)
tm.assert_frame_equal(panel.loc['a2'], df1)
# Assignment by DataFrame Ok w/o loc 'a2'
panel['a2'] = df2
tm.assert_frame_equal(panel.loc['a1'], df1)
tm.assert_frame_equal(panel.loc['a2'], df2)
# Assignment by DataFrame Fails for 'a2'
panel.loc['a2'] = df2
tm.assert_frame_equal(panel.loc['a1'], df1)
tm.assert_frame_equal(panel.loc['a2'], df2)
def _check_view(self, indexer, comp):
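        # zero out the selection; if it is a view, re-selecting from the copy must show the zeros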
cp = self.panel.copy()
obj = cp.loc[indexer]
obj.values[:] = 0
assert (obj.values == 0).all()
comp(cp.loc[indexer].reindex_like(obj), obj)
def test_logical_with_nas(self):
d = Panel({'ItemA': {'a': [np.nan, False]},
'ItemB': {'a': [True, True]}})
result = d['ItemA'] | d['ItemB']
expected = DataFrame({'a': [np.nan, True]})
assert_frame_equal(result, expected)
        # this is automatically downcast here
result = d['ItemA'].fillna(False) | d['ItemB']
expected = DataFrame({'a': [True, True]})
assert_frame_equal(result, expected)
def test_neg(self):
assert_panel_equal(-self.panel, -1 * self.panel)
def test_invert(self):
assert_panel_equal(-(self.panel < 0), ~(self.panel < 0))
def test_comparisons(self):
p1 = tm.makePanel()
p2 = tm.makePanel()
tp = p1.reindex(items=p1.items + ['foo'])
df = p1[p1.items[0]]
def test_comp(func):
# versus same index
result = func(p1, p2)
tm.assert_numpy_array_equal(result.values,
func(p1.values, p2.values))
# versus non-indexed same objs
pytest.raises(Exception, func, p1, tp)
# versus different objs
pytest.raises(Exception, func, p1, df)
# versus scalar
result3 = func(self.panel, 0)
tm.assert_numpy_array_equal(result3.values,
func(self.panel.values, 0))
with np.errstate(invalid='ignore'):
test_comp(operator.eq)
test_comp(operator.ne)
test_comp(operator.lt)
test_comp(operator.gt)
test_comp(operator.ge)
test_comp(operator.le)
def test_get_value(self):
for item in self.panel.items:
for mjr in self.panel.major_axis[::2]:
for mnr in self.panel.minor_axis:
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = self.panel.get_value(item, mjr, mnr)
expected = self.panel[item][mnr][mjr]
assert_almost_equal(result, expected)
with catch_warnings():
simplefilter("ignore", FutureWarning)
with tm.assert_raises_regex(TypeError,
"There must be an argument "
"for each axis"):
self.panel.get_value('a')
def test_set_value(self):
for item in self.panel.items:
for mjr in self.panel.major_axis[::2]:
for mnr in self.panel.minor_axis:
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
self.panel.set_value(item, mjr, mnr, 1.)
tm.assert_almost_equal(self.panel[item][mnr][mjr], 1.)
# resize
with catch_warnings():
simplefilter("ignore", FutureWarning)
res = self.panel.set_value('ItemE', 'foo', 'bar', 1.5)
assert isinstance(res, Panel)
assert res is not self.panel
assert res.get_value('ItemE', 'foo', 'bar') == 1.5
res3 = self.panel.set_value('ItemE', 'foobar', 'baz', 5)
assert is_float_dtype(res3['ItemE'].values)
msg = ("There must be an argument for each "
"axis plus the value provided")
with tm.assert_raises_regex(TypeError, msg):
self.panel.set_value('a')
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class TestPanel(PanelTests, CheckIndexing, SafeForLongAndSparse,
SafeForSparse):
def setup_method(self, method):
self.panel = make_test_panel()
self.panel.major_axis.name = None
self.panel.minor_axis.name = None
self.panel.items.name = None
def test_constructor(self):
# with BlockManager
wp = Panel(self.panel._data)
assert wp._data is self.panel._data
wp = Panel(self.panel._data, copy=True)
assert wp._data is not self.panel._data
tm.assert_panel_equal(wp, self.panel)
        # strings handled properly
wp = Panel([[['foo', 'foo', 'foo', ], ['foo', 'foo', 'foo']]])
assert wp.values.dtype == np.object_
vals = self.panel.values
# no copy
wp = Panel(vals)
assert wp.values is vals
# copy
wp = Panel(vals, copy=True)
assert wp.values is not vals
# GH #8285, test when scalar data is used to construct a Panel
# if dtype is not passed, it should be inferred
value_and_dtype = [(1, 'int64'), (3.14, 'float64'),
('foo', np.object_)]
for (val, dtype) in value_and_dtype:
wp = Panel(val, items=range(2), major_axis=range(3),
minor_axis=range(4))
vals = np.empty((2, 3, 4), dtype=dtype)
vals.fill(val)
tm.assert_panel_equal(wp, Panel(vals, dtype=dtype))
# test the case when dtype is passed
wp = Panel(1, items=range(2), major_axis=range(3),
minor_axis=range(4),
dtype='float32')
vals = np.empty((2, 3, 4), dtype='float32')
vals.fill(1)
tm.assert_panel_equal(wp, Panel(vals, dtype='float32'))
def test_constructor_cast(self):
zero_filled = self.panel.fillna(0)
casted = Panel(zero_filled._data, dtype=int)
casted2 = Panel(zero_filled.values, dtype=int)
exp_values = zero_filled.values.astype(int)
assert_almost_equal(casted.values, exp_values)
assert_almost_equal(casted2.values, exp_values)
casted = Panel(zero_filled._data, dtype=np.int32)
casted2 = Panel(zero_filled.values, dtype=np.int32)
exp_values = zero_filled.values.astype(np.int32)
assert_almost_equal(casted.values, exp_values)
assert_almost_equal(casted2.values, exp_values)
# can't cast
data = [[['foo', 'bar', 'baz']]]
pytest.raises(ValueError, Panel, data, dtype=float)
def test_constructor_empty_panel(self):
empty = Panel()
assert len(empty.items) == 0
assert len(empty.major_axis) == 0
assert len(empty.minor_axis) == 0
def test_constructor_observe_dtype(self):
# GH #411
panel = Panel(items=lrange(3), major_axis=lrange(3),
minor_axis=lrange(3), dtype='O')
assert panel.values.dtype == np.object_
def test_constructor_dtypes(self):
# GH #797
def _check_dtype(panel, dtype):
for i in panel.items:
assert panel[i].values.dtype.name == dtype
        # only NaN-holding types are allowed here
for dtype in ['float64', 'float32', 'object']:
panel = Panel(items=lrange(2), major_axis=lrange(10),
minor_axis=lrange(5), dtype=dtype)
_check_dtype(panel, dtype)
for dtype in ['float64', 'float32', 'int64', 'int32', 'object']:
panel = Panel(np.array(np.random.randn(2, 10, 5), dtype=dtype),
items=lrange(2),
major_axis=lrange(10),
minor_axis=lrange(5), dtype=dtype)
_check_dtype(panel, dtype)
for dtype in ['float64', 'float32', 'int64', 'int32', 'object']:
panel = Panel(np.array(np.random.randn(2, 10, 5), dtype='O'),
items=lrange(2),
major_axis=lrange(10),
minor_axis=lrange(5), dtype=dtype)
_check_dtype(panel, dtype)
for dtype in ['float64', 'float32', 'int64', 'int32', 'object']:
panel = Panel(
np.random.randn(2, 10, 5),
items=lrange(2), major_axis=lrange(10),
minor_axis=lrange(5),
dtype=dtype)
_check_dtype(panel, dtype)
for dtype in ['float64', 'float32', 'int64', 'int32', 'object']:
df1 = DataFrame(np.random.randn(2, 5),
index=lrange(2), columns=lrange(5))
df2 = DataFrame(np.random.randn(2, 5),
index=lrange(2), columns=lrange(5))
panel = Panel.from_dict({'a': df1, 'b': df2}, dtype=dtype)
_check_dtype(panel, dtype)
def test_constructor_fails_with_not_3d_input(self):
with tm.assert_raises_regex(ValueError, "The number of dimensions required is 3"): # noqa
Panel(np.random.randn(10, 2))
def test_consolidate(self):
assert self.panel._data.is_consolidated()
self.panel['foo'] = 1.
assert not self.panel._data.is_consolidated()
panel = self.panel._consolidate()
assert panel._data.is_consolidated()
def test_ctor_dict(self):
itema = self.panel['ItemA']
itemb = self.panel['ItemB']
d = {'A': itema, 'B': itemb[5:]}
d2 = {'A': itema._series, 'B': itemb[5:]._series}
d3 = {'A': None,
'B': DataFrame(itemb[5:]._series),
'C': DataFrame(itema._series)}
wp = Panel.from_dict(d)
wp2 = Panel.from_dict(d2) # nested Dict
# TODO: unused?
wp3 = Panel.from_dict(d3) # noqa
tm.assert_index_equal(wp.major_axis, self.panel.major_axis)
assert_panel_equal(wp, wp2)
# intersect
wp = Panel.from_dict(d, intersect=True)
tm.assert_index_equal(wp.major_axis, itemb.index[5:])
# use constructor
assert_panel_equal(Panel(d), Panel.from_dict(d))
assert_panel_equal(Panel(d2), Panel.from_dict(d2))
assert_panel_equal(Panel(d3), Panel.from_dict(d3))
# a pathological case
d4 = {'A': None, 'B': None}
# TODO: unused?
wp4 = Panel.from_dict(d4) # noqa
assert_panel_equal(Panel(d4), Panel(items=['A', 'B']))
# cast
dcasted = {k: v.reindex(wp.major_axis).fillna(0)
for k, v in compat.iteritems(d)}
result = Panel(dcasted, dtype=int)
expected = Panel({k: v.astype(int)
for k, v in compat.iteritems(dcasted)})
assert_panel_equal(result, expected)
result = Panel(dcasted, dtype=np.int32)
expected = Panel({k: v.astype(np.int32)
for k, v in compat.iteritems(dcasted)})
assert_panel_equal(result, expected)
def test_constructor_dict_mixed(self):
data = {k: v.values for k, v in self.panel.iteritems()}
result = Panel(data)
exp_major = Index(np.arange(len(self.panel.major_axis)))
tm.assert_index_equal(result.major_axis, exp_major)
result = Panel(data, items=self.panel.items,
major_axis=self.panel.major_axis,
minor_axis=self.panel.minor_axis)
assert_panel_equal(result, self.panel)
data['ItemC'] = self.panel['ItemC']
result = Panel(data)
assert_panel_equal(result, self.panel)
# corner, blow up
data['ItemB'] = data['ItemB'][:-1]
pytest.raises(Exception, Panel, data)
data['ItemB'] = self.panel['ItemB'].values[:, :-1]
pytest.raises(Exception, Panel, data)
def test_ctor_orderedDict(self):
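        # item order should follow the OrderedDict insertion order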
keys = list(set(np.random.randint(0, 5000, 100)))[
:50] # unique random int keys
d = OrderedDict([(k, mkdf(10, 5)) for k in keys])
p = Panel(d)
assert list(p.items) == keys
p = Panel.from_dict(d)
assert list(p.items) == keys
def test_constructor_resize(self):
data = self.panel._data
items = self.panel.items[:-1]
major = self.panel.major_axis[:-1]
minor = self.panel.minor_axis[:-1]
result = Panel(data, items=items,
major_axis=major, minor_axis=minor)
expected = self.panel.reindex(
items=items, major=major, minor=minor)
assert_panel_equal(result, expected)
result = Panel(data, items=items, major_axis=major)
expected = self.panel.reindex(items=items, major=major)
assert_panel_equal(result, expected)
result = Panel(data, items=items)
expected = self.panel.reindex(items=items)
assert_panel_equal(result, expected)
result = Panel(data, minor_axis=minor)
expected = self.panel.reindex(minor=minor)
assert_panel_equal(result, expected)
def test_from_dict_mixed_orient(self):
df = tm.makeDataFrame()
df['foo'] = 'bar'
data = {'k1': df, 'k2': df}
panel = Panel.from_dict(data, orient='minor')
assert panel['foo'].values.dtype == np.object_
assert panel['A'].values.dtype == np.float64
def test_constructor_error_msgs(self):
def testit():
Panel(np.random.randn(3, 4, 5),
lrange(4), lrange(5), lrange(5))
tm.assert_raises_regex(ValueError,
r"Shape of passed values is "
r"\(3, 4, 5\), indices imply "
r"\(4, 5, 5\)",
testit)
def testit():
Panel(np.random.randn(3, 4, 5),
lrange(5), lrange(4), lrange(5))
tm.assert_raises_regex(ValueError,
r"Shape of passed values is "
r"\(3, 4, 5\), indices imply "
r"\(5, 4, 5\)",
testit)
def testit():
Panel(np.random.randn(3, 4, 5),
lrange(5), lrange(5), lrange(4))
tm.assert_raises_regex(ValueError,
r"Shape of passed values is "
r"\(3, 4, 5\), indices imply "
r"\(5, 5, 4\)",
testit)
def test_conform(self):
df = self.panel['ItemA'][:-5].filter(items=['A', 'B'])
conformed = self.panel.conform(df)
tm.assert_index_equal(conformed.index, self.panel.major_axis)
tm.assert_index_equal(conformed.columns, self.panel.minor_axis)
def test_convert_objects(self):
# GH 4937
p = Panel(dict(A=dict(a=['1', '1.0'])))
expected = Panel(dict(A=dict(a=[1, 1.0])))
result = p._convert(numeric=True, coerce=True)
assert_panel_equal(result, expected)
def test_dtypes(self):
result = self.panel.dtypes
expected = Series(np.dtype('float64'), index=self.panel.items)
assert_series_equal(result, expected)
def test_astype(self):
# GH7271
data = np.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])
panel = Panel(data, ['a', 'b'], ['c', 'd'], ['e', 'f'])
str_data = np.array([[['1', '2'], ['3', '4']],
[['5', '6'], ['7', '8']]])
expected = Panel(str_data, ['a', 'b'], ['c', 'd'], ['e', 'f'])
assert_panel_equal(panel.astype(str), expected)
pytest.raises(NotImplementedError, panel.astype, {0: str})
def test_apply(self):
# GH1148
# ufunc
applied = self.panel.apply(np.sqrt)
with np.errstate(invalid='ignore'):
expected = np.sqrt(self.panel.values)
assert_almost_equal(applied.values, expected)
# ufunc same shape
result = self.panel.apply(lambda x: x * 2, axis='items')
expected = self.panel * 2
assert_panel_equal(result, expected)
result = self.panel.apply(lambda x: x * 2, axis='major_axis')
expected = self.panel * 2
assert_panel_equal(result, expected)
result = self.panel.apply(lambda x: x * 2, axis='minor_axis')
expected = self.panel * 2
assert_panel_equal(result, expected)
# reduction to DataFrame
result = self.panel.apply(lambda x: x.dtype, axis='items')
expected = DataFrame(np.dtype('float64'),
index=self.panel.major_axis,
columns=self.panel.minor_axis)
assert_frame_equal(result, expected)
result = self.panel.apply(lambda x: x.dtype, axis='major_axis')
expected = DataFrame(np.dtype('float64'),
index=self.panel.minor_axis,
columns=self.panel.items)
assert_frame_equal(result, expected)
result = self.panel.apply(lambda x: x.dtype, axis='minor_axis')
expected = DataFrame(np.dtype('float64'),
index=self.panel.major_axis,
columns=self.panel.items)
assert_frame_equal(result, expected)
# reductions via other dims
expected = self.panel.sum(0)
result = self.panel.apply(lambda x: x.sum(), axis='items')
assert_frame_equal(result, expected)
expected = self.panel.sum(1)
result = self.panel.apply(lambda x: x.sum(), axis='major_axis')
assert_frame_equal(result, expected)
expected = self.panel.sum(2)
result = self.panel.apply(lambda x: x.sum(), axis='minor_axis')
assert_frame_equal(result, expected)
# pass kwargs
result = self.panel.apply(
lambda x, y: x.sum() + y, axis='items', y=5)
expected = self.panel.sum(0) + 5
assert_frame_equal(result, expected)
def test_apply_slabs(self):
# same shape as original
result = self.panel.apply(lambda x: x * 2,
axis=['items', 'major_axis'])
expected = (self.panel * 2).transpose('minor_axis', 'major_axis',
'items')
assert_panel_equal(result, expected)
result = self.panel.apply(lambda x: x * 2,
axis=['major_axis', 'items'])
assert_panel_equal(result, expected)
result = self.panel.apply(lambda x: x * 2,
axis=['items', 'minor_axis'])
expected = (self.panel * 2).transpose('major_axis', 'minor_axis',
'items')
assert_panel_equal(result, expected)
result = self.panel.apply(lambda x: x * 2,
axis=['minor_axis', 'items'])
assert_panel_equal(result, expected)
result = self.panel.apply(lambda x: x * 2,
axis=['major_axis', 'minor_axis'])
expected = self.panel * 2
assert_panel_equal(result, expected)
result = self.panel.apply(lambda x: x * 2,
axis=['minor_axis', 'major_axis'])
assert_panel_equal(result, expected)
# reductions
result = self.panel.apply(lambda x: x.sum(0), axis=[
'items', 'major_axis'
])
expected = self.panel.sum(1).T
assert_frame_equal(result, expected)
result = self.panel.apply(lambda x: x.sum(1), axis=[
'items', 'major_axis'
])
expected = self.panel.sum(0)
assert_frame_equal(result, expected)
# transforms
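        # z-score each row of the 2D slab (subtract row mean, divide by row std)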
f = lambda x: ((x.T - x.mean(1)) / x.std(1)).T
# make sure that we don't trigger any warnings
result = self.panel.apply(f, axis=['items', 'major_axis'])
expected = Panel({ax: f(self.panel.loc[:, :, ax])
for ax in self.panel.minor_axis})
assert_panel_equal(result, expected)
result = self.panel.apply(f, axis=['major_axis', 'minor_axis'])
expected = Panel({ax: f(self.panel.loc[ax])
for ax in self.panel.items})
assert_panel_equal(result, expected)
result = self.panel.apply(f, axis=['minor_axis', 'items'])
expected = Panel({ax: f(self.panel.loc[:, ax])
for ax in self.panel.major_axis})
assert_panel_equal(result, expected)
# with multi-indexes
# GH7469
index = MultiIndex.from_tuples([('one', 'a'), ('one', 'b'), (
'two', 'a'), ('two', 'b')])
dfa = DataFrame(np.array(np.arange(12, dtype='int64')).reshape(
4, 3), columns=list("ABC"), index=index)
dfb = DataFrame(np.array(np.arange(10, 22, dtype='int64')).reshape(
4, 3), columns=list("ABC"), index=index)
p = Panel({'f': dfa, 'g': dfb})
result = p.apply(lambda x: x.sum(), axis=0)
        # on Windows this will be int32
result = result.astype('int64')
expected = p.sum(0)
assert_frame_equal(result, expected)
def test_apply_no_or_zero_ndim(self):
# GH10332
self.panel = Panel(np.random.rand(5, 5, 5))
result_int = self.panel.apply(lambda df: 0, axis=[1, 2])
result_float = self.panel.apply(lambda df: 0.0, axis=[1, 2])
result_int64 = self.panel.apply(
lambda df: np.int64(0), axis=[1, 2])
result_float64 = self.panel.apply(lambda df: np.float64(0.0),
axis=[1, 2])
expected_int = expected_int64 = Series([0] * 5)
expected_float = expected_float64 = Series([0.0] * 5)
assert_series_equal(result_int, expected_int)
assert_series_equal(result_int64, expected_int64)
assert_series_equal(result_float, expected_float)
assert_series_equal(result_float64, expected_float64)
def test_reindex(self):
ref = self.panel['ItemB']
# items
result = self.panel.reindex(items=['ItemA', 'ItemB'])
assert_frame_equal(result['ItemB'], ref)
# major
new_major = list(self.panel.major_axis[:10])
result = self.panel.reindex(major=new_major)
assert_frame_equal(result['ItemB'], ref.reindex(index=new_major))
        # raise exception when both major and major_axis are passed
pytest.raises(Exception, self.panel.reindex,
major_axis=new_major,
major=new_major)
# minor
new_minor = list(self.panel.minor_axis[:2])
result = self.panel.reindex(minor=new_minor)
assert_frame_equal(result['ItemB'], ref.reindex(columns=new_minor))
        # raise exception when both minor and minor_axis are passed
pytest.raises(Exception, self.panel.reindex,
minor_axis=new_minor,
minor=new_minor)
# this ok
result = self.panel.reindex()
assert_panel_equal(result, self.panel)
assert result is not self.panel
# with filling
smaller_major = self.panel.major_axis[::5]
smaller = self.panel.reindex(major=smaller_major)
larger = smaller.reindex(major=self.panel.major_axis, method='pad')
assert_frame_equal(larger.major_xs(self.panel.major_axis[1]),
smaller.major_xs(smaller_major[0]))
# don't necessarily copy
result = self.panel.reindex(
major=self.panel.major_axis, copy=False)
assert_panel_equal(result, self.panel)
assert result is self.panel
def test_reindex_axis_style(self):
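        # positional axes 0/1/2 correspond to items/major_axis/minor_axis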
panel = Panel(np.random.rand(5, 5, 5))
expected0 = Panel(panel.values).iloc[[0, 1]]
expected1 = Panel(panel.values).iloc[:, [0, 1]]
expected2 = Panel(panel.values).iloc[:, :, [0, 1]]
result = panel.reindex([0, 1], axis=0)
assert_panel_equal(result, expected0)
result = panel.reindex([0, 1], axis=1)
assert_panel_equal(result, expected1)
result = panel.reindex([0, 1], axis=2)
assert_panel_equal(result, expected2)
result = panel.reindex([0, 1], axis=2)
assert_panel_equal(result, expected2)
def test_reindex_multi(self):
# with and without copy full reindexing
result = self.panel.reindex(
items=self.panel.items,
major=self.panel.major_axis,
minor=self.panel.minor_axis, copy=False)
assert result.items is self.panel.items
assert result.major_axis is self.panel.major_axis
assert result.minor_axis is self.panel.minor_axis
result = self.panel.reindex(
items=self.panel.items,
major=self.panel.major_axis,
minor=self.panel.minor_axis, copy=False)
assert_panel_equal(result, self.panel)
# multi-axis indexing consistency
# GH 5900
df = DataFrame(np.random.randn(4, 3))
p = Panel({'Item1': df})
expected = Panel({'Item1': df})
expected['Item2'] = np.nan
items = ['Item1', 'Item2']
major_axis = np.arange(4)
minor_axis = np.arange(3)
results = []
results.append(p.reindex(items=items, major_axis=major_axis,
copy=True))
results.append(p.reindex(items=items, major_axis=major_axis,
copy=False))
results.append(p.reindex(items=items, minor_axis=minor_axis,
copy=True))
results.append(p.reindex(items=items, minor_axis=minor_axis,
copy=False))
results.append(p.reindex(items=items, major_axis=major_axis,
minor_axis=minor_axis, copy=True))
results.append(p.reindex(items=items, major_axis=major_axis,
minor_axis=minor_axis, copy=False))
for i, r in enumerate(results):
assert_panel_equal(expected, r)
def test_reindex_like(self):
# reindex_like
smaller = self.panel.reindex(items=self.panel.items[:-1],
major=self.panel.major_axis[:-1],
minor=self.panel.minor_axis[:-1])
smaller_like = self.panel.reindex_like(smaller)
assert_panel_equal(smaller, smaller_like)
def test_take(self):
# axis == 0
result = self.panel.take([2, 0, 1], axis=0)
expected = self.panel.reindex(items=['ItemC', 'ItemA', 'ItemB'])
assert_panel_equal(result, expected)
# axis >= 1
result = self.panel.take([3, 0, 1, 2], axis=2)
expected = self.panel.reindex(minor=['D', 'A', 'B', 'C'])
assert_panel_equal(result, expected)
# neg indices ok
expected = self.panel.reindex(minor=['D', 'D', 'B', 'C'])
result = self.panel.take([3, -1, 1, 2], axis=2)
assert_panel_equal(result, expected)
pytest.raises(Exception, self.panel.take, [4, 0, 1, 2], axis=2)
def test_sort_index(self):
import random
ritems = list(self.panel.items)
rmajor = list(self.panel.major_axis)
rminor = list(self.panel.minor_axis)
random.shuffle(ritems)
random.shuffle(rmajor)
random.shuffle(rminor)
random_order = self.panel.reindex(items=ritems)
sorted_panel = random_order.sort_index(axis=0)
assert_panel_equal(sorted_panel, self.panel)
# descending
random_order = self.panel.reindex(items=ritems)
sorted_panel = random_order.sort_index(axis=0, ascending=False)
assert_panel_equal(
sorted_panel,
self.panel.reindex(items=self.panel.items[::-1]))
random_order = self.panel.reindex(major=rmajor)
sorted_panel = random_order.sort_index(axis=1)
assert_panel_equal(sorted_panel, self.panel)
random_order = self.panel.reindex(minor=rminor)
sorted_panel = random_order.sort_index(axis=2)
assert_panel_equal(sorted_panel, self.panel)
def test_fillna(self):
filled = self.panel.fillna(0)
assert np.isfinite(filled.values).all()
filled = self.panel.fillna(method='backfill')
assert_frame_equal(filled['ItemA'],
self.panel['ItemA'].fillna(method='backfill'))
panel = self.panel.copy()
panel['str'] = 'foo'
filled = panel.fillna(method='backfill')
assert_frame_equal(filled['ItemA'],
panel['ItemA'].fillna(method='backfill'))
empty = self.panel.reindex(items=[])
filled = empty.fillna(0)
assert_panel_equal(filled, empty)
pytest.raises(ValueError, self.panel.fillna)
pytest.raises(ValueError, self.panel.fillna, 5, method='ffill')
pytest.raises(TypeError, self.panel.fillna, [1, 2])
pytest.raises(TypeError, self.panel.fillna, (1, 2))
# limit not implemented when only value is specified
p = Panel(np.random.randn(3, 4, 5))
p.iloc[0:2, 0:2, 0:2] = np.nan
pytest.raises(NotImplementedError,
lambda: p.fillna(999, limit=1))
        # Test in-place fillna
# Expected result
expected = Panel([[[0, 1], [2, 1]], [[10, 11], [12, 11]]],
items=['a', 'b'], minor_axis=['x', 'y'],
dtype=np.float64)
# method='ffill'
p1 = Panel([[[0, 1], [2, np.nan]], [[10, 11], [12, np.nan]]],
items=['a', 'b'], minor_axis=['x', 'y'],
dtype=np.float64)
p1.fillna(method='ffill', inplace=True)
assert_panel_equal(p1, expected)
# method='bfill'
p2 = Panel([[[0, np.nan], [2, 1]], [[10, np.nan], [12, 11]]],
items=['a', 'b'], minor_axis=['x', 'y'],
dtype=np.float64)
p2.fillna(method='bfill', inplace=True)
assert_panel_equal(p2, expected)
def test_ffill_bfill(self):
assert_panel_equal(self.panel.ffill(),
self.panel.fillna(method='ffill'))
assert_panel_equal(self.panel.bfill(),
self.panel.fillna(method='bfill'))
def test_truncate_fillna_bug(self):
# #1823
result = self.panel.truncate(before=None, after=None, axis='items')
# it works!
result.fillna(value=0.0)
def test_swapaxes(self):
result = self.panel.swapaxes('items', 'minor')
assert result.items is self.panel.minor_axis
result = self.panel.swapaxes('items', 'major')
assert result.items is self.panel.major_axis
result = self.panel.swapaxes('major', 'minor')
assert result.major_axis is self.panel.minor_axis
panel = self.panel.copy()
result = panel.swapaxes('major', 'minor')
panel.values[0, 0, 1] = np.nan
expected = panel.swapaxes('major', 'minor')
assert_panel_equal(result, expected)
# this should also work
result = self.panel.swapaxes(0, 1)
assert result.items is self.panel.major_axis
        # this works, but returns a copy
result = self.panel.swapaxes('items', 'items')
assert_panel_equal(self.panel, result)
assert id(self.panel) != id(result)
def test_transpose(self):
result = self.panel.transpose('minor', 'major', 'items')
expected = self.panel.swapaxes('items', 'minor')
assert_panel_equal(result, expected)
# test kwargs
result = self.panel.transpose(items='minor', major='major',
minor='items')
expected = self.panel.swapaxes('items', 'minor')
assert_panel_equal(result, expected)
        # test mixture of args
result = self.panel.transpose(
'minor', major='major', minor='items')
expected = self.panel.swapaxes('items', 'minor')
assert_panel_equal(result, expected)
result = self.panel.transpose('minor',
'major',
minor='items')
expected = self.panel.swapaxes('items', 'minor')
assert_panel_equal(result, expected)
# duplicate axes
with tm.assert_raises_regex(TypeError,
'not enough/duplicate arguments'):
self.panel.transpose('minor', maj='major', minor='items')
with tm.assert_raises_regex(ValueError,
'repeated axis in transpose'):
self.panel.transpose('minor', 'major', major='minor',
minor='items')
result = self.panel.transpose(2, 1, 0)
assert_panel_equal(result, expected)
result = self.panel.transpose('minor', 'items', 'major')
expected = self.panel.swapaxes('items', 'minor')
expected = expected.swapaxes('major', 'minor')
assert_panel_equal(result, expected)
result = self.panel.transpose(2, 0, 1)
assert_panel_equal(result, expected)
pytest.raises(ValueError, self.panel.transpose, 0, 0, 1)
def test_transpose_copy(self):
panel = self.panel.copy()
result = panel.transpose(2, 0, 1, copy=True)
expected = panel.swapaxes('items', 'minor')
expected = expected.swapaxes('major', 'minor')
assert_panel_equal(result, expected)
panel.values[0, 1, 1] = np.nan
assert notna(result.values[1, 0, 1])
def test_to_frame(self):
# filtered
filtered = self.panel.to_frame()
expected = self.panel.to_frame().dropna(how='any')
assert_frame_equal(filtered, expected)
# unfiltered
unfiltered = self.panel.to_frame(filter_observations=False)
assert_panel_equal(unfiltered.to_panel(), self.panel)
# names
assert unfiltered.index.names == ('major', 'minor')
# unsorted, round trip
df = self.panel.to_frame(filter_observations=False)
unsorted = df.take(np.random.permutation(len(df)))
pan = unsorted.to_panel()
assert_panel_equal(pan, self.panel)
# preserve original index names
df = DataFrame(np.random.randn(6, 2),
index=[['a', 'a', 'b', 'b', 'c', 'c'],
[0, 1, 0, 1, 0, 1]],
columns=['one', 'two'])
df.index.names = ['foo', 'bar']
df.columns.name = 'baz'
rdf = df.to_panel().to_frame()
assert rdf.index.names == df.index.names
assert rdf.columns.names == df.columns.names
def test_to_frame_mixed(self):
panel = self.panel.fillna(0)
panel['str'] = 'foo'
panel['bool'] = panel['ItemA'] > 0
lp = panel.to_frame()
wp = lp.to_panel()
assert wp['bool'].values.dtype == np.bool_
# Previously, this was mutating the underlying
# index and changing its name
assert_frame_equal(wp['bool'], panel['bool'], check_names=False)
# GH 8704
# with categorical
df = panel.to_frame()
df['category'] = df['str'].astype('category')
# to_panel
# TODO: this converts back to object
p = df.to_panel()
expected = panel.copy()
expected['category'] = 'foo'
assert_panel_equal(p, expected)
def test_to_frame_multi_major(self):
idx = MultiIndex.from_tuples(
[(1, 'one'), (1, 'two'), (2, 'one'), (2, 'two')])
df = DataFrame([[1, 'a', 1], [2, 'b', 1],
[3, 'c', 1], [4, 'd', 1]],
columns=['A', 'B', 'C'], index=idx)
wp = Panel({'i1': df, 'i2': df})
expected_idx = MultiIndex.from_tuples(
[
(1, 'one', 'A'), (1, 'one', 'B'),
(1, 'one', 'C'), (1, 'two', 'A'),
(1, 'two', 'B'), (1, 'two', 'C'),
(2, 'one', 'A'), (2, 'one', 'B'),
(2, 'one', 'C'), (2, 'two', 'A'),
(2, 'two', 'B'), (2, 'two', 'C')
],
names=[None, None, 'minor'])
expected = DataFrame({'i1': [1, 'a', 1, 2, 'b', 1, 3,
'c', 1, 4, 'd', 1],
'i2': [1, 'a', 1, 2, 'b',
1, 3, 'c', 1, 4, 'd', 1]},
index=expected_idx)
result = wp.to_frame()
assert_frame_equal(result, expected)
wp.iloc[0, 0].iloc[0] = np.nan # BUG on setting. GH #5773
result = wp.to_frame()
assert_frame_equal(result, expected[1:])
idx = MultiIndex.from_tuples(
[(1, 'two'), (1, 'one'), (2, 'one'), (np.nan, 'two')])
df = DataFrame([[1, 'a', 1], [2, 'b', 1],
[3, 'c', 1], [4, 'd', 1]],
columns=['A', 'B', 'C'], index=idx)
wp = Panel({'i1': df, 'i2': df})
ex_idx = MultiIndex.from_tuples([(1, 'two', 'A'), (1, 'two', 'B'),
(1, 'two', 'C'),
(1, 'one', 'A'),
(1, 'one', 'B'),
(1, 'one', 'C'),
(2, 'one', 'A'),
(2, 'one', 'B'),
(2, 'one', 'C'),
(np.nan, 'two', 'A'),
(np.nan, 'two', 'B'),
(np.nan, 'two', 'C')],
names=[None, None, 'minor'])
expected.index = ex_idx
result = wp.to_frame()
assert_frame_equal(result, expected)
def test_to_frame_multi_major_minor(self):
cols = MultiIndex(levels=[['C_A', 'C_B'], ['C_1', 'C_2']],
labels=[[0, 0, 1, 1], [0, 1, 0, 1]])
idx = MultiIndex.from_tuples([(1, 'one'), (1, 'two'), (2, 'one'), (
2, 'two'), (3, 'three'), (4, 'four')])
df = DataFrame([[1, 2, 11, 12], [3, 4, 13, 14],
['a', 'b', 'w', 'x'],
['c', 'd', 'y', 'z'], [-1, -2, -3, -4],
[-5, -6, -7, -8]], columns=cols, index=idx)
wp = Panel({'i1': df, 'i2': df})
exp_idx = MultiIndex.from_tuples(
[(1, 'one', 'C_A', 'C_1'), (1, 'one', 'C_A', 'C_2'),
(1, 'one', 'C_B', 'C_1'), (1, 'one', 'C_B', 'C_2'),
(1, 'two', 'C_A', 'C_1'), (1, 'two', 'C_A', 'C_2'),
(1, 'two', 'C_B', 'C_1'), (1, 'two', 'C_B', 'C_2'),
(2, 'one', 'C_A', 'C_1'), (2, 'one', 'C_A', 'C_2'),
(2, 'one', 'C_B', 'C_1'), (2, 'one', 'C_B', 'C_2'),
(2, 'two', 'C_A', 'C_1'), (2, 'two', 'C_A', 'C_2'),
(2, 'two', 'C_B', 'C_1'), (2, 'two', 'C_B', 'C_2'),
(3, 'three', 'C_A', 'C_1'), (3, 'three', 'C_A', 'C_2'),
(3, 'three', 'C_B', 'C_1'), (3, 'three', 'C_B', 'C_2'),
(4, 'four', 'C_A', 'C_1'), (4, 'four', 'C_A', 'C_2'),
(4, 'four', 'C_B', 'C_1'), (4, 'four', 'C_B', 'C_2')],
names=[None, None, None, None])
exp_val = [[1, 1], [2, 2], [11, 11], [12, 12],
[3, 3], [4, 4],
[13, 13], [14, 14], ['a', 'a'],
['b', 'b'], ['w', 'w'],
['x', 'x'], ['c', 'c'], ['d', 'd'], [
'y', 'y'], ['z', 'z'],
[-1, -1], [-2, -2], [-3, -3], [-4, -4],
[-5, -5], [-6, -6],
[-7, -7], [-8, -8]]
result = wp.to_frame()
expected = DataFrame(exp_val, columns=['i1', 'i2'], index=exp_idx)
assert_frame_equal(result, expected)
def test_to_frame_multi_drop_level(self):
idx = MultiIndex.from_tuples([(1, 'one'), (2, 'one'), (2, 'two')])
df = DataFrame({'A': [np.nan, 1, 2]}, index=idx)
wp = Panel({'i1': df, 'i2': df})
result = wp.to_frame()
exp_idx = MultiIndex.from_tuples(
[(2, 'one', 'A'), (2, 'two', 'A')],
names=[None, None, 'minor'])
expected = DataFrame({'i1': [1., 2], 'i2': [1., 2]}, index=exp_idx)
assert_frame_equal(result, expected)
def test_to_panel_na_handling(self):
df = DataFrame(np.random.randint(0, 10, size=20).reshape((10, 2)),
index=[[0, 0, 0, 0, 0, 0, 1, 1, 1, 1],
[0, 1, 2, 3, 4, 5, 2, 3, 4, 5]])
panel = df.to_panel()
assert isna(panel[0].loc[1, [0, 1]]).all()
def test_to_panel_duplicates(self):
# #2441
df = DataFrame({'a': [0, 0, 1], 'b': [1, 1, 1], 'c': [1, 2, 3]})
idf = df.set_index(['a', 'b'])
tm.assert_raises_regex(
ValueError, 'non-uniquely indexed', idf.to_panel)
def test_panel_dups(self):
# GH 4960
# duplicates in an index
# items
data = np.random.randn(5, 100, 5)
no_dup_panel = Panel(data, items=list("ABCDE"))
panel = Panel(data, items=list("AACDE"))
expected = no_dup_panel['A']
result = panel.iloc[0]
assert_frame_equal(result, expected)
expected = no_dup_panel['E']
result = panel.loc['E']
assert_frame_equal(result, expected)
expected = no_dup_panel.loc[['A', 'B']]
expected.items = ['A', 'A']
result = panel.loc['A']
assert_panel_equal(result, expected)
# major
data = np.random.randn(5, 5, 5)
no_dup_panel = Panel(data, major_axis=list("ABCDE"))
panel = Panel(data, major_axis=list("AACDE"))
expected = no_dup_panel.loc[:, 'A']
result = panel.iloc[:, 0]
assert_frame_equal(result, expected)
expected = no_dup_panel.loc[:, 'E']
result = panel.loc[:, 'E']
assert_frame_equal(result, expected)
expected = no_dup_panel.loc[:, ['A', 'B']]
expected.major_axis = ['A', 'A']
result = panel.loc[:, 'A']
assert_panel_equal(result, expected)
# minor
data = np.random.randn(5, 100, 5)
no_dup_panel = Panel(data, minor_axis=list("ABCDE"))
panel = Panel(data, minor_axis=list("AACDE"))
expected = no_dup_panel.loc[:, :, 'A']
result = panel.iloc[:, :, 0]
assert_frame_equal(result, expected)
expected = no_dup_panel.loc[:, :, 'E']
result = panel.loc[:, :, 'E']
assert_frame_equal(result, expected)
expected = no_dup_panel.loc[:, :, ['A', 'B']]
expected.minor_axis = ['A', 'A']
result = panel.loc[:, :, 'A']
assert_panel_equal(result, expected)
def test_filter(self):
pass
def test_compound(self):
compounded = self.panel.compound()
assert_series_equal(compounded['ItemA'],
(1 + self.panel['ItemA']).product(0) - 1,
check_names=False)
def test_shift(self):
# major
idx = self.panel.major_axis[0]
idx_lag = self.panel.major_axis[1]
shifted = self.panel.shift(1)
assert_frame_equal(self.panel.major_xs(idx),
shifted.major_xs(idx_lag))
# minor
idx = self.panel.minor_axis[0]
idx_lag = self.panel.minor_axis[1]
shifted = self.panel.shift(1, axis='minor')
assert_frame_equal(self.panel.minor_xs(idx),
shifted.minor_xs(idx_lag))
# items
idx = self.panel.items[0]
idx_lag = self.panel.items[1]
shifted = self.panel.shift(1, axis='items')
assert_frame_equal(self.panel[idx], shifted[idx_lag])
# negative numbers, #2164
result = self.panel.shift(-1)
expected = Panel({i: f.shift(-1)[:-1]
for i, f in self.panel.iteritems()})
assert_panel_equal(result, expected)
# mixed dtypes #6959
data = [('item ' + ch, makeMixedDataFrame())
for ch in list('abcde')]
data = dict(data)
mixed_panel = Panel.from_dict(data, orient='minor')
shifted = mixed_panel.shift(1)
assert_series_equal(mixed_panel.dtypes, shifted.dtypes)
def test_tshift(self):
# PeriodIndex
ps = tm.makePeriodPanel()
shifted = ps.tshift(1)
unshifted = shifted.tshift(-1)
assert_panel_equal(unshifted, ps)
shifted2 = ps.tshift(freq='B')
assert_panel_equal(shifted, shifted2)
shifted3 = ps.tshift(freq=BDay())
assert_panel_equal(shifted, shifted3)
tm.assert_raises_regex(ValueError, 'does not match',
ps.tshift, freq='M')
# DatetimeIndex
panel = make_test_panel()
shifted = panel.tshift(1)
unshifted = shifted.tshift(-1)
assert_panel_equal(panel, unshifted)
shifted2 = panel.tshift(freq=panel.major_axis.freq)
assert_panel_equal(shifted, shifted2)
inferred_ts = Panel(panel.values, items=panel.items,
major_axis=Index(np.asarray(panel.major_axis)),
minor_axis=panel.minor_axis)
shifted = inferred_ts.tshift(1)
unshifted = shifted.tshift(-1)
assert_panel_equal(shifted, panel.tshift(1))
assert_panel_equal(unshifted, inferred_ts)
no_freq = panel.iloc[:, [0, 5, 7], :]
pytest.raises(ValueError, no_freq.tshift)
def test_pct_change(self):
df1 = DataFrame({'c1': [1, 2, 5], 'c2': [3, 4, 6]})
df2 = df1 + 1
df3 = DataFrame({'c1': [3, 4, 7], 'c2': [5, 6, 8]})
wp = Panel({'i1': df1, 'i2': df2, 'i3': df3})
# major, 1
result = wp.pct_change() # axis='major'
expected = Panel({'i1': df1.pct_change(),
'i2': df2.pct_change(),
'i3': df3.pct_change()})
assert_panel_equal(result, expected)
result = wp.pct_change(axis=1)
assert_panel_equal(result, expected)
# major, 2
result = wp.pct_change(periods=2)
expected = Panel({'i1': df1.pct_change(2),
'i2': df2.pct_change(2),
'i3': df3.pct_change(2)})
assert_panel_equal(result, expected)
# minor, 1
result = wp.pct_change(axis='minor')
expected = Panel({'i1': df1.pct_change(axis=1),
'i2': df2.pct_change(axis=1),
'i3': df3.pct_change(axis=1)})
assert_panel_equal(result, expected)
result = wp.pct_change(axis=2)
assert_panel_equal(result, expected)
# minor, 2
result = wp.pct_change(periods=2, axis='minor')
expected = Panel({'i1': df1.pct_change(periods=2, axis=1),
'i2': df2.pct_change(periods=2, axis=1),
'i3': df3.pct_change(periods=2, axis=1)})
assert_panel_equal(result, expected)
# items, 1
result = wp.pct_change(axis='items')
expected = Panel(
{'i1': DataFrame({'c1': [np.nan, np.nan, np.nan],
'c2': [np.nan, np.nan, np.nan]}),
'i2': DataFrame({'c1': [1, 0.5, .2],
'c2': [1. / 3, 0.25, 1. / 6]}),
'i3': DataFrame({'c1': [.5, 1. / 3, 1. / 6],
'c2': [.25, .2, 1. / 7]})})
assert_panel_equal(result, expected)
result = wp.pct_change(axis=0)
assert_panel_equal(result, expected)
# items, 2
result = wp.pct_change(periods=2, axis='items')
expected = Panel(
{'i1': DataFrame({'c1': [np.nan, np.nan, np.nan],
'c2': [np.nan, np.nan, np.nan]}),
'i2': DataFrame({'c1': [np.nan, np.nan, np.nan],
'c2': [np.nan, np.nan, np.nan]}),
'i3': DataFrame({'c1': [2, 1, .4],
'c2': [2. / 3, .5, 1. / 3]})})
assert_panel_equal(result, expected)
def test_round(self):
values = [[[-3.2, 2.2], [0, -4.8213], [3.123, 123.12],
[-1566.213, 88.88], [-12, 94.5]],
[[-5.82, 3.5], [6.21, -73.272], [-9.087, 23.12],
[272.212, -99.99], [23, -76.5]]]
evalues = [[[float(np.around(i)) for i in j] for j in k]
for k in values]
p = Panel(values, items=['Item1', 'Item2'],
major_axis=date_range('1/1/2000', periods=5),
minor_axis=['A', 'B'])
expected = Panel(evalues, items=['Item1', 'Item2'],
major_axis=date_range('1/1/2000', periods=5),
minor_axis=['A', 'B'])
result = p.round()
assert_panel_equal(expected, result)
def test_numpy_round(self):
values = [[[-3.2, 2.2], [0, -4.8213], [3.123, 123.12],
[-1566.213, 88.88], [-12, 94.5]],
[[-5.82, 3.5], [6.21, -73.272], [-9.087, 23.12],
[272.212, -99.99], [23, -76.5]]]
evalues = [[[float(np.around(i)) for i in j] for j in k]
for k in values]
p = Panel(values, items=['Item1', 'Item2'],
major_axis=date_range('1/1/2000', periods=5),
minor_axis=['A', 'B'])
expected = Panel(evalues, items=['Item1', 'Item2'],
major_axis=date_range('1/1/2000', periods=5),
minor_axis=['A', 'B'])
result = np.round(p)
assert_panel_equal(expected, result)
msg = "the 'out' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.round, p, out=p)
    # Panel is being removed before NumPy enforces this, so just ignore
@pytest.mark.filterwarnings("ignore:Using a non-tuple:FutureWarning")
def test_multiindex_get(self):
ind = MultiIndex.from_tuples(
[('a', 1), ('a', 2), ('b', 1), ('b', 2)],
names=['first', 'second'])
wp = Panel(np.random.random((4, 5, 5)),
items=ind,
major_axis=np.arange(5),
minor_axis=np.arange(5))
f1 = wp['a']
f2 = wp.loc['a']
assert_panel_equal(f1, f2)
assert (f1.items == [1, 2]).all()
assert (f2.items == [1, 2]).all()
MultiIndex.from_tuples([('a', 1), ('a', 2), ('b', 1)],
names=['first', 'second'])
@pytest.mark.filterwarnings("ignore:Using a non-tuple:FutureWarning")
def test_multiindex_blocks(self):
ind = MultiIndex.from_tuples([('a', 1), ('a', 2), ('b', 1)],
names=['first', 'second'])
wp = Panel(self.panel._data)
wp.items = ind
f1 = wp['a']
assert (f1.items == [1, 2]).all()
f1 = wp[('b', 1)]
assert (f1.columns == ['A', 'B', 'C', 'D']).all()
def test_repr_empty(self):
empty = Panel()
repr(empty)
    # ignore our own FutureWarning, emitted because Panel is being removed
@pytest.mark.filterwarnings("ignore:Using:FutureWarning")
def test_rename(self):
mapper = {'ItemA': 'foo', 'ItemB': 'bar', 'ItemC': 'baz'}
renamed = self.panel.rename(items=mapper)
exp = Index(['foo', 'bar', 'baz'])
tm.assert_index_equal(renamed.items, exp)
renamed = self.panel.rename(minor_axis=str.lower)
exp = Index(['a', 'b', 'c', 'd'])
tm.assert_index_equal(renamed.minor_axis, exp)
# don't copy
renamed_nocopy = self.panel.rename(items=mapper, copy=False)
renamed_nocopy['foo'] = 3.
assert (self.panel['ItemA'].values == 3).all()
def test_get_attr(self):
assert_frame_equal(self.panel['ItemA'], self.panel.ItemA)
# specific cases from #3440
self.panel['a'] = self.panel['ItemA']
assert_frame_equal(self.panel['a'], self.panel.a)
self.panel['i'] = self.panel['ItemA']
assert_frame_equal(self.panel['i'], self.panel.i)
def test_from_frame_level1_unsorted(self):
tuples = [('MSFT', 3), ('MSFT', 2), ('AAPL', 2), ('AAPL', 1),
('MSFT', 1)]
midx = MultiIndex.from_tuples(tuples)
df = DataFrame(np.random.rand(5, 4), index=midx)
p = df.to_panel()
assert_frame_equal(p.minor_xs(2), df.xs(2, level=1).sort_index())
def test_to_excel(self):
try:
import xlwt # noqa
import xlrd # noqa
import openpyxl # noqa
from pandas.io.excel import ExcelFile
except ImportError:
pytest.skip("need xlwt xlrd openpyxl")
for ext in ['xls', 'xlsx']:
with ensure_clean('__tmp__.' + ext) as path:
self.panel.to_excel(path)
try:
reader = ExcelFile(path)
except ImportError:
pytest.skip("need xlwt xlrd openpyxl")
for item, df in self.panel.iteritems():
recdf = reader.parse(str(item), index_col=0)
assert_frame_equal(df, recdf)
def test_to_excel_xlsxwriter(self):
try:
import xlrd # noqa
import xlsxwriter # noqa
from pandas.io.excel import ExcelFile
except ImportError:
pytest.skip("Requires xlrd and xlsxwriter. Skipping test.")
with ensure_clean('__tmp__.xlsx') as path:
self.panel.to_excel(path, engine='xlsxwriter')
try:
reader = ExcelFile(path)
except ImportError as e:
pytest.skip("cannot write excel file: %s" % e)
for item, df in self.panel.iteritems():
recdf = reader.parse(str(item), index_col=0)
assert_frame_equal(df, recdf)
@pytest.mark.filterwarnings("ignore:'.reindex:FutureWarning")
def test_dropna(self):
p = Panel(np.random.randn(4, 5, 6), major_axis=list('abcde'))
p.loc[:, ['b', 'd'], 0] = np.nan
result = p.dropna(axis=1)
exp = p.loc[:, ['a', 'c', 'e'], :]
assert_panel_equal(result, exp)
inp = p.copy()
inp.dropna(axis=1, inplace=True)
assert_panel_equal(inp, exp)
result = p.dropna(axis=1, how='all')
assert_panel_equal(result, p)
p.loc[:, ['b', 'd'], :] = np.nan
result = p.dropna(axis=1, how='all')
exp = p.loc[:, ['a', 'c', 'e'], :]
assert_panel_equal(result, exp)
p = Panel(np.random.randn(4, 5, 6), items=list('abcd'))
p.loc[['b'], :, 0] = np.nan
result = p.dropna()
exp = p.loc[['a', 'c', 'd']]
assert_panel_equal(result, exp)
result = p.dropna(how='all')
assert_panel_equal(result, p)
p.loc['b'] = np.nan
result = p.dropna(how='all')
exp = p.loc[['a', 'c', 'd']]
assert_panel_equal(result, exp)
def test_drop(self):
df = DataFrame({"A": [1, 2], "B": [3, 4]})
panel = Panel({"One": df, "Two": df})
def check_drop(drop_val, axis_number, aliases, expected):
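            # dropping via the numeric axis and via each string alias must give the same result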
try:
actual = panel.drop(drop_val, axis=axis_number)
assert_panel_equal(actual, expected)
for alias in aliases:
actual = panel.drop(drop_val, axis=alias)
assert_panel_equal(actual, expected)
except AssertionError:
pprint_thing("Failed with axis_number %d and aliases: %s" %
(axis_number, aliases))
raise
# Items
expected = Panel({"One": df})
check_drop('Two', 0, ['items'], expected)
pytest.raises(KeyError, panel.drop, 'Three')
# errors = 'ignore'
dropped = panel.drop('Three', errors='ignore')
assert_panel_equal(dropped, panel)
dropped = panel.drop(['Two', 'Three'], errors='ignore')
expected = Panel({"One": df})
assert_panel_equal(dropped, expected)
# Major
exp_df = DataFrame({"A": [2], "B": [4]}, index=[1])
expected = Panel({"One": exp_df, "Two": exp_df})
check_drop(0, 1, ['major_axis', 'major'], expected)
exp_df = DataFrame({"A": [1], "B": [3]}, index=[0])
expected = Panel({"One": exp_df, "Two": exp_df})
check_drop([1], 1, ['major_axis', 'major'], expected)
# Minor
exp_df = df[['B']]
expected = Panel({"One": exp_df, "Two": exp_df})
check_drop(["A"], 2, ['minor_axis', 'minor'], expected)
exp_df = df[['A']]
expected = Panel({"One": exp_df, "Two": exp_df})
check_drop("B", 2, ['minor_axis', 'minor'], expected)
def test_update(self):
pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]],
[[1.5, np.nan, 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]]])
other = Panel(
[[[3.6, 2., np.nan], [np.nan, np.nan, 7]]], items=[1])
pan.update(other)
expected = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.], [1.5, np.nan, 3.]],
[[3.6, 2., 3], [1.5, np.nan, 7],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]]])
assert_panel_equal(pan, expected)
def test_update_from_dict(self):
pan = Panel({'one': DataFrame([[1.5, np.nan, 3],
[1.5, np.nan, 3],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]]),
'two': DataFrame([[1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]])})
other = {'two': DataFrame(
[[3.6, 2., np.nan], [np.nan, np.nan, 7]])}
pan.update(other)
expected = Panel(
{'one': DataFrame([[1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]]),
'two': DataFrame([[3.6, 2., 3],
[1.5, np.nan, 7],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]])
}
)
assert_panel_equal(pan, expected)
def test_update_nooverwrite(self):
pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]],
[[1.5, np.nan, 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]]])
other = Panel(
[[[3.6, 2., np.nan], [np.nan, np.nan, 7]]], items=[1])
pan.update(other, overwrite=False)
expected = Panel([[[1.5, np.nan, 3], [1.5, np.nan, 3],
[1.5, np.nan, 3.], [1.5, np.nan, 3.]],
[[1.5, 2., 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]]])
assert_panel_equal(pan, expected)
def test_update_filtered(self):
pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]],
[[1.5, np.nan, 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]]])
other = Panel(
[[[3.6, 2., np.nan], [np.nan, np.nan, 7]]], items=[1])
pan.update(other, filter_func=lambda x: x > 2)
expected = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.], [1.5, np.nan, 3.]],
[[1.5, np.nan, 3], [1.5, np.nan, 7],
[1.5, np.nan, 3.], [1.5, np.nan, 3.]]])
assert_panel_equal(pan, expected)
def test_update_raise(self):
pan = Panel([[[1.5, np.nan, 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]],
[[1.5, np.nan, 3.], [1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3.]]])
pytest.raises(Exception, pan.update, *(pan, ),
**{'raise_conflict': True})
def test_all_any(self):
assert (self.panel.all(axis=0).values == nanall(
self.panel, axis=0)).all()
assert (self.panel.all(axis=1).values == nanall(
self.panel, axis=1).T).all()
assert (self.panel.all(axis=2).values == nanall(
self.panel, axis=2).T).all()
assert (self.panel.any(axis=0).values == nanany(
self.panel, axis=0)).all()
assert (self.panel.any(axis=1).values == nanany(
self.panel, axis=1).T).all()
assert (self.panel.any(axis=2).values == nanany(
self.panel, axis=2).T).all()
def test_all_any_unhandled(self):
pytest.raises(NotImplementedError, self.panel.all, bool_only=True)
pytest.raises(NotImplementedError, self.panel.any, bool_only=True)
# GH issue 15960
def test_sort_values(self):
pytest.raises(NotImplementedError, self.panel.sort_values)
pytest.raises(NotImplementedError, self.panel.sort_values, 'ItemA')
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
class TestPanelFrame(object):
"""
    Check that conversions between Panel and DataFrame work.
"""
def setup_method(self, method):
panel = make_test_panel()
self.panel = panel.to_frame()
self.unfiltered_panel = panel.to_frame(filter_observations=False)
def test_ops_differently_indexed(self):
# trying to set non-identically indexed panel
wp = self.panel.to_panel()
wp2 = wp.reindex(major=wp.major_axis[:-1])
lp2 = wp2.to_frame()
result = self.panel + lp2
assert_frame_equal(result.reindex(lp2.index), lp2 * 2)
# careful, mutation
self.panel['foo'] = lp2['ItemA']
assert_series_equal(self.panel['foo'].reindex(lp2.index),
lp2['ItemA'],
check_names=False)
def test_ops_scalar(self):
result = self.panel.mul(2)
expected = DataFrame.__mul__(self.panel, 2)
assert_frame_equal(result, expected)
def test_combineFrame(self):
wp = self.panel.to_panel()
result = self.panel.add(wp['ItemA'].stack(), axis=0)
assert_frame_equal(result.to_panel()['ItemA'], wp['ItemA'] * 2)
def test_combinePanel(self):
wp = self.panel.to_panel()
result = self.panel.add(self.panel)
wide_result = result.to_panel()
assert_frame_equal(wp['ItemA'] * 2, wide_result['ItemA'])
# one item
result = self.panel.add(self.panel.filter(['ItemA']))
def test_combine_scalar(self):
result = self.panel.mul(2)
expected = DataFrame(self.panel._data) * 2
assert_frame_equal(result, expected)
def test_combine_series(self):
s = self.panel['ItemA'][:10]
result = self.panel.add(s, axis=0)
expected = DataFrame.add(self.panel, s, axis=0)
assert_frame_equal(result, expected)
s = self.panel.iloc[5]
result = self.panel + s
expected = DataFrame.add(self.panel, s, axis=1)
assert_frame_equal(result, expected)
def test_operators(self):
wp = self.panel.to_panel()
result = (self.panel + 1).to_panel()
assert_frame_equal(wp['ItemA'] + 1, result['ItemA'])
def test_arith_flex_panel(self):
ops = ['add', 'sub', 'mul', 'div',
'truediv', 'pow', 'floordiv', 'mod']
if not compat.PY3:
aliases = {}
else:
aliases = {'div': 'truediv'}
self.panel = self.panel.to_panel()
for n in [np.random.randint(-50, -1), np.random.randint(1, 50), 0]:
for op in ops:
alias = aliases.get(op, op)
f = getattr(operator, alias)
exp = f(self.panel, n)
result = getattr(self.panel, op)(n)
assert_panel_equal(result, exp, check_panel_type=True)
# rops
r_f = lambda x, y: f(y, x)
exp = r_f(self.panel, n)
result = getattr(self.panel, 'r' + op)(n)
assert_panel_equal(result, exp)
def test_sort(self):
def is_sorted(arr):
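            # non-decreasing check; sorted labels may repeat, so use >= rather than strict >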
            return (arr[1:] >= arr[:-1]).all()
sorted_minor = self.panel.sort_index(level=1)
assert is_sorted(sorted_minor.index.labels[1])
sorted_major = sorted_minor.sort_index(level=0)
assert is_sorted(sorted_major.index.labels[0])
def test_to_string(self):
buf = StringIO()
self.panel.to_string(buf)
def test_to_sparse(self):
if isinstance(self.panel, Panel):
msg = 'sparsifying is not supported'
tm.assert_raises_regex(NotImplementedError, msg,
self.panel.to_sparse)
def test_truncate(self):
dates = self.panel.index.levels[0]
start, end = dates[1], dates[5]
trunced = self.panel.truncate(start, end).to_panel()
expected = self.panel.to_panel()['ItemA'].truncate(start, end)
# TODO truncate drops index.names
assert_frame_equal(trunced['ItemA'], expected, check_names=False)
trunced = self.panel.truncate(before=start).to_panel()
expected = self.panel.to_panel()['ItemA'].truncate(before=start)
# TODO truncate drops index.names
assert_frame_equal(trunced['ItemA'], expected, check_names=False)
trunced = self.panel.truncate(after=end).to_panel()
expected = self.panel.to_panel()['ItemA'].truncate(after=end)
# TODO truncate drops index.names
assert_frame_equal(trunced['ItemA'], expected, check_names=False)
# truncate on dates that aren't in there
wp = self.panel.to_panel()
new_index = wp.major_axis[::5]
wp2 = wp.reindex(major=new_index)
lp2 = wp2.to_frame()
lp_trunc = lp2.truncate(wp.major_axis[2], wp.major_axis[-2])
wp_trunc = wp2.truncate(wp.major_axis[2], wp.major_axis[-2])
assert_panel_equal(wp_trunc, lp_trunc.to_panel())
# throw proper exception
pytest.raises(Exception, lp2.truncate, wp.major_axis[-2],
wp.major_axis[2])
def test_axis_dummies(self):
from pandas.core.reshape.reshape import make_axis_dummies
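        # make_axis_dummies builds one indicator column per label of the chosen axis level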
minor_dummies = make_axis_dummies(self.panel, 'minor').astype(np.uint8)
assert len(minor_dummies.columns) == len(self.panel.index.levels[1])
major_dummies = make_axis_dummies(self.panel, 'major').astype(np.uint8)
assert len(major_dummies.columns) == len(self.panel.index.levels[0])
mapping = {'A': 'one', 'B': 'one', 'C': 'two', 'D': 'two'}
transformed = make_axis_dummies(self.panel, 'minor',
transform=mapping.get).astype(np.uint8)
assert len(transformed.columns) == 2
tm.assert_index_equal(transformed.columns, Index(['one', 'two']))
# TODO: test correctness
def test_get_dummies(self):
from pandas.core.reshape.reshape import get_dummies, make_axis_dummies
self.panel['Label'] = self.panel.index.labels[1]
minor_dummies = make_axis_dummies(self.panel, 'minor').astype(np.uint8)
dummies = get_dummies(self.panel['Label'])
tm.assert_numpy_array_equal(dummies.values, minor_dummies.values)
def test_mean(self):
means = self.panel.mean(level='minor')
# test versus Panel version
wide_means = self.panel.to_panel().mean('major')
assert_frame_equal(means, wide_means)
def test_sum(self):
sums = self.panel.sum(level='minor')
# test versus Panel version
wide_sums = self.panel.to_panel().sum('major')
assert_frame_equal(sums, wide_sums)
def test_count(self):
index = self.panel.index
major_count = self.panel.count(level=0)['ItemA']
labels = index.labels[0]
for i, idx in enumerate(index.levels[0]):
assert major_count[i] == (labels == i).sum()
minor_count = self.panel.count(level=1)['ItemA']
labels = index.labels[1]
for i, idx in enumerate(index.levels[1]):
assert minor_count[i] == (labels == i).sum()
def test_join(self):
lp1 = self.panel.filter(['ItemA', 'ItemB'])
lp2 = self.panel.filter(['ItemC'])
joined = lp1.join(lp2)
assert len(joined.columns) == 3
pytest.raises(Exception, lp1.join,
self.panel.filter(['ItemB', 'ItemC']))
def test_panel_index():
index = panelm.panel_index([1, 2, 3, 4], [1, 2, 3])
expected = MultiIndex.from_arrays([np.tile([1, 2, 3, 4], 3),
np.repeat([1, 2, 3], 4)],
names=['time', 'panel'])
tm.assert_index_equal(index, expected)
@pytest.mark.filterwarnings("ignore:\\nPanel:FutureWarning")
def test_panel_np_all():
wp = Panel({"A": DataFrame({'b': [1, 2]})})
result = np.all(wp)
assert result == np.bool_(True)
| bsd-3-clause | 1,923,868,422,517,964,500 | 35.580497 | 98 | 0.519267 | false |
maaaks/andreas | andreas/db/model.py | 1 | 3293 | from typing import Dict, List, Optional, Tuple, Type
from playhouse import signals
from andreas.db.database import db
class Model(signals.Model):
class Meta:
database = db
schema = 'andreas'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._models_to_save_after_myself: List[Tuple[Model,Dict]] = []
@classmethod
def table(cls) -> str:
return f'{cls._meta.schema}.{cls._meta.table_name}'
@classmethod
def triggers(cls) -> Optional[Dict[str,str]]:
return None
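    # Illustrative subclass override (hypothetical example, not taken from this
    # project): keys are "<when> <event>" phrases that create_table() below uses
    # verbatim when building the CREATE TRIGGER statement, and values are plpgsql
    # bodies it wraps in a trigger function, e.g.
    #
    #   @classmethod
    #   def triggers(cls):
    #       return {
    #           'before insert': 'new.created := now(); return new',
    #       }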
@classmethod
def create_table(cls, fail_silently=False):
"""
        Creates a table for the given model and creates/recreates all the triggers on it.
"""
super().create_table(fail_silently=fail_silently)
if cls.triggers():
# Remove the old triggers
for event in 'insert', 'update', 'delete', 'truncate':
for when in 'before', 'after', 'instead_of':
db.execute_sql(f'drop trigger if exists {when}_{event} on {cls.table()}')
db.execute_sql(f'drop function if exists on_{cls.table()}_{when}_{event}()')
# Create new triggers
for when, code in cls.triggers().items():
trigger_name = when.replace(' ', '_')
code = code.rstrip('; \t\n\t')
db.execute_sql(
f'create or replace function {cls.table()}_{trigger_name}() returns trigger '
f'as $$ begin {code}; end $$ language plpgsql')
db.execute_sql(
f'create trigger {trigger_name} {when} on {cls.table()} '
f'for each row execute procedure {cls.table()}_{trigger_name}()')
def reload(self):
"""
Updates all the fields from the database.
"""
newer_self = self.get(self._pk_expr())
for field_name in self._meta.fields.keys():
val = getattr(newer_self, field_name)
setattr(self, field_name, val)
self._dirty.clear()
def save_after(self, dependency: 'Model', **kwargs) -> None:
"""
        Registers a handler that will automatically save this model as soon as `dependency` is saved.
This handler works only once and unregisters itself after finishing its work.
"""
dependency._models_to_save_after_myself.append((self, kwargs))
@classmethod
def create_after(cls, dependency: 'Model', **kwargs) -> 'Model':
"""
        Creates an instance and registers a handler that will automatically save it as soon as `dependency` is saved.
This handler works only once and unregisters itself after finishing its work.
"""
instance = cls(**kwargs)
dependency._models_to_save_after_myself.append((instance, {}))
return instance
@signals.post_save()
def post_save(model_class: Type[Model], instance: Model, created: bool):
"""
After an object is saved, all other models that waited for it will be automatically saved, too.
"""
for model, kwargs in instance._models_to_save_after_myself:
model.save(**kwargs)
instance._models_to_save_after_myself = [] | mit | 6,112,106,448,212,380,000 | 37.302326 | 117 | 0.580929 | false |
enavarro222/bblamp | webserver.py | 1 | 5293 | #!/usr/bin/python
#-*- coding:utf-8 -*-
import os
import sys
import json
# Make sure your gevent version is >= 1.0
import gevent
from gevent.wsgi import WSGIServer
from gevent.queue import Queue
from flask import Flask, Response
from flask import render_template, jsonify
from utils import ServerSentEvent
from api import lapps
from api import get_lapp_status
from errors import BBLampException
#TODO: hardware ?
from simulate import simu
import config
# the Flask app
bblamp_app = Flask(__name__)
bblamp_app.debug = True
# app API
bblamp_app.register_blueprint(lapps, url_prefix="/v1")
# lamp simulation API
bblamp_app.register_blueprint(simu, url_prefix="/simu/v1")
# app shared state variables
subscriptions = []
#-------------------------------------------------------------------------------
@bblamp_app.errorhandler(BBLampException)
def handle_invalid_lapp_name(error):
""" BBLampException handler
"""
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
#-------------------------------------------------------------------------------
# lapp log API (push)
@bblamp_app.route("/log/debug")
def log_debug():
return "Currently %d subscriptions" % len(subscriptions)
@bblamp_app.route("/log/subscribe")
def log_subscribe():
def gen():
q = Queue()
subscriptions.append(q)
try:
while True:
result = q.get()
ev = ServerSentEvent(str(result))
yield ev.encode()
except GeneratorExit: # Or maybe use flask signals
subscriptions.remove(q)
return Response(gen(), mimetype="text/event-stream")
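# Development note (assumed invocation, not part of the app): the event stream
# above can be watched with an unbuffered curl session, e.g.
#   curl -N http://localhost:5000/log/subscribe
# Each message pushed via send_data() below arrives as one Server-Sent Event.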
def send_data(dtype, data):
""" Send data to the clients
"""
output = {
"dtype": dtype,
"data": data
}
for sub in subscriptions[:]:
print("%s : %s" % (dtype, data))
sub.put(json.dumps(output))
def new_lapp_output(msg):
send_data("output", msg)
def new_lapp_logmsg(msg):
send_data("log", msg)
def new_lapp_status():
send_data("status", get_lapp_status())
def monitor_logging_file(filename, output_fct):
""" pseudo therad (gevent) that monitor a log file
"""
while True:
try:
with open(filename, "r") as in_file:
#seek to the end
# in order to not send all already in the file lines
in_file.seek(0, os.SEEK_END)
while True:
# check the file still exist
# cf: http://stackoverflow.com/a/12690767
if os.fstat(in_file.fileno()).st_nlink == 0:
break # try to reopen it if it has been deleted
# try to read next line
log_line = in_file.readline()
if log_line != "":
# Search if next lines are for the same log "line"
## wait short time to be sure to not miss next "same log line"
gevent.sleep(2e-3)
last_pos = in_file.tell()
nextline = in_file.readline()
while not (nextline == "" or nextline.startswith("LampApp")): # = not a new log line
log_line += nextline
# wait short time to be sure to not miss next "same log line"
gevent.sleep(2e-3)
last_pos = in_file.tell()
nextline = in_file.readline()
# push log_line
output_fct(log_line)
# and seek back to the next log line (seek to the same position)
in_file.seek(last_pos)
gevent.sleep(0.1)
except IOError as error:
            # file doesn't exist (yet)
if error.errno == 2:
#TODO: add logging
gevent.sleep(1)
else:
raise
def monitor_lapp_logfile():
monitor_logging_file(config.LAPP_LOGFILE, new_lapp_logmsg)
def monitor_lapp_outfile():
monitor_logging_file(config.LAPP_OUTFILE, new_lapp_output)
def monitor_lapp_status():
while True:
last_status = get_lapp_status()
while last_status["hash"] == get_lapp_status()["hash"]:
gevent.sleep(0.4)
new_lapp_status()
gevent.sleep(0.4)
#-------------------------------------------------------------------------------
# single page app getter
@bblamp_app.route("/")
@bblamp_app.route("/<string:lapp_name>")
def main_page(lapp_name=None):
return render_template("index.html")
@bblamp_app.route("/ltest")
def logging_test():
return render_template("log_test.html")
#-------------------------------------------------------------------------------
def main():
print("<run>")
# file monitoring
monitor_log_worker = gevent.spawn(monitor_lapp_logfile)
monitor_output_worker = gevent.spawn(monitor_lapp_outfile)
monitor_status_worker = gevent.spawn(monitor_lapp_status)
# web server
server = WSGIServer(("0.0.0.0", 5000), bblamp_app)
server.serve_forever()
print("<run_done>")
return 0
if __name__ == "__main__":
sys.exit(main())
| agpl-3.0 | 1,784,097,008,131,728,100 | 29.595376 | 108 | 0.536936 | false |
molly/brandeis | tests/testvalidator.py | 1 | 3946 | # -*- coding: utf-8 -*-
# Brandeis - A tool to convert plaintext court cases (from the lochner
# tool: http://gitorious.org/lochner/) to wikitext.
#
# Copyright (C) 2013 Molly White
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from validator import Validator
from bexceptions import *
import os, unittest
class TestFileValidation(unittest.TestCase):
'''Test functions that validate the input files.'''
def setUp(self):
pass
def tearDown(self):
self.buffer.close()
def testGoodTitlePlacement(self):
with open('buffer.txt', 'w', encoding='utf-8') as self.buffer:
self.buffer.write('\n\n\t\t\t\t<h1>Person v. Person - 100 U.S. 25 (2000)</h1>')
v = Validator('buffer.txt')
try:
v.validateTitlePlacement()
except:
self.fail('Validator did not pass a good title.')
def testPoorlyPlacedTitle(self):
with open('buffer.txt', 'w', encoding='utf-8') as self.buffer:
self.buffer.write('\n\n\t\t\t\t<div></div><h1>Person v. Person - 100 U.S. 25 (2000)</h1>')
v = Validator('buffer.txt')
with self.assertRaises(BadTitle, msg='Validator passed a title that was not at the '
'beginning of the file.'):
v.validateTitlePlacement()
def testNoTitle(self):
with open('buffer.txt', 'w', encoding='utf-8') as self.buffer:
self.buffer.write('\n\n\t\t\t')
v = Validator('buffer.txt')
with self.assertRaises(BadTitle, msg='Validator passed a file with no title.'):
v.validateTitlePlacement()
def testGoodTitleParts(self):
with open('buffer.txt', 'w', encoding='utf-8') as self.buffer:
self.buffer.write('\t\t\t\t<h1>Foo v. Bar - 100 U.S. 200 (2013)</h1><div>Extra stuff</div>')
v = Validator('buffer.txt')
try:
v.validateTitleParts()
except:
self.fail('Validator did not pass a title with good parts.')
def testIdentifyCaseGroup(self):
with open('buffer.txt', 'w', encoding='utf-8') as self.buffer:
self.buffer.write('\t\t\t<h1>Group of Cases - 100 U.S. 200 (2013)</h1>\t\t\t')
v = Validator('buffer.txt')
with self.assertRaises(GroupedCase, msg='Validator failed to identify a group of cases'
' as such.'):
v.validateTitleParts()
def testBadTitleDate(self):
with open('buffer.txt', 'w', encoding='utf-8') as self.buffer:
self.buffer.write('<h1>Foo v. Bar - 100 U.S. 200 (203)</h1>')
v = Validator('buffer.txt')
with self.assertRaises(BadTitle, msg='Validator passed a title containing an improperly'
'formatted date.'):
v.validateTitleParts()
def testBadTitleNumber(self):
with open('buffer.txt', 'w', encoding='utf-8') as self.buffer:
self.buffer.write('<h1>Foo v. Bar - U.S. 200 (2013)</h1>')
v = Validator('buffer.txt')
with self.assertRaises(BadTitle, msg='Validator passed a title containing an improperly'
'formatted case number.'):
v.validateTitleParts()
if __name__ == "__main__":
unittest.main()
try:
os.remove('buffer.txt')
except:
pass | gpl-3.0 | 1,786,174,775,887,380,200 | 40.114583 | 104 | 0.610492 | false |
mind1master/aiohttp | tests/test_parser_buffer.py | 1 | 5653 | from unittest import mock
import pytest
from aiohttp import errors, parsers
@pytest.fixture
def stream():
return mock.Mock()
@pytest.fixture
def buf():
return parsers.ParserBuffer()
def test_feed_data(buf):
buf.feed_data(b'')
assert len(buf) == 0
buf.feed_data(b'data')
assert len(buf) == 4
    assert bytes(buf) == b'data'
def test_feed_data_after_exception(buf):
buf.feed_data(b'data')
exc = ValueError()
buf.set_exception(exc)
buf.feed_data(b'more')
assert len(buf) == 4
assert bytes(buf) == b'data'
def test_read_exc(buf):
p = buf.read(3)
next(p)
p.send(b'1')
exc = ValueError()
buf.set_exception(exc)
assert buf.exception() is exc
with pytest.raises(ValueError):
p.send(b'1')
def test_read_exc_multiple(buf):
p = buf.read(3)
next(p)
p.send(b'1')
exc = ValueError()
buf.set_exception(exc)
assert buf.exception() is exc
p = buf.read(3)
with pytest.raises(ValueError):
next(p)
def test_read(buf):
p = buf.read(3)
next(p)
p.send(b'1')
try:
p.send(b'234')
except StopIteration as exc:
res = exc.value
assert res == b'123'
assert b'4' == bytes(buf)
def test_readsome(buf):
p = buf.readsome(3)
next(p)
try:
p.send(b'1')
except StopIteration as exc:
res = exc.value
assert res == b'1'
p = buf.readsome(2)
next(p)
try:
p.send(b'234')
except StopIteration as exc:
res = exc.value
assert res == b'23'
assert b'4' == bytes(buf)
def test_readsome_exc(buf):
buf.set_exception(ValueError())
p = buf.readsome(3)
with pytest.raises(ValueError):
next(p)
def test_wait(buf):
p = buf.wait(3)
next(p)
p.send(b'1')
try:
p.send(b'234')
except StopIteration as exc:
res = exc.value
assert res == b'123'
assert b'1234' == bytes(buf)
def test_wait_exc(buf):
buf.set_exception(ValueError())
p = buf.wait(3)
with pytest.raises(ValueError):
next(p)
def test_skip(buf):
p = buf.skip(3)
next(p)
p.send(b'1')
try:
p.send(b'234')
except StopIteration as exc:
res = exc.value
assert res is None
assert b'4' == bytes(buf)
def test_skip_exc(buf):
buf.set_exception(ValueError())
p = buf.skip(3)
with pytest.raises(ValueError):
next(p)
def test_readuntil_limit(buf):
p = buf.readuntil(b'\n', 4)
next(p)
p.send(b'1')
p.send(b'234')
with pytest.raises(errors.LineLimitExceededParserError):
p.send(b'5')
def test_readuntil_limit2(buf):
p = buf.readuntil(b'\n', 4)
next(p)
with pytest.raises(errors.LineLimitExceededParserError):
p.send(b'12345\n6')
def test_readuntil_limit3(buf):
p = buf.readuntil(b'\n', 4)
next(p)
with pytest.raises(errors.LineLimitExceededParserError):
p.send(b'12345\n6')
def test_readuntil(buf):
p = buf.readuntil(b'\n', 4)
next(p)
p.send(b'123')
try:
p.send(b'\n456')
except StopIteration as exc:
res = exc.value
assert res == b'123\n'
assert b'456' == bytes(buf)
def test_readuntil_exc(buf):
buf.set_exception(ValueError())
p = buf.readuntil(b'\n', 4)
with pytest.raises(ValueError):
next(p)
def test_waituntil_limit(buf):
p = buf.waituntil(b'\n', 4)
next(p)
p.send(b'1')
p.send(b'234')
with pytest.raises(errors.LineLimitExceededParserError):
p.send(b'5')
def test_waituntil_limit2(buf):
p = buf.waituntil(b'\n', 4)
next(p)
with pytest.raises(errors.LineLimitExceededParserError):
p.send(b'12345\n6')
def test_waituntil_limit3(buf):
p = buf.waituntil(b'\n', 4)
next(p)
with pytest.raises(errors.LineLimitExceededParserError):
p.send(b'12345\n6')
def test_waituntil(buf):
p = buf.waituntil(b'\n', 4)
next(p)
p.send(b'123')
try:
p.send(b'\n456')
except StopIteration as exc:
res = exc.value
assert res == b'123\n'
assert b'123\n456' == bytes(buf)
def test_waituntil_exc(buf):
buf.set_exception(ValueError())
p = buf.waituntil(b'\n', 4)
with pytest.raises(ValueError):
next(p)
def test_skipuntil(buf):
p = buf.skipuntil(b'\n')
next(p)
p.send(b'123')
try:
p.send(b'\n456\n')
except StopIteration:
pass
assert b'456\n' == bytes(buf)
p = buf.skipuntil(b'\n')
try:
next(p)
except StopIteration:
pass
assert b'' == bytes(buf)
def test_skipuntil_exc(buf):
buf.set_exception(ValueError())
p = buf.skipuntil(b'\n')
with pytest.raises(ValueError):
next(p)
def test_lines_parser(buf, stream, loop):
out = parsers.FlowControlDataQueue(stream, loop=loop)
p = parsers.LinesParser()(out, buf)
next(p)
for d in (b'line1', b'\r\n', b'lin', b'e2\r', b'\ndata'):
p.send(d)
assert ([(bytearray(b'line1\r\n'), 7), (bytearray(b'line2\r\n'), 7)] ==
list(out._buffer))
try:
p.throw(parsers.EofStream())
except StopIteration:
pass
assert bytes(buf) == b'data'
def test_chunks_parser(stream, loop, buf):
out = parsers.FlowControlDataQueue(stream, loop=loop)
p = parsers.ChunksParser(5)(out, buf)
next(p)
for d in (b'line1', b'lin', b'e2d', b'ata'):
p.send(d)
assert ([(bytearray(b'line1'), 5), (bytearray(b'line2'), 5)] ==
list(out._buffer))
try:
p.throw(parsers.EofStream())
except StopIteration:
pass
assert bytes(buf) == b'data'
| apache-2.0 | -3,526,368,417,391,025,700 | 18.628472 | 75 | 0.584115 | false |
jantman/awslimitchecker | awslimitchecker/tests/test_utils.py | 1 | 19953 | """
awslimitchecker/tests/test_utils.py
The latest version of this package is available at:
<https://github.com/jantman/awslimitchecker>
##############################################################################
Copyright 2015-2018 Jason Antman <[email protected]>
This file is part of awslimitchecker, also known as awslimitchecker.
awslimitchecker is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
awslimitchecker is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with awslimitchecker. If not, see <http://www.gnu.org/licenses/>.
The Copyright and Authors attributions contained herein may not be removed or
otherwise altered, except to add the Author attribution of a contributor to
this work. (Additional Terms pursuant to Section 7b of the AGPL v3)
##############################################################################
While not legally required, I sincerely request that anyone who finds
bugs please submit them at <https://github.com/jantman/awslimitchecker> or
to me via email, and that you send any contributions or improvements
either as a pull request on GitHub, or to me via email.
##############################################################################
AUTHORS:
Jason Antman <[email protected]> <http://www.jasonantman.com>
##############################################################################
"""
import argparse
import pytest
import sys
import termcolor
from awslimitchecker.limit import AwsLimit, AwsLimitUsage
from awslimitchecker.utils import (
StoreKeyValuePair, dict2cols, paginate_dict, _get_dict_value_by_path,
_set_dict_value_by_path, _get_latest_version, color_output,
issue_string_tuple
)
# https://code.google.com/p/mock/issues/detail?id=249
# py>=3.4 should use unittest.mock not the mock package on pypi
if (
sys.version_info[0] < 3 or
sys.version_info[0] == 3 and sys.version_info[1] < 4
):
from mock import call, Mock, patch
else:
from unittest.mock import call, Mock, patch
pbm = 'awslimitchecker.utils'
class TestStoreKeyValuePair(object):
def test_argparse_works(self):
parser = argparse.ArgumentParser()
parser.add_argument('--foo', action='store', type=str)
res = parser.parse_args(['--foo=bar'])
assert res.foo == 'bar'
def test_long(self):
parser = argparse.ArgumentParser()
parser.add_argument('--one', action=StoreKeyValuePair)
res = parser.parse_args(['--one=foo=bar'])
assert res.one == {'foo': 'bar'}
def test_short(self):
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--one', action=StoreKeyValuePair)
res = parser.parse_args(['-o', 'foo=bar'])
assert res.one == {'foo': 'bar'}
def test_multi_long(self):
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--one', action=StoreKeyValuePair)
res = parser.parse_args(['--one=foo=bar', '--one=baz=blam'])
assert res.one == {'foo': 'bar', 'baz': 'blam'}
def test_multi_short(self):
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--one', action=StoreKeyValuePair)
res = parser.parse_args(['-o', 'foo=bar', '-o', 'baz=blam'])
assert res.one == {'foo': 'bar', 'baz': 'blam'}
def test_no_equals(self):
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--one', action=StoreKeyValuePair)
with pytest.raises(SystemExit) as excinfo:
parser.parse_args(['-o', 'foobar'])
if sys.version_info[0] > 2:
msg = excinfo.value.args[0]
else:
msg = excinfo.value.message
assert msg == 2
def test_quoted(self):
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--one', action=StoreKeyValuePair)
res = parser.parse_args([
'-o',
'"foo some"=bar',
'--one="baz other"=blam'
])
assert res.one == {'foo some': 'bar', 'baz other': 'blam'}
class Test_dict2cols(object):
def test_simple(self):
d = {'foo': 'bar', 'baz': 'blam'}
res = dict2cols(d)
assert res == 'baz blam\nfoo bar\n'
def test_spaces(self):
d = {'foo': 'bar', 'baz': 'blam'}
res = dict2cols(d, spaces=4)
assert res == 'baz blam\nfoo bar\n'
def test_less_simple(self):
d = {
'zzz': 'bar',
'aaa': 'blam',
'abcdefghijklmnopqrstuv': 'someothervalue',
}
res = dict2cols(d)
assert res == '' + \
'aaa blam\n' + \
'abcdefghijklmnopqrstuv someothervalue\n' + \
'zzz bar\n'
def test_separator(self):
d = {'foo': 'bar', 'baz': 'blam'}
res = dict2cols(d, spaces=4, separator='.')
assert res == 'baz....blam\nfoo....bar\n'
def test_empty(self):
d = {}
res = dict2cols(d)
assert res == ''
class TestPaginateDict(object):
def test_no_marker_path(self):
func = Mock()
with pytest.raises(Exception) as excinfo:
paginate_dict(func)
ex_str = "alc_marker_path must be specified for queries " \
"that return a dict."
assert ex_str in str(excinfo.value)
def test_no_data_path(self):
func = Mock()
with pytest.raises(Exception) as excinfo:
paginate_dict(func, alc_marker_path=[])
ex_str = "alc_data_path must be specified for queries " \
"that return a dict."
assert ex_str in str(excinfo.value)
def test_no_marker_param(self):
func = Mock()
with pytest.raises(Exception) as excinfo:
paginate_dict(
func,
alc_marker_path=[],
alc_data_path=[]
)
ex_str = "alc_marker_param must be specified for queries " \
"that return a dict."
assert ex_str in str(excinfo.value)
def test_bad_path(self):
result = {
'k1': {
'badpath': {}
}
}
func = Mock()
func.return_value = result
res = paginate_dict(
func,
alc_marker_path=['k1', 'k2', 'Marker'],
alc_data_path=['k1', 'k2', 'Data'],
alc_marker_param='Marker'
)
assert res == result
assert func.mock_calls == [call()]
def test_no_marker(self):
result = {
'k1': {
'k2': {
'Data': []
}
}
}
func = Mock()
func.return_value = result
res = paginate_dict(
func,
alc_marker_path=['k1', 'k2', 'Marker'],
alc_data_path=['k1', 'k2', 'Data'],
alc_marker_param='Marker'
)
assert res == result
assert func.mock_calls == [call()]
def test_two_iterations(self):
e1 = Mock()
e2 = Mock()
e3 = Mock()
e4 = Mock()
e5 = Mock()
e6 = Mock()
func = Mock()
res1 = {
'k1': {
'k2': {
'Data': [e1, e2],
'Foo1': 'bar1',
'Marker': 'marker1'
}
}
}
res2 = {
'k1': {
'k2': {
'Data': [e3, e4],
'Foo2': 'bar2',
'Marker': 'marker2'
}
}
}
res3 = {
'k1': {
'k2': {
'Data': [e5, e6],
'Foo3': 'bar3'
}
}
}
expected = {
'k1': {
'k2': {
'Data': [e1, e2, e3, e4, e5, e6],
'Foo3': 'bar3'
}
}
}
func.side_effect = [res1, res2, res3]
res = paginate_dict(
func,
'foo',
bar='baz',
alc_marker_path=['k1', 'k2', 'Marker'],
alc_data_path=['k1', 'k2', 'Data'],
alc_marker_param='MarkerParam'
)
assert res == expected
assert func.mock_calls == [
call('foo', bar='baz'),
call(
'foo',
bar='baz',
MarkerParam='marker1'
),
call(
'foo',
bar='baz',
MarkerParam='marker2'
)
]
class TestDictFuncs(object):
def test_get_dict_value_by_path(self):
d = {
'foo': {
'bar': {
'baz': 'bazval'
}
}
}
path = ['foo', 'bar', 'baz']
res = _get_dict_value_by_path(d, path)
assert res == 'bazval'
# make sure we don't modify inputs
assert path == ['foo', 'bar', 'baz']
assert d == {
'foo': {
'bar': {
'baz': 'bazval'
}
}
}
def test_get_dict_value_by_path_obj(self):
e1 = Mock()
e2 = Mock()
d = {
'k1': {
'k2': {
'Marker': 'marker2',
'Data': [e1, e2],
'Foo2': 'bar2'
}
}
}
res = _get_dict_value_by_path(d, ['k1', 'k2', 'Data'])
assert res == [e1, e2]
def test_get_dict_value_by_path_none(self):
d = {
'foo': {
'bar': {
'blam': 'blarg'
}
}
}
res = _get_dict_value_by_path(d, ['foo', 'bar', 'baz'])
assert res is None
def test_get_dict_value_by_path_deep_none(self):
d = {'baz': 'blam'}
res = _get_dict_value_by_path(d, ['foo', 'bar', 'baz'])
assert res is None
def test_set_dict_value_by_path(self):
d = {
'foo': {
'bar': {
'baz': 'bazval'
}
}
}
path = ['foo', 'bar', 'baz']
res = _set_dict_value_by_path(d, 'blam', path)
assert res == {
'foo': {
'bar': {
'baz': 'blam'
}
}
}
# make sure we don't modify inputs
assert path == ['foo', 'bar', 'baz']
assert d == {
'foo': {
'bar': {
'baz': 'bazval'
}
}
}
def test_set_dict_value_by_path_none(self):
d = {
'foo': {
'bar': {
'blam': 'blarg'
}
}
}
res = _set_dict_value_by_path(d, 'blam', ['foo', 'bar', 'baz'])
assert res == {
'foo': {
'bar': {
'baz': 'blam',
'blam': 'blarg'
}
}
}
def test_set_dict_value_by_path_deep_none(self):
d = {'foo': 'bar'}
with pytest.raises(TypeError):
_set_dict_value_by_path(d, 'blam', ['foo', 'bar', 'baz'])
def test_set_dict_value_by_path_empty(self):
d = {'foo': 'bar'}
res = _set_dict_value_by_path(d, 'baz', [])
assert res == d
class TestGetCurrentVersion(object):
def test_exception(self):
mock_http = Mock()
with patch('%s._VERSION_TUP' % pbm, (0, 2, 3)):
with patch('%s.urllib3.PoolManager' % pbm, autospec=True) as m_pm:
with patch('%s.logger' % pbm, autospec=True) as mock_logger:
m_pm.return_value = mock_http
mock_http.request.side_effect = RuntimeError()
res = _get_latest_version()
assert res is None
assert mock_logger.mock_calls == [
call.debug('Error getting latest version from PyPI', exc_info=True)
]
def test_older(self):
mock_http = Mock()
mock_resp = Mock(
status=200, data='{"info": {"version": "1.0.1"}}'
)
with patch('%s._VERSION_TUP' % pbm, (0, 2, 3)):
with patch('%s.urllib3.PoolManager' % pbm, autospec=True) as m_pm:
with patch('%s.logger' % pbm, autospec=True) as mock_logger:
m_pm.return_value = mock_http
mock_http.request.return_value = mock_resp
res = _get_latest_version()
assert res == '1.0.1'
assert mock_logger.mock_calls == []
def test_equal(self):
mock_http = Mock()
mock_resp = Mock(
status=200, data='{"info": {"version": "0.2.3"}}'
)
with patch('%s._VERSION_TUP' % pbm, (0, 2, 3)):
with patch('%s.urllib3.PoolManager' % pbm, autospec=True) as m_pm:
with patch('%s.logger' % pbm, autospec=True) as mock_logger:
m_pm.return_value = mock_http
mock_http.request.return_value = mock_resp
res = _get_latest_version()
assert res is None
assert mock_logger.mock_calls == []
def test_newer(self):
mock_http = Mock()
mock_resp = Mock(
status=200, data='{"info": {"version": "0.1.2"}}'
)
with patch('%s._VERSION_TUP' % pbm, (0, 2, 3)):
with patch('%s.urllib3.PoolManager' % pbm, autospec=True) as m_pm:
with patch('%s.logger' % pbm, autospec=True) as mock_logger:
m_pm.return_value = mock_http
mock_http.request.return_value = mock_resp
res = _get_latest_version()
assert res is None
assert mock_logger.mock_calls == []
class TestColorOutput(object):
def test_colored(self):
assert color_output('foo', 'yellow') == termcolor.colored(
'foo', 'yellow')
def test_not_colored(self):
assert color_output(
'foo', 'yellow', colorize=False
) == 'foo'
class TestIssueStringTuple(object):
def test_crit_one(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 12
c1 = AwsLimitUsage(mock_limit, 56)
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[c1],
[]
)
assert res == ('svcname/limitname',
'(limit 12) xXCRITICAL: 56Xx')
assert m_co.mock_calls == [
call('CRITICAL: 56', 'red', colorize=True)
]
def test_crit_multi(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 5
c1 = AwsLimitUsage(mock_limit, 10)
c2 = AwsLimitUsage(mock_limit, 12, resource_id='c2id')
c3 = AwsLimitUsage(mock_limit, 8)
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[c1, c2, c3],
[]
)
assert res == ('svcname/limitname',
'(limit 5) xXCRITICAL: 8, 10, c2id=12Xx')
assert m_co.mock_calls == [
call('CRITICAL: 8, 10, c2id=12', 'red', colorize=True)
]
def test_warn_one(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 12
w1 = AwsLimitUsage(mock_limit, 11)
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[],
[w1]
)
assert res == ('svcname/limitname', '(limit 12) xXWARNING: 11Xx')
assert m_co.mock_calls == [
call('WARNING: 11', 'yellow', colorize=True)
]
def test_warn_multi(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 12
w1 = AwsLimitUsage(mock_limit, 11)
w2 = AwsLimitUsage(mock_limit, 10, resource_id='w2id')
w3 = AwsLimitUsage(mock_limit, 10, resource_id='w3id')
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[],
[w1, w2, w3]
)
assert res == ('svcname/limitname',
'(limit 12) xXWARNING: w2id=10, w3id=10, 11Xx')
assert m_co.mock_calls == [
call('WARNING: w2id=10, w3id=10, 11', 'yellow', colorize=True)
]
def test_both_one(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 12
c1 = AwsLimitUsage(mock_limit, 10)
w1 = AwsLimitUsage(mock_limit, 10, resource_id='w3id')
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[c1],
[w1],
colorize=False
)
assert res == ('svcname/limitname',
'(limit 12) xXCRITICAL: 10Xx xXWARNING: w3id=10Xx')
assert m_co.mock_calls == [
call('CRITICAL: 10', 'red', colorize=False),
call('WARNING: w3id=10', 'yellow', colorize=False)
]
def test_both_multi(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 12
c1 = AwsLimitUsage(mock_limit, 10)
c2 = AwsLimitUsage(mock_limit, 12, resource_id='c2id')
c3 = AwsLimitUsage(mock_limit, 8)
w1 = AwsLimitUsage(mock_limit, 11)
w2 = AwsLimitUsage(mock_limit, 10, resource_id='w2id')
w3 = AwsLimitUsage(mock_limit, 10, resource_id='w3id')
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[c1, c2, c3],
[w1, w2, w3]
)
assert res == ('svcname/limitname',
'(limit 12) xXCRITICAL: 8, 10, c2id=12Xx '
'xXWARNING: w2id=10, w3id=10, 11Xx')
assert m_co.mock_calls == [
call('CRITICAL: 8, 10, c2id=12', 'red', colorize=True),
call('WARNING: w2id=10, w3id=10, 11', 'yellow', colorize=True)
]
| agpl-3.0 | 8,122,354,500,113,535,000 | 30.225352 | 79 | 0.481481 | false |
onebit0fme/conveyance-tornado | examples.py | 1 | 4899 |
PAYLOAD_GET_EXAMPLE = {
"compose": {
"body": {
"type": "object",
"value": {
"response_code": {
"type": "integer",
"value": "@question.$resp.$$status_code",
},
"answer_ids": {
"type": "array",
"value": "@answers",
},
"public": {
"type": "boolean",
"value": "@question.$resp.is_public",
},
"id": {
"type": "integer",
"value": "@question.$resp.id"
}
}
}
},
"resources": {
"post": {
"url": {
"protocol": "http",
"hostname": "jsonplaceholder.typicode.com",
# "path": "/posts/{$post_id}" # TODO: handle references inside strings
"path": "/posts/1"
},
"method": "GET",
"headers": {
"Content-Type": "application/json"
},
},
"comments": {
"url": {
"hostname": "@post.url.hostname",
"protocol": "@post.url.protocol",
"path": "/comments"
},
"method": "GET",
"headers": {
"Content-Type": "application/json"
},
"parameters": {
"post_id": "$post_id"
}
}
},
"definitions": {
"post_id": {
# "type": 'integer',
"value": 1,
"schema": {
"type": "integer"
}
},
"text_obj": {
# "type": 'object',
"value": {
"id": 1,
"name": "Jeronimo"
}
},
"how_to": {
# "type": 'string',
"value": '$this.$do.$$that',
"verbatim": False,
"default": "This is default value"
},
"test": {
# "type": 'object',
"value": '$text_obj.name',
"schema": {
"type": "string"
}
},
"object": {
"value": {
"id": "$post_id",
"name": "$how_to"
},
"schema": {
"type": "object",
"properties": {
"id": {
"type": "integer"
},
"name": {
"type": "string"
}
}
}
}
}
}
PAYLOAD_GET_EXAMPLE_v2 = {
"compose": {
"body": {
"type": "object",
"value": {
"POST": "$post",
"COMMENTS": "$comments"
}
}
},
"resources": {
"post": {
"url": {
"protocol": "http",
"hostname": "jsonplaceholder.typicode.com",
"path": "/posts/1"
},
"method": "GET",
"headers": {
"Content-Type": "application/json"
},
},
"comments": {
"url": {
"hostname": "@post.url.hostname",
"protocol": "@post.url.protocol",
"path": "/posts/1/comments"
},
"method": "GET",
"headers": {
"Content-Type": "application/json"
},
"parameters": {
"post_id": "$post_id"
}
}
},
"definitions": {
"post": {
"value": "@post.$resp"
},
"comments": {
"value": "@comments.$resp"
}
}
}
PAYLOAD_GET_EXAMPLE_3 = {
"compose": {
"body": {
"type": "object",
"value": {
"POST": "@post.$resp",
"USER": "@user.$resp"
}
}
},
"resources": {
"post": {
"url": {
"protocol": "http",
"hostname": "jsonplaceholder.typicode.com",
"path": "/posts/{$post_id}"
},
"method": "GET",
"headers": {
"Content-Type": "application/json"
}
},
"user": {
"url": {
"hostname": "@post.url.hostname",
"protocol": "@post.url.protocol",
"path": "/users/{@post.$resp.userId}"
},
"method": "GET",
"headers": {
"Content-Type": "application/json"
},
"parameters": {
"post_id": "$post_id"
}
}
},
"definitions": {
"post_id": {
"value": 1
}
}
}
| gpl-2.0 | 4,043,723,553,069,304,000 | 24.38342 | 86 | 0.306185 | false |
brettc/bricolage | tests/test_core.py | 1 | 6385 | import bricolage.core as T
import cPickle as pickle
import pathlib
import numpy
def make_target1(a, b, c):
f1 = 0.5 if a and b or not c else 1.0
f2 = 1 if ((a or c) and not (a and b)) and b else 0
return f1, f2
def make_target2(a, b, c):
f1 = 0.25 if (a or b) and (not a and not c) else 1.0
f2 = 1 if ((a or b) and not (a and b)) and c else 0
return f1, f2
def bowtie_target(a, b, c):
if (a and not c) or (b and c):
return [1, 0.5, 0.25]
return [0, 0, 0]
def test_world():
cue = 3
reg = 4
out = 3
p = T.Parameters(cue_channels=cue, reg_channels=reg, out_channels=out)
w = T.World(p)
assert w.cue_channels == cue
assert w.reg_channels == reg
assert w.out_channels == out
assert w.channel_count == 2 + cue + reg + out
def test_target():
p = T.Parameters(cue_channels=3, reg_channels=3, out_channels=2)
w = T.World(p)
t = T.DefaultTarget(w, make_target1)
assert t.as_array().shape == (pow(2, 3), 2)
# Default
assert t.weighting == [0.5, 0.5]
t.weighting = [1, 4]
assert t.weighting == [0.2, 0.8]
def test_pickling_world(tmpdir):
tmpdir = pathlib.Path(str(tmpdir))
p = T.Parameters(seed=99, cue_channels=3, reg_channels=3, out_channels=2)
w = T.World(p)
with open(str(tmpdir / "world1.pickle"), "wb") as f:
pickle.dump(w, f, -1)
with open(str(tmpdir / "world1.pickle"), "rb") as f:
w2 = pickle.load(f)
assert dir(w2.params) == dir(w.params)
assert w.cue_channels == w2.cue_channels
assert w.reg_channels == w2.reg_channels
assert w.out_channels == w2.out_channels
assert w.get_random_state() == w2.get_random_state()
assert w.next_network_id == w2.next_network_id
assert w.next_target_id == w2.next_target_id
def test_pickling_default_target(tmpdir):
tmpdir = pathlib.Path(str(tmpdir))
p = T.Parameters(cue_channels=3, reg_channels=3, out_channels=2)
w = T.World(p)
# Now ensure that pickling Targets works too
t1 = T.DefaultTarget(w, make_target1, name="a")
assert t1.scoring_method == T.ScoringMethod.LINEAR
assert t1.strength == 0.0
t2 = T.DefaultTarget(
w,
make_target2,
name="b",
scoring_method=T.ScoringMethod.EXPONENTIAL,
strength=4.0,
)
t2.weighting = [1, 2]
with open(str(tmpdir / "target1.pickle"), "wb") as f:
pickle.dump((t1, t2), f, -1)
with open(str(tmpdir / "target1.pickle"), "rb") as f:
rt1, rt2 = pickle.load(f)
assert (t1.as_array() == rt1.as_array()).all()
assert (t2.as_array() == rt2.as_array()).all()
assert t1.name == rt1.name
assert t2.name == rt2.name
assert t1.identifier == rt1.identifier
assert t2.identifier == rt2.identifier
assert t1.weighting == rt1.weighting
assert t2.weighting == rt2.weighting
assert t1.scoring_method == rt1.scoring_method
assert t2.scoring_method == rt2.scoring_method
assert t1.strength == rt1.strength
assert t2.strength == rt2.strength
def test_pickling_noisy_target(tmpdir):
tmpdir = pathlib.Path(str(tmpdir))
p = T.Parameters(cue_channels=3, reg_channels=3, out_channels=2)
w = T.World(p)
# Now ensure that pickling Targets works too
t1 = T.NoisyTarget(w, make_target1, name="a")
assert t1.scoring_method == T.ScoringMethod.LINEAR
assert t1.strength == 0.0
assert t1.perturb_count == 1
assert t1.perturb_prop == 1.0
assert t1.env_only == True
t2 = T.NoisyTarget(
w, make_target2, name="b", perturb_count=3, perturb_prop=0.5, env_only=False
)
assert t2.perturb_count == 3
assert t2.perturb_prop == 0.5
assert t2.env_only == False
with open(str(tmpdir / "target1.pickle"), "wb") as f:
pickle.dump((t1, t2), f, -1)
with open(str(tmpdir / "target1.pickle"), "rb") as f:
rt1, rt2 = pickle.load(f)
assert (t1.as_array() == rt1.as_array()).all()
assert (t2.as_array() == rt2.as_array()).all()
assert t1.env_only == rt1.env_only
assert t2.env_only == rt2.env_only
assert t1.perturb_count == rt1.perturb_count
assert t2.perturb_count == rt2.perturb_count
assert t1.perturb_prop == rt1.perturb_prop
assert t2.perturb_prop == rt2.perturb_prop
def test_scoring_methods(bowtie_database):
pop = bowtie_database.population
# Use different identifiers to force recalculation
targ1 = T.DefaultTarget(pop.factory.world, bowtie_target, ident=2)
targ2 = T.DefaultTarget(
pop.factory.world,
bowtie_target,
ident=3,
scoring_method=T.ScoringMethod.EXPONENTIAL,
strength=1,
)
targ3 = T.DefaultTarget(
pop.factory.world,
bowtie_target,
ident=4,
scoring_method=T.ScoringMethod.EXPONENTIAL_VEC,
strength=1,
)
f1 = targ1.assess_collection(pop)
f2 = targ2.assess_collection(pop)
f3 = targ3.assess_collection(pop)
ones1 = numpy.where(f1 == 1.0)[0]
ones2 = numpy.where(f2 == 1.0)[0]
ones3 = numpy.where(f3 == 1.0)[0]
assert (ones1 == ones2).all()
assert (ones1 == ones3).all()
def test_channelstate():
p = T.Parameters(cue_channels=3, reg_channels=4, out_channels=3)
w = T.World(p)
e2 = w.environments[-1]
e2_again = w.environments[-1]
    # We should get the same channel states out.
assert e2 == e2_again
# assert e2 is e2_again
# When we copy, they should be the same, but not identical.
copy_e2 = e2.copy()
assert e2 == copy_e2
assert e2 is not copy_e2
    # Modify the state and check that equality tests still work
copy_e2.flip(0)
assert e2 != copy_e2
copy_e2.flip(0)
assert e2 == copy_e2
def test_random_engine():
p = T.Parameters(cue_channels=3, reg_channels=4, out_channels=3)
w = T.World(p)
w.seed_random_engine(1)
first_time = [w.get_random_double(0, 1) for _ in range(20)]
first_time += [w.get_random_int(0, 100) for _ in range(20)]
w.seed_random_engine(1)
second_time = [w.get_random_double(0, 1) for _ in range(20)]
second_time += [w.get_random_int(0, 100) for _ in range(20)]
assert first_time == second_time
# Now try with state setting
ss = w.get_random_state()
a = [w.get_random_double(0, 1) for _ in range(100)]
w.set_random_state(ss)
b = [w.get_random_double(0, 1) for _ in range(100)]
assert a == b
| gpl-3.0 | 8,647,691,530,775,057,000 | 27.504464 | 84 | 0.617384 | false |
DOAJ/doaj | portality/forms/application_processors.py | 1 | 46290 | import uuid
from datetime import datetime
import portality.notifications.application_emails as emails
from portality.core import app
from portality import models, constants, app_email
from portality.lib.formulaic import FormProcessor
from portality.ui.messages import Messages
from portality.crosswalks.application_form import ApplicationFormXWalk
from portality.crosswalks.journal_form import JournalFormXWalk
from portality.formcontext.choices import Choices
from portality.bll import exceptions
from flask import url_for, request, has_request_context
from flask_login import current_user
from wtforms import FormField, FieldList
class ApplicationProcessor(FormProcessor):
def pre_validate(self):
# to bypass WTForms insistence that choices on a select field match the value, outside of the actual validation
# chain
super(ApplicationProcessor, self).pre_validate()
def _carry_fixed_aspects(self):
if self.source is None:
raise Exception("Cannot carry data from a non-existent source")
now = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
# copy over any important fields from the previous version of the object
created_date = self.source.created_date if self.source.created_date else now
self.target.set_created(created_date)
if "id" in self.source.data:
self.target.data['id'] = self.source.data['id']
try:
if self.source.date_applied is not None:
self.target.date_applied = self.source.date_applied
except AttributeError:
# fixme: should there always be a date_applied? Only true for applications
pass
try:
if self.source.current_application:
self.target.set_current_application(self.source.current_application)
except AttributeError:
# this means that the source doesn't know about current_applications, which is fine
pass
try:
if self.source.current_journal:
self.target.set_current_journal(self.source.current_journal)
except AttributeError:
# this means that the source doesn't know about current_journals, which is fine
pass
try:
if self.source.related_journal:
self.target.set_related_journal(self.source.related_journal)
except AttributeError:
# this means that the source doesn't know about related_journals, which is fine
pass
try:
if self.source.related_applications:
related = self.source.related_applications
for rel in related:
self.target.add_related_application(rel.get("application_id"), rel.get("date_accepted"))
except AttributeError:
# this means that the source doesn't know about related_applications, which is fine
pass
# if the source is a journal, we need to carry the in_doaj flag
if isinstance(self.source, models.Journal):
self.target.set_in_doaj(self.source.is_in_doaj())
def _merge_notes_forward(self, allow_delete=False):
if self.source is None:
raise Exception("Cannot carry data from a non-existent source")
if self.target is None:
raise Exception("Cannot carry data on to a non-existent target - run the xwalk first")
# first off, get the notes (by reference) in the target and the notes from the source
tnotes = self.target.notes
snotes = self.source.notes
# if there are no notes, we might not have the notes by reference, so later will
# need to set them by value
apply_notes_by_value = len(tnotes) == 0
# for each of the target notes we need to get the original dates from the source notes
for n in tnotes:
for sn in snotes:
if n.get("id") == sn.get("id"):
n["date"] = sn.get("date")
# record the positions of any blank notes
i = 0
removes = []
for n in tnotes:
if n.get("note").strip() == "":
removes.append(i)
i += 1
# actually remove all the notes marked for deletion
removes.sort(reverse=True)
for r in removes:
tnotes.pop(r)
# finally, carry forward any notes that aren't already in the target
if not allow_delete:
for sn in snotes:
found = False
for tn in tnotes:
if sn.get("id") == tn.get("id"):
found = True
if not found:
tnotes.append(sn)
if apply_notes_by_value:
self.target.set_notes(tnotes)
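        # Worked example (hypothetical values, comments only): with source notes
        #   [{"id": "a", "note": "old", "date": d1}]
        # and form-produced target notes
        #   [{"id": "a", "note": "old"}, {"id": "b", "note": "  "}]
        # the merge restores d1 onto note "a", drops the blank note "b", and
        # (unless allow_delete is True) re-appends any source note that the form
        # dropped entirely.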
def _carry_continuations(self):
if self.source is None:
raise Exception("Cannot carry data from a non-existent source")
try:
sbj = self.source.bibjson()
tbj = self.target.bibjson()
if sbj.replaces:
tbj.replaces = sbj.replaces
if sbj.is_replaced_by:
tbj.is_replaced_by = sbj.is_replaced_by
if sbj.discontinued_date:
tbj.discontinued_date = sbj.discontinued_date
except AttributeError:
# this means that the source doesn't know about current_applications, which is fine
pass
class NewApplication(ApplicationProcessor):
"""
Public Application Form Context. This is also a sort of demonstrator as to how to implement
one, so it will do unnecessary things like override methods that don't actually need to be overridden.
This should be used in a context where an unauthenticated user is making a request to put a journal into the
DOAJ. It does not have any edit capacity (i.e. the form can only be submitted once), and it does not provide
    any form fields other than the essential journal bibliographic, application bibliographic and contact information
for the suggester. On submission, it will set the status to "pending" and the item will be available for review
by the editors
"""
############################################################
# PublicApplicationForm versions of FormProcessor lifecycle functions
############################################################
def draft(self, account, id=None, *args, **kwargs):
# check for validity
valid = self.validate()
# FIXME: if you can only save a valid draft, you cannot save a draft
# the draft to be saved needs to be valid
#if not valid:
# return None
def _resetDefaults(form):
for field in form:
if field.errors:
if isinstance(field, FormField):
_resetDefaults(field.form)
elif isinstance(field, FieldList):
for sub in field:
if isinstance(sub, FormField):
_resetDefaults(sub)
else:
sub.data = sub.default
else:
field.data = field.default
# if not valid, then remove all fields which have validation errors
if not valid:
_resetDefaults(self.form)
self.form2target()
draft_application = models.DraftApplication(**self.target.data)
if id is not None:
draft_application.set_id(id)
draft_application.set_application_status("draft")
draft_application.set_owner(account.id)
draft_application.save()
return draft_application
def finalise(self, account, save_target=True, email_alert=True, id=None):
super(NewApplication, self).finalise()
# set some administrative data
now = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
self.target.date_applied = now
self.target.set_application_status(constants.APPLICATION_STATUS_PENDING)
self.target.set_owner(account.id)
self.target.set_last_manual_update()
if id:
replacing = models.Application.pull(id)
if replacing is None:
self.target.set_id(id)
else:
if replacing.application_status == constants.APPLICATION_STATUS_PENDING and replacing.owner == account.id:
self.target.set_id(id)
self.target.set_created(replacing.created_date)
# Finally save the target
if save_target:
self.target.save()
# a draft may have been saved, so also remove that
if id:
models.DraftApplication.remove_by_id(id)
if email_alert:
try:
emails.send_received_email(self.target)
except app_email.EmailException as e:
self.add_alert(Messages.FORMS__APPLICATION_PROCESSORS__NEW_APPLICATION__FINALISE__USER_EMAIL_ERROR)
app.logger.exception(Messages.FORMS__APPLICATION_PROCESSORS__NEW_APPLICATION__FINALISE__LOG_EMAIL_ERROR)
class AdminApplication(ApplicationProcessor):
"""
Managing Editor's Application Review form. Should be used in a context where the form warrants full
    admin privileges. It will permit conversion of applications to journals, and assignment of owner account
as well as assignment to editorial group.
"""
def pre_validate(self):
# to bypass WTForms insistence that choices on a select field match the value, outside of the actual validation
# chain
super(AdminApplication, self).pre_validate()
self.form.editor.choices = [(self.form.editor.data, self.form.editor.data)]
# TODO: Should quick_reject be set through this form at all?
self.form.quick_reject.choices = [(self.form.quick_reject.data, self.form.quick_reject.data)]
def patch_target(self):
super(AdminApplication, self).patch_target()
# This patches the target with things that shouldn't change from the source
self._carry_fixed_aspects()
self._merge_notes_forward(allow_delete=True)
# NOTE: this means you can't unset an owner once it has been set. But you can change it.
if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None):
self.target.set_owner(self.source.owner)
def finalise(self, account, save_target=True, email_alert=True):
"""
account is the administrator account carrying out the action
"""
if self.source is None:
raise Exception("You cannot edit a not-existent application")
if self.source.application_status == constants.APPLICATION_STATUS_ACCEPTED:
raise Exception("You cannot edit applications which have been accepted into DOAJ.")
# if we are allowed to finalise, kick this up to the superclass
super(AdminApplication, self).finalise()
# TODO: should these be a BLL feature?
# If we have changed the editors assigned to this application, let them know.
is_editor_group_changed = ApplicationFormXWalk.is_new_editor_group(self.form, self.source)
is_associate_editor_changed = ApplicationFormXWalk.is_new_editor(self.form, self.source)
# record the event in the provenance tracker
models.Provenance.make(account, "edit", self.target)
# delayed import of the DOAJ BLL
from portality.bll.doaj import DOAJ
applicationService = DOAJ.applicationService()
# if the application is already rejected, and we are moving it back into a non-rejected status
if self.source.application_status == constants.APPLICATION_STATUS_REJECTED and self.target.application_status != constants.APPLICATION_STATUS_REJECTED:
try:
applicationService.unreject_application(self.target, current_user._get_current_object(), disallow_status=[])
except exceptions.DuplicateUpdateRequest as e:
self.add_alert(Messages.FORMS__APPLICATION_PROCESSORS__ADMIN_APPLICATION__FINALISE__COULD_NOT_UNREJECT)
return
# if this application is being accepted, then do the conversion to a journal
if self.target.application_status == constants.APPLICATION_STATUS_ACCEPTED:
j = applicationService.accept_application(self.target, account)
# record the url the journal is available at in the admin are and alert the user
if has_request_context(): # fixme: if we handle alerts via a notification service we won't have to toggle on request context
jurl = url_for("doaj.toc", identifier=j.toc_id)
if self.source.current_journal is not None: # todo: are alerts displayed?
self.add_alert('<a href="{url}" target="_blank">Existing journal updated</a>.'.format(url=jurl))
else:
self.add_alert('<a href="{url}" target="_blank">New journal created</a>.'.format(url=jurl))
# Add the journal to the account and send the notification email
try:
owner = models.Account.pull(j.owner)
self.add_alert('Associating the journal with account {username}.'.format(username=owner.id))
owner.add_journal(j.id)
if not owner.has_role('publisher'):
owner.add_role('publisher')
owner.save()
# for all acceptances, send an email to the owner of the journal
if email_alert:
self._send_application_approved_email(j.bibjson().title, owner.name, owner.email, self.source.current_journal is not None)
except AttributeError:
raise Exception("Account {owner} does not exist".format(owner=j.owner))
except app_email.EmailException:
self.add_alert("Problem sending email to suggester - probably address is invalid")
app.logger.exception("Acceptance email to owner failed.")
# if the application was instead rejected, carry out the rejection actions
elif self.source.application_status != constants.APPLICATION_STATUS_REJECTED and self.target.application_status == constants.APPLICATION_STATUS_REJECTED:
# remember whether this was an update request or not
is_update_request = self.target.current_journal is not None
# reject the application
applicationService.reject_application(self.target, current_user._get_current_object())
# if this was an update request, send an email to the owner
if is_update_request and email_alert:
sent = False
send_report = []
try:
send_report = emails.send_publisher_reject_email(self.target, update_request=is_update_request)
sent = True
except app_email.EmailException as e:
pass
if sent:
self.add_alert(Messages.SENT_REJECTED_UPDATE_REQUEST_EMAIL.format(user=self.target.owner, email=send_report[0].get("email"), name=send_report[0].get("name")))
else:
self.add_alert(Messages.NOT_SENT_REJECTED_UPDATE_REQUEST_EMAIL.format(user=self.target.owner))
# the application was neither accepted or rejected, so just save it
else:
self.target.set_last_manual_update()
self.target.save()
if email_alert:
# if revisions were requested, email the publisher
if self.source.application_status != constants.APPLICATION_STATUS_REVISIONS_REQUIRED and self.target.application_status == constants.APPLICATION_STATUS_REVISIONS_REQUIRED:
try:
emails.send_publisher_update_request_revisions_required(self.target)
self.add_alert(Messages.SENT_REJECTED_UPDATE_REQUEST_REVISIONS_REQUIRED_EMAIL.format(user=self.target.owner))
except app_email.EmailException as e:
self.add_alert(Messages.NOT_SENT_REJECTED_UPDATE_REQUEST_REVISIONS_REQUIRED_EMAIL.format(user=self.target.owner))
# if we need to email the editor and/or the associate, handle those here
if is_editor_group_changed:
try:
emails.send_editor_group_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to editor - probably address is invalid")
app.logger.exception("Email to associate failed.")
if is_associate_editor_changed:
try:
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception("Email to associate failed.")
# If this is the first time this application has been assigned to an editor, notify the publisher.
old_ed = self.source.editor
if (old_ed is None or old_ed == '') and self.target.editor is not None:
is_update_request = self.target.current_journal is not None
if is_update_request:
alerts = emails.send_publisher_update_request_editor_assigned_email(self.target)
else:
alerts = emails.send_publisher_application_editor_assigned_email(self.target)
for alert in alerts:
self.add_alert(alert)
# Inform editor and associate editor if this application was 'ready' or 'completed', but has been changed to 'in progress'
if (self.source.application_status == constants.APPLICATION_STATUS_READY or self.source.application_status == constants.APPLICATION_STATUS_COMPLETED) and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS:
# First, the editor
try:
emails.send_editor_inprogress_email(self.target)
self.add_alert('An email has been sent to notify the editor of the change in status.')
except AttributeError:
magic = str(uuid.uuid1())
self.add_alert('Couldn\'t find a recipient for this email - check editor groups are correct. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('No editor recipient for failed review email - ' + magic)
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Sending the failed review email to editor didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending review failed email to editor - ' + magic)
# Then the associate
try:
emails.send_assoc_editor_inprogress_email(self.target)
self.add_alert('An email has been sent to notify the assigned associate editor of the change in status.')
except AttributeError:
magic = str(uuid.uuid1())
self.add_alert('Couldn\'t find a recipient for this email - check an associate editor is assigned. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('No associate editor recipient for failed review email - ' + magic)
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Sending the failed review email to associate editor didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending review failed email to associate editor - ' + magic)
# email other managing editors if this was newly set to 'ready'
if self.source.application_status != constants.APPLICATION_STATUS_READY and self.target.application_status == constants.APPLICATION_STATUS_READY:
# this template requires who made the change, say it was an Admin
ed_id = 'an administrator'
try:
emails.send_admin_ready_email(self.target, editor_id=ed_id)
self.add_alert('A confirmation email has been sent to the Managing Editors.')
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Sending the ready status to managing editors didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending ready status email to managing editors - ' + magic)
def _send_application_approved_email(self, journal_title, publisher_name, email, update_request=False):
"""Email the publisher when an application is accepted (it's here because it's too troublesome to factor out)"""
url_root = request.url_root
if url_root.endswith("/"):
url_root = url_root[:-1]
to = [email]
fro = app.config.get('SYSTEM_EMAIL_FROM', '[email protected]')
if update_request:
subject = app.config.get("SERVICE_NAME", "") + " - update request accepted"
else:
subject = app.config.get("SERVICE_NAME", "") + " - journal accepted"
publisher_name = publisher_name if publisher_name is not None else "Journal Owner"
try:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
msg = Messages.SENT_ACCEPTED_APPLICATION_EMAIL.format(email=email)
template = "email/publisher_application_accepted.txt"
if update_request:
msg = Messages.SENT_ACCEPTED_UPDATE_REQUEST_EMAIL.format(email=email)
template = "email/publisher_update_request_accepted.txt"
jn = journal_title
app_email.send_mail(to=to,
fro=fro,
subject=subject,
template_name=template,
journal_title=jn,
publisher_name=publisher_name,
url_root=url_root
)
self.add_alert(msg)
else:
msg = Messages.NOT_SENT_ACCEPTED_APPLICATION_EMAIL.format(email=email)
if update_request:
msg = Messages.NOT_SENT_ACCEPTED_UPDATE_REQUEST_EMAIL.format(email=email)
self.add_alert(msg)
except Exception as e:
magic = str(uuid.uuid1())
self.add_alert('Sending the journal acceptance information email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
            app.logger.exception('Error sending application approved email - ' + magic)
def validate(self):
_statuses_not_requiring_validation = ['rejected', 'pending', 'in progress', 'on hold']
# make use of the ability to disable validation, otherwise, let it run
if self.form is not None:
if self.form.application_status.data in _statuses_not_requiring_validation:
self.pre_validate()
return True
return super(AdminApplication, self).validate()
class EditorApplication(ApplicationProcessor):
"""
Editors Application Review form. This should be used in a context where an editor who owns an editorial group
is accessing an application. This prevents re-assignment of Editorial group, but permits assignment of associate
editor. It also permits change in application state, except to "accepted"; therefore this form context cannot
be used to create journals from applications. Deleting notes is not allowed, but adding is.
"""
def pre_validate(self):
# Call to super sets all the basic disabled fields
super(EditorApplication, self).pre_validate()
# although the editor_group field is handled by the general pre-validator, we still need to set the choices
# self.form.editor_group.data = self.source.editor_group
self.form.editor.choices = [(self.form.editor.data, self.form.editor.data)]
# This is no longer necessary, is handled by the main pre_validate function
#if self._formulaic.get('application_status').is_disabled:
# self.form.application_status.data = self.source.application_status
# but we do still need to add the overwritten status to the choices for validation
if self.form.application_status.data not in [c[0] for c in self.form.application_status.choices]:
self.form.application_status.choices.append((self.form.application_status.data, self.form.application_status.data))
def patch_target(self):
super(EditorApplication, self).patch_target()
self._carry_fixed_aspects()
self._merge_notes_forward()
self._carry_continuations()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
def finalise(self):
if self.source is None:
raise Exception("You cannot edit a not-existent application")
if self.source.application_status == constants.APPLICATION_STATUS_ACCEPTED:
raise Exception("You cannot edit applications which have been accepted into DOAJ.")
# if we are allowed to finalise, kick this up to the superclass
super(EditorApplication, self).finalise()
# Check the status change is valid
# TODO: we want to rid ourselves of the Choices module
Choices.validate_status_change('editor', self.source.application_status, self.target.application_status)
# FIXME: may want to factor this out of the suggestionformxwalk
new_associate_assigned = ApplicationFormXWalk.is_new_editor(self.form, self.source)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# record the event in the provenance tracker
models.Provenance.make(current_user, "edit", self.target)
# if we need to email the associate because they have just been assigned, handle that here.
if new_associate_assigned:
try:
self.add_alert("New editor assigned - email with confirmation has been sent")
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception('Error sending associate assigned email')
# If this is the first time this application has been assigned to an editor, notify the publisher.
old_ed = self.source.editor
if (old_ed is None or old_ed == '') and self.target.editor is not None:
is_update_request = self.target.current_journal is not None
if is_update_request:
alerts = emails.send_publisher_update_request_editor_assigned_email(self.target)
else:
alerts = emails.send_publisher_application_editor_assigned_email(self.target)
for alert in alerts:
self.add_alert(alert)
# Email the assigned associate if the application was reverted from 'completed' to 'in progress' (failed review)
if self.source.application_status == constants.APPLICATION_STATUS_COMPLETED and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS:
try:
emails.send_assoc_editor_inprogress_email(self.target)
self.add_alert(
'An email has been sent to notify the assigned associate editor of the change in status.')
except AttributeError as e:
magic = str(uuid.uuid1())
self.add_alert(
'Couldn\'t find a recipient for this email - check an associate editor is assigned. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('No associate editor recipient for failed review email - ' + magic)
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert(
'Sending the failed review email to associate editor didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending failed review email to associate editor - ' + magic)
# email managing editors if the application was newly set to 'ready'
if self.source.application_status != constants.APPLICATION_STATUS_READY and self.target.application_status == constants.APPLICATION_STATUS_READY:
# Tell the ManEds who has made the status change - the editor in charge of the group
editor_group_name = self.target.editor_group
editor_group_id = models.EditorGroup.group_exists_by_name(name=editor_group_name)
editor_group = models.EditorGroup.pull(editor_group_id)
editor_acc = editor_group.get_editor_account()
# record the event in the provenance tracker
models.Provenance.make(current_user, "status:ready", self.target)
editor_id = editor_acc.id
try:
emails.send_admin_ready_email(self.target, editor_id=editor_id)
self.add_alert('A confirmation email has been sent to the Managing Editors.')
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert(
'Sending the ready status to managing editors didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending ready status email to managing editors - ' + magic)
class AssociateApplication(ApplicationProcessor):
"""
Associate Editors Application Review form. This is to be used in a context where an associate editor (fewest rights)
needs to access an application for review. This editor cannot change the editorial group or the assigned editor.
They also cannot change the owner of the application. They cannot set an application to "Accepted" so this form can't
be used to create a journal from an application. They cannot delete, only add notes.
"""
def pre_validate(self):
# Call to super sets all the basic disabled fields
super(AssociateApplication, self).pre_validate()
# no longer necessary, handled by superclass pre_validate
#if self._formulaic.get('application_status').is_disabled:
# self.form.application_status.data = self.source.application_status
# but we do still need to add the overwritten status to the choices for validation
if self.form.application_status.data not in [c[0] for c in self.form.application_status.choices]:
self.form.application_status.choices.append(
(self.form.application_status.data, self.form.application_status.data))
def patch_target(self):
if self.source is None:
raise Exception("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self._merge_notes_forward()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self.target.set_editor(self.source.editor)
self.target.set_seal(self.source.has_seal())
self._carry_continuations()
def finalise(self):
# if we are allowed to finalise, kick this up to the superclass
super(AssociateApplication, self).finalise()
# Check the status change is valid
Choices.validate_status_change('associate', self.source.application_status, self.target.application_status)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# record the event in the provenance tracker
models.Provenance.make(current_user, "edit", self.target)
# inform publisher if this was set to 'in progress' from 'pending'
if self.source.application_status == constants.APPLICATION_STATUS_PENDING and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
is_update_request = self.target.current_journal is not None
if is_update_request:
alerts = emails.send_publisher_update_request_inprogress_email(self.target)
else:
alerts = emails.send_publisher_application_inprogress_email(self.target)
for alert in alerts:
self.add_alert(alert)
else:
self.add_alert(Messages.IN_PROGRESS_NOT_SENT_EMAIL_DISABLED)
# inform editor if this was newly set to 'completed'
if self.source.application_status != constants.APPLICATION_STATUS_COMPLETED and self.target.application_status == constants.APPLICATION_STATUS_COMPLETED:
# record the event in the provenance tracker
models.Provenance.make(current_user, "status:completed", self.target)
try:
emails.send_editor_completed_email(self.target)
self.add_alert('A confirmation email has been sent to notify the editor of the change in status.')
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert(
                    'Sending the completed status email to the editor didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending completed status email to editor - ' + magic)
class PublisherUpdateRequest(ApplicationProcessor):
def pre_validate(self):
if self.source is None:
raise Exception("You cannot validate a form from a non-existent source")
super(ApplicationProcessor, self).pre_validate()
# no longer required, handled by call to superclass pre_validate
# carry forward the disabled fields
#bj = self.source.bibjson()
#self.form.title.data = bj.title
#self.form.alternative_title.data = bj.alternative_title
#pissn = bj.pissn
#if pissn == "": pissn = None
#self.form.pissn.data = pissn
#eissn = bj.eissn
#if eissn == "": eissn = None
#self.form.eissn.data = eissn
def patch_target(self):
if self.source is None:
raise Exception("You cannot patch a target from a non-existent source")
self._carry_subjects_and_seal()
self._carry_fixed_aspects()
self._merge_notes_forward()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self.target.set_editor(self.source.editor)
self._carry_continuations()
# we carry this over for completeness, although it will be overwritten in the finalise() method
self.target.set_application_status(self.source.application_status)
def finalise(self, save_target=True, email_alert=True):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise Exception("You cannot edit a not-existent application")
# if we are allowed to finalise, kick this up to the superclass
super(PublisherUpdateRequest, self).finalise()
# set the status to update_request (if not already)
self.target.set_application_status(constants.APPLICATION_STATUS_UPDATE_REQUEST)
# Save the target
self.target.set_last_manual_update()
if save_target:
saved = self.target.save()
if saved is None:
raise Exception("Save on application failed")
# obtain the related journal, and attach the current application id to it
journal_id = self.target.current_journal
from portality.bll.doaj import DOAJ
journalService = DOAJ.journalService()
if journal_id is not None:
journal, _ = journalService.journal(journal_id)
if journal is not None:
journal.set_current_application(self.target.id)
if save_target:
saved = journal.save()
if saved is None:
raise Exception("Save on journal failed")
else:
self.target.remove_current_journal()
# email the publisher to tell them we received their update request
if email_alert:
try:
self._send_received_email()
except app_email.EmailException as e:
self.add_alert("We were unable to send you an email confirmation - possible problem with your email address")
app.logger.exception('Error sending reapplication received email to publisher')
def _carry_subjects_and_seal(self):
# carry over the subjects
source_subjects = self.source.bibjson().subject
self.target.bibjson().subject = source_subjects
# carry over the seal
self.target.set_seal(self.source.has_seal())
def _send_received_email(self):
acc = models.Account.pull(self.target.owner)
if acc is None:
self.add_alert("Unable to locate account for specified owner")
return
journal_name = self.target.bibjson().title #.encode('utf-8', 'replace')
to = [acc.email]
fro = app.config.get('SYSTEM_EMAIL_FROM', '[email protected]')
subject = app.config.get("SERVICE_NAME","") + " - update request received"
try:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
app_email.send_mail(to=to,
fro=fro,
subject=subject,
template_name="email/publisher_update_request_received.txt",
journal_name=journal_name,
username=self.target.owner
)
self.add_alert('A confirmation email has been sent to ' + acc.email + '.')
except app_email.EmailException as e:
magic = str(uuid.uuid1())
self.add_alert('Hm, sending the "update request received" email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.error(magic + "\n" + repr(e))
raise e
class PublisherUpdateRequestReadOnly(ApplicationProcessor):
"""
Read Only Application form for publishers. Nothing can be changed. Useful to show publishers what they
currently have submitted for review
"""
def finalise(self):
raise Exception("You cannot edit applications using the read-only form")
###############################################
### Journal form processors
###############################################
class ManEdJournalReview(ApplicationProcessor):
"""
Managing Editor's Journal Review form. Should be used in a context where the form warrants full
admin privileges. It will permit doing every action.
"""
def patch_target(self):
if self.source is None:
raise Exception("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self._merge_notes_forward(allow_delete=True)
# NOTE: this means you can't unset an owner once it has been set. But you can change it.
if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None):
self.target.set_owner(self.source.owner)
def finalise(self):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise Exception("You cannot edit a not-existent journal")
# if we are allowed to finalise, kick this up to the superclass
super(ManEdJournalReview, self).finalise()
# FIXME: may want to factor this out of the suggestionformxwalk
        # If we have changed the editors assigned to this application, let them know.
is_editor_group_changed = JournalFormXWalk.is_new_editor_group(self.form, self.source)
is_associate_editor_changed = JournalFormXWalk.is_new_editor(self.form, self.source)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# if we need to email the editor and/or the associate, handle those here
if is_editor_group_changed:
try:
emails.send_editor_group_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to editor - probably address is invalid")
app.logger.exception('Error sending assignment email to editor.')
if is_associate_editor_changed:
try:
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception('Error sending assignment email to associate.')
def validate(self):
# make use of the ability to disable validation, otherwise, let it run
if self.form is not None:
if self.form.make_all_fields_optional.data:
self.pre_validate()
return True
return super(ManEdJournalReview, self).validate()
class EditorJournalReview(ApplicationProcessor):
"""
Editors Journal Review form. This should be used in a context where an editor who owns an editorial group
is accessing a journal. This prevents re-assignment of Editorial group, but permits assignment of associate
editor.
"""
def patch_target(self):
if self.source is None:
raise Exception("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self._merge_notes_forward()
self._carry_continuations()
def pre_validate(self):
# call to super handles all the basic disabled field
super(EditorJournalReview, self).pre_validate()
# although the superclass sets the value of the disabled field, we still need to set the choices
# self.form.editor_group.data = self.source.editor_group
self.form.editor.choices = [(self.form.editor.data, self.form.editor.data)]
def finalise(self):
if self.source is None:
raise Exception("You cannot edit a not-existent journal")
# if we are allowed to finalise, kick this up to the superclass
super(EditorJournalReview, self).finalise()
email_associate = ApplicationFormXWalk.is_new_editor(self.form, self.source)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# if we need to email the associate, handle that here.
if email_associate:
try:
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception('Error sending assignment email to associate.')
class AssEdJournalReview(ApplicationProcessor):
"""
Associate Editors Journal Review form. This is to be used in a context where an associate editor (fewest rights)
needs to access a journal for review. This editor cannot change the editorial group or the assigned editor.
They also cannot change the owner of the journal. They cannot delete, only add notes.
"""
def patch_target(self):
if self.source is None:
raise Exception("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self._merge_notes_forward()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self.target.set_editor(self.source.editor)
self._carry_continuations()
def finalise(self):
if self.source is None:
raise Exception("You cannot edit a not-existent journal")
# if we are allowed to finalise, kick this up to the superclass
super(AssEdJournalReview, self).finalise()
# Save the target
self.target.set_last_manual_update()
self.target.save()
class ReadOnlyJournal(ApplicationProcessor):
"""
Read Only Journal form. Nothing can be changed. Useful for reviewing a journal and an application
(or update request) side by side in 2 browser windows or tabs.
"""
def form2target(self):
pass # you can't edit objects using this form
def patch_target(self):
pass # you can't edit objects using this form
def finalise(self):
raise Exception("You cannot edit journals using the read-only form")
class ManEdBulkEdit(ApplicationProcessor):
"""
    Managing Editor's Journal bulk edit form. Should be used in a context where the form warrants full
admin privileges. It will permit doing every action.
"""
pass
| apache-2.0 | 428,683,396,306,092,600 | 47.62395 | 241 | 0.631475 | false |
jenmud/behave-graph | behave_graph/__init__.py | 1 | 2422 | """
Setup the environment by parsing the command line options and staring
a ruruki http server.
"""
import argparse
import logging
import os
from behave.configuration import Configuration
from behave.runner import Runner, parse_features
from ruruki_eye.server import run
from behave_graph.scrape import GRAPH
from behave_graph.scrape import scrape_features
__all__ = ["load"]
def load(path):
"""
Load the given path that contains the features and steps.
:param path: Path where the feature and steps files can be found.
:type path: :class:`str`
:returns: A behave runner.
:rtype: :class:`behave.runner.Runner`
"""
try:
config = Configuration(path)
runner = Runner(config)
features = parse_features(
[f.filename for f in runner.feature_locations()]
)
scrape_features(features)
return runner
except Exception as error: # pylint: disable=broad-except
logging.exception(
"Unexpected error creating configuration %r: %r",
path, error
)
raise argparse.ArgumentTypeError(error)
def parse_arguments():
"""
Parse the command line arguments.
:returns: All the command line arguments.
:rtype: :class:`argparse.Namespace`
"""
parser = argparse.ArgumentParser(
description="Behave dependency grapher."
)
parser.add_argument(
"-b",
"--base-dir",
default=os.getcwd(),
type=load,
help=(
"Behave base directory path "
"where features and steps can be found. "
"(default: %(default)s)"
),
)
parser.add_argument(
"--runserver",
action="store_true",
help="Start a ruruki http server.",
)
parser.add_argument(
"--address",
default="0.0.0.0",
help="Address to start the web server on. (default: %(default)s)",
)
parser.add_argument(
"--port",
type=int,
default=8000,
help=(
"Port number that the web server will accept connections on. "
"(default: %(default)d)"
),
)
return parser.parse_args()
def main():
"""
Entry point.
"""
logging.basicConfig(level=logging.INFO)
namespace = parse_arguments()
if namespace.runserver is True:
run(namespace.address, namespace.port, False, GRAPH)
| mit | 1,787,216,094,776,277,800 | 23.22 | 74 | 0.603633 | false |
brainix/social-butterfly | channels.py | 1 | 6608 | #-----------------------------------------------------------------------------#
# channels.py #
# #
# Copyright (c) 2010-2012, Code A La Mode, original authors. #
# #
# This file is part of Social Butterfly. #
# #
# Social Butterfly is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License as #
# published by the Free Software Foundation, either version 3 of the #
# License, or (at your option) any later version. #
# #
# Social Butterfly is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with Social Butterfly. If not, see: #
# <http://www.gnu.org/licenses/>. #
#-----------------------------------------------------------------------------#
"""Datastore model and public API for Google App Engine channel management."""
import datetime
import logging
import random
from google.appengine.api import channel
from google.appengine.ext import db
from google.appengine.ext import deferred
from google.appengine.runtime import DeadlineExceededError
from config import NUM_RETRIES
_log = logging.getLogger(__name__)
class Channel(db.Model):
"""Datastore model and public API for Google App Engine channel management.
Google App Engine implements channels (similar to Comet or WebSockets) for
real-time cloud to browser communication. But App Engine only provides the
communication primitives. We need to persist additional data about the
open channels, so that we know who to broadcast the messages to.
"""
name = db.StringProperty()
datetime = db.DateTimeProperty(required=True, auto_now=True)
@classmethod
def create(cls, name=None):
"""Create a channel and return its token."""
_log.info('creating channel')
def txn():
for retry in range(NUM_RETRIES):
client_id = 'client' + str(random.randint(0, 10 ** 8 - 1))
chan = cls.get_by_key_name(client_id)
if chan is None:
chan = cls(key_name=client_id, name=name)
chan.put()
return client_id
client_id = db.run_in_transaction(txn)
if client_id is None:
_log.warning("couldn't create channel; couldn't allocate ID")
else:
token = channel.create_channel(client_id)
_countdown = 2 * 60 * 60
deferred.defer(cls.destroy, client_id, _countdown=_countdown)
_log.info('created channel %s, token %s' % (client_id, token))
return token
@classmethod
def destroy(cls, client_id):
"""Destroy the specified channel."""
_log.info('destroying channel %s' % client_id)
chan = cls.get_by_key_name(client_id)
if chan is None:
body = "couldn't destroy channel %s; already destroyed" % client_id
_log.info(body)
else:
db.delete(chan)
_log.info('destroyed channel %s' % client_id)
@classmethod
def broadcast(cls, json, name=None):
"""Schedule broadcasting the specified JSON string to all channels."""
_log.info('deferring broadcasting JSON to all connected channels')
channels = cls.all()
if name is not None:
channels = channels.filter('name =', name)
channels = channels.count(1)
if channels:
deferred.defer(cls._broadcast, json, name=name, cursor=None)
_log.info('deferred broadcasting JSON to all connected channels')
else:
body = 'not deferring broadcasting JSON (no connected channels)'
_log.info(body)
@classmethod
def _broadcast(cls, json, name=None, cursor=None):
"""Broadcast the specified JSON string to all channels."""
_log.info('broadcasting JSON to all connected channels')
keys = cls.all(keys_only=True)
if name is not None:
keys = keys.filter('name = ', name)
if cursor is not None:
keys = keys.with_cursor(cursor)
num_channels = 0
try:
for key in keys:
client_id = key.name()
channel.send_message(client_id, json)
# There's a chance that Google App Engine will throw the
# DeadlineExceededError exception at this point in the flow of
# execution. In this case, the current channel will have
# already received our JSON broadcast, but the cursor will not
# have been updated. So on the next go-around, the current
# channel will receive our JSON broadcast again. I'm just
# documenting this possibility, but it shouldn't be a big deal.
cursor = keys.cursor()
num_channels += 1
except DeadlineExceededError:
_log.info('broadcasted JSON to %s channels' % num_channels)
_log.warning("deadline; deferring broadcast to remaining channels")
deferred.defer(cls._broadcast, json, name=name, cursor=cursor)
else:
_log.info('broadcasted JSON to %s channels' % num_channels)
_log.info('broadcasted JSON to all connected channels')
@classmethod
def flush(cls):
"""Destroy all channels created over two hours ago."""
_log.info('destroying all channels over two hours old')
now = datetime.datetime.now()
timeout = datetime.timedelta(hours=2)
expiry = now - timeout
keys = cls.all(keys_only=True).filter('datetime <=', expiry)
db.delete(keys)
_log.info('destroyed all channels over two hours old')
| gpl-3.0 | -4,261,593,968,478,345,000 | 45.20979 | 79 | 0.544492 | false |
Jumpscale/core9 | JumpScale9/tools/develop/CodeDirs.py | 1 | 5548 | from js9 import j
JSBASE = j.application.jsbase_get_class()
class CodeDirs(JSBASE):
def __init__(self):
JSBASE.__init__(self)
self.path = j.dirs.CODEDIR
self.load()
def load(self):
data = j.core.state.stateGetFromDict("develop", "codedirs", "")
self.tree = j.data.treemanager.get(data=data)
self.tree.setDeleteState() # set all on deleted state
types = j.sal.fs.listDirsInDir(j.dirs.CODEDIR, False, True)
# types2 = []
for ttype in types:
self.tree.set(ttype, cat="type")
# types2.append(currootTree)
if ttype[0] == "." or ttype[0] == "_":
continue
accounts = j.sal.fs.listDirsInDir("%s/%s" % (j.dirs.CODEDIR, ttype), False, True)
for account in accounts:
if account[0] == "." or account[0] == "_":
continue
path = "%s.%s" % (ttype, account)
self.tree.set(path, cat="account")
repos = j.sal.fs.listDirsInDir("%s/%s/%s" % (j.dirs.CODEDIR, ttype, account), False, True)
for repo in repos:
if not repo.startswith(".") and not account.startswith("."):
path = "%s.%s.%s" % (ttype, account, repo)
self.tree.set(path, cat="repo", item=CodeDir(self, ttype, account, repo))
self.tree.removeDeletedItems() # make sure that the ones no longer there are deleted
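        # After load() the tree is keyed by dotted paths, for example (illustrative
        # layout, assuming a checkout like <CODEDIR>/github/jumpscale/core9):
        #   "github"                  cat="type"
        #   "github.jumpscale"        cat="account"
        #   "github.jumpscale.core9"  cat="repo"   (carries a CodeDir item)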
# @property
# def codedirs(self):
# return self.tree.find("", getItems=True)
# def codeDirsGetAsStringList(self):
# res = []
# for codedir in self.codedirs:
# res.append(str(codedir))
# res.sort()
# return res
def getActiveCodeDirs(self):
res = []
for item in self.tree.find(cat="repo"):
if item.selected:
# path=j.dirs.CODEDIR+"/"+item.path.replace(".","/")
ttype, account, name = item.path.split(".")
res.append(CodeDir(self, ttype=ttype, account=account, name=name))
return res
def get(self, type, account, reponame):
return CodeDir(self, type, account, reponame)
def codeDirGet(self, reponame, account=None, die=True):
res = []
        for item in self.tree.find("", getItems=True):
            if account is None or item.account == account:
                if item.name == reponame:
                    res.append(item)
if len(res) == 0:
if die is False:
return None
raise j.exceptions.Input("did not find codedir: %s:%s" % (account, reponame))
if len(res) > 1:
raise j.exceptions.Input("found more than 1 codedir: %s:%s" % (account, reponame))
return res[0]
def save(self):
j.core.state.stateSetInDict("develop", "codedirs", self.tree.dumps())
# def selectionGet(self):
# coderepos = j.core.state.configGetFromDict("developtools", "coderepos", default=[])
# res = []
# for account, reponame in coderepos:
# res.append(self.codeDirGet(account=account, reponame=reponame))
# return res
# def _selectionGet(self):
# """
# will return as position in list e.g. [2,3] would mean position 3&4 in sorted list of the coderepo's
# """
# sel0 = self.codeDirsGetAsStringList()
# sel = [str(item) for item in self.selectionGet()]
# res = []
# for item in sel:
# # is string in selection
# try:
# col = sel0.index(item)
# # means it exists
# res.append(col)
# except:
# pass
# return res
#
# def _selectionSet(self, selection):
# slist = self.codeDirsGetAsStringList()
# res = []
# for item in selection:
# selectedItem = slist[item]
# account, name = selectedItem.split(":", 1)
# account = account.strip()
# name = name.strip()
# res.append([account, name])
# j.core.state.configSetInDict("developtools", "coderepos", res)
# def selectionSet(self, codedir):
# """
# will set the code dir as selected in the jumpscale config file
# """
# if not self.selectionExists(codedir):
# items = j.core.state.configGetFromDict("developtools", "coderepos", default=[])
# items.append([codedir.account, codedir.name])
# j.core.state.configSetInDict("developtools", "coderepos", items)
# j.core.state.configSave()
#
# def selectionExists(self, codedir):
# return str(codedir) in self.codeDirsGetAsStringList()
def __repr__(self):
return self.__str__()
def __str__(self):
return ("%s" % self.tree)
class CodeDir(JSBASE):
def __init__(self, codedirs, ttype, account, name):
JSBASE.__init__(self)
self.path = j.sal.fs.joinPaths(codedirs.path, ttype, account, name)
self.account = account
self.type = ttype
self.name = name
def __repr__(self):
return self.__str__()
def __str__(self):
return ("%-22s : %s" % (self.account, self.name))
| apache-2.0 | -3,457,550,131,889,815,000 | 35.741722 | 109 | 0.53641 | false |
eykd/fuzzy-octo-bear | tests/test_map_loader.py | 1 | 1227 | from unittest import TestCase
from ensure import ensure
from path import path
from fuzzy.map import load_game_map
from fuzzy.rooms import Room
from fuzzy.exits import Exit
PATH = path(__file__).abspath().dirname()
class MapLoaderTests(TestCase):
def setUp(self):
self.filename = PATH / 'rooms.yaml'
def test_it_should_construct_a_map_from_the_yaml_file(self):
start_room = load_game_map(self.filename)
ensure(start_room).is_a(Room)
ensure(start_room.exits).has_length(2)
ensure(start_room.exits).is_a(list).of(Exit)
ensure(start_room.exits[0].target).is_a(Room)
ensure(start_room.exits[0].target).is_not(start_room)
room_3 = start_room.exits[1].target
ensure(room_3.exits).has_length(4)
ensure(room_3.exits).is_a(list).of(Exit)
room_6 = room_3.exits[2].target
ensure(room_6).is_a(Room)
ensure(room_6.exits).has_length(2)
ensure(room_6.description).equals("A nondescript room")
room_7 = room_3.exits[3].target
ensure(room_7).is_a(Room)
ensure(room_7.exits).has_length(2)
ensure(room_7.description).equals("A nondescript room")
ensure(room_6).is_not(room_7)
| gpl-2.0 | -8,923,510,156,966,951,000 | 29.675 | 64 | 0.647107 | false |
adamsd5/yavalath | memorycontrol.py | 1 | 4959 | """This holds a routine for restricting the current process memory on Windows."""
import multiprocessing
import ctypes
def set_memory_limit(memory_limit):
"""Creates a new unnamed job object and assigns the current process to it.
The job object will have the given memory limit in bytes: the given process
together with its descendant processes will not be allowed to exceed
the limit. If purge_pid_on_exit is true, when the *calling* process exits
(the calling process can be the same or different from the given process),
the given process and all its descendant processes will be killed."""
import os
pid = os.getpid()
purge_pid_on_exit = True
# Windows API constants, used for OpenProcess and SetInformationJobObject.
PROCESS_TERMINATE = 0x1
PROCESS_SET_QUOTA = 0x100
JobObjectExtendedLimitInformation = 9
JOB_OBJECT_LIMIT_PROCESS_MEMORY = 0x100
JOB_OBJECT_LIMIT_JOB_MEMORY = 0x200
JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE = 0x2000
class JOBOBJECT_EXTENDED_LIMIT_INFORMATION(ctypes.Structure):
"""Windows API structure, used as input to SetInformationJobObject."""
class JOBOBJECT_BASIC_LIMIT_INFORMATION(ctypes.Structure):
_fields_ = [("PerProcessUserTimeLimit", ctypes.c_int64),
("PerJobUserTimeLimit", ctypes.c_int64),
("LimitFlags", ctypes.c_uint32),
("MinimumWorkingSetSize", ctypes.c_void_p),
("MaximumWorkingSetSize", ctypes.c_void_p),
("ActiveProcessLimit", ctypes.c_uint32),
("Affinity", ctypes.c_void_p),
("PriorityClass", ctypes.c_uint32),
("SchedulingClass", ctypes.c_uint32)]
class IO_COUNTERS(ctypes.Structure):
_fields_ = [("ReadOperationCount", ctypes.c_uint64),
("WriteOperationCount", ctypes.c_uint64),
("OtherOperationCount", ctypes.c_uint64),
("ReadTransferCount", ctypes.c_uint64),
("WriteTransferCount", ctypes.c_uint64),
("OtherTransferCount", ctypes.c_uint64)]
_fields_ = [("BasicLimitInformation", JOBOBJECT_BASIC_LIMIT_INFORMATION),
("IoInfo", IO_COUNTERS),
("ProcessMemoryLimit", ctypes.c_void_p),
("JobMemoryLimit", ctypes.c_void_p),
("PeakProcessMemoryUsed", ctypes.c_void_p),
("PeakJobMemoryUsed", ctypes.c_void_p)]
job_info = JOBOBJECT_EXTENDED_LIMIT_INFORMATION()
job_info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_JOB_MEMORY
if purge_pid_on_exit:
job_info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
job_info.JobMemoryLimit = memory_limit
kernel = ctypes.windll.kernel32
job = kernel.CreateJobObjectA(None, None)
if job == 0:
raise RuntimeError("CreateJobObjectA failed")
keep_job_handle = False
try:
if not kernel.SetInformationJobObject(
job,
JobObjectExtendedLimitInformation,
ctypes.POINTER(JOBOBJECT_EXTENDED_LIMIT_INFORMATION)(job_info),
ctypes.sizeof(JOBOBJECT_EXTENDED_LIMIT_INFORMATION)):
raise RuntimeError("SetInformationJobObject failed")
        process = kernel.OpenProcess(PROCESS_SET_QUOTA | PROCESS_TERMINATE, False, pid)
if process == 0:
raise RuntimeError("OpenProcess failed")
try:
if not kernel.AssignProcessToJobObject(job, process):
raise RuntimeError("AssignProcessToJobObject failed")
# If purge_pid_on_exit is true, we kill process pid and all its
# descendants when the job handle is closed. So, we keep the handle
# dangling, and it will be closed when *this* process terminates.
keep_job_handle = purge_pid_on_exit
finally:
if not kernel.CloseHandle(process):
raise RuntimeError("CloseHandle failed")
finally:
if not (keep_job_handle or kernel.CloseHandle(job)):
raise RuntimeError("CloseHandle failed")
def allocate(bytes):
import numpy
try:
result = numpy.zeros(shape=(bytes,), dtype='i1')
print("allocation done:", bytes)
except Exception as ex:
print("Failed to allocate:", ex)
raise
def runner(thunk, memory_limit, *args):
set_memory_limit(memory_limit)
thunk(*args)
def run_in_process_with_memory_limit(thunk, memory_limit, test_bytes):
p = multiprocessing.Process(target=runner, args=(thunk, memory_limit, test_bytes))
p.start()
p.join()
def main():
memory_limit = 1000*1000*100
run_in_process_with_memory_limit(allocate, memory_limit=memory_limit, test_bytes=memory_limit)
if __name__ == "__main__":
main()
| mit | -2,975,399,561,159,198,700 | 39.647541 | 98 | 0.626336 | false |
j-dasilva/COMP4350 | apartment/messaging/message.py | 1 | 1169 | from django.conf import settings
import time
class Message(object):
def __init__(self, *args, **kwargs):
vals = self.process_args(args, kwargs)
self.sender = vals['sender']
self.recipient = vals['recipient']
self.urgency = int(vals['urgency'])
self.content = vals['content']
self.timestamp = int(vals['timestamp'])
self.read = (vals['read'] == 'True')
def process_args(self, args, kwargs):
if len(kwargs) == 6:
return kwargs
elif len(args) == 1:
return args[0]
elif settings.CREATE_STUBS:
# CREATE A STUB MESSAGE
return self.create_stub()
else:
raise MessageException()
def create_stub(self):
return {
"sender": "StubSender",
"recipient": "StubRecipient",
"urgency": "1",
"content": "Stub Message Body",
"timestamp": time.time(),
"read": "False"
}
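    # Construction sketch (illustrative values, not taken from the original code):
    #   Message(sender='alice', recipient='bob', urgency='2', content='hi',
    #           timestamp=time.time(), read='False')    # six keyword arguments
    #   Message({'sender': 'alice', ...})                # or a single dict positional argument
    #   Message()                                        # stub message, when settings.CREATE_STUBS is true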
class MessageException(BaseException):
def __init__(self):
super(MessageException, self).__init__("Failed to create Message. Please refer to constructor.") | gpl-2.0 | 587,018,529,972,457,200 | 29 | 104 | 0.551754 | false |
nature-python/youcai-contest | application/utils/cipherutils.py | 1 | 1513 | #!/usr/bin/python
#encoding:utf-8
#
#author:xin.xin
#since:14-5-19 10:35 AM
#
#
from binascii import b2a_hex, a2b_hex
from Crypto.Cipher import AES
from application import app
class CipherUtils(object):
    #Encryption helper. If text is shorter than 16 characters it is padded to 16
    #with spaces; if it is longer and not a multiple of 16, it is padded with '\0'
    #to the next multiple of 16.
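    # Worked example (illustrative): a 5-character text is padded with 11 spaces
    # (5 + 11 = 16); a 20-character text is padded with 12 '\0' bytes (20 + 12 = 32).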
@staticmethod
def encrypt(text):
cryptor = AES.new(app.config['PASSWORD_CIPHER_KEY'], AES.MODE_CBC, '0000000000000000')
        #The key length must be 16 (AES-128), 24 (AES-192) or 32 (AES-256) bytes;
        #AES-128 is sufficient for current needs.
length = 16
count = len(text)
if count < length:
add = (length - count)
            #pad the short text with spaces up to a full 16-byte block
text = text + (' ' * add)
elif count > length:
add = (length - (count % length))
text = text + ('\0' * add)
ciphertext = cryptor.encrypt(text)
        #The AES ciphertext is not guaranteed to be ASCII, which can cause problems
        #when it is printed or stored, so the ciphertext is converted to a hex string.
return b2a_hex(ciphertext)
    #After decryption, strip the padding that encrypt() appended
@staticmethod
def decrypt(text):
cryptor = AES.new(app.config['PASSWORD_CIPHER_KEY'], AES.MODE_CBC, '0000000000000000')
plain_text = cryptor.decrypt(a2b_hex(text))
        return plain_text.rstrip('\0').rstrip(' ')
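    # Round-trip sketch (illustrative; assumes app.config['PASSWORD_CIPHER_KEY'] holds a
    # 16/24/32-byte key, as the code above expects):
    #   token = CipherUtils.encrypt('secret')   # hex-encoded ciphertext
    #   plain = CipherUtils.decrypt(token)      # 'secret' once the padding is stripped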
| apache-2.0 | 8,200,681,271,139,766,000 | 24.5625 | 94 | 0.612062 | false |
gobstones/PyGobstones | pygobstones/gui/views/gobstonesMain.py | 1 | 21498 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gobstonesMain.ui'
#
# Created by: PyQt4 UI code generator 4.9.6
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import sys
import resources
sys.path.append('..')
from pygobstones.commons.i18n import *
from pygobstones.gui.textEditor import *
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8('MainWindow'))
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8('centralwidget'))
self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))
self.tabWidgetEditors = QtGui.QTabWidget(self.centralwidget)
self.tabWidgetEditors.setObjectName(_fromUtf8('tabWidgetEditors'))
self.tabWidgetEditors.setStyleSheet("border:2px solid #4682b4; border-color:'#4682b4';")
self.tabWidgetEditors.tabBar().setStyleSheet("background-color:'white'; color:'#4682b4'; border:2px solid #4682b4; font-size:15px")
self.tabWidgetEditors.tabBar().setAttribute(QtCore.Qt.WA_TranslucentBackground)
self.tabFile = QtGui.QWidget()
self.tabFile.setStyleSheet("border-color:white")
self.tabFile.setObjectName(_fromUtf8('tabFile'))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.tabFile)
self.verticalLayout_3.setObjectName(_fromUtf8('verticalLayout_3'))
self.textEditFile = GobstonesTextEditor(self.tabFile)
self.textEditFile.setObjectName(_fromUtf8('textEditFile'))
self.textEditFile.setStyleSheet("selection-color: white; selection-background-color:#008080")
self.verticalLayout_3.addWidget(self.textEditFile)
self.tabWidgetEditors.addTab(self.tabFile, _fromUtf8(''))
self.tabLibrary = QtGui.QWidget()
self.tabLibrary.setStyleSheet("border-color:white")
self.tabLibrary.setObjectName(_fromUtf8('tabLibrary'))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.tabLibrary)
self.verticalLayout_2.setObjectName(_fromUtf8('verticalLayout_2'))
self.textEditLibrary = GobstonesTextEditor(self.tabLibrary)
self.textEditLibrary.setObjectName(_fromUtf8('textEditLibrary'))
self.textEditLibrary.setStyleSheet("selection-color: white; selection-background-color:#008080")
self.verticalLayout_2.addWidget(self.textEditLibrary)
self.tabWidgetEditors.addTab(self.tabLibrary, _fromUtf8(''))
self.set_highlighter(GobstonesHighlighter)
self.logger = QtGui.QTextEdit()
self.logger.setObjectName(_fromUtf8('logger'))
self.logger.setReadOnly(True)
self.logger.setStyleSheet("font-family: Monospace, Consolas, 'Courier New'; font-weight: 100; font-size: 10pt")
self.grid = QtGui.QGridLayout()
self.grid.setSpacing(1)
self.verticalLayout.addLayout(self.grid)
self.splitter = QtGui.QSplitter(QtCore.Qt.Vertical, self.centralwidget)
self.splitter.addWidget(self.tabWidgetEditors)
self.splitter.addWidget(self.logger)
self.verticalLayout.addWidget(self.splitter)
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8('statusbar'))
MainWindow.setStatusBar(self.statusbar)
self.toolBar = QtGui.QToolBar(MainWindow)
self.toolBar.setObjectName(_fromUtf8('toolBar'))
self.toolBar.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
self.menuBar = QtGui.QMenuBar(MainWindow)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 703, 20))
self.menuBar.setObjectName(_fromUtf8('menuBar'))
self.menuFile = QtGui.QMenu(self.menuBar)
self.menuFile.setObjectName(_fromUtf8('menuFile'))
self.menuEdit = QtGui.QMenu(self.menuBar)
self.menuEdit.setObjectName(_fromUtf8('menuEdit'))
self.menuGobstones = QtGui.QMenu(self.menuBar)
self.menuGobstones.setObjectName(_fromUtf8('menuGobstones'))
self.menuBoard = QtGui.QMenu(self.menuBar)
self.menuBoard.setObjectName(_fromUtf8('menuBoard'))
self.menuSelectResultView = QtGui.QMenu(self.menuBoard)
self.menuSelectResultView.setObjectName(_fromUtf8
('menuSelectResultView'))
self.menuHelp = QtGui.QMenu(self.menuBar)
self.menuHelp.setObjectName(_fromUtf8('menuHelp'))
MainWindow.setMenuBar(self.menuBar)
self.actionChangeLang = QtGui.QAction(MainWindow)
icon = QtGui.QIcon(":/logoGobstones.png")
self.actionChangeLang.setIcon(icon)
self.actionChangeLang.setObjectName(_fromUtf8('actionChangeLang'))
self.actionNewFile = QtGui.QAction(MainWindow)
icon = QtGui.QIcon(":/new.png")
self.actionNewFile.setIcon(icon)
self.actionNewFile.setObjectName(_fromUtf8('actionNewFile'))
self.actionCloseFile = QtGui.QAction(MainWindow)
icon = QtGui.QIcon(":/close.png")
self.actionCloseFile.setIcon(icon)
self.actionCloseFile.setObjectName(_fromUtf8('actionCloseFile'))
self.actionOpenFile = QtGui.QAction(MainWindow)
icon1 = QtGui.QIcon(":/open.png")
self.actionOpenFile.setIcon(icon1)
self.actionOpenFile.setObjectName(_fromUtf8('actionOpenFile'))
self.actionSave = QtGui.QAction(MainWindow)
icon2 = QtGui.QIcon(":/save.png")
self.actionSave.setIcon(icon2)
self.actionSave.setObjectName(_fromUtf8('actionSave'))
self.actionSaveAs = QtGui.QAction(MainWindow)
icon3 = QtGui.QIcon(":/save-as.png")
self.actionSaveAs.setIcon(icon3)
self.actionSaveAs.setObjectName(_fromUtf8('actionSaveAs'))
self.actionUndo = QtGui.QAction(MainWindow)
icon5 = QtGui.QIcon(":/undo.png")
self.actionUndo.setIcon(icon5)
self.actionUndo.setObjectName(_fromUtf8('actionUndo'))
self.actionRedo = QtGui.QAction(MainWindow)
icon6 = QtGui.QIcon(":/redo.png")
self.actionRedo.setIcon(icon6)
self.actionRedo.setObjectName(_fromUtf8('actionRedo'))
self.actionCut = QtGui.QAction(MainWindow)
icon7 = QtGui.QIcon(":/cut.png")
self.actionCut.setIcon(icon7)
self.actionCut.setObjectName(_fromUtf8('actionCut'))
self.actionCopy = QtGui.QAction(MainWindow)
icon8 = QtGui.QIcon(":/copy.png")
self.actionCopy.setIcon(icon8)
self.actionCopy.setObjectName(_fromUtf8('actionCopy'))
self.actionPaste = QtGui.QAction(MainWindow)
icon9 = QtGui.QIcon(":/paste.png")
self.actionPaste.setIcon(icon9)
self.actionPaste.setObjectName(_fromUtf8('actionPaste'))
self.actionSelectAll = QtGui.QAction(MainWindow)
icon10 = QtGui.QIcon(":/select-all.png")
self.actionSelectAll.setIcon(icon10)
self.actionSelectAll.setObjectName(_fromUtf8('actionSelectAll'))
self.actionFind = QtGui.QAction(MainWindow)
icon11 = QtGui.QIcon(":/find.png")
self.actionFind.setIcon(icon11)
self.actionFind.setObjectName(_fromUtf8('actionFind'))
self.actionReplace = QtGui.QAction(MainWindow)
icon20 = QtGui.QIcon(":/find.png")
self.actionReplace.setIcon(icon20)
self.actionReplace.setObjectName(_fromUtf8('actionReplace'))
self.actionFonts = QtGui.QAction(MainWindow)
icon21 = QtGui.QIcon(":/select-font.png")
self.actionFonts.setIcon(icon21)
self.actionFonts.setObjectName(_fromUtf8('actionFonts'))
self.actionPreferences = QtGui.QAction(MainWindow)
self.actionPreferences.setObjectName(_fromUtf8('actionFonts'))
self.actionCheck = QtGui.QAction(MainWindow)
icon14 = QtGui.QIcon(":/check.png")
self.actionCheck.setIcon(icon14)
self.actionCheck.setObjectName(_fromUtf8('actionCheck'))
self.actionRun = QtGui.QAction(MainWindow)
icon12 = QtGui.QIcon(":/start.png")
self.actionRun.setIcon(icon12)
self.actionRun.setObjectName(_fromUtf8('actionRun'))
self.actionStop = QtGui.QAction(MainWindow)
icon13 = QtGui.QIcon(":/stop.png")
self.actionStop.setIcon(icon13)
self.actionStop.setObjectName(_fromUtf8('actionStop'))
self.actionManual = QtGui.QAction(MainWindow)
icon15 = QtGui.QIcon(":/help.png")
self.actionManual.setIcon(icon15)
self.actionManual.setObjectName(_fromUtf8('actionManual'))
self.actionLicense = QtGui.QAction(MainWindow)
icon16 = QtGui.QIcon(":/manual.png")
self.actionLicense.setIcon(icon16)
self.actionLicense.setObjectName(_fromUtf8('actionLicense'))
self.actionAbout = QtGui.QAction(MainWindow)
icon17 = QtGui.QIcon(":/about.png")
self.actionAbout.setIcon(icon17)
self.actionAbout.setObjectName(_fromUtf8('actionAbout'))
self.actionExit = QtGui.QAction(MainWindow)
icon18 = QtGui.QIcon(":/exit.png")
self.actionExit.setIcon(icon18)
self.actionExit.setObjectName(_fromUtf8('actionExit'))
self.actionOpenBoardEditor = QtGui.QAction(MainWindow)
icon19 = QtGui.QIcon(":/board-random.png")
self.actionOpenBoardEditor.setIcon(icon19)
self.actionOpenBoardEditor.setObjectName(_fromUtf8
('actionOpenBoardEditor'))
self.actionBoardOptions = QtGui.QAction(MainWindow)
icon20 = QtGui.QIcon(":/board-size.png")
self.actionBoardOptions.setIcon(icon20)
self.actionBoardOptions.setObjectName(_fromUtf8
('actionBoardOptions'))
self.actionLoadBoard = QtGui.QAction(MainWindow)
icon20 = QtGui.QIcon(":/board-new.png")
self.actionLoadBoard.setIcon(icon20)
self.actionLoadBoard.setObjectName(_fromUtf8
('actionLoadBoard'))
self.toolBar.addAction(self.actionChangeLang)
self.toolBar.addAction(self.actionNewFile)
self.toolBar.addAction(self.actionOpenFile)
self.toolBar.addAction(self.actionSave)
self.toolBar.addAction(self.actionCloseFile)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionUndo)
self.toolBar.addAction(self.actionRedo)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionOpenBoardEditor)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionCheck)
self.toolBar.addAction(self.actionRun)
self.toolBar.addAction(self.actionStop)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionManual)
self.toolBar.addAction(self.actionAbout)
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionChangeLang)
self.menuFile.addAction(self.actionNewFile)
self.menuFile.addAction(self.actionOpenFile)
self.menuFile.addAction(self.actionSave)
self.menuFile.addAction(self.actionSaveAs)
self.menuFile.addAction(self.actionCloseFile)
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionExit)
self.menuEdit.addSeparator()
self.menuEdit.addAction(self.actionUndo)
self.menuEdit.addAction(self.actionRedo)
self.menuEdit.addSeparator()
self.menuEdit.addAction(self.actionCut)
self.menuEdit.addAction(self.actionCopy)
self.menuEdit.addAction(self.actionPaste)
self.menuEdit.addAction(self.actionSelectAll)
self.menuEdit.addSeparator()
self.menuEdit.addAction(self.actionFind)
self.menuEdit.addAction(self.actionReplace)
self.menuEdit.addSeparator()
self.menuEdit.addAction(self.actionFonts)
self.menuEdit.addSeparator()
self.menuEdit.addAction(self.actionPreferences)
self.menuGobstones.addSeparator()
self.menuGobstones.addAction(self.actionRun)
self.menuGobstones.addAction(self.actionStop)
self.menuGobstones.addAction(self.actionCheck)
self.menuBoard.addSeparator()
self.menuBoard.addAction(self.actionLoadBoard)
self.menuBoard.addAction(self.actionBoardOptions)
self.menuBoard.addAction(self.actionOpenBoardEditor)
self.menuBoard.addSeparator()
self.menuBoard.addAction(self.menuSelectResultView.menuAction())
self.menuHelp.addSeparator()
self.menuHelp.addAction(self.actionManual)
self.menuHelp.addAction(self.actionLicense)
self.menuHelp.addAction(self.actionAbout)
self.menuBar.addAction(self.menuFile.menuAction())
self.menuBar.addAction(self.menuEdit.menuAction())
self.menuBar.addAction(self.menuGobstones.menuAction())
self.menuBar.addAction(self.menuBoard.menuAction())
self.menuBar.addAction(self.menuHelp.menuAction())
self.retranslateUi(MainWindow)
self.tabWidgetEditors.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def set_highlighter(self, highlighter_class):
if hasattr(self, "highlighter"):
self.highlighter["main"].setDocument(None)
self.highlighter["library"].setDocument(None)
else:
self.highlighter = {}
self.highlighter["main"] = highlighter_class(self.textEditFile.edit.document())
self.highlighter["library"] = highlighter_class(self.textEditLibrary.edit.document())
def retranslateUi(self, MainWindow):
self.tabWidgetEditors.setTabText(
self.tabWidgetEditors.indexOf(self.tabFile),
_translate('MainWindow', i18n('Untitled'), None))
self.tabWidgetEditors.setTabText(
self.tabWidgetEditors.indexOf(self.tabLibrary),
_translate('MainWindow', i18n('Untitled'), None))
self.toolBar.setWindowTitle(_translate('MainWindow', 'toolBar', None))
self.menuFile.setTitle(_translate('MainWindow', i18n('File'), None))
self.menuEdit.setTitle(_translate('MainWindow', i18n('Edit'), None))
self.menuGobstones.setTitle(_translate('MainWindow', 'Gobstones',
None))
self.menuBoard.setTitle(_translate('MainWindow', i18n('Board'), None))
self.menuSelectResultView.setTitle(_translate('MainWindow',
i18n('Select view results'), None))
self.menuHelp.setTitle(_translate('MainWindow', i18n('Help'), None))
self.actionChangeLang.setText(_translate('MainWindow',
'Gobstones ', None))
self.actionChangeLang.setToolTip(_translate('MainWindow',
i18n('Change the Gobstones Language'), None))
self.actionChangeLang.setShortcut(_translate('MainWindow', 'F11', None))
self.actionNewFile.setText(_translate('MainWindow', i18n('New'), None))
self.actionNewFile.setToolTip(_translate('MainWindow',
i18n('Create new file'), None))
self.actionNewFile.setShortcut(_translate('MainWindow', 'Ctrl+N',
None))
self.actionCloseFile.setText(_translate('MainWindow', i18n('Close'), None))
self.actionCloseFile.setToolTip(_translate('MainWindow',
i18n('Close the current file and the library'), None))
self.actionCloseFile.setShortcut(_translate('MainWindow', 'Ctrl+R',
None))
self.actionOpenFile.setText(_translate('MainWindow', i18n('Open'), None))
self.actionOpenFile.setToolTip(_translate('MainWindow',
i18n('Open an existent file'), None))
self.actionOpenFile.setShortcut(_translate('MainWindow', 'Ctrl+O',
None))
self.actionSave.setText(_translate('MainWindow', i18n('Save'), None))
self.actionSave.setToolTip(_translate('MainWindow',
i18n('Save the current file'), None))
self.actionSave.setShortcut(_translate('MainWindow', 'Ctrl+S', None))
self.actionSaveAs.setText(_translate('MainWindow', i18n('Save as...'),
None))
self.actionSaveAs.setToolTip(_translate('MainWindow',
i18n('Save the current file and allows put a name and choose the location'),
None))
self.actionUndo.setText(_translate('MainWindow', i18n('Undo'), None))
self.actionUndo.setShortcut(_translate('MainWindow', 'Ctrl+Z', None))
self.actionRedo.setText(_translate('MainWindow', i18n('Redo'), None))
self.actionRedo.setShortcut(_translate('MainWindow', 'Ctrl+Shift+Z',
None))
self.actionCut.setText(_translate('MainWindow', i18n('Cut'), None))
self.actionCut.setShortcut(_translate('MainWindow', 'Ctrl+X', None))
self.actionCopy.setText(_translate('MainWindow', i18n('Copy'), None))
self.actionCopy.setShortcut(_translate('MainWindow', 'Ctrl+C', None))
self.actionPaste.setText(_translate('MainWindow', i18n('Paste'), None))
self.actionPaste.setShortcut(_translate('MainWindow', 'Ctrl+V', None))
self.actionSelectAll.setText(_translate('MainWindow',
i18n('Select all'), None))
self.actionSelectAll.setShortcut(_translate('MainWindow', 'Ctrl+A',
None))
self.actionFind.setText(_translate('MainWindow', i18n('Search'), None))
self.actionFind.setShortcut(_translate('MainWindow', 'Ctrl+F', None))
self.actionReplace.setText(_translate('MainWindow', i18n('Search and replace'), None))
self.actionReplace.setShortcut(_translate('MainWindow', 'Ctrl+H', None))
self.actionFonts.setText(_translate('MainWindow', i18n('Select fonts'), None))
self.actionFonts.setShortcut(_translate('MainWindow', 'Ctrl+T', None))
self.actionPreferences.setText(_translate('MainWindow', i18n('Preferences'), None))
self.actionPreferences.setShortcut(_translate('MainWindow', 'Ctrl+P', None))
self.actionRun.setText(_translate('MainWindow', i18n('Run'), None))
self.actionRun.setToolTip(_translate('MainWindow',
i18n('Executes the current program'), None))
self.actionRun.setShortcut(_translate('MainWindow', 'F5', None))
self.actionStop.setText(_translate('MainWindow', i18n('Stop'), None))
self.actionStop.setToolTip(_translate('MainWindow',
i18n('Stops execution of the current program'), None))
self.actionStop.setShortcut(_translate('MainWindow', 'F6', None))
self.actionCheck.setText(_translate('MainWindow', i18n('Check'), None))
self.actionCheck.setToolTip(_translate('MainWindow',
i18n('Checks if the program is well-formed'), None))
self.actionCheck.setShortcut(_translate('MainWindow', 'F10', None))
self.actionManual.setText(_translate('MainWindow', i18n('Manual'), None))
self.actionManual.setToolTip(_translate('MainWindow',
i18n('Open the Gobstones\'s manual'), None))
self.actionLicense.setText(_translate('MainWindow', i18n('Licence'), None))
self.actionAbout.setText(_translate('MainWindow', i18n('About...'),
None))
self.actionExit.setText(_translate('MainWindow', i18n('Exit'), None))
self.actionExit.setToolTip(_translate('MainWindow',
i18n('Closes the application'), None))
self.actionExit.setShortcut(_translate('MainWindow', 'Ctrl+Q', None))
self.actionOpenBoardEditor.setText(_translate('MainWindow',
i18n('Board editor'), None))
self.actionOpenBoardEditor.setToolTip(_translate('MainWindow',
i18n('Open board editor'), None))
self.actionBoardOptions.setText(_translate('MainWindow',
i18n('Options Board'), None))
self.actionBoardOptions.setToolTip(_translate('MainWindow',
i18n('Select board options'), None))
self.actionLoadBoard.setText(_translate('MainWindow',
i18n('Load board'), None))
self.actionLoadBoard.setToolTip(_translate('MainWindow',
i18n('Open a board from existing .gbb file'), None))
| gpl-3.0 | 4,872,066,273,332,599,000 | 54.123077 | 139 | 0.651084 | false |
forkbong/qutebrowser | tests/unit/browser/webkit/network/test_filescheme.py | 1 | 9313 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2021 Florian Bruhin (The Compiler) <[email protected]>
# Copyright 2015-2018 Antoni Boucher (antoyo) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
import os
import dataclasses
from typing import List
import pytest
import bs4
from PyQt5.QtCore import QUrl
from PyQt5.QtNetwork import QNetworkRequest
from qutebrowser.browser.webkit.network import filescheme
from qutebrowser.utils import urlutils, utils
from helpers import utils as testutils
@pytest.mark.parametrize('create_file, create_dir, filterfunc, expected', [
(True, False, os.path.isfile, True),
(True, False, os.path.isdir, False),
(False, True, os.path.isfile, False),
(False, True, os.path.isdir, True),
(False, False, os.path.isfile, False),
(False, False, os.path.isdir, False),
])
def test_get_file_list(tmpdir, create_file, create_dir, filterfunc, expected):
"""Test get_file_list."""
path = tmpdir / 'foo'
if create_file or create_dir:
path.ensure(dir=create_dir)
all_files = os.listdir(str(tmpdir))
result = filescheme.get_file_list(str(tmpdir), all_files, filterfunc)
item = {'name': 'foo', 'absname': str(path)}
assert (item in result) == expected
class TestIsRoot:
@pytest.mark.windows
@pytest.mark.parametrize('directory, is_root', [
('C:\\foo\\bar', False),
('C:\\foo\\', False),
('C:\\foo', False),
('C:\\', True)
])
def test_windows(self, directory, is_root):
assert filescheme.is_root(directory) == is_root
@pytest.mark.posix
@pytest.mark.parametrize('directory, is_root', [
('/foo/bar', False),
('/foo/', False),
('/foo', False),
('/', True)
])
def test_posix(self, directory, is_root):
assert filescheme.is_root(directory) == is_root
class TestParentDir:
@pytest.mark.windows
@pytest.mark.parametrize('directory, parent', [
('C:\\foo\\bar', 'C:\\foo'),
('C:\\foo', 'C:\\'),
('C:\\foo\\', 'C:\\'),
('C:\\', 'C:\\'),
])
def test_windows(self, directory, parent):
assert filescheme.parent_dir(directory) == parent
@pytest.mark.posix
@pytest.mark.parametrize('directory, parent', [
('/home/foo', '/home'),
('/home', '/'),
('/home/', '/'),
('/', '/'),
])
def test_posix(self, directory, parent):
assert filescheme.parent_dir(directory) == parent
def _file_url(path):
"""Return a file:// url (as string) for the given LocalPath.
Arguments:
path: The filepath as LocalPath (as handled by py.path)
"""
return urlutils.file_url(str(path))
class TestDirbrowserHtml:
@dataclasses.dataclass
class Parsed:
parent: str
folders: List[str]
files: List[str]
@dataclasses.dataclass
class Item:
link: str
text: str
@pytest.fixture
def parser(self):
"""Provide a function to get a parsed dirbrowser document."""
def parse(path):
html = filescheme.dirbrowser_html(path).decode('utf-8')
soup = bs4.BeautifulSoup(html, 'html.parser')
with testutils.ignore_bs4_warning():
print(soup.prettify())
container = soup('div', id='dirbrowserContainer')[0]
parent_elem = container('ul', class_='parent')
if not parent_elem:
parent = None
else:
parent = parent_elem[0].li.a.string
folders = []
files = []
for li in container('ul', class_='folders')[0]('li'):
item = self.Item(link=li.a['href'], text=str(li.a.string))
folders.append(item)
for li in container('ul', class_='files')[0]('li'):
item = self.Item(link=li.a['href'], text=str(li.a.string))
files.append(item)
return self.Parsed(parent=parent, folders=folders, files=files)
return parse
def test_basic(self):
html = filescheme.dirbrowser_html(os.getcwd()).decode('utf-8')
soup = bs4.BeautifulSoup(html, 'html.parser')
with testutils.ignore_bs4_warning():
print(soup.prettify())
container = soup.div
assert container['id'] == 'dirbrowserContainer'
title_elem = container('div', id='dirbrowserTitle')[0]
title_text = title_elem('p', id='dirbrowserTitleText')[0].text
assert title_text == 'Browse directory: {}'.format(os.getcwd())
def test_icons(self, monkeypatch):
"""Make sure icon paths are correct file:// URLs."""
monkeypatch.setattr(filescheme.jinja.utils, 'resource_filename',
lambda name: '/test path/foo.svg')
html = filescheme.dirbrowser_html(os.getcwd()).decode('utf-8')
soup = bs4.BeautifulSoup(html, 'html.parser')
with testutils.ignore_bs4_warning():
print(soup.prettify())
css = soup.html.head.style.string
assert "background-image: url('file:///test%20path/foo.svg');" in css
def test_empty(self, tmpdir, parser):
parsed = parser(str(tmpdir))
assert parsed.parent
assert not parsed.folders
assert not parsed.files
def test_files(self, tmpdir, parser):
foo_file = tmpdir / 'foo'
bar_file = tmpdir / 'bar'
foo_file.ensure()
bar_file.ensure()
parsed = parser(str(tmpdir))
assert parsed.parent
assert not parsed.folders
foo_item = self.Item(_file_url(foo_file), foo_file.relto(tmpdir))
bar_item = self.Item(_file_url(bar_file), bar_file.relto(tmpdir))
assert parsed.files == [bar_item, foo_item]
def test_html_special_chars(self, tmpdir, parser):
special_file = tmpdir / 'foo&bar'
special_file.ensure()
parsed = parser(str(tmpdir))
item = self.Item(_file_url(special_file), special_file.relto(tmpdir))
assert parsed.files == [item]
def test_dirs(self, tmpdir, parser):
foo_dir = tmpdir / 'foo'
bar_dir = tmpdir / 'bar'
foo_dir.ensure(dir=True)
bar_dir.ensure(dir=True)
parsed = parser(str(tmpdir))
assert parsed.parent
assert not parsed.files
foo_item = self.Item(_file_url(foo_dir), foo_dir.relto(tmpdir))
bar_item = self.Item(_file_url(bar_dir), bar_dir.relto(tmpdir))
assert parsed.folders == [bar_item, foo_item]
def test_mixed(self, tmpdir, parser):
foo_file = tmpdir / 'foo'
bar_dir = tmpdir / 'bar'
foo_file.ensure()
bar_dir.ensure(dir=True)
parsed = parser(str(tmpdir))
foo_item = self.Item(_file_url(foo_file), foo_file.relto(tmpdir))
bar_item = self.Item(_file_url(bar_dir), bar_dir.relto(tmpdir))
assert parsed.parent
assert parsed.files == [foo_item]
assert parsed.folders == [bar_item]
def test_root_dir(self, tmpdir, parser):
root_dir = 'C:\\' if utils.is_windows else '/'
parsed = parser(root_dir)
assert not parsed.parent
def test_oserror(self, mocker):
m = mocker.patch('qutebrowser.browser.webkit.network.filescheme.'
'os.listdir')
m.side_effect = OSError('Error message')
html = filescheme.dirbrowser_html('').decode('utf-8')
soup = bs4.BeautifulSoup(html, 'html.parser')
with testutils.ignore_bs4_warning():
print(soup.prettify())
error_msg = soup('p', id='error-message-text')[0].string
assert error_msg == 'Error message'
class TestFileSchemeHandler:
def test_dir(self, tmpdir):
url = QUrl.fromLocalFile(str(tmpdir))
req = QNetworkRequest(url)
reply = filescheme.handler(req, None, None)
# The URL will always use /, even on Windows - so we force this here
# too.
tmpdir_path = str(tmpdir).replace(os.sep, '/')
assert reply.readAll() == filescheme.dirbrowser_html(tmpdir_path)
def test_file(self, tmpdir):
filename = tmpdir / 'foo'
filename.ensure()
url = QUrl.fromLocalFile(str(filename))
req = QNetworkRequest(url)
reply = filescheme.handler(req, None, None)
assert reply is None
def test_unicode_encode_error(self, mocker):
url = QUrl('file:///tmp/foo')
req = QNetworkRequest(url)
err = UnicodeEncodeError('ascii', '', 0, 2, 'foo')
mocker.patch('os.path.isdir', side_effect=err)
reply = filescheme.handler(req, None, None)
assert reply is None
| gpl-3.0 | 7,587,535,219,534,022,000 | 31.449477 | 78 | 0.605068 | false |
davy39/eric | Helpviewer/AdBlock/AdBlockSubscription.py | 1 | 24766 | # -*- coding: utf-8 -*-
# Copyright (c) 2009 - 2014 Detlev Offenbach <[email protected]>
#
"""
Module implementing the AdBlock subscription class.
"""
from __future__ import unicode_literals
import os
import re
import hashlib
import base64
from PyQt5.QtCore import pyqtSignal, Qt, QObject, QByteArray, QDateTime, \
QUrl, QCryptographicHash, QFile, QIODevice, QTextStream, QDate, QTime, \
qVersion
from PyQt5.QtNetwork import QNetworkReply
from E5Gui import E5MessageBox
import Utilities
import Preferences
class AdBlockSubscription(QObject):
"""
Class implementing the AdBlock subscription.
@signal changed() emitted after the subscription has changed
@signal rulesChanged() emitted after the subscription's rules have changed
@signal enabledChanged(bool) emitted after the enabled state was changed
"""
changed = pyqtSignal()
rulesChanged = pyqtSignal()
enabledChanged = pyqtSignal(bool)
def __init__(self, url, custom, parent=None, default=False):
"""
Constructor
@param url AdBlock URL for the subscription (QUrl)
@param custom flag indicating a custom subscription (boolean)
@param parent reference to the parent object (QObject)
@param default flag indicating a default subscription (boolean)
"""
super(AdBlockSubscription, self).__init__(parent)
self.__custom = custom
self.__url = url.toEncoded()
self.__enabled = False
self.__downloading = None
self.__defaultSubscription = default
self.__title = ""
self.__location = QByteArray()
self.__lastUpdate = QDateTime()
self.__requiresLocation = ""
self.__requiresTitle = ""
self.__updatePeriod = 0 # update period in hours, 0 = use default
self.__remoteModified = QDateTime()
self.__rules = [] # list containing all AdBlock rules
self.__networkExceptionRules = []
self.__networkBlockRules = []
self.__domainRestrictedCssRules = []
self.__elementHidingRules = ""
self.__documentRules = []
self.__elemhideRules = []
self.__checksumRe = re.compile(
r"""^\s*!\s*checksum[\s\-:]+([\w\+\/=]+).*\n""",
re.IGNORECASE | re.MULTILINE)
self.__expiresRe = re.compile(
r"""(?:expires:|expires after)\s*(\d+)\s*(hour|h)?""",
re.IGNORECASE)
self.__remoteModifiedRe = re.compile(
r"""!\s*(?:Last modified|Updated):\s*(\d{1,2})\s*"""
r"""(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s*"""
r"""(\d{2,4})\s*((\d{1,2}):(\d{2}))?""",
re.IGNORECASE)
self.__monthNameToNumber = {
"Jan": 1,
"Feb": 2,
"Mar": 3,
"Apr": 4,
"May": 5,
"Jun": 6,
"Jul": 7,
"Aug": 8,
"Sep": 9,
"Oct": 10,
"Nov": 11,
"Dec": 12
}
self.__parseUrl(url)
def __parseUrl(self, url):
"""
Private method to parse the AdBlock URL for the subscription.
@param url AdBlock URL for the subscription (QUrl)
"""
if url.scheme() != "abp":
return
if url.path() != "subscribe":
return
if qVersion() >= "5.0.0":
from PyQt5.QtCore import QUrlQuery
urlQuery = QUrlQuery(url)
self.__title = urlQuery.queryItemValue("title")
self.__enabled = urlQuery.queryItemValue("enabled") != "false"
self.__location = QByteArray(urlQuery.queryItemValue("location"))
# Check for required subscription
self.__requiresLocation = urlQuery.queryItemValue(
"requiresLocation")
self.__requiresTitle = urlQuery.queryItemValue("requiresTitle")
if self.__requiresLocation and self.__requiresTitle:
import Helpviewer.HelpWindow
Helpviewer.HelpWindow.HelpWindow.adBlockManager()\
.loadRequiredSubscription(self.__requiresLocation,
self.__requiresTitle)
lastUpdateString = urlQuery.queryItemValue("lastUpdate")
self.__lastUpdate = QDateTime.fromString(lastUpdateString,
Qt.ISODate)
else:
self.__title = \
QUrl.fromPercentEncoding(url.encodedQueryItemValue("title"))
self.__enabled = QUrl.fromPercentEncoding(
url.encodedQueryItemValue("enabled")) != "false"
self.__location = QByteArray(QUrl.fromPercentEncoding(
url.encodedQueryItemValue("location")))
# Check for required subscription
self.__requiresLocation = QUrl.fromPercentEncoding(
url.encodedQueryItemValue("requiresLocation"))
self.__requiresTitle = QUrl.fromPercentEncoding(
url.encodedQueryItemValue("requiresTitle"))
if self.__requiresLocation and self.__requiresTitle:
import Helpviewer.HelpWindow
Helpviewer.HelpWindow.HelpWindow.adBlockManager()\
.loadRequiredSubscription(self.__requiresLocation,
self.__requiresTitle)
lastUpdateByteArray = url.encodedQueryItemValue("lastUpdate")
lastUpdateString = QUrl.fromPercentEncoding(lastUpdateByteArray)
self.__lastUpdate = QDateTime.fromString(lastUpdateString,
Qt.ISODate)
self.__loadRules()
def url(self):
"""
Public method to generate the URL for this subscription.
@return AdBlock URL for the subscription (QUrl)
"""
url = QUrl()
url.setScheme("abp")
url.setPath("subscribe")
queryItems = []
queryItems.append(("location", bytes(self.__location).decode()))
queryItems.append(("title", self.__title))
if self.__requiresLocation and self.__requiresTitle:
queryItems.append(("requiresLocation", self.__requiresLocation))
queryItems.append(("requiresTitle", self.__requiresTitle))
if not self.__enabled:
queryItems.append(("enabled", "false"))
if self.__lastUpdate.isValid():
queryItems.append(("lastUpdate",
self.__lastUpdate.toString(Qt.ISODate)))
if qVersion() >= "5.0.0":
from PyQt5.QtCore import QUrlQuery
query = QUrlQuery()
query.setQueryItems(queryItems)
url.setQuery(query)
else:
url.setQueryItems(queryItems)
return url
def isEnabled(self):
"""
Public method to check, if the subscription is enabled.
@return flag indicating the enabled status (boolean)
"""
return self.__enabled
def setEnabled(self, enabled):
"""
Public method to set the enabled status.
@param enabled flag indicating the enabled status (boolean)
"""
if self.__enabled == enabled:
return
self.__enabled = enabled
self.enabledChanged.emit(enabled)
def title(self):
"""
Public method to get the subscription title.
@return subscription title (string)
"""
return self.__title
def setTitle(self, title):
"""
Public method to set the subscription title.
@param title subscription title (string)
"""
if self.__title == title:
return
self.__title = title
self.changed.emit()
def location(self):
"""
Public method to get the subscription location.
@return URL of the subscription location (QUrl)
"""
return QUrl.fromEncoded(self.__location)
def setLocation(self, url):
"""
Public method to set the subscription location.
@param url URL of the subscription location (QUrl)
"""
if url == self.location():
return
self.__location = url.toEncoded()
self.__lastUpdate = QDateTime()
self.changed.emit()
def requiresLocation(self):
"""
Public method to get the location of a required subscription.
@return location of a required subscription (string)
"""
return self.__requiresLocation
def lastUpdate(self):
"""
Public method to get the date and time of the last update.
@return date and time of the last update (QDateTime)
"""
return self.__lastUpdate
def rulesFileName(self):
"""
Public method to get the name of the rules file.
@return name of the rules file (string)
"""
if self.location().scheme() == "file":
return self.location().toLocalFile()
if self.__location.isEmpty():
return ""
sha1 = bytes(QCryptographicHash.hash(
self.__location, QCryptographicHash.Sha1).toHex()).decode()
dataDir = os.path.join(
Utilities.getConfigDir(), "browser", "subscriptions")
if not os.path.exists(dataDir):
os.makedirs(dataDir)
fileName = os.path.join(
dataDir, "adblock_subscription_{0}".format(sha1))
return fileName
def __loadRules(self):
"""
Private method to load the rules of the subscription.
"""
fileName = self.rulesFileName()
f = QFile(fileName)
if f.exists():
if not f.open(QIODevice.ReadOnly):
E5MessageBox.warning(
None,
self.tr("Load subscription rules"),
self.tr(
"""Unable to open adblock file '{0}' for reading.""")
.format(fileName))
else:
textStream = QTextStream(f)
header = textStream.readLine(1024)
if not header.startswith("[Adblock"):
E5MessageBox.warning(
None,
self.tr("Load subscription rules"),
self.tr("""AdBlock file '{0}' does not start"""
""" with [Adblock.""")
.format(fileName))
f.close()
f.remove()
self.__lastUpdate = QDateTime()
else:
from .AdBlockRule import AdBlockRule
self.__updatePeriod = 0
self.__remoteModified = QDateTime()
self.__rules = []
self.__rules.append(AdBlockRule(header, self))
while not textStream.atEnd():
line = textStream.readLine()
self.__rules.append(AdBlockRule(line, self))
expires = self.__expiresRe.search(line)
if expires:
period, kind = expires.groups()
if kind:
# hours
self.__updatePeriod = int(period)
else:
# days
self.__updatePeriod = int(period) * 24
remoteModified = self.__remoteModifiedRe.search(line)
if remoteModified:
day, month, year, time, hour, minute = \
remoteModified.groups()
self.__remoteModified.setDate(
QDate(int(year),
self.__monthNameToNumber[month],
int(day))
)
if time:
self.__remoteModified.setTime(
QTime(int(hour), int(minute)))
self.__populateCache()
self.changed.emit()
elif not fileName.endswith("_custom"):
self.__lastUpdate = QDateTime()
self.checkForUpdate()
def checkForUpdate(self):
"""
Public method to check for an update.
"""
if self.__updatePeriod:
updatePeriod = self.__updatePeriod
else:
updatePeriod = Preferences.getHelp("AdBlockUpdatePeriod") * 24
if not self.__lastUpdate.isValid() or \
(self.__remoteModified.isValid() and
self.__remoteModified.addSecs(updatePeriod * 3600) <
QDateTime.currentDateTime()) or \
self.__lastUpdate.addSecs(updatePeriod * 3600) < \
QDateTime.currentDateTime():
self.updateNow()
def updateNow(self):
"""
Public method to update the subscription immediately.
"""
if self.__downloading is not None:
return
if not self.location().isValid():
return
if self.location().scheme() == "file":
self.__lastUpdate = QDateTime.currentDateTime()
self.__loadRules()
return
import Helpviewer.HelpWindow
from Helpviewer.Network.FollowRedirectReply import FollowRedirectReply
self.__downloading = FollowRedirectReply(
self.location(),
Helpviewer.HelpWindow.HelpWindow.networkAccessManager())
self.__downloading.finished.connect(self.__rulesDownloaded)
def __rulesDownloaded(self):
"""
Private slot to deal with the downloaded rules.
"""
reply = self.sender()
response = reply.readAll()
reply.close()
self.__downloading = None
if reply.error() != QNetworkReply.NoError:
if not self.__defaultSubscription:
# don't show error if we try to load the default
E5MessageBox.warning(
None,
self.tr("Downloading subscription rules"),
self.tr(
"""<p>Subscription rules could not be"""
""" downloaded.</p><p>Error: {0}</p>""")
.format(reply.errorString()))
else:
# reset after first download attempt
self.__defaultSubscription = False
return
if response.isEmpty():
E5MessageBox.warning(
None,
self.tr("Downloading subscription rules"),
self.tr("""Got empty subscription rules."""))
return
fileName = self.rulesFileName()
QFile.remove(fileName)
f = QFile(fileName)
if not f.open(QIODevice.ReadWrite):
E5MessageBox.warning(
None,
self.tr("Downloading subscription rules"),
self.tr(
"""Unable to open adblock file '{0}' for writing.""")
                .format(fileName))
return
f.write(response)
f.close()
self.__lastUpdate = QDateTime.currentDateTime()
if self.__validateCheckSum(fileName):
self.__loadRules()
else:
QFile.remove(fileName)
self.__downloading = None
def __validateCheckSum(self, fileName):
"""
Private method to check the subscription file's checksum.
@param fileName name of the file containing the subscription (string)
@return flag indicating a valid file (boolean). A file is considered
valid, if the checksum is OK or the file does not contain a
checksum (i.e. cannot be checked).
"""
try:
f = open(fileName, "r", encoding="utf-8")
data = f.read()
f.close()
except (IOError, OSError):
return False
match = re.search(self.__checksumRe, data)
if match:
expectedChecksum = match.group(1)
else:
# consider it as valid
return True
# normalize the data
data = re.sub(r"\r", "", data) # normalize eol
data = re.sub(r"\n+", "\n", data) # remove empty lines
data = re.sub(self.__checksumRe, "", data) # remove checksum line
# calculate checksum
md5 = hashlib.md5()
md5.update(data.encode("utf-8"))
calculatedChecksum = base64.b64encode(md5.digest()).decode()\
.rstrip("=")
if calculatedChecksum == expectedChecksum:
return True
else:
res = E5MessageBox.yesNo(
None,
self.tr("Downloading subscription rules"),
self.tr(
"""<p>AdBlock subscription <b>{0}</b> has a wrong"""
""" checksum.<br/>"""
"""Found: {1}<br/>"""
"""Calculated: {2}<br/>"""
"""Use it anyway?</p>""")
.format(self.__title, expectedChecksum,
calculatedChecksum))
return res
def saveRules(self):
"""
Public method to save the subscription rules.
"""
fileName = self.rulesFileName()
if not fileName:
return
f = QFile(fileName)
if not f.open(QIODevice.ReadWrite | QIODevice.Truncate):
E5MessageBox.warning(
None,
self.tr("Saving subscription rules"),
self.tr(
"""Unable to open adblock file '{0}' for writing.""")
.format(fileName))
return
textStream = QTextStream(f)
if not self.__rules or not self.__rules[0].isHeader():
textStream << "[Adblock Plus 1.1.1]\n"
for rule in self.__rules:
textStream << rule.filter() << "\n"
def match(self, req, urlDomain, urlString):
"""
Public method to check the subscription for a matching rule.
@param req reference to the network request (QNetworkRequest)
@param urlDomain domain of the URL (string)
@param urlString URL (string)
@return reference to the rule object or None (AdBlockRule)
"""
for rule in self.__networkExceptionRules:
if rule.networkMatch(req, urlDomain, urlString):
return None
for rule in self.__networkBlockRules:
if rule.networkMatch(req, urlDomain, urlString):
return rule
return None
def adBlockDisabledForUrl(self, url):
"""
Public method to check, if AdBlock is disabled for the given URL.
@param url URL to check (QUrl)
@return flag indicating disabled state (boolean)
"""
for rule in self.__documentRules:
if rule.urlMatch(url):
return True
return False
def elemHideDisabledForUrl(self, url):
"""
Public method to check, if element hiding is disabled for the given
URL.
@param url URL to check (QUrl)
@return flag indicating disabled state (boolean)
"""
if self.adBlockDisabledForUrl(url):
return True
for rule in self.__elemhideRules:
if rule.urlMatch(url):
return True
return False
def elementHidingRules(self):
"""
Public method to get the element hiding rules.
@return element hiding rules (string)
"""
return self.__elementHidingRules
def elementHidingRulesForDomain(self, domain):
"""
Public method to get the element hiding rules for the given domain.
@param domain domain name (string)
@return element hiding rules (string)
"""
rules = ""
for rule in self.__domainRestrictedCssRules:
if rule.matchDomain(domain):
rules += rule.cssSelector() + ","
return rules
def rule(self, offset):
"""
Public method to get a specific rule.
@param offset offset of the rule (integer)
@return requested rule (AdBlockRule)
"""
if offset >= len(self.__rules):
return None
return self.__rules[offset]
def allRules(self):
"""
Public method to get the list of rules.
@return list of rules (list of AdBlockRule)
"""
return self.__rules[:]
def addRule(self, rule):
"""
Public method to add a rule.
@param rule reference to the rule to add (AdBlockRule)
@return offset of the rule (integer)
"""
self.__rules.append(rule)
self.__populateCache()
self.rulesChanged.emit()
return len(self.__rules) - 1
def removeRule(self, offset):
"""
Public method to remove a rule given the offset.
@param offset offset of the rule to remove (integer)
"""
if offset < 0 or offset > len(self.__rules):
return
del self.__rules[offset]
self.__populateCache()
self.rulesChanged.emit()
def replaceRule(self, rule, offset):
"""
Public method to replace a rule given the offset.
@param rule reference to the rule to set (AdBlockRule)
@param offset offset of the rule to remove (integer)
@return requested rule (AdBlockRule)
"""
if offset >= len(self.__rules):
return None
self.__rules[offset] = rule
self.__populateCache()
self.rulesChanged.emit()
return self.__rules[offset]
def __populateCache(self):
"""
Private method to populate the various rule caches.
"""
self.__networkExceptionRules = []
self.__networkBlockRules = []
self.__domainRestrictedCssRules = []
self.__elementHidingRules = ""
self.__documentRules = []
self.__elemhideRules = []
for rule in self.__rules:
if not rule.isEnabled():
continue
if rule.isCSSRule():
if rule.isDomainRestricted():
self.__domainRestrictedCssRules.append(rule)
else:
self.__elementHidingRules += rule.cssSelector() + ","
elif rule.isDocument():
self.__documentRules.append(rule)
elif rule.isElementHiding():
self.__elemhideRules.append(rule)
elif rule.isException():
self.__networkExceptionRules.append(rule)
else:
self.__networkBlockRules.append(rule)
def canEditRules(self):
"""
Public method to check, if rules can be edited.
@return flag indicating rules may be edited (boolean)
"""
return self.__custom
def canBeRemoved(self):
"""
Public method to check, if the subscription can be removed.
@return flag indicating removal is allowed (boolean)
"""
return not self.__custom and not self.__defaultSubscription
def setRuleEnabled(self, offset, enabled):
"""
Public method to enable a specific rule.
@param offset offset of the rule (integer)
@param enabled new enabled state (boolean)
@return reference to the changed rule (AdBlockRule)
"""
if offset >= len(self.__rules):
return None
rule = self.__rules[offset]
rule.setEnabled(enabled)
if rule.isCSSRule():
import Helpviewer.HelpWindow
self.__populateCache()
Helpviewer.HelpWindow.HelpWindow.mainWindow()\
.reloadUserStyleSheet()
return rule
| gpl-3.0 | -8,359,129,584,118,204,000 | 33.493036 | 78 | 0.519503 | false |
ariegg/webiopi-drivers | chips/sensor/ina219/ina219.py | 1 | 22877 | # Copyright 2017 Andreas Riegg - t-h-i-n-x.net
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Changelog
#
# 1.0 2017/01/03 Initial release
#
# Config parameters
#
# - slave 8 bit Value of the I2C slave address for the chip.
# Defaults to 0x40. Possible values are from 0x40 to 0x4F.
# - shunt Float Value of the shunt resistor in Ohms. Default is 0.1.
# - vrange Integer Vrange value of the chip. Valid values are 16 or 32.
# Default is 32.
# - gaindiv Integer Gain divider (PGA) value of the chip. Valid values
# are from (1, 2, 4 , 8). Default is 8.
# - mode Integer Value of the chip mode. Possible values are from
# 0x0 to 0x7. Default is 0x7.
# - badc Integer Value of the voltage bus ADC settings. Possible
# values are from 0x0 to 0xF. Default is 0x3.
# - sadc Integer Value of the shunt voltage ADC settings. Possible
# values are from 0x0 to 0xF. Default is 0x3.
# - vmax Float Value of the desired vmax value for automatic
# calibration. Default is None. This parameter will
#                            only be used if imax is also not None.
# - imax Float Value of the desired imax value for automatic
# calibration. Default is None. If imax is given,
# the values for vrange, gaindiv and currentLSB will be
# ignored and calculated instead. If imax is higher than
# possible, then the highest possible value will be
# used instead and overflow may occur.
# - currentLSB Float Value of the current LSB to use. Default is None.
# If you mistrust the automatic calibration you can
#                            set the current LSB manually with this parameter. If
#                            used, make sure to manually set the desired gaindiv also.
# - bus String Name of the I2C bus
#
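# Usage example (a minimal sketch; the parameter values below are illustrative and
# the accessor names getVolt(), getMilliampere() and getWatt() are assumed to be
# provided by the generic webiopi sensor abstractions):
#
#     ina = INA219(slave=0x40, shunt=0.1, vmax=16, imax=2.0)  # auto-calibrates
#     volts = ina.getVolt()             # bus voltage in V
#     milliamps = ina.getMilliampere()  # current in mA
#     watts = ina.getWatt()             # power in W
#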
# Usage remarks
#
# - The default values of this driver are valid for a 32 V Bus range, a maximum
# possible current of 3.2 A and a current resolution of around 98 microAmperes/Bit.
# If you are fine with this you can just use those defaults.
# - If you want to have some more configuration while keeping it still simple you
# can provide parameters for vmax and imax and the driver will do its best to
# automatically calculate vrange, gaindiv and calibration with a very good resolution.
# - If you prefer complete manual setup you should set vrange, gaindiv, currentLSB and
# optional fine-tuned calibration (in this order).
# - Setting the calibration register via setCalibration() is to be used for the final
# calibration as explained in the chip spec for the final fine tuning. It must not
# be used for the currentLSB setting as this is calculated automatically by this
# driver based on the values of shunt and gaindiv.
# - This driver implements an automatic calibration feature calibrate(vmax, imax)
# that can be used during device creation and also at runtime. The value for vmax
# is used to set vrange within the allowed limits. The value for imax is used to
# set gaindiv so that the maximal desired current can be measured at the highest
# possible resolution for current LSB. If the desired imax is higher than the
# possible imax based on the value of shunt, then the maximum possible imax will
#   be used. You get the chosen values via the response of the calibrate(...) call.
# In this case, sending a higher current through the shunt will result in overflow
# which will generate a debugging message (only when reading the bus voltage).
# - If values for vmax and imax are given at device creation they will override the
# init values for vrange and gaindiv as those will be ignored then and calculated via
# the automatic calibration feature instead.
# - All chip parameters with the exception of shunt can be changed at runtime. If
# an updated parameter has an influence on the currentLSB and/or calibration value,
# then this/these will be re-calculated automatically and the calibration register
# will be set also. If you use setCalibration() for final fine-tuning you have to
# repeat that step again if automatic calibration has taken place.
# - Updating of the mode value at runtime allows triggered conversions and power-down
# of the chip.
# - If you are unsure about the calculated values set debugging to "True" and look at
# the debugging messages as they will notify you about all resulting values. Or
# call getConfiguration() to see all values.
# - If you encounter overflow (getting the overflow error) try to increase the
# gaindiv value or reduce the shunt value (please as real hardware change).
#
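# Runtime calibration sketch (illustrative; the exact REST path depends on the
# device name configured in WebIOPi and is an assumption here):
#
#     ina.calibrate("32,3.2")    # pars string is "vmax,imax"
#     ina.getConfiguration()     # inspect resulting gaindiv, current LSB, calibration
#
#   or via REST: POST /devices/<name>/run/calibrate/32,3.2
#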
# Implementation remarks
#
# - This driver is implemented based on the specs from Texas Instruments.
# - The default value for the shunt resistor of 0.1 Ohms is appropriate for the
# breakout board from Adafruit for this chip (Adafruit PRODUCT ID: 904).
# - The parameter value for shunt can't be changed at runtime after device
# creation because it is very unlikely to modify the shunt resistor during operation
# of the chip. Please provide the correct value via the config options or at
# device creation if the default value does not suit your hardware setup.
# - This driver uses floating point calculation and takes no care about integer
# only arithmetics. For that reason, the mathematical lowest possible LSB value is
#   calculated automatically and used for best resolution, except when you
#   manually set your own current LSB value.
# - If you want to override/select the current LSB value manually you can do that
#   via config parameter or at runtime. In this case make sure to use the correct
#   corresponding gaindiv value, otherwise the value readings will be wrong.
# - If for some reason (e.g. an inappropriate setting of the currentLSB) the value
# of the calibration register would be out of its allowed bounds it will be set
# to zero so that all current and power readings will also be zero to avoid wrong
# measurements until the calibration register is set again to an allowed range.
# - This driver does not use the shunt adc register as this value is not needed
# for operation if the calibration register is used.
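# - Worked example of the internal math with the defaults (shunt=0.1 Ohm,
#   gaindiv=8): currentLSB = 0.04 * gaindiv / shunt / 2**15
#   = 0.04 * 8 / 0.1 / 32768 ~= 97.7 uA/bit, and
#   calibration = trunc(0.04096 / currentLSB / shunt) = 4194, which matches the
#   "around 98 microAmperes/Bit" resolution mentioned in the usage remarks.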
#
from webiopi.utils.logger import debug
from webiopi.decorators.rest import request, response, api
from webiopi.utils.types import toint, signInteger, M_JSON
from webiopi.devices.i2c import I2C
from webiopi.devices.sensor import Current, Voltage, Power
#---------- Class definition ----------
class INA219(I2C, Current, Voltage, Power):
CONFIGURATION_ADDRESS = 0x00
#SHUNTADC_ADDRESS = 0x01
BUSADC_ADDRESS = 0x02
POWER_ADDRESS = 0x03
CURRENT_ADDRESS = 0x04
CALIBRATION_ADDRESS = 0x05
RESET_FLAG = 0b1 << 15
BRNG_16_VALUE = 0b0 << 13
BRNG_32_VALUE = 0b1 << 13
BRNG_MASK = 0b0010000000000000
GAINDIV_1_VALUE = 0b00 << 11
GAINDIV_2_VALUE = 0b01 << 11
GAINDIV_4_VALUE = 0b10 << 11
GAINDIV_8_VALUE = 0b11 << 11
GAINDIV_MASK = 0b0001100000000000
BADC_MASK = 0b0000011110000000
SADC_MASK = 0b0000000001111000
MODE_MASK = 0b0000000000000111
OVERFLOW_MASK = 0b0000000000000001
CALIBRATION_MASK = 0b1111111111111110
VSHUNT_FULL_SCALE_BASE_VALUE = 0.04 # always fixed to 40mV
CALIBRATION_CONSTANT_VALUE = 0.04096 # fixed value from data sheet
BUS_VOLTAGE_LSB_VALUE = 0.004 # always fixed to 4mV
CURRENT_LSB_TO_POWER_LSB_VALUE = 20 # always 20 times the currentLSB value
#---------- Class initialisation ----------
def __init__(self, slave=0x40, shunt=0.1, vrange=32, gaindiv=8, mode=0x7, badc=0x3, sadc=0x3, vmax=None, imax=None, currentLSB=None, bus=None):
I2C.__init__(self, toint(slave), bus)
self.__setShunt__(float(shunt))
self.__reset__()
if imax != None:
if vmax == None:
vmax = toint(vrange)
else:
vmax = float(vmax)
imax = float(imax)
self.__calibrate__(vmax, imax)
else:
self.__setVrange__(toint(vrange))
self.__setGaindiv__(toint(gaindiv))
if currentLSB != None:
self.__setCurrentLSB__(float(currentLSB))
self.__setMode__(toint(mode))
self.__setBadc__(toint(badc))
self.__setSadc__(toint(sadc))
#---------- Abstraction framework contracts ----------
def __str__(self):
return "INA219(slave=0x%02X, dev=%s, shunt=%f Ohm)" % (self.slave, self.device(), self._shunt)
def __family__(self):
return [Current.__family__(self), Voltage.__family__(self), Power.__family__(self)]
#---------- Current abstraction related methods ----------
def __getMilliampere__(self):
rawCurrent = self.__read16BitRegister__(self.CURRENT_ADDRESS)
debug("%s: raw current=%s" % (self.__str__(), bin(rawCurrent)))
return signInteger(rawCurrent, 16) * self._currentLSB * 1000 # scale from Amperes to milliAmperes
#---------- Voltage abstraction related methods ----------
def __getVolt__(self):
rawVoltage = self.__read16BitRegister__(self.BUSADC_ADDRESS)
debug("%s: raw voltage=%s" % (self.__str__(), bin(rawVoltage)))
overflow = rawVoltage & self.OVERFLOW_MASK
if overflow:
debug("%s: overflow condition" % self.__str__())
return (rawVoltage >> 3) * self.BUS_VOLTAGE_LSB_VALUE
#---------- Power abstraction related methods ----------
def __getWatt__(self):
rawWatt = self.__read16BitRegister__(self.POWER_ADDRESS)
debug("%s: raw watt=%s" % (self.__str__(), bin(rawWatt)))
return rawWatt * self.CURRENT_LSB_TO_POWER_LSB_VALUE * self._currentLSB
#---------- Device methods that implement features including additional REST mappings ----------
@api("Device", 3, "feature", "driver")
@request("POST", "run/calibrate/%(pars)s")
@response(contentType=M_JSON)
def calibrate(self, pars):
(vmax, imax) = pars.split(",")
vmax = float(vmax)
if vmax <= 0 or vmax > 32:
raise ValueError("Calibration parameter error, vmax:%f out of allowed range [0 < vmax <= 32]" % vmax)
imax = float(imax)
self.__calibrate__(vmax, imax)
values = self.getConfiguration()
values["vmax required"] = "%f" % vmax
values["imax required"] = "%f" % imax
return values
def __calibrate__(self, vmax, imax):
if vmax > 16:
self.setVrange(32)
else:
self.setVrange(16)
gaindiv = 1
shuntdiv = 1 / self._shunt
while True:
imaxpossible = self.__calculateImaxpossible__(gaindiv, shuntdiv)
if gaindiv == 8:
break
if imax > imaxpossible:
gaindiv *= 2
else:
break
self.setGaindiv(gaindiv)
debug("%s: auto-calibrated, max possible current=%f A" % (self.__str__(), imaxpossible))
@api("Device", 3, "feature", "driver")
@request("POST", "run/reset")
@response("%s")
def reset(self):
self.__reset__()
return "Chip is reset."
def __reset__(self):
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, self.RESET_FLAG)
debug("%s: chip reset" % self.__str__())
@api("Device", 3, "feature", "driver")
@request("POST", "run/recalibrate")
@response("%d")
def reCalibrate(self):
self.__reCalibrate__()
return self.__getCalibration__()
#---------- Device methods that implement chip configuration settings including additional REST mappings ----------
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/*")
@response(contentType=M_JSON)
def getConfiguration(self):
values = {}
values["vmax possible"] = "%d" % self._vrange
values["imax possible"] = "%f" % self.__calculateImaxpossible__(self._gaindiv, 1 / self._shunt)
values["current LSB"] = "%f" % self._currentLSB
values["calibration"] = "%d" % self._cal
values["gaindiv"] = "%d" % self._gaindiv
values["shunt"] = "%f" % self._shunt
return values
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/calibration")
@response("%d")
def getCalibration(self):
return self.__getCalibration__()
def __getCalibration__(self):
return self.__read16BitRegister__(self.CALIBRATION_ADDRESS)
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/calibration/%(calibration)d")
@response("%d")
def setCalibration(self, calibration):
self.__setCalibration__(calibration)
return self.__getCalibration__()
def __setCalibration__(self, calibration):
if calibration not in range(0, 65535):
self.__write16BitRegister__(self.CALIBRATION_ADDRESS, 0) # zero out calibration register to avoid wrong measurements
self._cal = 0
debug("%s: set calibration=0" % self.__str__())
raise ValueError("Parameter calibration:%d not in the allowed range [0 .. 65534]" % calibration)
calibration = calibration & self.CALIBRATION_MASK
self.__write16BitRegister__(self.CALIBRATION_ADDRESS, calibration)
self._cal = calibration
debug("%s: set calibration=%d" % (self.__str__(), self._cal))
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/vrange/%(vrange)d")
@response("%d")
def setVrange(self, vrange):
self.__setVrange__(vrange)
return self.__getVrange__()
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/vrange")
@response("%d")
def getVrange(self):
return self.__getVrange__()
def __setVrange__(self, vrange):
if vrange not in (16, 32):
raise ValueError("Parameter vrange:%d not one of the allowed values (16, 32)" % vrange)
if vrange == 16:
bitsVrange = self.BRNG_16_VALUE
elif vrange == 32:
bitsVrange = self.BRNG_32_VALUE
currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
newValue = (currentValue & ~self.BRNG_MASK) | bitsVrange
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
self._vrange = vrange
debug("%s: set vrange=%d V" % (self.__str__(), vrange))
def __getVrange__(self):
bitsVrange = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.BRNG_MASK) >> 13
if bitsVrange == self.BRNG_16_VALUE:
self._vrange = 16
elif bitsVrange == self.BRNG_32_VALUE:
self._vrange = 32
return self._vrange
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/gaindiv/%(gaindiv)d")
@response("%d")
def setGaindiv(self, gaindiv):
self.__setGaindiv__(gaindiv)
return self.__getGaindiv__()
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/gaindiv")
@response("%d")
def getGaindiv(self):
return self.__getGaindiv__()
def __setGaindiv__(self, gaindiv):
if gaindiv not in (1, 2, 4, 8):
raise ValueError("Parameter gaindiv:%d not one of the allowed values (1, 2, 4, 8)" % gaindiv)
if gaindiv == 1:
bitsGaindiv = self.GAINDIV_1_VALUE
elif gaindiv == 2:
bitsGaindiv = self.GAINDIV_2_VALUE
elif gaindiv == 4:
bitsGaindiv = self.GAINDIV_4_VALUE
elif gaindiv == 8:
bitsGaindiv = self.GAINDIV_8_VALUE
currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
newValue = (currentValue & ~self.GAINDIV_MASK) | bitsGaindiv
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
self._gaindiv = gaindiv
debug("%s: set gaindiv=%d" % (self.__str__(), gaindiv))
self.__reCalculate__()
def __getGaindiv__(self):
bitsGaindiv = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.GAINDIV_MASK) >> 11
if bitsGaindiv == self.GAINDIV_1_VALUE:
self._gaindiv = 1
elif bitsGaindiv == self.GAINDIV_2_VALUE:
self._gaindiv = 2
elif bitsGaindiv == self.GAINDIV_4_VALUE:
self._gaindiv = 4
elif bitsGaindiv == self.GAINDIV_8_VALUE:
self._gaindiv = 8
return self._gaindiv
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/mode/%(mode)d")
@response("%d")
def setMode(self, mode):
self.__setMode__(mode)
return self.__getMode__()
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/mode")
@response("%d")
def getMode(self):
return self.__getMode__()
def __setMode__(self, mode):
if mode not in range(0, 0x8):
raise ValueError("Parameter mode:0x%1X not in the allowed range [0x0 .. 0x7]" % mode)
currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
newValue = (currentValue & ~self.MODE_MASK) | mode
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
debug("%s: set mode=0x%1X" % (self.__str__(), mode))
def __getMode__(self):
bitsMode = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.MODE_MASK)
return bitsMode
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/badc/%(badc)d")
@response("%d")
def setBadc(self, badc):
self.__setBadc__(badc)
return self.__getBadc__()
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/badc")
@response("%d")
def getBadc(self):
return self.__getBadc__()
def __setBadc__(self, badc):
if badc not in range(0, 0x10):
raise ValueError("Parameter badc:0x%1X not in the allowed range [0x0 .. 0xF]" % badc)
currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
newValue = (currentValue & ~self.BADC_MASK) | badc << 7
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
debug("%s: set badc=0x%1X" % (self.__str__(), badc))
def __getBadc__(self):
bitsBadc = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.BADC_MASK) >> 7
return bitsBadc
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/sadc/%(sadc)d")
@response("%d")
def setSadc(self, sadc):
self.__setSadc__(sadc)
return self.__getSadc__()
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/sadc")
@response("%d")
def getSadc(self):
return self.__getSadc__()
def __setSadc__(self, sadc):
if sadc not in range(0, 0x10):
raise ValueError("Parameter sadc:0x%1X not in the allowed range [0x0 .. 0xF]" % sadc)
currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
newValue = (currentValue & ~self.SADC_MASK) | sadc << 3
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
debug("%s: set sadc=0x%1X" % (self.__str__(), sadc))
def __getSadc__(self):
bitsSadc = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.SADC_MASK) >> 3
return bitsSadc
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/currentlsb/%(currentLSB)f")
@response("%f")
def setCurrentLSB(self, currentLSB):
self.__setCurrentLSB__(currentLSB)
return self._currentLSB
#---------- Device methods that implement chip configuration settings ----------
def __setShunt__(self, shunt):
self._shunt = shunt
def __setCurrentLSB__(self, currentLSB):
self._currentLSB = currentLSB
debug("%s: set current LSB=%f mA" % (self.__str__(), self._currentLSB * 1000))
self.__setCalibration__(self.__calculateCalibration__())
#---------- Calibration helper methods ----------
def __reCalculate__(self):
self.__setCurrentLSB__(self.__calculateCurrentLSB__())
def __reCalibrate__(self):
self.__setCalibration__(self._cal)
def __calculateCurrentLSB__(self):
calCurrentLSB = self.VSHUNT_FULL_SCALE_BASE_VALUE * self._gaindiv / self._shunt / 2**15 # in Amperes
debug("%s: calculated current LSB=%f mA" % (self.__str__(), calCurrentLSB * 1000))
return calCurrentLSB
def __calculateCalibration__(self):
calCal = int(self.CALIBRATION_CONSTANT_VALUE / self._currentLSB / self._shunt) # this does trunc
debug("%s: calculated calibration=%d" % (self.__str__(), calCal))
return calCal
def __calculateImaxpossible__(self, gaindiv, shuntdiv):
return self.VSHUNT_FULL_SCALE_BASE_VALUE * gaindiv * shuntdiv
#---------- Register helper methods ----------
def __read16BitRegister__(self, addr):
regBytes = self.readRegisters(addr, 2)
return regBytes[0] << 8 | regBytes[1]
def __write16BitRegister__(self, addr, word):
data = bytearray(2)
data[0] = (word >> 8) & 0xFF
data[1] = word & 0xFF
self.writeRegisters(addr , data)
| apache-2.0 | -8,523,589,605,352,649,000 | 42.856863 | 147 | 0.604362 | false |
mhorn71/StarbaseMini | instument_builder/builder.py | 1 | 1671 | __author__ = 'mark'
# StarbaseMini Staribus/Starinet Client for the British Astronomical Association Staribus Protocol
# Copyright (C) 2015 Mark Horn
#
# This file is part of StarbaseMini.
#
# StarbaseMini is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# StarbaseMini is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with StarbaseMini. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
from PyQt5 import QtWidgets
from ui import Ui_InstrumentBuilderDialog
logger = logging.getLogger('instrument.builder')
class InstrumentBuilder(QtWidgets.QDialog, Ui_InstrumentBuilderDialog):
def __init__(self):
QtWidgets.QDialog.__init__(self)
self.setupUi(self)
# Style sheets
stylebool = False
if sys.platform.startswith('darwin'):
stylesheet = 'css/macStyle.css'
stylebool = True
elif sys.platform.startswith('win32'):
stylesheet = 'css/winStyle.css'
stylebool = True
elif sys.platform.startswith('linux'):
stylesheet = 'css/nixStyle.css'
stylebool = True
if stylebool:
with open(stylesheet, 'r') as style:
self.setStyleSheet(style.read()) | gpl-2.0 | -8,834,807,194,172,752,000 | 32.44 | 98 | 0.692998 | false |
mganeva/mantid | Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/SANSILLReductionTest.py | 1 | 6050 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.api import MatrixWorkspace
from mantid.simpleapi import SANSILLReduction, config, mtd
class SANSILLReductionTest(unittest.TestCase):
_facility = None
def setUp(self):
self._facility = config['default.facility']
config.appendDataSearchSubDir('ILL/D11/')
config.appendDataSearchSubDir('ILL/D33/')
config['default.facility'] = 'ILL'
def tearDown(self):
config['default.facility'] = self._facility
mtd.clear()
def test_absorber(self):
SANSILLReduction(Run='010462', ProcessAs='Absorber', OutputWorkspace='Cd')
self._check_output(mtd['Cd'], True, 1, 128*128)
self._check_process_flag(mtd['Cd'], 'Absorber')
def test_beam(self):
SANSILLReduction(Run='010414', ProcessAs='Beam', OutputWorkspace='Db', FluxOutputWorkspace='Fl')
self._check_output(mtd['Db'], True, 1, 128*128)
self._check_process_flag(mtd['Db'], 'Beam')
run = mtd['Db'].getRun()
self.assertAlmostEqual(run.getLogData('BeamCenterX').value, -0.0048, delta=1e-4)
self.assertAlmostEqual(run.getLogData('BeamCenterY').value, -0.0027, delta=1e-4)
self._check_output(mtd['Fl'], False, 1, 128*128)
self._check_process_flag(mtd['Fl'], 'Beam')
self.assertAlmostEqual(mtd['Fl'].readY(0)[0], 6628249, delta=1)
self.assertAlmostEqual(mtd['Fl'].readE(0)[0], 8566, delta=1)
def test_transmission(self):
SANSILLReduction(Run='010414', ProcessAs='Beam', OutputWorkspace='Db')
SANSILLReduction(Run='010585', ProcessAs='Transmission', BeamInputWorkspace='Db', OutputWorkspace='Tr')
self.assertAlmostEqual(mtd['Tr'].readY(0)[0], 0.640, delta=1e-3)
self.assertAlmostEqual(mtd['Tr'].readE(0)[0], 0.0019, delta=1e-4)
self._check_process_flag(mtd['Tr'], 'Transmission')
def test_container(self):
SANSILLReduction(Run='010460', ProcessAs='Container', OutputWorkspace='can')
self._check_output(mtd['can'], True, 1, 128*128)
self._check_process_flag(mtd['can'], 'Container')
def test_reference(self):
SANSILLReduction(Run='010453', ProcessAs='Reference', SensitivityOutputWorkspace='sens', OutputWorkspace='water')
self._check_output(mtd['water'], True, 1, 128*128)
self._check_output(mtd['sens'], False, 1, 128*128)
self._check_process_flag(mtd['water'], 'Reference')
self._check_process_flag(mtd['sens'], 'Sensitivity')
def test_sample(self):
SANSILLReduction(Run='010569', ProcessAs='Sample', OutputWorkspace='sample')
self._check_output(mtd['sample'], True, 1, 128*128)
self._check_process_flag(mtd['sample'], 'Sample')
def test_absorber_tof(self):
# D33 VTOF
# actually this is a container run, not an absorber, but is fine for this test
SANSILLReduction(Run='093409', ProcessAs='Absorber', OutputWorkspace='absorber')
self._check_output(mtd['absorber'], True, 30, 256*256)
self._check_process_flag(mtd['absorber'], 'Absorber')
def test_beam_tof(self):
# D33 VTOF
SANSILLReduction(Run='093406', ProcessAs='Beam', OutputWorkspace='beam', FluxOutputWorkspace='flux')
self._check_output(mtd['beam'], True, 30, 256*256)
self._check_process_flag(mtd['beam'], 'Beam')
run = mtd['beam'].getRun()
self.assertAlmostEqual(run.getLogData('BeamCenterX').value, -0.0025, delta=1e-4)
self.assertAlmostEqual(run.getLogData('BeamCenterY').value, 0.0009, delta=1e-4)
self._check_output(mtd['flux'], False, 30, 256*256)
self._check_process_flag(mtd['flux'], 'Beam')
def test_transmission_tof(self):
# D33 VTOF
SANSILLReduction(Run='093406', ProcessAs='Beam', OutputWorkspace='beam')
SANSILLReduction(Run='093407', ProcessAs='Transmission', BeamInputWorkspace='beam', OutputWorkspace='ctr')
self._check_output(mtd['ctr'], False, 75, 1)
def test_container_tof(self):
# D33 VTOF
# this is actually a sample run, not water, but is fine for this test
SANSILLReduction(Run='093410', ProcessAs='Reference', OutputWorkspace='ref')
self._check_output(mtd['ref'], True, 30, 256*256)
self._check_process_flag(mtd['ref'], 'Reference')
def test_sample_tof(self):
# D33 VTOF, Pluronic F127
SANSILLReduction(Run='093410', ProcessAs='Sample', OutputWorkspace='sample')
self._check_output(mtd['sample'], True, 30, 256*256)
self._check_process_flag(mtd['sample'], 'Sample')
def _check_process_flag(self, ws, value):
self.assertTrue(ws.getRun().getLogData('ProcessedAs').value, value)
def _check_output(self, ws, logs, blocksize, spectra):
self.assertTrue(ws)
self.assertTrue(isinstance(ws, MatrixWorkspace))
self.assertTrue(ws.isHistogramData())
self.assertTrue(not ws.isDistribution())
self.assertEqual(ws.getAxis(0).getUnit().unitID(), "Wavelength")
self.assertEqual(ws.blocksize(), blocksize)
self.assertEqual(ws.getNumberHistograms(), spectra)
self.assertTrue(ws.getInstrument())
self.assertTrue(ws.getRun())
self.assertTrue(ws.getHistory())
if logs:
self.assertTrue(ws.getRun().hasProperty('qmin'))
self.assertTrue(ws.getRun().hasProperty('qmax'))
self.assertTrue(ws.getRun().hasProperty('l2'))
self.assertTrue(ws.getRun().hasProperty('pixel_height'))
self.assertTrue(ws.getRun().hasProperty('pixel_width'))
self.assertTrue(ws.getRun().hasProperty('collimation.actual_position'))
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 7,514,569,693,448,580,000 | 45.899225 | 121 | 0.652231 | false |
openstack/tempest | tempest/cmd/cleanup_service.py | 1 | 39207 | # Copyright 2015 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from urllib import parse as urllib
from oslo_log import log as logging
from tempest import clients
from tempest.common import credentials_factory as credentials
from tempest.common import identity
from tempest.common import utils
from tempest.common.utils import net_info
from tempest import config
from tempest.lib import exceptions
LOG = logging.getLogger('tempest.cmd.cleanup')
CONF = config.CONF
CONF_FLAVORS = None
CONF_IMAGES = None
CONF_NETWORKS = []
CONF_PRIV_NETWORK_NAME = None
CONF_PUB_NETWORK = None
CONF_PUB_ROUTER = None
CONF_PROJECTS = None
CONF_USERS = None
IS_CINDER = None
IS_GLANCE = None
IS_NEUTRON = None
IS_NOVA = None
def init_conf():
global CONF_FLAVORS
global CONF_IMAGES
global CONF_NETWORKS
global CONF_PRIV_NETWORK
global CONF_PRIV_NETWORK_NAME
global CONF_PUB_NETWORK
global CONF_PUB_ROUTER
global CONF_PROJECTS
global CONF_USERS
global IS_CINDER
global IS_GLANCE
global IS_HEAT
global IS_NEUTRON
global IS_NOVA
IS_CINDER = CONF.service_available.cinder
IS_GLANCE = CONF.service_available.glance
IS_NEUTRON = CONF.service_available.neutron
IS_NOVA = CONF.service_available.nova
CONF_FLAVORS = [CONF.compute.flavor_ref, CONF.compute.flavor_ref_alt]
CONF_IMAGES = [CONF.compute.image_ref, CONF.compute.image_ref_alt]
CONF_PRIV_NETWORK_NAME = CONF.compute.fixed_network_name
CONF_PUB_NETWORK = CONF.network.public_network_id
CONF_PUB_ROUTER = CONF.network.public_router_id
CONF_PROJECTS = [CONF.auth.admin_project_name]
CONF_USERS = [CONF.auth.admin_username]
if IS_NEUTRON:
CONF_PRIV_NETWORK = _get_network_id(CONF.compute.fixed_network_name,
CONF.auth.admin_project_name)
CONF_NETWORKS = [CONF_PUB_NETWORK, CONF_PRIV_NETWORK]
def _get_network_id(net_name, project_name):
am = clients.Manager(
credentials.get_configured_admin_credentials())
net_cl = am.networks_client
pr_cl = am.projects_client
networks = net_cl.list_networks()
project = identity.get_project_by_name(pr_cl, project_name)
p_id = project['id']
n_id = None
for net in networks['networks']:
if (net['project_id'] == p_id and net['name'] == net_name):
n_id = net['id']
break
return n_id
class BaseService(object):
def __init__(self, kwargs):
self.client = None
for key, value in kwargs.items():
setattr(self, key, value)
self.tenant_filter = {}
if hasattr(self, 'tenant_id'):
self.tenant_filter['project_id'] = self.tenant_id
def _filter_by_tenant_id(self, item_list):
if (item_list is None or
not item_list or
not hasattr(self, 'tenant_id') or
self.tenant_id is None or
'tenant_id' not in item_list[0]):
return item_list
return [item for item in item_list
if item['tenant_id'] == self.tenant_id]
def list(self):
pass
def delete(self):
pass
def dry_run(self):
pass
def save_state(self):
pass
def run(self):
try:
if self.is_dry_run:
self.dry_run()
elif self.is_save_state:
self.save_state()
else:
self.delete()
except exceptions.NotImplemented as exc:
# Many OpenStack services use extensions logic to implement the
# features or resources. Tempest cleanup tries to clean up the test
# resources without having much logic of extensions checks etc.
# If any of the extension is missing then, service will return
# NotImplemented error.
msg = ("Got NotImplemented error in %s, full exception: %s" %
(str(self.__class__), str(exc)))
LOG.exception(msg)
self.got_exceptions.append(exc)
class SnapshotService(BaseService):
def __init__(self, manager, **kwargs):
super(SnapshotService, self).__init__(kwargs)
self.client = manager.snapshots_client_latest
def list(self):
client = self.client
snaps = client.list_snapshots()['snapshots']
if not self.is_save_state:
# recreate list removing saved snapshots
snaps = [snap for snap in snaps if snap['id']
not in self.saved_state_json['snapshots'].keys()]
LOG.debug("List count, %s Snapshots", len(snaps))
return snaps
def delete(self):
snaps = self.list()
client = self.client
for snap in snaps:
try:
LOG.debug("Deleting Snapshot with id %s", snap['id'])
client.delete_snapshot(snap['id'])
except Exception:
LOG.exception("Delete Snapshot %s exception.", snap['id'])
def dry_run(self):
snaps = self.list()
self.data['snapshots'] = snaps
def save_state(self):
snaps = self.list()
self.data['snapshots'] = {}
for snap in snaps:
self.data['snapshots'][snap['id']] = snap['name']
class ServerService(BaseService):
def __init__(self, manager, **kwargs):
super(ServerService, self).__init__(kwargs)
self.client = manager.servers_client
self.server_groups_client = manager.server_groups_client
def list(self):
client = self.client
servers_body = client.list_servers()
servers = servers_body['servers']
if not self.is_save_state:
# recreate list removing saved servers
servers = [server for server in servers if server['id']
not in self.saved_state_json['servers'].keys()]
LOG.debug("List count, %s Servers", len(servers))
return servers
def delete(self):
client = self.client
servers = self.list()
for server in servers:
try:
LOG.debug("Deleting Server with id %s", server['id'])
client.delete_server(server['id'])
except Exception:
LOG.exception("Delete Server %s exception.", server['id'])
def dry_run(self):
servers = self.list()
self.data['servers'] = servers
def save_state(self):
servers = self.list()
self.data['servers'] = {}
for server in servers:
self.data['servers'][server['id']] = server['name']
class ServerGroupService(ServerService):
def list(self):
client = self.server_groups_client
sgs = client.list_server_groups()['server_groups']
if not self.is_save_state:
# recreate list removing saved server_groups
sgs = [sg for sg in sgs if sg['id']
not in self.saved_state_json['server_groups'].keys()]
LOG.debug("List count, %s Server Groups", len(sgs))
return sgs
def delete(self):
client = self.server_groups_client
sgs = self.list()
for sg in sgs:
try:
LOG.debug("Deleting Server Group with id %s", sg['id'])
client.delete_server_group(sg['id'])
except Exception:
LOG.exception("Delete Server Group %s exception.", sg['id'])
def dry_run(self):
sgs = self.list()
self.data['server_groups'] = sgs
def save_state(self):
sgs = self.list()
self.data['server_groups'] = {}
for sg in sgs:
self.data['server_groups'][sg['id']] = sg['name']
class KeyPairService(BaseService):
def __init__(self, manager, **kwargs):
super(KeyPairService, self).__init__(kwargs)
self.client = manager.keypairs_client
def list(self):
client = self.client
keypairs = client.list_keypairs()['keypairs']
if not self.is_save_state:
# recreate list removing saved keypairs
keypairs = [keypair for keypair in keypairs
if keypair['keypair']['name']
not in self.saved_state_json['keypairs'].keys()]
LOG.debug("List count, %s Keypairs", len(keypairs))
return keypairs
def delete(self):
client = self.client
keypairs = self.list()
for k in keypairs:
name = k['keypair']['name']
try:
LOG.debug("Deleting keypair %s", name)
client.delete_keypair(name)
except Exception:
LOG.exception("Delete Keypair %s exception.", name)
def dry_run(self):
keypairs = self.list()
self.data['keypairs'] = keypairs
def save_state(self):
keypairs = self.list()
self.data['keypairs'] = {}
for keypair in keypairs:
keypair = keypair['keypair']
self.data['keypairs'][keypair['name']] = keypair
class VolumeService(BaseService):
def __init__(self, manager, **kwargs):
super(VolumeService, self).__init__(kwargs)
self.client = manager.volumes_client_latest
def list(self):
client = self.client
vols = client.list_volumes()['volumes']
if not self.is_save_state:
# recreate list removing saved volumes
vols = [vol for vol in vols if vol['id']
not in self.saved_state_json['volumes'].keys()]
LOG.debug("List count, %s Volumes", len(vols))
return vols
def delete(self):
client = self.client
vols = self.list()
for v in vols:
try:
LOG.debug("Deleting volume with id %s", v['id'])
client.delete_volume(v['id'])
except Exception:
LOG.exception("Delete Volume %s exception.", v['id'])
def dry_run(self):
vols = self.list()
self.data['volumes'] = vols
def save_state(self):
vols = self.list()
self.data['volumes'] = {}
for vol in vols:
self.data['volumes'][vol['id']] = vol['name']
class VolumeQuotaService(BaseService):
def __init__(self, manager, **kwargs):
super(VolumeQuotaService, self).__init__(kwargs)
self.client = manager.volume_quotas_client_latest
def delete(self):
client = self.client
try:
LOG.debug("Deleting Volume Quotas for project with id %s",
self.project_id)
client.delete_quota_set(self.project_id)
except Exception:
LOG.exception("Delete Volume Quotas exception for 'project %s'.",
self.project_id)
def dry_run(self):
quotas = self.client.show_quota_set(
self.project_id, params={'usage': True})['quota_set']
self.data['volume_quotas'] = quotas
class NovaQuotaService(BaseService):
def __init__(self, manager, **kwargs):
super(NovaQuotaService, self).__init__(kwargs)
self.client = manager.quotas_client
self.limits_client = manager.limits_client
def delete(self):
client = self.client
try:
LOG.debug("Deleting Nova Quotas for project with id %s",
self.project_id)
client.delete_quota_set(self.project_id)
except Exception:
LOG.exception("Delete Nova Quotas exception for 'project %s'.",
self.project_id)
def dry_run(self):
client = self.limits_client
quotas = client.show_limits()['limits']
self.data['compute_quotas'] = quotas['absolute']
class NetworkQuotaService(BaseService):
def __init__(self, manager, **kwargs):
super(NetworkQuotaService, self).__init__(kwargs)
self.client = manager.network_quotas_client
def delete(self):
client = self.client
try:
LOG.debug("Deleting Network Quotas for project with id %s",
self.project_id)
client.reset_quotas(self.project_id)
except Exception:
LOG.exception("Delete Network Quotas exception for 'project %s'.",
self.project_id)
def dry_run(self):
resp = [quota for quota in self.client.list_quotas()['quotas']
if quota['project_id'] == self.project_id]
self.data['network_quotas'] = resp
# Begin network service classes
class BaseNetworkService(BaseService):
def __init__(self, manager, **kwargs):
super(BaseNetworkService, self).__init__(kwargs)
self.networks_client = manager.networks_client
self.subnets_client = manager.subnets_client
self.ports_client = manager.ports_client
self.floating_ips_client = manager.floating_ips_client
self.metering_labels_client = manager.metering_labels_client
self.metering_label_rules_client = manager.metering_label_rules_client
self.security_groups_client = manager.security_groups_client
self.routers_client = manager.routers_client
self.subnetpools_client = manager.subnetpools_client
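    # Drop items attached to the networks declared in tempest.conf so that
    # preserve mode never touches pre-configured resources.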
def _filter_by_conf_networks(self, item_list):
if not item_list or not all(('network_id' in i for i in item_list)):
return item_list
return [item for item in item_list if item['network_id']
not in CONF_NETWORKS]
class NetworkService(BaseNetworkService):
def list(self):
client = self.networks_client
networks = client.list_networks(**self.tenant_filter)
networks = networks['networks']
if not self.is_save_state:
# recreate list removing saved networks
networks = [network for network in networks if network['id']
not in self.saved_state_json['networks'].keys()]
# filter out networks declared in tempest.conf
if self.is_preserve:
networks = [network for network in networks
if network['id'] not in CONF_NETWORKS]
LOG.debug("List count, %s Networks", len(networks))
return networks
def delete(self):
client = self.networks_client
networks = self.list()
for n in networks:
try:
LOG.debug("Deleting Network with id %s", n['id'])
client.delete_network(n['id'])
except Exception:
LOG.exception("Delete Network %s exception.", n['id'])
def dry_run(self):
networks = self.list()
self.data['networks'] = networks
def save_state(self):
networks = self.list()
self.data['networks'] = {}
for network in networks:
self.data['networks'][network['id']] = network
class NetworkFloatingIpService(BaseNetworkService):
def list(self):
client = self.floating_ips_client
flips = client.list_floatingips(**self.tenant_filter)
flips = flips['floatingips']
if not self.is_save_state:
# recreate list removing saved flips
flips = [flip for flip in flips if flip['id']
not in self.saved_state_json['floatingips'].keys()]
LOG.debug("List count, %s Network Floating IPs", len(flips))
return flips
def delete(self):
client = self.floating_ips_client
flips = self.list()
for flip in flips:
try:
LOG.debug("Deleting Network Floating IP with id %s",
flip['id'])
client.delete_floatingip(flip['id'])
except Exception:
LOG.exception("Delete Network Floating IP %s exception.",
flip['id'])
def dry_run(self):
flips = self.list()
self.data['floatingips'] = flips
def save_state(self):
flips = self.list()
self.data['floatingips'] = {}
for flip in flips:
self.data['floatingips'][flip['id']] = flip
class NetworkRouterService(BaseNetworkService):
def list(self):
client = self.routers_client
routers = client.list_routers(**self.tenant_filter)
routers = routers['routers']
if not self.is_save_state:
# recreate list removing saved routers
routers = [router for router in routers if router['id']
not in self.saved_state_json['routers'].keys()]
if self.is_preserve:
routers = [router for router in routers
if router['id'] != CONF_PUB_ROUTER]
LOG.debug("List count, %s Routers", len(routers))
return routers
def delete(self):
client = self.routers_client
ports_client = self.ports_client
routers = self.list()
for router in routers:
rid = router['id']
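            # Router interface ports must be detached before the router
            # itself can be deleted.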
ports = [port for port
in ports_client.list_ports(device_id=rid)['ports']
if net_info.is_router_interface_port(port)]
for port in ports:
try:
LOG.debug("Deleting port with id %s of router with id %s",
port['id'], rid)
client.remove_router_interface(rid, port_id=port['id'])
except Exception:
LOG.exception("Delete Router Interface exception for "
"'port %s' of 'router %s'.", port['id'], rid)
try:
LOG.debug("Deleting Router with id %s", rid)
client.delete_router(rid)
except Exception:
LOG.exception("Delete Router %s exception.", rid)
def dry_run(self):
routers = self.list()
self.data['routers'] = routers
def save_state(self):
routers = self.list()
self.data['routers'] = {}
for router in routers:
self.data['routers'][router['id']] = router['name']
class NetworkMeteringLabelRuleService(NetworkService):
def list(self):
client = self.metering_label_rules_client
rules = client.list_metering_label_rules()
rules = rules['metering_label_rules']
rules = self._filter_by_tenant_id(rules)
if not self.is_save_state:
saved_rules = self.saved_state_json['metering_label_rules'].keys()
# recreate list removing saved rules
rules = [rule for rule in rules if rule['id'] not in saved_rules]
LOG.debug("List count, %s Metering Label Rules", len(rules))
return rules
def delete(self):
client = self.metering_label_rules_client
rules = self.list()
for rule in rules:
try:
LOG.debug("Deleting Metering Label Rule with id %s",
rule['id'])
client.delete_metering_label_rule(rule['id'])
except Exception:
LOG.exception("Delete Metering Label Rule %s exception.",
rule['id'])
def dry_run(self):
rules = self.list()
self.data['metering_label_rules'] = rules
def save_state(self):
rules = self.list()
self.data['metering_label_rules'] = {}
for rule in rules:
self.data['metering_label_rules'][rule['id']] = rule
class NetworkMeteringLabelService(BaseNetworkService):
def list(self):
client = self.metering_labels_client
labels = client.list_metering_labels()
labels = labels['metering_labels']
labels = self._filter_by_tenant_id(labels)
if not self.is_save_state:
# recreate list removing saved labels
labels = [label for label in labels if label['id']
not in self.saved_state_json['metering_labels'].keys()]
LOG.debug("List count, %s Metering Labels", len(labels))
return labels
def delete(self):
client = self.metering_labels_client
labels = self.list()
for label in labels:
try:
LOG.debug("Deleting Metering Label with id %s", label['id'])
client.delete_metering_label(label['id'])
except Exception:
LOG.exception("Delete Metering Label %s exception.",
label['id'])
def dry_run(self):
labels = self.list()
self.data['metering_labels'] = labels
def save_state(self):
labels = self.list()
self.data['metering_labels'] = {}
for label in labels:
self.data['metering_labels'][label['id']] = label['name']
class NetworkPortService(BaseNetworkService):
def list(self):
client = self.ports_client
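        # Only unbound ports and ports owned by compute instances are
        # cleanup candidates; router/DHCP-owned ports are handled elsewhere.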
ports = [port for port in
client.list_ports(**self.tenant_filter)['ports']
if port["device_owner"] == "" or
port["device_owner"].startswith("compute:")]
if not self.is_save_state:
# recreate list removing saved ports
ports = [port for port in ports if port['id']
not in self.saved_state_json['ports'].keys()]
if self.is_preserve:
ports = self._filter_by_conf_networks(ports)
LOG.debug("List count, %s Ports", len(ports))
return ports
def delete(self):
client = self.ports_client
ports = self.list()
for port in ports:
try:
LOG.debug("Deleting port with id %s", port['id'])
client.delete_port(port['id'])
except Exception:
LOG.exception("Delete Port %s exception.", port['id'])
def dry_run(self):
ports = self.list()
self.data['ports'] = ports
def save_state(self):
ports = self.list()
self.data['ports'] = {}
for port in ports:
self.data['ports'][port['id']] = port['name']
class NetworkSecGroupService(BaseNetworkService):
def list(self):
client = self.security_groups_client
filter = self.tenant_filter
# cannot delete default sec group so never show it.
secgroups = [secgroup for secgroup in
client.list_security_groups(**filter)['security_groups']
if secgroup['name'] != 'default']
if not self.is_save_state:
# recreate list removing saved security_groups
secgroups = [secgroup for secgroup in secgroups if secgroup['id']
not in self.saved_state_json['security_groups'].keys()
]
if self.is_preserve:
secgroups = [secgroup for secgroup in secgroups
if secgroup['security_group_rules'][0]['project_id']
not in CONF_PROJECTS]
LOG.debug("List count, %s security_groups", len(secgroups))
return secgroups
def delete(self):
client = self.security_groups_client
secgroups = self.list()
for secgroup in secgroups:
try:
LOG.debug("Deleting security_group with id %s", secgroup['id'])
client.delete_security_group(secgroup['id'])
except Exception:
LOG.exception("Delete security_group %s exception.",
secgroup['id'])
def dry_run(self):
secgroups = self.list()
self.data['security_groups'] = secgroups
def save_state(self):
secgroups = self.list()
self.data['security_groups'] = {}
for secgroup in secgroups:
self.data['security_groups'][secgroup['id']] = secgroup['name']
class NetworkSubnetService(BaseNetworkService):
def list(self):
client = self.subnets_client
subnets = client.list_subnets(**self.tenant_filter)
subnets = subnets['subnets']
if not self.is_save_state:
# recreate list removing saved subnets
subnets = [subnet for subnet in subnets if subnet['id']
not in self.saved_state_json['subnets'].keys()]
if self.is_preserve:
subnets = self._filter_by_conf_networks(subnets)
LOG.debug("List count, %s Subnets", len(subnets))
return subnets
def delete(self):
client = self.subnets_client
subnets = self.list()
for subnet in subnets:
try:
LOG.debug("Deleting subnet with id %s", subnet['id'])
client.delete_subnet(subnet['id'])
except Exception:
LOG.exception("Delete Subnet %s exception.", subnet['id'])
def dry_run(self):
subnets = self.list()
self.data['subnets'] = subnets
def save_state(self):
subnets = self.list()
self.data['subnets'] = {}
for subnet in subnets:
self.data['subnets'][subnet['id']] = subnet['name']
class NetworkSubnetPoolsService(BaseNetworkService):
def list(self):
client = self.subnetpools_client
pools = client.list_subnetpools(**self.tenant_filter)['subnetpools']
if not self.is_save_state:
# recreate list removing saved subnet pools
pools = [pool for pool in pools if pool['id']
not in self.saved_state_json['subnetpools'].keys()]
if self.is_preserve:
pools = [pool for pool in pools if pool['project_id']
not in CONF_PROJECTS]
LOG.debug("List count, %s Subnet Pools", len(pools))
return pools
def delete(self):
client = self.subnetpools_client
pools = self.list()
for pool in pools:
try:
LOG.debug("Deleting Subnet Pool with id %s", pool['id'])
client.delete_subnetpool(pool['id'])
except Exception:
LOG.exception("Delete Subnet Pool %s exception.", pool['id'])
def dry_run(self):
pools = self.list()
self.data['subnetpools'] = pools
def save_state(self):
pools = self.list()
self.data['subnetpools'] = {}
for pool in pools:
self.data['subnetpools'][pool['id']] = pool['name']
# begin global services
class RegionService(BaseService):
def __init__(self, manager, **kwargs):
super(RegionService, self).__init__(kwargs)
self.client = manager.regions_client
def list(self):
client = self.client
regions = client.list_regions()
if not self.is_save_state:
regions = [region for region in regions['regions'] if region['id']
not in self.saved_state_json['regions'].keys()]
LOG.debug("List count, %s Regions", len(regions))
return regions
else:
LOG.debug("List count, %s Regions", len(regions['regions']))
return regions['regions']
def delete(self):
client = self.client
regions = self.list()
for region in regions:
try:
LOG.debug("Deleting region with id %s", region['id'])
client.delete_region(region['id'])
except Exception:
LOG.exception("Delete Region %s exception.", region['id'])
def dry_run(self):
regions = self.list()
self.data['regions'] = {}
for region in regions:
self.data['regions'][region['id']] = region
def save_state(self):
regions = self.list()
self.data['regions'] = {}
for region in regions:
self.data['regions'][region['id']] = region
class FlavorService(BaseService):
def __init__(self, manager, **kwargs):
super(FlavorService, self).__init__(kwargs)
self.client = manager.flavors_client
def list(self):
client = self.client
flavors = client.list_flavors({"is_public": None})['flavors']
if not self.is_save_state:
# recreate list removing saved flavors
flavors = [flavor for flavor in flavors if flavor['id']
not in self.saved_state_json['flavors'].keys()]
if self.is_preserve:
flavors = [flavor for flavor in flavors
if flavor['id'] not in CONF_FLAVORS]
LOG.debug("List count, %s Flavors after reconcile", len(flavors))
return flavors
def delete(self):
client = self.client
flavors = self.list()
for flavor in flavors:
try:
LOG.debug("Deleting flavor with id %s", flavor['id'])
client.delete_flavor(flavor['id'])
except Exception:
LOG.exception("Delete Flavor %s exception.", flavor['id'])
def dry_run(self):
flavors = self.list()
self.data['flavors'] = flavors
def save_state(self):
flavors = self.list()
self.data['flavors'] = {}
for flavor in flavors:
self.data['flavors'][flavor['id']] = flavor['name']
class ImageService(BaseService):
def __init__(self, manager, **kwargs):
super(ImageService, self).__init__(kwargs)
self.client = manager.image_client_v2
def list(self):
client = self.client
response = client.list_images()
images = []
images.extend(response['images'])
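        # Follow the 'next' links to page through the complete image listing.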
while 'next' in response:
parsed = urllib.urlparse(response['next'])
marker = urllib.parse_qs(parsed.query)['marker'][0]
response = client.list_images(params={"marker": marker})
images.extend(response['images'])
if not self.is_save_state:
images = [image for image in images if image['id']
not in self.saved_state_json['images'].keys()]
if self.is_preserve:
images = [image for image in images
if image['id'] not in CONF_IMAGES]
LOG.debug("List count, %s Images after reconcile", len(images))
return images
def delete(self):
client = self.client
images = self.list()
for image in images:
try:
LOG.debug("Deleting image with id %s", image['id'])
client.delete_image(image['id'])
except Exception:
LOG.exception("Delete Image %s exception.", image['id'])
def dry_run(self):
images = self.list()
self.data['images'] = images
def save_state(self):
self.data['images'] = {}
images = self.list()
for image in images:
self.data['images'][image['id']] = image['name']
class UserService(BaseService):
def __init__(self, manager, **kwargs):
super(UserService, self).__init__(kwargs)
self.client = manager.users_v3_client
def list(self):
users = self.client.list_users()['users']
if not self.is_save_state:
users = [user for user in users if user['id']
not in self.saved_state_json['users'].keys()]
if self.is_preserve:
users = [user for user in users if user['name']
not in CONF_USERS]
elif not self.is_save_state: # Never delete admin user
users = [user for user in users if user['name'] !=
CONF.auth.admin_username]
LOG.debug("List count, %s Users after reconcile", len(users))
return users
def delete(self):
users = self.list()
for user in users:
try:
LOG.debug("Deleting user with id %s", user['id'])
self.client.delete_user(user['id'])
except Exception:
LOG.exception("Delete User %s exception.", user['id'])
def dry_run(self):
users = self.list()
self.data['users'] = users
def save_state(self):
users = self.list()
self.data['users'] = {}
for user in users:
self.data['users'][user['id']] = user['name']
class RoleService(BaseService):
def __init__(self, manager, **kwargs):
super(RoleService, self).__init__(kwargs)
self.client = manager.roles_v3_client
def list(self):
try:
roles = self.client.list_roles()['roles']
# reconcile roles with saved state and never list admin role
if not self.is_save_state:
roles = [role for role in roles if
(role['id'] not in
self.saved_state_json['roles'].keys() and
role['name'] != CONF.identity.admin_role)]
LOG.debug("List count, %s Roles after reconcile", len(roles))
return roles
except Exception:
LOG.exception("Cannot retrieve Roles.")
return []
def delete(self):
roles = self.list()
for role in roles:
try:
LOG.debug("Deleting role with id %s", role['id'])
self.client.delete_role(role['id'])
except Exception:
LOG.exception("Delete Role %s exception.", role['id'])
def dry_run(self):
roles = self.list()
self.data['roles'] = roles
def save_state(self):
roles = self.list()
self.data['roles'] = {}
for role in roles:
self.data['roles'][role['id']] = role['name']
class ProjectService(BaseService):
def __init__(self, manager, **kwargs):
super(ProjectService, self).__init__(kwargs)
self.client = manager.projects_client
def list(self):
projects = self.client.list_projects()['projects']
if not self.is_save_state:
project_ids = self.saved_state_json['projects']
projects = [project
for project in projects
if (project['id'] not in project_ids and
project['name'] != CONF.auth.admin_project_name)]
if self.is_preserve:
projects = [project
for project in projects
if project['name'] not in CONF_PROJECTS]
LOG.debug("List count, %s Projects after reconcile", len(projects))
return projects
def delete(self):
projects = self.list()
for project in projects:
try:
LOG.debug("Deleting project with id %s", project['id'])
self.client.delete_project(project['id'])
except Exception:
LOG.exception("Delete project %s exception.", project['id'])
def dry_run(self):
projects = self.list()
self.data['projects'] = projects
def save_state(self):
projects = self.list()
self.data['projects'] = {}
for project in projects:
self.data['projects'][project['id']] = project['name']
class DomainService(BaseService):
def __init__(self, manager, **kwargs):
super(DomainService, self).__init__(kwargs)
self.client = manager.domains_client
def list(self):
client = self.client
domains = client.list_domains()['domains']
if not self.is_save_state:
domains = [domain for domain in domains if domain['id']
not in self.saved_state_json['domains'].keys()]
LOG.debug("List count, %s Domains after reconcile", len(domains))
return domains
def delete(self):
client = self.client
domains = self.list()
for domain in domains:
try:
LOG.debug("Deleting domain with id %s", domain['id'])
client.update_domain(domain['id'], enabled=False)
client.delete_domain(domain['id'])
except Exception:
LOG.exception("Delete Domain %s exception.", domain['id'])
def dry_run(self):
domains = self.list()
self.data['domains'] = domains
def save_state(self):
domains = self.list()
self.data['domains'] = {}
for domain in domains:
self.data['domains'][domain['id']] = domain['name']
def get_project_associated_cleanup_services():
"""Returns list of project service classes.
    The list contains services whose resources need to be deleted before the
    project they are associated with is deleted; most of these resources can
    no longer be deleted once the project itself is gone.
"""
project_associated_services = []
# TODO(gmann): Tempest should provide some plugin hook for cleanup
# script extension to plugin tests also.
if IS_NOVA:
project_associated_services.append(NovaQuotaService)
if IS_CINDER:
project_associated_services.append(VolumeQuotaService)
if IS_NEUTRON:
project_associated_services.append(NetworkQuotaService)
return project_associated_services
def get_resource_cleanup_services():
"""Returns list of project related classes.
    The list contains services whose resources are associated with a project,
    but whose deletion is still possible after the project itself has been
    deleted.
"""
resource_cleanup_services = []
# TODO(gmann): Tempest should provide some plugin hook for cleanup
# script extension to plugin tests also.
if IS_NOVA:
resource_cleanup_services.append(ServerService)
resource_cleanup_services.append(KeyPairService)
resource_cleanup_services.append(ServerGroupService)
if IS_NEUTRON:
resource_cleanup_services.append(NetworkFloatingIpService)
if utils.is_extension_enabled('metering', 'network'):
resource_cleanup_services.append(NetworkMeteringLabelRuleService)
resource_cleanup_services.append(NetworkMeteringLabelService)
resource_cleanup_services.append(NetworkRouterService)
resource_cleanup_services.append(NetworkPortService)
resource_cleanup_services.append(NetworkSubnetService)
resource_cleanup_services.append(NetworkService)
resource_cleanup_services.append(NetworkSecGroupService)
resource_cleanup_services.append(NetworkSubnetPoolsService)
if IS_CINDER:
resource_cleanup_services.append(SnapshotService)
resource_cleanup_services.append(VolumeService)
return resource_cleanup_services
def get_global_cleanup_services():
global_services = []
if IS_NOVA:
global_services.append(FlavorService)
if IS_GLANCE:
global_services.append(ImageService)
global_services.append(UserService)
global_services.append(ProjectService)
global_services.append(DomainService)
global_services.append(RoleService)
global_services.append(RegionService)
return global_services
| apache-2.0 | -4,164,139,727,326,428,000 | 33.635159 | 79 | 0.578264 | false |
haihala/modman | cli.py | 1 | 12445 | #!/usr/bin/env python3
try:
import requests
except ImportError:
print("It looks like requests is not installed.")
print("Try: pip3 install requests")
exit(1)
import os
import sys
import subprocess
from getpass import getpass
import mod_manager
from mod_manager import server
from mod_manager.exceptions import LoginError
def open_gui_editor(filename):
"""Opens default GUI text editor."""
if sys.platform == "win32":
os.startfile(filename)
elif sys.platform.startswith("darwin"):
try:
subprocess.call(["open", filename])
except FileNotFoundError:
print("Your default editor \"{}\" could not be opened.")
print("You can manually open \"{}\" if you want to edit it.".format(filename))
elif sys.platform.startswith("linux"):
try:
subprocess.call(["xdg-open", filename])
except FileNotFoundError:
print("Your default editor \"{}\" could not be opened.")
print("You can manually open \"{}\" if you want to edit it.".format(filename))
else:
print("Could not determine text editor.")
print("You can manually open \"{}\" if you want to edit it.".format(filename))
def open_editor(filename):
"""Opens default text editor, preferring CLI editors to GUI editors."""
if sys.platform.startswith("win32"):
open_gui_editor(filename)
elif sys.platform.startswith("darwin") or sys.platform.startswith("linux"):
default_editor = os.environ.get("EDITOR", None)
if default_editor:
try:
subprocess.call([default_editor, filename])
except FileNotFoundError:
# could not use default editor
print("Your default editor \"{}\" could not be opened.")
print("You can manually open \"{}\" if you want to edit it.".format(filename))
else:
open_gui_editor(filename)
class CLI(object):
ACTIONS = [
"help [action]",
"list",
"contents <packname> [packname2]...",
"edit <packname>",
"compress <packname>",
"decompress <base64>",
"install <packname>",
"match <server_address>",
"enabled",
"enable <modname> [version]",
"disable <modname>",
"search <query> [-n <integer>]",
"credentials <action> [args]",
"cache <action>",
"apicache <action>",
"serv_install <modpacks> [experimental]",
]
HELP = {
"help": "If action is present, prints detailed information of the action, otherwise this help message is printed",
"list": "Lists all available modpacks",
"contents": "Lists all mods in a modpack",
"edit": "Opens the specified pack in default text editor",
"compress": "Makes a base64 digest of the mentioned modpack",
"decompress": "Unpacks a mod from base64 digest (overrides existing modpacks with the same name)",
"install": "Despite what is in the mod folder, downloads the newest mods into the specified folder",
"match": "Match your mod configuration to one in a server, using exactly same versions",
"enabled": "List enabled mods",
"enable": "Enables a single mod by name and optionally a version number",
"disable": "Disable a single mod",
"search": "Search for mods from the Factorio mod portal. Specify the amount of results with -n parameter. By default 5 results are displayed.",
"credentials": "Manage mod portal credentials. Actions: set, set [username] [password], clear",
"cache": "Manage cache. Actions: reset, list",
"apicache": "Manage api call cache. Actions: reset",
"serv_install": "Installs the newest server with the chosen modpacks. If '-experimental' or '-e' are present in the command, the newest experimental release is installed."
}
ACTION_NAMES = [a.split()[0] for a in ACTIONS]
def __init__(self):
self.mod_manager = mod_manager.ModManager(login_callback=self.login)
def print_progress_message(self, step):
print(step.message, end="")
sys.stdout.flush()
def print_2col_table(self, rows, indent=0, empty_msg=None):
if rows:
c1_max_width = max([len(c1) for c1, c2 in rows])
for c1, c2 in rows:
print("".join([" "*2*indent, c1, " "*(c1_max_width - len(c1) + 2), c2]))
elif empty_msg:
print("({})".format(empty_msg))
def prompt_credentials(self):
print("")
print("Logging in to Factorio mod portal")
print("(Password will not be displayed.)")
username = input("Username: ")
password = getpass("Password: ")
print("")
return mod_manager.credentials.Credentials(username, password)
def login(self):
if not mod_manager.credentials.Keyring.credentials_stored:
cred = self.prompt_credentials()
else:
cred = None
try:
self.mod_manager.mod_portal.login(cred)
except LoginError:
print("Could not log in to the mod portal.")
exit(1)
def cmd_help(self, args):
if args == []:
print("")
print("Usage: {} [action] [args]".format(sys.argv[0]))
print("")
self.print_2col_table([(action, self.HELP[action.split()[0]]) for action in self.ACTIONS], indent=1)
print("")
elif args[0] in self.ACTION_NAMES:
action = [a for a in self.ACTIONS if a.startswith(args[0])][0]
print(action+": "+self.HELP[args[0]])
else:
print("Invalid action \"{}\"".format(args[0]))
exit(1)
def cmd_list(self, args):
if len(args) != 0:
print("Invalid argument count")
exit(1)
for p in self.mod_manager.modpacks:
print(p.name)
def cmd_contents(self, args):
if len(args) == 0:
print("Invalid argument count")
exit(1)
packs = {p.name: p for p in self.mod_manager.modpacks}
        matching = []
        for arg in args:
if arg in packs:
pack = packs[arg]
if pack not in matching:
matching.append(pack)
else:
print("Mod pack \"{}\" does not exist.".format(arg))
exit(1)
lengths = [len(mod.name) for pack in matching for mod in pack.contents]
if lengths:
maxlen = max(lengths)
for pack in matching:
print(pack.name)
if pack.empty:
print(" (modpack is empty)")
else:
for mod in pack.contents:
ver = mod.version + " (" + ("fixed" if mod.fixed_version else "floating") + ")"
print(" "*2 + mod.name + " "*((maxlen-len(mod.name))+2) + ver)
def cmd_edit(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
mp = self.mod_manager.get_pack(args[0])
open_editor(mp.path)
def cmd_compress(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
mp = self.mod_manager.get_pack(args[0])
if mp.exists:
print(mp.compress())
else:
print("Mod pack \"{}\" does not exist.".format(args[0]))
exit(1)
def cmd_decompress(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
self.mod_manager.decompress_modpack(args[0]).save()
def cmd_install(self, args):
if args:
packs = []
for p in args:
mp = self.mod_manager.get_pack(p)
if mp.exists:
packs.append(mp)
else:
print("Mod pack \"{}\" does not exist.".format(p))
exit(1)
self.mod_manager.install_packs(packs, self.print_progress_message)
else:
print("Invalid argument count")
exit(1)
def cmd_match(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
try:
self.mod_manager.install_matching(args[0], callback=self.print_progress_message)
except ConnectionRefusedError:
print("Could not connect to the server. Is it running?")
exit(1)
except BrokenPipeError:
print("Could not communicate with the server. Are you using same Factorio version?")
exit(1)
def cmd_enabled(self, args):
if len(args) != 0:
print("Invalid argument count")
exit(1)
self.print_2col_table(
[(mod.name, mod.version) for mod in self.mod_manager.installed_mods],
empty_msg="no mods enabled"
)
def cmd_search(self, args):
search_args = " ".join(args)
wanted_responces = 5
lenght_param = search_args.rsplit(" -n ", 1)
if len(lenght_param) == 2 and len(lenght_param[1]):
try:
wanted_responces = int(lenght_param[1])
wanted_responces = min(max(wanted_responces, 0), 25)
search_args = " ".join(args[:-2])
except ValueError:
pass
results = self.mod_manager.mod_portal.search(search_args, n=wanted_responces)
for i,s in enumerate(results):
print("{}. {}: {} ({} downloads)".format(i+1, s.name, s.title, s.downloads_count))
def cmd_credentials(self, args):
if len(args) not in [1,3]:
print("Invalid argument count")
exit(1)
if args[0] == "clear":
if len(args) != 1:
print("Invalid arguments: clear doesn't take any")
exit(1)
mod_manager.credentials.Keyring.clear()
elif args[0] == "set":
if len(args) == 1:
c = self.prompt_credentials()
else:
c = mod_manager.credentials.Credentials(*args[1:])
print("Verifying... ", end="")
sys.stdout.flush()
try:
self.mod_manager.mod_portal.login(c)
except LoginError:
print("invalid credentials")
exit(1)
else:
print("ok")
mod_manager.credentials.Keyring.set_credentials(c)
else:
print("Invalid action \"{}\"".format(args[0]))
exit(1)
def cmd_cache(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
if args[0] == "reset":
self.mod_manager.mod_cache.reset()
elif args[0] == "list":
self.print_2col_table(
[(cmod.name, cmod.version) for cmod in self.mod_manager.mod_cache.mods],
empty_msg="no cached mods"
)
else:
print("Invalid arguments")
print("Usage: cache <action>")
print("Actions: reset, list")
exit(1)
def cmd_apicache(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
if args[0] == "reset":
self.mod_manager.mod_portal.api_cache.reset()
else:
print("Invalid arguments")
print("Usage: apicache reset")
exit(1)
def cmd_serv_install(self, args):
experimental = args[-1] in ["-e", "-experimental"]
if experimental:
modpacks = args[:-1]
else:
modpacks = args[:]
mod_manager.server.create_server(modpacks, experimental, self.mod_manager, self.print_progress_message)
def run(self, cmd):
if cmd == []:
cmd = ["help"]
if cmd[0] in self.ACTION_NAMES:
try:
# get function in this folder named "cmd_<action>"
fn = getattr(self, "cmd_"+cmd[0])
except AttributeError:
print("Action not implemented yet.")
exit(1)
fn(cmd[1:])
else:
print("Invalid action \"{}\"".format(cmd[0]))
exit(1)
def main():
CLI().run(sys.argv[1:])
if __name__ == '__main__':
main()
| mit | 3,436,079,789,954,197,000 | 32.910082 | 179 | 0.538931 | false |
pybel/pybel | tests/test_struct/test_node_utils.py | 1 | 4775 | # -*- coding: utf-8 -*-
"""Tests for node utilities."""
import unittest
from pybel import BELGraph
from pybel.constants import INCREASES
from pybel.dsl import ComplexAbundance as g, CompositeAbundance as c, Protein, Reaction
from pybel.examples.various_example import adp, atp, glucose, glucose_6_phosphate, hk1, phosphate, single_reaction_graph
from pybel.struct.node_utils import flatten_list_abundance, reaction_cartesian_expansion
class TestNodeUtils(unittest.TestCase):
"""Test node utilities."""
def test_flatten_complex(self):
"""Test flattening a nested complex."""
p1, p2, p3 = (Protein('N', str(i + 1)) for i in range(3))
pairs = [
# Mainly complexes
(g([p1, p2, p3]), g([p1, p2, p3])), # no nesting
(g([p1, p2, p3]), g([g([p1, p2]), p3])), # one nesting
(g([p1, p2, p3]), g([g([p1]), p2, p3])), # one nesting
(g([p1, p2, p3]), g([g([p1]), g([p2]), p3])), # one nesting
# Mainly composites
(c([p1, p2, p3]), c([p1, p2, p3])), # no nesting
(c([p1, p2, p3]), c([c([p1, p2]), p3])), # one nesting
(c([p1, p2, p3]), c([c([p1]), p2, p3])), # one nesting
(c([p1, p2, p3]), c([c([p1]), c([p2]), p3])), # one nesting
# TODO: mixtures of composites and complexes?
]
for expected, source in pairs:
self.assertEqual(expected, flatten_list_abundance(source))
def test_flatten_reaction(self):
"""Test flattening a reaction."""
single_reaction_graph_copy = single_reaction_graph.copy()
self.assertEqual(single_reaction_graph_copy.number_of_nodes(), 7)
self.assertEqual(single_reaction_graph_copy.number_of_edges(), 7)
reaction_cartesian_expansion(single_reaction_graph_copy)
self.assertEqual(single_reaction_graph_copy.number_of_nodes(), 6)
self.assertEqual(single_reaction_graph_copy.number_of_edges(), 8)
pairs = [
(glucose, INCREASES, glucose_6_phosphate),
(glucose, INCREASES, adp),
(hk1, INCREASES, glucose_6_phosphate),
(hk1, INCREASES, adp),
(atp, INCREASES, glucose_6_phosphate),
(atp, INCREASES, adp),
(phosphate, INCREASES, glucose_6_phosphate),
(phosphate, INCREASES, adp),
]
for source, target, data in single_reaction_graph_copy.edges(data=True):
self.assertIn((source, INCREASES, target), pairs)
def test_flatten_reaction_2(self):
"""Test flattening a qualified reaction."""
node_increases_reaction_graph = BELGraph()
glycolisis_step_1 = Reaction(reactants=[glucose, hk1, atp], products=[glucose_6_phosphate, adp, hk1])
node_increases_reaction_graph.add_increases(glucose_6_phosphate, glycolisis_step_1, citation='X', evidence='X')
self.assertEqual(node_increases_reaction_graph.number_of_nodes(), 6)
self.assertEqual(node_increases_reaction_graph.number_of_edges(), 7)
reaction_cartesian_expansion(node_increases_reaction_graph)
self.assertEqual(node_increases_reaction_graph.number_of_nodes(), 5)
# TODO Fix so unqualified duplicate edges are not created (it should be the 8 edges below)
self.assertEqual(node_increases_reaction_graph.number_of_edges(), 12)
# pairs = [
# (glucose, INCREASES, glucose_6_phosphate),
# (glucose, INCREASES, adp),
# (hk1, INCREASES, glucose_6_phosphate),
# (hk1, INCREASES, adp),
# (atp, INCREASES, glucose_6_phosphate),
# (atp, INCREASES, adp),
# (phosphate, INCREASES, glucose_6_phosphate),
# (phosphate, INCREASES, adp),
# ]
#
# for source, target, data in node_increases_reaction_graph.edges(data=True):
# self.assertIn((source, INCREASES, target), pairs)
def test_flatten_reaction_3(self):
"""Test flattening a graph containing 2 reactions connected to each other."""
two_reactions_graph = BELGraph()
reaction_1 = Reaction(reactants=[glucose, atp], products=hk1)
reaction_2 = Reaction(reactants=glucose_6_phosphate, products=adp)
two_reactions_graph.add_increases(reaction_1, reaction_2, citation='X', evidence='X')
self.assertEqual(two_reactions_graph.number_of_nodes(), 7)
self.assertEqual(two_reactions_graph.number_of_edges(), 6)
reaction_cartesian_expansion(two_reactions_graph)
# TODO Fix so unqualified duplicate edges are not created (it should be the 6 edges below)
self.assertEqual(two_reactions_graph.number_of_nodes(), 5)
self.assertEqual(two_reactions_graph.number_of_edges(), 8)
| mit | -454,075,000,345,115,800 | 42.018018 | 120 | 0.617801 | false |
awesto/django-shop | shop/transition.py | 1 | 3695 | from urllib.parse import urlparse
from django.contrib.auth.models import AnonymousUser
from django.db import models
from django.http.request import HttpRequest
from post_office import mail
from post_office.models import EmailTemplate
from shop.conf import app_settings
from shop.models.order import BaseOrder
from shop.models.notification import Notification
from shop.serializers.delivery import DeliverySerializer
from shop.serializers.order import OrderDetailSerializer
from shop.signals import email_queued
class EmulateHttpRequest(HttpRequest):
"""
Use this class to emulate a HttpRequest object, when templates must be rendered
asynchronously, for instance when an email must be generated out of an Order object.
"""
def __init__(self, customer, stored_request):
super().__init__()
parsedurl = urlparse(stored_request.get('absolute_base_uri'))
self.path = self.path_info = parsedurl.path
self.environ = {}
self.META['PATH_INFO'] = parsedurl.path
self.META['SCRIPT_NAME'] = ''
self.META['HTTP_HOST'] = parsedurl.netloc
self.META['HTTP_X_FORWARDED_PROTO'] = parsedurl.scheme
self.META['QUERY_STRING'] = parsedurl.query
self.META['HTTP_USER_AGENT'] = stored_request.get('user_agent')
self.META['REMOTE_ADDR'] = stored_request.get('remote_ip')
self.method = 'GET'
self.LANGUAGE_CODE = self.COOKIES['django_language'] = stored_request.get('language')
self.customer = customer
self.user = customer.is_anonymous and AnonymousUser or customer.user
self.current_page = None
def transition_change_notification(order):
"""
This function shall be called, after an Order object performed a transition change.
"""
if not isinstance(order, BaseOrder):
raise TypeError("Object order must inherit from class BaseOrder")
emails_in_queue = False
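    # Send one templated email per Notification configured for the order's
    # new status.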
for notification in Notification.objects.filter(transition_target=order.status):
recipient = notification.get_recipient(order)
if recipient is None:
continue
# emulate a request object which behaves similar to that one, when the customer submitted its order
emulated_request = EmulateHttpRequest(order.customer, order.stored_request)
customer_serializer = app_settings.CUSTOMER_SERIALIZER(order.customer)
render_context = {'request': emulated_request, 'render_label': 'email'}
order_serializer = OrderDetailSerializer(order, context=render_context)
language = order.stored_request.get('language')
context = {
'customer': customer_serializer.data,
'order': order_serializer.data,
'ABSOLUTE_BASE_URI': emulated_request.build_absolute_uri().rstrip('/'),
'render_language': language,
}
try:
latest_delivery = order.delivery_set.latest()
context['latest_delivery'] = DeliverySerializer(latest_delivery, context=render_context).data
except (AttributeError, models.ObjectDoesNotExist):
pass
try:
template = notification.mail_template.translated_templates.get(language=language)
except EmailTemplate.DoesNotExist:
template = notification.mail_template
attachments = {}
for notiatt in notification.notificationattachment_set.all():
attachments[notiatt.attachment.original_filename] = notiatt.attachment.file.file
mail.send(recipient, template=template, context=context,
attachments=attachments, render_on_delivery=True)
emails_in_queue = True
if emails_in_queue:
email_queued()
| bsd-3-clause | 40,270,448,760,447,544 | 44.617284 | 107 | 0.689851 | false |
UKPLab/semeval2017-scienceie | code/convNet.py | 1 | 7292 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from extras import VSM, read_and_map
from representation import VeryStupidCBOWMapper, CharMapper
import sys, numpy as np,os
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix
from sklearn.metrics import precision_recall_fscore_support
from keras.layers import Dense, Dropout, Activation, Embedding
from keras.models import Sequential
from keras.utils.np_utils import to_categorical
from keras.layers import Convolution1D, GlobalMaxPooling1D, Lambda, Merge
from keras.preprocessing import sequence
from keras import backend as K
maxlen=50
maxlen=100
maxlen=150
maxlen=50+2*30
try:
L = int(sys.argv[5])
M = int(sys.argv[6])
R = int(sys.argv[7])
except IndexError:
L = 30
M = 50
R = 30
maxlen=L+M+R
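# effective character window: L characters of left context, M for the center
# span and R of right context (this overrides the earlier maxlen defaults)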
# this is a simple cnn
# if you would want to use it below, you would have to do
# X_train = X_train.reshape(len(X_train),input_shape[0],input_shape[1])
def build_cnn(input_shape, output_dim,nb_filter):
clf = Sequential()
clf.add(Convolution1D(nb_filter=nb_filter,
filter_length=4,border_mode="valid",activation="relu",subsample_length=1,input_shape=input_shape))
clf.add(GlobalMaxPooling1D())
clf.add(Dense(100))
clf.add(Dropout(0.2))
clf.add(Activation("tanh"))
clf.add(Dense(output_dim=output_dim, activation='softmax'))
clf.compile(optimizer='adagrad',
loss='categorical_crossentropy',
metrics=['accuracy'])
return clf
# just one filter
def build_cnn_char(input_dim, output_dim,nb_filter):
clf = Sequential()
clf.add(Embedding(input_dim,
32, # character embedding size
input_length=maxlen,
dropout=0.2))
clf.add(Convolution1D(nb_filter=nb_filter,
filter_length=3,border_mode="valid",activation="relu",subsample_length=1))
clf.add(GlobalMaxPooling1D())
clf.add(Dense(100))
clf.add(Dropout(0.2))
clf.add(Activation("tanh"))
clf.add(Dense(output_dim=output_dim, activation='softmax'))
clf.compile(optimizer='adagrad',
loss='categorical_crossentropy',
metrics=['accuracy'])
return clf
# just one filter
def build_cnn_char_threeModels(input_dim, output_dim,nb_filter,filter_size=3):
left = Sequential()
left.add(Embedding(input_dim,
32, # character embedding size
input_length=L,
dropout=0.2))
left.add(Convolution1D(nb_filter=nb_filter,
filter_length=filter_size,border_mode="valid",activation="relu",subsample_length=1))
left.add(GlobalMaxPooling1D())
left.add(Dense(100))
left.add(Dropout(0.2))
left.add(Activation("tanh"))
center = Sequential()
center.add(Embedding(input_dim,
32, # character embedding size
input_length=M,
dropout=0.2))
center.add(Convolution1D(nb_filter=nb_filter,
filter_length=filter_size,border_mode="valid",activation="relu",subsample_length=1))
center.add(GlobalMaxPooling1D())
center.add(Dense(100))
center.add(Dropout(0.2))
center.add(Activation("tanh"))
right = Sequential()
right.add(Embedding(input_dim,
32, # character embedding size
input_length=R,
dropout=0.2))
right.add(Convolution1D(nb_filter=nb_filter,
filter_length=filter_size,border_mode="valid",activation="relu",subsample_length=1))
right.add(GlobalMaxPooling1D())
right.add(Dense(100))
right.add(Dropout(0.2))
right.add(Activation("tanh"))
clf = Sequential()
clf.add(Merge([left,center,right],mode="concat"))
clf.add(Dense(output_dim=output_dim, activation='softmax'))
clf.compile(optimizer='adagrad',
loss='categorical_crossentropy',
metrics=['accuracy'])
return clf
def max_1d(X):
return K.max(X,axis=1)
# multiple filters
def build_cnn_char_complex(input_dim, output_dim,nb_filter):
randomEmbeddingLayer = Embedding(input_dim,32, input_length=maxlen,dropout=0.1)
poolingLayer = Lambda(max_1d, output_shape=(nb_filter,))
conv_filters = []
for n_gram in range(2,4):
ngramModel = Sequential()
ngramModel.add(randomEmbeddingLayer)
ngramModel.add(Convolution1D(nb_filter=nb_filter,
filter_length=n_gram,
border_mode="valid",
activation="relu",
subsample_length=1))
ngramModel.add(poolingLayer)
conv_filters.append(ngramModel)
clf = Sequential()
clf.add(Merge(conv_filters,mode="concat"))
clf.add(Activation("relu"))
clf.add(Dense(100))
clf.add(Dropout(0.1))
clf.add(Activation("tanh"))
clf.add(Dense(output_dim=output_dim, activation='softmax'))
clf.compile(optimizer='adagrad',
loss='categorical_crossentropy',
metrics=['accuracy'])
return clf
def acc(correct, total):
return 1.0*correct/total
# example argline:
# python convNet.py ../scienceie2017_train/train2 ../scienceie2017_dev/dev ../resources/vsm/glove.6B/glove.6B.100d.txt
if __name__=="__main__":
train_src = sys.argv[1]
dev_src = sys.argv[2]
# vsm_path = sys.argv[3]
vsm_path = None
print("Loading VSM")
vsm = VSM(vsm_path)
    try:
        csize = int(sys.argv[4])
    except IndexError:
        csize = 2
try:
n_filter = int(sys.argv[8])
except IndexError:
n_filter = 250
try:
filter_size = int(sys.argv[9])
except IndexError:
filter_size = 3
if len(sys.argv)>10 and sys.argv[10]=="document":
SB = False
else:
SB = True
mapper = CharMapper(vsm,csize,L=L,M=M,R=R,sentence_boundaries=SB)
print("Reading training data")
X_train, y_train, y_values, _ = read_and_map(train_src, mapper)
X_dev, y_dev_gold, _, estrings = read_and_map(dev_src, mapper, y_values)
vocabSize = mapper.curVal
print(X_train.shape)
print(y_train.shape)
#sys.exit(1)
print("Trainig a model")
timesteps = 2*csize + 1 # left, right, center
context_dim = 100
input_shape = (timesteps,context_dim)
clf = build_cnn_char(vocabSize+1, len(y_values)+1,n_filter)
clf = build_cnn_char_threeModels(vocabSize+1, len(y_values)+1,n_filter)
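    # split each padded character sequence into its left / center / right
    # context windows for the three-branch model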
X_left = X_train[:,:L]
X_center = X_train[:,L:L+M]
X_right = X_train[:,L+M:L+M+R]
    print(L, M, R, X_train.shape, X_left.shape, X_center.shape, X_right.shape, y_train, y_values)
clf.fit([X_left,X_center,X_right], to_categorical(y_train, len(y_values)+1), verbose=1, nb_epoch=15)
print("Reading test data")
print("Testing")
X_dev_left = X_dev[:,:L]
X_dev_center = X_dev[:,L:L+M]
X_dev_right = X_dev[:,L+M:L+M+R]
print(X_dev.shape,X_dev_left.shape,X_dev_center.shape,X_dev_right.shape)
y_dev_auto = clf.predict_classes([X_dev_left,X_dev_center,X_dev_right]) # for LogisticRegression just do predict()
print "==PREDICTING=="
for i in xrange(len(y_dev_auto)):
print y_values[y_dev_auto[i]]
| apache-2.0 | 7,097,266,528,719,987,000 | 31.995475 | 124 | 0.623149 | false |
JaneliaSciComp/Neuroptikon | Source/library/library.py | 1 | 1804 | # Copyright (c) 2010 Howard Hughes Medical Institute.
# All rights reserved.
# Use is subject to Janelia Farm Research Campus Software Copyright 1.1 license terms.
# http://license.janelia.org/license/jfrc_copyright_1_1.html
from library_item import LibraryItem
from library_frame import LibraryFrame
from pydispatch import dispatcher
from itertools import groupby
class Library(object):
def __init__(self):
self._library = {}
self._frame = LibraryFrame()
def add(self, item):
if not issubclass(item.__class__, LibraryItem):
raise ValueError, gettext('Library items must be instances of a subclass of LibraryItem')
if item.__class__.__name__ in self._library:
# This class of item has been added before.
dict = self._library[item.__class__.__name__]
else:
# Create and retain a new dictionary for this class of item.
dict = {}
self._library[item.__class__.__name__] = dict
# Add a method to ourself that returns the full list of items of this class.
setattr(self, item.__class__.listProperty(), lambda: sorted([value for value, group in groupby(dict.values())], cmp=lambda x,y: cmp(x.name.lower(), y.name.lower())))
# Add a method to ourself that performs a lookup of items of this class.
setattr(self, item.__class__.lookupProperty(), lambda itemId: dict.get(itemId, None))
self._frame.addItemClass(item.__class__)
dict[item.identifier] = item
for synonym in item.synonyms:
dict[synonym] = item
dispatcher.send(('addition', item.__class__), self)
def browse(self):
self._frame.Show()
self._frame.Raise()
| bsd-3-clause | 8,292,867,889,160,056,000 | 39.088889 | 177 | 0.616408 | false |
3bot/3bot-hook | threebot_hook/models.py | 1 | 1773 | # -*- coding: utf-8 -*-
from django import dispatch
from django.contrib.sites.models import Site
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from rest_framework.authtoken.models import Token
from threebot.models import Workflow
from threebot.models import Worker
from threebot.models import ParameterList
@python_2_unicode_compatible
class Hook(models.Model):
slug = models.SlugField(max_length=255)
user = models.CharField(max_length=255, blank=True, null=True)
repo = models.CharField(
max_length=255, blank=True, null=True,
help_text=u'Leave blank. Field is not used in the current version.')
secret = models.CharField(
max_length=255, blank=True, null=True,
help_text=u'Leave blank. Field is not used in the current version.')
workflow = models.ForeignKey(Workflow)
worker = models.ForeignKey(Worker)
param_list = models.ForeignKey(ParameterList)
def get_hook_url(self):
return "%d-%d-%d-%s" % (self.workflow.id, self.worker.id, self.param_list.id, self.slug)
def __str__(self):
return "%s (%d)" % (self.get_hook_url(), self.pk)
def make_full_url(self, user):
token, created = Token.objects.get_or_create(user=user)
return "https://%s/hooks/%s/%s-%s-%s/" % (Site.objects.get_current().domain, token, self.workflow.id, self.worker.id, self.param_list.id)
class Meta():
verbose_name = _("Hook")
verbose_name_plural = _("Hooks")
db_table = 'threebot_hook'
unique_together = ("workflow", "worker", "param_list")
class HookSignal(dispatch.Signal):
pass
pre_hook_signal = HookSignal()
post_hook_signal = HookSignal()
| bsd-3-clause | -1,663,933,699,646,561,500 | 34.46 | 145 | 0.685279 | false |
borg-project/borg | borg/tools/get_features.py | 1 | 2297 | """@author: Bryan Silverthorn <[email protected]>"""
import os.path
import csv
import borg
import borg.distributors
logger = borg.get_logger(__name__, default_level = "INFO")
def features_for_path(domain, task_path):
# bring back relevant globals
import os.path
import borg
logger = borg.get_logger(__name__, default_level = "INFO")
# collect features
logger.info("getting features of %s", os.path.basename(task_path))
with domain.task_from_path(task_path) as task:
with borg.accounting() as accountant:
(names, values) = domain.compute_features(task)
return (
task_path,
["cpu_cost"] + list(names),
[accountant.total.cpu_seconds] + list(values))
@borg.annotations(
domain_name = ("suite path, or name of the problem domain", "positional"),
instances_root = ("path to instances files", "positional", None, os.path.abspath),
suffix = ("file suffix to apply", "positional"),
skip_existing = ("skip existing features?", "flag"),
distributor_name = ("name of task distributor", "option"),
workers = ("submit jobs?", "option", "w", int),
)
def main(
domain_name,
instances_root,
suffix = ".features.csv",
skip_existing = False,
distributor_name = "ipython",
workers = 0):
"""Collect task features."""
def yield_runs():
if os.path.exists(domain_name):
domain = borg.load_solvers(domain_name).domain
else:
domain = borg.get_domain(domain_name)
paths = list(borg.util.files_under(instances_root, domain.extensions))
count = 0
for path in paths:
if skip_existing and os.path.exists(path + suffix):
continue
count += 1
yield (features_for_path, [domain, path])
logger.info("collecting features for %i instances", count)
distributor = borg.distributors.make(
distributor_name,
workers=workers)
for (cnf_path, names, values) in distributor.do(yield_runs()):
csv_path = cnf_path + suffix
with open(csv_path, "wb") as csv_file:
csv.writer(csv_file).writerow(names)
csv.writer(csv_file).writerow(values)
if __name__ == "__main__":
borg.script(main)
| mit | 5,014,328,690,648,508,000 | 28.831169 | 86 | 0.610361 | false |
SamHames/scikit-image | skimage/restoration/tests/test_denoise.py | 1 | 5281 | import numpy as np
from numpy.testing import run_module_suite, assert_raises, assert_equal
from skimage import restoration, data, color, img_as_float
np.random.seed(1234)
lena = img_as_float(data.lena()[:128, :128])
lena_gray = color.rgb2gray(lena)
checkerboard_gray = img_as_float(data.checkerboard())
checkerboard = color.gray2rgb(checkerboard_gray)
def test_denoise_tv_chambolle_2d():
# lena image
img = lena_gray.copy()
# add noise to lena
img += 0.5 * img.std() * np.random.rand(*img.shape)
# clip noise so that it does not exceed allowed range for float images.
img = np.clip(img, 0, 1)
# denoise
denoised_lena = restoration.denoise_tv_chambolle(img, weight=60.0)
# which dtype?
assert denoised_lena.dtype in [np.float, np.float32, np.float64]
from scipy import ndimage
grad = ndimage.morphological_gradient(img, size=((3, 3)))
grad_denoised = ndimage.morphological_gradient(
denoised_lena, size=((3, 3)))
# test if the total variation has decreased
assert grad_denoised.dtype == np.float
assert (np.sqrt((grad_denoised**2).sum())
< np.sqrt((grad**2).sum()) / 2)
def test_denoise_tv_chambolle_multichannel():
denoised0 = restoration.denoise_tv_chambolle(lena[..., 0], weight=60.0)
denoised = restoration.denoise_tv_chambolle(lena, weight=60.0,
multichannel=True)
assert_equal(denoised[..., 0], denoised0)
def test_denoise_tv_chambolle_float_result_range():
# lena image
img = lena_gray
int_lena = np.multiply(img, 255).astype(np.uint8)
assert np.max(int_lena) > 1
denoised_int_lena = restoration.denoise_tv_chambolle(int_lena, weight=60.0)
# test if the value range of output float data is within [0.0:1.0]
assert denoised_int_lena.dtype == np.float
assert np.max(denoised_int_lena) <= 1.0
assert np.min(denoised_int_lena) >= 0.0
def test_denoise_tv_chambolle_3d():
"""Apply the TV denoising algorithm on a 3D image representing a sphere."""
x, y, z = np.ogrid[0:40, 0:40, 0:40]
mask = (x - 22)**2 + (y - 20)**2 + (z - 17)**2 < 8**2
mask = 100 * mask.astype(np.float)
mask += 60
mask += 20 * np.random.rand(*mask.shape)
mask[mask < 0] = 0
mask[mask > 255] = 255
res = restoration.denoise_tv_chambolle(mask.astype(np.uint8), weight=100)
assert res.dtype == np.float
assert res.std() * 255 < mask.std()
# test wrong number of dimensions
assert_raises(ValueError, restoration.denoise_tv_chambolle,
np.random.rand(8, 8, 8, 8))
def test_denoise_tv_bregman_2d():
img = checkerboard_gray.copy()
# add some random noise
img += 0.5 * img.std() * np.random.rand(*img.shape)
img = np.clip(img, 0, 1)
out1 = restoration.denoise_tv_bregman(img, weight=10)
out2 = restoration.denoise_tv_bregman(img, weight=5)
# make sure noise is reduced in the checkerboard cells
assert img[30:45, 5:15].std() > out1[30:45, 5:15].std()
assert out1[30:45, 5:15].std() > out2[30:45, 5:15].std()
def test_denoise_tv_bregman_float_result_range():
# lena image
img = lena_gray.copy()
int_lena = np.multiply(img, 255).astype(np.uint8)
assert np.max(int_lena) > 1
denoised_int_lena = restoration.denoise_tv_bregman(int_lena, weight=60.0)
# test if the value range of output float data is within [0.0:1.0]
assert denoised_int_lena.dtype == np.float
assert np.max(denoised_int_lena) <= 1.0
assert np.min(denoised_int_lena) >= 0.0
def test_denoise_tv_bregman_3d():
img = checkerboard.copy()
# add some random noise
img += 0.5 * img.std() * np.random.rand(*img.shape)
img = np.clip(img, 0, 1)
out1 = restoration.denoise_tv_bregman(img, weight=10)
out2 = restoration.denoise_tv_bregman(img, weight=5)
# make sure noise is reduced in the checkerboard cells
assert img[30:45, 5:15].std() > out1[30:45, 5:15].std()
assert out1[30:45, 5:15].std() > out2[30:45, 5:15].std()
def test_denoise_bilateral_2d():
img = checkerboard_gray.copy()
# add some random noise
img += 0.5 * img.std() * np.random.rand(*img.shape)
img = np.clip(img, 0, 1)
out1 = restoration.denoise_bilateral(img, sigma_range=0.1,
sigma_spatial=20)
out2 = restoration.denoise_bilateral(img, sigma_range=0.2,
sigma_spatial=30)
# make sure noise is reduced in the checkerboard cells
assert img[30:45, 5:15].std() > out1[30:45, 5:15].std()
assert out1[30:45, 5:15].std() > out2[30:45, 5:15].std()
def test_denoise_bilateral_3d():
img = checkerboard.copy()
# add some random noise
img += 0.5 * img.std() * np.random.rand(*img.shape)
img = np.clip(img, 0, 1)
out1 = restoration.denoise_bilateral(img, sigma_range=0.1,
sigma_spatial=20)
out2 = restoration.denoise_bilateral(img, sigma_range=0.2,
sigma_spatial=30)
# make sure noise is reduced in the checkerboard cells
assert img[30:45, 5:15].std() > out1[30:45, 5:15].std()
assert out1[30:45, 5:15].std() > out2[30:45, 5:15].std()
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | 2,153,911,807,464,892,400 | 34.92517 | 79 | 0.625828 | false |
qsnake/gpaw | config.py | 1 | 17421 | # Copyright (C) 2006 CSC-Scientific Computing Ltd.
# Please see the accompanying LICENSE file for further information.
import os
import sys
import re
import distutils.util
from distutils.sysconfig import get_config_var, get_config_vars
from distutils.command.config import config
from glob import glob
from os.path import join
from stat import ST_MTIME
def check_packages(packages, msg, include_ase, import_numpy):
"""Check the python version and required extra packages
If ASE is not installed, the `packages` list is extended with the
ASE modules if they are found."""
if sys.version_info < (2, 3, 0, 'final', 0):
raise SystemExit('Python 2.3.1 or later is required!')
if import_numpy:
try:
import numpy
except ImportError:
raise SystemExit('numpy is not installed!')
else:
msg += ['* numpy is not installed.',
' "include_dirs" in your customize.py must point to "numpy/core/include".']
if not include_ase:
if import_numpy:
try:
import ase
except ImportError:
import_ase = True
else:
import_ase = False
else:
import_ase = False
if include_ase or import_ase:
# Find ASE directories:
# include_ase works in case:
# cd gpaw # top-level gpaw source directory
# tar zxf ~/python-ase-3.1.0.846.tar.gz
# ln -s python-ase-3.1.0.846/ase .
ase_root = 'ase'
if include_ase:
assert os.path.isdir(ase_root), ase_root+': No such file or directory'
ase = []
for root, dirs, files in os.walk(ase_root):
if 'CVS' in dirs:
dirs.remove('CVS')
if '.svn' in dirs:
dirs.remove('.svn')
if '__init__.py' in files:
ase.append(root.replace('/', '.'))
if len(ase) == 0:
msg += ['* ASE is not installed! You may be able to install',
" gpaw, but you can't use it without ASE!"]
else:
packages += ase
def find_file(arg, dir, files):
    # Checks whether the first element of the list arg is contained in the list files
    # and, if so, appends dir to arg. Intended for use with os.path.walk.
if arg[0] in files:
arg.append(dir)
def get_system_config(define_macros, undef_macros,
include_dirs, libraries, library_dirs, extra_link_args,
extra_compile_args, runtime_library_dirs, extra_objects,
msg, import_numpy):
undef_macros += ['NDEBUG']
if import_numpy:
import numpy
include_dirs += [numpy.get_include()]
include_dirs += ['c/libxc']
machine = os.uname()[4]
if machine == 'sun4u':
# _
# |_ | ||\ |
# _||_|| \|
#
extra_compile_args += ['-Kpic', '-fast']
# Suppress warning from -fast (-xarch=native):
f = open('cc-test.c', 'w')
f.write('int main(){}\n')
f.close()
stderr = os.popen3('cc cc-test.c -fast')[2].read()
arch = re.findall('-xarch=(\S+)', stderr)
os.remove('cc-test.c')
if len(arch) > 0:
extra_compile_args += ['-xarch=%s' % arch[-1]]
# We need the -Bstatic before the -lsunperf and -lfsu:
# http://forum.java.sun.com/thread.jspa?threadID=5072537&messageID=9265782
extra_link_args += ['-Bstatic', '-lsunperf', '-lfsu', '-Bdynamic']
cc_version = os.popen3('cc -V')[2].readline().split()[3]
if cc_version > '5.6':
libraries.append('mtsk')
else:
extra_link_args.append('-lmtsk')
#define_macros.append(('NO_C99_COMPLEX', '1'))
msg += ['* Using SUN high performance library']
elif sys.platform in ['aix5', 'aix6']:
#
# o|_ _ _
# ||_)| | |
#
extra_compile_args += ['-qlanglvl=stdc99']
# setting memory limit is necessary on aix5
if sys.platform == 'aix5':
extra_link_args += ['-bmaxdata:0x80000000',
'-bmaxstack:0x80000000']
libraries += ['f', 'lapack', 'essl']
define_macros.append(('GPAW_AIX', '1'))
elif machine == 'x86_64':
# _
# \/|_||_ |_ |_|
# /\|_||_| _ |_| |
#
extra_compile_args += ['-Wall', '-std=c99']
# Look for ACML libraries:
acml = glob('/opt/acml*/g*64/lib')
if len(acml) > 0:
library_dirs += [acml[-1]]
libraries += ['acml']
if acml[-1].find('gfortran') != -1: libraries.append('gfortran')
if acml[-1].find('gnu') != -1: libraries.append('g2c')
extra_link_args += ['-Wl,-rpath=' + acml[-1]]
msg += ['* Using ACML library']
else:
atlas = False
for dir in ['/usr/lib', '/usr/local/lib']:
if glob(join(dir, 'libatlas.a')) != []:
atlas = True
break
if atlas:
libraries += ['lapack', 'atlas', 'blas']
library_dirs += [dir]
msg += ['* Using ATLAS library']
else:
libraries += ['blas', 'lapack']
msg += ['* Using standard lapack']
elif machine =='ia64':
# _ _
# |_ | o
# _||_||
#
extra_compile_args += ['-Wall', '-std=c99']
libraries += ['mkl','mkl_lapack64']
elif machine == 'i686':
# _
# o|_ |_||_
# ||_||_||_|
#
extra_compile_args += ['-Wall', '-std=c99']
if 'MKL_ROOT' in os.environ:
mklbasedir = [os.environ['MKL_ROOT']]
else:
mklbasedir = glob('/opt/intel/mkl*')
libs = ['libmkl_ia32.a']
if mklbasedir != []:
os.path.walk(mklbasedir[0],find_file, libs)
libs.pop(0)
if libs != []:
libs.sort()
libraries += ['mkl_lapack',
'mkl_ia32', 'guide', 'pthread', 'mkl']#, 'mkl_def']
library_dirs += libs
msg += ['* Using MKL library: %s' % library_dirs[-1]]
#extra_link_args += ['-Wl,-rpath=' + library_dirs[-1]]
else:
atlas = False
for dir in ['/usr/lib', '/usr/local/lib']:
if glob(join(dir, 'libatlas.a')) != []:
atlas = True
break
if atlas:
libraries += ['lapack', 'atlas', 'blas']
library_dirs += [dir]
msg += ['* Using ATLAS library']
else:
libraries += ['blas', 'lapack']
msg += ['* Using standard lapack']
# add libg2c if available
g2c=False
for dir in ['/usr/lib', '/usr/local/lib']:
if glob(join(dir, 'libg2c.so')) != []:
g2c=True
break
if glob(join(dir, 'libg2c.a')) != []:
g2c=True
break
if g2c: libraries += ['g2c']
elif sys.platform == 'darwin':
extra_compile_args += ['-Wall', '-std=c99']
include_dirs += ['/usr/include/malloc']
if glob('/System/Library/Frameworks/vecLib.framework') != []:
extra_link_args += ['-framework vecLib']
msg += ['* Using vecLib']
else:
libraries += ['blas', 'lapack']
msg += ['* Using standard lapack']
return msg
def get_parallel_config(mpi_libraries,mpi_library_dirs,mpi_include_dirs,
mpi_runtime_library_dirs,mpi_define_macros):
globals = {}
execfile('gpaw/mpi/config.py', globals)
mpi = globals['get_mpi_implementation']()
if mpi == '':
mpicompiler = None
elif mpi == 'sun':
mpi_include_dirs += ['/opt/SUNWhpc/include']
mpi_libraries += ['mpi']
mpi_library_dirs += ['/opt/SUNWhpc/lib']
mpi_runtime_library_dirs += ['/opt/SUNWhpc/lib']
mpicompiler = get_config_var('CC')
elif mpi == 'poe':
mpicompiler = 'mpcc_r'
else:
#Try to use mpicc
mpicompiler = 'mpicc'
return mpicompiler
def get_scalapack_config(define_macros):
# check ScaLapack settings
define_macros.append(('GPAW_WITH_SL', '1'))
def mtime(path, name, mtimes):
"""Return modification time.
The modification time of a source file is returned. If one of its
dependencies is newer, the mtime of that file is returned.
This function fails if two include files with the same name
are present in different directories."""
include = re.compile('^#\s*include "(\S+)"', re.MULTILINE)
if mtimes.has_key(name):
return mtimes[name]
t = os.stat(os.path.join(path, name))[ST_MTIME]
for name2 in include.findall(open(os.path.join(path, name)).read()):
path2, name22 = os.path.split(name2)
if name22 != name:
t = max(t, mtime(os.path.join(path, path2), name22, mtimes))
mtimes[name] = t
return t
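# Illustrative sketch only (not part of the original build script): given a
# hypothetical source file 'c/bc.c' whose local '#include "..."' headers sit
# alongside it, mtime() returns the newest modification time among the file
# and its recursively discovered includes, caching results in the mtimes dict.
#
#   mtimes = {}
#   newest = mtime('c/', 'bc.c', mtimes)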
def check_dependencies(sources):
# Distutils does not do deep dependencies correctly. We take care of
# that here so that "python setup.py build_ext" always does the right
# thing!
mtimes = {} # modification times
# Remove object files if any dependencies have changed:
plat = distutils.util.get_platform() + '-' + sys.version[0:3]
remove = False
for source in sources:
path, name = os.path.split(source)
t = mtime(path + '/', name, mtimes)
o = 'build/temp.%s/%s.o' % (plat, source[:-2]) # object file
if os.path.exists(o) and t > os.stat(o)[ST_MTIME]:
print 'removing', o
os.remove(o)
remove = True
so = 'build/lib.%s/_gpaw.so' % plat
if os.path.exists(so) and remove:
# Remove shared object C-extension:
# print 'removing', so
os.remove(so)
def test_configuration():
raise NotImplementedError
def write_configuration(define_macros, include_dirs, libraries, library_dirs,
extra_link_args, extra_compile_args,
runtime_library_dirs, extra_objects, mpicompiler,
mpi_libraries, mpi_library_dirs, mpi_include_dirs,
mpi_runtime_library_dirs, mpi_define_macros):
# Write the compilation configuration into a file
try:
out = open('configuration.log', 'w')
except IOError, x:
print x
return
print >> out, "Current configuration"
print >> out, "libraries", libraries
print >> out, "library_dirs", library_dirs
print >> out, "include_dirs", include_dirs
print >> out, "define_macros", define_macros
print >> out, "extra_link_args", extra_link_args
print >> out, "extra_compile_args", extra_compile_args
print >> out, "runtime_library_dirs", runtime_library_dirs
print >> out, "extra_objects", extra_objects
if mpicompiler is not None:
print >> out
print >> out, "Parallel configuration"
print >> out, "mpicompiler", mpicompiler
print >> out, "mpi_libraries", mpi_libraries
print >> out, "mpi_library_dirs", mpi_library_dirs
print >> out, "mpi_include_dirs", mpi_include_dirs
print >> out, "mpi_define_macros", mpi_define_macros
print >> out, "mpi_runtime_library_dirs", mpi_runtime_library_dirs
out.close()
def build_interpreter(define_macros, include_dirs, libraries, library_dirs,
extra_link_args, extra_compile_args,
runtime_library_dirs, extra_objects,
mpicompiler, mpilinker, mpi_libraries, mpi_library_dirs,
mpi_include_dirs, mpi_runtime_library_dirs,
mpi_define_macros):
#Build custom interpreter which is used for parallel calculations
cfgDict = get_config_vars()
plat = distutils.util.get_platform() + '-' + sys.version[0:3]
cfiles = glob('c/[a-zA-Z_]*.c') + ['c/bmgs/bmgs.c']
cfiles += glob('c/libxc/src/*.c')
if ('HDF5', 1) in define_macros:
cfiles += glob('h5py/c/*.c')
cfiles += glob('h5py/c/lzf/*.c')
cfiles2remove = ['c/libxc/src/test.c',
'c/libxc/src/xc_f.c',
'c/libxc/src/work_gga_x.c',
'c/libxc/src/work_lda.c'
]
for c2r in glob('c/libxc/src/funcs_*.c'): cfiles2remove.append(c2r)
for c2r in cfiles2remove: cfiles.remove(c2r)
sources = ['c/bc.c', 'c/localized_functions.c', 'c/mpi.c', 'c/_gpaw.c',
'c/operators.c', 'c/transformers.c', 'c/compiled_WITH_SL.c',
'c/blacs.c', 'c/utilities.c']
objects = ' '.join(['build/temp.%s/' % plat + x[:-1] + 'o'
for x in cfiles])
if not os.path.isdir('build/bin.%s/' % plat):
os.makedirs('build/bin.%s/' % plat)
exefile = 'build/bin.%s/' % plat + '/gpaw-python'
libraries += mpi_libraries
library_dirs += mpi_library_dirs
define_macros += mpi_define_macros
include_dirs += mpi_include_dirs
runtime_library_dirs += mpi_runtime_library_dirs
define_macros.append(('PARALLEL', '1'))
define_macros.append(('GPAW_INTERPRETER', '1'))
macros = ' '.join(['-D%s=%s' % x for x in define_macros if x[0].strip()])
include_dirs.append(cfgDict['INCLUDEPY'])
include_dirs.append(cfgDict['CONFINCLUDEPY'])
includes = ' '.join(['-I' + incdir for incdir in include_dirs])
library_dirs.append(cfgDict['LIBPL'])
lib_dirs = ' '.join(['-L' + lib for lib in library_dirs])
libs = ' '.join(['-l' + lib for lib in libraries if lib.strip()])
libs += ' -lpython%s' % cfgDict['VERSION']
libs = ' '.join([libs, cfgDict['LIBS'], cfgDict['LIBM']])
    # Hack taken from distutils to determine the option for runtime_library_dirs
if sys.platform[:6] == 'darwin':
# MacOSX's linker doesn't understand the -R flag at all
runtime_lib_option = '-L'
elif sys.platform[:5] == 'hp-ux':
runtime_lib_option = '+s -L'
elif os.popen('mpicc --showme 2> /dev/null', 'r').read()[:3] == 'gcc':
runtime_lib_option = '-Wl,-R'
elif os.popen('mpicc -show 2> /dev/null', 'r').read()[:3] == 'gcc':
runtime_lib_option = '-Wl,-R'
else:
runtime_lib_option = '-R'
runtime_libs = ' '.join([ runtime_lib_option + lib for lib in runtime_library_dirs])
extra_link_args.append(cfgDict['LDFLAGS'])
if sys.platform in ['aix5', 'aix6']:
extra_link_args.append(cfgDict['LINKFORSHARED'].replace('Modules', cfgDict['LIBPL']))
elif sys.platform == 'darwin':
pass
else:
extra_link_args.append(cfgDict['LINKFORSHARED'])
if ('IO_WRAPPERS', 1) in define_macros:
extra_link_args += ['-Wl,-wrap,fread',
'-Wl,-wrap,_IO_getc',
'-Wl,-wrap,getc_unlocked',
'-Wl,-wrap,fgets',
'-Wl,-wrap,ungetc',
'-Wl,-wrap,feof',
'-Wl,-wrap,ferror',
'-Wl,-wrap,fflush',
'-Wl,-wrap,fseek',
'-Wl,-wrap,rewind',
# '-Wl,-wrap,fileno',
'-Wl,-wrap,flockfile',
'-Wl,-wrap,funlockfile',
'-Wl,-wrap,clearerr',
'-Wl,-wrap,fgetpos',
'-Wl,-wrap,fsetpos',
'-Wl,-wrap,setbuf',
'-Wl,-wrap,setvbuf',
'-Wl,-wrap,ftell',
'-Wl,-wrap,fstat',
'-Wl,-wrap,fstat64',
'-Wl,-wrap,fgetc',
# '-Wl,-wrap,fputc',
# '-Wl,-wrap,fputs',
# '-Wl,-wrap,fwrite',
# '-Wl,-wrap,_IO_putc',
'-Wl,-wrap,fopen',
'-Wl,-wrap,fopen64',
'-Wl,-wrap,fclose',
]
# Compile the parallel sources
for src in sources:
obj = 'build/temp.%s/' % plat + src[:-1] + 'o'
cmd = ('%s %s %s %s -o %s -c %s ' ) % \
(mpicompiler,
macros,
' '.join(extra_compile_args),
includes,
obj,
src)
print cmd
if '--dry-run' not in sys.argv:
error=os.system(cmd)
if error != 0:
msg = ['* compiling FAILED! Only serial version of code will work.']
break
# Link the custom interpreter
cmd = ('%s -o %s %s %s %s %s %s %s' ) % \
(mpilinker,
exefile,
objects,
' '.join(extra_objects),
lib_dirs,
libs,
runtime_libs,
' '.join(extra_link_args))
msg = ['* Building a custom interpreter']
print cmd
if '--dry-run' not in sys.argv:
error=os.system(cmd)
if error != 0:
msg += ['* linking FAILED! Only serial version of code will work.']
return error, msg
| gpl-3.0 | 1,044,954,531,353,206,300 | 33.772455 | 93 | 0.508983 | false |
jamesabel/osnap | osnap/osnapy.py | 1 | 2772 |
import argparse
from osnap import default_python_version, get_logger, init_logger_from_args, __application_name__
import osnap.osnapy_win
import osnap.osnapy_mac
import osnap.util
LOGGER = get_logger(__application_name__)
def make_osnapy(
python_version,
application_name = None,
clean_cache = False,
use_pyrun = False, # support for eGenix™ PyRun™ has been removed
force_app_uninstall = False,
architecture = '64bit',
):
LOGGER.debug('creating osnapy Python environment using python %s' % python_version)
if osnap.util.is_mac() and application_name is None:
raise Exception('must specify the application name on mac')
osnapy = None
if osnap.util.is_windows():
osnapy = osnap.osnapy_win.OsnapyWin(python_version, application_name, clean_cache, architecture=architecture)
elif osnap.util.is_mac():
if use_pyrun:
LOGGER.critical('pyrun capability has been removed')
else:
osnapy = osnap.osnapy_mac.OsnapyMac(python_version, application_name, clean_cache, force_app_uninstall)
else:
raise NotImplementedError
osnapy.create_python()
osnapy.pip('pip')
osnapy.pip('setuptools')
osnapy.pip('Cython') # e.g. for kivy
osnapy.pip(None) # install all from requirements.txt
def main():
parser = argparse.ArgumentParser(description='create the osnapy Python environment',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-a', '--application', default=None, help='application name (required for OSX/MacOS)')
parser.add_argument('-A', '--architecture', default='64bit', choices=['64bit', '32bit'], help='The architecture to use for the launcher')
parser.add_argument('-p', '--python_version', default=default_python_version, help='python version')
parser.add_argument('-c', '--clear', action='store_true', default=False, help='clear cache')
parser.add_argument('-f', '--force_uninstall', action='store_true', default=False,
help='force application uninstalls if necessary')
parser.add_argument('-v', '--verbose', action='store_true', default=False, help='print more verbose messages')
args = parser.parse_args()
init_logger_from_args(args)
make_osnapy(
python_version = args.python_version,
application_name = args.application,
clean_cache = args.clear,
use_pyrun = False, # support for eGenix™ PyRun™ has been removed
force_app_uninstall = args.force_uninstall,
architecture = args.architecture
)
if __name__ == '__main__':
main()
| mit | 8,053,201,480,406,695,000 | 40.253731 | 141 | 0.643632 | false |
oblank/pydjango-froum | xp/settings.py | 1 | 7211 | # coding: utf-8
# Django settings for xp project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'forum', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': 'root',
'PASSWORD': '123456',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '3306', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Asia/Shanghai'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'zh-CN'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True  # Only needs to be enabled when the admin is used
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = False
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'h6=yzee&jze#4p1@twhksg1wg6hv%pzwomw(!o($qsly%lzlhe'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.cache.UpdateCacheMiddleware',  # Cache middleware; must come first
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',  # CSRF is enabled; remember to add {% csrf_token %} to POST forms and use RequestContext
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.cache.FetchFromCacheMiddleware',  # Cache middleware; must come last
)
ROOT_URLCONF = 'xp.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'xp.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
TEMPLATE_CONTEXT_PROCESSORS = (  # F2E exposes current_user and request objects; this setting makes RequestContext usable in templates
    'django.contrib.auth.context_processors.auth',  # user object, etc.
    'django.core.context_processors.request',  # request object, etc.
    'django.core.context_processors.static',  # use {{ STATIC_URL }} in templates to get the static files path
    'forum.context_processors.custom_proc',  # custom template context processor
)
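# Illustrative sketch only: a custom context processor such as
# 'forum.context_processors.custom_proc' above is simply a function that takes
# the request and returns a dict merged into every RequestContext. The body
# below is an assumption, not the project's actual implementation.
#
# def custom_proc(request):
#     return {'site_name': u'forum'}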
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'django.contrib.sitemaps', # Django sitemap framework
'forum',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'django.db.backends': {
'level': 'DEBUG',
'handlers': ['console'],
},
}
}
# CACHES = {  # memcached cache settings
# 'default': {
# 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
# 'LOCATION': '127.0.0.1:11211',
# }
# }
# SESSION_ENGINE = 'django.contrib.sessions.backends.cache'  # store sessions in memcached
# Custom user model
AUTH_USER_MODEL = 'forum.ForumUser'
# User authentication backends
AUTHENTICATION_BACKENDS = ('forum.backends.EmailAuthBackend',)
# Default login URI
LOGIN_URL = '/login/'
# Email sending settings
EMAIL_HOST = 'smtp.163.com'
EMAIL_PORT = 25
EMAIL_HOST_USER= 'a135689110'
EMAIL_HOST_PASSWORD= '8804183'
DEFAULT_FROM_EMAIL = '[email protected]'
# Reserved keywords for registered usernames (not a Django setting)
RESERVED = ["user", "topic", "home", "setting", "forgot", "login", "logout", "register", "admin"]
| mit | -2,053,450,417,403,817,500 | 33.014706 | 136 | 0.682951 | false |
aldanor/blox | blox/utils.py | 1 | 1944 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
import struct
import functools
import numpy as np
try:
import ujson as json
json_dumps = json.dumps
except ImportError:
import json
json_dumps = functools.partial(json.dumps, separators=',:')
def flatten_dtype(dtype):
dtype = np.dtype(dtype)
if dtype.fields is not None:
if dtype.type is np.record:
return ('record', list(dtype.descr))
return list(dtype.descr)
return str(dtype)
def restore_dtype(dtype):
def _convert_dtype(dt):
# workaround for a long-standing bug in numpy:
# https://github.com/numpy/numpy/issues/2407
is_string = lambda s: isinstance(s, (six.text_type, six.string_types))
if isinstance(dt, list):
if len(dt) == 2 and is_string(dt[0]):
return _convert_dtype(tuple(dt))
return [_convert_dtype(subdt) for subdt in dt]
elif isinstance(dt, tuple):
return tuple(_convert_dtype(subdt) for subdt in dt)
elif isinstance(dt, six.text_type) and six.PY2:
return dt.encode('ascii')
return dt
dtype = _convert_dtype(dtype)
if isinstance(dtype, (list, tuple)) and len(dtype) == 2 and dtype[0] == 'record':
return np.dtype((np.record, np.dtype(dtype[1])))
return np.dtype(dtype)
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json_dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
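# Minimal round-trip sketch (illustrative, not part of the original module):
# write_json length-prefixes the UTF-8 JSON payload with a little-endian
# 64-bit size written via write_i64, and read_json reads it back.
#
# from io import BytesIO
# buf = BytesIO()
# write_json(buf, {'dtype': flatten_dtype('i4')})
# buf.seek(0)
# assert read_json(buf) == {'dtype': 'int32'}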
| mit | 1,654,692,354,838,072,600 | 28.014925 | 85 | 0.629115 | false |
basimr/snoop-dogg-number | filter_graph_by_sdn.py | 1 | 1253 | #!/usr/bin/python2
# TODO: Add description.
import psycopg2
import networkx as nx
# TODO: Create a class for storing artists' SDN and path to avoid doing this.
SDN = 0
PATH = 1
# Load graph from disk
graph = nx.read_gexf("graph/sdn-unweighted.gexf")
# Initialize dictionary with the Snoop Dogg as the base case
artists = {"Snoop Dogg" : (0, ["Snoop Dogg"])}
# Traverse the graph breadth-first and compute every artist's Snoop Dogg Number in O(V + E)
for edge in nx.bfs_edges(graph, "Snoop Dogg"):
parent = edge[0]
child = edge[1]
dist_to_snoopdogg = artists[parent][SDN] + 1
path_to_snoopdogg = artists[parent][PATH] + [child]
artists[child] = (dist_to_snoopdogg, path_to_snoopdogg)
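# After the traversal, 'artists' maps every reachable artist to a tuple of
# (Snoop Dogg Number, path back to Snoop Dogg). Hypothetical lookup:
#   artists.get("Some Artist")  ->  (2, ["Snoop Dogg", "Collaborator", "Some Artist"])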
# Remove artists far from Snoop Dogg and save a separate graph for each iteration
# TODO: Can I use comprehensions to simplify these loops?
for sdn in [5, 4, 3, 2, 1]:
distant_artists = []
for a in artists:
if artists[a][SDN] > sdn:
distant_artists.append(a)
for a in distant_artists:
del artists[a]
graph.remove_node(a)
filename = "graph/sdn-" + str(sdn) + ".gexf"
nx.write_gexf(graph, filename)
print("Wrote graph of artists with SDN of " + sdn + " or less at " + filename)
print(nx.info(graph))
| mit | 2,550,880,907,891,530,000 | 29.560976 | 91 | 0.676776 | false |
kurokid/connme | connme/connme.py | 1 | 2726 | #!/usr/bin/env python2
import sip
sip.setapi('QString', 2)
from PyQt4 import QtGui, QtCore, QtNetwork
from connmeMain import connme
import sys,os
class SingleApplicationWithMessaging(QtGui.QApplication):
messageAvailable = QtCore.pyqtSignal(object)
def __init__(self, argv, key):
QtGui.QApplication.__init__(self, argv)
self._key = key
self._memory = QtCore.QSharedMemory(self)
self._memory.setKey(self._key)
if self._memory.attach():
self._running = True
else:
self._running = False
if not self._memory.create(1):
raise RuntimeError(self._memory.errorString())
self._timeout = 1000
self._server = QtNetwork.QLocalServer(self)
if not self.isRunning():
self._server.newConnection.connect(self.handleMessage)
self._server.listen(self._key)
def handleMessage(self):
socket = self._server.nextPendingConnection()
if socket.waitForReadyRead(self._timeout):
self.messageAvailable.emit(
socket.readAll().data().decode('utf-8'))
socket.disconnectFromServer()
else:
QtCore.qDebug(socket.errorString())
def isRunning(self):
return self._running
def sendMessage(self, message):
if self.isRunning():
socket = QtNetwork.QLocalSocket(self)
socket.connectToServer(self._key, QtCore.QIODevice.WriteOnly)
if not socket.waitForConnected(self._timeout):
print(socket.errorString())
return False
if not isinstance(message, bytes):
message = message.encode('utf-8')
socket.write(message)
if not socket.waitForBytesWritten(self._timeout):
print(socket.errorString())
return False
socket.disconnectFromServer()
return True
return False
def main():
key = 'connme'
app = SingleApplicationWithMessaging(sys.argv, key)
if app.isRunning():
app.sendMessage(' '.join(sys.argv[1:]))
sys.exit(1)
gui = connme()
gui.address = os.path.realpath(__file__)
app.messageAvailable.connect(gui.processClient)
gui.showGui()
sys.exit(app.exec_())
if __name__ == '__main__':
euid = os.geteuid()
os.chdir(sys.path[0])
if euid != 0:
if os.path.exists("/usr/bin/gksu"):
args = ['gksu', sys.executable] + sys.argv + [os.environ]
os.execlpe('gksu', *args)
elif os.path.exists("/usr/bin/kdesudo"):
args = ['kdesudo', sys.executable] + sys.argv + [os.environ]
os.execlpe('kdesudo', *args)
main() | gpl-3.0 | -4,815,941,542,223,123,000 | 32.256098 | 73 | 0.591709 | false |
BaseBot/Triangula | src/python/triangula/navigation.py | 1 | 1101 | class TaskWaypoint:
"""
Consists of a target Pose defining a location and orientation, and a Task which should be run when the robot reaches
the target position. The task can be None, in which case the robot won't attempt to do anything at the target point.
"""
def __init__(self, pose, task=None, stop=False):
"""
Constructor
:param triangula.chassis.Pose pose:
The target Pose, defining the location and orientation of this waypoint
:param triangula.task.Task task:
A Task to run when the target point is reached. The task will be run until a non-None value is returned from
the poll method. Defaults to None, in which case no task will be invoked and the robot will proceed
immediately to the next waypoint.
:param stop:
Defaults to False, if this is set to True then the robot will come to a complete stop before either running
the sub-task or proceeding to the next waypoint.
"""
self.pose = pose
self.task = task
self.stop = stop | apache-2.0 | -6,752,106,085,634,482,000 | 46.913043 | 120 | 0.656676 | false |
ivanlai/Kaggle-Planet-Amazon | PyTorch_models.py | 1 | 5862 | # Reference and ideas from http://pytorch.org/tutorials/beginner/transfer_learning_tutorial.html
from __future__ import print_function
import torch.nn as nn
import torchvision.models as models
import warnings
warnings.filterwarnings("ignore")
##################################################################
## PyTorch Model implementations in
## /usr/local/lib/python2.7/dist-packages/torchvision/models ##
##################################################################
def resnet18(num_classes, pretrained=True, freeze=False):
    model = models.resnet18(pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
# Parameters of newly constructed modules have requires_grad=True by default
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Resnet18'
def resnet34(num_classes, pretrained=True, freeze=False):
    model = models.resnet34(pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Resnet34'
def resnet50(num_classes, pretrained=True, freeze=False):
model = models.resnet50( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Resnet50'
def resnet101(num_classes, pretrained=True, freeze=False):
model = models.resnet101( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Resnet101'
def resnet152(num_classes, pretrained=True, freeze=False):
model = models.resnet152( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Resnet152'
##################################################################
def densenet121(num_classes, pretrained=True, freeze=False):
model = models.densenet121( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.classifier.in_features
model.classifier = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Densenet121'
def densenet161(num_classes, pretrained=True, freeze=False):
model = models.densenet161( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.classifier.in_features
model.classifier = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Densenet161'
def densenet169(num_classes, pretrained=True, freeze=False):
model = models.densenet169(pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.classifier.in_features
model.classifier = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Densenet169'
def densenet201(num_classes, pretrained=True, freeze=False):
model = models.densenet201( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.classifier.in_features
model.classifier = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Densenet201'
##################################################################
def inception_v3(num_classes, pretrained=True, freeze=False):
model = models.inception_v3(pretrained=pretrained)
model.aux_logits = False
if freeze:
model = freeze_all_layers(model)
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Inception_v3'
##################################################################
def vgg16(num_classes, pretrained=True, freeze=False):
# Credit: https://discuss.pytorch.org/t/how-to-perform-finetuning-in-pytorch/419/10
    model = models.vgg16(pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
mod = list(model.classifier.children())
mod.pop()
    mod.append(nn.Linear(4096, num_classes))
new_classifier = nn.Sequential(*mod)
model.classifier = new_classifier
model = Add_Sigmoid(model)
return model, 'VGG16'
##################################################################
def vgg19(num_classes, pretrained=True, freeze=False):
# Credit: https://discuss.pytorch.org/t/how-to-perform-finetuning-in-pytorch/419/10
    model = models.vgg19(pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
mod = list(model.classifier.children())
mod.pop()
    mod.append(nn.Linear(4096, num_classes))
new_classifier = nn.Sequential(*mod)
model.classifier = new_classifier
model = Add_Sigmoid(model)
return model, 'VGG19'
##################################################################
class Add_Sigmoid(nn.Module):
def __init__(self, pretrained_model):
super(Add_Sigmoid, self).__init__()
self.pretrained_model = pretrained_model
self.sigmoid = nn.Sigmoid()
def forward(self, x):
return self.sigmoid(self.pretrained_model(x))
##################################################################
def freeze_all_layers(model):
#Freeze all layers except last during training (last layer training set to true when it get redefined)
for param in model.parameters():
param.requires_grad = False
return model
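# Hedged usage sketch (not part of the original file): each factory above
# returns a (model, name) pair whose final sigmoid suits multi-label output.
#
# model, name = resnet50(num_classes=17, pretrained=True, freeze=False)
# model = model.cuda()  # optional; assumes a CUDA-capable GPU is available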
################################################################## | mit | 1,453,137,191,912,408,600 | 27.740196 | 106 | 0.624019 | false |
jfillmore/hoops | tests/models_tests/test_model_basekit_site.py | 1 | 4446 | from sqlalchemy.exc import IntegrityError
from tests.api_tests import APITestBase
from tests.models_tests import ModelsTestBase
from test_models.basekit import BaseKitBrand, BaseKitSite
from test_models.core import User
from hoops.common import BaseModel
import time
class TestBaseKitUserModel(ModelsTestBase):
def test_01_populate(self):
''' Populate the required Tables. '''
APITestBase.populate(self.db)
def test_02_for_fk_reference_to_basekit_brand(self):
''' Check the FK reference of BaseKitSite to BaseKitBrand.'''
my_bk_site = BaseKitSite.query.first()
my_bk_brand = my_bk_site.brand # via backref
bk_sites_for_brand = BaseKitBrand.query.filter_by(
id=my_bk_brand.id).first().basekit_sites # via relationship
assert my_bk_site in bk_sites_for_brand, "Test for checking the FK reference of BaseKitSite to BaseKitBrand failed"
def test_03_for_fk_reference_to_user(self):
''' Check the FK reference of BaseKitSite to User. '''
my_bk_site = BaseKitSite.query.first()
my_bk_user = my_bk_site.user # via backref
bk_sites_for_user = User.query.filter_by(
id=my_bk_user.id).first().basekit_sites # via relationship
assert my_bk_site in bk_sites_for_user, "Test for checking the FK reference of BaseKitSite to User failed"
def test_04_for_repr(self):
'''Test the __repr__ of BaseKitSite model.'''
first_bk_site = BaseKitSite.query.first()
assert "<BaseKitSite('" + str(first_bk_site.id) + "')>" in first_bk_site.__repr__(
), "Test for BaseKitSite __repr__ failed"
def test_05_for_unique_bk_site_id(self):
'''Test the uniqueness of bk_site_id.'''
my_bk_site_id = BaseKitSite.query.first().bk_site_id
bk_brand = BaseKitBrand.query.first()
user = User.query.first()
self.db.session.add(
BaseKitSite(brand=bk_brand, user=user, bk_site_id=my_bk_site_id, basekit_package_id='2', subdomain="com"))
try:
self.db.session.commit()
except Exception:
self.db.session.rollback()
assert IntegrityError, "Test for checking uniqueness of bk_site_id failed"
def test_06_for_unique_basekit_package_id(self):
'''Test the uniqueness of basekit_package_id.'''
my_basekit_package_id = BaseKitSite.query.first().basekit_package_id
brand = BaseKitBrand.query.first()
user = User.query.first()
self.db.session.add(
BaseKitSite(brand=brand, user=user, bk_site_id='5', basekit_package_id=my_basekit_package_id, subdomain='in'))
try:
self.db.session.commit()
except Exception:
self.db.session.rollback()
assert IntegrityError, "Test for checking uniqueness of basekit_package_id failed"
def test_07_get_the_inherited_class(self):
        ''' Check that BaseKitSite inherits from BaseModel. '''
baskekit_site = BaseKitSite()
assert isinstance(
baskekit_site, BaseModel), "Test to check inheritance of BaseKitSite from BaseModel failed"
def test_08_for_bk_user_updation(self):
        ''' Check that updating a BaseKitSite behaves correctly. '''
first_bk_site = BaseKitSite.query.first()
my_bk_site_created_at = first_bk_site.created_at
my_bk_site_updated_at = first_bk_site.updated_at
time.sleep(1)
self.db.session.merge(
BaseKitSite(id=first_bk_site.id, bk_site_id='8'))
try:
self.db.session.commit()
assert my_bk_site_created_at == BaseKitSite.query.first().created_at, 'Test for checking whether "created_at" is not changing failed'
assert my_bk_site_updated_at != BaseKitSite.query.first().updated_at, 'Test for checking whether "updated_at" changes failed'
assert True
except Exception, e:
self.db.session.rollback()
            raise AssertionError('Test for updating the BaseKitSite fields failed: %s' % e)
def test_to_json(self):
'''Check that serialization works'''
site = BaseKitSite.query.first()
out = site.to_json()
for col in ['status', 'front_end_ip_addresses', 'user_id', 'service', 'created_at', 'updated_at', 'template_id', 'package_id', 'domains', 'id', 'front_end_cnames']:
assert col in out, '%s not found in result of BaseKitSite.to_json()' % col
assert out['service'] == 'builder'
| mit | -1,244,179,570,931,433,500 | 46.297872 | 172 | 0.6426 | false |
trozet/python-tackerclient | tackerclient/shell.py | 1 | 32048 | # Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Command-line interface to the Tacker APIs
"""
from __future__ import print_function
import argparse
import getpass
import inspect
import itertools
import logging
import os
import sys
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth.identity import v3 as v3_auth
from keystoneclient import discover
from keystoneclient.openstack.common.apiclient import exceptions as ks_exc
from keystoneclient import session
from oslo_utils import encodeutils
import six.moves.urllib.parse as urlparse
from cliff import app
from cliff import commandmanager
from tackerclient.common import clientmanager
from tackerclient.common import command as openstack_command
from tackerclient.common import exceptions as exc
from tackerclient.common import extension as client_extension
from tackerclient.common import utils
from tackerclient.i18n import _
from tackerclient.tacker.v1_0 import extension
from tackerclient.tacker.v1_0.vm import device
from tackerclient.tacker.v1_0.vm import device_template
from tackerclient.tacker.v1_0.vm import vnf
from tackerclient.tacker.v1_0.vm import vnfd
from tackerclient.version import __version__
VERSION = '1.0'
TACKER_API_VERSION = '1.0'
def run_command(cmd, cmd_parser, sub_argv):
_argv = sub_argv
index = -1
values_specs = []
if '--' in sub_argv:
index = sub_argv.index('--')
_argv = sub_argv[:index]
values_specs = sub_argv[index:]
known_args, _values_specs = cmd_parser.parse_known_args(_argv)
cmd.values_specs = (index == -1 and _values_specs or values_specs)
return cmd.run(known_args)
def env(*_vars, **kwargs):
"""Search for the first defined of possibly many env vars.
Returns the first environment variable defined in vars, or
returns the default defined in kwargs.
"""
for v in _vars:
value = os.environ.get(v, None)
if value:
return value
return kwargs.get('default', '')
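# Illustrative example only: env('OS_SERVICEVM_SERVICE_TYPE', default='servicevm')
# returns that environment variable's value when it is set and non-empty,
# otherwise the default 'servicevm'.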
def check_non_negative_int(value):
try:
value = int(value)
except ValueError:
raise argparse.ArgumentTypeError(_("invalid int value: %r") % value)
if value < 0:
raise argparse.ArgumentTypeError(_("input value %d is negative") %
value)
return value
class BashCompletionCommand(openstack_command.OpenStackCommand):
"""Prints all of the commands and options for bash-completion."""
resource = "bash_completion"
COMMAND_V1 = {
'bash-completion': BashCompletionCommand,
'ext-list': extension.ListExt,
'ext-show': extension.ShowExt,
'device-template-create': device_template.CreateDeviceTemplate,
'device-template-list': device_template.ListDeviceTemplate,
'device-template-show': device_template.ShowDeviceTemplate,
'device-template-update': device_template.UpdateDeviceTemplate,
'device-template-delete': device_template.DeleteDeviceTemplate,
'device-create': device.CreateDevice,
'device-list': device.ListDevice,
'device-show': device.ShowDevice,
'device-update': device.UpdateDevice,
'device-delete': device.DeleteDevice,
'interface-attach': device.AttachInterface,
'interface-detach': device.DetachInterface,
# MANO lingo
'vnfd-create': vnfd.CreateVNFD,
'vnfd-delete': vnfd.DeleteVNFD,
'vnfd-list': vnfd.ListVNFD,
'vnfd-show': vnfd.ShowVNFD,
'vnf-create': vnf.CreateVNF,
'vnf-update': vnf.UpdateVNF,
'vnf-delete': vnf.DeleteVNF,
'vnf-list': vnf.ListVNF,
'vnf-show': vnf.ShowVNF,
# 'vnf-config-create'
# 'vnf-config-push'
}
COMMANDS = {'1.0': COMMAND_V1}
class HelpAction(argparse.Action):
"""Provide a custom action so the -h and --help options
to the main app will print a list of the commands.
The commands are determined by checking the CommandManager
instance, passed in as the "default" value for the action.
"""
def __call__(self, parser, namespace, values, option_string=None):
outputs = []
max_len = 0
app = self.default
parser.print_help(app.stdout)
app.stdout.write(_('\nCommands for API v%s:\n') % app.api_version)
command_manager = app.command_manager
for name, ep in sorted(command_manager):
factory = ep.load()
cmd = factory(self, None)
one_liner = cmd.get_description().split('\n')[0]
outputs.append((name, one_liner))
max_len = max(len(name), max_len)
for (name, one_liner) in outputs:
app.stdout.write(' %s %s\n' % (name.ljust(max_len), one_liner))
sys.exit(0)
class TackerShell(app.App):
# verbose logging levels
WARNING_LEVEL = 0
INFO_LEVEL = 1
DEBUG_LEVEL = 2
CONSOLE_MESSAGE_FORMAT = '%(message)s'
DEBUG_MESSAGE_FORMAT = '%(levelname)s: %(name)s %(message)s'
log = logging.getLogger(__name__)
def __init__(self, apiversion):
super(TackerShell, self).__init__(
description=__doc__.strip(),
version=VERSION,
command_manager=commandmanager.CommandManager('tacker.cli'), )
self.commands = COMMANDS
for k, v in self.commands[apiversion].items():
self.command_manager.add_command(k, v)
self._register_extensions(VERSION)
# Pop the 'complete' to correct the outputs of 'tacker help'.
self.command_manager.commands.pop('complete')
# This is instantiated in initialize_app() only when using
# password flow auth
self.auth_client = None
self.api_version = apiversion
def build_option_parser(self, description, version):
"""Return an argparse option parser for this application.
Subclasses may override this method to extend
the parser with more global options.
:param description: full description of the application
:paramtype description: str
:param version: version number for the application
:paramtype version: str
"""
parser = argparse.ArgumentParser(
description=description,
add_help=False, )
parser.add_argument(
'--version',
action='version',
version=__version__, )
parser.add_argument(
'-v', '--verbose', '--debug',
action='count',
dest='verbose_level',
default=self.DEFAULT_VERBOSE_LEVEL,
help=_('Increase verbosity of output and show tracebacks on'
' errors. You can repeat this option.'))
parser.add_argument(
'-q', '--quiet',
action='store_const',
dest='verbose_level',
const=0,
help=_('Suppress output except warnings and errors.'))
parser.add_argument(
'-h', '--help',
action=HelpAction,
nargs=0,
default=self, # tricky
help=_("Show this help message and exit."))
parser.add_argument(
'-r', '--retries',
metavar="NUM",
type=check_non_negative_int,
default=0,
help=_("How many times the request to the Tacker server should "
"be retried if it fails."))
# FIXME(bklei): this method should come from python-keystoneclient
self._append_global_identity_args(parser)
return parser
def _append_global_identity_args(self, parser):
# FIXME(bklei): these are global identity (Keystone) arguments which
# should be consistent and shared by all service clients. Therefore,
# they should be provided by python-keystoneclient. We will need to
# refactor this code once this functionality is available in
# python-keystoneclient.
#
# Note: At that time we'll need to decide if we can just abandon
# the deprecated args (--service-type and --endpoint-type).
parser.add_argument(
'--os-service-type', metavar='<os-service-type>',
default=env('OS_SERVICEVM_SERVICE_TYPE', default='servicevm'),
help=_('Defaults to env[OS_SERVICEVM_SERVICE_TYPE] or servicevm.'))
parser.add_argument(
'--os-endpoint-type', metavar='<os-endpoint-type>',
default=env('OS_ENDPOINT_TYPE', default='publicURL'),
help=_('Defaults to env[OS_ENDPOINT_TYPE] or publicURL.'))
# FIXME(bklei): --service-type is deprecated but kept in for
# backward compatibility.
parser.add_argument(
'--service-type', metavar='<service-type>',
default=env('OS_SERVICEVM_SERVICE_TYPE', default='servicevm'),
help=_('DEPRECATED! Use --os-service-type.'))
# FIXME(bklei): --endpoint-type is deprecated but kept in for
# backward compatibility.
parser.add_argument(
'--endpoint-type', metavar='<endpoint-type>',
default=env('OS_ENDPOINT_TYPE', default='publicURL'),
help=_('DEPRECATED! Use --os-endpoint-type.'))
parser.add_argument(
'--os-auth-strategy', metavar='<auth-strategy>',
default=env('OS_AUTH_STRATEGY', default='keystone'),
help=_('DEPRECATED! Only keystone is supported.'))
parser.add_argument(
'--os_auth_strategy',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-auth-url', metavar='<auth-url>',
default=env('OS_AUTH_URL'),
help=_('Authentication URL, defaults to env[OS_AUTH_URL].'))
parser.add_argument(
'--os_auth_url',
help=argparse.SUPPRESS)
project_name_group = parser.add_mutually_exclusive_group()
project_name_group.add_argument(
'--os-tenant-name', metavar='<auth-tenant-name>',
default=env('OS_TENANT_NAME'),
help=_('Authentication tenant name, defaults to '
'env[OS_TENANT_NAME].'))
project_name_group.add_argument(
'--os-project-name',
metavar='<auth-project-name>',
default=utils.env('OS_PROJECT_NAME'),
help='Another way to specify tenant name. '
'This option is mutually exclusive with '
' --os-tenant-name. '
'Defaults to env[OS_PROJECT_NAME].')
parser.add_argument(
'--os_tenant_name',
help=argparse.SUPPRESS)
project_id_group = parser.add_mutually_exclusive_group()
project_id_group.add_argument(
'--os-tenant-id', metavar='<auth-tenant-id>',
default=env('OS_TENANT_ID'),
help=_('Authentication tenant ID, defaults to '
'env[OS_TENANT_ID].'))
project_id_group.add_argument(
'--os-project-id',
metavar='<auth-project-id>',
default=utils.env('OS_PROJECT_ID'),
help='Another way to specify tenant ID. '
'This option is mutually exclusive with '
' --os-tenant-id. '
'Defaults to env[OS_PROJECT_ID].')
parser.add_argument(
'--os-username', metavar='<auth-username>',
default=utils.env('OS_USERNAME'),
help=_('Authentication username, defaults to env[OS_USERNAME].'))
parser.add_argument(
'--os_username',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-id', metavar='<auth-user-id>',
default=env('OS_USER_ID'),
help=_('Authentication user ID (Env: OS_USER_ID)'))
parser.add_argument(
'--os_user_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-domain-id',
metavar='<auth-user-domain-id>',
default=utils.env('OS_USER_DOMAIN_ID'),
help='OpenStack user domain ID. '
'Defaults to env[OS_USER_DOMAIN_ID].')
parser.add_argument(
'--os_user_domain_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-domain-name',
metavar='<auth-user-domain-name>',
default=utils.env('OS_USER_DOMAIN_NAME'),
help='OpenStack user domain name. '
'Defaults to env[OS_USER_DOMAIN_NAME].')
parser.add_argument(
'--os_user_domain_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os_project_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os_project_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-project-domain-id',
metavar='<auth-project-domain-id>',
default=utils.env('OS_PROJECT_DOMAIN_ID'),
help='Defaults to env[OS_PROJECT_DOMAIN_ID].')
parser.add_argument(
'--os-project-domain-name',
metavar='<auth-project-domain-name>',
default=utils.env('OS_PROJECT_DOMAIN_NAME'),
help='Defaults to env[OS_PROJECT_DOMAIN_NAME].')
parser.add_argument(
'--os-cert',
metavar='<certificate>',
default=utils.env('OS_CERT'),
help=_("Path of certificate file to use in SSL "
"connection. This file can optionally be "
"prepended with the private key. Defaults "
"to env[OS_CERT]."))
parser.add_argument(
'--os-cacert',
metavar='<ca-certificate>',
default=env('OS_CACERT', default=None),
help=_("Specify a CA bundle file to use in "
"verifying a TLS (https) server certificate. "
"Defaults to env[OS_CACERT]."))
parser.add_argument(
'--os-key',
metavar='<key>',
default=utils.env('OS_KEY'),
help=_("Path of client key to use in SSL "
"connection. This option is not necessary "
"if your key is prepended to your certificate "
"file. Defaults to env[OS_KEY]."))
parser.add_argument(
'--os-password', metavar='<auth-password>',
default=utils.env('OS_PASSWORD'),
help=_('Authentication password, defaults to env[OS_PASSWORD].'))
parser.add_argument(
'--os_password',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-region-name', metavar='<auth-region-name>',
default=env('OS_REGION_NAME'),
help=_('Authentication region name, defaults to '
'env[OS_REGION_NAME].'))
parser.add_argument(
'--os_region_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-token', metavar='<token>',
default=env('OS_TOKEN'),
help=_('Authentication token, defaults to env[OS_TOKEN].'))
parser.add_argument(
'--os_token',
help=argparse.SUPPRESS)
parser.add_argument(
'--http-timeout', metavar='<seconds>',
default=env('OS_NETWORK_TIMEOUT', default=None), type=float,
help=_('Timeout in seconds to wait for an HTTP response. Defaults '
'to env[OS_NETWORK_TIMEOUT] or None if not specified.'))
parser.add_argument(
'--os-url', metavar='<url>',
default=env('OS_URL'),
help=_('Defaults to env[OS_URL].'))
parser.add_argument(
'--os_url',
help=argparse.SUPPRESS)
parser.add_argument(
'--insecure',
action='store_true',
default=env('TACKERCLIENT_INSECURE', default=False),
help=_("Explicitly allow tackerclient to perform \"insecure\" "
"SSL (https) requests. The server's certificate will "
"not be verified against any certificate authorities. "
"This option should be used with caution."))
def _bash_completion(self):
"""Prints all of the commands and options for bash-completion."""
commands = set()
options = set()
for option, _action in self.parser._option_string_actions.items():
options.add(option)
for command_name, command in self.command_manager:
commands.add(command_name)
cmd_factory = command.load()
cmd = cmd_factory(self, None)
cmd_parser = cmd.get_parser('')
for option, _action in cmd_parser._option_string_actions.items():
options.add(option)
print(' '.join(commands | options))
def _register_extensions(self, version):
for name, module in itertools.chain(
client_extension._discover_via_entry_points()):
self._extend_shell_commands(module, version)
def _extend_shell_commands(self, module, version):
classes = inspect.getmembers(module, inspect.isclass)
for cls_name, cls in classes:
if (issubclass(cls, client_extension.TackerClientExtension) and
hasattr(cls, 'shell_command')):
cmd = cls.shell_command
if hasattr(cls, 'versions'):
if version not in cls.versions:
continue
try:
self.command_manager.add_command(cmd, cls)
self.commands[version][cmd] = cls
except TypeError:
pass
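    # Illustrative sketch of the hook consumed above (the command name and any
    # attributes beyond those referenced in this module are assumptions): an
    # extension module discovered via
    # client_extension._discover_via_entry_points() may define classes like
    #
    #   class FooList(client_extension.TackerClientExtension):
    #       shell_command = 'foo-list'
    #       versions = ['1.0']
    #
    # which are then registered as CLI commands for matching API versions.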
def run(self, argv):
"""Equivalent to the main program for the application.
:param argv: input arguments and options
:paramtype argv: list of str
"""
try:
index = 0
command_pos = -1
help_pos = -1
help_command_pos = -1
for arg in argv:
if arg == 'bash-completion' and help_command_pos == -1:
self._bash_completion()
return 0
if arg in self.commands[self.api_version]:
if command_pos == -1:
command_pos = index
elif arg in ('-h', '--help'):
if help_pos == -1:
help_pos = index
elif arg == 'help':
if help_command_pos == -1:
help_command_pos = index
index = index + 1
if command_pos > -1 and help_pos > command_pos:
argv = ['help', argv[command_pos]]
if help_command_pos > -1 and command_pos == -1:
argv[help_command_pos] = '--help'
self.options, remainder = self.parser.parse_known_args(argv)
self.configure_logging()
self.interactive_mode = not remainder
self.initialize_app(remainder)
except Exception as err:
if self.options.verbose_level >= self.DEBUG_LEVEL:
self.log.exception(err)
raise
else:
self.log.error(err)
return 1
if self.interactive_mode:
_argv = [sys.argv[0]]
sys.argv = _argv
return self.interact()
return self.run_subcommand(remainder)
def run_subcommand(self, argv):
subcommand = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = subcommand
cmd = cmd_factory(self, self.options)
try:
self.prepare_to_run_command(cmd)
full_name = (cmd_name
if self.interactive_mode
else ' '.join([self.NAME, cmd_name])
)
cmd_parser = cmd.get_parser(full_name)
return run_command(cmd, cmd_parser, sub_argv)
except Exception as e:
if self.options.verbose_level >= self.DEBUG_LEVEL:
self.log.exception("%s", e)
raise
self.log.error("%s", e)
return 1
def authenticate_user(self):
"""Make sure the user has provided all of the authentication
info we need.
"""
if self.options.os_auth_strategy == 'keystone':
if self.options.os_token or self.options.os_url:
# Token flow auth takes priority
if not self.options.os_token:
raise exc.CommandError(
_("You must provide a token via"
" either --os-token or env[OS_TOKEN]"
" when providing a service URL"))
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"
" when providing a token"))
else:
# Validate password flow auth
project_info = (self.options.os_tenant_name or
self.options.os_tenant_id or
(self.options.os_project_name and
(self.options.os_project_domain_name or
self.options.os_project_domain_id)) or
self.options.os_project_id)
if (not self.options.os_username
and not self.options.os_user_id):
raise exc.CommandError(
_("You must provide a username or user ID via"
" --os-username, env[OS_USERNAME] or"
" --os-user-id, env[OS_USER_ID]"))
if not self.options.os_password:
                    # No password. If we've got a tty, try prompting for it.
                    if hasattr(sys.stdin, 'isatty') and sys.stdin.isatty():
                        # Check for Ctrl-D
try:
self.options.os_password = getpass.getpass(
'OS Password: ')
except EOFError:
pass
                    # No password because we didn't have a tty or the
                    # user pressed Ctrl-D when prompted.
if not self.options.os_password:
raise exc.CommandError(
_("You must provide a password via"
" either --os-password or env[OS_PASSWORD]"))
if (not project_info):
                    # tenant is deprecated in Keystone v3. Use the latest
# terminology instead.
raise exc.CommandError(
_("You must provide a project_id or project_name ("
"with project_domain_name or project_domain_id) "
"via "
" --os-project-id (env[OS_PROJECT_ID])"
" --os-project-name (env[OS_PROJECT_NAME]),"
" --os-project-domain-id "
"(env[OS_PROJECT_DOMAIN_ID])"
" --os-project-domain-name "
"(env[OS_PROJECT_DOMAIN_NAME])"))
if not self.options.os_auth_url:
raise exc.CommandError(
_("You must provide an auth url via"
" either --os-auth-url or via env[OS_AUTH_URL]"))
auth_session = self._get_keystone_session()
auth = auth_session.auth
else: # not keystone
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"))
auth_session = None
auth = None
self.client_manager = clientmanager.ClientManager(
token=self.options.os_token,
url=self.options.os_url,
auth_url=self.options.os_auth_url,
tenant_name=self.options.os_tenant_name,
tenant_id=self.options.os_tenant_id,
username=self.options.os_username,
user_id=self.options.os_user_id,
password=self.options.os_password,
region_name=self.options.os_region_name,
api_version=self.api_version,
auth_strategy=self.options.os_auth_strategy,
# FIXME (bklei) honor deprecated service_type and
# endpoint type until they are removed
service_type=self.options.os_service_type or
self.options.service_type,
endpoint_type=self.options.os_endpoint_type or self.endpoint_type,
insecure=self.options.insecure,
ca_cert=self.options.os_cacert,
timeout=self.options.http_timeout,
retries=self.options.retries,
raise_errors=False,
session=auth_session,
auth=auth,
log_credentials=True)
return
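    # Illustrative sketch of the two keystone auth flows validated above, using
    # assumed example values (host names and ports are not from this module):
    #   Token flow:    OS_TOKEN=<token> OS_URL=http://controller:9890/
    #   Password flow: OS_AUTH_URL=http://controller:5000/ OS_USERNAME=admin
    #                  OS_PASSWORD=secret OS_PROJECT_NAME=admin
    #                  OS_PROJECT_DOMAIN_ID=default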
def initialize_app(self, argv):
"""Global app init bits:
* set up API versions
* validate authentication info
"""
super(TackerShell, self).initialize_app(argv)
self.api_version = {'servicevm': self.api_version}
# If the user is not asking for help, make sure they
# have given us auth.
cmd_name = None
if argv:
cmd_info = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = cmd_info
if self.interactive_mode or cmd_name != 'help':
self.authenticate_user()
def configure_logging(self):
"""Create logging handlers for any log output."""
root_logger = logging.getLogger('')
# Set up logging to a file
root_logger.setLevel(logging.DEBUG)
# Send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {self.WARNING_LEVEL: logging.WARNING,
self.INFO_LEVEL: logging.INFO,
self.DEBUG_LEVEL: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
        # The default log level is INFO; in that case, set the console log
        # level to WARNING to avoid displaying noisy messages. This is
        # equivalent to using "--quiet".
if console_level == logging.INFO:
console.setLevel(logging.WARNING)
else:
console.setLevel(console_level)
if logging.DEBUG == console_level:
formatter = logging.Formatter(self.DEBUG_MESSAGE_FORMAT)
else:
formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
logging.getLogger('iso8601.iso8601').setLevel(logging.WARNING)
logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def get_v2_auth(self, v2_auth_url):
return v2_auth.Password(
v2_auth_url,
username=self.options.os_username,
password=self.options.os_password,
tenant_id=self.options.os_tenant_id,
tenant_name=self.options.os_tenant_name)
def get_v3_auth(self, v3_auth_url):
project_id = self.options.os_project_id or self.options.os_tenant_id
project_name = (self.options.os_project_name or
self.options.os_tenant_name)
return v3_auth.Password(
v3_auth_url,
username=self.options.os_username,
password=self.options.os_password,
user_id=self.options.os_user_id,
user_domain_name=self.options.os_user_domain_name,
user_domain_id=self.options.os_user_domain_id,
project_id=project_id,
project_name=project_name,
project_domain_name=self.options.os_project_domain_name,
project_domain_id=self.options.os_project_domain_id
)
def _discover_auth_versions(self, session, auth_url):
        # discover the API versions the server supports based on the
        # given URL
try:
ks_discover = discover.Discover(session=session, auth_url=auth_url)
return (ks_discover.url_for('2.0'), ks_discover.url_for('3.0'))
except ks_exc.ClientException:
            # The identity service may not support API version discovery.
            # Let's try to figure out the API version from the original URL.
url_parts = urlparse.urlparse(auth_url)
(scheme, netloc, path, params, query, fragment) = url_parts
path = path.lower()
if path.startswith('/v3'):
return (None, auth_url)
elif path.startswith('/v2'):
return (auth_url, None)
else:
# not enough information to determine the auth version
msg = _('Unable to determine the Keystone version '
'to authenticate with using the given '
'auth_url. Identity service may not support API '
'version discovery. Please provide a versioned '
'auth_url instead.')
raise exc.CommandError(msg)
def _get_keystone_session(self):
# first create a Keystone session
cacert = self.options.os_cacert or None
cert = self.options.os_cert or None
key = self.options.os_key or None
insecure = self.options.insecure or False
ks_session = session.Session.construct(dict(cacert=cacert,
cert=cert,
key=key,
insecure=insecure))
# discover the supported keystone versions using the given url
(v2_auth_url, v3_auth_url) = self._discover_auth_versions(
session=ks_session,
auth_url=self.options.os_auth_url)
# Determine which authentication plugin to use. First inspect the
# auth_url to see the supported version. If both v3 and v2 are
# supported, then use the highest version if possible.
user_domain_name = self.options.os_user_domain_name or None
user_domain_id = self.options.os_user_domain_id or None
project_domain_name = self.options.os_project_domain_name or None
project_domain_id = self.options.os_project_domain_id or None
domain_info = (user_domain_name or user_domain_id or
project_domain_name or project_domain_id)
if (v2_auth_url and not domain_info) or not v3_auth_url:
ks_session.auth = self.get_v2_auth(v2_auth_url)
else:
ks_session.auth = self.get_v3_auth(v3_auth_url)
return ks_session
def main(argv=sys.argv[1:]):
try:
return TackerShell(TACKER_API_VERSION).run(
list(map(encodeutils.safe_decode, argv)))
except KeyboardInterrupt:
print("... terminating tacker client", file=sys.stderr)
return 130
except exc.TackerClientException:
return 1
except Exception as e:
print(e)
return 1
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
| apache-2.0 | -3,938,062,603,018,916,000 | 38.419434 | 79 | 0.564185 | false |
demisto/content | Packs/DNSDB/Integrations/DNSDB_v2/DNSDB_v2.py | 1 | 30136 | # Copyright (c) 2020 by Farsight Security, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import locale
from typing import Iterator, Dict, List, Tuple, Union, Any, Callable, Iterable
import urllib
import urllib.parse
from CommonServerPython import * # noqa: E402 lgtm [py/polluting-import]
import datetime # type: ignore[no-redef]
import json
import re
import requests
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
INTEGRATION_NAME = 'Farsight DNSDB'
INTEGRATION_COMMAND_NAME = 'dnsdb'
INTEGRATION_CONTEXT_NAME = 'DNSDB'
RECORD_SUBCONTEXT_NAME = 'Record'
SUMMARY_SUBCONTEXT_NAME = 'Summary'
RATE_SUBCONTEXT_NAME = 'Rate'
# CONSTANTS
DEFAULT_DNSDB_SERVER = 'https://api.dnsdb.info'
TIMEOUT = 60
SWCLIENT = "demisto"
VERSION = "v2.1.2"
PATH_PREFIX = 'dnsdb/v2'
IDN_REGEX = re.compile(r'(?:^|(?<=[\s=.:@]))xn--[a-z0-9\-]+\.')
FALSE_REGEX = re.compile(r'^(?i:f(alse)?)$')
COND_BEGIN = 'begin'
COND_ONGOING = 'ongoing'
COND_SUCCEEDED = 'succeeded'
COND_LIMITED = 'limited'
COND_FAILED = 'failed'
locale.setlocale(locale.LC_ALL, '')
''' HELPER FUNCTIONS '''
class QueryError(Exception):
pass
class timeval(int):
pass
class Client(BaseClient):
def __init__(self, base_url: str, apikey: str, verify=None, proxy=None):
BaseClient.__init__(
self,
base_url,
verify=verify,
headers={
'Accept': 'application/x-ndjson',
'X-Api-Key': apikey,
},
proxy=proxy,
ok_codes=(200, ),
)
self.apikey = apikey
@staticmethod
def base_params() -> dict:
return {
'swclient': SWCLIENT,
'version': VERSION,
}
def rate_limit(self) -> Dict:
params = self.base_params()
url_suffix = 'dnsdb/v2/rate_limit'
return self._http_request('GET', url_suffix=url_suffix, params=params)
def lookup_rrset(self, owner_name: str, rrtype: str = None, bailiwick: str = None, limit: int = None,
time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None) -> Iterator[Dict]:
return self._query_rrset("lookup",
owner_name=owner_name,
rrtype=rrtype,
bailiwick=bailiwick,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
offset=offset)
def summarize_rrset(self, owner_name: str, rrtype: str = None, bailiwick: str = None, limit: int = None,
time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, max_count: int = None) -> dict:
try:
return next(self._query_rrset("summarize",
owner_name=owner_name,
rrtype=rrtype,
bailiwick=bailiwick,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
max_count=max_count))
except StopIteration:
raise QueryError("no data")
def _query_rrset(self, mode: str, owner_name: str, rrtype: str = None, bailiwick: str = None, limit: int = None,
time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None, max_count: int = None) -> Iterator[Dict]:
owner_name = quote(to_ascii(owner_name))
if bailiwick:
if not rrtype:
rrtype = 'ANY'
bailiwick = quote(to_ascii(bailiwick))
path = f'{PATH_PREFIX}/{mode}/rrset/name/{owner_name}/{rrtype}/{bailiwick}'
elif rrtype:
path = f'{PATH_PREFIX}/{mode}/rrset/name/{owner_name}/{rrtype}'
else:
path = f'{PATH_PREFIX}/{mode}/rrset/name/{owner_name}'
return self._query(path, limit=limit, time_first_before=time_first_before, time_first_after=time_first_after,
time_last_before=time_last_before, time_last_after=time_last_after,
aggr=aggr, offset=offset, max_count=max_count)
def lookup_rdata_name(self, value: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None) -> Iterator[Dict]:
return self._query_rdata_name("lookup",
name=value,
rrtype=rrtype,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
offset=offset)
def summarize_rdata_name(self, value: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, max_count: int = None) -> dict:
try:
return next(self._query_rdata_name("summarize",
name=value,
rrtype=rrtype,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
max_count=max_count))
except StopIteration:
raise QueryError("no data")
def _query_rdata_name(self, mode: str, name: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None, max_count: int = None) -> Iterator[Dict]:
rdata_name = quote(to_ascii(name))
if rrtype:
path = f'{PATH_PREFIX}/{mode}/rdata/name/{rdata_name}/{rrtype}'
else:
path = f'{PATH_PREFIX}/{mode}/rdata/name/{rdata_name}'
return self._query(path, limit=limit, time_first_before=time_first_before, time_first_after=time_first_after,
time_last_before=time_last_before, time_last_after=time_last_after,
aggr=aggr, offset=offset, max_count=max_count)
def lookup_rdata_ip(self, value: str, limit: int = None,
time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None) -> Iterator[Dict]:
return self._query_rdata_ip("lookup",
ip=value,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
offset=offset)
def summarize_rdata_ip(self, value: str, limit: int = None,
time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, max_count: int = None) -> dict:
try:
return next(self._query_rdata_ip("summarize",
ip=value,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
max_count=max_count))
except StopIteration:
raise QueryError("no data")
def _query_rdata_ip(self, mode: str, ip: str,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None, max_count: int = None) -> Iterator[Dict]:
ip = ip.replace('/', ',')
path = f'{PATH_PREFIX}/{mode}/rdata/ip/{ip}'
return self._query(path, limit=limit, time_first_before=time_first_before, time_first_after=time_first_after,
time_last_before=time_last_before, time_last_after=time_last_after,
aggr=aggr, offset=offset, max_count=max_count)
def lookup_rdata_raw(self, value: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None) -> Iterator[Dict]:
return self._query_rdata_raw("lookup",
raw=value,
rrtype=rrtype,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
offset=offset)
def summarize_rdata_raw(self, value: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, max_count: int = None) -> dict:
try:
return next(self._query_rdata_raw("summarize",
raw=value,
rrtype=rrtype,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
max_count=max_count))
except StopIteration:
raise QueryError("no data")
def _query_rdata_raw(self, mode: str, raw: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None, max_count: int = None) -> Iterator[Dict]:
if rrtype:
path = f'{PATH_PREFIX}/{mode}/rdata/raw/{quote(raw)}/{rrtype}'
else:
path = f'{PATH_PREFIX}/{mode}/rdata/raw/{quote(raw)}'
return self._query(path, limit=limit, time_first_before=time_first_before, time_first_after=time_first_after,
time_last_before=time_last_before, time_last_after=time_last_after,
aggr=aggr, offset=offset, max_count=max_count)
def flex(self, method: str, key: str, value: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None):
path = f'{PATH_PREFIX}/{method}/{key}/{quote(value)}'
if rrtype:
path += f'/{rrtype}'
return self._query(path, limit=limit, time_first_before=time_first_before, time_first_after=time_first_after,
time_last_before=time_last_before, time_last_after=time_last_after)
def _query(self, path: str, limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None, max_count: int = None) -> Iterator[Dict]:
params = self.base_params()
params.update(
assign_params(
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
offset=offset,
max_count=max_count,
)
)
res = self._http_request('GET', path,
params=params,
stream=True,
resp_type='response',
timeout=TIMEOUT)
return _handle_saf(res.iter_lines(decode_unicode=True))
def _handle_saf(i: Iterable[str]):
for line in i:
if not line:
continue
try:
saf_msg = json.loads(line)
except json.JSONDecodeError as e:
raise DemistoException(f'saf protocol error: could not decode json: {line}') from e
cond = saf_msg.get('cond')
obj = saf_msg.get('obj')
msg = saf_msg.get('msg')
if cond == COND_BEGIN:
continue
elif cond == COND_SUCCEEDED:
return
if obj:
yield obj
if cond == COND_ONGOING or not cond:
continue
elif cond == COND_LIMITED:
return
elif cond == COND_FAILED:
raise QueryError(f'saf query failed: {msg}')
else:
raise QueryError(f'saf protocol error: invalid cond: {cond}')
raise QueryError('saf query truncated')
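# Illustrative sketch of the SAF (Streaming API Framing) lines _handle_saf consumes,
# derived from the handling above; the record values are made up for illustration:
#   {"cond": "begin"}
#   {"obj": {"rrname": "www.example.com.", "rrtype": "A", "rdata": ["192.0.2.1"]}}
#   {"cond": "succeeded"}
# Lines carrying "obj" (with cond "ongoing" or no cond) are yielded to the caller;
# "limited" ends the stream early, "failed" raises QueryError, and a stream that
# ends without a terminal cond is reported as truncated.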
def quote(path: str) -> str:
return urllib.parse.quote(path, safe='')
@logger
def _run_query(f, args):
sig = inspect.signature(f)
kwargs = {} # type: Dict[str, Any]
for name, p in sig.parameters.items():
if name in args:
if p.annotation != p.empty:
if p.annotation == bool:
if FALSE_REGEX.match(args[name]):
kwargs[name] = False
else:
kwargs[name] = True
elif p.annotation == timeval:
try:
kwargs[name] = int(args[name])
except ValueError:
kwargs[name] = date_to_timestamp(args[name])
else:
kwargs[name] = p.annotation(args[name])
else:
kwargs[name] = args[name]
elif p.kind == p.POSITIONAL_ONLY:
raise Exception(f'Missing argument: {name}')
return f(**kwargs)
def to_unicode(domain: str) -> str:
try:
return domain.encode('utf8').decode('idna')
except UnicodeError:
return domain
def to_ascii(domain: str) -> str:
try:
return domain.encode('idna').decode('utf8')
except UnicodeError:
return domain
def format_name_for_context(domain: str) -> str:
return domain.rstrip('.')
def format_name_for_markdown(domain: str) -> str:
return to_unicode(domain.rstrip('.'))
def parse_rdata(rdata: Union[str, List[str]]):
if isinstance(rdata, list):
return [parse_rdata(entry) for entry in rdata] # pragma: no cover
def f(m):
return to_unicode(m.group(0))
return str(IDN_REGEX.sub(f, rdata))
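# Example (the standard IDNA sample label, shown for illustration only):
#   parse_rdata("10 mail.xn--bcher-kva.example.") -> "10 mail.bücher.example."
# Only "xn--" labels that are followed by a dot and match IDN_REGEX get decoded.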
def format_rdata_for_markdown(rdata: Union[str, List[str]]):
rdata = parse_rdata(rdata)
if isinstance(rdata, str):
return rdata
return '<br>'.join(rdata)
def parse_rate_limit_int(i):
try:
return int(i)
except ValueError:
return i
def parse_unix_time(ts) -> str:
try:
return datetime.datetime.utcfromtimestamp(ts).strftime("%Y-%m-%dT%H:%M:%SZ") # type: ignore[attr-defined]
except TypeError:
return ts
def nop(x):
return x
@logger
def build_result_context(results: Dict) -> Dict:
ctx = {}
for ckey, rkey, f in (
('RRName', 'rrname', format_name_for_context),
('RRType', 'rrtype', str),
('Bailiwick', 'bailiwick', format_name_for_context),
('RData', 'rdata', nop),
('RawRData', 'raw_rdata', nop),
('Count', 'count', int),
('NumResults', 'num_results', int),
('TimeFirst', 'time_first', parse_unix_time),
('TimeLast', 'time_last', parse_unix_time),
('TimeFirst', 'zone_time_first', parse_unix_time),
('TimeLast', 'zone_time_last', parse_unix_time),
):
if rkey in results:
ctx[ckey] = f(results[rkey]) # type: ignore[operator]
if 'zone_time_first' in results or 'time_first' in results:
ctx['FromZoneFile'] = 'zone_time_first' in results
return ctx
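# Illustrative mapping with made-up values: an API record such as
#   {"rrname": "example.com.", "rrtype": "A", "count": 42, "time_first": 1577836800}
# becomes
#   {"RRName": "example.com", "RRType": "A", "Count": 42,
#    "TimeFirst": "2020-01-01T00:00:00Z", "FromZoneFile": False}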
@logger
def build_rate_limits_context(results: Dict) -> Dict:
"""Formatting results from Rate Limit API to Demisto Context"""
rate = results.get('rate')
if rate is None:
raise ValueError("Missing rate key")
ctx = {}
if rate['limit'] == 'unlimited':
return {
'Unlimited': True
}
for ckey, rkey, f in (
('Limit', 'limit', parse_rate_limit_int),
('Remaining', 'remaining', parse_rate_limit_int),
('Expires', 'expires', parse_unix_time),
('ResultsMax', 'results_max', parse_rate_limit_int),
('BurstSize', 'burst_size', parse_rate_limit_int),
('BurstWindow', 'burst_window', parse_rate_limit_int),
):
if rkey in rate:
ctx[ckey] = f(rate[rkey])
if 'reset' in rate:
if rate['reset'] == "n/a":
ctx['NeverResets'] = True
else:
ctx['Reset'] = parse_unix_time(rate['reset'])
if 'offset_max' in rate:
if rate['offset_max'] == "n/a":
ctx['OffsetNotAllowed'] = True
else:
ctx['OffsetMax'] = parse_rate_limit_int(rate['offset_max'])
return ctx
@logger
def lookup_to_markdown(results: List[Dict], title: str = 'Farsight DNSDB Lookup', want_bailiwick=True, header_filter=None) -> str:
# TODO this should be more specific, include arguments?
out = []
keys = [
('RRName', 'rrname', format_name_for_context),
('RRType', 'rrtype', str),
('Bailiwick', 'bailiwick', format_name_for_context),
('RData', 'rdata', format_rdata_for_markdown),
('Count', 'count', str),
] # type: List[Tuple[str, str, Callable]]
if not want_bailiwick:
keys = list(filter(lambda r: r[1] != 'bailiwick', keys))
headers = [k[0] for k in keys] + ['TimeFirst', 'TimeLast', 'FromZoneFile']
if header_filter:
headers = list(filter(header_filter, headers))
for result in results:
row = dict() # type: Dict[str, Any]
for ckey, rkey, f in keys:
if rkey in result:
row[ckey] = f(result[rkey])
if 'time_first' in result:
row['TimeFirst'] = parse_unix_time(result['time_first'])
elif 'zone_time_first' in result:
row['TimeFirst'] = parse_unix_time(result['zone_time_first'])
if 'time_last' in result:
row['TimeLast'] = parse_unix_time(result['time_last'])
elif 'zone_time_last' in result:
row['TimeLast'] = parse_unix_time(result['zone_time_last'])
row['FromZoneFile'] = str("zone_time_first" in result)
out.append(row)
return tableToMarkdown(title, out, headers=headers)
@logger
def summarize_to_markdown(summary: Dict) -> str:
headers = []
out = dict() # type: Dict[str, Any]
for ckey, rkey, f in (
('Count', 'count', int),
('NumResults', 'num_results', int),
('TimeFirst', 'time_first', parse_unix_time),
('TimeLast', 'time_last', parse_unix_time),
('ZoneTimeFirst', 'zone_time_first', parse_unix_time),
('ZoneTimeLast', 'zone_time_last', parse_unix_time),
):
if rkey in summary:
headers.append(ckey)
out[ckey] = f(summary[rkey]) # type: ignore[operator]
return tableToMarkdown('Farsight DNSDB Summarize', out, headers=headers)
@logger
def rate_limit_to_markdown(results: Dict) -> str:
rate = results.get('rate')
if rate is None:
return '### Error'
out = dict() # type: Dict[str, Any]
headers = []
if rate['limit'] != "unlimited":
for ckey, rkey, f in (
('Limit', 'limit', parse_rate_limit_int),
('Remaining', 'remaining', parse_rate_limit_int),
('Reset', 'reset', parse_unix_time),
('Expires', 'expires', parse_unix_time),
('ResultsMax', 'results_max', parse_rate_limit_int),
('OffsetMax', 'offset_max', parse_rate_limit_int),
('BurstSize', 'burst_size', parse_rate_limit_int),
('BurstWindow', 'burst_window', parse_rate_limit_int),
):
if rkey in rate:
headers.append(ckey)
if rkey == 'reset':
if rate[rkey] == "n/a":
NEVER_RESETS = 'NeverResets'
out[NEVER_RESETS] = True
headers.append(NEVER_RESETS)
else:
out[f'{ckey}'] = f(rate[rkey])
elif rkey == 'offset_max':
if rate[rkey] == "n/a":
OFFSET_NOT_ALLOWED = 'OffsetNotAllowed'
out[OFFSET_NOT_ALLOWED] = True
headers.append(OFFSET_NOT_ALLOWED)
else:
out[f'{ckey}'] = f(rate[rkey])
else:
out[f'{ckey}'] = f(rate[rkey])
else:
UNLIMITED = 'Unlimited'
out[UNLIMITED] = True
headers.append(UNLIMITED)
return tableToMarkdown('Farsight DNSDB Service Limits', out, headers=headers)
''' COMMANDS '''
@logger
def test_module(client, _):
try:
client.rate_limit()
except DemistoException as e:
if 'forbidden' in str(e):
return 'Authorization Error: make sure API Key is correctly set'
else:
raise e
return 'ok'
@logger
def dnsdb_flex(client, args):
res = list(_run_query(client.flex, args))
def skip_rrname(header) -> bool:
return header.lower() not in ('rrname', 'fromzonefile')
def skip_rdata(header) -> bool:
return header.lower() not in ('rdata', 'fromzonefile')
if args.get('key') == 'rdata':
skip = skip_rrname
else:
skip = skip_rdata
return CommandResults(
readable_output=lookup_to_markdown(res, title='Farsight DNSDB Flex Search', want_bailiwick=False,
header_filter=skip),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{RECORD_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=[build_result_context(r) for r in res],
)
@logger
def dnsdb_rdata(client, args):
type = args.get('type')
if type == 'name':
res = list(_run_query(client.lookup_rdata_name, args))
elif type == 'ip':
res = list(_run_query(client.lookup_rdata_ip, args))
elif type == 'raw':
res = list(_run_query(client.lookup_rdata_raw, args))
else:
raise Exception(f'Invalid rdata query type: {type}')
return CommandResults(
readable_output=lookup_to_markdown(res, want_bailiwick=False),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{RECORD_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=[build_result_context(r) for r in res],
)
@logger
def dnsdb_summarize_rdata(client, args):
type = args.get('type')
if type == 'name':
res = _run_query(client.summarize_rdata_name, args)
elif type == 'ip':
res = _run_query(client.summarize_rdata_ip, args)
elif type == 'raw':
res = _run_query(client.summarize_rdata_raw, args)
else:
raise Exception(f'Invalid rdata query type: {type}')
return CommandResults(
readable_output=summarize_to_markdown(res),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{SUMMARY_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=build_result_context(res),
)
@logger
def dnsdb_rrset(client, args):
q = _run_query(client.lookup_rrset, args)
res = list(q)
return CommandResults(
readable_output=lookup_to_markdown(res),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{RECORD_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=[build_result_context(r) for r in res],
)
@logger
def dnsdb_summarize_rrset(client, args):
res = _run_query(client.summarize_rrset, args)
return CommandResults(
readable_output=summarize_to_markdown(res),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{SUMMARY_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=build_result_context(res),
)
@logger
def dnsdb_rate_limit(client, _):
res = client.rate_limit()
return CommandResults(
readable_output=rate_limit_to_markdown(res),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{RATE_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=build_rate_limits_context(res),
)
def main():
"""
PARSE AND VALIDATE INTEGRATION PARAMS
"""
apikey = demisto.params().get('apikey')
base_url = demisto.params().get('url')
if not base_url:
base_url = DEFAULT_DNSDB_SERVER
verify_certificate = not demisto.params().get('insecure', False)
proxy = demisto.params().get('proxy', False)
client = Client(
base_url,
apikey,
verify=verify_certificate,
proxy=proxy)
command = demisto.command()
LOG(f'Command being called is {command}')
try:
if command == 'test-module':
return_results(test_module(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-flex':
return_results(dnsdb_flex(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-rdata':
return_results(dnsdb_rdata(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-summarize-rdata':
return_results(dnsdb_summarize_rdata(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-rrset':
return_results(dnsdb_rrset(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-summarize-rrset':
return_results(dnsdb_summarize_rrset(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-rate-limit':
return_results(dnsdb_rate_limit(client, demisto.args()))
# Log exceptions
except Exception as e:
demisto.error(traceback.format_exc()) # print the traceback
err_msg = f'Error in {INTEGRATION_NAME} Integration [{e}]'
return_error(err_msg, error=e)
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| mit | -323,106,621,309,568,450 | 37.935401 | 130 | 0.525684 | false |
codeforsanjose/calischools | schools/serializers.py | 1 | 1123 | from rest_framework import serializers
from .models import County, District, School
class CountySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = County
class CountyMixin(serializers.Serializer):
county = CountySerializer(read_only=True)
class DistrictCompactSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = District
exclude = ('county',)
class DistrictSerializer(CountyMixin,
serializers.HyperlinkedModelSerializer):
class Meta:
model = District
class DistrictCompactMixin(serializers.Serializer):
district = DistrictCompactSerializer(read_only=True)
class SchoolCompactSerializer(serializers.HyperlinkedModelSerializer):
short_code = serializers.ReadOnlyField()
class Meta:
model = School
fields = ('url', 'short_code', 'name',)
class SchoolSerializer(DistrictCompactMixin,
CountyMixin,
serializers.HyperlinkedModelSerializer):
short_code = serializers.ReadOnlyField()
class Meta:
model = School
| mit | -468,564,356,021,282,600 | 23.955556 | 72 | 0.700801 | false |
mdavoodi/konkourse-python | conversation/views.py | 1 | 4095 | from django.http import HttpResponse
from django.utils import simplejson
from account.models import UserProfile
from conversation.models import ConversationPost, ConvoWall, ConversationComment
from notification.views import notifyComment, notifyPost
def post(request):
results = {'success': False}
if request.user.is_authenticated() and request.user.is_active:
if request.method == 'POST':
POST = request.POST
wall = ConvoWall.objects.get(id=POST['id'])
message = POST['message']
if message == '':
results = {'success': False}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
elif len(message) > 5000:
results = {'success': False, 'error': 'invalid length'}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
post_type = POST['type']
wallPost = ConversationPost(creator=request.user, wall=wall, message=message, post_type=post_type)
wallPost.save()
notifyPost(request=request, wall=wall, post=wallPost)
results = {'success': True}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
def comment(request):
results = {'success': False}
if request.user.is_authenticated() and request.user.is_active:
if request.method == 'POST':
POST = request.POST
convoPost = ConversationPost.objects.get(id=POST['id'])
message = POST['message']
if message == '':
results = {'success': False}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
elif len(message) > 5000:
results = {'success': False, 'error': 'invalid length'}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
comment = ConversationComment(creator=request.user, message=message, post=convoPost)
comment.save()
convoPost.comments.add(comment)
convoPost.save()
notifyComment(request=request, post=convoPost, comment=comment)
results = {'success': True}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
def deletePost(request):
results = {'success': False}
if request.user.is_authenticated() and request.user.is_active:
if request.method == 'POST':
POST = request.POST
convoPost = ConversationPost.objects.get(id=POST['id'])
parent = convoPost.wall.getParent
if convoPost.creator != request.user or (isinstance(parent, UserProfile) and parent.user != request.user):
results = {'success': False}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
convoPost.deleted = True
convoPost.save()
results = {'success': True}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
def deleteComment(request):
results = {'success': False}
if request.user.is_authenticated() and request.user.is_active:
if request.method == 'POST':
POST = request.POST
comment = ConversationComment.objects.get(id=POST['id'])
parent = comment.post.wall.getParent
            if comment.creator != request.user or comment.post.creator != request.user or (isinstance(parent, UserProfile) and parent.user != request.user):
results = {'success': False}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
comment.deleted = True
comment.save()
results = {'success': True}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
| mit | -4,804,500,232,000,305,000 | 44.5 | 148 | 0.615873 | false |
XENON1T/processing | montecarlo/fax_waveform/CreateFakeCSV.py | 1 | 4806 | #################################
## Sub-code used in WF simulation
## It creates a csv file for the input of fax
## by Qing Lin
## @ 2016-09-12
##
## HARDCODE WARNING: The FV dimensions below need to be modified
## according to the detector you wish to simulate
##
## Ref: http://xenon1t.github.io/pax/simulator.html#instruction-file-format
## Code: https://github.com/XENON1T/pax/blob/master/pax/plugins/io/WaveformSimulator.py#L244
##
#################################
import sys
import numpy as np
import scipy as sp
if len(sys.argv)<2:
print("========= Syntax ==========")
print("python CreateFakeCSV.py ..... ")
print("<detector: XENON100, XENON1T>")
print("<number of events>")
print("<photon number lower>")
print("<photon number upper>")
print("<electron number lower>")
print("<electron number upper>")
print("<recoil type: ER, NR>")
print("<output file (abs. path)>")
print("<If force S1-S2 correlation (0 for no; 1 for yes)>")
exit()
Detector = sys.argv[1]
NumEvents = int(sys.argv[2])
PhotonNumLower = float(sys.argv[3])
PhotonNumUpper = float(sys.argv[4])
ElectronNumLower = float(sys.argv[5])
ElectronNumUpper = float(sys.argv[6])
DefaultType = sys.argv[7]
OutputFilename = sys.argv[8]
IfS1S2Correlation = True
if int(sys.argv[9])==0:
IfS1S2Correlation = False
####################################
## Some nuisance parameters (HARDCODE WARNING):
####################################
MaxDriftTime = 650. # us
####################################
## Some functions (HARDCODE WARNING):
####################################
# Current FV cut for Xe1T
scalecmtomm=1
def radius2_cut(zpos):
return 1400*scalecmtomm**2+(zpos+100*scalecmtomm)*(2250-1900)*scalecmtomm/100
def IfPassFV(x,y,z):
if Detector == "XENON100":
        # check whether (x, y, z) passes the X48kg0 fiducial volume cut
I = np.power( (z+15.)/14.6, 4.)
I += np.power( (x**2+y**2)/20000., 4.)
if I<1:
return True
elif Detector == "XENON1T": # NEED TO UPDATE THIS
Zlower, Zupper = -90*scalecmtomm, -15*scalecmtomm
Zcut = ((z>=Zlower) & (z<=Zupper))
R2upper=radius2_cut(z)
Rcut = (x**2+y**2<R2upper)
if(Zcut & Rcut):
return True
return False
def RandomizeFV():
# randomize the X, Y, Z according to X48kg FV
if Detector == "XENON100":
Zlower, Zupper = -14.6-15.0, -14.6+15.0
Rlower, Rupper = -np.sqrt(200.), np.sqrt(200.)
elif Detector == "XENON1T": # NEED TO UPDATE THIS
Zlower, Zupper = -90*scalecmtomm, -15*scalecmtomm
Rlower, Rupper = -46*scalecmtomm, 46*scalecmtomm
for i in range(100000):
x = np.random.uniform(Rlower,Rupper)
y = np.random.uniform(Rlower,Rupper)
z = np.random.uniform(Zlower,Zupper)
if IfPassFV(x,y,z):
return (x,y,z)
return (0,0,0)
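# RandomizeFV() is simple rejection sampling: draw (x, y, z) uniformly from the
# detector's bounding box and keep the first point that passes IfPassFV, giving up
# after 100000 tries and falling back to (0, 0, 0).  Minimal usage sketch:
#   X, Y, Z = RandomizeFV()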
####################################
## Start creating the output
####################################
# Some default
DefaultEventTime = MaxDriftTime*1000.
##########
fout = open(OutputFilename, 'w')
# headers
fout.write("instruction,recoil_type,x,y,depth,s1_photons,s2_electrons,t\n")
if IfS1S2Correlation:
# events loop
for i in range(NumEvents):
fout.write(str(i)+",")
fout.write(DefaultType+",")
X, Y, Z = RandomizeFV()
fout.write(str(X)+",")
fout.write(str(Y)+",")
#fout.write("random,")
#fout.write("random,")
fout.write(str(-Z)+",")
NumPhoton = int( np.random.uniform(PhotonNumLower, PhotonNumUpper) )
fout.write(str(NumPhoton)+",")
NumElectron = int( np.random.uniform(ElectronNumLower, ElectronNumUpper) )
fout.write(str(NumElectron)+",")
fout.write(str(DefaultEventTime)+"\n")
else:
    # event loop: S1 and S2 are generated without correlation
for i in range(NumEvents):
# first for S1
fout.write(str(i)+",")
fout.write(DefaultType+",")
X, Y, Z = RandomizeFV()
fout.write(str(X)+",")
fout.write(str(Y)+",")
fout.write(str(-Z)+",")
NumPhoton = int( np.random.uniform(PhotonNumLower, PhotonNumUpper) )
fout.write(str(NumPhoton)+",")
fout.write("0,")
fout.write(str(DefaultEventTime)+"\n")
# second for S2
fout.write(str(i)+",")
fout.write(DefaultType+",")
X, Y, Z = RandomizeFV()
fout.write(str(X)+",")
fout.write(str(Y)+",")
fout.write(str(-Z)+",")
fout.write("0,")
NumElectron = int( np.random.uniform(ElectronNumLower, ElectronNumUpper) )
fout.write(str(NumElectron)+",")
TimeOffset = np.random.uniform(-MaxDriftTime*1000., MaxDriftTime*1000.)
S2EventTime = DefaultEventTime+TimeOffset
fout.write(str(S2EventTime)+"\n")
fout.close()
| apache-2.0 | -7,343,586,964,277,585,000 | 31.04 | 92 | 0.572201 | false |
teheavy/AMA3D | Nh3D/3_CathTopo_uploader.py | 1 | 1726 | # Script Version: 1.0
# Author: Te Chen
# Project: AMA3D
# Task Step: 1
# This script loads the CATH Node Names file and records all topology-level nodes into the database.
# CathDomainList file format: CATH Names File (CNF) Format 2.0; for more info, visit www.cathdb.info
import MySQLdb
import os
import sys
# Connect to Database by reading Account File.
with open("Account", "r") as file:
parsed = file.readline().split()
DB = MySQLdb.connect(host=parsed[0], user=parsed[1], passwd=parsed[2], db=parsed[3])
cursor = DB.cursor()
# Read the node list and register CATH topology into database.
os.getcwd()
node_file = open("./Nh3D/CathNames", "r")
line = node_file.readline()
trigger = ''
while line:
if line.startswith("#") == False and line != "":
node_info = line.split(" ")
if len(node_info) == 3:
if node_info[0].count('.') == 2:
print "Working on Node: " + node_info[0]
cursor.execute("""INSERT INTO Topology(Node, Description, Comment, Representative) VALUES (\'%s\', \'%s\', \'%s\', \'%s\')"""
% (node_info[0], str(MySQLdb.escape_string(node_info[2][1:-1])), 'from CathNames', node_info[1]))
# print """INSERT INTO Topology(Node, Description, Comment, Representative) VALUES (\'%s\', \'%s\', \'%s\', \'%s\')"""\
# % (node_info[0], (node_info[2][1:-1]).replace(";", ""), 'from CathNames', node_info[1])
# Trigger a new TC
print trigger
sys.stdout.flush()
trigger = "trigger\t%s\t%d\t%d"%(node_info[0], 4, 0)
elif node_info[0].count('.') == 3:
# Trigger a new TC but leave last flag on.
print trigger[:-1] + "1"
sys.stdout.flush()
break
line = node_file.readline()
# Wrap up and close connection.
DB.commit()
DB.close() | gpl-2.0 | -5,206,685,651,074,116,000 | 33.54 | 129 | 0.637891 | false |
Copenbacon/code-katas | katas/baker.py | 1 | 1095 | """Pete likes to bake some cakes. He has some recipes and ingredients. Unfortunately he is not good in maths. Can you help him to find out, how many cakes he could bake considering his recipes?
Write a function cakes(), which takes the recipe (object) and the available ingredients (also an object) and returns the maximum number of cakes Pete can bake (integer). For simplicity there are no units for the amounts (e.g. 1 lb of flour or 200 g of sugar are simply 1 or 200). Ingredients that are not present in the objects, can be considered as 0.
Examples:
# must return 2
cakes({flour: 500, sugar: 200, eggs: 1}, {flour: 1200, sugar: 1200, eggs: 5, milk: 200})
# must return 0
cakes({apples: 3, flour: 300, sugar: 150, milk: 100, oil: 100}, {sugar: 500, flour: 2000, milk: 2000})"""
def cakes(recipe, available):
match = {}
for item in recipe.keys():
if item not in available.keys():
return 0
if available[item]//recipe[item] == 0:
return 0
else:
match[item] = available[item]//recipe[item]
return min(match.values()) | mit | 5,414,944,121,848,995,000 | 48.818182 | 352 | 0.677626 | false |
pmghalvorsen/gramps_branch | gramps/plugins/view/view.gpr.py | 1 | 8180 | # encoding:utf-8
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2009 Benny Malengier
# Copyright (C) 2009 Douglas S. Blank
# Copyright (C) 2009 Nick Hall
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
MODULE_VERSION="4.2"
#------------------------------------------------------------------------
#
# default views of Gramps
#
#------------------------------------------------------------------------
register(VIEW,
id = 'eventview',
name = _("Events"),
description = _("The view showing all the events"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'eventview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Events", _("Events")),
viewclass = 'EventView',
order = START,
)
register(VIEW,
id = 'familyview',
name = _("Families"),
description = _("The view showing all families"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'familyview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Families", _("Families")),
viewclass = 'FamilyView',
order = START,
)
register(VIEW,
id = 'dashboardview',
name = _("Dashboard"),
description = _("The view showing Gramplets"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'dashboardview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Dashboard", _("Dashboard")),
viewclass = 'DashboardView',
order = START,
)
register(VIEW,
id = 'mediaview',
name = _("Media"),
description = _("The view showing all the media objects"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'mediaview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Media", _("Media")),
viewclass = 'MediaView',
order = START,
)
register(VIEW,
id = 'noteview',
name = _("Notes"),
description = _("The view showing all the notes"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'noteview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Notes", _("Notes")),
viewclass = 'NoteView',
order = START,
)
register(VIEW,
id = 'relview',
name = _("Relationships"),
description = _("The view showing all relationships of the selected person"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'relview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Relationships", _("Relationships")),
viewclass = 'RelationshipView',
order = START,
)
register(VIEW,
id = 'pedigreeview',
name = _("Pedigree"),
description = _("The view showing an ancestor pedigree of the selected person"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'pedigreeview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Ancestry", _("Charts")),
viewclass = 'PedigreeView',
order = START,
stock_icon = 'gramps-pedigree',
)
register(VIEW,
id = 'fanchartview',
name = _("Fan Chart"),
category = ("Ancestry", _("Charts")),
description = _("A view showing parents through a fanchart"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'fanchartview.py',
authors = ["Douglas S. Blank", "B. Malengier"],
authors_email = ["[email protected]", "[email protected]"],
viewclass = 'FanChartView',
stock_icon = 'gramps-fanchart',
)
register(VIEW,
id = 'fanchartdescview',
name = _("Descendant Fan"),
category = ("Ancestry", _("Charts")),
description = _("Showing descendants through a fanchart"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'fanchartdescview.py',
authors = ["B. Malengier"],
authors_email = ["[email protected]"],
viewclass = 'FanChartDescView',
stock_icon = 'gramps-fanchartdesc',
)
register(VIEW,
id = 'personview',
name = _("Grouped People"),
description = _("The view showing all people in the Family Tree grouped per"
" family name"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'persontreeview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("People", _("People")),
viewclass = 'PersonTreeView',
order = START,
stock_icon = 'gramps-tree-group',
)
register(VIEW,
id = 'personlistview',
name = _("People"),
description = _("The view showing all people in the Family Tree"
" in a flat list"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'personlistview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("People", _("People")),
viewclass = 'PersonListView',
order = START,
stock_icon = 'gramps-tree-list',
)
register(VIEW,
id = 'placelistview',
name = _("Places"),
description = _("The view showing all the places of the Family Tree"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'placelistview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Places", _("Places")),
viewclass = 'PlaceListView',
order = START,
stock_icon = 'gramps-tree-list',
)
register(VIEW,
id = 'placetreeview',
name = _("Place Tree"),
description = _("A view displaying places in a tree format."),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'placetreeview.py',
authors = ["Donald N. Allingham", "Gary Burton", "Nick Hall"],
authors_email = [""],
category = ("Places", _("Places")),
viewclass = 'PlaceTreeView',
stock_icon = 'gramps-tree-group',
)
register(VIEW,
id = 'repoview',
name = _("Repositories"),
description = _("The view showing all the repositories"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'repoview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Repositories", _("Repositories")),
viewclass = 'RepositoryView',
order = START,
)
register(VIEW,
id = 'sourceview',
name = _("Sources"),
description = _("The view showing all the sources"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'sourceview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Sources", _("Sources")),
viewclass = 'SourceView',
order = START,
stock_icon = 'gramps-tree-list',
)
register(VIEW,
id = 'citationlistview',
name = _("Citations"),
description = _("The view showing all the citations"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'citationlistview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Citations", _("Citations")),
viewclass = 'CitationListView',
order = START,
)
register(VIEW,
id = 'citationtreeview',
name = _("Citation Tree"),
description = _("A view displaying citations and sources in a tree format."),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'citationtreeview.py',
authors = ["Tim G L Lyons", "Nick Hall"],
authors_email = [""],
category = ("Sources", _("Sources")),
viewclass = 'CitationTreeView',
stock_icon = 'gramps-tree-select',
)
| gpl-2.0 | -9,066,584,488,088,675,000 | 26.918089 | 81 | 0.665037 | false |
minghuascode/pyj | library/pyjamas/ui/BuilderPanel.py | 1 | 3810 | """ Pyjamas UI BuilderPanel: takes a PyJsGlade builder spec and adds widgets
requested using the methods just like in any other Panel class.
Copyright (C) 2010 Luke Kenneth Casson Leighton <[email protected]>
The purpose of this class is to be able to set up a Panel of any type
that can be dynamically created using Builder, and then add child widgets
once again by their name as specified in the Builder spec file.
This class therefore has all of the usual Panel functions (add,
remove, insert, __iter__, getWidget) as well as those required
for it to be instantiable via Builder itself (!) such as
addIndexedItem, getIndex and getIndexedChild.
"""
from pyjamas.ui.BuilderWidget import BuilderWidget
class BuilderPanel(BuilderWidget):
def __init__(self, **kwargs):
self.panel_instance_name = None
BuilderWidget.__init__(self, **kwargs)
def add(self, child_instance_name, *args, **kwargs):
""" versatile adding-function, copes with:
HTMLPanel.add(widget, id)
HTMLTable.add(item, row, col)
HorizontalPanel.add(item)
VerticalPanel.add(item)
VerticalSplitPanel.add(item)
HorizontalSplitPanel.add(item)
DeckPanel.add(item)
TabPanel.add(item)
DockPanel.add(widget, direction)
StackPanel.add(widget, stackText, asHTML)
AbsolutePanel.add(widget, left, top)
FlowPanel.add(widget)
CaptionPanel.add(widget)
ScrollPanel.add(widget)
"""
widget = self.b.createInstance(child_instance_name, self.event_receiver)
self.getPanel().add(widget, *args, **kwargs)
return widget
def insert(self, child_instance_name, *args, **kwargs):
widget = self.b.createInstance(child_instance_name, self.event_receiver)
self.getPanel().insert(widget, *args, **kwargs)
return widget
def remove(self, widget, *args, **kwargs):
""" versatile removing-function, copes with:
HTMLPanel.remove(widget) # if it had one
HTMLTable.remove(item)
HorizontalPanel.remove(item)
VerticalPanel.remove(item)
VerticalSplitPanel.remove(item) # if it had one
HorizontalSplitPanel.remove(item) # if it had one
DeckPanel.remove(item)
TabPanel.remove(item)
DockPanel.remove(item)
StackPanel.remove(item, index=None)
AbsolutePanel.remove(item)
FlowPanel.add(widget)
"""
self.getPanel().remove(widget, *args, **kwargs)
def __iter__(self):
return self.b.__iter__()
def getChildren(self):
return self.b.getChildren()
def setPanelInstanceName(self, panel_instance_name):
self.panel_instance_name = panel_instance_name
def getPanel(self):
if self.panel_instance_name is None:
return self.widget
wids = self.b.widget_instances[self.instance_name]
return wids[self.panel_instance_name]
# these next three functions are part of the standard Builder API
# and are required for panels to be manageable by PyJsGlade.
def addIndexedItem(self, index, instance_name):
        widget = self.b.createInstance(instance_name, self.event_receiver)
self.getPanel().addIndexedItem(index, widget)
def getIndexedChild(self, index):
return self.getPanel().getIndexedChild(index)
def getWidgetIndex(self, widget):
return self.getPanel().getWidgetIndex(widget)
def getWidget(self, *args):
return self.getPanel().getWidget(*args)
def getWidgetCount(self):
return self.getPanel().getWidgetCount()
def setWidgetPosition(self, *args):
return self.getPanel().setWidgetPosition(*args)
| apache-2.0 | 5,191,970,841,784,223,000 | 35.990291 | 80 | 0.655643 | false |
erikdejonge/newsrivr | daemons/python2/hn.py | 1 | 17227 | """
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import division
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import chr
from builtins import str
from builtins import range
from past.utils import old_div
from xml.sax.saxutils import escape
import urllib.request, urllib.parse, urllib.error, re, os, urllib.parse
import html.parser, feedparser
from BeautifulSoup import BeautifulSoup, Comment
from pprint import pprint
import codecs
import sys
import html.entities
streamWriter = codecs.lookup("utf-8")[-1]
sys.stdout = streamWriter(sys.stdout)
HN_RSS_FEED = "http://news.ycombinator.com/rss"
negative_str = "([A-Z,a-z,0-9,-,_ ]*comments[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*comment[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*bcomments[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*meta[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*footer[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*footnote[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*foot[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*bottom[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*klasbox[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*side[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*inner[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*sidebar[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*hide[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*component[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*reactie[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*ad[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*ads[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*transcript[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*react[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*transcript[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*transcriptText[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*error[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*related[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*also[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*share[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*sideblock[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*policy[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*related[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*social[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*reflist[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*postmetadata[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*references[A-Z,a-z,0-9,-,_ ]*)|"
negative_str += "([A-Z,a-z,0-9,-,_ ]*promo[A-Z,a-z,0-9,-,_ ]*)"
NEGATIVE = re.compile(negative_str)
super_negative_str = "([A-Z,a-z,0-9,-,_ ]*comment[A-Z,a-z,0-9,-,_ ]*)|"
super_negative_str += "([A-Z,a-z,0-9,-,_ ]*voting[A-Z,a-z,0-9,-,_ ]*)|"
super_negative_str += "([A-Z,a-z,0-9,-,_ ]*reactie[A-Z,a-z,0-9,-,_ ]*)|"
super_negative_str += "([A-Z,a-z,0-9,-,_ ]*reaction[A-Z,a-z,0-9,-,_ ]*)|"
super_negative_str += "([A-Z,a-z,0-9,-,_ ]*idgedragregelsusercontent[A-Z,a-z,0-9,-,_ ]*)|"
super_negative_str += "([A-Z,a-z,0-9,-,_ ]*vote[A-Z,a-z,0-9,-,_ ]*)"
SUPERNEGATIVE = re.compile(super_negative_str)
positive_str = "([A-Z,a-z,0-9,-,_ ]*summary[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*post[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*hentry[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*entry[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*content[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*text[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*tekst[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*venue[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*venueInfo[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*venueDetails[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*body[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*bodycontent[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*content permalink[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*wrapper[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*article[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*articleblock[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*text[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*tekst[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*lead[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*leadarticle[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*story[A-Z,a-z,0-9,-,_ ]*)|"
positive_str += "([A-Z,a-z,0-9,-,_ ]*permalink[A-Z,a-z,0-9,-,_ ]*)"
POSITIVE = re.compile(positive_str)
PUNCTUATION = re.compile("""[!"#$%&\"()*+,-./:;<=>?@[\\]^_`{|}~]""")
MAXLINKS = 50
def latin1_to_ascii (unicrap):
xlate={0xc0:'A', 0xc1:'A', 0xc2:'A', 0xc3:'A', 0xc4:'A', 0xc5:'A',
0xc6:'Ae', 0xc7:'C',
0xc8:'E', 0xc9:'E', 0xca:'E', 0xcb:'E',
0xcc:'I', 0xcd:'I', 0xce:'I', 0xcf:'I',
0xd0:'Th', 0xd1:'N',
0xd2:'O', 0xd3:'O', 0xd4:'O', 0xd5:'O', 0xd6:'O', 0xd8:'O',
0xd9:'U', 0xda:'U', 0xdb:'U', 0xdc:'U',
0xdd:'Y', 0xde:'th', 0xdf:'ss',
0xe0:'a', 0xe1:'a', 0xe2:'a', 0xe3:'a', 0xe4:'a', 0xe5:'a',
0xe6:'ae', 0xe7:'c',
0xe8:'e', 0xe9:'e', 0xea:'e', 0xeb:'e',
0xec:'i', 0xed:'i', 0xee:'i', 0xef:'i',
0xf0:'th', 0xf1:'n',
0xf2:'o', 0xf3:'o', 0xf4:'o', 0xf5:'o', 0xf6:'o', 0xf8:'o',
0xf9:'u', 0xfa:'u', 0xfb:'u', 0xfc:'u',
0xfd:'y', 0xfe:'th', 0xff:'y',
0xa1:'!', 0xa2:'{cent}', 0xa3:'{pound}', 0xa4:'{currency}',
0xa5:'{yen}', 0xa6:'|', 0xa7:'{section}', 0xa8:'{umlaut}',
0xa9:'{C}', 0xaa:'{^a}', 0xab:'<<', 0xac:'{not}',
0xad:'-', 0xae:'{R}', 0xaf:'_', 0xb0:'{degrees}',
0xb1:'{+/-}', 0xb2:'{^2}', 0xb3:'{^3}', 0xb4:"'",
0xb5:'{micro}', 0xb6:'{paragraph}', 0xb7:'*', 0xb8:'{cedilla}',
0xb9:'{^1}', 0xba:'{^o}', 0xbb:'>>',
0xbc:'{1/4}', 0xbd:'{1/2}', 0xbe:'{3/4}', 0xbf:'?',
0xd7:'*', 0xf7:'/'
}
r = ''
for i in unicrap:
if ord(i) in xlate:
r += xlate[ord(i)]
elif ord(i) >= 0x80:
pass
else:
r += str(i)
return r
def toUTF8(data):
try:
data = data.encode("utf-8")
except:
data = latin1_to_ascii(data)
return data
def text2simpleHtml(data):
data = data.replace("<h1"," <b").replace("</h1>","</b><br><br>")
data = data.replace("<h2"," <b").replace("</h2>","</b><br>")
data = data.replace("<h3>","").replace("</h3>","<br>")
VALID_TAGS = ["strong", "b", "i", "table", "th", "tr", "td", "a", "code", "em", "p", "ul", "li", "br"]
soup = BeautifulSoup(data)
for tag in soup.findAll(True):
if tag.name not in VALID_TAGS:
tag.hidden = True
return soup.renderContents()
def _text(node):
return " ".join(node.findAll(text=True))
def get_link_density(elem):
link_length = len("".join([i.text or "" for i in elem.findAll("a")]))
text_length = len(_text(elem))
return old_div(float(link_length), max(text_length, 1))
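# Illustrative reading of the ratio above: for a block like
#   <div><a href="#">read more</a> and a couple of words of copy</div>
# link_length is len("read more") and text_length is the length of all visible
# text, so navigation-heavy blocks score near 1.0 and body copy near 0.0.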
def removeFrontBreaks(s):
try:
soup = BeautifulSoup(s)
whitespace = True
for tag in soup.findAll(True):
tagname = str(tag.name)
            # treat <br> and visually empty <p> as leading "front break" material;
            # stop stripping as soon as the first real content tag shows up
            is_break = tagname == "br" or (tagname == "p" and len(tag.text.strip()) == 0)
            if not is_break:
                whitespace = False
            if is_break and whitespace:
                tag.extract()
return str(soup).strip()
except Exception as e:
clog(e)
return s
def convertentity(m):
    """Convert an HTML entity into a normal string (ISO-8859-1)"""
if m.group(1)=='#':
try:
return chr(int(m.group(2)))
except ValueError:
return '&#%s;' % m.group(2)
try:
return html.entities.entitydefs[m.group(2)]
except KeyError:
return '&%s;' % m.group(2)
def unquotehtml(s):
    """Convert an HTML-quoted string into a normal string (ISO-8859-1).
Works with &#XX; and with > etc."""
return re.sub(r'&(#?)(.+?);',convertentity,s)
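# Example (illustrative, assuming the Python 2 / ``future`` setup this script
# targets): unquotehtml("5 &gt; 4 &amp; &#169; 2010") gives "5 > 4 & \xa9 2010" --
# numeric references go through chr(), named ones through html.entities.entitydefs.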
def getNumLinks(s):
try:
cnt = 0
soup = BeautifulSoup(s)
for a in soup.findAll("a"):
if "href" in a:
#print a
cnt += 1
return cnt
except:
return 0
def removeEmptyParas(html):
foundempty = False
soup = BeautifulSoup(html)
for p in soup.findAll("p"):
if "id" in p:
if "error_" in p["id"]:
p.extract()
if 0==len(p.text.strip().replace("\n", "")):
if foundempty:
p.extract()
foundempty = True
else:
foundempty = False
return soup.renderContents()
def removeEmptyLis(html):
soup = BeautifulSoup(html)
for li in soup.findAll("li"):
for a in li.findAll("a"):
if len(a.contents)>0:
if len(a.contents[0])<5:
a.extract()
if len(li.renderContents().strip())==0:
li.extract()
else:
for x in li.findAll():
if len(x.renderContents().strip())==0:
li.extract()
for ul in soup.findAll("ul"):
if 0==len(ul.findAll("li")):
ul.extract()
return soup.renderContents()
def removeExtraBreaks(s):
try:
l = []
brcnt = 0
soup = BeautifulSoup(s)
for tag in soup.findAll():
if tag.name=="p":
if len(tag.text.strip().replace("\n", ""))<1:
tag.extract()
brcnt += 1
if tag.name=="br":
brcnt += 1
if brcnt>1:
tag.extract()
else:
brcnt = 0
return str(soup)
except Exception as e:
clog(e)
return s
def grabContent(link, html_text):
    # NB: the parameter must not shadow the ``html.parser`` module that the
    # except clauses below rely on.
    if "&gt;" in html_text:
        html_text = unquotehtml(html_text)
    html_text = "<!DOCTYPE html><html><head><meta charset=\"utf-8\"></head><body>"+html_text+"</body></html>"
    #open("usedforscoring.html", "w").write(html_text)
    #exit(1)
    replaceBrs = re.compile("<br */? *>[ \r\n]*<br */? *>")
    html_text = re.sub(replaceBrs, "</p><p>", html_text)
    try:
        soup = BeautifulSoup(html_text)
    except html.parser.HTMLParseError as e:
        try:
            soup = BeautifulSoup(text2simpleHtml(html_text))
        except html.parser.HTMLParseError:
            return ""
#print str(soup)
# REMOVE SCRIPTS
for s in soup.findAll("div"):
if get_link_density(s)>0.5 and len(s.renderContents())>1000:
s.extract()
if "id" in s:
if SUPERNEGATIVE.match(str(s["id"]).lower()):
s.extract()
if "class" in s:
if SUPERNEGATIVE.match(str(s["class"]).lower()):
s.extract()
for s in soup.findAll("script"):
s.extract()
for a in soup.findAll("a"):
if "href" in a:
if "javascript:" in a["href"]:
a.extract()
if "onclick" in a:
if "return " in a["onclick"]:
a.extract()
allParagraphs = soup.findAll("p")
topParent = None
parents = []
for paragraph in allParagraphs:
parent = paragraph.parent
if (parent not in parents):
parents.append(parent)
parent.score = 0
if ("class" in parent):
if (NEGATIVE.match(parent["class"].lower())):
#print parent["class"]
if len(parent.findAll('a'))>MAXLINKS:
parent.score -= 500
parent.score -= 50
if (POSITIVE.match(parent["class"].lower())):
if len(parent.findAll('a'))<MAXLINKS:
parent.score += 25
else:
parent.score -= 150
parent.score += 50
if ("id" in parent):
if (NEGATIVE.match(parent["id"].lower())):
#print parent["id"]
if len(parent.findAll('a'))>MAXLINKS:
parent.score -= 500
parent.score -= 50
if (POSITIVE.match(parent["id"].lower())):
if len(parent.findAll('a'))<MAXLINKS:
parent.score += 25
else:
parent.score -= 150
parent.score += 50
if (parent.score == None):
parent.score = 0
innerText = paragraph.renderContents() #"".join(paragraph.findAll(text=True))
if (len(innerText) > 10):
parent.score += 1
if (len(innerText) > 300):
parent.score += 2
parent.score += innerText.count(",")*3
parent.score += innerText.count(".")*3
for parent in parents:
#print parent.score
#print str(parent )
#print "-------------"
if ((not topParent) or (parent.score > topParent.score)):
topParent = parent
if (not topParent):
return ""
# REMOVE LINK"D STYLES
styleLinks = soup.findAll("link", attrs={"type" : "text/css"})
for s in styleLinks:
s.extract()
# REMOVE ON PAGE STYLES
for s in soup.findAll("style"):
s.extract()
# CLEAN STYLES FROM ELEMENTS IN TOP PARENT
for ele in topParent.findAll(True):
del(ele["style"])
del(ele["class"])
#print str(ele)
#print "-----"
killDivs(topParent)
clean(topParent, "form")
clean(topParent, "object")
clean(topParent, "iframe")
fixLinks(topParent, link)
for s in topParent.findAll("ul"):
if get_link_density(s)>0.3:
s.extract()
lis = topParent.findAll("li")
if len(lis)>50:
for li in lis:
li.extract()
for li in lis:
if len(li)>1:
contents = str(li.contents[1]).replace("\n", "").replace(" ", "").replace("<br>", "").replace("<br/>", "").replace("<br />", "").replace("<p></p>", "")
#print "c", contents
if len(contents)==0:
li.extract()
comments = topParent.findAll(text=lambda text:isinstance(text, Comment))
[comment.extract() for comment in comments]
html2 = topParent.renderContents()
html2 = removeFrontBreaks(html2)
html2 = html2.replace("\n", " ")
for i in range(0, 10):
html2 = html2.replace(" ", " ")
html2 = html2.replace("<div></div>", "")
html2 = html2.replace("<p>\xc2\xa0</p>", "")
html2 = html2.replace("<p></p>", "<br/>")
html2 = html2.replace("<p><br /></p>", "")
#html2 = html2.replace("\xc2\xa9", "")#
html2 = re.sub(r'© (\w+.\w+)', "", html2)
html2 = re.sub(r'© (\w+)', "", html2)
html2 = re.sub(r'\xc2\xa9 (\w+.\w+)', "", html2)
html2 = re.sub(r'\xc2\xa9 (\w+)', "", html2)
#if getNumLinks(html2)>25:
# html2 = "html ignored, more then 25 links"
#print get_link_density(BeautifulSoup(html2))
html2 = removeEmptyLis(html2)
html2 = toUTF8(text2simpleHtml(html2)).replace("a href", "a target='blank' href")
html2 = removeEmptyParas(html2)
html2 = removeExtraBreaks(html2)
html2 = html2.replace("</strong>", "</strong><br/>")
html2 = html2.replace("</b>", "</b><br/>")
#detect
return html2
def fixLinks(parent, link):
tags = parent.findAll(True)
for t in tags:
if ("href" in t):
t["href"] = urllib.parse.urljoin(link, t["href"])
if ("src" in t):
t["src"] = urllib.parse.urljoin(link, t["src"])
def clean(top, tag, minWords=10000):
tags = top.findAll(tag)
for t in tags:
if (t.renderContents().count(" ") < minWords):
t.extract()
def killDivs(parent):
divs = parent.findAll("div")
for d in divs:
p = len(d.findAll("p"))
img = len(d.findAll("img"))
li = len(d.findAll("li"))
a = len(d.findAll("a"))
embed = len(d.findAll("embed"))
pre = len(d.findAll("pre"))
#code = len(d.findAll("code"))
if (d.renderContents().count(",") < 10):
if (pre == 0):# and (code == 0)):
if ((img > p ) or (li > p) or (a > p) or (p == 0) or (embed > 0)):
d.extract()
def upgradeLink(link):
link = link.encode("utf-8")
if (not (link.startswith("http://news.ycombinator.com") or link.endswith(".pdf"))):
linkFile = "upgraded/" + re.sub(PUNCTUATION, "_", link)
if (os.path.exists(linkFile)):
return open(linkFile).read()
else:
content = ""
try:
html = urllib.request.urlopen(link).read()
content = grabContent(link, html)
filp = open(linkFile, "w")
filp.write(content)
filp.close()
except IOError:
pass
return content
else:
return ""
def upgradeFeed(feedUrl):
feedData = urllib.request.urlopen(feedUrl).read()
upgradedLinks = []
parsedFeed = feedparser.parse(feedData)
for entry in parsedFeed.entries:
upgradedLinks.append((entry, upgradeLink(entry.link)))
rss = """<rss version="2.0">
<channel>
<title>Hacker News</title>
<link>http://news.ycombinator.com/</link>
<description>Links for the intellectually curious, ranked by readers.</description>
"""
for entry, content in upgradedLinks:
rss += u"""
<item>
<title>%s</title>
<link>%s</link>
<comments>%s</comments>
<description>
<![CDATA[<a href="%s">Comments</a><br/>%s<br/><a href="%s">Comments</a>]]>
</description>
</item>
""" % (entry.title, escape(entry.link), escape(entry.comments), entry.comments, content.decode("utf-8"), entry.comments)
rss += """
</channel>
</rss>"""
return rss
def clog(s):
from time import gmtime, strftime
s= str(s)
    print('\033[93m' + strftime("%Y-%m-%d %H:%M:%S", gmtime()) + ": " + s + '\033[0m')
if __name__ == "__main__":
c = open("usedforscoring.html", "r").read()
soup = BeautifulSoup(grabContent('x', c))
clog(soup.prettify())
| gpl-2.0 | 4,797,747,525,430,843,000 | 30.961039 | 159 | 0.565914 | false |
looker/sentry | src/sentry/plugins/bases/issue2.py | 1 | 15435 | from __future__ import absolute_import
import six
from rest_framework.response import Response
from social_auth.models import UserSocialAuth
from django.conf import settings
from django.conf.urls import url
from django.core.urlresolvers import reverse
from django.utils.html import format_html
from sentry.api.serializers.models.plugin import PluginSerializer
# api compat
from sentry.exceptions import PluginError # NOQA
from sentry.models import Activity, Event, GroupMeta
from sentry.plugins import Plugin
from sentry.plugins.base.configuration import react_plugin_config
from sentry.plugins.endpoints import PluginGroupEndpoint
from sentry.signals import issue_tracker_used
from sentry.utils.auth import get_auth_providers
from sentry.utils.http import absolute_uri
from sentry.utils.safe import safe_execute
# TODO(dcramer): remove this in favor of GroupEndpoint
class IssueGroupActionEndpoint(PluginGroupEndpoint):
view_method_name = None
plugin = None
def _handle(self, request, group, *args, **kwargs):
GroupMeta.objects.populate_cache([group])
return getattr(self.plugin, self.view_method_name)(request, group, *args, **kwargs)
class IssueTrackingPlugin2(Plugin):
auth_provider = None
allowed_actions = ('create', 'link', 'unlink')
# we default this to None to support legacy integrations, but newer style
# should explicitly call out what is stored
issue_fields = None
# issue_fields = frozenset(['id', 'title', 'url'])
def configure(self, project, request):
return react_plugin_config(self, project, request)
def get_plugin_type(self):
return 'issue-tracking'
def has_project_conf(self):
return True
def get_group_body(self, request, group, event, **kwargs):
result = []
for interface in six.itervalues(event.interfaces):
output = safe_execute(interface.to_string, event, _with_transaction=False)
if output:
result.append(output)
return '\n\n'.join(result)
def get_group_description(self, request, group, event):
output = [
absolute_uri(group.get_absolute_url()),
]
body = self.get_group_body(request, group, event)
if body:
output.extend([
'',
'```',
body,
'```',
])
return '\n'.join(output)
def get_group_title(self, request, group, event):
return event.error()
def is_configured(self, request, project, **kwargs):
raise NotImplementedError
def get_group_urls(self):
_urls = []
for action in self.allowed_actions:
view_method_name = 'view_%s' % action
_urls.append(
url(
r'^%s/' % action,
PluginGroupEndpoint.as_view(
view=getattr(self, view_method_name),
),
)
)
return _urls
def get_auth_for_user(self, user, **kwargs):
"""
        Return a ``UserSocialAuth`` object for the given user based on this plugin's ``auth_provider``.
"""
assert self.auth_provider, 'There is no auth provider configured for this plugin.'
if not user.is_authenticated():
return None
try:
return UserSocialAuth.objects.filter(user=user, provider=self.auth_provider)[0]
except IndexError:
return None
def needs_auth(self, request, project, **kwargs):
"""
Return ``True`` if the authenticated user needs to associate an auth service before
performing actions with this plugin.
"""
if self.auth_provider is None:
return False
if not request.user.is_authenticated():
return True
return not UserSocialAuth.objects.filter(
user=request.user, provider=self.auth_provider
).exists()
def get_new_issue_fields(self, request, group, event, **kwargs):
"""
If overriding, supported properties include 'readonly': true
"""
return [
{
'name': 'title',
'label': 'Title',
'default': self.get_group_title(request, group, event),
'type': 'text'
}, {
'name': 'description',
'label': 'Description',
'default': self.get_group_description(request, group, event),
'type': 'textarea'
}
]
def get_link_existing_issue_fields(self, request, group, event, **kwargs):
return []
def _get_issue_url_compat(self, group, issue, **kwargs):
if self.issue_fields is None:
return self.get_issue_url(group, issue['id'])
return self.get_issue_url(group, issue)
def _get_issue_label_compat(self, group, issue, **kwargs):
if self.issue_fields is None:
return self.get_issue_label(group, issue['id'])
return self.get_issue_label(group, issue)
def get_issue_url(self, group, issue, **kwargs):
"""
Given an issue context (issue_id string or issue dict) return an absolute URL to the issue's details
page.
"""
raise NotImplementedError
def get_issue_label(self, group, issue, **kwargs):
"""
Given an issue context (issue_id string or issue dict) return a string representing the issue.
e.g. GitHub represents issues as GH-XXX
"""
if isinstance(issue, dict):
return '#{}'.format(issue['id'])
return '#{}'.format(issue)
def create_issue(self, request, group, form_data, **kwargs):
"""
Creates the issue on the remote service and returns an issue ID.
Returns ``{'id': '1', 'title': issue_title}``
"""
raise NotImplementedError
def link_issue(self, request, group, form_data, **kwargs):
"""
Can be overridden for any actions needed when linking issues
(like adding a comment to an existing issue).
Returns ``{'id': '1', 'title': issue_title}``
"""
pass
def has_auth_configured(self, **kwargs):
if not self.auth_provider:
return True
return self.auth_provider in get_auth_providers()
def validate_form(self, fields, form_data):
errors = {}
for field in fields:
if field.get('required', True) and not field.get('readonly'):
value = form_data.get(field['name'])
if value is None or value == '':
errors[field['name']] = u'%s is a required field.' % field['label']
return errors
def get_issue_field_map(self):
# XXX(dcramer): legacy support
conf_key = self.get_conf_key()
if self.issue_fields is None:
return {
'id': '{}:tid'.format(conf_key)
}
return {
key: '{}:issue_{}'.format(
conf_key,
key,
)
for key in self.issue_fields
}
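    # For example, a plugin whose conf_key is "example" and whose issue_fields is
    # frozenset(['id', 'title', 'url']) gets
    #   {'id': 'example:issue_id', 'title': 'example:issue_title', 'url': 'example:issue_url'},
    # while a legacy plugin (issue_fields is None) falls back to {'id': 'example:tid'}.
    # (The "example" conf_key is purely illustrative.)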
def build_issue(self, group):
issue_field_map = self.get_issue_field_map()
issue = {}
for key, meta_name in six.iteritems(issue_field_map):
issue[key] = GroupMeta.objects.get_value(group, meta_name, None)
if not any(issue.values()):
return None
return issue
def has_linked_issue(self, group):
return bool(self.build_issue(group))
def unlink_issue(self, request, group, issue, **kwargs):
issue_field_map = self.get_issue_field_map()
for meta_name in six.itervalues(issue_field_map):
GroupMeta.objects.unset_value(group, meta_name)
return self.redirect(group.get_absolute_url())
def view_create(self, request, group, **kwargs):
auth_errors = self.check_config_and_auth(request, group)
if auth_errors:
return Response(auth_errors, status=400)
event = group.get_latest_event()
if event is None:
return Response({
'message': 'Unable to create issues: there are '
'no events associated with this group',
}, status=400)
Event.objects.bind_nodes([event], 'data')
try:
fields = self.get_new_issue_fields(request, group, event, **kwargs)
except Exception as e:
return self.handle_api_error(e)
if request.method == 'GET':
return Response(fields)
errors = self.validate_form(fields, request.DATA)
if errors:
return Response({'error_type': 'validation', 'errors': errors}, status=400)
try:
issue = self.create_issue(
group=group,
form_data=request.DATA,
request=request,
)
except Exception as e:
return self.handle_api_error(e)
if not isinstance(issue, dict):
issue = {'id': issue}
issue_field_map = self.get_issue_field_map()
for key, meta_name in six.iteritems(issue_field_map):
if key in issue:
GroupMeta.objects.set_value(group, meta_name, issue[key])
else:
GroupMeta.objects.unset_value(group, meta_name)
issue_information = {
'title': issue.get('title') or request.DATA.get('title') or self._get_issue_label_compat(group, issue),
'provider': self.get_title(),
'location': self._get_issue_url_compat(group, issue),
'label': self._get_issue_label_compat(group, issue),
}
Activity.objects.create(
project=group.project,
group=group,
type=Activity.CREATE_ISSUE,
user=request.user,
data=issue_information,
)
issue_tracker_used.send(
plugin=self, project=group.project, user=request.user,
sender=type(self)
)
return Response({'issue_url': self.get_issue_url(group, issue)})
def view_link(self, request, group, **kwargs):
auth_errors = self.check_config_and_auth(request, group)
if auth_errors:
return Response(auth_errors, status=400)
event = group.get_latest_event()
if event is None:
return Response({
                'message': 'Unable to link issues: there are '
                'no events associated with this group',
}, status=400)
Event.objects.bind_nodes([event], 'data')
try:
fields = self.get_link_existing_issue_fields(request, group, event, **kwargs)
except Exception as e:
return self.handle_api_error(e)
if request.method == 'GET':
return Response(fields)
errors = self.validate_form(fields, request.DATA)
if errors:
return Response({'error_type': 'validation', 'errors': errors}, status=400)
try:
issue = self.link_issue(
group=group,
form_data=request.DATA,
request=request,
) or {}
except Exception as e:
return self.handle_api_error(e)
# HACK(dcramer): maintain data for legacy issues
if 'id' not in issue and 'issue_id' in request.DATA:
issue['id'] = request.DATA['issue_id']
issue_field_map = self.get_issue_field_map()
for key, meta_name in six.iteritems(issue_field_map):
if key in issue:
GroupMeta.objects.set_value(group, meta_name, issue[key])
else:
GroupMeta.objects.unset_value(group, meta_name)
issue_information = {
'title': issue.get('title') or self._get_issue_label_compat(group, issue),
'provider': self.get_title(),
'location': self._get_issue_url_compat(group, issue),
'label': self._get_issue_label_compat(group, issue),
}
Activity.objects.create(
project=group.project,
group=group,
type=Activity.CREATE_ISSUE,
user=request.user,
data=issue_information,
)
return Response({'message': 'Successfully linked issue.'})
def view_unlink(self, request, group, **kwargs):
auth_errors = self.check_config_and_auth(request, group)
if auth_errors:
return Response(auth_errors, status=400)
issue = self.build_issue(group)
if issue and 'unlink' in self.allowed_actions:
self.unlink_issue(request, group, issue)
return Response({'message': 'Successfully unlinked issue.'})
return Response({'message': 'No issues to unlink.'}, status=400)
def plugin_issues(self, request, group, plugin_issues, **kwargs):
if not self.is_configured(request=request, project=group.project):
return plugin_issues
item = {
'slug': self.slug,
'allowed_actions': self.allowed_actions,
'title': self.get_title()
}
issue = self.build_issue(group)
if issue:
item['issue'] = {
'issue_id': issue.get('id'),
'url': self._get_issue_url_compat(group, issue),
'label': self._get_issue_label_compat(group, issue),
}
item.update(PluginSerializer(group.project).serialize(self, None, request.user))
plugin_issues.append(item)
return plugin_issues
def get_config(self, *args, **kwargs):
# TODO(dcramer): update existing plugins to just use get_config
# TODO(dcramer): remove request kwarg after sentry-plugins has been
# updated
kwargs.setdefault('request', None)
return self.get_configure_plugin_fields(*args, **kwargs)
def check_config_and_auth(self, request, group):
has_auth_configured = self.has_auth_configured()
if not (has_auth_configured and self.is_configured(
project=group.project, request=request)):
if self.auth_provider:
required_auth_settings = settings.AUTH_PROVIDERS[self.auth_provider]
else:
required_auth_settings = None
return {
'error_type': 'config',
'has_auth_configured': has_auth_configured,
'auth_provider': self.auth_provider,
'required_auth_settings': required_auth_settings,
}
if self.needs_auth(project=group.project, request=request):
return {
'error_type': 'auth',
'auth_url': reverse('socialauth_associate', args=[self.auth_provider])
}
# TODO: should we get rid of this (move it to react?)
def tags(self, request, group, tag_list, **kwargs):
if not self.is_configured(request=request, project=group.project):
return tag_list
issue = self.build_issue(group)
if not issue:
return tag_list
tag_list.append(
format_html(
'<a href="{}">{}</a>',
self._get_issue_url_compat(group, issue),
self._get_issue_label_compat(group, issue),
)
)
return tag_list
IssuePlugin2 = IssueTrackingPlugin2
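# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of Sentry): a concrete tracker plugin mostly
# fills in the hooks documented above.  Every name, URL and option key below is
# hypothetical; the point is the shapes -- create_issue()/link_issue() return a
# dict with at least an "id", and get_issue_url() turns that dict back into a
# browsable link.
#
# class ExampleTrackerPlugin(IssueTrackingPlugin2):
#     slug = 'example-tracker'
#     title = 'Example Tracker'
#     issue_fields = frozenset(['id', 'title', 'url'])
#
#     def is_configured(self, request, project, **kwargs):
#         return bool(self.get_option('tracker_url', project))
#
#     def create_issue(self, request, group, form_data, **kwargs):
#         # a real plugin would POST form_data to its tracker here
#         return {'id': '42', 'title': form_data['title'],
#                 'url': 'https://tracker.invalid/issues/42'}
#
#     def get_issue_url(self, group, issue, **kwargs):
#         return issue['url']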
| bsd-3-clause | 7,559,492,697,025,866,000 | 34 | 115 | 0.574862 | false |
olgabot/poshsplice | docs/conf.py | 1 | 8434 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# poshsplice documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import poshsplice
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PoshSplice'
copyright = u'2015, Olga Botvinnik'
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = poshsplice.__version__
# The full version, including alpha/beta/rc tags.
release = poshsplice.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'poshsplicedoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'poshsplice.tex',
u'PoshSplice Documentation',
u'Olga Botvinnik', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'poshsplice',
u'PoshSplice Documentation',
[u'Olga Botvinnik'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'poshsplice',
u'PoshSplice Documentation',
u'Olga Botvinnik',
'poshsplice',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| bsd-3-clause | 8,295,166,194,338,778,000 | 29.669091 | 76 | 0.705715 | false |
DIRACGrid/DIRAC | integration_tests.py | 1 | 25030 | #!/usr/bin/env python
import fnmatch
import os
from pathlib import Path
import re
import shlex
import subprocess
import sys
import tempfile
import time
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
from typing import Optional
import click
import git
import typer
import yaml
from packaging.version import Version
from typer import colors as c
# Editable configuration
DEFAULT_HOST_OS = "cc7"
DEFAULT_MYSQL_VER = "8.0"
DEFAULT_ES_VER = "7.9.1"
FEATURE_VARIABLES = {
"DIRACOSVER": "master",
"DIRACOS_TARBALL_PATH": None,
"TEST_HTTPS": "No",
"DIRAC_FEWER_CFG_LOCKS": None,
"DIRAC_USE_NEWTHREADPOOL": None,
"USE_PYTHON3": None,
}
DEFAULT_MODULES = {
"DIRAC": Path(__file__).parent.absolute(),
}
# Static configuration
DB_USER = "Dirac"
DB_PASSWORD = "Dirac"
DB_ROOTUSER = "root"
DB_ROOTPWD = "password"
DB_HOST = "mysql"
DB_PORT = "3306"
# Implementation details
LOG_LEVEL_MAP = {
"ALWAYS": (c.BLACK, c.WHITE),
"NOTICE": (None, c.MAGENTA),
"INFO": (None, c.GREEN),
"VERBOSE": (None, c.CYAN),
"DEBUG": (None, c.BLUE),
"WARN": (None, c.YELLOW),
"ERROR": (None, c.RED),
"FATAL": (c.RED, c.BLACK),
}
LOG_PATTERN = re.compile(r"^[\d\-]{10} [\d:]{8} UTC [^\s]+ ([A-Z]+):")
class NaturalOrderGroup(click.Group):
"""Group for showing subcommands in the correct order"""
def list_commands(self, ctx):
return self.commands.keys()
app = typer.Typer(
cls=NaturalOrderGroup,
help=f"""Run the DIRAC integration tests.
A local DIRAC setup can be created and tested by running:
\b
./integration_tests.py create
This is equivalent to running:
\b
./integration_tests.py prepare-environment
./integration_tests.py install-server
./integration_tests.py install-client
./integration_tests.py test-server
./integration_tests.py test-client
The test setup can be shutdown using:
\b
./integration_tests.py destroy
See below for additional subcommands which are useful during local development.
## Features
The currently known features and their default values are:
\b
HOST_OS: {DEFAULT_HOST_OS!r}
MYSQL_VER: {DEFAULT_MYSQL_VER!r}
ES_VER: {DEFAULT_ES_VER!r}
{(os.linesep + ' ').join(['%s: %r' % x for x in FEATURE_VARIABLES.items()])}
All features can be prefixed with "SERVER_" or "CLIENT_" to limit their scope.
## Extensions
Integration tests can be ran for extensions to DIRAC by specifying the module
name and path such as:
\b
./integration_tests.py create --extra-module MyDIRAC=/path/to/MyDIRAC
This will modify the setup process based on the contents of
`MyDIRAC/tests/.dirac-ci-config.yaml`. See the Vanilla DIRAC file for the
available options.
## Command completion
Command competion of typer based scripts can be enabled by running:
typer --install-completion
After restarting your terminal you command completion is available using:
typer ./integration_tests.py run ...
"""
)
@app.command()
def create(
flags: Optional[list[str]] = typer.Argument(None),
editable: Optional[bool] = None,
extra_module: Optional[list[str]] = None,
release_var: Optional[str] = None,
run_server_tests: bool = True,
run_client_tests: bool = True,
):
"""Start a local instance of the integration tests"""
prepare_environment(flags, editable, extra_module, release_var)
install_server()
install_client()
exit_code = 0
if run_server_tests:
try:
test_server()
except TestExit as e:
exit_code += e.exit_code
else:
raise NotImplementedError()
if run_client_tests:
try:
test_client()
except TestExit as e:
exit_code += e.exit_code
else:
raise NotImplementedError()
if exit_code != 0:
typer.secho("One or more tests failed", err=True, fg=c.RED)
raise typer.Exit(exit_code)
@app.command()
def destroy():
"""Destroy a local instance of the integration tests"""
typer.secho("Shutting down and removing containers", err=True, fg=c.GREEN)
with _gen_docker_compose(DEFAULT_MODULES) as docker_compose_fn:
os.execvpe(
"docker-compose",
["docker-compose", "-f", docker_compose_fn, "down", "--remove-orphans", "-t", "0"],
_make_env({}),
)
@app.command()
def prepare_environment(
flags: Optional[list[str]] = typer.Argument(None),
editable: Optional[bool] = None,
extra_module: Optional[list[str]] = None,
release_var: Optional[str] = None,
):
"""Prepare the local environment for installing DIRAC."""
_check_containers_running(is_up=False)
if editable is None:
editable = sys.stdout.isatty()
typer.secho(
f"No value passed for --[no-]editable, automatically detected: {editable}",
fg=c.YELLOW,
)
typer.echo(f"Preparing environment")
modules = DEFAULT_MODULES | dict(f.split("=", 1) for f in extra_module)
modules = {k: Path(v).absolute() for k, v in modules.items()}
flags = dict(f.split("=", 1) for f in flags)
docker_compose_env = _make_env(flags)
server_flags = {}
client_flags = {}
for key, value in flags.items():
if key.startswith("SERVER_"):
server_flags[key[len("SERVER_"):]] = value
elif key.startswith("CLIENT_"):
client_flags[key[len("CLIENT_"):]] = value
else:
server_flags[key] = value
client_flags[key] = value
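    # e.g. (illustrative values): passing the flags SERVER_TEST_HTTPS=Yes and
    # USE_PYTHON3=Yes leaves TEST_HTTPS only in server_flags, while USE_PYTHON3
    # lands in both server_flags and client_flags.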
server_config = _make_config(modules, server_flags, release_var, editable)
client_config = _make_config(modules, client_flags, release_var, editable)
typer.secho("Running docker-compose to create containers", fg=c.GREEN)
with _gen_docker_compose(modules) as docker_compose_fn:
subprocess.run(
["docker-compose", "-f", docker_compose_fn, "up", "-d"],
check=True,
env=docker_compose_env,
)
typer.secho("Creating users in server and client containers", fg=c.GREEN)
for container_name in ["server", "client"]:
if os.getuid() == 0:
continue
cmd = _build_docker_cmd(container_name, use_root=True, cwd="/")
gid = str(os.getgid())
uid = str(os.getuid())
ret = subprocess.run(cmd + ["groupadd", "--gid", gid, "dirac"], check=False)
if ret.returncode != 0:
            typer.secho(f"Failed to add group dirac with id={gid}", fg=c.YELLOW)
subprocess.run(
cmd
+ [
"useradd",
"--uid",
uid,
"--gid",
gid,
"-s",
"/bin/bash",
"-d",
"/home/dirac",
"dirac",
],
check=True,
)
subprocess.run(cmd + ["chown", "dirac", "/home/dirac"], check=True)
typer.secho("Creating MySQL user", fg=c.GREEN)
cmd = ["docker", "exec", "mysql", "mysql", f"--password={DB_ROOTPWD}", "-e"]
# It sometimes takes a while for MySQL to be ready so wait for a while if needed
for _ in range(10):
ret = subprocess.run(
cmd + [f"CREATE USER '{DB_USER}'@'%' IDENTIFIED BY '{DB_PASSWORD}';"],
check=False,
)
        if ret.returncode == 0:
            break
        typer.secho("Failed to connect to MySQL, will retry in 10 seconds", fg=c.YELLOW)
        time.sleep(10)
    else:
        raise Exception(ret)
subprocess.run(
cmd + [f"CREATE USER '{DB_USER}'@'localhost' IDENTIFIED BY '{DB_PASSWORD}';"],
check=True,
)
subprocess.run(
cmd + [f"CREATE USER '{DB_USER}'@'mysql' IDENTIFIED BY '{DB_PASSWORD}';"],
check=True,
)
typer.secho("Copying files to containers", fg=c.GREEN)
for name, config in [("server", server_config), ("client", client_config)]:
if path := config.get("DIRACOS_TARBALL_PATH"):
path = Path(path)
if config["USE_PYTHON3"]:
config["DIRACOS_TARBALL_PATH"] = f"/{path.name}"
subprocess.run(
["docker", "cp", str(path), f"{name}:/{config['DIRACOS_TARBALL_PATH']}"],
check=True,
)
else:
md5_fn = Path(str(path).replace(".tar.gz", ".md5"))
if not md5_fn.exists():
typer.secho(
"Failed to find MD5 filename for DIRACOS_TARBALL_PATH. "
f"Expected at: {md5_fn}",
err=True,
fg=c.RED,
)
raise typer.Exit(1)
subprocess.run(["docker", "cp", str(path), f"{name}:/{path.name}"], check=True)
subprocess.run(["docker", "cp", str(md5_fn), f"{name}:/{md5_fn.name}"], check=True)
config["DIRACOS_TARBALL_PATH"] = "/"
config["DIRACOSVER"] = md5_fn.stem.split("-", 1)[1]
config_as_shell = _dict_to_shell(config)
typer.secho(f"## {name.title()} config is:", fg=c.BRIGHT_WHITE, bg=c.BLACK)
typer.secho(config_as_shell)
with tempfile.TemporaryDirectory() as tmpdir:
path = Path(tmpdir) / "CONFIG"
path.write_text(config_as_shell)
subprocess.run(
["docker", "cp", str(path), f"{name}:/home/dirac"],
check=True,
)
for module_name, module_configs in _load_module_configs(modules).items():
for command in module_configs.get("commands", {}).get("post-prepare", []):
typer.secho(
f"Running post-prepare command for {module_name}: {command}",
err=True,
fg=c.GREEN,
)
subprocess.run(command, check=True, shell=True)
@app.command()
def install_server():
"""Install DIRAC in the server container."""
_check_containers_running()
typer.secho("Running server installation", fg=c.GREEN)
base_cmd = _build_docker_cmd("server", tty=False)
subprocess.run(
base_cmd
+ ["bash", "/home/dirac/LocalRepo/TestCode/DIRAC/tests/CI/install_server.sh"],
check=True,
)
typer.secho("Copying credentials and certificates", fg=c.GREEN)
base_cmd = _build_docker_cmd("client", tty=False)
subprocess.run(
base_cmd
+ [
"mkdir",
"-p",
"/home/dirac/ServerInstallDIR/user",
"/home/dirac/ClientInstallDIR/etc",
"/home/dirac/.globus",
],
check=True,
)
for path in [
"etc/grid-security",
"user/client.pem",
"user/client.key",
f"/tmp/x509up_u{os.getuid()}",
]:
source = os.path.join("/home/dirac/ServerInstallDIR", path)
ret = subprocess.run(
["docker", "cp", f"server:{source}", "-"],
check=True,
text=False,
stdout=subprocess.PIPE,
)
if path.startswith("user/"):
dest = f"client:/home/dirac/ServerInstallDIR/{os.path.dirname(path)}"
elif path.startswith("/"):
dest = f"client:{os.path.dirname(path)}"
else:
dest = f"client:/home/dirac/ClientInstallDIR/{os.path.dirname(path)}"
subprocess.run(
["docker", "cp", "-", dest], check=True, text=False, input=ret.stdout
)
subprocess.run(
base_cmd
+ [
"bash",
"-c",
"cp /home/dirac/ServerInstallDIR/user/client.* /home/dirac/.globus/",
],
check=True,
)
@app.command()
def install_client():
"""Install DIRAC in the client container."""
_check_containers_running()
typer.secho("Running client installation", fg=c.GREEN)
base_cmd = _build_docker_cmd("client")
subprocess.run(
base_cmd
+ ["bash", "/home/dirac/LocalRepo/TestCode/DIRAC/tests/CI/install_client.sh"],
check=True,
)
@app.command()
def test_server():
"""Run the server integration tests."""
_check_containers_running()
typer.secho("Running server tests", err=True, fg=c.GREEN)
base_cmd = _build_docker_cmd("server")
ret = subprocess.run(
base_cmd + ["bash", "TestCode/DIRAC/tests/CI/run_tests.sh"], check=False
)
color = c.GREEN if ret.returncode == 0 else c.RED
typer.secho(f"Server tests finished with {ret.returncode}", err=True, fg=color)
raise TestExit(ret.returncode)
@app.command()
def test_client():
"""Run the client integration tests."""
_check_containers_running()
typer.secho("Running client tests", err=True, fg=c.GREEN)
base_cmd = _build_docker_cmd("client")
ret = subprocess.run(
base_cmd + ["bash", "TestCode/DIRAC/tests/CI/run_tests.sh"], check=False
)
color = c.GREEN if ret.returncode == 0 else c.RED
typer.secho(f"Client tests finished with {ret.returncode}", err=True, fg=color)
raise TestExit(ret.returncode)
@app.command()
def exec_server():
"""Start an interactive session in the server container."""
_check_containers_running()
cmd = _build_docker_cmd("server")
cmd += [
"bash",
"-c",
". $HOME/CONFIG && . $HOME/ServerInstallDIR/bashrc && exec bash",
]
typer.secho("Opening prompt inside server container", err=True, fg=c.GREEN)
os.execvp(cmd[0], cmd)
@app.command()
def exec_client():
"""Start an interactive session in the client container."""
_check_containers_running()
cmd = _build_docker_cmd("client")
cmd += [
"bash",
"-c",
". $HOME/CONFIG && . $HOME/ClientInstallDIR/bashrc && exec bash",
]
typer.secho("Opening prompt inside client container", err=True, fg=c.GREEN)
os.execvp(cmd[0], cmd)
@app.command()
def exec_mysql():
"""Start an interactive session in the server container."""
_check_containers_running()
cmd = _build_docker_cmd("mysql", use_root=True, cwd='/')
cmd += [
"bash",
"-c",
f"exec mysql --user={DB_USER} --password={DB_PASSWORD}",
]
typer.secho("Opening prompt inside server container", err=True, fg=c.GREEN)
os.execvp(cmd[0], cmd)
@app.command()
def list_services():
"""List the services which have been running.
Only the services for which /log/current exists are shown.
"""
_check_containers_running()
typer.secho("Known services:", err=True)
for service in _list_services():
typer.secho(f"* {service}", err=True)
@app.command()
def runsvctrl(command: str, pattern: str):
"""Execute runsvctrl inside the server container."""
_check_containers_running()
cmd = _build_docker_cmd("server", cwd="/home/dirac/ServerInstallDIR/runit")
services = fnmatch.filter(_list_services(), pattern)
if not services:
typer.secho(f"No services match {pattern!r}", fg=c.RED)
raise typer.Exit(code=1)
cmd += ["runsvctrl", command] + services
os.execvp(cmd[0], cmd)
@app.command()
def logs(pattern: str = "*", lines: int = 10, follow: bool = True):
"""Show DIRAC's logs from the service container.
For services matching [--pattern] show the most recent [--lines] from the
    logs. If [--follow] is True, continuously stream the logs.
"""
_check_containers_running()
services = _list_services()
base_cmd = _build_docker_cmd("server", tty=False) + ["tail"]
base_cmd += [f"--lines={lines}"]
if follow:
base_cmd += ["-f"]
with ThreadPoolExecutor(len(services)) as pool:
for service in fnmatch.filter(services, pattern):
cmd = base_cmd + [f"ServerInstallDIR/runit/{service}/log/current"]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=None, text=True)
pool.submit(_log_popen_stdout, p)
class TestExit(typer.Exit):
pass
@contextmanager
def _gen_docker_compose(modules):
# Load the docker-compose configuration and mount the necessary volumes
input_fn = Path(__file__).parent / "tests/CI/docker-compose.yml"
docker_compose = yaml.safe_load(input_fn.read_text())
volumes = [
f"{path}:/home/dirac/LocalRepo/ALTERNATIVE_MODULES/{name}"
for name, path in modules.items()
]
volumes += [
f"{path}:/home/dirac/LocalRepo/TestCode/{name}"
for name, path in modules.items()
]
docker_compose["services"]["dirac-server"]["volumes"] = volumes[:]
docker_compose["services"]["dirac-client"]["volumes"] = volumes[:]
# Add any extension services
for module_name, module_configs in _load_module_configs(modules).items():
for service_name, service_config in module_configs["extra-services"].items():
typer.secho(f"Adding service {service_name} for {module_name}", err=True, fg=c.GREEN)
docker_compose["services"][service_name] = service_config.copy()
docker_compose["services"][service_name]["volumes"] = volumes[:]
# Write to a tempory file with the appropriate profile name
prefix = "ci"
with tempfile.TemporaryDirectory() as tmpdir:
output_fn = Path(tmpdir) / prefix / "docker-compose.yml"
output_fn.parent.mkdir()
output_fn.write_text(yaml.safe_dump(docker_compose, sort_keys=False))
yield output_fn
def _check_containers_running(*, is_up=True):
with _gen_docker_compose(DEFAULT_MODULES) as docker_compose_fn:
running_containers = subprocess.run(
["docker-compose", "-f", docker_compose_fn, "ps", "-q", "-a"],
stdout=subprocess.PIPE,
check=True,
text=True,
).stdout.split("\n")
if is_up:
if not any(running_containers):
typer.secho(
f"No running containers found, environment must be prepared first!",
err=True,
fg=c.RED,
)
raise typer.Exit(code=1)
else:
if any(running_containers):
typer.secho(
f"Running instance already found, it must be destroyed first!",
err=True,
fg=c.RED,
)
raise typer.Exit(code=1)
def _find_dirac_release_and_branch():
# Start by looking for the GitHub/GitLab environment variables
ref = os.environ.get("CI_COMMIT_REF_NAME", os.environ.get("GITHUB_REF"))
if ref == "refs/heads/integration":
return "integration", ""
ref = os.environ.get(
"CI_MERGE_REQUEST_TARGET_BRANCH_NAME", os.environ.get("GITHUB_BASE_REF")
)
if ref == "integration":
return "integration", ""
repo = git.Repo(os.getcwd())
# Try to make sure the upstream remote is up to date
try:
upstream = repo.remote("upstream")
except ValueError:
typer.secho("No upstream remote found, adding", err=True, fg=c.YELLOW)
upstream = repo.create_remote(
"upstream", "https://github.com/DIRACGrid/DIRAC.git"
)
try:
upstream.fetch()
except Exception:
typer.secho("Failed to fetch from remote 'upstream'", err=True, fg=c.YELLOW)
# Find the most recent tag on the current branch
version = Version(
repo.git.describe(
dirty=True,
tags=True,
long=True,
match="*[0-9]*",
exclude=["v[0-9]r*", "v[0-9][0-9]r*"],
).split("-")[0]
)
# See if there is a remote branch named "rel-vXrY"
version_branch = f"rel-v{version.major}r{version.minor}"
try:
upstream.refs[version_branch]
except IndexError:
typer.secho(
f"Failed to find branch for {version_branch}, defaulting to integration",
err=True,
fg=c.YELLOW,
)
return "integration", ""
else:
return "", f"v{version.major}r{version.minor}"
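# Return convention, as consumed in _make_config below: ("integration", "") pins
# DIRAC_RELEASE to integration, while ("", "vXrY") leaves DIRAC_RELEASE empty and
# sets DIRACBRANCH to the release branch derived from the most recent tag.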
def _make_env(flags):
env = os.environ.copy()
env["DIRAC_UID"] = str(os.getuid())
env["DIRAC_GID"] = str(os.getgid())
env["HOST_OS"] = flags.pop("HOST_OS", DEFAULT_HOST_OS)
env["CI_REGISTRY_IMAGE"] = flags.pop("CI_REGISTRY_IMAGE", "diracgrid")
env["MYSQL_VER"] = flags.pop("MYSQL_VER", DEFAULT_MYSQL_VER)
env["ES_VER"] = flags.pop("ES_VER", DEFAULT_ES_VER)
return env
def _dict_to_shell(variables):
lines = []
for name, value in variables.items():
if value is None:
continue
elif isinstance(value, list):
lines += [f"declare -a {name}"]
lines += [f"{name}+=({shlex.quote(v)})" for v in value]
elif isinstance(value, bool):
lines += [f"export {name}={'Yes' if value else 'No'}"]
elif isinstance(value, str):
lines += [f"export {name}={shlex.quote(value)}"]
else:
raise NotImplementedError(name, value, type(value))
return "\n".join(lines)
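# A quick illustrative rendering of the rules above:
#   _dict_to_shell({"TESTREPO": ["/a", "/b"], "DEBUG": True, "UNSET": None})
# produces:
#   declare -a TESTREPO
#   TESTREPO+=(/a)
#   TESTREPO+=(/b)
#   export DEBUG=Yes
# (None-valued entries are skipped entirely.)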
def _make_config(modules, flags, release_var, editable):
config = {
"DEBUG": "True",
# MYSQL Settings
"DB_USER": DB_USER,
"DB_PASSWORD": DB_PASSWORD,
"DB_ROOTUSER": DB_ROOTUSER,
"DB_ROOTPWD": DB_ROOTPWD,
"DB_HOST": DB_HOST,
"DB_PORT": DB_PORT,
# ElasticSearch settings
"NoSQLDB_HOST": "elasticsearch",
"NoSQLDB_PORT": "9200",
# Hostnames
"SERVER_HOST": "server",
"CLIENT_HOST": "client",
# Test specific variables
"WORKSPACE": "/home/dirac",
}
if editable:
config["PIP_INSTALL_EXTRA_ARGS"] = "-e"
required_feature_flags = []
for module_name, module_ci_config in _load_module_configs(modules).items():
config |= module_ci_config["config"]
required_feature_flags += module_ci_config.get("required-feature-flags", [])
config["DIRAC_CI_SETUP_SCRIPT"] = "/home/dirac/LocalRepo/TestCode/" + config["DIRAC_CI_SETUP_SCRIPT"]
# This can likely be removed after the Python 3 migration
if release_var:
config |= dict([release_var.split("=", 1)])
else:
config["DIRAC_RELEASE"], config["DIRACBRANCH"] = _find_dirac_release_and_branch()
for key, default_value in FEATURE_VARIABLES.items():
config[key] = flags.pop(key, default_value)
for key in required_feature_flags:
try:
config[key] = flags.pop(key)
except KeyError:
typer.secho(f"Required feature variable {key!r} is missing", err=True, fg=c.RED)
raise typer.Exit()
config["TESTREPO"] = [
f"/home/dirac/LocalRepo/TestCode/{name}" for name in modules
]
config["ALTERNATIVE_MODULES"] = [
f"/home/dirac/LocalRepo/ALTERNATIVE_MODULES/{name}" for name in modules
]
if not config["USE_PYTHON3"]:
config["ALTERNATIVE_MODULES"] = [
f"{x}/src/{Path(x).name}" for x in config["ALTERNATIVE_MODULES"]
]
# Exit with an error if there are unused feature flags remaining
if flags:
typer.secho(f"Unrecognised feature flags {flags!r}", err=True, fg=c.RED)
raise typer.Exit(code=1)
return config
def _load_module_configs(modules):
module_ci_configs = {}
for module_name, module_path in modules.items():
module_ci_config_path = module_path / "tests/.dirac-ci-config.yaml"
if not module_ci_config_path.exists():
continue
module_ci_configs[module_name] = yaml.safe_load(module_ci_config_path.read_text())
return module_ci_configs
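# An illustrative (hypothetical) tests/.dirac-ci-config.yaml for an extension,
# using only the keys this script consumes; every name below is made up:
#
#   config:
#     DIRAC_CI_SETUP_SCRIPT: MyDIRAC/tests/CI/my_setup.sh
#   required-feature-flags:
#     - MYEXT_SECRET
#   extra-services:
#     my-service:
#       image: registry.invalid/my-service:latest
#   commands:
#     post-prepare:
#       - echo "extra preparation for MyDIRAC"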
def _build_docker_cmd(container_name, *, use_root=False, cwd="/home/dirac", tty=True):
if use_root or os.getuid() == 0:
user = "root"
else:
user = "dirac"
cmd = ["docker", "exec"]
if tty:
if sys.stdout.isatty():
cmd += ["-it"]
else:
typer.secho(
'Not passing "-it" to docker as stdout is not a tty',
err=True,
fg=c.YELLOW,
)
cmd += [
"-e=TERM=xterm-color",
"-e=INSTALLROOT=/home/dirac",
f"-e=INSTALLTYPE={container_name}",
f"-u={user}",
f"-w={cwd}",
container_name,
]
return cmd
def _list_services():
cmd = _build_docker_cmd("server")
cmd += [
"bash",
"-c",
'cd ServerInstallDIR/runit/ && for fn in */*/log/current; do echo "$(dirname "$(dirname "$fn")")"; done'
]
ret = subprocess.run(cmd, check=False, stdout=subprocess.PIPE, text=True)
if ret.returncode:
typer.secho("Failed to find list of available services", err=True, fg=c.RED)
typer.secho(f"stdout was: {ret.stdout!r}", err=True)
typer.secho(f"stderr was: {ret.stderr!r}", err=True)
raise typer.Exit(1)
return ret.stdout.split()
def _log_popen_stdout(p):
while p.poll() is None:
line = p.stdout.readline().rstrip()
if not line:
continue
bg, fg = None, None
if match := LOG_PATTERN.match(line):
bg, fg = LOG_LEVEL_MAP.get(match.groups()[0], (bg, fg))
typer.secho(line, err=True, bg=bg, fg=fg)
if __name__ == "__main__":
app()
| gpl-3.0 | -6,613,157,610,265,139,000 | 31.633638 | 112 | 0.588334 | false |
mostaphaRoudsari/Honeybee | src/Honeybee_EnergyPlus Window Air Gap.py | 1 | 3032 | #
# Honeybee: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Honeybee.
#
# Copyright (c) 2013-2020, Mostapha Sadeghipour Roudsari <[email protected]>
# Honeybee is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Honeybee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Honeybee; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
Use this component to create a custom material for a window air gap, which can be plugged into the "Honeybee_EnergyPlus Construction" component.
_
It is important to note that this component only creates gaps of air and not other gases.
Also, the material out of this component represents only a single layer of air, which can be combined with the "Honeybee_EnergyPlus Glass Material" to make multi-pane windows.
If you have specifications for a whole window element and not individual panes of glass and gas, you are better off using the "Honeybee_EnergyPlus Window Material" component instead of this one.
-
Provided by Honeybee 0.0.66
Args:
        _name_: A text name for your window air gap material.
_thickness_: A number that represents the thickness of the air gap in meters. The default is set to 0.0125 meters (1.25 cm).
Returns:
EPMaterial: A window air gap material that can be plugged into the "Honeybee_EnergyPlus Construction" component.
"""
ghenv.Component.Name = "Honeybee_EnergyPlus Window Air Gap"
ghenv.Component.NickName = 'EPWindowAirGap'
ghenv.Component.Message = 'VER 0.0.66\nJUL_07_2020'
ghenv.Component.IconDisplayMode = ghenv.Component.IconDisplayMode.application
ghenv.Component.Category = "HB-Legacy"
ghenv.Component.SubCategory = "06 | Energy | Material | Construction"
#compatibleHBVersion = VER 0.0.56\nFEB_01_2015
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
try: ghenv.Component.AdditionalHelpFromDocStrings = "0"
except: pass
def main(name, thickness):
if name == None: name = "AIRGAP"
gasType = "AIR"
if thickness == None: thickness = .0125
values = [name.upper(), gasType, thickness]
comments = ["Name", "Gas type", "Thickness {m}"]
materialStr = "WindowMaterial:Gas,\n"
for count, (value, comment) in enumerate(zip(values, comments)):
if count!= len(values) - 1:
materialStr += str(value) + ", !-" + str(comment) + "\n"
else:
materialStr += str(value) + "; !-" + str(comment)
return materialStr
EPMaterial = main(_name_, _thickness_)
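# With both inputs left empty the component falls back to the name "AIRGAP" and a
# 0.0125 m gap, so EPMaterial ends up holding an IDF snippet along these lines
# (illustrative formatting):
#
#   WindowMaterial:Gas,
#   AIRGAP, !-Name
#   AIR, !-Gas type
#   0.0125; !-Thickness {m}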
| gpl-3.0 | 6,590,567,128,053,553,000 | 41.704225 | 194 | 0.716689 | false |
jawrainey/sris | settings.py | 1 | 1602 | import os
class Config(object):
"""
    The shared configuration settings for the Flask app.
"""
# Service settings
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__)))
SERVICE_ONTOLOGY = PROJECT_ROOT + '/sris/config/ontology.json'
# Database settings
CLIENT_NAME = 'client'
SERVICE_NAME = 'service'
# These need to be set by you!
ACCOUNT_SID = os.environ.get('ACCOUNT_SID', None)
AUTH_TOKEN = os.environ.get('AUTH_TOKEN', None)
NUM = os.environ.get('NUM', None)
class ProdConfig(Config):
"""
    Set up the production configuration for the Flask app.
Args:
Config (object): Inherit the default shared configuration settings.
"""
DEBUG = False
# These are set server-side for ease-of-use when using PaaS.
SQLALCHEMY_BINDS = {
Config.CLIENT_NAME: os.environ.get('CLIENT_DATABASE_URL', None),
Config.SERVICE_NAME: os.environ.get('SERVICE_DATABASE_URL', None)
}
class DevConfig(Config):
"""
Setup the development configuration for the flask app.
Args:
Config (object): Inherit the default shared configuration settings.
"""
DEBUG = True
# Store these in the root directly.
CLIENT_DB = os.path.join(Config.PROJECT_ROOT, Config.CLIENT_NAME + '.db')
SERVICE_DB = os.path.join(Config.PROJECT_ROOT, Config.SERVICE_NAME + '.db')
# Support for multiple databases (client & service)
SQLALCHEMY_BINDS = {
Config.CLIENT_NAME: 'sqlite:///{0}'.format(CLIENT_DB),
Config.SERVICE_NAME: 'sqlite:///{0}'.format(SERVICE_DB)
}
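# Usage sketch (the Flask app factory below is an assumption for illustration, not part of this module):
#
#     from flask import Flask
#     import settings
#
#     app = Flask(__name__)
#     app.config.from_object(settings.DevConfig)   # or settings.ProdConfig in production
#     # SQLALCHEMY_BINDS now maps the 'client' and 'service' bind keys to their databases.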
| mit | -7,199,926,662,232,172,000 | 29.807692 | 79 | 0.651685 | false |
kostya0shift/SyncToGit | synctogit/Config.py | 1 | 1728 | from __future__ import absolute_import
try:
import configparser
except ImportError:
import ConfigParser as configparser
class _NotSet(object):
pass
class ConfigException(Exception):
pass
class Config:
def __init__(self, conffile):
self.conffile = conffile
self.conf = configparser.ConfigParser()
with open(self.conffile, 'r') as f:
self.conf.readfp(f)
def _get(self, section, key, getter, default=_NotSet()):
if not self.conf.has_section(section):
if isinstance(default, _NotSet):
raise ConfigException('Section %s is missing' % section)
else:
return default
if not self.conf.has_option(section, key):
if isinstance(default, _NotSet):
raise ConfigException('Key %s from section %s is missing' % (key, section))
else:
v = default
else:
v = getter(section, key)
return v
def get_int(self, section, key, default=_NotSet()):
v = self._get(section, key, self.conf.getint, default)
return int(v)
def get_string(self, section, key, default=_NotSet()):
v = self._get(section, key, self.conf.get, default)
return "" + v
def get_boolean(self, section, key, default=_NotSet()):
v = self._get(section, key, self.conf.getboolean, default)
return bool(v)
def _write(self):
with open(self.conffile, 'w') as f:
self.conf.write(f)
def set(self, section, key, value):
self.conf.set(section, key, value)
self._write()
def unset(self, section, key):
self.conf.remove_option(section, key)
self._write()
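# Usage sketch (file name, section and key names below are made up for illustration):
#
#     conf = Config("synctogit.conf")
#     token = conf.get_string("evernote", "token", default="")
#     push = conf.get_boolean("git", "push", default=False)
#     conf.set("internals", "last_sync", "2016-01-01 00:00:00")  # persisted immediately via _write()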
| mit | 587,310,397,621,717,500 | 25.584615 | 91 | 0.58044 | false |
FiveEye/ml-notebook | dlp/ch6_2_pretrained_embedding.py | 1 | 2633 | import os
imdb_dir = '/home/han/code/data/aclImdb'
train_dir = os.path.join(imdb_dir, 'train')
# Processing the labels of the raw IMDB data
labels = []
texts = []
for label_type in ['neg', 'pos']:
dir_name = os.path.join(train_dir, label_type)
for fname in os.listdir(dir_name):
if fname[-4:] == '.txt':
f = open(os.path.join(dir_name, fname))
texts.append(f.read())
f.close()
if label_type == 'neg':
labels.append(0)
else:
labels.append(1)
# Tokenizing the text of the raw IMDB data
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
import numpy as np
maxlen = 100                # cut reviews off after 100 words
training_samples = 10000    # train on 10,000 samples
validation_samples = 10000  # validate on 10,000 samples
max_words = 10000           # consider only the top 10,000 words in the dataset
tokenizer = Tokenizer(num_words=max_words)
tokenizer.fit_on_texts(texts)
sequences = tokenizer.texts_to_sequences(texts)
word_index = tokenizer.word_index
data = pad_sequences(sequences, maxlen=maxlen)
labels = np.asarray(labels)
indices = np.arange(data.shape[0])
np.random.shuffle(indices)
data = data[indices]
labels = labels[indices]
x_train = data[:training_samples]
y_train = labels[:training_samples]
x_val = data[training_samples : training_samples + validation_samples]
y_val = labels[training_samples : training_samples + validation_samples]
# Parsing the GloVe word-embedding file
glove_dir = '/home/han/code/models/glove.6B'
embeddings_index = {}
f = open(os.path.join(glove_dir, 'glove.6B.100d.txt'))
for line in f:
values = line.split()
word = values[0]
coefs = np.asarray(values[1:], dtype='float32')
embeddings_index[word] = coefs
f.close()
# preparing the glove matrix
embedding_dim = 100
embedding_matrix = np.zeros((max_words, embedding_dim))
for word, i in word_index.items():
if i < max_words:
embedding_vector = embeddings_index.get(word)
if embedding_vector is not None:
embedding_matrix[i] = embedding_vector
else:
print("Not found ", word)
# build model
from keras.models import Sequential
from keras.layers import Embedding, Dense, Flatten, LSTM
model = Sequential()
model.add(Embedding(max_words, embedding_dim, input_length=maxlen))
model.add(LSTM(32))
#model.add(Flatten())
#model.add(Dense(32, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.layers[0].set_weights([embedding_matrix])
model.layers[0].trainable = False
print(model.summary())
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['acc'])
history = model.fit(x_train, y_train, epochs=10, batch_size=32, validation_data=(x_val, y_val))
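# Optional: inspect the learning curves. This is a minimal sketch that assumes matplotlib is
# installed (it is not used elsewhere in this script); with metrics=['acc'] the history keys
# are 'acc' and 'val_acc'.
import matplotlib.pyplot as plt
plt.plot(history.history['acc'], label='train acc')
plt.plot(history.history['val_acc'], label='val acc')
plt.xlabel('epoch')
plt.legend()
plt.show()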
| mit | -4,351,887,156,097,140,700 | 23.813725 | 95 | 0.692366 | false |
pikeBishop/OMP_gpxReport | geotiler/tests/cache/test_redis.py | 1 | 3155 | #
# GeoTiler - library to create maps using tiles from a map provider
#
# Copyright (C) 2014 by Artur Wroblewski <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# This file incorporates work covered by the following copyright and
# permission notice (restored, based on setup.py file from
# https://github.com/stamen/modestmaps-py):
#
# Copyright (C) 2007-2013 by Michal Migurski and other contributors
# License: BSD
#
"""
Redis cache unit tests.
"""
from geotiler.cache.redis import RedisCache
import unittest
from unittest import mock
class RedisCacheTestCase(unittest.TestCase):
"""
Redis cache unit tests.
"""
def test_wrapper(self):
"""
Test Redis cache wrapper
"""
client = mock.MagicMock()
downloader = mock.MagicMock()
f = lambda host, path, query: True
cache = RedisCache(client, downloader)
fc = cache(f)
self.assertEqual(f, fc.__wrapped__)
def test_updating_cache(self):
"""
Test Redis cache update
        Check that valid parameters are passed to the underlying function and
that cache got updated
"""
client = mock.MagicMock()
downloader = mock.MagicMock()
data = mock.MagicMock()
downloader.f.return_value = data
cache = RedisCache(client, downloader)
        fc = cache(downloader.f) # function f with caching capability
client.exists.return_value = False
value = fc('host', 'path', 'query')
self.assertEqual(data, value)
downloader.f.assert_called_once_with(
downloader, 'host', 'path', 'query'
)
client.setex.assert_called_once_with(
('host', 'path', 'query'),
data,
cache.timeout
)
def test_cache_use(self):
"""
Test Redis cache use
Verify that value is fetched from Redis cache on cache hit
"""
client = mock.MagicMock()
data = mock.MagicMock() # data returned from cache
downloader = mock.MagicMock()
cache = RedisCache(client, downloader)
fc = cache(downloader.f) # function f with cachinig capability
client.exists.return_value = True # cache hit
client.get.return_value = data # return data from cache
value = fc('host', 'path', 'query')
self.assertEqual(data, value)
self.assertFalse(downloader.f.called)
client.get.assert_called_once_with(('host', 'path', 'query'))
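# To run just this module with the standard library runner (assuming the package is importable):
#
#     python -m unittest geotiler.tests.cache.test_redis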
# vim: sw=4:et:ai
| gpl-2.0 | 7,988,911,465,809,533,000 | 28.485981 | 73 | 0.642472 | false |
seanfisk/buzzword-bingo-server | djangorestframework/status.py | 1 | 1455 | """
Descriptive HTTP status codes, for code readability.
See RFC 2616 - Sec 10: http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
Also see django.core.handlers.wsgi.STATUS_CODE_TEXT
"""
HTTP_100_CONTINUE = 100
HTTP_101_SWITCHING_PROTOCOLS = 101
HTTP_200_OK = 200
HTTP_201_CREATED = 201
HTTP_202_ACCEPTED = 202
HTTP_203_NON_AUTHORITATIVE_INFORMATION = 203
HTTP_204_NO_CONTENT = 204
HTTP_205_RESET_CONTENT = 205
HTTP_206_PARTIAL_CONTENT = 206
HTTP_300_MULTIPLE_CHOICES = 300
HTTP_301_MOVED_PERMANENTLY = 301
HTTP_302_FOUND = 302
HTTP_303_SEE_OTHER = 303
HTTP_304_NOT_MODIFIED = 304
HTTP_305_USE_PROXY = 305
HTTP_306_RESERVED = 306
HTTP_307_TEMPORARY_REDIRECT = 307
HTTP_400_BAD_REQUEST = 400
HTTP_401_UNAUTHORIZED = 401
HTTP_402_PAYMENT_REQUIRED = 402
HTTP_403_FORBIDDEN = 403
HTTP_404_NOT_FOUND = 404
HTTP_405_METHOD_NOT_ALLOWED = 405
HTTP_406_NOT_ACCEPTABLE = 406
HTTP_407_PROXY_AUTHENTICATION_REQUIRED = 407
HTTP_408_REQUEST_TIMEOUT = 408
HTTP_409_CONFLICT = 409
HTTP_410_GONE = 410
HTTP_411_LENGTH_REQUIRED = 411
HTTP_412_PRECONDITION_FAILED = 412
HTTP_413_REQUEST_ENTITY_TOO_LARGE = 413
HTTP_414_REQUEST_URI_TOO_LONG = 414
HTTP_415_UNSUPPORTED_MEDIA_TYPE = 415
HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE = 416
HTTP_417_EXPECTATION_FAILED = 417
HTTP_500_INTERNAL_SERVER_ERROR = 500
HTTP_501_NOT_IMPLEMENTED = 501
HTTP_502_BAD_GATEWAY = 502
HTTP_503_SERVICE_UNAVAILABLE = 503
HTTP_504_GATEWAY_TIMEOUT = 504
HTTP_505_HTTP_VERSION_NOT_SUPPORTED = 505
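# Usage sketch (the response object below is hypothetical; only the constant lookup comes from this module):
#
#     from djangorestframework import status
#
#     if response.status_code == status.HTTP_404_NOT_FOUND:
#         ...  # handle the missing resource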
| bsd-3-clause | 6,804,221,208,206,848,000 | 29.3125 | 77 | 0.768385 | false |
dlebauer/plantcv | lib/plantcv/dev/roi_multi_objects.py | 1 | 3484 | import cv2
import numpy as np
from plantcv import print_image
### Find Objects Partially Inside Region of Interest or Cut Objects to Region of Interest
def roi_objects(img,roi_type,roi_contour, roi_hierarchy,object_contour, obj_hierarchy, device, debug=False):
# img = img to display kept objects
# roi_type = 'cutto' or 'partial' (for partially inside)
  # roi_contour = contour of roi, output from "View and Adjust ROI" function
  # roi_hierarchy = hierarchy of roi, output from "View and Adjust ROI" function
  # object_contour = contours of objects, output from "Identifying Objects" function
  # obj_hierarchy = hierarchy of objects, output from "Identifying Objects" function
# device = device number. Used to count steps in the pipeline
device +=1
if len(np.shape(img))==3:
ix,iy,iz=np.shape(img)
else:
ix,iy=np.shape(img)
size = ix,iy,3
background = np.zeros(size, dtype=np.uint8)
ori_img=np.copy(img)
w_back=background+255
background1 = np.zeros(size, dtype=np.uint8)
background2 = np.zeros(size, dtype=np.uint8)
# Allows user to find all objects that are completely inside or overlapping with ROI
if roi_type=='partial':
for c,cnt in enumerate(object_contour):
length=(len(cnt)-1)
stack=np.vstack(cnt)
test=[]
keep=False
for i in range(0,length):
pptest=cv2.pointPolygonTest(roi_contour[0], (stack[i][0],stack[i][1]), False)
if int(pptest)!=-1:
keep=True
if keep==True:
if obj_hierarchy[0][c][3]>-1:
cv2.drawContours(w_back,object_contour,c, (255,255,255),-1, lineType=8,hierarchy=obj_hierarchy)
else:
cv2.drawContours(w_back,object_contour,c, (0,0,0),-1, lineType=8,hierarchy=obj_hierarchy)
else:
cv2.drawContours(w_back,object_contour,c, (255,255,255),-1, lineType=8,hierarchy=obj_hierarchy)
kept=cv2.cvtColor(w_back, cv2.COLOR_RGB2GRAY )
kept_obj= cv2.bitwise_not(kept)
mask=np.copy(kept_obj)
obj_area=cv2.countNonZero(kept_obj)
kept_cnt,hierarchy=cv2.findContours(kept_obj,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE)
cv2.drawContours(ori_img,kept_cnt,-1, (0,255,0),-1, lineType=8,hierarchy=hierarchy)
cv2.drawContours(ori_img,roi_contour,-1, (255,0,0),5, lineType=8,hierarchy=roi_hierarchy)
  # Allows user to cut objects to the ROI (all objects completely outside ROI will not be kept)
elif roi_type=='cutto':
cv2.drawContours(background1,object_contour,-1, (255,255,255),-1, lineType=8,hierarchy=obj_hierarchy)
roi_points=np.vstack(roi_contour[0])
cv2.fillPoly(background2,[roi_points], (255,255,255))
obj_roi=cv2.multiply(background1,background2)
kept_obj=cv2.cvtColor(obj_roi, cv2.COLOR_RGB2GRAY)
mask=np.copy(kept_obj)
obj_area=cv2.countNonZero(kept_obj)
kept_cnt,hierarchy = cv2.findContours(kept_obj,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE)
cv2.drawContours(w_back,kept_cnt,-1, (0,0,0),-1)
cv2.drawContours(ori_img,kept_cnt,-1, (0,255,0),-1, lineType=8,hierarchy=hierarchy)
cv2.drawContours(ori_img,roi_contour,-1, (255,0,0),5, lineType=8,hierarchy=roi_hierarchy)
else:
fatal_error('ROI Type' + str(roi_type) + ' is not "cutto" or "partial"!')
if debug:
print_image(w_back, (str(device) + '_roi_objects.png'))
print_image(ori_img, (str(device) + '_obj_on_img.png'))
print_image(mask, (str(device) + '_roi_mask.png'))
#print ('Object Area=', obj_area)
return device, kept_cnt, hierarchy, mask, obj_area | gpl-2.0 | -8,480,578,486,346,879,000 | 44.25974 | 108 | 0.683984 | false |
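# Usage sketch (the inputs are outputs of earlier PlantCV steps; variable names are illustrative):
#
#     device, kept_cnt, kept_hierarchy, mask, obj_area = roi_objects(
#         img, 'partial', roi_contour, roi_hierarchy, obj_contours, obj_hierarchy, device, debug=True)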
guition/Nocturn-RS4L | Live/src/__init__.py | 1 | 1246 | #
# Copyright (C) 2009 Guillermo Ruiz Troyano
#
# This file is part of Nocturn Remote Script for Live (Nocturn RS4L).
#
# Nocturn RS4L is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Nocturn RS4L is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Nocturn RS4L. If not, see <http://www.gnu.org/licenses/>.
#
# Contact info:
# Guillermo Ruiz Troyano, [email protected]
#
import Live
from Nocturn import Nocturn
#import MidiRemoteScript
#from apihelper import print_api
def create_instance(c_instance):
#print_api(Live, "Live", "/Users/Guillermo/Desarrollo/Control MIDI/LiveAPI/API/")
#print_api(c_instance, "c_instance", "/Users/Guillermo/Desktop/")
#print_api(MidiRemoteScript, "MidiRemoteScript", "/Users/Guillermo/Desktop/")
return Nocturn(c_instance)
| gpl-3.0 | -8,760,490,697,633,229,000 | 36.757576 | 82 | 0.74077 | false |
jeffFranklin/iam-resttools | resttools/dao_implementation/nws.py | 1 | 2539 | """
Contains NWS DAO implementations.
"""
from resttools.mock.mock_http import MockHTTP
import re
from resttools.dao_implementation.live import get_con_pool, get_live_url
from resttools.dao_implementation.mock import get_mockdata_url
import logging
logger = logging.getLogger(__name__)
class File(object):
"""
    The File DAO implementation returns generally static mock content read
    from local files instead of contacting the live service.
"""
_max_pool_size = 5
def __init__(self, conf):
self._conf = conf
if 'MAX_POOL_SIZE' in conf:
self._max_pool_size = conf['MAX_POOL_SIZE']
def getURL(self, url, headers):
logger.debug('file nws get url: ' + url)
response = get_mockdata_url("nws", self._conf, url, headers)
if response.status == 404:
logger.debug('status 404')
response.data = '{"error": {"code": "7000","message": "No record matched"}}'
return response
def postURL(self, url, headers, body):
logger.debug('file nws post url: ' + url)
response = get_mockdata_url("nws", self._conf, url, headers)
if response.status == 404:
logger.debug('status 404')
response.data = '{"error": {"code": "7000","message": "No record matched"}}'
return response
class Live(object):
"""
    This DAO provides real data. It requires further configuration (conf).
"""
_max_pool_size = 5
def __init__(self, conf):
self._conf = conf
if 'MAX_POOL_SIZE' in conf:
self._max_pool_size = conf['MAX_POOL_SIZE']
pool = None
def getURL(self, url, headers):
if Live.pool is None:
Live.pool = self._get_pool()
return get_live_url(Live.pool, 'GET',
self._conf['HOST'],
url, headers=headers,
service_name='nws')
def postURL(self, url, headers, body):
if Live.pool is None:
Live.pool = self._get_pool()
return get_live_url(Live.pool, 'POST',
self._conf['HOST'],
url, headers=headers, body=body,
service_name='nws')
def _get_pool(self):
return get_con_pool(self._conf['HOST'],
self._conf['KEY_FILE'],
self._conf['CERT_FILE'],
self._conf['CA_FILE'],
max_pool_size=self._max_pool_size, verify_https=False)
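# Configuration sketch (values are placeholders; the keys are the ones read by this module):
#
#     conf = {
#         'HOST': 'ws.example.edu',
#         'KEY_FILE': '/path/to/client.key',
#         'CERT_FILE': '/path/to/client.cert',
#         'CA_FILE': '/path/to/ca.pem',
#         'MAX_POOL_SIZE': 10,
#     }
#     dao = Live(conf)
#     response = dao.getURL('/some/resource', {'Accept': 'application/json'})  # path and headers are made up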
| apache-2.0 | 8,144,495,644,167,200,000 | 29.963415 | 88 | 0.541552 | false |
dpshelio/sunpy | sunpy/net/dataretriever/tests/test_eve.py | 2 | 3757 | import pytest
from sunpy.time import parse_time
from sunpy.time.timerange import TimeRange
from sunpy.net.vso import VSOClient
from sunpy.net.vso.attrs import Time, Instrument, Source, Level
from sunpy.net.dataretriever.client import QueryResponse
import sunpy.net.dataretriever.sources.eve as eve
from sunpy.net.fido_factory import UnifiedResponse
from sunpy.net import Fido
from sunpy.net import attrs as a
LCClient = eve.EVEClient()
@pytest.mark.remote_data
@pytest.mark.parametrize("timerange,url_start,url_end", [
(TimeRange('2012/4/21', '2012/4/21'),
'http://lasp.colorado.edu/eve/data_access/evewebdata/quicklook/L0CS/SpWx/2012/20120421_EVE_L0CS_DIODES_1m.txt',
'http://lasp.colorado.edu/eve/data_access/evewebdata/quicklook/L0CS/SpWx/2012/20120421_EVE_L0CS_DIODES_1m.txt'
),
(TimeRange('2012/5/5', '2012/5/6'),
'http://lasp.colorado.edu/eve/data_access/evewebdata/quicklook/L0CS/SpWx/2012/20120505_EVE_L0CS_DIODES_1m.txt',
'http://lasp.colorado.edu/eve/data_access/evewebdata/quicklook/L0CS/SpWx/2012/20120506_EVE_L0CS_DIODES_1m.txt',
),
(TimeRange('2012/7/7', '2012/7/14'),
'http://lasp.colorado.edu/eve/data_access/evewebdata/quicklook/L0CS/SpWx/2012/20120707_EVE_L0CS_DIODES_1m.txt',
'http://lasp.colorado.edu/eve/data_access/evewebdata/quicklook/L0CS/SpWx/2012/20120714_EVE_L0CS_DIODES_1m.txt',
)
])
def test_get_url_for_time_range(timerange, url_start, url_end):
urls = LCClient._get_url_for_timerange(timerange)
assert isinstance(urls, list)
assert urls[0] == url_start
assert urls[-1] == url_end
def test_can_handle_query():
ans1 = eve.EVEClient._can_handle_query(
Time('2012/8/9', '2012/8/10'), Instrument('eve'), Level(0))
assert ans1 is True
ans2 = eve.EVEClient._can_handle_query(Time('2012/7/7', '2012/7/7'))
assert ans2 is False
ans3 = eve.EVEClient._can_handle_query(
Time('2012/8/9', '2012/8/10'), Instrument('eve'), Source('sdo'))
assert ans3 is False
@pytest.mark.remote_data
def test_query():
qr1 = LCClient.search(Time('2012/8/9', '2012/8/10'), Instrument('eve'))
assert isinstance(qr1, QueryResponse)
assert len(qr1) == 2
assert qr1.time_range().start == parse_time('2012/08/09')
assert qr1.time_range().end == parse_time('2012/08/11') # includes end.
@pytest.mark.remote_data
@pytest.mark.parametrize("time,instrument", [
(Time('2012/11/27', '2012/11/27'), Instrument('eve')),
])
def test_get(time, instrument):
qr1 = LCClient.search(time, instrument)
res = LCClient.fetch(qr1)
assert len(res) == len(qr1)
@pytest.mark.remote_data
@pytest.mark.parametrize(
'query',
[(a.Time('2012/10/4', '2012/10/6') & a.Instrument('eve') & a.Level(0))])
def test_fido(query):
qr = Fido.search(query)
client = qr.get_response(0).client
assert isinstance(qr, UnifiedResponse)
assert isinstance(client, eve.EVEClient)
response = Fido.fetch(qr)
assert len(response) == qr._numfile
@pytest.mark.remote_data
@pytest.mark.parametrize(
'time',
[(a.Time('2012/10/4', '2012/10/6')), (a.Time('2012/11/27', '2012/11/27'))])
def test_levels(time):
"""
Test the correct handling of level 0 / 1.
The default should be level 1 from VSO, level 0 comes from EVEClient.
"""
eve_a = a.Instrument('EVE')
qr = Fido.search(time, eve_a)
client = qr.get_response(0).client
assert isinstance(client, VSOClient)
qr = Fido.search(time, eve_a, a.Level(0))
client = qr.get_response(0).client
assert isinstance(client, eve.EVEClient)
qr = Fido.search(time, eve_a, a.Level(0) | a.Level(1))
clients = {type(a.client) for a in qr.responses}
assert clients.symmetric_difference({VSOClient, eve.EVEClient}) == set()
| bsd-2-clause | 1,231,243,682,288,274,200 | 35.833333 | 116 | 0.684056 | false |
anaruse/chainer | chainer/links/connection/deformable_convolution_2d.py | 1 | 5529 | from chainer.functions import deformable_convolution_2d_sampler
from chainer import initializers
from chainer.initializers import constant
from chainer import link
from chainer.links.connection.convolution_2d import Convolution2D
from chainer import variable
class DeformableConvolution2D(link.Chain):
"""Two-dimensional deformable convolutional layer.
This link wraps the
convolution layer for offset prediction and
the :func:`~chainer.functions.deformable_convolution_2d_sampler`
function.
This also holds the filter weights and bias vectors of two
convolution layers as parameters.
Args:
in_channels (int): Number of channels of input arrays. If ``None``,
parameter initialization will be deferred until the first forward
data pass at which time the size will be determined.
        out_channels (int): Number of channels of output arrays.
ksize (int or pair of ints): Size of filters (a.k.a. kernels).
``ksize=k`` and ``ksize=(k, k)`` are equivalent.
stride (int or pair of ints): Stride of filter applications.
``stride=s`` and ``stride=(s, s)`` are equivalent.
pad (int or pair of ints): Spatial padding width for input arrays.
``pad=p`` and ``pad=(p, p)`` are equivalent.
offset_nobias (bool): If ``True``, then this link does not use the
bias term for the first convolution layer.
offset_initialW (:ref:`initializer <initializer>`): Initializer to
initialize the weight of the first convolution layer.
When it is :class:`numpy.ndarray`, its ``ndim`` should be 4.
offset_initial_bias (:ref:`initializer <initializer>`): Initializer to
initialize the bias of the first convolution layer.
If ``None``, the bias will be initialized to
zero. When it is :class:`numpy.ndarray`, its ``ndim`` should be 1.
deform_nobias (bool): If ``True``, then this link does not use the
bias term for the second convolution layer.
deform_initialW (:ref:`initializer <initializer>`): Initializer to
initialize the weight for the second convolution layer.
When it is :class:`numpy.ndarray`,
its ``ndim`` should be 4.
deform_initial_bias (:ref:`initializer <initializer>`): Initializer to
initialize the bias for the second convolution layer.
If ``None``, the bias will be initialized to
zero. When it is :class:`numpy.ndarray`, its ``ndim`` should be 1.
.. seealso::
See :func:`chainer.functions.deformable_convolution_2d_sampler`.
"""
def __init__(self, in_channels, out_channels, ksize, stride=1, pad=0,
offset_nobias=False, offset_initialW=None,
offset_initial_bias=None,
deform_nobias=False,
deform_initialW=None, deform_initial_bias=None):
super(DeformableConvolution2D, self).__init__()
kh, kw = _pair(ksize)
with self.init_scope():
self.offset_conv = Convolution2D(
in_channels, 2 * kh * kw, ksize, stride, pad,
offset_nobias, offset_initialW, offset_initial_bias)
self.deform_conv = DeformableConvolution2DSampler(
in_channels, out_channels, ksize, stride, pad,
deform_nobias, deform_initialW, deform_initial_bias)
def __call__(self, x):
"""Applies the deformable convolution.
Args:
x (~chainer.Variable): Input image.
Returns:
~chainer.Variable: Output of the deformable convolution.
"""
offset = self.offset_conv(x)
return self.deform_conv(x, offset)
class DeformableConvolution2DSampler(link.Link):
"""Apply a two-dimensional deformable convolution layer using offsets"""
def __init__(self, in_channels, out_channels, ksize, stride=1, pad=0,
nobias=False, initialW=None, initial_bias=None):
super(DeformableConvolution2DSampler, self).__init__()
self.ksize = ksize
self.stride = _pair(stride)
self.pad = _pair(pad)
self.out_channels = out_channels
self.initialW = initialW
if initialW is None:
initialW = constant.Zero()
with self.init_scope():
W_initializer = initializers._get_initializer(initialW)
self.W = variable.Parameter(W_initializer)
if nobias:
self.b = None
else:
if initial_bias is None:
initial_bias = initializers.Constant(0)
bias_initializer = initializers._get_initializer(initial_bias)
self.b = variable.Parameter(bias_initializer)
if in_channels is not None:
self._initialize_params(in_channels)
def _initialize_params(self, in_channels):
kh, kw = _pair(self.ksize)
W_shape = (self.out_channels, in_channels, kh, kw)
self.W.initialize(W_shape)
if self.b is not None:
self.b.initialize(self.out_channels)
def __call__(self, x, offset):
if self.W.data is None:
self._initialize_params(x.shape[1])
return deformable_convolution_2d_sampler(
x, offset, self.W, self.b, self.stride, self.pad)
def _pair(x):
if hasattr(x, '__getitem__'):
return x
return x, x
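# Usage sketch (array shapes are illustrative assumptions):
#
#     import numpy as np
#     conv = DeformableConvolution2D(3, 16, ksize=3, stride=1, pad=1)
#     x = np.random.rand(2, 3, 32, 32).astype(np.float32)
#     y = conv(x)                  # offsets are predicted internally by offset_conv
#     # y.shape == (2, 16, 32, 32) for this ksize/stride/pad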
| mit | -8,680,863,434,744,701,000 | 40.261194 | 78 | 0.621993 | false |
agermanidis/Pattern | graph/__init__.py | 1 | 46675 | #### PATTERN | GRAPH #################################################################################
# Copyright (c) 2010 University of Antwerp, Belgium
# Author: Tom De Smedt <[email protected]>
# License: BSD (see LICENSE.txt for details).
# http://www.clips.ua.ac.be/pages/pattern
######################################################################################################
# This module can benefit greatly from loading psyco.
from math import sqrt, pow
from math import sin, cos, atan2, degrees, radians, pi
from random import random
from heapq import heappush, heappop
from warnings import warn
from codecs import open
# float("inf") doesn't work on windows.
INFINITE = 1e20
# This module is standalone, line(), ellipse() and Text.draw()
# must be either implemented or patched:
def line(x1, y1, x2, y2, stroke=(0,0,0,1), strokewidth=1):
pass
def ellipse(x, y, width, height, fill=(0,0,0,1), stroke=None, strokewidth=1):
pass
class Text:
def __init__(self, string, **kwargs):
self.string = string
self.__dict__.update(kwargs)
def copy(self):
k = self.__dict__.copy()
k.pop("string")
return Text(self.string, **k)
def draw(self):
pass
class Vector(object):
def __init__(self, x=0, y=0):
self.x = x
self.y = y
class Base(object):
pass
#--- NODE --------------------------------------------------------------------------------------------
def _copy(x):
# A color can be represented as a tuple or as a nodebox.graphics.Color object,
# in which case it needs to be copied by invoking Color.copy().
return hasattr(x, "copy") and x.copy() or x
class Node(object):
def __init__(self, id="", radius=5, **kwargs):
""" A node with a unique id in the graph.
Node.id is drawn as a text label, unless optional parameter text=False.
Optional parameters include: fill, stroke, strokewidth, text, font, fontsize, fontweight.
"""
self.graph = None
self.links = Links()
self.id = id
self._x = 0 # Calculated by Graph.layout.update().
self._y = 0 # Calculated by Graph.layout.update().
self.force = Vector(0,0)
self.radius = radius
self.fill = kwargs.get("fill", None)
self.stroke = kwargs.get("stroke", (0,0,0,1))
self.strokewidth = kwargs.get("strokewidth", 1)
self.text = kwargs.get("text", True) and \
Text(unicode(id),
width = 85,
fill = kwargs.pop("text", (0,0,0,1)),
fontsize = kwargs.pop("fontsize", 11), **kwargs) or None
self._weight = None # Calculated by Graph.eigenvector_centrality().
self._centrality = None # Calculated by Graph.betweenness_centrality().
@property
def _distance(self):
# Graph.distance controls the (x,y) spacing between nodes.
return self.graph and float(self.graph.distance) or 1.0
def _get_x(self):
return self._x * self._distance
def _get_y(self):
return self._y * self._distance
def _set_x(self, v):
self._x = v / self._distance
def _set_y(self, v):
self._y = v / self._distance
x = property(_get_x, _set_x)
y = property(_get_y, _set_y)
@property
def edges(self):
return self.graph is not None \
and [e for e in self.graph.edges if self.id in (e.node1, e.node2)] \
or []
@property
def weight(self):
if self.graph and self._weight is None:
self.graph.eigenvector_centrality()
return self._weight
@property
def centrality(self):
if self.graph and self._centrality is None:
self.graph.betweenness_centrality()
return self._centrality
def flatten(self, depth=1, _visited=None):
""" Recursively lists the node and nodes linked to it.
Depth 0 returns a list with the node.
Depth 1 returns a list with the node and all the directly linked nodes.
Depth 2 includes the linked nodes' links, and so on.
"""
_visited = _visited or {}
_visited[self.id] = (self, depth)
if depth >= 1:
for n in self.links:
if n.id not in _visited or _visited[n.id][1] < depth-1:
n.flatten(depth-1, _visited)
return [n for n,d in _visited.values()] # Fast, but not order-preserving.
def draw(self, weighted=False):
""" Draws the node as a circle with the given radius, fill, stroke and strokewidth.
Draws the node centrality as a shadow effect when weighted=True.
Draws the node text label.
Override this method in a subclass for custom drawing.
"""
# Draw the node weight as a shadow (based on node betweenness centrality).
if weighted is not False and self.centrality > (weighted==True and -1 or weighted):
w = self.centrality * 35
ellipse(
self.x,
self.y,
self.radius*2 + w,
self.radius*2 + w, fill=(0,0,0,0.2), stroke=None)
# Draw the node.
ellipse(
self.x,
self.y,
self.radius*2,
self.radius*2, fill=self.fill, stroke=self.stroke, strokewidth=self.strokewidth)
# Draw the node text label.
if self.text:
self.text.draw(
self.x + self.radius,
self.y + self.radius)
def contains(self, x, y):
return abs(self.x - x) < self.radius*2 and \
abs(self.y - y) < self.radius*2
def __repr__(self):
return "%s(id=%s)" % (self.__class__.__name__, repr(self.id))
def __eq__(self, node):
return isinstance(node, Node) and self.id == node.id
def __ne__(self, node):
return not self.__eq__(node)
def copy(self):
""" Returns a shallow copy of the node (i.e. linked nodes are not copied).
"""
n = Node(self.id, self.radius,
text = None,
fill = _copy(self.fill),
stroke = _copy(self.stroke),
strokewidth = self.strokewidth)
if self.text:
n.text = self.text.copy()
n.__class__ = self.__class__
return n
class Links(list):
def __init__(self):
""" A list in which each node has an associated edge.
The edge() method returns the edge for a given node id.
"""
self.edges = dict()
def append(self, node, edge=None):
if node.id not in self.edges:
list.append(self, node)
self.edges[node.id] = edge
def remove(self, node):
list.remove(self, node)
self.edges.pop(node.id, None)
def edge(self, node):
return self.edges.get(isinstance(node, Node) and node.id or node)
#--- EDGE --------------------------------------------------------------------------------------------
coordinates = lambda x, y, d, a: (x + d*cos(radians(a)), y + d*sin(radians(a)))
class Edge(object):
def __init__(self, node1, node2, weight=0.0, length=1.0, type=None, stroke=(0,0,0,1), strokewidth=1):
""" A connection between two nodes.
Its weight indicates the importance (not the cost) of the connection.
Its type is useful in a semantic network (e.g. "is-a", "is-part-of", ...)
"""
self.node1 = node1
self.node2 = node2
self.weight = weight
self.length = length
self.type = type
self.stroke = stroke
self.strokewidth = strokewidth
def draw(self, weighted=False, directed=False):
""" Draws the edge as a line with the given stroke and strokewidth (increased with Edge.weight).
Override this method in a subclass for custom drawing.
"""
w = weighted and self.weight or 0
line(
self.node1.x,
self.node1.y,
self.node2.x,
self.node2.y, stroke=self.stroke, strokewidth=self.strokewidth+w)
if directed:
self.draw_arrow(stroke=self.stroke, strokewidth=self.strokewidth+w)
def draw_arrow(self, **kwargs):
""" Draws the direction of the edge as an arrow on the rim of the receiving node.
"""
x0, y0 = self.node1.x, self.node1.y
x1, y1 = self.node2.x, self.node2.y
# Find the edge's angle based on node1 and node2 position.
a = degrees(atan2(y1-y0, x1-x0))
        # The arrow points to node2's rim instead of its center.
r = self.node2.radius
d = sqrt(pow(x1-x0, 2) + pow(y1-y0, 2))
x01, y01 = coordinates(x0, y0, d-r-1, a)
# Find the two other arrow corners under the given angle.
r = max(kwargs.get("strokewidth", 1) * 3, 6)
dx1, dy1 = coordinates(x01, y01, -r, a-20)
dx2, dy2 = coordinates(x01, y01, -r, a+20)
line(x01, y01, dx1, dy1, **kwargs)
line(x01, y01, dx2, dy2, **kwargs)
line(dx1, dy1, dx2, dy2, **kwargs)
def __repr__(self):
return "%s(id1=%s, id2=%s)" % (self.__class__.__name__, repr(self.node1.id), repr(self.node2.id))
def copy(self, node1, node2):
e = Edge(node1, node2, self.weight, self.length, self.type, _copy(self.stroke), self.strokewidth)
e.__class__ = self.__class__
return e
#--- GRAPH -------------------------------------------------------------------------------------------
def unique(list):
u, b = [], {}
for item in list:
if item not in b: u.append(item); b[item]=1
return u
# Graph layouts:
SPRING = "spring"
# Graph node sort order:
WEIGHT, CENTRALITY = "weight", "centrality"
ALL = "all"
class Graph(dict):
def __init__(self, layout=SPRING, distance=10.0):
""" A network of nodes connected by edges that can be drawn with a given layout.
"""
self.nodes = []
self.edges = []
self.root = None
self.distance = distance
self.layout = layout==SPRING and GraphSpringLayout(self) or GraphLayout(self)
def append(self, type, *args, **kwargs):
""" Appends a Node or Edge to the graph: Graph.append(Node, id="rabbit").
"""
if type is Node:
return self.add_node(*args, **kwargs)
if type is Edge:
return self.add_edge(*args, **kwargs)
def add_node(self, id, *args, **kwargs):
""" Appends a new Node to the graph.
"""
n = isinstance(id, Node) and id or self.get(id) or Node(id, *args, **kwargs)
if n.id not in self:
self.nodes.append(n)
self[n.id] = n; n.graph = self
self.root = kwargs.get("root", False) and n or self.root
return n
def add_edge(self, id1, id2, *args, **kwargs):
""" Appends a new Edge to the graph.
"""
# Create nodes that are not yet part of the graph.
n1 = self.add_node(id1)
n2 = self.add_node(id2)
# Creates an Edge instance.
        # If an edge (in the same direction) already exists, returns that edge instead.
e1 = n1.links.edge(n2)
if e1 and e1.node1 == n1 and e1.node2 == n2:
return e1
e2 = Edge(n1, n2, *args, **kwargs)
self.edges.append(e2)
# Synchronizes Node.links:
# A.links.edge(B) yields edge A->B
# B.links.edge(A) yields edge B->A
n1.links.append(n2, edge=e2)
n2.links.append(n1, edge=e1 or e2)
return e2
def remove(self, x):
""" Removes the given Node (and all its edges) or Edge from the graph.
Note: removing Edge a->b does not remove Edge b->a.
"""
if isinstance(x, Node) and x.id in self:
self.pop(x.id)
self.nodes.remove(x); x.graph = None
# Remove all edges involving the given node.
for e in list(self.edges):
if x in (e.node1, e.node2):
if x in e.node1.links: e.node1.links.remove(x)
if x in e.node2.links: e.node2.links.remove(x)
self.edges.remove(e)
if isinstance(x, Edge):
self.edges.remove(x)
def node(self, id):
""" Returns the node in the graph with the given id.
"""
return self.get(id)
def edge(self, id1, id2):
""" Returns the edge between the nodes with given id1 and id2.
"""
return id1 in self and id2 in self and self[id1].links.edge(id2) or None
def shortest_path(self, node1, node2, heuristic=None, directed=False):
""" Returns a list of nodes connecting the two nodes.
"""
try:
p = dijkstra_shortest_path(self, node1.id, node2.id, heuristic, directed)
p = [self[id] for id in p]
return p
except IndexError:
return None
def eigenvector_centrality(self, normalized=True, reversed=True, rating={}, iterations=100, tolerance=0.0001):
""" Calculates eigenvector centrality and returns a node => weight dictionary.
Node.weight is updated in the process.
Node.weight is higher for nodes with a lot of (indirect) incoming traffic.
"""
ec = eigenvector_centrality(self, normalized, reversed, rating, iterations, tolerance)
ec = dict([(self[id], w) for id, w in ec.items()])
for n, w in ec.items():
n._weight = w
return ec
def betweenness_centrality(self, normalized=True, directed=False):
""" Calculates betweenness centrality and returns a node => weight dictionary.
Node.centrality is updated in the process.
Node.centrality is higher for nodes with a lot of passing traffic.
"""
bc = brandes_betweenness_centrality(self, normalized, directed)
bc = dict([(self[id], w) for id, w in bc.items()])
for n, w in bc.items():
n._centrality = w
return bc
def sorted(self, order=WEIGHT, threshold=0.0):
""" Returns a list of nodes sorted by WEIGHT or CENTRALITY.
Nodes with a lot of traffic will be at the start of the list.
"""
o = lambda node: getattr(node, order)
nodes = [(o(n), n) for n in self.nodes if o(n) > threshold]
nodes = reversed(sorted(nodes))
return [n for w, n in nodes]
def prune(self, depth=0):
""" Removes all nodes with less or equal links than depth.
"""
for n in [n for n in self.nodes if len(n.links) <= depth]:
self.remove(n)
def fringe(self, depth=0):
""" For depth=0, returns the list of leaf nodes (nodes with only one connection).
For depth=1, returns the list of leaf nodes and their connected nodes, and so on.
"""
u = []; [u.extend(n.flatten(depth)) for n in self.nodes if len(n.links) == 1]
return unique(u)
@property
def density(self):
# Number of edges vs. maximum number of possible edges.
# E.g. <0.35 => sparse, >0.65 => dense, 1.0 => complete.
return 2.0*len(self.edges) / (len(self.nodes) * (len(self.nodes)-1))
def split(self):
return partition(self)
def update(self, iterations=10, **kwargs):
""" Graph.layout.update() is called the given number of iterations.
"""
for i in range(iterations):
self.layout.update(**kwargs)
def draw(self, weighted=False, directed=False):
""" Draws all nodes and edges.
"""
for e in self.edges:
e.draw(weighted, directed)
for n in reversed(self.nodes): # New nodes (with Node._weight=None) first.
n.draw(weighted)
def node_at(self, x, y):
""" Returns the node at (x,y) or None.
"""
for n in self.nodes:
if n.contains(x, y): return n
def copy(self, nodes=ALL):
""" Returns a copy of the graph with the given list of nodes (and connecting edges).
The layout will be reset.
"""
g = Graph(layout=None, distance=self.distance)
g.layout = self.layout.copy(graph=g)
for n in (nodes==ALL and self.nodes or nodes):
g.append(n.copy(), root=self.root==n)
for e in self.edges:
if e.node1.id in g and e.node2.id in g:
g.append(e.copy(
node1=g[e.node1.id],
node2=g[e.node2.id]))
return g
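# Usage sketch (node ids and weights are made up for illustration):
#
#     g = Graph()
#     g.add_node("cat")
#     g.add_node("tail")
#     g.add_edge("cat", "tail", weight=0.5, type="is-part-of")
#     g.update(iterations=100)                      # settle the spring layout
#     path = g.shortest_path(g["cat"], g["tail"])   # => [Node(id='cat'), Node(id='tail')]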
#--- GRAPH LAYOUT ------------------------------------------------------------------------------------
# Graph drawing or graph layout, as a branch of graph theory,
# applies topology and geometry to derive two-dimensional representations of graphs.
class GraphLayout:
def __init__(self, graph):
""" Calculates node positions iteratively when GraphLayout.update() is called.
"""
self.graph = graph
self.iterations = 0
def update(self):
self.iterations += 1
def reset(self):
self.iterations = 0
for n in self.graph.nodes:
n._x = 0
n._y = 0
n.force = Vector(0,0)
@property
def bounds(self):
""" Returns a (x, y, width, height)-tuple of the approximate layout dimensions.
"""
x0, y0 = +INFINITE, +INFINITE
x1, y1 = -INFINITE, -INFINITE
for n in self.graph.nodes:
if (n.x < x0): x0 = n.x
if (n.y < y0): y0 = n.y
if (n.x > x1): x1 = n.x
if (n.y > y1): y1 = n.y
return (x0, y0, x1-x0, y1-y0)
def copy(self, graph):
return GraphLayout(self, graph)
class GraphSpringLayout(GraphLayout):
def __init__(self, graph):
""" A force-based layout in which edges are regarded as springs.
The forces are applied to the nodes, pulling them closer or pushing them apart.
"""
# Based on: http://snipplr.com/view/1950/graph-javascript-framework-version-001/
GraphLayout.__init__(self, graph)
self.k = 4.0 # Force constant.
self.force = 0.01 # Force multiplier.
self.repulsion = 15 # Maximum repulsive force radius.
def _distance(self, node1, node2):
# Yields a tuple with distances (dx, dy, d, d**2).
# Ensures that the distance is never zero (which deadlocks the animation).
dx = node2._x - node1._x
dy = node2._y - node1._y
d2 = dx*dx + dy*dy
if d2 < 0.01:
dx = random() * 0.1 + 0.1
dy = random() * 0.1 + 0.1
d2 = dx*dx + dy*dy
return dx, dy, sqrt(d2), d2
def _repulse(self, node1, node2):
# Updates Node.force with the repulsive force.
dx, dy, d, d2 = self._distance(node1, node2)
if d < self.repulsion:
f = self.k**2 / d2
node2.force.x += f * dx
node2.force.y += f * dy
node1.force.x -= f * dx
node1.force.y -= f * dy
def _attract(self, node1, node2, weight=0, length=1.0):
# Updates Node.force with the attractive edge force.
dx, dy, d, d2 = self._distance(node1, node2)
d = min(d, self.repulsion)
f = (d2 - self.k**2) / self.k * length
f *= weight * 0.5 + 1
f /= d
node2.force.x -= f * dx
node2.force.y -= f * dy
node1.force.x += f * dx
node1.force.y += f * dy
def update(self, weight=10.0, limit=0.5):
""" Updates the position of nodes in the graph.
The weight parameter determines the impact of edge weight.
The limit parameter determines the maximum movement each update().
"""
GraphLayout.update(self)
# Forces on all nodes due to node-node repulsions.
for i, n1 in enumerate(self.graph.nodes):
for j, n2 in enumerate(self.graph.nodes[i+1:]):
self._repulse(n1, n2)
# Forces on nodes due to edge attractions.
for e in self.graph.edges:
self._attract(e.node1, e.node2, weight*e.weight, 1.0/(e.length or 0.01))
# Move nodes by given force.
for n in self.graph.nodes:
n._x += max(-limit, min(self.force * n.force.x, limit))
n._y += max(-limit, min(self.force * n.force.y, limit))
n.force.x = 0
n.force.y = 0
def copy(self, graph):
g = GraphSpringLayout(graph)
g.k, g.force, g.repulsion = self.k, self.force, self.repulsion
return g
#--- GRAPH THEORY ------------------------------------------------------------------------------------
def depth_first_search(node, visit=lambda node: False, traversable=lambda node, edge: True, _visited=None):
""" Visits all the nodes connected to the given root node, depth-first.
The visit function is called on each node.
Recursion will stop if it returns True, and subsequently dfs() will return True.
The traversable function takes the current node and edge,
and returns True if we are allowed to follow this connection to the next node.
        For example, the traversable for directed edges is as follows:
lambda node, edge: node == edge.node1
"""
stop = visit(node)
_visited = _visited or {}
_visited[node.id] = True
for n in node.links:
if stop: return True
if not traversable(node, node.links.edge(n)): continue
if not n.id in _visited:
stop = depth_first_search(n, visit, traversable, _visited)
return stop
dfs = depth_first_search
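# Example sketch (root_node is any Node of a Graph; Python 2 print statement to match this module):
#
#     def visit(node):
#         print node.id
#         return False                               # False = keep traversing
#
#     depth_first_search(root_node, visit, traversable=lambda n, e: n == e.node1)  # directed edges only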
def breadth_first_search(node, visit=lambda node: False, traversable=lambda node, edge: True):
""" Visits all the nodes connected to the given root node, breadth-first.
"""
q = [node]
_visited = {}
while q:
node = q.pop(0)
if not node.id in _visited:
if visit(node):
return True
q.extend((n for n in node.links if traversable(node, node.links.edge(n))))
_visited[node.id] = True
return False
bfs = breadth_first_search
def adjacency(graph, directed=False, reversed=False, stochastic=False, heuristic=None):
""" Returns a dictionary indexed by node id1's,
in which each value is a dictionary of connected node id2's linking to the edge weight.
If directed=True, edges go from id1 to id2, but not the other way.
If stochastic=True, all the weights for the neighbors of a given node sum to 1.
A heuristic function can be given that takes two node id's and returns
an additional cost for movement between the two nodes.
"""
map = {}
for n in graph.nodes:
map[n.id] = {}
for e in graph.edges:
id1, id2 = not reversed and (e.node1.id, e.node2.id) or (e.node2.id, e.node1.id)
map[id1][id2] = 1.0 - 0.5 * e.weight
if heuristic:
map[id1][id2] += heuristic(id1, id2)
if not directed:
map[id2][id1] = map[id1][id2]
if stochastic:
for id1 in map:
n = sum(map[id1].values())
for id2 in map[id1]:
map[id1][id2] /= n
return map
def dijkstra_shortest_path(graph, id1, id2, heuristic=None, directed=False):
""" Dijkstra algorithm for finding shortest paths.
        Raises an IndexError if the two nodes are not connected.
"""
# Based on: Connelly Barnes, http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/119466
def flatten(list):
# Flattens a linked list of the form [0,[1,[2,[]]]]
while len(list) > 0:
yield list[0]; list=list[1]
G = adjacency(graph, directed=directed, heuristic=heuristic)
q = [(0, id1, ())] # Heap of (cost, path_head, path_rest).
visited = set() # Visited nodes.
while True:
(cost1, n1, path) = heappop(q)
if n1 not in visited:
visited.add(n1)
if n1 == id2:
return list(flatten(path))[::-1] + [n1]
path = (n1, path)
for (n2, cost2) in G[n1].iteritems():
if n2 not in visited:
heappush(q, (cost1 + cost2, n2, path))
def brandes_betweenness_centrality(graph, normalized=True, directed=False):
""" Betweenness centrality for nodes in the graph.
        Betweenness centrality is a measure of the number of shortest paths that pass through a node.
Nodes in high-density areas will get a good score.
"""
# Ulrik Brandes, A Faster Algorithm for Betweenness Centrality,
# Journal of Mathematical Sociology 25(2):163-177, 2001,
# http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
# Based on: Dijkstra's algorithm for shortest paths modified from Eppstein.
# Based on: NetworkX 1.0.1: Aric Hagberg, Dan Schult and Pieter Swart.
# http://python-networkx.sourcearchive.com/documentation/1.0.1/centrality_8py-source.html
G = graph.keys()
W = adjacency(graph, directed=directed)
betweenness = dict.fromkeys(G, 0.0) # b[v]=0 for v in G
for s in G:
S = []
P = {}
for v in G: P[v] = []
sigma = dict.fromkeys(G, 0) # sigma[v]=0 for v in G
D = {}
sigma[s] = 1
seen = {s: 0}
Q = [] # use Q as heap with (distance, node id) tuples
heappush(Q, (0, s, s))
while Q:
(dist, pred, v) = heappop(Q)
if v in D: continue # already searched this node
sigma[v] = sigma[v] + sigma[pred] # count paths
S.append(v)
D[v] = dist
for w in W[v].keys():
vw_dist = D[v] + W[v][w]
if w not in D and (w not in seen or vw_dist < seen[w]):
seen[w] = vw_dist
heappush(Q, (vw_dist, v, w))
sigma[w] = 0
P[w] = [v]
elif vw_dist == seen[w]: # handle equal paths
sigma[w] = sigma[w] + sigma[v]
P[w].append(v)
delta = dict.fromkeys(G,0)
while S:
w = S.pop()
for v in P[w]:
delta[v] = delta[v] + (float(sigma[v]) / float(sigma[w])) * (1.0 + delta[w])
if w != s:
betweenness[w] = betweenness[w] + delta[w]
if normalized:
# Normalize between 0.0 and 1.0.
m = max(betweenness.values())
if m == 0: m = 1
else:
m = 1
betweenness = dict([(id, w/m) for id, w in betweenness.iteritems()])
return betweenness
def eigenvector_centrality(graph, normalized=True, reversed=True, rating={}, iterations=100, tolerance=0.0001):
""" Eigenvector centrality for nodes in the graph (cfr. Google's PageRank).
Eigenvector centrality is a measure of the importance of a node in a directed network.
It rewards nodes with a high potential of (indirectly) connecting to high-scoring nodes.
Nodes with no incoming connections have a score of zero.
If you want to measure outgoing connections, reversed should be False.
"""
# Based on: NetworkX, Aric Hagberg ([email protected])
# http://python-networkx.sourcearchive.com/documentation/1.0.1/centrality_8py-source.html
def normalize(vector):
w = 1.0 / (sum(vector.values()) or 1)
for node in vector:
vector[node] *= w
return vector
G = adjacency(graph, directed=True, reversed=reversed)
v = normalize(dict([(n, random()) for n in graph])) # Node ID => weight vector.
# Eigenvector calculation using the power iteration method: y = Ax.
# It has no guarantee of convergence.
for i in range(iterations):
v0 = v
v = dict.fromkeys(v0.keys(), 0)
for n1 in v:
for n2 in G[n1]:
v[n1] += 0.01 + v0[n2] * G[n1][n2] * rating.get(n1, 1)
normalize(v)
e = sum([abs(v[n]-v0[n]) for n in v]) # Check for convergence.
if e < len(G) * tolerance:
if normalized:
# Normalize between 0.0 and 1.0.
m = max(v.values()) or 1
v = dict([(id, w/m) for id, w in v.items()])
return v
warn("node weight is 0 because eigenvector_centrality() did not converge.", Warning)
return dict([(n, 0) for n in G])
# a | b => all elements from a and all the elements from b.
# a & b => elements that appear in a as well as in b.
# a - b => elements that appear in a but not in b.
def union(a, b):
return [x for x in a] + [x for x in b if x not in a]
def intersection(a, b):
return [x for x in a if x in b]
def difference(a, b):
return [x for x in a if x not in b]
def partition(graph):
""" Returns a list of unconnected subgraphs.
"""
# Creates clusters of nodes and directly connected nodes.
# Iteratively merges two clusters if they overlap.
# Optimized: about 2x faster than original implementation.
g = []
for n in graph.nodes:
g.append(dict.fromkeys([n.id for n in n.flatten()], True))
for i in reversed(range(len(g))):
for j in reversed(range(i+1, len(g))):
if g[i] and g[j] and len(intersection(g[i], g[j])) > 0:
g[i] = union(g[i], g[j])
g[j] = []
g = [graph.copy(nodes=[graph[id] for id in n]) for n in g if n]
g.sort(lambda a, b: len(b) - len(a))
return g
#--- GRAPH MAINTENANCE -------------------------------------------------------------------------------
# Utility commands for safe linking and unlinking of nodes,
# with respect for the surrounding nodes.
def unlink(graph, node1, node2=None):
""" Removes the edges between node1 and node2.
If only node1 is given, removes all edges to and from it.
This does not remove node1 from the graph.
"""
for e in list(graph.edges):
if node1 in (e.node1, e.node2) and node2 in (e.node1, e.node2, None):
graph.edges.remove(e)
try:
node1.links.remove(node2)
node2.links.remove(node1)
except: # 'NoneType' object has no attribute 'links'
pass
def redirect(graph, node1, node2):
""" Connects all of node1's edges to node2 and unlinks node1.
"""
for e in graph.edges:
        if node1 in (e.node1, e.node2):
if e.node1 == node1 and e.node2 != node2:
graph.append(e.copy(node2, e.node2))
if e.node2 == node1 and e.node1 != node2:
graph.append(e.copy(e.node1, node2))
unlink(graph, node1)
def cut(graph, node):
""" Unlinks the given node, but keeps edges intact by connecting the surrounding nodes.
If A, B, C, D are nodes and A->B, B->C, B->D, if we then cut B: A->C, A->D.
"""
for e in graph.edges:
if node in (e.node1, e.node2):
for n in node.links:
if e.node1 == node and e.node2 != n:
graph.append(e.copy(n, e.node2))
if e.node2 == node and e.node1 != n:
graph.append(e.copy(e.node1, n))
unlink(graph, node)
def insert(graph, node, a, b):
""" Inserts the given node between node a and node b.
If A, B, C are nodes and A->B, if we then insert C: A->C, C->B.
"""
for e in graph.edges:
for (n1,n2) in ((a,b), (b,a)):
if e.node1 == n1 and e.node2 == n2:
graph.append(e.copy(node, n2))
if e.node1 == n2 and e.node2 == n1:
graph.append(e.copy(n2, node))
unlink(graph, a, b)
#--- HTML CANVAS RENDERER ----------------------------------------------------------------------------
import os, shutil, glob
try:
MODULE = os.path.dirname(__file__)
except:
MODULE = ""
DEFAULT, INLINE = "default", "inline"
HTML, CANVAS, STYLE, SCRIPT, DATA = "html", "canvas", "style", "script", "data"
class HTMLCanvasRenderer:
def __init__(self, graph, **kwargs):
self.graph = graph
self._source = \
"<!DOCTYPE html>\n" \
"<html>\n" \
"<head>\n" \
"\t<title>%s</title>\n" \
"\t<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\" />\n" \
"\t%s\n" \
"\t<!--[if lte IE 8]><script type=\"text/javascript\" src=\"%sexcanvas.js\"></script><![endif]-->\n" \
"\t<script type=\"text/javascript\" src=\"%sgraph.js\"></script>\n" \
"\t%s\n" \
"</head>\n" \
"<body onload=\"javascript:init_%s();\">\n" \
"\t<div id=\"%s\" style=\"width:%spx; height:%spx;\">\n" \
"\t\t<canvas id=\"%s\" width=\"%s\" height=\"%s\">\n" \
"\t\t</canvas>\n" \
"\t</div>\n" \
"\t<p>Generated with " \
"<a href=\"http://www.clips.ua.ac.be/pages/pattern\">Pattern</a>.</p>\n" \
"</body>\n" \
"</html>"
# HTML
self.title = "Graph" # <title>Graph</title>
self.javascript = "js/" # Path to excanvas.js + graph.js.
self.stylesheet = INLINE # Either None, INLINE, DEFAULT (screen.css) or a custom path.
self.id = "graph" # <div id="graph">
self.ctx = "_ctx" # <canvas id="_ctx" width=700 height=500>
self.width = 700 # Canvas width in pixels.
self.height = 500 # Canvas height in pixels.
# Javascript:Graph
self.frames = 500 # Number of frames of animation.
self.fps = 20 # Frames per second.
self.ipf = 2 # Iterations per frame.
self.weighted = False # Indicate betweenness centrality as a shadow?
self.directed = False # Indicate edge direction with an arrow?
self.prune = None # None or int, calls Graph.prune() in Javascript.
self.pack = True # Shortens leaf edges, adds eigenvector weight to node radius.
# Javascript:GraphLayout
self.distance = 10 # Node spacing.
self.k = 4.0 # Force constant.
self.force = 0.01 # Force dampener.
self.repulsion = 50 # Repulsive force radius.
# Data
self.weight = [WEIGHT, CENTRALITY] # Calculate these in Python, or True (in Javascript).
self.href = {} # Dictionary of Node.id => URL.
self.css = {} # Dictionary of Node.id => CSS classname.
# Default options.
# If a Node or Edge has one of these settings,
# it is not passed to Javascript to save bandwidth.
self.default = {
"radius": 5,
"fill": None,
"stroke": (0,0,0,1),
"strokewidth": 1,
"text": (0,0,0,1),
"fontsize": 11,
}
def _escape(self, s):
return s.replace("\"", "\\\"")
def _rgba(self, clr):
# Color or tuple to a CSS "rgba(255,255,255,1.0)" string.
return "\"rgba(%s,%s,%s,%.2f)\"" % (int(clr[0]*255), int(clr[1]*255), int(clr[2]*255), clr[3])
@property
def data(self):
""" Yields a string of Javascript code that loads the nodes and edges into variable g,
which is a Javascript Graph object (see graph.js).
        This can be the response to an XMLHttpRequest, after which you move g into your own variable.
"""
return "".join(self._data())
def _data(self):
if self.graph.nodes and isinstance(self.weight, (list, tuple)):
if WEIGHT in self.weight and self.graph.nodes[-1]._weight is None:
self.graph.eigenvector_centrality()
if CENTRALITY in self.weight and self.graph.nodes[-1]._centrality is None:
self.graph.betweenness_centrality()
s = []
s.append("var g = new Graph(document.getElementById(\"%s\"), %s);\n" % (self.ctx, self.distance))
s.append("var n = {")
if len(self.graph.nodes) > 0:
s.append("\n")
# Translate node properties to Javascript dictionary (var n).
for n in self.graph.nodes:
p = []
if n._x != 0:
p.append("x:%i" % n._x) # 0
if n._y != 0:
p.append("y:%i" % n._y) # 0
if n.radius != self.default["radius"]:
p.append("radius:%.1f" % n.radius) # 5.0
if n._weight is not None:
p.append("weight:%.2f" % n.weight) # 0.00
if n._centrality is not None:
p.append("centrality:%.2f" % n.centrality) # 0.00
if n.fill != self.default["fill"]:
p.append("fill:%s" % self._rgba(n.fill)) # [0,0,0,1.0]
if n.stroke != self.default["stroke"]:
p.append("stroke:%s" % self._rgba(n.stroke)) # [0,0,0,1.0]
if n.strokewidth != self.default["strokewidth"]:
p.append("strokewidth:%.1f" % n.strokewidth) # 0.5
if n.text and n.text.fill != self.default["text"]:
p.append("text:%s" % self._rgba(n.text.fill)) # [0,0,0,1.0]
if n.text and "font" in n.text.__dict__:
p.append("font:\"%s\"" % n.text.__dict__["font"]) # "sans-serif"
if n.text and n.text.__dict__.get("fontsize", self.default["fontsize"]) != self.default["fontsize"]:
p.append("fontsize:%i" % int(max(1, n.text.fontsize)))
if n.text and "fontweight" in n.text.__dict__: # "bold"
p.append("fontweight:\"%s\"" % n.text.__dict__["fontweight"])
if n.text and n.text.string != n.id:
p.append("label:\"%s\"" % n.text.string)
if n.id in self.href:
p.append("href:\"%s\"" % self.href[n.id])
if n.id in self.css:
p.append("css:\"%s\"" % self.css[n.id])
s.append("\t\"%s\": {%s},\n" % (self._escape(n.id), ", ".join(p)))
s.append("};\n")
s.append("var e = [")
if len(self.graph.edges) > 0:
s.append("\n")
# Translate edge properties to Javascript dictionary (var e).
for e in self.graph.edges:
id1, id2 = self._escape(e.node1.id), self._escape(e.node2.id)
p = []
if e.weight != 0:
p.append("weight:%.2f" % e.weight) # 0.00
if e.length != 1:
p.append("length:%.2f" % e.length) # 1.00
if e.type is not None:
p.append("type:\"%s\"" % self.type) # "is-part-of"
if e.stroke != self.default["stroke"]:
p.append("stroke:%s" % self._rgba(e.stroke)) # [0,0,0,1.0]
if e.strokewidth != self.default["strokewidth"]:
p.append("strokewidth:%.2f" % e.strokewidth) # 0.5
s.append("\t[\"%s\", \"%s\", {%s}],\n" % (id1, id2, ", ".join(p)))
s.append("];\n")
# Append the nodes to graph g.
s.append("for (var id in n) {\n"
"\tg.addNode(id, n[id]);\n"
"}\n")
# Append the edges to graph g.
s.append("for (var i=0; i < e.length; i++) {\n"
"\tvar n1 = g.nodeset[e[i][0]];\n"
"\tvar n2 = g.nodeset[e[i][1]];\n"
"\tg.addEdge(n1, n2, e[i][2]);\n"
"}")
return s
@property
def script(self):
""" Yields a string of Javascript code that loads the nodes and edges into variable g (Graph),
and starts the animation of the visualization by calling g.loop().
"""
return "".join(self._script())
def _script(self):
s = self._data()
s.append("\n")
# Apply node weight to node radius.
if self.pack:
s.append(
"for (var i=0; i < g.nodes.length; i++) {\n"
"\tvar n = g.nodes[i];\n"
"\tn.radius = n.radius + n.radius * n.weight;\n"
"}\n")
# Apply edge length (leaves get shorter edges).
if self.pack:
s.append(
"for (var i=0; i < g.nodes.length; i++) {\n"
"\tvar e = g.nodes[i].edges();\n"
"\tif (e.length == 1) {\n"
"\t\te[0].length *= 0.2;\n"
"\t}\n"
"}\n")
# Apply eigenvector and betweenness centrality.
if self.weight is True:
s.append(
"g.eigenvectorCentrality();\n"
"g.betweennessCentrality();\n")
# Apply pruning.
if self.prune is not None:
s.append(
"g.prune(%s);\n" % self.prune)
# Include the layout settings (for clarity).
s.append("g.layout.k = %s; // Force constant (= edge length).\n"
"g.layout.force = %s; // Repulsive strength.\n"
"g.layout.repulsion = %s; // Repulsive radius.\n" % (
self.k, self.force, self.repulsion))
# Start the graph animation loop.
s.append("// Start the animation loop.\n")
s.append("g.loop({frames:%s, fps:%s, ipf:%s, weighted:%s, directed:%s});" % (
int(self.frames),
int(self.fps),
int(self.ipf),
str(self.weighted).lower(),
str(self.directed).lower()))
return s
@property
def canvas(self):
""" Yields a string of HTML with a <div id="graph"> containing a HTML5 <canvas> element.
"""
s = [
"<div id=\"%s\" style=\"width:%spx; height:%spx;\">\n" % (self.id, self.width, self.height),
"\t<canvas id=\"%s\" width=\"%s\" height=\"%s\">\n" % (self.ctx, self.width, self.height),
"\t</canvas>\n",
"</div>"
]
#s.append("\n<script type=\"text/javascript\">\n")
#s.append("".join(self._script()).replace("\n", "\n\t"))
#s.append("\n</script>")
return "".join(s)
@property
def style(self):
""" Yields a string of CSS for <div id="graph">.
"""
return \
"body { font: 11px sans-serif; }\n" \
"a { color: dodgerblue; }\n" \
"#%s {\n" \
"\tdisplay: block;\n" \
"\tposition: relative;\n" \
"\toverflow: hidden;\n" \
"\tborder: 1px solid #ccc;\n" \
"}\n" \
"#%s canvas { }\n" \
".node-label { font-size: 11px; }" % (self.id, self.id)
@property
def html(self):
""" Yields a string of HTML to visualize the graph using a force-based spring layout.
The js parameter sets the path to graph.js and excanvas.js (by default, "./").
"""
js = self.javascript.rstrip("/")
js = (js and js or ".")+"/"
if self.stylesheet == INLINE:
css = self.style.replace("\n","\n\t\t").rstrip("\t")
css = "<style type=\"text/css\">\n\t\t%s\n\t</style>" % css
elif self.stylesheet == DEFAULT:
css = "<link rel=\"stylesheet\" href=\"screen.css\" type=\"text/css\" media=\"screen\" />"
elif self.stylesheet is not None:
css = "<link rel=\"stylesheet\" href=\"%s\" type=\"text/css\" media=\"screen\" />" % self.stylesheet
s = self._script()
s = "".join(s)
s = s.replace("\n", "\n\t\t")
s = "<script type=\"text/javascript\">\n\tfunction init_%s() {\n\t\t%s\n\t}\n\t</script>" % (self.id, s)
s = s.rstrip()
s = self._source % (
self.title,
css,
js,
js,
s,
self.id,
self.id,
self.width,
self.height,
self.ctx,
self.width,
self.height)
return s
def render(self, type=HTML):
if type == HTML:
return self.html
if type == CANVAS:
return self.canvas
if type == STYLE:
return self.style
if type == SCRIPT:
return self.script
if type == DATA:
return self.data
def export(self, path, overwrite=False, encoding="utf-8"):
""" Generates a folder at the given path containing an index.html
that visualizes the graph using the HTML5 <canvas> tag.
"""
if overwrite and os.path.exists(path):
shutil.rmtree(path)
os.mkdir(path) # With overwrite=False, raises OSError if the path already exists.
os.mkdir(os.path.join(path, "js"))
# Copy js/graph.js + js/excanvas.js (unless a custom path is given.)
if self.javascript == "js/":
for f in glob.glob(os.path.join(MODULE, "js", "*.js")):
shutil.copy(f, os.path.join(path, "js", os.path.basename(f)))
# Create screen.css.
if self.stylesheet == DEFAULT:
f = open(os.path.join(path, "screen.css"), "w")
f.write(self.style)
f.close()
# Create index.html.
f = open(os.path.join(path, "index.html"), "w", encoding=encoding)
f.write(self.html)
f.close()
def render(graph, type=HTML, **kwargs):
renderer = HTMLCanvasRenderer(graph)
renderer.default.update(kwargs.get("default", {}))
kwargs["default"] = renderer.default
kwargs["stylesheet"] = kwargs.get("stylesheet", INLINE)
for k,v in kwargs.items():
if k in renderer.__dict__:
renderer.__dict__[k] = v
return renderer.render(type)
def export(graph, path, overwrite=False, encoding="utf-8", **kwargs):
renderer = HTMLCanvasRenderer(graph)
renderer.default.update(kwargs.get("default", {}))
kwargs["default"] = renderer.default
kwargs["stylesheet"] = kwargs.get("stylesheet", DEFAULT)
for k,v in kwargs.items():
if k in renderer.__dict__:
renderer.__dict__[k] = v
return renderer.export(path, overwrite)
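# Usage sketch (illustrative, not part of the original module): the exact Graph
# construction API (add_node/add_edge) is assumed from the surrounding library
# and may differ slightly in signature.
#
#   g = Graph()
#   g.add_node("cat")
#   g.add_node("dog")
#   g.add_edge("cat", "dog", weight=0.5)
#   html_page = render(g, type=HTML, frames=500)   # full standalone HTML page
#   export(g, "graph_output", overwrite=True)      # folder with index.html + js/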
| bsd-3-clause | -9,090,473,286,160,558,000 | 39.693112 | 118 | 0.526213 | false |
pragle/craft | web/model/db_model.py | 1 | 1193 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Michal Szczepanski'
from sqlalchemy.sql.schema import Column, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Integer, String, Boolean, Binary
Base = declarative_base()
class DatabaseConnection(Base):
__tablename__ = 'database_connection'
connection_name = Column(String, unique=True, primary_key=True)
name = Column(String)
host = Column(String)
port = Column(Integer)
username = Column(String)
password = Column(String)
database = Column(String)
'''
class SSHConnection(Base):
__tablename__ = 'ssh_connection'
id = Column(Integer, primary_key=True)
name = Column(String)
host = Column(String)
port = Column(String)
auth_id = Column(Integer, ForeignKey('ssh_connection_auth.id'))
class SSHConnectionAuth(Base):
__tablename__ = 'ssh_connection_auth'
id = Column(Integer, primary_key=True)
key = Column(Boolean, default=False)
key_data = Column(Binary)
username = Column(String)
password = Column(String)
connections = relationship('SSHConnection')
'''
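# Example (illustrative only): creating the schema and adding a row with these
# declarative models. The SQLite URL and the field values below are assumptions.
if __name__ == '__main__':
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker
    engine = create_engine('sqlite:///craft.db')  # hypothetical database URL
    Base.metadata.create_all(engine)  # creates the database_connection table
    Session = sessionmaker(bind=engine)
    session = Session()
    session.add(DatabaseConnection(connection_name='local', name='dev',
                                   host='localhost', port=5432, username='craft',
                                   password='secret', database='craft'))
    session.commit()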
| bsd-3-clause | -1,577,213,707,850,402,800 | 23.346939 | 67 | 0.694049 | false |
wasade/picrust | picrust/ace.py | 1 | 3860 | #!/usr/bin/env python
# Author: Morgan Langille ([email protected])
# count_wagner.py
""" Application controller for the `ace' function within the R package `ape`.
File created on Feb 2012.
"""
from __future__ import division
from cogent.util.table import Table
from os.path import split, splitext
from os import remove, environ
from glob import glob
from cogent.app.util import CommandLineApplication, ResultPath, get_tmp_filename, ApplicationError
from cogent.app.parameters import ValuedParameter, FilePath
from cogent import LoadTree
from cogent import LoadTable
from picrust.util import get_picrust_project_dir
from os.path import join
__author__ = "Morgan Langille"
__copyright__ = "Copyright 2011-2013, The PICRUSt Project"
__credits__ = ["Morgan Langille", "Greg Caporaso"]
__license__ = "GPL"
__version__ = "1.0.0-dev"
__maintainer__ = "Morgan Langille"
__email__ = "[email protected]"
__status__ = "Development"
class Ace(CommandLineApplication):
""" Application controller for 'ace' fucntion within the 'ape' R package."""
ace_script_fp = join(get_picrust_project_dir(),'picrust','support_files','R','ace.R')
_command = ace_script_fp
_input_handler = '_input_as_string'
_suppress_stdout = False
_suppress_stderr = False
# Overridden to call script with R rather than directly - this is useful
    # because permissions on the script are set to 644 when PICRUSt is installed
# with setup.py. This is fine if we're executing it with R, but not if we're
# trying to execute it directly.
def _get_base_command(self):
""" Returns the full command string
input_arg: the argument to the command which represents the input
to the program, this will be a string, either
representing input or a filename to get input from
"""
command_parts = []
# Append a change directory to the beginning of the command to change
# to self.WorkingDir before running the command
# WorkingDir should be in quotes -- filenames might contain spaces
cd_command = ''.join(['cd ',str(self.WorkingDir),';'])
if self._command is None:
raise ApplicationError, '_command has not been set.'
command = self._command
parameters = self.Parameters
command_parts.append(cd_command)
command_parts.append("R")
command_parts.append("-f")
command_parts.append(command)
command_parts.append("--args")
command_parts.append(self._command_delimiter.join(filter(\
None,(map(str,parameters.values())))))
return self._command_delimiter.join(command_parts).strip()
BaseCommand = property(_get_base_command)
def ace_for_picrust(tree_path,trait_table_path,method='pic',HALT_EXEC=False):
'''Runs the Ace application controller given path of tree and trait table and returns a Table'''
#initialize Ace app controller
ace=Ace(HALT_EXEC=HALT_EXEC)
tmp_output_count_path=get_tmp_filename()
tmp_output_prob_path=get_tmp_filename()
#quote file names
tree_path='"{0}"'.format(tree_path)
trait_table_path='"{0}"'.format(trait_table_path)
as_string = " ".join([tree_path,trait_table_path,method,tmp_output_count_path,tmp_output_prob_path])
#Run ace here
result = ace(data=as_string)
#Load the output into Table objects
try:
asr_table=LoadTable(filename=tmp_output_count_path,header=True,sep='\t')
except IOError:
raise RuntimeError,\
("R reported an error on stderr:"
" %s" % "\n".join(result["StdErr"].readlines()))
asr_prob_table=LoadTable(filename=tmp_output_prob_path,header=True,sep='\t')
#Remove tmp files
remove(tmp_output_count_path)
remove(tmp_output_prob_path)
return asr_table,asr_prob_table
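# Illustrative call (assumes R plus the 'ape' package are installed; the tree
# and trait table file names below are placeholders):
#
#   asr_table, asr_prob_table = ace_for_picrust('tree.nwk', 'traits.tab', method='pic')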
| gpl-3.0 | -2,905,388,587,920,334,000 | 36.475728 | 104 | 0.673834 | false |
frederick623/HTI | fa_util_py/HTI_ExportSIToMSS.py | 1 | 16771 | import ael
import acm
import time
import datetime
import os
import stat
import smtplib
import shutil
import string
#import HTI_DB_Functions
from datetime import date
from datetime import datetime
import shlex
#import HTI_MailFunction
import os
SEQNBR = 0
TRDNBR = 1
SETTTYPE = 2
VALUEDAY = 3
TEXT1 = 4
PRFID = 5
AMOUNT = 6
ISIN = 7
INSTYPE = 8
UI_ISIN = 9
'''
select s.seqnbr, t.trdnbr, s.type, s.value_day, t.text1
from settlement s, trade t, instrument i
where s.trdnbr = t.trdnbr
and t.insaddr = i.insaddr
and s.status = 'Released'
and s.updat_time >= Today and s.updat_time < Today + 1
and s.type in ('Security Nominal', 'End Security')
'''
dsn = "HTIConnString"
msse_fa_acc_mapping = {'Trading Book 5': '02-0238771-22',
'Trading Book 6': '02-0228640-30',
'Trading Book 7': '02-0228640-30',
'Trading Book 8': '02-0228640-30',
'Trading Book 13': '02-0263880-22',
'Trading Book 14': '02-0228640-30',
'Trading Book 17': '02-0238771-22'}
pfs = acm.FPhysicalPortfolio.Select('')
def get_dates():
dates = []
dates.append("TODAY")
dates.append(ael.date('2015-05-28'))
return dates
def get_all_setttypes():
settType = []
settType.append('Coupon')
    settType.append('Coupon Transfer')
    return settType
def get_all_instypes():
insType = []
insType.append('BasketRepo/Reverse')
insType.append('BasketSecurityLoan')
insType.append('Bill')
insType.append('Bond')
insType.append('BondIndex')
insType.append('BuySellback')
insType.append('CallAccount')
insType.append('Cap')
insType.append('CashCollateral')
insType.append('CD')
insType.append('Certificate')
insType.append('CFD')
insType.append('CLN')
insType.append('Collar')
insType.append('Collateral')
insType.append('Combination')
insType.append('Commodity')
insType.append('Commodity Index')
insType.append('Commodity Variant')
insType.append('Convertible')
insType.append('Credit Balance')
insType.append('CreditDefaultSwap')
insType.append('CreditIndex')
insType.append('Curr')
insType.append('CurrSwap')
insType.append('Deposit')
insType.append('Depositary Receipt')
insType.append('Dividend Point Index')
insType.append('DualCurrBond')
insType.append('EquityIndex')
insType.append('EquitySwap')
insType.append('ETF')
insType.append('Flexi Bond')
insType.append('Floor')
insType.append('FRA')
insType.append('FreeDefCF')
insType.append('FRN')
insType.append('Fund')
insType.append('Future/Forward')
insType.append('Fx Rate')
insType.append('FXOptionDatedFwd')
insType.append('FxSwap')
insType.append('IndexLinkedBond')
insType.append('IndexLinkedSwap')
insType.append('LEPO')
insType.append('MBS/ABS')
insType.append('MultiAsset')
insType.append('MultiOption')
insType.append('None')
insType.append('Option')
insType.append('Portfolio Swap')
insType.append('PriceIndex')
insType.append('PriceSwap')
insType.append('PromisLoan')
insType.append('RateIndex')
insType.append('Repo/Reverse')
insType.append('SecurityLoan')
insType.append('Stock')
insType.append('StockRight')
insType.append('Swap')
insType.append('TotalReturnSwap')
insType.append('UnKnown')
insType.append('VarianceSwap')
insType.append('VolatilitySwap')
insType.append('Warrant')
insType.append('Zero')
insType.sort()
return insType
def get_all_portfolios():
portfolios = []
for port in ael.Portfolio.select():
portfolios.append(port.display_id())
portfolios.sort()
return portfolios
def get_all_acquirers():
acquirers = []
for acq in ael.Party.select("type = 'Intern Dept'"):
acquirers.append(acq.display_id())
acquirers.sort()
return acquirers
def get_all_fileMsgType():
msgType = []
msgType.append("SI") # Sec In/Out
msgType.sort()
return msgType
def disable_variables(variables, enable = 0):
for i in variables:
for j in ael_variables:
if i == j[0]:
j[9] = enable
def get_all_status():
status = []
status.append('Released')
status.append('Pending Closure')
status.append('Closed')
status.sort()
return status
ael_variables = [['acquirers', 'Acquirers', 'string', get_all_acquirers(), 'HTIFP', 1, 1, 'Acquirers', None, 1], \
['sett_status', 'Settlement Status', 'string', get_all_status(), 'Released', 1, 1, 'Settlement Status', None, 1], \
['instypes', 'Instrument Types', 'string', get_all_instypes(), 'Bond', 1, 1, 'Instrument Types', None, 1], \
['not_setttypes', 'Not Settlement Types', 'string', get_all_setttypes(), 'Coupon,Coupon Transfer', 1, 1, 'Not Settlement Types', None, 1], \
['pf', 'Portfolio', 'string', get_all_portfolios(), None, 1, 1, 'Portfolio', None, 1], \
['filePath', 'File Path', 'string', None, 'c:\\temp', 1, 0, 'File Name', None, 1], \
['fileName', 'File Name', 'string', None, '<FileMsgType>_<YYYYMMDDhhmmss>.csv', 1, 0, 'File Name', None, 0], \
['participant_id', 'Participant Id', 'string', None, 'B01143', 1, 0, 'Haitong Participant Id', None, 1], \
['asofdate', 'Date', 'string', get_dates(), "TODAY", 1, 0, 'Date', None, 1], \
['fileMsgType', 'File Message Type', 'string', get_all_fileMsgType(), 'SI', 1, 0, 'File Message Type', None, 0]]
def EmailNotify(subject, messg, RECIPIENTS):
session = smtplib.SMTP(smtpserver)
BODY = string.join((
"From: %s" % SENDER,
"To: %s" % RECIPIENTS,
"Subject: %s" % subject,
"",
messg
), "\r\n")
#print BODY
if AUTHREQUIRED:
session.login(smtpuser, smtppass)
smtpresult = session.sendmail(SENDER, RECIPIENTS, BODY)
if smtpresult:
errstr = ''
for recip in smtpresult.keys():
            errstr += 'Could not deliver mail to: %s. Server said: %s %s\n' % (recip, smtpresult[recip][0], smtpresult[recip][1])
raise smtplib.SMTPException, errstr
session.quit()
def ValidPortfolio(array_pf, portfolio):
for pf in array_pf:
if portfolio == pf:
return True
return False
def getExecBroker(ptyid):
p = ael.Party[ptyid]
for ai in p.additional_infos():
if ai.addinf_specnbr.field_name == 'Broker Ref':
return ai.value.strip()
return ''
def ConvertDateToYYYYMMDD(dt):
d = ael.date(dt).to_ymd()
yy = str(d[0])
mm = str(d[1])
if d[1] < 10:
mm = "0" + mm
dd = str(d[2])
if d[2] < 10:
dd = "0" + dd
return yy+mm+dd
def getChildPortfolio(pPf, pfarr):
if (pPf == None):
return pfarr
for child in pPf.children():
pfid = child.display_id()
cPf = ael.Portfolio[pfid]
if cPf != None:
if cPf.compound == True:
pfarr = getChildPortfolio(cPf, pfarr)
else:
pfarr.append(pfid)
return pfarr
def ael_main(dict):
# Acquirers
acq_array_list = dict['acquirers']
acq_list = ''
for acq in acq_array_list:
if acq_list == '':
acq_list = "'" + acq + "'"
else:
acq_list = acq_list + ",'" + acq + "'"
# instypes
instype_array_list = dict['instypes']
instype_list = ''
for instype in instype_array_list:
if instype_list == '':
instype_list = "'" + instype + "'"
else:
instype_list = instype_list + ",'" + instype + "'"
# settlement status
sett_status_array_list = dict['sett_status']
sett_status_list = ''
for sett_status in sett_status_array_list:
if sett_status_list == '':
sett_status_list = "'" + sett_status + "'"
else:
sett_status_list = sett_status_list + ",'" + sett_status + "'"
# Portfolios
pf_array_list = dict['pf']
pf_list = ''
for pf in pf_array_list:
if pf_list == '':
pf_list = "'" + pf + "'"
else:
pf_list = pf_list + ",'" + pf + "'"
# sett_types
not_setttype_array_list = dict['not_setttypes']
not_setttype_list = ''
for setttype in not_setttype_array_list:
if not_setttype_list == '':
not_setttype_list = "'" + setttype + "'"
else:
not_setttype_list = not_setttype_list + ",'" + setttype + "'"
participant_id = dict['participant_id']
print 'pf_list', pf_list
print 'acq_list', acq_list
print 'sett_status_list', sett_status_list
print 'not_setttype_list', not_setttype_list
print 'instype_list', instype_list
# File Message Type
fileMsgType = dict['fileMsgType']
# Asof Date
asofdate = dict['asofdate']
if asofdate == 'TODAY':
d = ael.date_today().to_ymd()
d1 = ael.date_today().add_days(1).to_ymd()
else:
d = ael.date(asofdate).to_ymd()
d1 = ael.date(asofdate).add_days(1).to_ymd()
yy = str(d[0])
mm = str(d[1])
mm = "%02d" % int(mm)
dd = str(d[2])
dd = "%02d" % int(dd)
asofdate = yy+'-'+mm+'-'+dd
yy = str(d1[0])
mm = str(d1[1])
mm = "%02d" % int(mm)
dd = str(d1[2])
dd = "%02d" % int(dd)
d1_date = yy+'-'+mm+'-'+dd
# File Name
filePath = dict['filePath']
fileName = dict['fileName']
fileName = filePath + '\\' + fileName
genDate = ael.date_today()
timeStamp = time.strftime("%Y%m%d%H%M%S")
fileName = fileName.replace("<YYYYMMDDhhmmss>", timeStamp)
fileName = fileName.replace("<FileMsgType>", fileMsgType)
errMsg = ''
print fileName
f = open(fileName, "w")
# trade details
if fileMsgType == 'SI':
# Header
headerLine = "settleDate,instructionType,settleMethod,haitongParticipantId,market,stockCode,shares,payment,ccassClientAccountNo,haitongClientAccountNo"
headerLine = str(headerLine) + '\n'
print headerLine
f.write(headerLine)
strSql = """select s.seqnbr, t.trdnbr, s.type, s.value_day, t.text1, pf.prfid, s.amount, i.isin, i.instype, ui.isin
from settlement s, trade t, instrument i, party acq, portfolio pf, instrument ui
where s.trdnbr = t.trdnbr
and t.insaddr = i.insaddr
and t.acquirer_ptynbr = acq.ptynbr
and t.prfnbr = pf.prfnbr
and acq.ptyid in (%s)
and s.status in (%s)
and s.updat_time >= '%s' and s.updat_time < '%s'
and i.instype in (%s)
and t.category ~= 'Collateral'
and pf.prfid in (%s)
and i.und_insaddr *= ui.insaddr
and s.type in ('Security Nominal', 'End Security')""" % (acq_list, sett_status_list, asofdate, d1_date, instype_list, pf_list)
print strSql
recCnt = 0
rs = ael.asql(strSql)
columns, buf = rs
for table in buf:
for row in table:
print row
seqnbr = str(row[SEQNBR]).strip()
trdnbr = str(row[TRDNBR]).strip()
setttype = str(row[SETTTYPE]).strip()
valueday = str(row[VALUEDAY]).strip()
text1 = str(row[TEXT1]).strip()
sec_amount = str(row[AMOUNT]).strip()
instype = str(row[INSTYPE]).strip()
print 'louis1'
if instype == 'Repo/Reverse':
if text1 == '':
prfid = str(row[PRFID]).strip()
else:
prfid = text1
isin = str(row[UI_ISIN]).strip()
else:
prfid = str(row[PRFID]).strip()
isin = str(row[ISIN]).strip()
accountId = ''
try:
accountId = msse_fa_acc_mapping[prfid]
except:
print 'cannot get accountId'
settledt = ael.date(valueday).to_string("%Y-%m-%d")
if float(sec_amount) >= 0:
instructionType = 'DELIVER'
else:
instructionType = 'RECEIVE'
settlemethod = 'FOP'
marketcode = 'OTC'
payment = '0.00'
sec_amount = str(abs(float(sec_amount)))
payment_strSql = """select sum(s.amount) 'amount'
from settlement s, trade t, instrument i, party acq, portfolio pf, instrument ui
where s.trdnbr = t.trdnbr
and t.insaddr = i.insaddr
and t.acquirer_ptynbr = acq.ptynbr
and t.prfnbr = pf.prfnbr
and acq.ptyid in (%s)
and s.status in (%s)
and i.instype in (%s)
and t.category ~= 'Collateral'
and pf.prfid in (%s)
and i.und_insaddr *= ui.insaddr
and s.type not in ('Security Nominal', 'End Security')
and s.type not in (%s)
and s.value_day = '%s'
and t.trdnbr = %s""" % (acq_list, sett_status_list, instype_list, pf_list, not_setttype_list, settledt, int(trdnbr))
print payment_strSql
payment_rs = ael.asql(payment_strSql)
payment_columns, payment_buf = payment_rs
for payment_table in payment_buf:
for payment_row in payment_table:
payment = str(abs(float(str(payment_row[0]).strip())))
settlemethod = 'DVP'
print 'payment', payment
detailLine = settledt + ',' + instructionType + ',' + settlemethod + ',' + participant_id + ',' + marketcode + ',' + isin + ',' + sec_amount + ',' + payment + ',' + '' + ',' + accountId
detailLine = str(detailLine) + '\n'
recCnt = recCnt + 1
print detailLine
f.write(detailLine)
else:
recCnt = 0
f.close()
mb = acm.GetFunction("msgBox", 3)
if mb != None:
mb("Message", "File has been generated successfully at " + fileName, 0)
mb = None
return
| apache-2.0 | -2,448,934,486,967,091,700 | 35.458696 | 245 | 0.476179 | false |
awesto/django-shop | shop/views/auth.py | 1 | 8484 | from django.contrib.auth import logout, get_user_model
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.tokens import default_token_generator
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.encoding import force_str
from django.utils.translation import gettext_lazy as _
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.exceptions import ErrorDetail, ValidationError
from rest_framework.generics import GenericAPIView
from rest_framework.permissions import AllowAny
from rest_framework.renderers import JSONRenderer, BrowsableAPIRenderer
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_auth.views import LoginView as OriginalLoginView, PasswordChangeView as OriginalPasswordChangeView
from shop.models.cart import CartModel
from shop.models.customer import CustomerModel
from shop.rest.renderers import CMSPageRenderer
from shop.serializers.auth import PasswordResetRequestSerializer, PasswordResetConfirmSerializer
from shop.signals import email_queued
class AuthFormsView(GenericAPIView):
"""
Generic view to handle authentication related forms such as user registration
"""
serializer_class = None
form_class = None
def post(self, request, *args, **kwargs):
if request.customer.is_visitor:
customer = CustomerModel.objects.get_or_create_from_request(request)
else:
customer = request.customer
form_data = request.data.get(self.form_class.scope_prefix, {})
form = self.form_class(data=form_data, instance=customer)
if form.is_valid():
form.save(request=request)
response_data = {form.form_name: {
'success_message': _("Successfully registered yourself."),
}}
return Response(response_data, status=status.HTTP_200_OK)
errors = dict(form.errors)
if 'email' in errors:
errors.update({NON_FIELD_ERRORS: errors.pop('email')})
return Response({form.form_name: errors}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
class LoginView(OriginalLoginView):
form_name = 'login_form'
def login(self):
"""
Logs in as the given user, and moves the items from the current to the new cart.
"""
try:
anonymous_cart = CartModel.objects.get_from_request(self.request)
except CartModel.DoesNotExist:
anonymous_cart = None
if self.request.customer.user.is_anonymous or self.request.customer.is_registered:
previous_user = None
else:
previous_user = self.request.customer.user
super().login() # this rotates the session_key
if not self.serializer.data.get('stay_logged_in'):
self.request.session.set_expiry(0) # log out when the browser is closed
authenticated_cart = CartModel.objects.get_from_request(self.request)
if anonymous_cart:
# an anonymous customer logged in, now merge his current cart with a cart,
# which previously might have been created under his account.
authenticated_cart.merge_with(anonymous_cart)
if previous_user and previous_user.is_active is False and previous_user != self.request.user:
# keep the database clean and remove this anonymous entity
if previous_user.customer.orders.count() == 0:
previous_user.delete()
def post(self, request, *args, **kwargs):
self.request = request
if request.user.is_anonymous:
form_data = request.data.get('form_data', {})
self.serializer = self.get_serializer(data=form_data)
if self.serializer.is_valid():
self.login()
return self.get_response()
exc = ValidationError({self.form_name: self.serializer.errors})
else:
message = ErrorDetail("Please log out before signing in again.")
exc = ValidationError({self.form_name: {api_settings.NON_FIELD_ERRORS_KEY: [message]}})
response = self.handle_exception(exc)
self.response = self.finalize_response(request, response, *args, **kwargs)
return self.response
class LogoutView(APIView):
"""
    Calls the Django logout method and deletes the auth Token assigned to the current User object.
"""
permission_classes = (AllowAny,)
form_name = 'logout_form'
def post(self, request):
try:
request.user.auth_token.delete()
except:
pass
logout(request)
request.user = AnonymousUser()
response_data = {self.form_name: {'success_message': _("Successfully logged out.")}}
return Response(response_data)
class PasswordChangeView(OriginalPasswordChangeView):
form_name = 'password_change_form'
def post(self, request, *args, **kwargs):
form_data = request.data.get('form_data', {})
serializer = self.get_serializer(data=form_data)
if serializer.is_valid():
serializer.save()
response_data = {self.form_name: {
'success_message': _("Password has been changed successfully."),
}}
return Response(response_data)
return Response({self.form_name: serializer.errors}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
class PasswordResetRequestView(GenericAPIView):
"""
Calls Django Auth PasswordResetRequestForm save method.
Accepts the following POST parameters: email
Returns the success/fail message.
"""
serializer_class = PasswordResetRequestSerializer
permission_classes = (AllowAny,)
form_name = 'password_reset_request_form'
def post(self, request, *args, **kwargs):
form_data = request.data.get('form_data', {})
serializer = self.get_serializer(data=form_data)
if not serializer.is_valid():
return Response({self.form_name: serializer.errors}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
# send email containing a reset link
serializer.save()
# trigger async email queue
email_queued()
# Return the success message with OK HTTP status
msg = _("Instructions on how to reset the password have been sent to '{email}'.")
response_data = {self.form_name: {
'success_message': msg.format(**serializer.data),
}}
return Response(response_data)
class PasswordResetConfirmView(GenericAPIView):
"""
Password reset e-mail link points onto a CMS page with the Page ID = 'password-reset-confirm'.
This page then shall render the CMS plugin as provided by the **ShopAuthenticationPlugin** using
the form "Confirm Password Reset".
"""
renderer_classes = (CMSPageRenderer, JSONRenderer, BrowsableAPIRenderer)
serializer_class = PasswordResetConfirmSerializer
permission_classes = (AllowAny,)
token_generator = default_token_generator
form_name = 'password_reset_confirm_form'
def get(self, request, uidb64=None, token=None):
data = {'uid': uidb64, 'token': token}
serializer_class = self.get_serializer_class()
password = get_user_model().objects.make_random_password()
data.update(new_password1=password, new_password2=password)
serializer = serializer_class(data=data, context=self.get_serializer_context())
if not serializer.is_valid():
return Response({'validlink': False})
return Response({
'validlink': True,
'user_name': force_str(serializer.user),
'form_name': 'password_reset_form',
})
def post(self, request, uidb64=None, token=None):
try:
data = dict(request.data['form_data'], uid=uidb64, token=token)
except (KeyError, TypeError, ValueError):
errors = {'non_field_errors': [_("Invalid POST data.")]}
else:
serializer = self.get_serializer(data=data)
if serializer.is_valid():
serializer.save()
response_data = {self.form_name: {
'success_message': _("Password has been reset with the new password."),
}}
return Response(response_data)
else:
errors = serializer.errors
return Response({self.form_name: errors}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
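# Illustrative URL wiring (an assumption, not part of this module): the link in
# the password-reset e-mail must resolve to PasswordResetConfirmView with the
# uidb64/token parts captured from the path, e.g. in the project's urls.py:
#
#   from django.urls import path
#   from shop.views.auth import PasswordResetConfirmView
#
#   urlpatterns = [
#       path('password-reset-confirm/<uidb64>/<token>/',
#            PasswordResetConfirmView.as_view(), name='password-reset-confirm'),
#   ]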
| bsd-3-clause | 8,623,765,009,324,266,000 | 41.208955 | 109 | 0.660655 | false |
charlesthk/django-nightmare-pdf | nightmare_pdf/generators.py | 1 | 2545 | import subprocess
import os
from django.core.validators import URLValidator
from nightmare_pdf.settings import pdf_settings
from django.http import (
HttpResponse,
Http404
)
from django.core.files.base import ContentFile
from .models import PdfDoc
from .utils import get_random_filename
validate_url = URLValidator(schemes=['https', 'http'])
class PDFGenerator(object):
def __init__(self, url, timeout=1000, page_size='A4', landscape=0,
print_background=1, margins_type=1, script=pdf_settings.DEFAULT_RENDER_SCRIPT,
temp_dir=pdf_settings.DEFAULT_TEMP_DIR):
validate_url(url)
self.url = url
self.filename = self.__get_random_filename()
self.filepath = self.__get_filepath()
self.timeout = timeout
self.page_size = page_size
self.landscape = landscape
self.print_background = print_background
self.margins_type = margins_type
self.script = script
self.temp_dir = temp_dir
self.pdf_data = None
self.__generate()
self.__set_pdf_data()
self.__remove_source_file()
def __get_random_filename(self):
name = get_random_filename(20)
return "%s.pdf" % name
def __get_filepath(self):
return os.path.join(pdf_settings.DEFAULT_TEMP_DIR, self.filename)
def __generate(self):
"""
call the following command:
node render_pdf.js [url] [filepath]
--timeout [timeout]
--pageSize [page_size]
--landscape [landscape]
--printBackground [print_background]
--marginsType [margins_type]
"""
command = [
pdf_settings.NODE_PATH,
self.script,
self.url,
self.filepath,
'--timeout',
str(self.timeout),
'--pageSize',
self.page_size,
'--landscape',
str(self.landscape),
'--printBackground',
str(self.print_background),
'--marginsType',
str(self.margins_type)
]
return subprocess.call(command)
def __set_pdf_data(self):
        with open(self.filepath, 'rb') as pdf:
self.pdf_data = pdf.read()
def get_content_file(self, filename):
return ContentFile(self.pdf_data, name=filename)
def get_data(self):
return self.pdf_data
def get_http_response(self, filename):
response = HttpResponse(self.pdf_data, content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="%s.pdf"' % filename
return response
def __remove_source_file(self):
return subprocess.call(['rm', self.filepath])
def save(self, filename, title='', description=''):
file = self.get_content_file(filename)
document = PdfDoc(
title=title,
description=description,
document=file)
document.save()
return document
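# Illustrative usage (URL and filenames are placeholders): render a page to PDF
# and either stream it back as a download or persist it through the PdfDoc model.
#
#   pdf = PDFGenerator('https://example.com/invoice/42', landscape=0)
#   response = pdf.get_http_response('invoice-42')        # HttpResponse attachment
#   document = pdf.save('invoice-42.pdf', title='Invoice 42')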
| mit | 8,103,723,720,229,453,000 | 22.564815 | 83 | 0.697053 | false |
yephper/django | tests/generic_relations_regress/tests.py | 1 | 23296 | from django.db.models import Q, Sum
from django.db.models.deletion import ProtectedError
from django.db.utils import IntegrityError
from django.forms.models import modelform_factory
from django.test import TestCase, skipIfDBFeature
from .models import (
A, B, C, D, Address, Board, CharLink, Company, Contact, Content, Developer,
Guild, HasLinkThing, Link, Node, Note, OddRelation1, OddRelation2,
Organization, Person, Place, Related, Restaurant, Tag, Team, TextLink,
)
class GenericRelationTests(TestCase):
def test_inherited_models_content_type(self):
"""
Test that GenericRelations on inherited classes use the correct content
type.
"""
p = Place.objects.create(name="South Park")
r = Restaurant.objects.create(name="Chubby's")
l1 = Link.objects.create(content_object=p)
l2 = Link.objects.create(content_object=r)
self.assertEqual(list(p.links.all()), [l1])
self.assertEqual(list(r.links.all()), [l2])
def test_reverse_relation_pk(self):
"""
Test that the correct column name is used for the primary key on the
originating model of a query. See #12664.
"""
p = Person.objects.create(account=23, name='Chef')
Address.objects.create(street='123 Anywhere Place',
city='Conifer', state='CO',
zipcode='80433', content_object=p)
qs = Person.objects.filter(addresses__zipcode='80433')
self.assertEqual(1, qs.count())
self.assertEqual('Chef', qs[0].name)
def test_charlink_delete(self):
oddrel = OddRelation1.objects.create(name='clink')
CharLink.objects.create(content_object=oddrel)
oddrel.delete()
def test_textlink_delete(self):
oddrel = OddRelation2.objects.create(name='tlink')
TextLink.objects.create(content_object=oddrel)
oddrel.delete()
def test_q_object_or(self):
"""
Tests that SQL query parameters for generic relations are properly
grouped when OR is used.
Test for bug http://code.djangoproject.com/ticket/11535
In this bug the first query (below) works while the second, with the
query parameters the same but in reverse order, does not.
The issue is that the generic relation conditions do not get properly
grouped in parentheses.
"""
note_contact = Contact.objects.create()
org_contact = Contact.objects.create()
Note.objects.create(note='note', content_object=note_contact)
org = Organization.objects.create(name='org name')
org.contacts.add(org_contact)
# search with a non-matching note and a matching org name
qs = Contact.objects.filter(Q(notes__note__icontains=r'other note') |
Q(organizations__name__icontains=r'org name'))
self.assertIn(org_contact, qs)
# search again, with the same query parameters, in reverse order
qs = Contact.objects.filter(
Q(organizations__name__icontains=r'org name') |
Q(notes__note__icontains=r'other note'))
self.assertIn(org_contact, qs)
def test_join_reuse(self):
qs = Person.objects.filter(
addresses__street='foo'
).filter(
addresses__street='bar'
)
self.assertEqual(str(qs.query).count('JOIN'), 2)
def test_generic_relation_ordering(self):
"""
Test that ordering over a generic relation does not include extraneous
duplicate results, nor excludes rows not participating in the relation.
"""
p1 = Place.objects.create(name="South Park")
p2 = Place.objects.create(name="The City")
c = Company.objects.create(name="Chubby's Intl.")
Link.objects.create(content_object=p1)
Link.objects.create(content_object=c)
places = list(Place.objects.order_by('links__id'))
def count_places(place):
return len([p for p in places if p.id == place.id])
self.assertEqual(len(places), 2)
self.assertEqual(count_places(p1), 1)
self.assertEqual(count_places(p2), 1)
def test_target_model_is_unsaved(self):
"""Test related to #13085"""
# Fails with another, ORM-level error
dev1 = Developer(name='Joe')
note = Note(note='Deserves promotion', content_object=dev1)
with self.assertRaises(IntegrityError):
note.save()
def test_target_model_len_zero(self):
"""Test for #13085 -- __len__() returns 0"""
team1 = Team.objects.create(name='Backend devs')
try:
note = Note(note='Deserve a bonus', content_object=team1)
except Exception as e:
if (issubclass(type(e), Exception) and
str(e) == 'Impossible arguments to GFK.get_content_type!'):
self.fail("Saving model with GenericForeignKey to model instance whose "
"__len__ method returns 0 shouldn't fail.")
raise e
note.save()
def test_target_model_nonzero_false(self):
"""Test related to #13085"""
# __nonzero__() returns False -- This actually doesn't currently fail.
# This test validates that
g1 = Guild.objects.create(name='First guild')
note = Note(note='Note for guild', content_object=g1)
note.save()
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_gfk_to_model_with_empty_pk(self):
"""Test related to #13085"""
# Saving model with GenericForeignKey to model instance with an
# empty CharField PK
b1 = Board.objects.create(name='')
tag = Tag(label='VP', content_object=b1)
tag.save()
def test_ticket_20378(self):
# Create a couple of extra HasLinkThing so that the autopk value
# isn't the same for Link and HasLinkThing.
hs1 = HasLinkThing.objects.create()
hs2 = HasLinkThing.objects.create()
hs3 = HasLinkThing.objects.create()
hs4 = HasLinkThing.objects.create()
l1 = Link.objects.create(content_object=hs3)
l2 = Link.objects.create(content_object=hs4)
self.assertQuerysetEqual(
HasLinkThing.objects.filter(links=l1),
[hs3], lambda x: x)
self.assertQuerysetEqual(
HasLinkThing.objects.filter(links=l2),
[hs4], lambda x: x)
self.assertQuerysetEqual(
HasLinkThing.objects.exclude(links=l2),
[hs1, hs2, hs3], lambda x: x, ordered=False)
self.assertQuerysetEqual(
HasLinkThing.objects.exclude(links=l1),
[hs1, hs2, hs4], lambda x: x, ordered=False)
def test_ticket_20564(self):
b1 = B.objects.create()
b2 = B.objects.create()
b3 = B.objects.create()
c1 = C.objects.create(b=b1)
c2 = C.objects.create(b=b2)
c3 = C.objects.create(b=b3)
A.objects.create(flag=None, content_object=b1)
A.objects.create(flag=True, content_object=b2)
self.assertQuerysetEqual(
C.objects.filter(b__a__flag=None),
[c1, c3], lambda x: x
)
self.assertQuerysetEqual(
C.objects.exclude(b__a__flag=None),
[c2], lambda x: x
)
def test_ticket_20564_nullable_fk(self):
b1 = B.objects.create()
b2 = B.objects.create()
b3 = B.objects.create()
d1 = D.objects.create(b=b1)
d2 = D.objects.create(b=b2)
d3 = D.objects.create(b=b3)
d4 = D.objects.create()
A.objects.create(flag=None, content_object=b1)
A.objects.create(flag=True, content_object=b1)
A.objects.create(flag=True, content_object=b2)
self.assertQuerysetEqual(
D.objects.exclude(b__a__flag=None),
[d2], lambda x: x
)
self.assertQuerysetEqual(
D.objects.filter(b__a__flag=None),
[d1, d3, d4], lambda x: x
)
self.assertQuerysetEqual(
B.objects.filter(a__flag=None),
[b1, b3], lambda x: x
)
self.assertQuerysetEqual(
B.objects.exclude(a__flag=None),
[b2], lambda x: x
)
def test_extra_join_condition(self):
# A crude check that content_type_id is taken in account in the
# join/subquery condition.
self.assertIn("content_type_id", str(B.objects.exclude(a__flag=None).query).lower())
# No need for any joins - the join from inner query can be trimmed in
# this case (but not in the above case as no a objects at all for given
# B would then fail).
self.assertNotIn(" join ", str(B.objects.exclude(a__flag=True).query).lower())
self.assertIn("content_type_id", str(B.objects.exclude(a__flag=True).query).lower())
def test_annotate(self):
hs1 = HasLinkThing.objects.create()
hs2 = HasLinkThing.objects.create()
HasLinkThing.objects.create()
b = Board.objects.create(name=str(hs1.pk))
Link.objects.create(content_object=hs2)
l = Link.objects.create(content_object=hs1)
Link.objects.create(content_object=b)
qs = HasLinkThing.objects.annotate(Sum('links')).filter(pk=hs1.pk)
# If content_type restriction isn't in the query's join condition,
# then wrong results are produced here as the link to b will also match
# (b and hs1 have equal pks).
self.assertEqual(qs.count(), 1)
self.assertEqual(qs[0].links__sum, l.id)
l.delete()
# Now if we don't have proper left join, we will not produce any
# results at all here.
# clear cached results
qs = qs.all()
self.assertEqual(qs.count(), 1)
# Note - 0 here would be a nicer result...
self.assertIs(qs[0].links__sum, None)
# Finally test that filtering works.
self.assertEqual(qs.filter(links__sum__isnull=True).count(), 1)
self.assertEqual(qs.filter(links__sum__isnull=False).count(), 0)
def test_filter_targets_related_pk(self):
HasLinkThing.objects.create()
hs2 = HasLinkThing.objects.create()
l = Link.objects.create(content_object=hs2)
self.assertNotEqual(l.object_id, l.pk)
self.assertQuerysetEqual(
HasLinkThing.objects.filter(links=l.pk),
[hs2], lambda x: x)
def test_editable_generic_rel(self):
GenericRelationForm = modelform_factory(HasLinkThing, fields='__all__')
form = GenericRelationForm()
self.assertIn('links', form.fields)
form = GenericRelationForm({'links': None})
self.assertTrue(form.is_valid())
form.save()
links = HasLinkThing._meta.get_field('links')
self.assertEqual(links.save_form_data_calls, 1)
def test_ticket_22998(self):
related = Related.objects.create()
content = Content.objects.create(related_obj=related)
Node.objects.create(content=content)
# deleting the Related cascades to the Content cascades to the Node,
# where the pre_delete signal should fire and prevent deletion.
with self.assertRaises(ProtectedError):
related.delete()
def test_ticket_22982(self):
place = Place.objects.create(name='My Place')
self.assertIn('GenericRelatedObjectManager', str(place.links))
| bsd-3-clause | 7,890,941,244,261,746,000 | 39.589065 | 92 | 0.607572 | false |
zerotired/kotori | kotori/daq/decoder/__init__.py | 1 | 1246 | # -*- coding: utf-8 -*-
# (c) 2019-2020 Andreas Motl <[email protected]>
from kotori.daq.decoder.airrohr import AirrohrDecoder
from kotori.daq.decoder.tasmota import TasmotaSensorDecoder, TasmotaStateDecoder
from kotori.daq.decoder.schema import MessageType
class DecoderInfo:
def __init__(self):
self.message_type = None
self.decoder = None
class DecoderManager:
def __init__(self, topology):
self.topology = topology
self.info = DecoderInfo()
def probe(self):
if 'slot' not in self.topology:
return False
# Airrohr
if self.topology.slot.endswith('airrohr.json'):
self.info.message_type = MessageType.DATA_CONTAINER
self.info.decoder = AirrohrDecoder
return True
# Tasmota Sensor
if self.topology.slot.endswith('SENSOR'):
self.info.message_type = MessageType.DATA_CONTAINER
self.info.decoder = TasmotaSensorDecoder
return True
# Tasmota State
if self.topology.slot.endswith('STATE'):
self.info.message_type = MessageType.DATA_CONTAINER
self.info.decoder = TasmotaStateDecoder
return True
return False
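# Illustrative probe (assumes ``topology`` is a dict-like object that also
# exposes attribute access, e.g. a Bunch/Munch carrying a ``slot`` entry):
#
#   dm = DecoderManager(topology)
#   if dm.probe():
#       decoder_class = dm.info.decoder
#       message_type = dm.info.message_type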
| agpl-3.0 | -397,541,410,420,240,700 | 27.318182 | 80 | 0.638042 | false |
tinyms/ArchiveX | tinyms/core/common.py | 1 | 14690 | __author__ = 'tinyms'
#coding=UTF8
import os
import sys
import re
import codecs
import hashlib
import json
#import urllib.request
#import urllib.parse
import time
import datetime
import decimal
import uuid
from imp import find_module, load_module, acquire_lock, release_lock
from tornado.template import Template
# import psycopg2
# import psycopg2.extras
#
#
# class Postgres():
# DATABASE_NAME = "postgres"
# USER_NAME = "postgres"
# PASSWORD = ""
#
# @staticmethod
# #Connect to Postgres Database
# def open():
# return psycopg2.connect(database=Postgres.DATABASE_NAME,
# user=Postgres.USER_NAME,
# password=Postgres.PASSWORD)
#
# @staticmethod
# def update(sql, params, return_col_name=None):
#
# """
# for Insert,Update,Delete
# :param sql:
# :param params:
# :param return_col_name: last insert row id etc.
# :return:
# """
# if return_col_name:
# sql += " RETURNING %s" % return_col_name
# cnn = None
# try:
# cnn = Postgres.open()
# cur = cnn.cursor()
# cur.execute(sql, params)
# if return_col_name:
# result = cur.fetchone()[0]
# else:
# result = True
# cnn.commit()
# except psycopg2.DatabaseError as e:
# print("Error %s" % e)
# cnn.rollback()
# result = False
# finally:
# if cnn:
# cnn.close()
#
# return result
#
# @staticmethod
# #Batch Insert,Update,Delete
# def update_many(sql, arr_params):
# try:
# cnn = Postgres.open()
# cur = cnn.cursor()
# cur.executemany(sql, arr_params)
# cnn.commit()
# except psycopg2.DatabaseError as e:
# print("Error %s" % e)
# finally:
# if cnn:
# cnn.close()
#
# @staticmethod
# #Query DataSet
# def many(sql, params=(), callback=None):
# dataset = list()
# cnn = None
# try:
# cnn = Postgres.open()
# cur = cnn.cursor(cursor_factory=psycopg2.extras.DictCursor)
# cur.execute(sql, params)
# rows = cur.fetchall()
# for row in rows:
# c = row.copy()
# if callback:
# callback(c)
# dataset.append(c)
# cur.close()
# except psycopg2.DatabaseError as e:
# print("Error %s" % e)
# finally:
# if cnn:
# cnn.close()
# return dataset
#
# @staticmethod
# #First Row Data
# def row(sql, params, callback=None):
# items = Postgres.many(sql, params, callback)
# if len(items) > 0:
# return items[0]
# return None
#
# @staticmethod
# #First Column Data
# def col(sql, params, callback=None):
# items = Postgres.many(sql, params, callback)
# cols = list()
# for item in items:
# values = [i for i in item.values()]
# if len(values) > 0:
# cols.append(values[0])
# return cols
#
# @staticmethod
# #First Row And First Column
# def one(sql, params=(), callback=None):
# first_col = Postgres.col(sql, params, callback)
# if len(first_col) > 0:
# return first_col[0]
# return None
#
# @staticmethod
# #Store Proc, Return Single Result
# def proc_one(name, params, callback=None):
# first_col = Postgres.proc_many(name, params, callback)
# if len(first_col) > 0:
# return first_col[0]
# return None
#
# @staticmethod
# #Store Proc, Return DataSet
# def proc_many(name, params, callback=None):
# dataset = list()
# cnn = None
# try:
# cnn = Postgres.open()
# cur = cnn.cursor(cursor_factory=psycopg2.extras.DictCursor)
# rows = cur.callproc(name, params)
# for row in rows:
# c = row.copy()
# if callback:
# callback(c)
# dataset.append(c)
# cur.close()
# except psycopg2.DatabaseError as e:
# print("Error %s" % e)
# finally:
# if cnn:
# cnn.close()
# return dataset
#
# @staticmethod
# #Return all cols name from current Query cursor
# def col_names(cur):
# names = list()
# for col in cur.description:
# names.append(col.name)
# return names
class JsonEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return float(o)
elif isinstance(o, datetime.date):
return Utils.format_datetime_short(o)
elif isinstance(o, datetime.datetime):
return Utils.format_datetime_short(o)
elif isinstance(o, datetime.time):
return Utils.format_time(o)
        return super(JsonEncoder, self).default(o)
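# Illustrative use of the encoder (values are placeholders): it converts the
# Decimal, date/datetime and time values that the stock json module rejects.
#
#   json.dumps({'price': decimal.Decimal('9.90'),
#               'when': datetime.datetime.now()}, cls=JsonEncoder)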
class Utils():
def __init__(self):
pass
@staticmethod
def text_read(f_name, join=True):
if not os.path.exists(f_name):
return ""
f = codecs.open(f_name, "r", "utf-8")
all_ = f.readlines()
f.close()
if join:
return "".join(all_)
        return all_
@staticmethod
def text_write(f_name, lines=list(), suffix="\n"):
f = codecs.open(f_name, "w+", "utf-8")
if isinstance(lines, list):
for line in lines:
f.write(line + suffix)
else:
f.write(lines)
f.write(suffix)
f.close()
# @staticmethod
# def url_with_params(url):
# r1 = urllib.parse.urlsplit(url)
# if r1.query != "":
# return True
# return False
@staticmethod
def trim(text):
return "".join(text.split())
@staticmethod
def uniq_index():
return uuid.uuid1()
@staticmethod
def render(tpl_text, context):
"""
render a template
:param tpl_text: template text
:param context: dict object
:return: str
"""
tpl = Template(tpl_text)
return tpl.generate(context)
@staticmethod
def md5(s):
h = hashlib.new('ripemd160')
h.update(bytearray(s.encode("utf8")))
return h.hexdigest()
@staticmethod
def current_datetime():
from datetime import datetime as tmp
return tmp.now()
@staticmethod
def mkdirs(path):
isexists = os.path.exists(path)
if not isexists:
os.makedirs(path)
return True
else:
return False
@staticmethod
def parse_int(text):
nums = Utils.parse_int_array(text)
if len(nums) > 0:
return int(nums[0])
return None
@staticmethod
def parse_int_array(text):
arr = list()
p = re.compile("[-]?\\d+", re.M)
nums = p.findall(text)
if len(nums) > 0:
arr = [int(s) for s in nums]
return arr
@staticmethod
def parse_time_text(text):
if not text:
return ""
p = re.compile("\\d{2}:\\d{2}")
dates = p.findall(text)
if len(dates) > 0:
return dates[0]
return ""
@staticmethod
def parse_time(text):
time_text = Utils.parse_time_text(text)
if not time_text:
return None
time_struct = time.strptime(time_text, "%H:%M")
return datetime.time(time_struct.tm_hour, time_struct.tm_min)
@staticmethod
def parse_date_text(text):
if not text:
return ""
p = re.compile("\\d{4}-\\d{2}-\\d{2}")
dates = p.findall(text)
if len(dates) > 0:
return dates[0]
return ""
@staticmethod
def parse_date(text):
date_text = Utils.parse_date_text(text)
if not date_text:
return None
from datetime import datetime
return datetime.strptime(date_text, "%Y-%m-%d").date()
@staticmethod
def parse_datetime_text(text):
if not text:
return ""
p = "\\d{4}-\\d{2}-\\d{2}\\s{1}\\d{2}:\\d{2}"
r = re.compile(p)
matchs = r.findall(text)
if len(matchs) > 0:
return matchs[0]
return ""
@staticmethod
def parse_datetime(text):
datetime_text = Utils.parse_datetime_text(text)
if not datetime_text:
return None
from datetime import datetime
return datetime.strptime(datetime_text, "%Y-%m-%d %H:%M")
@staticmethod
def parse_float(text):
floats = Utils.parse_float_array(text)
if len(floats) > 0:
return float(floats[0])
return None
@staticmethod
def parse_float_array(text):
p = re.compile("[-]?\\d+\\.\\d+", re.M)
return [float(s) for s in p.findall(text)]
@staticmethod
def parse_number_array(text):
"""
int or float
:param text:
:return:
"""
p = re.compile("[-]?\\d+[\\.]?[\\d]*", re.M)
return [float(s) for s in p.findall(text)]
@staticmethod
def encode(obj):
return json.dumps(obj, cls=JsonEncoder)
@staticmethod
def decode(text):
return json.loads(text)
# @staticmethod
# def download(url, save_path):
# try:
# f = urllib.request.urlopen(url, timeout=15)
# data = f.read()
# with open(save_path, "wb") as cache:
# cache.write(data)
# except urllib.error.URLError as ex:
# info = sys.exc_info()
# print(info[0], ":", info[1], ex)
@staticmethod
def matrix_reverse(arr):
"""
        Transpose a matrix (swap rows and columns),
        e.g. [[1, 2], [3, 4]] -> [[1, 3], [2, 4]].
        :param arr: two-dimensional list
        :return: transposed two-dimensional list
"""
return [[r[col] for r in arr] for col in range(len(arr[0]))]
@staticmethod
def combine_text_files(folder, target_file_name):
text = Utils.text_read(os.path.join(folder, "combine.list"))
cfg = json.loads(text)
for key in cfg.keys():
files = cfg[key]
if len(files) > 0:
combine_file = os.path.join(folder, target_file_name + "." + key)
if os.path.exists(combine_file):
os.remove(combine_file)
all_ = list()
for file_ in files:
path = os.path.join(folder, file_)
all_.append(Utils.text_read(path))
Utils.text_write(combine_file, all_)
pass
@staticmethod
def is_email(s):
p = r"[^@]+@[^@]+\.[^@]+"
if re.match(p, s):
return True
return False
@staticmethod
def email_account_name(s):
        # Match the part of the string before the "@"
p = r".*(?=@)"
r = re.compile(p)
matchs = r.findall(s)
if len(matchs) > 0:
return matchs[0]
return ""
@staticmethod
def format_year_month(date_obj):
if not date_obj:
return ""
return date_obj.strftime('%Y-%m')
@staticmethod
def format_datetime(date_obj):
if not date_obj:
return ""
return date_obj.strftime('%Y-%m-%d %H:%M:%S')
@staticmethod
def format_datetime_short(date_obj):
if not date_obj:
return ""
return date_obj.strftime('%Y-%m-%d %H:%M')
@staticmethod
def format_date(date_obj):
if not date_obj:
return ""
return date_obj.strftime('%Y-%m-%d')
@staticmethod
def format_time(datetime_obj):
if not datetime_obj:
return ""
if isinstance(datetime_obj, datetime.time):
curr_date = Utils.current_datetime()
dt = datetime.datetime.combine(curr_date, datetime_obj)
return dt.strftime('%H:%M')
elif isinstance(datetime_obj, datetime.datetime):
return datetime_obj.strftime('%H:%M')
return ""
class Plugin():
def __init__(self):
pass
ObjectPool = dict()
@staticmethod
def one(type_):
plugins = Plugin.get(type_)
if len(plugins) > 0:
return plugins[0]
return None
@staticmethod
def get(type_, class_full_name=""):
"""
get plugin class object instance
:param type_: extends plugin interface
:param class_full_name: class name with module name
:return: a object
"""
if not class_full_name:
return Plugin.ObjectPool.get(type_)
else:
arr = Plugin.ObjectPool.get(type_)
for t in arr:
name = "%s.%s" % (t.__class__.__module__, t.__class__.__name__)
if name.lower() == class_full_name.lower():
return t
@staticmethod
def load():
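        # Scans ./config for modules, (re)loads them with imp, and instantiates
        # every class named in a module-level __export__ list (e.g.
        # __export__ = ["MyConfig"], a hypothetical name), pooling instances
        # under their first base class.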
Plugin.ObjectPool.clear()
path = os.path.join(os.getcwd(), "config")
wid = os.walk(path)
plugins = []
print("Search config modules..")
for rootDir, pathList, fileList in wid:
if rootDir.find("__pycache__") != -1:
continue
for file_ in fileList:
if file_.find("__init__.py") != -1:
continue
#re \\.py[c]?$
if file_.endswith(".py") or file_.endswith(".pyc"):
plugins.append((os.path.splitext(file_)[0], rootDir))
print(plugins)
print("Instance all Config class.")
for (name, dir_) in plugins:
try:
acquire_lock()
file_, filename, desc = find_module(name, [dir_])
prev = sys.modules.get(name)
if prev:
del sys.modules[name]
module_ = load_module(name, file_, filename, desc)
finally:
release_lock()
if hasattr(module_, "__export__"):
attrs = [getattr(module_, x) for x in module_.__export__]
for attr in attrs:
parents = attr.__bases__
if len(parents) > 0:
parent = parents[0]
if not Plugin.ObjectPool.get(parent):
Plugin.ObjectPool[parent] = [attr()]
else:
Plugin.ObjectPool[parent].append(attr())
print("Config init completed.") | bsd-3-clause | -1,749,095,310,060,765,400 | 27.260116 | 81 | 0.50375 | false |
yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/servicebus/tests/latest/test_servicebus_subscription_commands.py | 3 | 4818 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# AZURE CLI SERVICEBUS - CRUD TEST DEFINITIONS
import time
from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer, live_only)
from knack.util import CLIError
# pylint: disable=line-too-long
# pylint: disable=too-many-lines
class SBSubscriptionCRUDScenarioTest(ScenarioTest):
from azure_devtools.scenario_tests import AllowLargeResponse
@AllowLargeResponse()
@ResourceGroupPreparer(name_prefix='cli_test_sb_subscription')
def test_sb_subscription(self, resource_group):
self.kwargs.update({
'namespacename': self.create_random_name(prefix='sb-nscli', length=20),
'tags': {'tag1: value1', 'tag2: value2'},
'sku': 'Standard',
'tier': 'Standard',
'authoname': self.create_random_name(prefix='cliAutho', length=20),
'defaultauthorizationrule': 'RootManageSharedAccessKey',
'accessrights': 'Send, Listen',
'primary': 'PrimaryKey',
'secondary': 'SecondaryKey',
'topicname': self.create_random_name(prefix='sb-topiccli', length=25),
'topicauthoname': self.create_random_name(prefix='cliTopicAutho', length=25),
'subscriptionname': self.create_random_name(prefix='sb-subscli', length=25),
'lockduration': 'PT4M',
'defaultmessagetimetolive': 'PT7M',
'autodeleteonidle': 'P9D',
'maxdelivery': '3',
'false': 'false',
'true': 'true'
})
# Create Namespace
self.cmd(
'servicebus namespace create --resource-group {rg} --name {namespacename} --tags {tags} --sku {sku}',
checks=[self.check('sku.name', '{sku}')])
# Get Created Namespace
self.cmd('servicebus namespace show --resource-group {rg} --name {namespacename}',
checks=[self.check('sku.name', '{sku}')])
# Create Topic
self.cmd('servicebus topic create --resource-group {rg} --namespace-name {namespacename} --name {topicname}',
checks=[self.check('name', '{topicname}')])
# Get Topic
self.cmd('servicebus topic show --resource-group {rg} --namespace-name {namespacename} --name {topicname}',
checks=[self.check('name', '{topicname}')])
# Create Subscription
self.cmd(
'servicebus topic subscription create --resource-group {rg} --namespace-name {namespacename} --topic-name {topicname} --name {subscriptionname}',
checks=[self.check('name', '{subscriptionname}')])
# Get Create Subscription
self.cmd(
'servicebus topic subscription show --resource-group {rg} --namespace-name {namespacename} --topic-name {topicname} --name {subscriptionname}',
checks=[self.check('name', '{subscriptionname}')])
        # Get list of Subscriptions
self.cmd(
'servicebus topic subscription list --resource-group {rg} --namespace-name {namespacename} --topic-name {topicname}')
# update Subscription
self.cmd(
'servicebus topic subscription update --resource-group {rg} --namespace-name {namespacename} --topic-name '
'{topicname} --name {subscriptionname} --max-delivery {maxdelivery} '
'--default-message-time-to-live {defaultmessagetimetolive} --dead-letter-on-filter-exceptions {false}'
' --enable-dead-lettering-on-message-expiration {false} --auto-delete-on-idle {autodeleteonidle}'
' --default-message-time-to-live {defaultmessagetimetolive} --lock-duration {lockduration}',
checks=[self.check('name', '{subscriptionname}'),
self.check('lockDuration', '0:04:00'),
self.check('maxDeliveryCount', '3'),
self.check('defaultMessageTimeToLive', '0:07:00'),
self.check('autoDeleteOnIdle', '9 days, 0:00:00'),
self.check('deadLetteringOnFilterEvaluationExceptions', 'False')])
# Delete Subscription
self.cmd(
'servicebus topic subscription delete --resource-group {rg} --namespace-name {namespacename} --topic-name {topicname} --name {subscriptionname}')
# Delete Topic
self.cmd('servicebus topic delete --resource-group {rg} --namespace-name {namespacename} --name {topicname}')
# Delete Namespace
self.cmd('servicebus namespace delete --resource-group {rg} --name {namespacename}')
| mit | 8,952,345,896,922,559,000 | 48.670103 | 157 | 0.601702 | false |
epaglier/Project-JARVIS | mycroft-core/mycroft/client/speech/hotword_factory.py | 1 | 5746 | # Copyright 2017 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
from mycroft.configuration import ConfigurationManager
from mycroft.util.log import getLogger
from os.path import dirname, exists, join, abspath
import os
import time
import tempfile
__author__ = 'seanfitz, jdorleans, jarbas'
LOG = getLogger("HotwordFactory")
RECOGNIZER_DIR = join(abspath(dirname(__file__)), "recognizer")
class HotWordEngine(object):
def __init__(self, key_phrase="hey mycroft", config=None, lang="en-us"):
self.lang = str(lang).lower()
self.key_phrase = str(key_phrase).lower()
# rough estimate 1 phoneme per 2 chars
self.num_phonemes = len(key_phrase) / 2 + 1
if config is None:
config = ConfigurationManager.get().get("hot_words", {})
config = config.get(self.key_phrase, {})
self.config = config
self.listener_config = ConfigurationManager.get().get("listener", {})
def found_wake_word(self, frame_data):
return False
class PocketsphinxHotWord(HotWordEngine):
def __init__(self, key_phrase="hey mycroft", config=None, lang="en-us"):
super(PocketsphinxHotWord, self).__init__(key_phrase, config, lang)
# Hotword module imports
from pocketsphinx import Decoder
# Hotword module config
module = self.config.get("module")
if module != "pocketsphinx":
LOG.warning(
str(module) + " module does not match with "
"Hotword class pocketsphinx")
# Hotword module params
self.phonemes = self.config.get("phonemes", "HH EY . M AY K R AO F T")
self.num_phonemes = len(self.phonemes.split())
self.threshold = self.config.get("threshold", 1e-90)
        self.sample_rate = self.listener_config.get("sample_rate", 16000)
dict_name = self.create_dict(key_phrase, self.phonemes)
config = self.create_config(dict_name, Decoder.default_config())
self.decoder = Decoder(config)
def create_dict(self, key_phrase, phonemes):
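        # Writes a temporary pocketsphinx dictionary with one "<word> <phonemes>"
        # line per word; for "hey mycroft" this is roughly "hey HH EY" and
        # "mycroft M AY K R AO F T" (whitespace from the '.' split is preserved).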
(fd, file_name) = tempfile.mkstemp()
words = key_phrase.split()
phoneme_groups = phonemes.split('.')
with os.fdopen(fd, 'w') as f:
for word, phoneme in zip(words, phoneme_groups):
f.write(word + ' ' + phoneme + '\n')
return file_name
def create_config(self, dict_name, config):
model_file = join(RECOGNIZER_DIR, 'model', self.lang, 'hmm')
if not exists(model_file):
LOG.error('PocketSphinx model not found at ' + str(model_file))
config.set_string('-hmm', model_file)
config.set_string('-dict', dict_name)
config.set_string('-keyphrase', self.key_phrase)
config.set_float('-kws_threshold', float(self.threshold))
config.set_float('-samprate', self.sample_rate)
config.set_int('-nfft', 2048)
config.set_string('-logfn', '/dev/null')
return config
def transcribe(self, byte_data, metrics=None):
start = time.time()
self.decoder.start_utt()
self.decoder.process_raw(byte_data, False, False)
self.decoder.end_utt()
if metrics:
metrics.timer("mycroft.stt.local.time_s", time.time() - start)
return self.decoder.hyp()
def found_wake_word(self, frame_data):
hyp = self.transcribe(frame_data)
return hyp and self.key_phrase in hyp.hypstr.lower()
class SnowboyHotWord(HotWordEngine):
def __init__(self, key_phrase="hey mycroft", config=None, lang="en-us"):
super(SnowboyHotWord, self).__init__(key_phrase, config, lang)
# Hotword module imports
from snowboydecoder import HotwordDetector
# Hotword module config
module = self.config.get("module")
if module != "snowboy":
LOG.warning(module + " module does not match with Hotword class "
"snowboy")
# Hotword params
models = self.config.get("models", {})
paths = []
for key in models:
paths.append(models[key])
sensitivity = self.config.get("sensitivity", 0.5)
self.snowboy = HotwordDetector(paths,
sensitivity=[sensitivity] * len(paths))
self.lang = str(lang).lower()
self.key_phrase = str(key_phrase).lower()
def found_wake_word(self, frame_data):
wake_word = self.snowboy.detector.RunDetection(frame_data)
return wake_word == 1
class HotWordFactory(object):
CLASSES = {
"pocketsphinx": PocketsphinxHotWord,
"snowboy": SnowboyHotWord
}
@staticmethod
def create_hotword(hotword="hey mycroft", config=None, lang="en-us"):
LOG.info("creating " + hotword)
if not config:
config = ConfigurationManager.get().get("hotwords", {})
module = config.get(hotword).get("module", "pocketsphinx")
config = config.get(hotword, {"module": module})
clazz = HotWordFactory.CLASSES.get(module)
return clazz(hotword, config, lang=lang)
| gpl-3.0 | 8,848,975,264,727,014,000 | 38.902778 | 78 | 0.630352 | false |
beetbox/beets | beetsplug/replaygain.py | 1 | 49627 | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Fabrice Laporte, Yevgeny Bezman, and Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
from __future__ import division, absolute_import, print_function
import collections
import enum
import math
import os
import signal
import six
import subprocess
import sys
import warnings
from multiprocessing.pool import ThreadPool, RUN
from six.moves import zip, queue
from threading import Thread, Event
from beets import ui
from beets.plugins import BeetsPlugin
from beets.util import (syspath, command_output, displayable_path,
py3_path, cpu_count)
# Utilities.
class ReplayGainError(Exception):
"""Raised when a local (to a track or an album) error occurs in one
of the backends.
"""
class FatalReplayGainError(Exception):
"""Raised when a fatal error occurs in one of the backends.
"""
class FatalGstreamerPluginReplayGainError(FatalReplayGainError):
"""Raised when a fatal error occurs in the GStreamerBackend when
loading the required plugins."""
def call(args, **kwargs):
"""Execute the command and return its output or raise a
ReplayGainError on failure.
"""
try:
return command_output(args, **kwargs)
except subprocess.CalledProcessError as e:
raise ReplayGainError(
u"{0} exited with status {1}".format(args[0], e.returncode)
)
except UnicodeEncodeError:
# Due to a bug in Python 2's subprocess on Windows, Unicode
# filenames can fail to encode on that platform. See:
# https://github.com/google-code-export/beets/issues/499
raise ReplayGainError(u"argument encoding failed")
def after_version(version_a, version_b):
return tuple(int(s) for s in version_a.split('.')) \
>= tuple(int(s) for s in version_b.split('.'))
def db_to_lufs(db):
"""Convert db to LUFS.
According to https://wiki.hydrogenaud.io/index.php?title=
ReplayGain_2.0_specification#Reference_level
"""
return db - 107
def lufs_to_db(db):
"""Convert LUFS to db.
According to https://wiki.hydrogenaud.io/index.php?title=
ReplayGain_2.0_specification#Reference_level
"""
return db + 107
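# Worked example: db_to_lufs(89) == -18 and lufs_to_db(-23) == 84, i.e. the
# 89 dB ReplayGain reference level corresponds to -18 LUFS under this offset.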
# Backend base and plumbing classes.
# gain: in LU to reference level
# peak: part of full scale (FS is 1.0)
Gain = collections.namedtuple("Gain", "gain peak")
# album_gain: Gain object
# track_gains: list of Gain objects
AlbumGain = collections.namedtuple("AlbumGain", "album_gain track_gains")
class Peak(enum.Enum):
none = 0
true = 1
sample = 2
class Backend(object):
"""An abstract class representing engine for calculating RG values.
"""
do_parallel = False
def __init__(self, config, log):
"""Initialize the backend with the configuration view for the
plugin.
"""
self._log = log
def compute_track_gain(self, items, target_level, peak):
"""Computes the track gain of the given tracks, returns a list
of Gain objects.
"""
raise NotImplementedError()
def compute_album_gain(self, items, target_level, peak):
"""Computes the album gain of the given album, returns an
AlbumGain object.
"""
raise NotImplementedError()
# ffmpeg backend
class FfmpegBackend(Backend):
"""A replaygain backend using ffmpeg's ebur128 filter.
"""
do_parallel = True
def __init__(self, config, log):
super(FfmpegBackend, self).__init__(config, log)
self._ffmpeg_path = "ffmpeg"
# check that ffmpeg is installed
try:
ffmpeg_version_out = call([self._ffmpeg_path, "-version"])
except OSError:
raise FatalReplayGainError(
u"could not find ffmpeg at {0}".format(self._ffmpeg_path)
)
incompatible_ffmpeg = True
for line in ffmpeg_version_out.stdout.splitlines():
if line.startswith(b"configuration:"):
if b"--enable-libebur128" in line:
incompatible_ffmpeg = False
if line.startswith(b"libavfilter"):
version = line.split(b" ", 1)[1].split(b"/", 1)[0].split(b".")
version = tuple(map(int, version))
if version >= (6, 67, 100):
incompatible_ffmpeg = False
if incompatible_ffmpeg:
raise FatalReplayGainError(
u"Installed FFmpeg version does not support ReplayGain."
u"calculation. Either libavfilter version 6.67.100 or above or"
u"the --enable-libebur128 configuration option is required."
)
def compute_track_gain(self, items, target_level, peak):
"""Computes the track gain of the given tracks, returns a list
of Gain objects (the track gains).
"""
gains = []
for item in items:
gains.append(
self._analyse_item(
item,
target_level,
peak,
count_blocks=False,
)[0] # take only the gain, discarding number of gating blocks
)
return gains
def compute_album_gain(self, items, target_level, peak):
"""Computes the album gain of the given album, returns an
AlbumGain object.
"""
target_level_lufs = db_to_lufs(target_level)
# analyse tracks
# list of track Gain objects
track_gains = []
# maximum peak
album_peak = 0
# sum of BS.1770 gating block powers
sum_powers = 0
# total number of BS.1770 gating blocks
n_blocks = 0
for item in items:
track_gain, track_n_blocks = self._analyse_item(
item, target_level, peak
)
track_gains.append(track_gain)
# album peak is maximum track peak
album_peak = max(album_peak, track_gain.peak)
# prepare album_gain calculation
# total number of blocks is sum of track blocks
n_blocks += track_n_blocks
# convert `LU to target_level` -> LUFS
track_loudness = target_level_lufs - track_gain.gain
# This reverses ITU-R BS.1770-4 p. 6 equation (5) to convert
# from loudness to power. The result is the average gating
# block power.
track_power = 10**((track_loudness + 0.691) / 10)
# Weight that average power by the number of gating blocks to
# get the sum of all their powers. Add that to the sum of all
# block powers in this album.
sum_powers += track_power * track_n_blocks
# calculate album gain
if n_blocks > 0:
# compare ITU-R BS.1770-4 p. 6 equation (5)
# Album gain is the replaygain of the concatenation of all tracks.
album_gain = -0.691 + 10 * math.log10(sum_powers / n_blocks)
else:
album_gain = -70
# convert LUFS -> `LU to target_level`
album_gain = target_level_lufs - album_gain
self._log.debug(
u"{0}: gain {1} LU, peak {2}"
.format(items, album_gain, album_peak)
)
return AlbumGain(Gain(album_gain, album_peak), track_gains)
def _construct_cmd(self, item, peak_method):
"""Construct the shell command to analyse items."""
return [
self._ffmpeg_path,
"-nostats",
"-hide_banner",
"-i",
item.path,
"-map",
"a:0",
"-filter",
"ebur128=peak={0}".format(peak_method),
"-f",
"null",
"-",
]
def _analyse_item(self, item, target_level, peak, count_blocks=True):
"""Analyse item. Return a pair of a Gain object and the number
of gating blocks above the threshold.
If `count_blocks` is False, the number of gating blocks returned
will be 0.
"""
target_level_lufs = db_to_lufs(target_level)
peak_method = peak.name
# call ffmpeg
self._log.debug(u"analyzing {0}".format(item))
cmd = self._construct_cmd(item, peak_method)
self._log.debug(
u'executing {0}', u' '.join(map(displayable_path, cmd))
)
output = call(cmd).stderr.splitlines()
# parse output
if peak == Peak.none:
peak = 0
else:
line_peak = self._find_line(
output,
" {0} peak:".format(peak_method.capitalize()).encode(),
start_line=len(output) - 1, step_size=-1,
)
peak = self._parse_float(
output[self._find_line(
output, b" Peak:",
line_peak,
)]
)
# convert TPFS -> part of FS
peak = 10**(peak / 20)
line_integrated_loudness = self._find_line(
output, b" Integrated loudness:",
start_line=len(output) - 1, step_size=-1,
)
gain = self._parse_float(
output[self._find_line(
output, b" I:",
line_integrated_loudness,
)]
)
# convert LUFS -> LU from target level
gain = target_level_lufs - gain
# count BS.1770 gating blocks
n_blocks = 0
if count_blocks:
gating_threshold = self._parse_float(
output[self._find_line(
output, b" Threshold:",
start_line=line_integrated_loudness,
)]
)
for line in output:
if not line.startswith(b"[Parsed_ebur128"):
continue
if line.endswith(b"Summary:"):
continue
line = line.split(b"M:", 1)
if len(line) < 2:
continue
if self._parse_float(b"M: " + line[1]) >= gating_threshold:
n_blocks += 1
self._log.debug(
u"{0}: {1} blocks over {2} LUFS"
.format(item, n_blocks, gating_threshold)
)
self._log.debug(
u"{0}: gain {1} LU, peak {2}"
.format(item, gain, peak)
)
return Gain(gain, peak), n_blocks
def _find_line(self, output, search, start_line=0, step_size=1):
"""Return index of line beginning with `search`.
Begins searching at index `start_line` in `output`.
"""
end_index = len(output) if step_size > 0 else -1
for i in range(start_line, end_index, step_size):
if output[i].startswith(search):
return i
raise ReplayGainError(
u"ffmpeg output: missing {0} after line {1}"
.format(repr(search), start_line)
)
def _parse_float(self, line):
"""Extract a float from a key value pair in `line`.
This format is expected: /[^:]:[[:space:]]*value.*/, where `value` is
the float.
"""
# extract value
value = line.split(b":", 1)
if len(value) < 2:
raise ReplayGainError(
u"ffmpeg output: expected key value pair, found {0}"
.format(line)
)
value = value[1].lstrip()
# strip unit
value = value.split(b" ", 1)[0]
# cast value to float
try:
return float(value)
except ValueError:
raise ReplayGainError(
u"ffmpeg output: expected float value, found {0}"
.format(value)
)
# mpgain/aacgain CLI tool backend.
class CommandBackend(Backend):
do_parallel = True
def __init__(self, config, log):
super(CommandBackend, self).__init__(config, log)
config.add({
'command': u"",
'noclip': True,
})
self.command = config["command"].as_str()
if self.command:
# Explicit executable path.
if not os.path.isfile(self.command):
raise FatalReplayGainError(
u'replaygain command does not exist: {0}'.format(
self.command)
)
else:
# Check whether the program is in $PATH.
for cmd in ('mp3gain', 'aacgain'):
try:
call([cmd, '-v'])
self.command = cmd
except OSError:
pass
if not self.command:
raise FatalReplayGainError(
u'no replaygain command found: install mp3gain or aacgain'
)
self.noclip = config['noclip'].get(bool)
def compute_track_gain(self, items, target_level, peak):
"""Computes the track gain of the given tracks, returns a list
of TrackGain objects.
"""
supported_items = list(filter(self.format_supported, items))
output = self.compute_gain(supported_items, target_level, False)
return output
def compute_album_gain(self, items, target_level, peak):
"""Computes the album gain of the given album, returns an
AlbumGain object.
"""
# TODO: What should be done when not all tracks in the album are
# supported?
supported_items = list(filter(self.format_supported, items))
if len(supported_items) != len(items):
self._log.debug(u'tracks are of unsupported format')
return AlbumGain(None, [])
output = self.compute_gain(supported_items, target_level, True)
return AlbumGain(output[-1], output[:-1])
def format_supported(self, item):
"""Checks whether the given item is supported by the selected tool.
"""
if 'mp3gain' in self.command and item.format != 'MP3':
return False
elif 'aacgain' in self.command and item.format not in ('MP3', 'AAC'):
return False
return True
def compute_gain(self, items, target_level, is_album):
"""Computes the track or album gain of a list of items, returns
a list of TrackGain objects.
When computing album gain, the last TrackGain object returned is
the album gain
"""
if len(items) == 0:
self._log.debug(u'no supported tracks to analyze')
return []
"""Compute ReplayGain values and return a list of results
dictionaries as given by `parse_tool_output`.
"""
# Construct shell command. The "-o" option makes the output
# easily parseable (tab-delimited). "-s s" forces gain
# recalculation even if tags are already present and disables
# tag-writing; this turns the mp3gain/aacgain tool into a gain
# calculator rather than a tag manipulator because we take care
# of changing tags ourselves.
cmd = [self.command, '-o', '-s', 's']
if self.noclip:
# Adjust to avoid clipping.
cmd = cmd + ['-k']
else:
# Disable clipping warning.
cmd = cmd + ['-c']
cmd = cmd + ['-d', str(int(target_level - 89))]
cmd = cmd + [syspath(i.path) for i in items]
self._log.debug(u'analyzing {0} files', len(items))
self._log.debug(u"executing {0}", " ".join(map(displayable_path, cmd)))
output = call(cmd).stdout
self._log.debug(u'analysis finished')
return self.parse_tool_output(output,
len(items) + (1 if is_album else 0))
def parse_tool_output(self, text, num_lines):
"""Given the tab-delimited output from an invocation of mp3gain
or aacgain, parse the text and return a list of dictionaries
containing information about each analyzed file.
"""
out = []
for line in text.split(b'\n')[1:num_lines + 1]:
parts = line.split(b'\t')
if len(parts) != 6 or parts[0] == b'File':
self._log.debug(u'bad tool output: {0}', text)
raise ReplayGainError(u'mp3gain failed')
d = {
'file': parts[0],
'mp3gain': int(parts[1]),
'gain': float(parts[2]),
'peak': float(parts[3]) / (1 << 15),
'maxgain': int(parts[4]),
'mingain': int(parts[5]),
}
out.append(Gain(d['gain'], d['peak']))
return out
# GStreamer-based backend.
class GStreamerBackend(Backend):
def __init__(self, config, log):
super(GStreamerBackend, self).__init__(config, log)
self._import_gst()
# Initialized a GStreamer pipeline of the form filesrc ->
# decodebin -> audioconvert -> audioresample -> rganalysis ->
# fakesink The connection between decodebin and audioconvert is
# handled dynamically after decodebin figures out the type of
# the input file.
self._src = self.Gst.ElementFactory.make("filesrc", "src")
self._decbin = self.Gst.ElementFactory.make("decodebin", "decbin")
self._conv = self.Gst.ElementFactory.make("audioconvert", "conv")
self._res = self.Gst.ElementFactory.make("audioresample", "res")
self._rg = self.Gst.ElementFactory.make("rganalysis", "rg")
if self._src is None or self._decbin is None or self._conv is None \
or self._res is None or self._rg is None:
raise FatalGstreamerPluginReplayGainError(
u"Failed to load required GStreamer plugins"
)
# We check which files need gain ourselves, so all files given
        # to rganalysis should have their gain computed, even if it
# already exists.
self._rg.set_property("forced", True)
self._sink = self.Gst.ElementFactory.make("fakesink", "sink")
self._pipe = self.Gst.Pipeline()
self._pipe.add(self._src)
self._pipe.add(self._decbin)
self._pipe.add(self._conv)
self._pipe.add(self._res)
self._pipe.add(self._rg)
self._pipe.add(self._sink)
self._src.link(self._decbin)
self._conv.link(self._res)
self._res.link(self._rg)
self._rg.link(self._sink)
self._bus = self._pipe.get_bus()
self._bus.add_signal_watch()
self._bus.connect("message::eos", self._on_eos)
self._bus.connect("message::error", self._on_error)
self._bus.connect("message::tag", self._on_tag)
# Needed for handling the dynamic connection between decodebin
# and audioconvert
self._decbin.connect("pad-added", self._on_pad_added)
self._decbin.connect("pad-removed", self._on_pad_removed)
self._main_loop = self.GLib.MainLoop()
self._files = []
def _import_gst(self):
"""Import the necessary GObject-related modules and assign `Gst`
and `GObject` fields on this object.
"""
try:
import gi
except ImportError:
raise FatalReplayGainError(
u"Failed to load GStreamer: python-gi not found"
)
try:
gi.require_version('Gst', '1.0')
except ValueError as e:
raise FatalReplayGainError(
u"Failed to load GStreamer 1.0: {0}".format(e)
)
from gi.repository import GObject, Gst, GLib
# Calling GObject.threads_init() is not needed for
# PyGObject 3.10.2+
with warnings.catch_warnings():
warnings.simplefilter("ignore")
GObject.threads_init()
Gst.init([sys.argv[0]])
self.GObject = GObject
self.GLib = GLib
self.Gst = Gst
def compute(self, files, target_level, album):
self._error = None
self._files = list(files)
if len(self._files) == 0:
return
self._file_tags = collections.defaultdict(dict)
self._rg.set_property("reference-level", target_level)
if album:
self._rg.set_property("num-tracks", len(self._files))
if self._set_first_file():
self._main_loop.run()
if self._error is not None:
raise self._error
def compute_track_gain(self, items, target_level, peak):
self.compute(items, target_level, False)
if len(self._file_tags) != len(items):
raise ReplayGainError(u"Some tracks did not receive tags")
ret = []
for item in items:
ret.append(Gain(self._file_tags[item]["TRACK_GAIN"],
self._file_tags[item]["TRACK_PEAK"]))
return ret
def compute_album_gain(self, items, target_level, peak):
items = list(items)
self.compute(items, target_level, True)
if len(self._file_tags) != len(items):
raise ReplayGainError(u"Some items in album did not receive tags")
# Collect track gains.
track_gains = []
for item in items:
try:
gain = self._file_tags[item]["TRACK_GAIN"]
peak = self._file_tags[item]["TRACK_PEAK"]
except KeyError:
raise ReplayGainError(u"results missing for track")
track_gains.append(Gain(gain, peak))
# Get album gain information from the last track.
last_tags = self._file_tags[items[-1]]
try:
gain = last_tags["ALBUM_GAIN"]
peak = last_tags["ALBUM_PEAK"]
except KeyError:
raise ReplayGainError(u"results missing for album")
return AlbumGain(Gain(gain, peak), track_gains)
def close(self):
self._bus.remove_signal_watch()
def _on_eos(self, bus, message):
# A file finished playing in all elements of the pipeline. The
# RG tags have already been propagated. If we don't have a next
# file, we stop processing.
if not self._set_next_file():
self._pipe.set_state(self.Gst.State.NULL)
self._main_loop.quit()
def _on_error(self, bus, message):
self._pipe.set_state(self.Gst.State.NULL)
self._main_loop.quit()
err, debug = message.parse_error()
f = self._src.get_property("location")
# A GStreamer error, either an unsupported format or a bug.
self._error = ReplayGainError(
u"Error {0!r} - {1!r} on file {2!r}".format(err, debug, f)
)
def _on_tag(self, bus, message):
tags = message.parse_tag()
def handle_tag(taglist, tag, userdata):
            # The rganalysis element provides both the existing tags for
            # files and the newly computed tags. To make sure we store the
            # computed tags, we overwrite the RG values received a second
            # time.
if tag == self.Gst.TAG_TRACK_GAIN:
self._file_tags[self._file]["TRACK_GAIN"] = \
taglist.get_double(tag)[1]
elif tag == self.Gst.TAG_TRACK_PEAK:
self._file_tags[self._file]["TRACK_PEAK"] = \
taglist.get_double(tag)[1]
elif tag == self.Gst.TAG_ALBUM_GAIN:
self._file_tags[self._file]["ALBUM_GAIN"] = \
taglist.get_double(tag)[1]
elif tag == self.Gst.TAG_ALBUM_PEAK:
self._file_tags[self._file]["ALBUM_PEAK"] = \
taglist.get_double(tag)[1]
elif tag == self.Gst.TAG_REFERENCE_LEVEL:
self._file_tags[self._file]["REFERENCE_LEVEL"] = \
taglist.get_double(tag)[1]
tags.foreach(handle_tag, None)
def _set_first_file(self):
if len(self._files) == 0:
return False
self._file = self._files.pop(0)
self._pipe.set_state(self.Gst.State.NULL)
self._src.set_property("location", py3_path(syspath(self._file.path)))
self._pipe.set_state(self.Gst.State.PLAYING)
return True
def _set_file(self):
"""Initialize the filesrc element with the next file to be analyzed.
"""
# No more files, we're done
if len(self._files) == 0:
return False
self._file = self._files.pop(0)
# Ensure the filesrc element received the paused state of the
# pipeline in a blocking manner
self._src.sync_state_with_parent()
self._src.get_state(self.Gst.CLOCK_TIME_NONE)
# Ensure the decodebin element receives the paused state of the
# pipeline in a blocking manner
self._decbin.sync_state_with_parent()
self._decbin.get_state(self.Gst.CLOCK_TIME_NONE)
# Disconnect the decodebin element from the pipeline, set its
        # state to READY to clear it.
self._decbin.unlink(self._conv)
self._decbin.set_state(self.Gst.State.READY)
# Set a new file on the filesrc element, can only be done in the
# READY state
self._src.set_state(self.Gst.State.READY)
self._src.set_property("location", py3_path(syspath(self._file.path)))
self._decbin.link(self._conv)
self._pipe.set_state(self.Gst.State.READY)
return True
def _set_next_file(self):
"""Set the next file to be analyzed while keeping the pipeline
in the PAUSED state so that the rganalysis element can correctly
handle album gain.
"""
# A blocking pause
self._pipe.set_state(self.Gst.State.PAUSED)
self._pipe.get_state(self.Gst.CLOCK_TIME_NONE)
# Try setting the next file
ret = self._set_file()
if ret:
# Seek to the beginning in order to clear the EOS state of the
# various elements of the pipeline
self._pipe.seek_simple(self.Gst.Format.TIME,
self.Gst.SeekFlags.FLUSH,
0)
self._pipe.set_state(self.Gst.State.PLAYING)
return ret
def _on_pad_added(self, decbin, pad):
sink_pad = self._conv.get_compatible_pad(pad, None)
assert(sink_pad is not None)
pad.link(sink_pad)
def _on_pad_removed(self, decbin, pad):
# Called when the decodebin element is disconnected from the
# rest of the pipeline while switching input files
peer = pad.get_peer()
assert(peer is None)
class AudioToolsBackend(Backend):
"""ReplayGain backend that uses `Python Audio Tools
<http://audiotools.sourceforge.net/>`_ and its capabilities to read more
    file formats and compute ReplayGain values using its replaygain module.
"""
def __init__(self, config, log):
super(AudioToolsBackend, self).__init__(config, log)
self._import_audiotools()
def _import_audiotools(self):
"""Check whether it's possible to import the necessary modules.
There is no check on the file formats at runtime.
:raises :exc:`ReplayGainError`: if the modules cannot be imported
"""
try:
import audiotools
import audiotools.replaygain
except ImportError:
raise FatalReplayGainError(
u"Failed to load audiotools: audiotools not found"
)
self._mod_audiotools = audiotools
self._mod_replaygain = audiotools.replaygain
def open_audio_file(self, item):
"""Open the file to read the PCM stream from the using
``item.path``.
:return: the audiofile instance
:rtype: :class:`audiotools.AudioFile`
:raises :exc:`ReplayGainError`: if the file is not found or the
file format is not supported
"""
try:
audiofile = self._mod_audiotools.open(py3_path(syspath(item.path)))
except IOError:
raise ReplayGainError(
u"File {} was not found".format(item.path)
)
except self._mod_audiotools.UnsupportedFile:
raise ReplayGainError(
u"Unsupported file type {}".format(item.format)
)
return audiofile
def init_replaygain(self, audiofile, item):
"""Return an initialized :class:`audiotools.replaygain.ReplayGain`
instance, which requires the sample rate of the song(s) on which
        the ReplayGain values will be computed. The item is passed so that
        its stored sample rate can be logged if the rate turns out invalid.
        :return: initialized replaygain object
:rtype: :class:`audiotools.replaygain.ReplayGain`
:raises: :exc:`ReplayGainError` if the sample rate is invalid
"""
try:
rg = self._mod_replaygain.ReplayGain(audiofile.sample_rate())
except ValueError:
raise ReplayGainError(
u"Unsupported sample rate {}".format(item.samplerate))
        return rg
def compute_track_gain(self, items, target_level, peak):
"""Compute ReplayGain values for the requested items.
:return list: list of :class:`Gain` objects
"""
return [self._compute_track_gain(item, target_level) for item in items]
def _with_target_level(self, gain, target_level):
"""Return `gain` relative to `target_level`.
Assumes `gain` is relative to 89 db.
"""
return gain + (target_level - 89)
def _title_gain(self, rg, audiofile, target_level):
"""Get the gain result pair from PyAudioTools using the `ReplayGain`
instance `rg` for the given `audiofile`.
Wraps `rg.title_gain(audiofile.to_pcm())` and throws a
`ReplayGainError` when the library fails.
"""
try:
# The method needs an audiotools.PCMReader instance that can
# be obtained from an audiofile instance.
gain, peak = rg.title_gain(audiofile.to_pcm())
except ValueError as exc:
# `audiotools.replaygain` can raise a `ValueError` if the sample
# rate is incorrect.
self._log.debug(u'error in rg.title_gain() call: {}', exc)
raise ReplayGainError(u'audiotools audio data error')
return self._with_target_level(gain, target_level), peak
def _compute_track_gain(self, item, target_level):
"""Compute ReplayGain value for the requested item.
:rtype: :class:`Gain`
"""
audiofile = self.open_audio_file(item)
rg = self.init_replaygain(audiofile, item)
# Each call to title_gain on a ReplayGain object returns peak and gain
# of the track.
rg_track_gain, rg_track_peak = self._title_gain(
rg, audiofile, target_level
)
self._log.debug(u'ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}',
item.artist, item.title, rg_track_gain, rg_track_peak)
return Gain(gain=rg_track_gain, peak=rg_track_peak)
def compute_album_gain(self, items, target_level, peak):
"""Compute ReplayGain values for the requested album and its items.
:rtype: :class:`AlbumGain`
"""
# The first item is taken and opened to get the sample rate to
# initialize the replaygain object. The object is used for all the
# tracks in the album to get the album values.
item = list(items)[0]
audiofile = self.open_audio_file(item)
rg = self.init_replaygain(audiofile, item)
track_gains = []
for item in items:
audiofile = self.open_audio_file(item)
rg_track_gain, rg_track_peak = self._title_gain(
rg, audiofile, target_level
)
track_gains.append(
Gain(gain=rg_track_gain, peak=rg_track_peak)
)
self._log.debug(u'ReplayGain for track {0}: {1:.2f}, {2:.2f}',
item, rg_track_gain, rg_track_peak)
# After getting the values for all tracks, it's possible to get the
# album values.
rg_album_gain, rg_album_peak = rg.album_gain()
rg_album_gain = self._with_target_level(rg_album_gain, target_level)
self._log.debug(u'ReplayGain for album {0}: {1:.2f}, {2:.2f}',
items[0].album, rg_album_gain, rg_album_peak)
return AlbumGain(
Gain(gain=rg_album_gain, peak=rg_album_peak),
track_gains=track_gains
)
class ExceptionWatcher(Thread):
"""Monitors a queue for exceptions asynchronously.
Once an exception occurs, raise it and execute a callback.
"""
def __init__(self, queue, callback):
self._queue = queue
self._callback = callback
self._stopevent = Event()
Thread.__init__(self)
def run(self):
while not self._stopevent.is_set():
try:
exc = self._queue.get_nowait()
self._callback()
six.reraise(exc[0], exc[1], exc[2])
except queue.Empty:
# No exceptions yet, loop back to check
# whether `_stopevent` is set
pass
def join(self, timeout=None):
self._stopevent.set()
Thread.join(self, timeout)
# Main plugin logic.
class ReplayGainPlugin(BeetsPlugin):
"""Provides ReplayGain analysis.
"""
backends = {
"command": CommandBackend,
"gstreamer": GStreamerBackend,
"audiotools": AudioToolsBackend,
"ffmpeg": FfmpegBackend,
}
peak_methods = {
"true": Peak.true,
"sample": Peak.sample,
}
def __init__(self):
super(ReplayGainPlugin, self).__init__()
# default backend is 'command' for backward-compatibility.
self.config.add({
'overwrite': False,
'auto': True,
'backend': u'command',
'threads': cpu_count(),
'parallel_on_import': False,
'per_disc': False,
'peak': 'true',
'targetlevel': 89,
'r128': ['Opus'],
'r128_targetlevel': lufs_to_db(-23),
})
self.overwrite = self.config['overwrite'].get(bool)
self.per_disc = self.config['per_disc'].get(bool)
# Remember which backend is used for CLI feedback
self.backend_name = self.config['backend'].as_str()
if self.backend_name not in self.backends:
raise ui.UserError(
u"Selected ReplayGain backend {0} is not supported. "
u"Please select one of: {1}".format(
self.backend_name,
u', '.join(self.backends.keys())
)
)
peak_method = self.config["peak"].as_str()
if peak_method not in self.peak_methods:
raise ui.UserError(
u"Selected ReplayGain peak method {0} is not supported. "
u"Please select one of: {1}".format(
peak_method,
u', '.join(self.peak_methods.keys())
)
)
self._peak_method = self.peak_methods[peak_method]
# On-import analysis.
if self.config['auto']:
self.register_listener('import_begin', self.import_begin)
self.register_listener('import', self.import_end)
self.import_stages = [self.imported]
# Formats to use R128.
self.r128_whitelist = self.config['r128'].as_str_seq()
try:
self.backend_instance = self.backends[self.backend_name](
self.config, self._log
)
except (ReplayGainError, FatalReplayGainError) as e:
raise ui.UserError(
u'replaygain initialization failed: {0}'.format(e))
def should_use_r128(self, item):
"""Checks the plugin setting to decide whether the calculation
should be done using the EBU R128 standard and use R128_ tags instead.
"""
return item.format in self.r128_whitelist
def track_requires_gain(self, item):
return self.overwrite or \
(self.should_use_r128(item) and not item.r128_track_gain) or \
(not self.should_use_r128(item) and
(not item.rg_track_gain or not item.rg_track_peak))
def album_requires_gain(self, album):
# Skip calculating gain only when *all* files don't need
# recalculation. This way, if any file among an album's tracks
# needs recalculation, we still get an accurate album gain
# value.
return self.overwrite or \
any([self.should_use_r128(item) and
(not item.r128_track_gain or not item.r128_album_gain)
for item in album.items()]) or \
any([not self.should_use_r128(item) and
(not item.rg_album_gain or not item.rg_album_peak)
for item in album.items()])
def store_track_gain(self, item, track_gain):
item.rg_track_gain = track_gain.gain
item.rg_track_peak = track_gain.peak
item.store()
self._log.debug(u'applied track gain {0} LU, peak {1} of FS',
item.rg_track_gain, item.rg_track_peak)
def store_album_gain(self, item, album_gain):
item.rg_album_gain = album_gain.gain
item.rg_album_peak = album_gain.peak
item.store()
self._log.debug(u'applied album gain {0} LU, peak {1} of FS',
item.rg_album_gain, item.rg_album_peak)
def store_track_r128_gain(self, item, track_gain):
item.r128_track_gain = track_gain.gain
item.store()
self._log.debug(u'applied r128 track gain {0} LU',
item.r128_track_gain)
def store_album_r128_gain(self, item, album_gain):
item.r128_album_gain = album_gain.gain
item.store()
self._log.debug(u'applied r128 album gain {0} LU',
item.r128_album_gain)
def tag_specific_values(self, items):
"""Return some tag specific values.
Returns a tuple (store_track_gain, store_album_gain, target_level,
peak_method).
"""
if any([self.should_use_r128(item) for item in items]):
store_track_gain = self.store_track_r128_gain
store_album_gain = self.store_album_r128_gain
target_level = self.config['r128_targetlevel'].as_number()
peak = Peak.none # R128_* tags do not store the track/album peak
else:
store_track_gain = self.store_track_gain
store_album_gain = self.store_album_gain
target_level = self.config['targetlevel'].as_number()
peak = self._peak_method
return store_track_gain, store_album_gain, target_level, peak
def handle_album(self, album, write, force=False):
"""Compute album and track replay gain store it in all of the
album's items.
If ``write`` is truthy then ``item.write()`` is called for each
item. If replay gain information is already present in all
items, nothing is done.
"""
if not force and not self.album_requires_gain(album):
self._log.info(u'Skipping album {0}', album)
return
if (any([self.should_use_r128(item) for item in album.items()]) and not
all(([self.should_use_r128(item) for item in album.items()]))):
self._log.error(
u"Cannot calculate gain for album {0} (incompatible formats)",
album)
return
self._log.info(u'analyzing {0}', album)
tag_vals = self.tag_specific_values(album.items())
store_track_gain, store_album_gain, target_level, peak = tag_vals
discs = {}
if self.per_disc:
for item in album.items():
if discs.get(item.disc) is None:
discs[item.disc] = []
discs[item.disc].append(item)
else:
discs[1] = album.items()
for discnumber, items in discs.items():
def _store_album(album_gain):
if not album_gain or not album_gain.album_gain \
or len(album_gain.track_gains) != len(items):
# In some cases, backends fail to produce a valid
# `album_gain` without throwing FatalReplayGainError
# => raise non-fatal exception & continue
raise ReplayGainError(
u"ReplayGain backend `{}` failed "
u"for some tracks in album {}"
.format(self.backend_name, album)
)
for item, track_gain in zip(items,
album_gain.track_gains):
store_track_gain(item, track_gain)
store_album_gain(item, album_gain.album_gain)
if write:
item.try_write()
self._log.debug(u'done analyzing {0}', item)
try:
self._apply(
self.backend_instance.compute_album_gain, args=(),
kwds={
"items": list(items),
"target_level": target_level,
"peak": peak
},
callback=_store_album
)
except ReplayGainError as e:
self._log.info(u"ReplayGain error: {0}", e)
except FatalReplayGainError as e:
raise ui.UserError(
u"Fatal replay gain error: {0}".format(e))
def handle_track(self, item, write, force=False):
"""Compute track replay gain and store it in the item.
If ``write`` is truthy then ``item.write()`` is called to write
the data to disk. If replay gain information is already present
in the item, nothing is done.
"""
if not force and not self.track_requires_gain(item):
self._log.info(u'Skipping track {0}', item)
return
tag_vals = self.tag_specific_values([item])
store_track_gain, store_album_gain, target_level, peak = tag_vals
def _store_track(track_gains):
if not track_gains or len(track_gains) != 1:
# In some cases, backends fail to produce a valid
# `track_gains` without throwing FatalReplayGainError
# => raise non-fatal exception & continue
raise ReplayGainError(
u"ReplayGain backend `{}` failed for track {}"
.format(self.backend_name, item)
)
store_track_gain(item, track_gains[0])
if write:
item.try_write()
self._log.debug(u'done analyzing {0}', item)
try:
self._apply(
self.backend_instance.compute_track_gain, args=(),
kwds={
"items": [item],
"target_level": target_level,
"peak": peak,
},
callback=_store_track
)
except ReplayGainError as e:
self._log.info(u"ReplayGain error: {0}", e)
except FatalReplayGainError as e:
raise ui.UserError(u"Fatal replay gain error: {0}".format(e))
def _has_pool(self):
"""Check whether a `ThreadPool` is running instance in `self.pool`
"""
if hasattr(self, 'pool'):
if isinstance(self.pool, ThreadPool) and self.pool._state == RUN:
return True
return False
def open_pool(self, threads):
"""Open a `ThreadPool` instance in `self.pool`
"""
if not self._has_pool() and self.backend_instance.do_parallel:
self.pool = ThreadPool(threads)
self.exc_queue = queue.Queue()
signal.signal(signal.SIGINT, self._interrupt)
self.exc_watcher = ExceptionWatcher(
self.exc_queue, # threads push exceptions here
self.terminate_pool # abort once an exception occurs
)
self.exc_watcher.start()
def _apply(self, func, args, kwds, callback):
if self._has_pool():
def catch_exc(func, exc_queue, log):
"""Wrapper to catch raised exceptions in threads
"""
def wfunc(*args, **kwargs):
try:
return func(*args, **kwargs)
except ReplayGainError as e:
log.info(e.args[0]) # log non-fatal exceptions
except Exception:
exc_queue.put(sys.exc_info())
return wfunc
# Wrap function and callback to catch exceptions
func = catch_exc(func, self.exc_queue, self._log)
callback = catch_exc(callback, self.exc_queue, self._log)
self.pool.apply_async(func, args, kwds, callback)
else:
callback(func(*args, **kwds))
def terminate_pool(self):
"""Terminate the `ThreadPool` instance in `self.pool`
(e.g. stop execution in case of exception)
"""
        # Don't call self._has_pool() here,
# self.pool._state may not be == RUN
if hasattr(self, 'pool') and isinstance(self.pool, ThreadPool):
self.pool.terminate()
self.pool.join()
# self.exc_watcher.join()
def _interrupt(self, signal, frame):
try:
self._log.info('interrupted')
self.terminate_pool()
sys.exit(0)
except SystemExit:
# Silence raised SystemExit ~ exit(0)
pass
def close_pool(self):
"""Close the `ThreadPool` instance in `self.pool` (if there is one)
"""
if self._has_pool():
self.pool.close()
self.pool.join()
self.exc_watcher.join()
def import_begin(self, session):
"""Handle `import_begin` event -> open pool
"""
threads = self.config['threads'].get(int)
if self.config['parallel_on_import'] \
and self.config['auto'] \
and threads:
self.open_pool(threads)
def import_end(self, paths):
"""Handle `import` event -> close pool
"""
self.close_pool()
def imported(self, session, task):
"""Add replay gain info to items or albums of ``task``.
"""
if self.config['auto']:
if task.is_album:
self.handle_album(task.album, False)
else:
self.handle_track(task.item, False)
def command_func(self, lib, opts, args):
try:
write = ui.should_write(opts.write)
force = opts.force
# Bypass self.open_pool() if called with `--threads 0`
if opts.threads != 0:
threads = opts.threads or self.config['threads'].get(int)
self.open_pool(threads)
if opts.album:
albums = lib.albums(ui.decargs(args))
self._log.info(
"Analyzing {} albums ~ {} backend..."
.format(len(albums), self.backend_name)
)
for album in albums:
self.handle_album(album, write, force)
else:
items = lib.items(ui.decargs(args))
self._log.info(
"Analyzing {} tracks ~ {} backend..."
.format(len(items), self.backend_name)
)
for item in items:
self.handle_track(item, write, force)
self.close_pool()
except (SystemExit, KeyboardInterrupt):
# Silence interrupt exceptions
pass
def commands(self):
"""Return the "replaygain" ui subcommand.
"""
cmd = ui.Subcommand('replaygain', help=u'analyze for ReplayGain')
cmd.parser.add_album_option()
cmd.parser.add_option(
"-t", "--threads", dest="threads", type=int,
help=u'change the number of threads, \
defaults to maximum available processors'
)
cmd.parser.add_option(
"-f", "--force", dest="force", action="store_true", default=False,
help=u"analyze all files, including those that "
"already have ReplayGain metadata")
cmd.parser.add_option(
"-w", "--write", default=None, action="store_true",
help=u"write new metadata to files' tags")
cmd.parser.add_option(
"-W", "--nowrite", dest="write", action="store_false",
help=u"don't write metadata (opposite of -w)")
cmd.func = self.command_func
return [cmd]
| mit | -4,354,630,148,471,183,000 | 35.197666 | 79 | 0.558587 | false |
perfidia/regexpgen | tests/Date.py | 1 | 10848 | '''
Created on Mar 16, 2012
@author: Bartosz Alchimowicz
'''
import unittest
import regexpgen
import re
class Test(unittest.TestCase):
def testDefault(self):
regexp = regexpgen.date("%Y")
self.assertTrue(re.match(regexp, "1990"))
self.assertTrue(re.match(regexp, "2099"))
self.assertTrue(re.match(regexp, "1970"))
self.assertTrue(re.match(regexp, "1983"))
self.assertTrue(re.match(regexp, "2012"))
self.assertFalse(re.match(regexp, "1"))
self.assertFalse(re.match(regexp, "33"))
self.assertFalse(re.match(regexp, "0024"))
self.assertFalse(re.match(regexp, "99"))
self.assertFalse(re.match(regexp, "-17"))
self.assertFalse(re.match(regexp, "2100"))
self.assertFalse(re.match(regexp, "1969"))
regexp = regexpgen.date("%y")
self.assertTrue(re.match(regexp, "90"))
self.assertTrue(re.match(regexp, "99"))
self.assertTrue(re.match(regexp, "70"))
self.assertTrue(re.match(regexp, "83"))
self.assertTrue(re.match(regexp, "02"))
self.assertFalse(re.match(regexp, "1"))
self.assertFalse(re.match(regexp, "335"))
self.assertFalse(re.match(regexp, "0024"))
self.assertFalse(re.match(regexp, "9"))
self.assertFalse(re.match(regexp, "-17"))
self.assertFalse(re.match(regexp, "1ss"))
regexp = regexpgen.date("%m")
self.assertTrue(re.match(regexp, "12"))
self.assertTrue(re.match(regexp, "01"))
self.assertTrue(re.match(regexp, "11"))
self.assertTrue(re.match(regexp, "09"))
self.assertFalse(re.match(regexp, "1"))
self.assertFalse(re.match(regexp, "335"))
self.assertFalse(re.match(regexp, "13"))
self.assertFalse(re.match(regexp, "00"))
self.assertFalse(re.match(regexp, "-17"))
self.assertFalse(re.match(regexp, "1s"))
regexp = regexpgen.date("%d")
self.assertTrue(re.match(regexp, "12"))
self.assertTrue(re.match(regexp, "01"))
self.assertTrue(re.match(regexp, "31"))
self.assertTrue(re.match(regexp, "28"))
self.assertTrue(re.match(regexp, "09"))
self.assertFalse(re.match(regexp, "1"))
self.assertFalse(re.match(regexp, "335"))
self.assertFalse(re.match(regexp, "99"))
self.assertFalse(re.match(regexp, "00"))
self.assertFalse(re.match(regexp, "-17"))
self.assertFalse(re.match(regexp, "1ss"))
regexp = regexpgen.date("%d-%m")
self.assertTrue(re.match(regexp, "12-12"))
self.assertTrue(re.match(regexp, "01-01"))
self.assertTrue(re.match(regexp, "31-12"))
self.assertTrue(re.match(regexp, "28-02"))
self.assertTrue(re.match(regexp, "09-09"))
self.assertFalse(re.match(regexp, "1-10"))
self.assertFalse(re.match(regexp, "31-02"))
self.assertFalse(re.match(regexp, "99-92"))
self.assertFalse(re.match(regexp, "00-00"))
self.assertFalse(re.match(regexp, "-17-00"))
self.assertFalse(re.match(regexp, "1ss"))
regexp = regexpgen.date("%Y-%m")
self.assertTrue(re.match(regexp, "2012-12"))
self.assertTrue(re.match(regexp, "2001-01"))
self.assertTrue(re.match(regexp, "1991-12"))
self.assertTrue(re.match(regexp, "2050-02"))
self.assertTrue(re.match(regexp, "1999-09"))
self.assertFalse(re.match(regexp, "1955-10"))
self.assertFalse(re.match(regexp, "31-02"))
self.assertFalse(re.match(regexp, "3099-92"))
self.assertFalse(re.match(regexp, "0000-00"))
self.assertFalse(re.match(regexp, "-1700-00"))
self.assertFalse(re.match(regexp, "1sss-ss"))
regexp = regexpgen.date("%Y-%m-%d")
self.assertTrue(re.match(regexp, "2089-01-12"))
self.assertTrue(re.match(regexp, "2087-12-13"))
self.assertTrue(re.match(regexp, "2090-02-28"))
self.assertTrue(re.match(regexp, "2088-09-30"))
self.assertFalse(re.match(regexp, "1955-10-00"))
self.assertFalse(re.match(regexp, "31-02-04"))
self.assertFalse(re.match(regexp, "3099-92-19"))
self.assertFalse(re.match(regexp, "0000-00-00"))
self.assertFalse(re.match(regexp, "-1700-00-21"))
self.assertFalse(re.match(regexp, "1sss-ss-45"))
def testForMin(self):
regexp = regexpgen.date("%Y", "1990")
self.assertTrue(re.match(regexp, "1990"))
self.assertTrue(re.match(regexp, "2099"))
self.assertTrue(re.match(regexp, "1997"))
self.assertFalse(re.match(regexp, "1989"))
self.assertFalse(re.match(regexp, "1988"))
self.assertFalse(re.match(regexp, "0024"))
self.assertFalse(re.match(regexp, "1969"))
regexp = regexpgen.date("%y" ,"85")
self.assertTrue(re.match(regexp, "99"))
self.assertTrue(re.match(regexp, "88"))
self.assertTrue(re.match(regexp, "85"))
self.assertTrue(re.match(regexp, "91"))
self.assertFalse(re.match(regexp, "01"))
self.assertFalse(re.match(regexp, "00"))
self.assertFalse(re.match(regexp, "84"))
self.assertFalse(re.match(regexp, "55"))
regexp = regexpgen.date("%m", "06")
self.assertTrue(re.match(regexp, "12"))
self.assertTrue(re.match(regexp, "06"))
self.assertTrue(re.match(regexp, "08"))
self.assertTrue(re.match(regexp, "09"))
self.assertFalse(re.match(regexp, "01"))
self.assertFalse(re.match(regexp, "05"))
self.assertFalse(re.match(regexp, "13"))
self.assertFalse(re.match(regexp, "04"))
regexp = regexpgen.date("%d", "13")
self.assertTrue(re.match(regexp, "13"))
self.assertTrue(re.match(regexp, "14"))
self.assertTrue(re.match(regexp, "31"))
self.assertTrue(re.match(regexp, "28"))
self.assertFalse(re.match(regexp, "01"))
self.assertFalse(re.match(regexp, "12"))
self.assertFalse(re.match(regexp, "99"))
self.assertFalse(re.match(regexp, "00"))
regexp = regexpgen.date("%Y-%m-%d", "2072-12-01")
self.assertTrue(re.match(regexp, "2072-12-01"))
self.assertTrue(re.match(regexp, "2083-01-12"))
self.assertTrue(re.match(regexp, "2090-02-28"))
self.assertTrue(re.match(regexp, "2099-09-30"))
self.assertFalse(re.match(regexp, "1972-12-01"))
self.assertFalse(re.match(regexp, "2012-11-01"))
self.assertFalse(re.match(regexp, "1995-10-01"))
self.assertFalse(re.match(regexp, "1955-10-01"))
def testForMax(self):
regexp = regexpgen.date("%Y", None, "1990")
self.assertFalse(re.match(regexp, "1991"))
self.assertFalse(re.match(regexp, "2099"))
self.assertFalse(re.match(regexp, "1997"))
self.assertTrue(re.match(regexp, "1989"))
self.assertTrue(re.match(regexp, "1990"))
self.assertTrue(re.match(regexp, "1971"))
regexp = regexpgen.date("%y" , None, "85")
self.assertFalse(re.match(regexp, "99"))
self.assertFalse(re.match(regexp, "88"))
self.assertFalse(re.match(regexp, "86"))
self.assertFalse(re.match(regexp, "91"))
self.assertTrue(re.match(regexp, "01"))
self.assertTrue(re.match(regexp, "85"))
self.assertTrue(re.match(regexp, "84"))
self.assertTrue(re.match(regexp, "55"))
regexp = regexpgen.date("%m", None, "06")
self.assertFalse(re.match(regexp, "12"))
self.assertFalse(re.match(regexp, "07"))
self.assertFalse(re.match(regexp, "08"))
self.assertFalse(re.match(regexp, "09"))
self.assertTrue(re.match(regexp, "01"))
self.assertTrue(re.match(regexp, "05"))
self.assertTrue(re.match(regexp, "06"))
self.assertTrue(re.match(regexp, "04"))
regexp = regexpgen.date("%d", None, "13")
self.assertFalse(re.match(regexp, "14"))
self.assertFalse(re.match(regexp, "15"))
self.assertFalse(re.match(regexp, "31"))
self.assertFalse(re.match(regexp, "28"))
self.assertTrue(re.match(regexp, "01"))
self.assertTrue(re.match(regexp, "12"))
self.assertTrue(re.match(regexp, "13"))
self.assertTrue(re.match(regexp, "07"))
regexp = regexpgen.date("%Y-%m-%d", None, "1980-12-01")
self.assertFalse(re.match(regexp, "2072-12-01"))
self.assertFalse(re.match(regexp, "2083-01-12"))
self.assertFalse(re.match(regexp, "2090-02-28"))
self.assertFalse(re.match(regexp, "1980-12-02"))
self.assertTrue(re.match(regexp, "1980-12-01"))
self.assertTrue(re.match(regexp, "1980-11-02"))
self.assertTrue(re.match(regexp, "1975-10-05"))
self.assertTrue(re.match(regexp, "1977-10-21"))
self.assertTrue(re.match(regexp, "1976-02-29"))
self.assertFalse(re.match(regexp, "1977-02-29"))
self.assertTrue(re.match(regexp, "1980-02-29"))
def testForMinMax(self):
regexp = regexpgen.date("%Y", "1990", "2000")
self.assertTrue(re.match(regexp, "1990"))
self.assertTrue(re.match(regexp, "2000"))
self.assertTrue(re.match(regexp, "1997"))
self.assertFalse(re.match(regexp, "1989"))
self.assertFalse(re.match(regexp, "1988"))
self.assertFalse(re.match(regexp, "2001"))
self.assertFalse(re.match(regexp, "2011"))
regexp = regexpgen.date("%y" ,"85", "95")
self.assertTrue(re.match(regexp, "95"))
self.assertTrue(re.match(regexp, "88"))
self.assertTrue(re.match(regexp, "85"))
self.assertTrue(re.match(regexp, "91"))
self.assertFalse(re.match(regexp, "01"))
self.assertFalse(re.match(regexp, "84"))
self.assertFalse(re.match(regexp, "84"))
self.assertFalse(re.match(regexp, "99"))
regexp = regexpgen.date("%m", "06", "10")
self.assertTrue(re.match(regexp, "10"))
self.assertTrue(re.match(regexp, "06"))
self.assertTrue(re.match(regexp, "08"))
self.assertTrue(re.match(regexp, "09"))
self.assertFalse(re.match(regexp, "11"))
self.assertFalse(re.match(regexp, "05"))
self.assertFalse(re.match(regexp, "13"))
self.assertFalse(re.match(regexp, "04"))
regexp = regexpgen.date("%d", "13", "20")
self.assertTrue(re.match(regexp, "13"))
self.assertTrue(re.match(regexp, "14"))
self.assertTrue(re.match(regexp, "20"))
self.assertTrue(re.match(regexp, "15"))
self.assertFalse(re.match(regexp, "21"))
self.assertFalse(re.match(regexp, "12"))
self.assertFalse(re.match(regexp, "99"))
self.assertFalse(re.match(regexp, "00"))
regexp = regexpgen.date("%Y-%m-%d", "2072-12-01", "2085-12-01")
self.assertTrue(re.match(regexp, "2072-12-01"))
self.assertTrue(re.match(regexp, "2083-01-12"))
self.assertTrue(re.match(regexp, "2073-02-28"))
self.assertTrue(re.match(regexp, "2085-12-01"))
self.assertFalse(re.match(regexp, "2085-12-02"))
self.assertFalse(re.match(regexp, "2072-11-30"))
self.assertFalse(re.match(regexp, "1995-10-01"))
self.assertFalse(re.match(regexp, "1955-10-01"))
def testForWrongFormat(self):
self.assertRaises(ValueError, regexpgen.date, "%wd %ay")
self.assertRaises(ValueError,regexpgen.date, "%Y:%y")
self.assertRaises(ValueError,regexpgen.date, "%y:%d")
self.assertRaises(ValueError,regexpgen.date, "%Y:%d")
self.assertRaises(ValueError,regexpgen.date, "%P")
def testForWrongInput(self):
self.assertRaises(ValueError,regexpgen.time, "%d:%m", "01:00", "00:00")
self.assertRaises(ValueError,regexpgen.time, "%Y-%m", "99-03", "1998-03")
self.assertRaises(ValueError,regexpgen.time, "%m-%d", "13-02", "02-02")
self.assertRaises(ValueError,regexpgen.time, "%m", "12", "02")
self.assertRaises(ValueError,regexpgen.time, "%d", "00", "100")
self.assertRaises(ValueError,regexpgen.time, "%Y/%m/%d", "1990-02/02", "1992/03-03")
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| mit | 2,060,216,852,325,057,500 | 33.00627 | 86 | 0.683721 | false |
YantaiGao/learn_Python_The-Hard-Way | No38_ListOp.py | 1 | 1059 | # -*- coding:utf-8 -*-
# Note how a class is declared:
class Thing(object):
    # self is required here; omitting it raises an error
def test(self,hi):
print hi
a = Thing()  # this creates an instance of the class
a.test("hahaha")
print "---------------------------------"
test_things = "Apple Orange Crows Telephone Light Suger"
print "There is not 10 things in that list,let's fix it."
stuff = test_things.split(' ')
more_stuff = ["Mon","Tues","Wed","Thris","Fir","Sat","Sun","MOON"]
while len(stuff)!=10:
    # Note: pop() removes items from the end of the list, so the last element comes out first
next = more_stuff.pop()
print "Adding ", next
    # append() adds the element to the end of the list
stuff.append(next)
print "There are %d elements in list " %len(stuff)
print "Here we go: ",stuff
# Note: indexing starts at 0!!!
print stuff[1]
# Note: -1 is the last element; negative indices start at -1 and count backwards from the end
print "stuff[-1] == ",stuff[-1]
print "stuff[-2] == ",stuff[-2]
print stuff.pop()
# Note: this does not modify the actual elements of the list
print ' '.join(stuff)
# stuff[3:5] is a slice, similar to range()
print '#'.join(stuff[3:5])
print stuff | gpl-3.0 | -114,587,832,266,722,030 | 18.545455 | 66 | 0.630966 | false |
jr-garcia/Engendro3D | e3d/model_management/interpolation.py | 1 | 1562 | # import numpy
# from cycgkit.cgtypes import vec3, quat
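# Note (added comment): for a given channel ('p' = position, 's' = scale, anything
# else = rotation), getClosest looks up the key values immediately before and after
# 'time' in sortedKeys and returns them together with the interpolation factor
# between their key times; the second returned value is None when 'time' is not
# bracketed by two keys.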
def getClosest(keys, time, chrid, sortedKeys):
def getfrom(keys1, time, ch):
try:
if ch == 'p':
return keys1[time].position
elif ch == 's':
return keys1[time].scale
else:
return keys1[time].rotation
except KeyError:
return None
a = None
b = None
a1 = -1
b1 = -1
for i in range(len(keys) - 1, -1, -1):
if sortedKeys[i] < time:
a = getfrom(keys, sortedKeys[i], chrid)
a1 = sortedKeys[i]
break
for j in range(len(keys)):
if sortedKeys[j] > time:
b = getfrom(keys, sortedKeys[j], chrid)
b1 = sortedKeys[j]
break
if a is None:
if b is not None:
return b, None, time
else:
return getfrom(keys, 0, chrid), None, time
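    # t is the normalized position of 'time' between the surrounding key times a1
    # and b1 (algebraically the same as (time - a1) / (b1 - a1)).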
t = 1.0 - ((b1 - time) / (b1 - a1))
return a, b, t
# -----------
# http://keithmaggio.wordpress.com/2011/02/15/math-magician-lerp-slerp-and-nlerp/
def Lerp(percent, start, end):
return start + (percent * (end - start))
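# Illustrative use (not from the original source), combining getClosest and Lerp
# for a position channel:
#   a, b, t = getClosest(keys, time, 'p', sortedKeys)
#   value = a if b is None else Lerp(t, a, b)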
# def Nlerp(percent, start, end):
# res = Lerp(percent, start, end)
# if res.shape[0] == 3:
# return numpy.array(vec3(res).normalize())
# else:
# na = numpy.zeros(shape=(4,))
# tres = quat(res).normalize()
# # na = res
# na[0] = tres.w
# na[1] = tres.x
# na[2] = tres.y
# na[3] = tres.z
# return na | mit | -1,189,140,796,025,778,700 | 23.809524 | 80 | 0.493598 | false |
NetApp/manila | manila/tests/share/drivers/emc/plugins/vnx/test_object_manager.py | 1 | 125573 | # Copyright (c) 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import ddt
from lxml import builder
import mock
from oslo_concurrency import processutils
from manila.common import constants as const
from manila import exception
from manila.share.drivers.emc.plugins.vnx import connector
from manila.share.drivers.emc.plugins.vnx import constants
from manila.share.drivers.emc.plugins.vnx import object_manager as manager
from manila.share.drivers.emc.plugins.vnx import xml_api_parser as parser
from manila import test
from manila.tests.share.drivers.emc.plugins.vnx import fakes
from manila.tests.share.drivers.emc.plugins.vnx import utils
class StorageObjectManagerTestCase(test.TestCase):
@mock.patch.object(connector, "XMLAPIConnector", mock.Mock())
@mock.patch.object(connector, "SSHConnector", mock.Mock())
def setUp(self):
super(StorageObjectManagerTestCase, self).setUp()
emd_share_driver = fakes.FakeEMCShareDriver()
self.manager = manager.StorageObjectManager(
emd_share_driver.configuration)
def test_get_storage_context(self):
type_map = {
'FileSystem': manager.FileSystem,
'StoragePool': manager.StoragePool,
'MountPoint': manager.MountPoint,
'Mover': manager.Mover,
'VDM': manager.VDM,
'Snapshot': manager.Snapshot,
'MoverInterface': manager.MoverInterface,
'DNSDomain': manager.DNSDomain,
'CIFSServer': manager.CIFSServer,
'CIFSShare': manager.CIFSShare,
'NFSShare': manager.NFSShare,
}
for key, value in type_map.items():
self.assertTrue(
isinstance(self.manager.getStorageContext(key), value))
for key in self.manager.context.keys():
self.assertTrue(key in type_map)
def test_get_storage_context_invalid_type(self):
fake_type = 'fake_type'
self.assertRaises(exception.EMCVnxXMLAPIError,
self.manager.getStorageContext,
fake_type)
class StorageObjectTestCaseBase(test.TestCase):
@mock.patch.object(connector, "XMLAPIConnector", mock.Mock())
@mock.patch.object(connector, "SSHConnector", mock.Mock())
def setUp(self):
super(StorageObjectTestCaseBase, self).setUp()
emd_share_driver = fakes.FakeEMCShareDriver()
self.manager = manager.StorageObjectManager(
emd_share_driver.configuration)
self.base = fakes.StorageObjectTestData()
self.pool = fakes.PoolTestData()
self.vdm = fakes.VDMTestData()
self.mover = fakes.MoverTestData()
self.fs = fakes.FileSystemTestData()
self.mount = fakes.MountPointTestData()
self.snap = fakes.SnapshotTestData()
self.cifs_share = fakes.CIFSShareTestData()
self.nfs_share = fakes.NFSShareTestData()
self.cifs_server = fakes.CIFSServerTestData()
self.dns = fakes.DNSDomainTestData()
class StorageObjectTestCase(StorageObjectTestCaseBase):
def test_xml_api_retry(self):
hook = utils.RequestSideEffect()
hook.append(self.base.resp_need_retry())
hook.append(self.base.resp_task_succeed())
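        # The first queued response asks the client to retry; the second succeeds,
        # so _send_request is expected to send the same request twice.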
elt_maker = builder.ElementMaker(nsmap={None: constants.XML_NAMESPACE})
xml_parser = parser.XMLAPIParser()
storage_object = manager.StorageObject(self.manager.connectors,
elt_maker, xml_parser,
self.manager)
storage_object.conn['XML'].request = utils.EMCMock(side_effect=hook)
fake_req = storage_object._build_task_package(
elt_maker.StartFake(name='foo')
)
resp = storage_object._send_request(fake_req)
self.assertEqual('ok', resp['maxSeverity'])
expected_calls = [
mock.call(self.base.req_fake_start_task()),
mock.call(self.base.req_fake_start_task())
]
storage_object.conn['XML'].request.assert_has_calls(expected_calls)
class FileSystemTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
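        # The hooks queue canned XML-API/SSH responses; the mocked connectors
        # return them in order, one per call.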
self.hook = utils.RequestSideEffect()
self.ssh_hook = utils.SSHSideEffect()
def test_create_file_system_on_vdm(self):
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.fs.resp_task_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.fs.filesystem_name,
size=self.fs.filesystem_size,
pool_name=self.pool.pool_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.fs.req_create_on_vdm()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_file_system_on_mover(self):
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.fs.resp_task_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.fs.filesystem_name,
size=self.fs.filesystem_size,
pool_name=self.pool.pool_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.fs.req_create_on_mover()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_file_system_but_already_exist(self):
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.fs.resp_create_but_already_exist())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.fs.filesystem_name,
size=self.fs.filesystem_size,
pool_name=self.pool.pool_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.fs.req_create_on_vdm()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_file_system_invalid_mover_id(self, sleep_mock):
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.fs.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.fs.resp_task_succeed())
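        # An invalid-mover-id response should make the driver refresh the mover
        # reference and retry the create request once (after sleeping).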
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.fs.filesystem_name,
size=self.fs.filesystem_size,
pool_name=self.pool.pool_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.fs.req_create_on_mover()),
mock.call(self.mover.req_get_ref()),
mock.call(self.fs.req_create_on_mover()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_file_system_with_error(self):
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.fs.resp_task_error())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
name=self.fs.filesystem_name,
size=self.fs.filesystem_size,
pool_name=self.pool.pool_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.fs.req_create_on_vdm()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_file_system(self):
self.hook.append(self.fs.resp_get_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.fs.filesystem_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.fs.filesystem_name, context.filesystem_map)
property_map = [
'name',
'pools_id',
'volume_id',
'size',
'id',
'type',
'dataServicePolicies',
]
for prop in property_map:
self.assertIn(prop, out)
id = context.get_id(self.fs.filesystem_name)
self.assertEqual(self.fs.filesystem_id, id)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_file_system_but_not_found(self):
self.hook.append(self.fs.resp_get_but_not_found())
self.hook.append(self.fs.resp_get_without_value())
self.hook.append(self.fs.resp_get_error())
self.hook.append(self.fs.resp_get_but_not_found())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.fs.filesystem_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
status, out = context.get(self.fs.filesystem_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
status, out = context.get(self.fs.filesystem_name)
self.assertEqual(constants.STATUS_ERROR, status)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get_id,
self.fs.filesystem_name)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.fs.req_get()),
mock.call(self.fs.req_get()),
mock.call(self.fs.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_file_system_but_miss_property(self):
self.hook.append(self.fs.resp_get_but_miss_property())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.fs.filesystem_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.fs.filesystem_name, context.filesystem_map)
property_map = [
'name',
'pools_id',
'volume_id',
'size',
'id',
'type',
'dataServicePolicies',
]
for prop in property_map:
self.assertIn(prop, out)
self.assertIsNone(out['dataServicePolicies'])
id = context.get_id(self.fs.filesystem_name)
self.assertEqual(self.fs.filesystem_id, id)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_file_system(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.fs.resp_task_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.fs.filesystem_name)
self.assertNotIn(self.fs.filesystem_name, context.filesystem_map)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.fs.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertNotIn(self.fs.filesystem_name, context.filesystem_map)
def test_delete_file_system_but_not_found(self):
self.hook.append(self.fs.resp_get_but_not_found())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.fs.filesystem_name)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_file_system_but_get_file_system_error(self):
self.hook.append(self.fs.resp_get_error())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.fs.filesystem_name)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_file_system_with_error(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.fs.resp_delete_but_failed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.fs.filesystem_name)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.fs.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertIn(self.fs.filesystem_name, context.filesystem_map)
def test_extend_file_system(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.fs.resp_task_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.extend(name=self.fs.filesystem_name,
pool_name=self.pool.pool_name,
new_size=self.fs.filesystem_new_size)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.pool.req_get()),
mock.call(self.fs.req_extend()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_extend_file_system_but_not_found(self):
self.hook.append(self.fs.resp_get_but_not_found())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.extend,
name=self.fs.filesystem_name,
pool_name=self.fs.pool_name,
new_size=self.fs.filesystem_new_size)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_extend_file_system_with_small_size(self):
self.hook.append(self.fs.resp_get_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.extend,
name=self.fs.filesystem_name,
pool_name=self.pool.pool_name,
new_size=1)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_extend_file_system_with_same_size(self):
self.hook.append(self.fs.resp_get_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.extend(name=self.fs.filesystem_name,
pool_name=self.pool.pool_name,
new_size=self.fs.filesystem_size)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_extend_file_system_with_error(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.fs.resp_extend_but_error())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.extend,
name=self.fs.filesystem_name,
pool_name=self.pool.pool_name,
new_size=self.fs.filesystem_new_size)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.pool.req_get()),
mock.call(self.fs.req_extend()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_filesystem_from_snapshot(self):
self.ssh_hook.append()
self.ssh_hook.append()
self.ssh_hook.append(self.fs.output_copy_ckpt)
self.ssh_hook.append(self.fs.output_info())
self.ssh_hook.append()
self.ssh_hook.append()
self.ssh_hook.append()
context = self.manager.getStorageContext('FileSystem')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.create_from_snapshot(self.fs.filesystem_name,
self.snap.src_snap_name,
self.fs.src_fileystems_name,
self.pool.pool_name,
self.vdm.vdm_name,
self.mover.interconnect_id,)
ssh_calls = [
mock.call(self.fs.cmd_create_from_ckpt(), False),
mock.call(self.mount.cmd_server_mount('ro'), False),
mock.call(self.fs.cmd_copy_ckpt(), True),
mock.call(self.fs.cmd_nas_fs_info(), False),
mock.call(self.mount.cmd_server_umount(), False),
mock.call(self.fs.cmd_delete(), False),
mock.call(self.mount.cmd_server_mount('rw'), False),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_create_filesystem_from_snapshot_with_error(self):
self.ssh_hook.append()
self.ssh_hook.append()
self.ssh_hook.append(ex=processutils.ProcessExecutionError(
stdout=self.fs.fake_output, stderr=None))
self.ssh_hook.append(self.fs.output_info())
self.ssh_hook.append()
self.ssh_hook.append()
self.ssh_hook.append()
context = self.manager.getStorageContext('FileSystem')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.create_from_snapshot(
self.fs.filesystem_name,
self.snap.src_snap_name,
self.fs.src_fileystems_name,
self.pool.pool_name,
self.vdm.vdm_name,
self.mover.interconnect_id, )
ssh_calls = [
mock.call(self.fs.cmd_create_from_ckpt(), False),
mock.call(self.mount.cmd_server_mount('ro'), False),
mock.call(self.fs.cmd_copy_ckpt(), True),
mock.call(self.fs.cmd_nas_fs_info(), False),
mock.call(self.mount.cmd_server_umount(), False),
mock.call(self.fs.cmd_delete(), False),
mock.call(self.mount.cmd_server_mount('rw'), False),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
class MountPointTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_create_mount_point_on_vdm(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mount_path=self.mount.path,
fs_name=self.fs.filesystem_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_create(self.vdm.vdm_id, True)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_mount_point_on_mover(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mount_path=self.mount.path,
fs_name=self.fs.filesystem_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_create(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_mount_point_but_already_exist(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_create_but_already_exist())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mount_path=self.mount.path,
fs_name=self.fs.filesystem_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_create(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_mount_point_invalid_mover_id(self, sleep_mock):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mount_path=self.mount.path,
fs_name=self.fs.filesystem_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_create(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_create(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_mount_point_with_error(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_task_error())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
mount_path=self.mount.path,
fs_name=self.fs.filesystem_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_create(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_mount_point_on_vdm(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mount_path=self.mount.path,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_delete(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_mount_point_on_mover(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mount_path=self.mount.path,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_delete(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_mount_point_but_nonexistent(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_delete_but_nonexistent())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mount_path=self.mount.path,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_delete(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_delete_mount_point_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mount_path=self.mount.path,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_delete(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_delete(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_delete_mount_point_with_error(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_task_error())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
mount_path=self.mount.path,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_delete(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mount_points(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_get_succeed(self.vdm.vdm_id))
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_get_succeed(self.mover.mover_id,
False))
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_OK, status)
property_map = [
'path',
'mover',
'moverIdIsVdm',
'fileSystem',
]
for item in out:
for prop in property_map:
self.assertIn(prop, item)
status, out = context.get(self.mover.mover_name, False)
self.assertEqual(constants.STATUS_OK, status)
property_map = [
'path',
'mover',
'moverIdIsVdm',
'fileSystem',
]
for item in out:
for prop in property_map:
self.assertIn(prop, item)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_get(self.vdm.vdm_id)),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mount_points_but_not_found(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_get_without_value())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.mover.mover_name, False)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_get_mount_points_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_get_succeed(self.mover.mover_id,
False))
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.mover.mover_name, False)
self.assertEqual(constants.STATUS_OK, status)
property_map = [
'path',
'mover',
'moverIdIsVdm',
'fileSystem',
]
for item in out:
for prop in property_map:
self.assertIn(prop, item)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_get(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_get_mount_points_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_get_error())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.mover.mover_name, False)
self.assertEqual(constants.STATUS_ERROR, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
class VDMTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
self.ssh_hook = utils.SSHSideEffect()
def test_create_vdm(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.vdm.resp_task_succeed())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(self.vdm.vdm_name, self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.vdm.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_vdm_but_already_exist(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.vdm.resp_create_but_already_exist())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Create VDM which already exists.
context.create(self.vdm.vdm_name, self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.vdm.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_vdm_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.vdm.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.vdm.resp_task_succeed())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Create VDM with invalid mover ID
context.create(self.vdm.vdm_name, self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.vdm.req_create()),
mock.call(self.mover.req_get_ref()),
mock.call(self.vdm.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_vdm_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.vdm.resp_task_error())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
        # Creating the VDM fails with a task error.
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
name=self.vdm.vdm_name,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.vdm.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_vdm(self):
self.hook.append(self.vdm.resp_get_succeed())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.vdm.vdm_name, context.vdm_map)
property_map = [
'name',
'id',
'state',
'host_mover_id',
'interfaces',
]
for prop in property_map:
self.assertIn(prop, out)
expected_calls = [mock.call(self.vdm.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_vdm_with_error(self):
self.hook.append(self.vdm.resp_get_error())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Get VDM with error
status, out = context.get(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_ERROR, status)
expected_calls = [mock.call(self.vdm.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_vdm_but_not_found(self):
self.hook.append(self.vdm.resp_get_without_value())
self.hook.append(self.vdm.resp_get_succeed('fake'))
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Get VDM which does not exist
status, out = context.get(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
status, out = context.get(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.vdm.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_vdm_id_with_error(self):
self.hook.append(self.vdm.resp_get_error())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get_id,
self.vdm.vdm_name)
expected_calls = [mock.call(self.vdm.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_vdm(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.vdm.resp_task_succeed())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.vdm.vdm_name)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.vdm.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_vdm_but_not_found(self):
self.hook.append(self.vdm.resp_get_but_not_found())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.vdm.vdm_name)
expected_calls = [mock.call(self.vdm.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_vdm_but_failed_to_get_vdm(self):
self.hook.append(self.vdm.resp_get_error())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.vdm.vdm_name)
expected_calls = [mock.call(self.vdm.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_vdm_with_error(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.vdm.resp_task_error())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.vdm.vdm_name)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.vdm.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_attach_detach_nfs_interface(self):
self.ssh_hook.append()
self.ssh_hook.append()
context = self.manager.getStorageContext('VDM')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.attach_nfs_interface(self.vdm.vdm_name,
self.mover.interface_name2)
context.detach_nfs_interface(self.vdm.vdm_name,
self.mover.interface_name2)
ssh_calls = [
mock.call(self.vdm.cmd_attach_nfs_interface(), False),
mock.call(self.vdm.cmd_detach_nfs_interface(), True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_detach_nfs_interface_with_error(self):
self.ssh_hook.append(ex=processutils.ProcessExecutionError(
stdout=self.vdm.fake_output))
self.ssh_hook.append(self.vdm.output_get_interfaces(
self.mover.interface_name2))
self.ssh_hook.append(ex=processutils.ProcessExecutionError(
stdout=self.vdm.fake_output))
self.ssh_hook.append(self.vdm.output_get_interfaces(
nfs_interface=fakes.FakeData.interface_name1))
context = self.manager.getStorageContext('VDM')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.detach_nfs_interface,
self.vdm.vdm_name,
self.mover.interface_name2)
context.detach_nfs_interface(self.vdm.vdm_name,
self.mover.interface_name2)
ssh_calls = [
mock.call(self.vdm.cmd_detach_nfs_interface(), True),
mock.call(self.vdm.cmd_get_interfaces(), False),
mock.call(self.vdm.cmd_detach_nfs_interface(), True),
mock.call(self.vdm.cmd_get_interfaces(), False),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_get_cifs_nfs_interface(self):
self.ssh_hook.append(self.vdm.output_get_interfaces())
context = self.manager.getStorageContext('VDM')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
interfaces = context.get_interfaces(self.vdm.vdm_name)
self.assertIsNotNone(interfaces['cifs'])
self.assertIsNotNone(interfaces['nfs'])
ssh_calls = [mock.call(self.vdm.cmd_get_interfaces(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
class StoragePoolTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_get_pool(self):
self.hook.append(self.pool.resp_get_succeed())
context = self.manager.getStorageContext('StoragePool')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.pool.pool_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.pool.pool_name, context.pool_map)
property_map = [
'name',
'movers_id',
'total_size',
'used_size',
'diskType',
'dataServicePolicies',
'id',
]
for prop in property_map:
self.assertIn(prop, out)
expected_calls = [mock.call(self.pool.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_pool_with_error(self):
self.hook.append(self.pool.resp_get_error())
self.hook.append(self.pool.resp_get_without_value())
self.hook.append(self.pool.resp_get_succeed(name='other'))
context = self.manager.getStorageContext('StoragePool')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.pool.pool_name)
self.assertEqual(constants.STATUS_ERROR, status)
status, out = context.get(self.pool.pool_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
status, out = context.get(self.pool.pool_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.pool.req_get()),
mock.call(self.pool.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_pool_id_with_error(self):
self.hook.append(self.pool.resp_get_error())
context = self.manager.getStorageContext('StoragePool')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get_id,
self.pool.pool_name)
expected_calls = [mock.call(self.pool.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
class MoverTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
self.ssh_hook = utils.SSHSideEffect()
def test_get_mover(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_succeed())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.mover.mover_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.mover.mover_name, context.mover_map)
property_map = [
'name',
'id',
'Status',
'version',
'uptime',
'role',
'interfaces',
'devices',
'dns_domain',
]
for prop in property_map:
self.assertIn(prop, out)
status, out = context.get(self.mover.mover_name)
self.assertEqual(constants.STATUS_OK, status)
status, out = context.get(self.mover.mover_name, True)
self.assertEqual(constants.STATUS_OK, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_ref_not_found(self):
self.hook.append(self.mover.resp_get_ref_succeed(name='other'))
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get_ref(self.mover.mover_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [mock.call(self.mover.req_get_ref())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_ref_with_error(self):
self.hook.append(self.mover.resp_get_error())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get_ref(self.mover.mover_name)
self.assertEqual(constants.STATUS_ERROR, status)
expected_calls = [mock.call(self.mover.req_get_ref())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_ref_and_mover(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_succeed())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get_ref(self.mover.mover_name)
self.assertEqual(constants.STATUS_OK, status)
property_map = ['name', 'id']
for prop in property_map:
self.assertIn(prop, out)
status, out = context.get(self.mover.mover_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.mover.mover_name, context.mover_map)
property_map = [
'name',
'id',
'Status',
'version',
'uptime',
'role',
'interfaces',
'devices',
'dns_domain',
]
for prop in property_map:
self.assertIn(prop, out)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_failed_to_get_mover_ref(self):
self.hook.append(self.mover.resp_get_error())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get,
self.mover.mover_name)
expected_calls = [mock.call(self.mover.req_get_ref())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_but_not_found(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_without_value())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(name=self.mover.mover_name, force=True)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_error())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.mover.mover_name)
self.assertEqual(constants.STATUS_ERROR, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_interconnect_id(self):
self.ssh_hook.append(self.mover.output_get_interconnect_id())
context = self.manager.getStorageContext('Mover')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
conn_id = context.get_interconnect_id(self.mover.mover_name,
self.mover.mover_name)
self.assertEqual(self.mover.interconnect_id, conn_id)
ssh_calls = [mock.call(self.mover.cmd_get_interconnect_id(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_get_physical_devices(self):
self.ssh_hook.append(self.mover.output_get_physical_devices())
context = self.manager.getStorageContext('Mover')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
devices = context.get_physical_devices(self.mover.mover_name)
self.assertIn(self.mover.device_name, devices)
ssh_calls = [mock.call(self.mover.cmd_get_physical_devices(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
class SnapshotTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_create_snapshot(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.snap.resp_task_succeed())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.snap.snapshot_name,
fs_name=self.fs.filesystem_name,
pool_id=self.pool.pool_id)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.snap.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_snapshot_but_already_exist(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.snap.resp_create_but_already_exist())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.snap.snapshot_name,
fs_name=self.fs.filesystem_name,
pool_id=self.pool.pool_id,
ckpt_size=self.snap.snapshot_size)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.snap.req_create_with_size()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_snapshot_with_error(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.snap.resp_task_error())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
name=self.snap.snapshot_name,
fs_name=self.fs.filesystem_name,
pool_id=self.pool.pool_id,
ckpt_size=self.snap.snapshot_size)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.snap.req_create_with_size()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_snapshot(self):
self.hook.append(self.snap.resp_get_succeed())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.snap.snapshot_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.snap.snapshot_name, context.snap_map)
property_map = [
'name',
'id',
'checkpointOf',
'state',
]
for prop in property_map:
self.assertIn(prop, out)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_snapshot_but_not_found(self):
self.hook.append(self.snap.resp_get_without_value())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.snap.snapshot_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_snapshot_with_error(self):
self.hook.append(self.snap.resp_get_error())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.snap.snapshot_name)
self.assertEqual(constants.STATUS_ERROR, status)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_snapshot(self):
self.hook.append(self.snap.resp_get_succeed())
self.hook.append(self.snap.resp_task_succeed())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.snap.snapshot_name)
self.assertNotIn(self.snap.snapshot_name, context.snap_map)
expected_calls = [
mock.call(self.snap.req_get()),
mock.call(self.snap.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_snapshot_failed_to_get_snapshot(self):
self.hook.append(self.snap.resp_get_error())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.snap.snapshot_name)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_snapshot_but_not_found(self):
self.hook.append(self.snap.resp_get_without_value())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.snap.snapshot_name)
self.assertNotIn(self.snap.snapshot_name, context.snap_map)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_snapshot_with_error(self):
self.hook.append(self.snap.resp_get_succeed())
self.hook.append(self.snap.resp_task_error())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.snap.snapshot_name)
expected_calls = [
mock.call(self.snap.req_get()),
mock.call(self.snap.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_snapshot_id(self):
self.hook.append(self.snap.resp_get_succeed())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
id = context.get_id(self.snap.snapshot_name)
self.assertEqual(self.snap.snapshot_id, id)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_snapshot_id_with_error(self):
self.hook.append(self.snap.resp_get_error())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get_id,
self.snap.snapshot_name)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
@ddt.ddt
class MoverInterfaceTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_create_mover_interface(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_succeed())
self.hook.append(self.mover.resp_task_succeed())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
context.create(interface)
interface['name'] = self.mover.long_interface_name
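        # Interface names longer than 31 characters are expected to be truncated
        # to 31 characters in the generated request.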
context.create(interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
mock.call(self.mover.req_create_interface(
self.mover.long_interface_name[:31])),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_mover_interface_name_already_exist(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(
self.mover.resp_create_interface_but_name_already_exist())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
context.create(interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_mover_interface_ip_already_exist(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(
self.mover.resp_create_interface_but_ip_already_exist())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
context.create(interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@ddt.data(fakes.MoverTestData().resp_task_succeed(),
fakes.MoverTestData().resp_task_error())
def test_create_mover_interface_with_conflict_vlan_id(self, xml_resp):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(
self.mover.resp_create_interface_with_conflicted_vlan_id())
self.hook.append(xml_resp)
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
mock.call(self.mover.req_delete_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_mover_interface_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_succeed())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
context.create(interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
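    # On an "invalid mover id" response the driver is expected to refresh the
    # mover reference and retry the same create request, hence the repeated
    # get_ref/create pair; time.sleep is mocked so the retry back-off does not
    # slow the test down.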
def test_create_mover_interface_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_error())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_interface(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_succeed())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(name=self.mover.interface_name1,
mover_name=self.mover.mover_name)
self.assertEqual(constants.STATUS_OK, status)
property_map = [
'name',
'device',
'up',
'ipVersion',
'netMask',
'ipAddress',
'vlanid',
]
for prop in property_map:
self.assertIn(prop, out)
context.get(name=self.mover.long_interface_name,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_interface_not_found(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_without_value())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(name=self.mover.interface_name1,
mover_name=self.mover.mover_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_mover_interface(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_succeed())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(ip_addr=self.mover.ip_address1,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_delete_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_mover_interface_but_nonexistent(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_delete_interface_but_nonexistent())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(ip_addr=self.mover.ip_address1,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_delete_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_delete_mover_interface_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_succeed())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(ip_addr=self.mover.ip_address1,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_delete_interface()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_delete_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_delete_mover_interface_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_error())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
ip_addr=self.mover.ip_address1,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_delete_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
class DNSDomainTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_create_dns_domain(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_task_succeed())
context = self.manager.getStorageContext('DNSDomain')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mover_name=self.mover.mover_name,
name=self.dns.domain_name,
servers=self.dns.dns_ip_address)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_dns_domain_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_task_succeed())
context = self.manager.getStorageContext('DNSDomain')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mover_name=self.mover.mover_name,
name=self.dns.domain_name,
servers=self.dns.dns_ip_address)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_create()),
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_dns_domain_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_task_error())
context = self.manager.getStorageContext('DNSDomain')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
mover_name=self.mover.mover_name,
name=self.mover.domain_name,
servers=self.dns.dns_ip_address)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_dns_domain(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_task_succeed())
self.hook.append(self.dns.resp_task_error())
context = self.manager.getStorageContext('DNSDomain')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mover_name=self.mover.mover_name,
name=self.mover.domain_name)
context.delete(mover_name=self.mover.mover_name,
name=self.mover.domain_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_delete()),
mock.call(self.dns.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_delete_dns_domain_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_task_succeed())
context = self.manager.getStorageContext('DNSDomain')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mover_name=self.mover.mover_name,
name=self.mover.domain_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_delete()),
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
class CIFSServerTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_create_cifs_server(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
self.hook.append(self.cifs_server.resp_task_error())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True))
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Create CIFS server on mover
cifs_server_args = {
'name': self.cifs_server.cifs_server_name,
'interface_ip': self.cifs_server.ip_address1,
'domain_name': self.cifs_server.domain_name,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.mover.mover_name,
'is_vdm': False,
}
context.create(cifs_server_args)
# Create CIFS server on VDM
cifs_server_args = {
'name': self.cifs_server.cifs_server_name,
'interface_ip': self.cifs_server.ip_address1,
'domain_name': self.cifs_server.domain_name,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
'is_vdm': True,
}
context.create(cifs_server_args)
# Create CIFS server on VDM
cifs_server_args = {
'name': self.cifs_server.cifs_server_name,
'interface_ip': self.cifs_server.ip_address1,
'domain_name': self.cifs_server.domain_name,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
'is_vdm': True,
}
context.create(cifs_server_args)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_create(self.mover.mover_id, False)),
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_create(self.vdm.vdm_id)),
mock.call(self.cifs_server.req_create(self.vdm.vdm_id)),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_cifs_server_already_exist(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_task_error())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True))
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
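        # Create CIFS server on VDM.  The create should succeed despite the
        # task error because the follow-up query finds an existing,
        # domain-joined server with the same name (body mirrors the sibling
        # CIFS server tests).
        cifs_server_args = {
            'name': self.cifs_server.cifs_server_name,
            'interface_ip': self.cifs_server.ip_address1,
            'domain_name': self.cifs_server.domain_name,
            'user_name': self.cifs_server.domain_user,
            'password': self.cifs_server.domain_password,
            'mover_name': self.vdm.vdm_name,
            'is_vdm': True,
        }
        context.create(cifs_server_args)
        expected_calls = [
            mock.call(self.vdm.req_get()),
            mock.call(self.cifs_server.req_create(self.vdm.vdm_id)),
            mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
        ]
        context.conn['XML'].request.assert_has_calls(expected_calls)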
@mock.patch('time.sleep')
def test_create_cifs_server_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Create CIFS server on mover
cifs_server_args = {
'name': self.cifs_server.cifs_server_name,
'interface_ip': self.cifs_server.ip_address1,
'domain_name': self.cifs_server.domain_name,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.mover.mover_name,
'is_vdm': False,
}
context.create(cifs_server_args)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_create(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_create(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_cifs_server_with_error(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_task_error())
self.hook.append(self.cifs_server.resp_get_error())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Create CIFS server on VDM
cifs_server_args = {
'name': self.cifs_server.cifs_server_name,
'interface_ip': self.cifs_server.ip_address1,
'domain_name': self.cifs_server.domain_name,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
'is_vdm': True,
}
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
cifs_server_args)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_create(self.vdm.vdm_id)),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_all_cifs_server(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True))
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True))
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get_all(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.vdm.vdm_name, context.cifs_server_map)
# Get CIFS server from the cache
status, out = context.get_all(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.vdm.vdm_name, context.cifs_server_map)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_get_all_cifs_server_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True))
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get_all(self.mover.mover_name, False)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.mover.mover_name, context.cifs_server_map)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_get(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_get_cifs_server(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True))
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(name=self.cifs_server.cifs_server_name,
mover_name=self.vdm.vdm_name)
self.assertEqual(constants.STATUS_OK, status)
property_map = {
'name',
'compName',
'Aliases',
'type',
'interfaces',
'domain',
'domainJoined',
'mover',
'moverIdIsVdm',
}
for prop in property_map:
self.assertIn(prop, out)
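        # The second get below is served from the cifs_server_map cache, so
        # only one XML query for the CIFS server is expected.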
context.get(name=self.cifs_server.cifs_server_name,
mover_name=self.vdm.vdm_name)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_modify_cifs_server(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': True,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.mover.mover_name,
'is_vdm': False,
}
context.modify(cifs_server_args)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': False,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
}
context.modify(cifs_server_args)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_modify(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True)),
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_modify(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_modify_cifs_server_but_unjoin_domain(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_modify_but_unjoin_domain())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': False,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
}
context.modify(cifs_server_args)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_modify(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_modify_cifs_server_but_already_join_domain(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(
self.cifs_server.resp_modify_but_already_join_domain())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': True,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
}
context.modify(cifs_server_args)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_modify(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_modify_cifs_server_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': True,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.mover.mover_name,
'is_vdm': False,
}
context.modify(cifs_server_args)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_modify(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_modify(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_modify_cifs_server_with_error(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_task_error())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': False,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
}
self.assertRaises(exception.EMCVnxXMLAPIError,
context.modify,
cifs_server_args)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_modify(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_cifs_server(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True))
self.hook.append(self.cifs_server.resp_task_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=False))
self.hook.append(self.cifs_server.resp_task_succeed())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(computer_name=self.cifs_server.cifs_server_name,
mover_name=self.mover.mover_name,
is_vdm=False)
context.delete(computer_name=self.cifs_server.cifs_server_name,
mover_name=self.vdm.vdm_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_get(self.mover.mover_id, False)),
mock.call(self.cifs_server.req_delete(self.mover.mover_id, False)),
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
mock.call(self.cifs_server.req_delete(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_cifs_server_but_not_found(self):
self.hook.append(self.mover.resp_get_without_value())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_get_without_value())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(computer_name=self.cifs_server.cifs_server_name,
mover_name=self.mover.mover_name,
is_vdm=False)
context.delete(computer_name=self.cifs_server.cifs_server_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_cifs_server_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True))
self.hook.append(self.cifs_server.resp_task_error())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
computer_name=self.cifs_server.cifs_server_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_get(self.mover.mover_id, False)),
mock.call(self.cifs_server.req_delete(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
class CIFSShareTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
self.ssh_hook = utils.SSHSideEffect()
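    # CIFS share tests drive the backend over two channels: share create and
    # delete go through the XML API (self.hook replayed by EMCMock), while
    # access control runs CLI commands over SSH (self.ssh_hook replayed by a
    # mocked run_ssh), so setUp() prepares both fakes.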
def test_create_cifs_share(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.cifs_share.share_name,
server_name=self.cifs_share.cifs_server_name[-14:],
mover_name=self.vdm.vdm_name,
is_vdm=True)
context.create(name=self.cifs_share.share_name,
server_name=self.cifs_share.cifs_server_name[-14:],
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_share.req_create(self.vdm.vdm_id)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_create(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_cifs_share_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.cifs_share.share_name,
server_name=self.cifs_share.cifs_server_name[-14:],
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_create(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_create(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_cifs_share_with_error(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_share.resp_task_error())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
name=self.cifs_share.share_name,
server_name=self.cifs_share.cifs_server_name[-14:],
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_share.req_create(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_cifs_share(self):
self.hook.append(self.cifs_share.resp_get_succeed(self.vdm.vdm_id))
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
self.hook.append(self.cifs_share.resp_get_succeed(self.mover.mover_id,
False))
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
context.delete(name=self.cifs_share.share_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.cifs_share.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.cifs_share.req_delete(self.vdm.vdm_id)),
mock.call(self.cifs_share.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_delete(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_cifs_share_not_found(self):
self.hook.append(self.cifs_share.resp_get_error())
self.hook.append(self.cifs_share.resp_get_without_value())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
context.delete(name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.cifs_share.req_get()),
mock.call(self.cifs_share.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_delete_cifs_share_invalid_mover_id(self, sleep_mock):
self.hook.append(self.cifs_share.resp_get_succeed(self.mover.mover_id,
False))
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(name=self.cifs_share.share_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.cifs_share.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_delete(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_delete(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_delete_cifs_share_with_error(self):
self.hook.append(self.cifs_share.resp_get_succeed(self.vdm.vdm_id))
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_share.resp_task_error())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.cifs_share.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.cifs_share.req_delete(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_cifs_share(self):
self.hook.append(self.cifs_share.resp_get_succeed(self.vdm.vdm_id))
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.get(self.cifs_share.share_name)
expected_calls = [mock.call(self.cifs_share.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_disable_share_access(self):
self.ssh_hook.append('Command succeeded')
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.disable_share_access(share_name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.cifs_share.cmd_disable_access(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_disable_share_access_with_error(self):
self.ssh_hook.append(ex=processutils.ProcessExecutionError(
stdout=self.cifs_share.fake_output))
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.disable_share_access,
share_name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.cifs_share.cmd_disable_access(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_allow_share_access(self):
self.ssh_hook.append(self.cifs_share.output_allow_access())
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.allow_share_access(mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [mock.call(self.cifs_share.cmd_change_access(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_allow_share_access_duplicate_ACE(self):
expt_dup_ace = processutils.ProcessExecutionError(
stdout=self.cifs_share.output_allow_access_but_duplicate_ace())
self.ssh_hook.append(ex=expt_dup_ace)
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.allow_share_access(mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [mock.call(self.cifs_share.cmd_change_access(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_allow_share_access_with_error(self):
expt_err = processutils.ProcessExecutionError(
self.cifs_share.fake_output)
self.ssh_hook.append(ex=expt_err)
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.allow_share_access,
mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [mock.call(self.cifs_share.cmd_change_access(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_share_access(self):
self.ssh_hook.append('Command succeeded')
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.deny_share_access(mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [
mock.call(self.cifs_share.cmd_change_access(action='revoke'),
True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_share_access_no_ace(self):
expt_no_ace = processutils.ProcessExecutionError(
stdout=self.cifs_share.output_deny_access_but_no_ace())
self.ssh_hook.append(ex=expt_no_ace)
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.deny_share_access(mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [
mock.call(self.cifs_share.cmd_change_access(action='revoke'),
True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_share_access_but_no_user_found(self):
expt_no_user = processutils.ProcessExecutionError(
stdout=self.cifs_share.output_deny_access_but_no_user_found())
self.ssh_hook.append(ex=expt_no_user)
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.deny_share_access(mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [
mock.call(self.cifs_share.cmd_change_access(action='revoke'),
True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_share_access_with_error(self):
expt_err = processutils.ProcessExecutionError(
self.cifs_share.fake_output)
self.ssh_hook.append(ex=expt_err)
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.deny_share_access,
mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [
mock.call(self.cifs_share.cmd_change_access(action='revoke'),
True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
class NFSShareTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.ssh_hook = utils.SSHSideEffect()
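    # NFS share management is exercised purely over the SSH CLI: each test
    # queues the expected command output (or a ProcessExecutionError) on
    # self.ssh_hook and then asserts the exact sequence of commands issued.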
def test_create_nfs_share(self):
self.ssh_hook.append(self.nfs_share.output_create())
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.create(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_create(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_create_nfs_share_with_error(self):
expt_err = processutils.ProcessExecutionError(
stdout=self.nfs_share.fake_output)
self.ssh_hook.append(ex=expt_err)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_create(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_delete_nfs_share(self):
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
self.ssh_hook.append(self.nfs_share.output_delete_succeed())
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.delete(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get(), False),
mock.call(self.nfs_share.cmd_delete(), True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_delete_nfs_share_not_found(self):
expt_not_found = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_get_but_not_found())
self.ssh_hook.append(ex=expt_not_found)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.delete(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_get(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
@mock.patch('time.sleep')
def test_delete_nfs_share_locked(self, sleep_mock):
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
expt_locked = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_delete_but_locked())
self.ssh_hook.append(ex=expt_locked)
self.ssh_hook.append(self.nfs_share.output_delete_succeed())
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.delete(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get(), False),
mock.call(self.nfs_share.cmd_delete(), True),
mock.call(self.nfs_share.cmd_delete(), True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
self.assertTrue(sleep_mock.called)
def test_delete_nfs_share_with_error(self):
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
expt_err = processutils.ProcessExecutionError(
stdout=self.nfs_share.fake_output)
self.ssh_hook.append(ex=expt_err)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get(), False),
mock.call(self.nfs_share.cmd_delete(), True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_get_nfs_share(self):
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.get(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
# Get NFS share from cache
context.get(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_get(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_get_nfs_share_not_found(self):
expt_not_found = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_get_but_not_found())
self.ssh_hook.append(ex=expt_not_found)
self.ssh_hook.append(self.nfs_share.output_get_but_not_found())
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.get(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
context.get(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get(), False),
mock.call(self.nfs_share.cmd_get(), False),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_get_nfs_share_with_error(self):
expt_err = processutils.ProcessExecutionError(
stdout=self.nfs_share.fake_output)
self.ssh_hook.append(ex=expt_err)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get,
name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_get(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_allow_share_access(self):
rw_hosts = copy.deepcopy(self.nfs_share.rw_hosts)
rw_hosts.append(self.nfs_share.nfs_host_ip)
ro_hosts = copy.deepcopy(self.nfs_share.ro_hosts)
ro_hosts.append(self.nfs_share.nfs_host_ip)
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
self.ssh_hook.append(self.nfs_share.output_set_access_success())
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts))
self.ssh_hook.append(self.nfs_share.output_set_access_success())
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts, ro_hosts=ro_hosts))
self.ssh_hook.append(self.nfs_share.output_set_access_success())
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts))
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
context.allow_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name,
access_level=const.ACCESS_LEVEL_RW)
context.allow_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name,
access_level=const.ACCESS_LEVEL_RO)
context.allow_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name,
access_level=const.ACCESS_LEVEL_RW)
context.allow_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name,
access_level=const.ACCESS_LEVEL_RW)
ssh_calls = [
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts)),
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(
rw_hosts=self.nfs_share.rw_hosts, ro_hosts=ro_hosts)),
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts)),
mock.call(self.nfs_share.cmd_get()),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
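    # The four allow_share_access() calls above move the host through
    # RW -> RO -> RW -> RW: the first three each issue a get followed by a
    # set_access, while the last one only re-reads the export because the
    # host already holds read-write access.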
def test_allow_share_access_not_found(self):
expt_not_found = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_get_but_not_found())
self.ssh_hook.append(ex=expt_not_found)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.allow_share_access,
share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name,
access_level=const.ACCESS_LEVEL_RW)
ssh_calls = [mock.call(self.nfs_share.cmd_get())]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_rw_share_access(self):
rw_hosts = copy.deepcopy(self.nfs_share.rw_hosts)
rw_hosts.append(self.nfs_share.nfs_host_ip)
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts))
self.ssh_hook.append(self.nfs_share.output_set_access_success())
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
context.deny_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(self.nfs_share.rw_hosts,
self.nfs_share.ro_hosts)),
mock.call(self.nfs_share.cmd_get()),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_ro_share_access(self):
ro_hosts = copy.deepcopy(self.nfs_share.ro_hosts)
ro_hosts.append(self.nfs_share.nfs_host_ip)
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts, ro_hosts=ro_hosts))
self.ssh_hook.append(self.nfs_share.output_set_access_success())
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
context.deny_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name)
context.deny_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(self.nfs_share.rw_hosts,
self.nfs_share.ro_hosts)),
mock.call(self.nfs_share.cmd_get()),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
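    # deny_share_access() runs twice above: the first call revokes the host's
    # read-only entry, while the second finds the host already absent from the
    # export and issues no further set_access command.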
def test_deny_share_not_found(self):
expt_not_found = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_get_but_not_found())
self.ssh_hook.append(ex=expt_not_found)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.deny_share_access,
share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_get())]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_rw_share_with_error(self):
rw_hosts = copy.deepcopy(self.nfs_share.rw_hosts)
rw_hosts.append(self.nfs_share.nfs_host_ip)
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts))
expt_not_found = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_get_but_not_found())
self.ssh_hook.append(ex=expt_not_found)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.deny_share_access,
share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(self.nfs_share.rw_hosts,
self.nfs_share.ro_hosts)),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
| apache-2.0 | 3,176,923,940,610,800,600 | 39.678005 | 79 | 0.601164 | false |
Agiliza/AgilizaFramework | tests/agiliza/core/utils/patterns/test_singleton.py | 1 | 1894 | """
This file is part of Agiliza.
Agiliza is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Agiliza is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Agiliza. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) 2012 Vicente Ruiz <[email protected]>
"""
import unittest
from agiliza.core.utils.patterns import Singleton
class SingletonTest(unittest.TestCase):
def test_must_retrieve_the_same_instance(self):
class SingletonExample(Singleton): pass
instance1 = SingletonExample()
instance2 = SingletonExample.getInstance()
self.assertEqual(
instance1, instance2,
"Singleton makes different instances"
)
def test_must_retrieve_the_same_instance_multiple_times(self):
class SingletonExample(Singleton): pass
instance1 = SingletonExample()
SingletonExample()
SingletonExample()
instance2 = SingletonExample()
self.assertEqual(
instance1, instance2,
"Singleton makes different instances"
)
def test_must_invalidate_a_instance(self):
class SingletonExample(Singleton): pass
instance1 = SingletonExample.getInstance()
SingletonExample.invalidateInstance()
instance2 = SingletonExample()
self.assertNotEqual(
instance1, instance2,
"Singleton does not invalidate instances"
)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -4,045,939,752,745,946,600 | 27.69697 | 68 | 0.69377 | false |
dhalleine/tensorflow | tensorflow/contrib/learn/python/learn/experiment.py | 1 | 6100 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Experiment class collecting information needed for a single training run."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
from tensorflow.python.platform import tf_logging as logging
class Experiment(object):
"""Experiment is a class containing all information needed to train a model.
"""
def __init__(self,
estimator,
train_input_fn,
eval_input_fn,
eval_metrics=None,
train_steps=None,
eval_steps=100,
train_monitors=None):
"""Constructor for `Experiment`.
Args:
estimator: `Estimator` object.
train_input_fn: function, returns features and targets for training.
eval_input_fn: function, returns features and targets for evaluation. If
        `eval_steps` is `None`, this should be configured to produce only a
        finite number of batches (generally, 1 epoch over the evaluation data).
eval_metrics: `dict` of string, metric function. If `None`, default set
is used.
train_steps: Perform this many steps of training. `None`, the default,
means train forever.
eval_steps: `evaluate` runs until input is exhausted (or another exception
is raised), or for `eval_steps` steps, if specified.
train_monitors: A list of monitors to pass to the `Estimator`'s `fit`
function.
"""
super(Experiment, self).__init__()
self._estimator = estimator
self._train_input_fn = train_input_fn
self._eval_input_fn = eval_input_fn
self._eval_metrics = eval_metrics
self._train_steps = train_steps
self._eval_steps = eval_steps
self._train_monitors = train_monitors
def train(self, delay_secs=0):
"""Fit the estimator using the training data.
    Train the estimator for `train_steps` steps (as given to the constructor),
    after waiting for `delay_secs` seconds. If `train_steps` is `None`, train
    forever.
Args:
delay_secs: Start training after this many seconds.
Returns:
The trained estimator.
"""
if delay_secs:
logging.info("Waiting %d secs before starting training.", delay_secs)
time.sleep(delay_secs)
return self._estimator.fit(input_fn=self._train_input_fn,
steps=self._train_steps,
monitors=self._train_monitors)
def evaluate(self, delay_secs=0):
"""Evaluate on the evaluation data.
    Runs evaluation on the evaluation data and returns the result. If
    `eval_steps` is given, only run for this many steps. Otherwise run until
    input is exhausted, or another exception is raised. Start the evaluation after
`delay_secs` seconds.
Args:
delay_secs: Start evaluating after waiting for this many seconds.
Returns:
The result of the `evaluate` call to the `Estimator`.
"""
if delay_secs:
logging.info("Waiting %d secs before starting eval.", delay_secs)
time.sleep(delay_secs)
return self._estimator.evaluate(input_fn=self._eval_input_fn,
steps=self._eval_steps,
metrics=self._eval_metrics,
name="one_pass")
def local_run(self):
"""Run when called on local machine.
Returns:
The result of the `evaluate` call to the `Estimator`.
"""
# TODO(ipolosukhin): Add a ValidationMonitor to run in-training evaluation.
self.train()
return self.evaluate()
def _continuous_eval(self,
input_fn,
name,
delay_secs=0,
throttle_delay_secs=60):
"""Run continuous eval.
    Run `eval_steps` steps of evaluation on the evaluation data set. This
    function starts evaluating after `delay_secs` seconds and then runs no more
    than one evaluation per `throttle_delay_secs`. It never returns.
Args:
input_fn: The input to use for this eval.
name: A string appended to the folder name of evaluation results.
delay_secs: Start evaluating after this many seconds.
throttle_delay_secs: Do not re-evaluate unless the last evaluation was
started at least this many seconds ago.
"""
if delay_secs:
logging.info("Waiting %f secs before starting eval.", delay_secs)
time.sleep(delay_secs)
while True:
start = time.time()
self._estimator.evaluate(input_fn=input_fn,
steps=self._eval_steps,
metrics=self._eval_metrics,
name=name)
duration = time.time() - start
if duration < throttle_delay_secs:
difference = throttle_delay_secs - duration
logging.info("Waiting %f secs before starting next eval run.",
difference)
time.sleep(difference)
def continuous_eval(self, delay_secs=0, throttle_delay_secs=60):
self._continuous_eval(self._eval_input_fn,
name="continuous",
delay_secs=delay_secs,
throttle_delay_secs=throttle_delay_secs)
def continuous_eval_on_train_data(self, delay_secs=0, throttle_delay_secs=60):
self._continuous_eval(self._train_input_fn,
name="continuous_on_train_data",
delay_secs=delay_secs,
throttle_delay_secs=throttle_delay_secs)
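# A minimal usage sketch (illustrative only -- the estimator and input
# functions named below are hypothetical placeholders, not part of this
# module):
#
#   experiment = Experiment(estimator=my_estimator,
#                           train_input_fn=my_train_input_fn,
#                           eval_input_fn=my_eval_input_fn,
#                           train_steps=10000,
#                           eval_steps=100)
#   experiment.local_run()   # fit, then a single evaluation pass
#
# A dedicated evaluation job could instead call, e.g.:
#
#   experiment.continuous_eval(delay_secs=120, throttle_delay_secs=600)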
| apache-2.0 | 2,442,779,366,369,398,300 | 36.423313 | 80 | 0.627705 | false |