# ===== the-stack_0_167 =====
import re
import os
import struct
import argparse
import collections
from nltk.tokenize import word_tokenize
from nltk.corpus import stopwords
from nltk.stem.wordnet import WordNetLemmatizer
from tensorflow.core.example import example_pb2
from utils import is_num
parser = argparse.ArgumentParser()
# Path
parser.add_argument('--dataset', type=str, choices=['restaurant', 'beer'], default='restaurant')
parser.add_argument('--vocab_fname', type=str, default='./data/vocab.txt')
parser.add_argument('--vocab_size', type=int, default=30000)
args = parser.parse_args()
"""
Preprocessing script file
"""
def tokenize_sent(sent, lemtzr, stopword):
tokens = tokenize(sent.strip().lower())
tokens = re.sub(r'[^A-Za-z0-9]+',' ', ' '.join(tokens))
tokens = [tok for tok in tokens.split() if tok not in stopword]
tokens = [tok if not is_num(tok) else '<NUM>' for tok in tokens]
tokens = [lemtzr.lemmatize(tok) for tok in tokens]
return tokens
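# A minimal sketch of what tokenize_sent produces (assuming the NLTK 'punkt',
# 'stopwords' and 'wordnet' data are installed, and that utils.is_num flags
# numeric tokens); the sentence and output below are illustrative:
#
#   lemtzr = WordNetLemmatizer()
#   stopword = stopwords.words('english')
#   tokenize_sent('The 2 cats were running!', lemtzr, stopword)
#   # -> ['<NUM>', 'cat', 'running']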
def tokenize_train_file(fname):
"""
    Tokenize the raw train data (unlabeled).
"""
split_fname = fname.split('/')
new_fname = '/'.join([el if idx != len(split_fname) - 1 else 'parsed_' + el for idx, el in enumerate(split_fname)])
if os.path.exists(new_fname): return new_fname
with open(fname, 'r', encoding='utf8') as f:
ls = f.readlines()
parsed_data = []
lemtzr = WordNetLemmatizer()
stopword = stopwords.words('english')
for line in ls:
tokens = tokenize_sent(line, lemtzr, stopword)
parsed_data.append(tokens)
save_file(parsed_data, new_fname)
return new_fname
def tokenize_labeled_test_file(fname, label_fname):
"""
    Tokenize the raw test data (labeled).
"""
split_fname = fname.split('/')
new_fname = '/'.join([el if idx != len(split_fname) - 1 else 'parsed_' + el for idx, el in enumerate(split_fname)])
label_map_fname = '/'.join([el if idx != len(split_fname) - 1 else 'label_map.txt' for idx, el in enumerate(split_fname)])
if os.path.exists(new_fname): return label_map_fname, new_fname
with open(fname, 'r', encoding='utf8') as f1, open(label_fname, 'r', encoding='utf8') as f2:
ls1, ls2 = f1.readlines(), f2.readlines()
parsed_data = []
lemtzr = WordNetLemmatizer()
stopword = stopwords.words('english')
for line in ls1:
tokens = tokenize_sent(line, lemtzr, stopword)
parsed_data.append(tokens)
assert len(ls1) == len(ls2) == len(parsed_data)
new_parsed, new_ls2 = [], []
for parsed, label in zip(parsed_data, ls2):
if 'Positive' in label or 'Neutral' in label:
continue
new_parsed.append(parsed)
new_ls2.append(label)
assert len(new_parsed) == len(new_ls2)
parsed_data, ls2 = new_parsed, new_ls2
label_text = list(set([tok for line in ls2 for tok in line.strip().split()]))
label_map = dict()
print("Label for this dataset with assigned index is as follows.")
for idx, label in enumerate(label_text):
print('{}: {}'.format(label, idx))
label_map[label] = idx
with open(label_map_fname, 'w') as f:
for key,val in label_map.items():
f.write("{} {} ||| ".format(key, val))
for idx, data in enumerate(parsed_data):
labels = ls2[idx].strip().split()
assert all([label in list(label_map.keys()) for label in labels])
        # prepend the label indices and a '|||' separator so each line reads
        # "<label_idx> ... ||| tok1 tok2 ...", which make_binary_dataset splits on
        parsed_data[idx].insert(0, '|||')
        for label in labels:
            parsed_data[idx].insert(0, str(label_map[label]))
save_file(parsed_data, new_fname)
return label_map_fname, new_fname
def build_vocab(parsed_train_fname, vocab_file, vocab_size=30000):
"""
Build vocab based on frequency of each word in train set.
Save vocab file and return vocab list.
"""
if os.path.exists(vocab_file):
with open(vocab_file, 'r', encoding='utf8') as f:
ls = f.readlines()
assert len(ls) == vocab_size
vocab = [line.strip() for line in ls]
return vocab
with open(parsed_train_fname, 'r', encoding='utf8') as f:
ls = f.readlines()
tokens = [tok for line in ls for tok in line.strip().split()]
    counts = collections.Counter(tokens)
    vocab = sorted(counts.items(), key=lambda item: item[1], reverse=True)
print("TOTAL VOCAB SIZE: {}".format(len(vocab)))
    # keep only words occurring more than 10 times
    cutoff = len(vocab)
    for idx, tok in enumerate(vocab):
        if tok[1] <= 10:
            print("WORDS OCCURRING MORE THAN 10 TIMES: {}".format(idx))
            cutoff = idx
            break
    vocab = [tok[0] for tok in vocab][:cutoff]
vocab.append('<UNK>')
vocab.append('<PAD>')
assert all([isinstance(tok, str) for tok in vocab])
with open(vocab_file, 'w') as f:
f.write('\n'.join([tok for tok in vocab]))
return vocab
def save_file(data, new_fname):
"""
Change the "raw_fname" into parsed fname, then save "data" into parsed fname.
"""
assert isinstance(data, list)
with open(new_fname, 'w') as f: f.write('\n'.join([" ".join(one_sample) for one_sample in data]))
def tokenize(sent):
assert isinstance(sent, str)
return word_tokenize(sent)
def make_binary_dataset(fname, is_label=False):
"""
Make a binary data file for learning.
"""
binary_fname = fname.replace('.txt', '.bin')
if os.path.exists(binary_fname): return
with open(fname, 'r', encoding='utf8') as f:
ls = f.readlines()
data = [line.strip() for line in ls]
assert all(['|||' in dat for dat in data]) if is_label else all(['|||' not in dat for dat in data])
with open(binary_fname, 'wb') as f:
for line in data:
if is_label:
split_line = line.split('|||')
assert len(split_line) == 2
label, text = split_line[0].strip(), split_line[1].strip()
else:
text = line
example = example_pb2.Example()
example.features.feature['text'].bytes_list.value.extend([text.encode()])
if is_label:
example.features.feature['label'].bytes_list.value.extend([label.encode()])
example_str = example.SerializeToString()
str_len = len(example_str)
f.write(struct.pack('q', str_len))
f.write(struct.pack('%ds' % str_len, example_str))
return
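# A minimal sketch of how the length-prefixed records written above could be
# read back (an 8-byte 'q' length, then the serialized Example); the function
# name is illustrative and not part of the original script:
def read_binary_dataset(binary_fname):
    """Yield Example protos from a file written by make_binary_dataset."""
    with open(binary_fname, 'rb') as f:
        while True:
            len_bytes = f.read(8)
            if not len_bytes:
                break
            str_len = struct.unpack('q', len_bytes)[0]
            example_str = struct.unpack('%ds' % str_len, f.read(str_len))[0]
            yield example_pb2.Example.FromString(example_str)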
def main():
train_fname = './data/datasets/{}/train.txt'.format(args.dataset)
test_fname = './data/datasets/{}/test.txt'.format(args.dataset)
test_label_fname = './data/datasets/{}/test_label.txt'.format(args.dataset)
    vocab_fname = args.vocab_fname
    vocab_size = args.vocab_size
parsed_train_fname = tokenize_train_file(train_fname)
label_map, parsed_test_fname = tokenize_labeled_test_file(test_fname, test_label_fname)
build_vocab(parsed_train_fname, vocab_fname, vocab_size=vocab_size)
make_binary_dataset(parsed_train_fname, False)
make_binary_dataset(parsed_test_fname, True)
if __name__ == '__main__':
    main()

# ===== the-stack_0_169 =====
#!/usr/bin/env python3
from os import environ
from curio import Channel, run
syncword = environ.get('RRIDBOT_SYNC')
if syncword is None:
    raise RuntimeError('the RRIDBOT_SYNC environment variable must be set')
chan = ('localhost', 12345)
async def consumer():
ch = Channel(chan)
c = await ch.accept(authkey=syncword.encode())
myset = set()
while True:
try:
msg = await c.recv()
except (EOFError, ConnectionResetError) as e: # in the event that the client closes
print('resetting')
myset = set()
c = await ch.accept(authkey=syncword.encode())
continue
if msg is None: # explicit reset
myset = set()
else:
op, uri = msg.split(' ', 1)
print(op, uri)
if op == 'add':
if uri in myset:
await c.send(True)
else:
myset.add(uri)
await c.send(False)
elif op == 'del':
myset.discard(uri)
await c.send(False)
else:
await c.send('ERROR')
print(myset)
if __name__ == '__main__':
run(consumer)
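# A minimal client sketch for the add/del protocol above (assuming the server
# is running and RRIDBOT_SYNC is set in the client environment as well):
#
#   from curio import Channel, run
#
#   async def client():
#       ch = Channel(('localhost', 12345))
#       c = await ch.connect(authkey=syncword.encode())
#       await c.send('add https://example.org/1')
#       print(await c.recv())  # False: newly added
#       await c.send('add https://example.org/1')
#       print(await c.recv())  # True: already present
#
#   run(client)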

# ===== the-stack_0_172 =====
import aiohttp_csrf
import pytest
from aiohttp import web
SESSION_NAME = COOKIE_NAME = 'csrf_token'
FORM_FIELD_NAME = HEADER_NAME = 'X-CSRF-TOKEN'
@pytest.fixture
def init_app():
def go(
loop,
policy,
storage,
handlers,
error_renderer=None,
):
app = web.Application()
kwargs = {
'policy': policy,
'storage': storage,
}
if error_renderer is not None:
kwargs['error_renderer'] = error_renderer
aiohttp_csrf.setup(app, **kwargs)
for method, url, handler in handlers:
app.router.add_route(
method,
url,
handler,
)
return app
yield go
@pytest.fixture(params=[
(aiohttp_csrf.policy.FormPolicy, (FORM_FIELD_NAME,)),
(aiohttp_csrf.policy.FormAndHeaderPolicy, (HEADER_NAME, FORM_FIELD_NAME)),
])
def csrf_form_policy(request):
_class, args = request.param
return _class(*args)
@pytest.fixture(params=[
(aiohttp_csrf.policy.HeaderPolicy, (HEADER_NAME,)),
(aiohttp_csrf.policy.FormAndHeaderPolicy, (HEADER_NAME, FORM_FIELD_NAME)),
])
def csrf_header_policy(request):
_class, args = request.param
return _class(*args)
@pytest.fixture(params=[
(aiohttp_csrf.storage.SessionStorage, (SESSION_NAME,)),
(aiohttp_csrf.storage.CookieStorage, (COOKIE_NAME,)),
])
def csrf_storage(request):
_class, args = request.param
return _class(*args)
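# A minimal sketch of how these fixtures might combine in a test; the handler
# and the pytest-aiohttp fixtures below are illustrative, not part of this
# module:
#
#   async def handler(request):
#       return web.Response(text='ok')
#
#   async def test_get(aiohttp_client, init_app, csrf_form_policy,
#                      csrf_storage, loop):
#       app = init_app(
#           loop,
#           csrf_form_policy,
#           csrf_storage,
#           [('GET', '/', handler)],
#       )
#       client = await aiohttp_client(app)
#       resp = await client.get('/')
#       assert resp.status == 200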

# ===== the-stack_0_174 =====
"""
sphinxcontrib.openapi.openapi30
-------------------------------
The OpenAPI 3.0.0 spec renderer. Based on ``sphinxcontrib-httpdomain``.
:copyright: (c) 2016, Ihor Kalnytskyi.
:license: BSD, see LICENSE for details.
"""
import copy
import collections
import collections.abc
from datetime import datetime
import itertools
import json
import re
from urllib import parse
from http.client import responses as http_status_codes
from sphinx.util import logging
from sphinxcontrib.openapi import utils
LOG = logging.getLogger(__name__)
# https://github.com/OAI/OpenAPI-Specification/blob/3.0.2/versions/3.0.0.md#data-types
_TYPE_MAPPING = {
('integer', 'int32'): 1, # integer
('integer', 'int64'): 1, # long
('number', 'float'): 1.0, # float
('number', 'double'): 1.0, # double
('boolean', None): True, # boolean
('string', None): 'string', # string
('string', 'byte'): 'c3RyaW5n', # b'string' encoded in base64, # byte
('string', 'binary'): '01010101', # binary
('string', 'date'): datetime.now().date().isoformat(), # date
('string', 'date-time'): datetime.now().isoformat(), # dateTime
('string', 'password'): '********', # password
# custom extensions to handle common formats
('string', 'email'): '[email protected]',
('string', 'zip-code'): '90210',
('string', 'uri'): 'https://example.com',
# additional fallthrough cases
('integer', None): 1, # integer
('number', None): 1.0, # <fallthrough>
}
_READONLY_PROPERTY = object() # sentinel for values not included in requests
def _dict_merge(dct, merge_dct):
"""Recursive dict merge.
    Inspired by :meth:`dict.update()`, instead of updating only top-level
keys, dict_merge recurses down into dicts nested to an arbitrary depth,
updating keys. The ``merge_dct`` is merged into ``dct``.
From https://gist.github.com/angstwad/bf22d1822c38a92ec0a9
Arguments:
dct: dict onto which the merge is executed
merge_dct: dct merged into dct
"""
for k in merge_dct.keys():
if (k in dct and isinstance(dct[k], dict)
and isinstance(merge_dct[k], collections.abc.Mapping)):
_dict_merge(dct[k], merge_dct[k])
else:
dct[k] = merge_dct[k]
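# For illustration (values here are made up):
#
#   d = {'a': {'x': 1}, 'b': 2}
#   _dict_merge(d, {'a': {'y': 3}, 'b': 4})
#   # d is now {'a': {'x': 1, 'y': 3}, 'b': 4}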
def _parse_schema(schema, method):
"""
Convert a Schema Object to a Python object.
Args:
schema: An ``OrderedDict`` representing the schema object.
"""
if method and schema.get('readOnly', False):
return _READONLY_PROPERTY
# allOf: Must be valid against all of the subschemas
if 'allOf' in schema:
schema_ = copy.deepcopy(schema['allOf'][0])
for x in schema['allOf'][1:]:
_dict_merge(schema_, x)
return _parse_schema(schema_, method)
# anyOf: Must be valid against any of the subschemas
# TODO(stephenfin): Handle anyOf
# oneOf: Must be valid against exactly one of the subschemas
if 'oneOf' in schema:
# we only show the first one since we can't show everything
return _parse_schema(schema['oneOf'][0], method)
if 'enum' in schema:
# we only show the first one since we can't show everything
return schema['enum'][0]
schema_type = schema.get('type', 'object')
if schema_type == 'array':
# special case oneOf and anyOf so that we can show examples for all
# possible combinations
if 'oneOf' in schema['items']:
return [
_parse_schema(x, method) for x in schema['items']['oneOf']
]
if 'anyOf' in schema['items']:
return [
_parse_schema(x, method) for x in schema['items']['anyOf']
]
return [_parse_schema(schema['items'], method)]
if schema_type == 'object':
if method and 'properties' in schema and \
all(v.get('readOnly', False)
for v in schema['properties'].values()):
return _READONLY_PROPERTY
results = []
for name, prop in schema.get('properties', {}).items():
result = _parse_schema(prop, method)
if result != _READONLY_PROPERTY:
results.append((name, result))
return collections.OrderedDict(results)
if (schema_type, schema.get('format')) in _TYPE_MAPPING:
return _TYPE_MAPPING[(schema_type, schema.get('format'))]
return _TYPE_MAPPING[(schema_type, None)] # unrecognized format
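# For illustration, a made-up schema and the sample value rendered for it:
#
#   _parse_schema({'type': 'object', 'properties': {
#       'id': {'type': 'integer'},
#       'name': {'type': 'string'}}}, method=None)
#   # -> OrderedDict([('id', 1), ('name', 'string')])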
def _example(media_type_objects, method=None, endpoint=None, status=None,
nb_indent=0):
"""
Format examples in `Media Type Object` openapi v3 to HTTP request or
HTTP response example.
If method and endpoint is provided, this function prints a request example
else status should be provided to print a response example.
Arguments:
media_type_objects (Dict[str, Dict]): Dict containing
Media Type Objects.
method: The HTTP method to use in example.
endpoint: The HTTP route to use in example.
status: The HTTP status to use in example.
"""
indent = ' '
extra_indent = indent * nb_indent
if method is not None:
method = method.upper()
else:
try:
# one of possible values for status might be 'default'.
# in the case, just fallback to '-'
status_text = http_status_codes[int(status)]
except (ValueError, KeyError):
status_text = '-'
# Provide request samples for GET requests
if method == 'GET':
media_type_objects[''] = {
'examples': {'Example request': {'value': ''}}}
for content_type, content in media_type_objects.items():
examples = content.get('examples')
example = content.get('example')
# Try to get the example from the schema
if example is None and 'schema' in content:
example = content['schema'].get('example')
if examples is None:
examples = {}
if not example:
if re.match(r"application/[a-zA-Z\+]*json", content_type) is \
None:
LOG.info('skipping non-JSON example generation.')
continue
example = _parse_schema(content['schema'], method=method)
if method is None:
examples['Example response'] = {
'value': example,
}
else:
examples['Example request'] = {
'value': example,
}
for example in examples.values():
# According to OpenAPI v3 specs, string examples should be left unchanged
if not isinstance(example['value'], str):
example['value'] = json.dumps(
example['value'], indent=4, separators=(',', ': '))
for example_name, example in examples.items():
if 'summary' in example:
example_title = '{example_name} - {example[summary]}'.format(
**locals())
else:
example_title = example_name
yield ''
yield '{extra_indent}**{example_title}:**'.format(**locals())
yield ''
yield '{extra_indent}.. sourcecode:: http'.format(**locals())
yield ''
# Print http request example
if method:
yield '{extra_indent}{indent}{method} {endpoint} HTTP/1.1' \
.format(**locals())
yield '{extra_indent}{indent}Host: example.com' \
.format(**locals())
if content_type:
yield '{extra_indent}{indent}Content-Type: {content_type}'\
.format(**locals())
# Print http response example
else:
yield '{extra_indent}{indent}HTTP/1.1 {status} {status_text}' \
.format(**locals())
yield '{extra_indent}{indent}Content-Type: {content_type}' \
.format(**locals())
yield ''
for example_line in example['value'].splitlines():
yield '{extra_indent}{indent}{example_line}'.format(**locals())
if example['value'].splitlines():
yield ''
def convert_json_schema(schema, directive=':<json'):
"""
Convert json schema to `:<json` sphinx httpdomain.
"""
output = []
def _convert(schema, name='', required=False):
"""
        Fill the output list with 2-tuples (name, template),
        e.g. ('user.age', 'str user.age: the age of user').
        This allows the output to be sorted by field name.
"""
type_ = schema.get('type', 'any')
required_properties = schema.get('required', ())
if type_ == 'object' and schema.get('properties'):
for prop, next_schema in schema.get('properties', {}).items():
_convert(
next_schema, '{name}.{prop}'.format(**locals()),
(prop in required_properties))
elif type_ == 'array':
_convert(schema['items'], name + '[]')
else:
if name:
name = name.lstrip('.')
constraints = []
if required:
constraints.append('required')
if schema.get('readOnly', False):
constraints.append('read only')
if constraints:
constraints = '({})'.format(', '.join(constraints))
else:
constraints = ''
if schema.get('description', ''):
if constraints:
output.append((
name,
'{type_} {name}:'
' {schema[description]}'
' {constraints}'.format(**locals())))
else:
output.append((
name,
'{type_} {name}:'
' {schema[description]}'.format(**locals())))
else:
if constraints:
output.append(
(name,
'{type_} {name}:'
' {constraints}'.format(**locals())))
else:
output.append(
(name,
'{type_} {name}:'.format(**locals())))
_convert(schema)
for _, render in sorted(output):
yield '{} {}'.format(directive, render)
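# For illustration, a made-up schema and the directive line it yields:
#
#   list(convert_json_schema({'type': 'object', 'properties': {
#       'age': {'type': 'integer', 'description': 'age of user'}}}))
#   # -> [':<json integer age: age of user']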
def _httpresource(endpoint, method, properties, convert, render_examples,
render_request):
# https://github.com/OAI/OpenAPI-Specification/blob/3.0.2/versions/3.0.0.md#operation-object
parameters = properties.get('parameters', [])
responses = properties['responses']
query_param_examples = []
indent = ' '
yield '.. http:{0}:: {1}'.format(method, endpoint)
yield ' :synopsis: {0}'.format(properties.get('summary', 'null'))
yield ''
if 'summary' in properties:
for line in properties['summary'].splitlines():
yield '{indent}**{line}**'.format(**locals())
yield ''
if 'description' in properties:
for line in convert(properties['description']).splitlines():
yield '{indent}{line}'.format(**locals())
yield ''
# print request's path params
for param in filter(lambda p: p['in'] == 'path', parameters):
yield indent + ':param {type} {name}:'.format(
type=param['schema']['type'],
name=param['name'])
for line in convert(param.get('description', '')).splitlines():
yield '{indent}{indent}{line}'.format(**locals())
# print request's query params
for param in filter(lambda p: p['in'] == 'query', parameters):
yield indent + ':query {type} {name}:'.format(
type=param['schema']['type'],
name=param['name'])
for line in convert(param.get('description', '')).splitlines():
yield '{indent}{indent}{line}'.format(**locals())
if param.get('required', False):
yield '{indent}{indent}(Required)'.format(**locals())
example = _parse_schema(param['schema'], method)
example = param.get('example', example)
if param.get('explode', False) and isinstance(example, list):
for v in example:
query_param_examples.append((param['name'], v))
elif param.get('explode', False) and isinstance(example, dict):
for k, v in example.items():
query_param_examples.append((k, v))
else:
query_param_examples.append((param['name'], example))
# print request content
if render_request:
request_content = properties.get('requestBody', {}).get('content', {})
if request_content and 'application/json' in request_content:
schema = request_content['application/json']['schema']
yield ''
for line in convert_json_schema(schema):
yield '{indent}{line}'.format(**locals())
yield ''
req_properties = json.dumps(schema.get('properties', {}), indent=2,
separators=(',', ':'))
yield '{indent}**Request body:**'.format(**locals())
yield ''
yield '{indent}.. sourcecode:: json'.format(**locals())
yield ''
            for line in req_properties.splitlines():
                yield '{indent}{indent}{line}'.format(**locals())
# print request example
if render_examples:
endpoint_examples = endpoint
if query_param_examples:
endpoint_examples = endpoint + "?" + \
parse.urlencode(query_param_examples)
# print request example
request_content = properties.get('requestBody', {}).get('content', {})
for line in _example(
request_content,
method,
endpoint=endpoint_examples,
nb_indent=1):
yield line
# print response status codes
for status, response in responses.items():
yield '{indent}:status {status}:'.format(**locals())
for line in convert(response['description']).splitlines():
yield '{indent}{indent}{line}'.format(**locals())
# print response example
if render_examples:
for line in _example(
response.get('content', {}), status=status, nb_indent=2):
yield line
# print request header params
for param in filter(lambda p: p['in'] == 'header', parameters):
yield indent + ':reqheader {name}:'.format(**param)
for line in convert(param.get('description', '')).splitlines():
yield '{indent}{indent}{line}'.format(**locals())
if param.get('required', False):
yield '{indent}{indent}(Required)'.format(**locals())
# print response headers
for status, response in responses.items():
for headername, header in response.get('headers', {}).items():
yield indent + ':resheader {name}:'.format(name=headername)
for line in convert(header['description']).splitlines():
yield '{indent}{indent}{line}'.format(**locals())
for cb_name, cb_specs in properties.get('callbacks', {}).items():
yield ''
yield indent + '.. admonition:: Callback: ' + cb_name
yield ''
for cb_endpoint in cb_specs.keys():
for cb_method, cb_properties in cb_specs[cb_endpoint].items():
for line in _httpresource(
cb_endpoint,
cb_method,
cb_properties,
convert=convert,
render_examples=render_examples,
render_request=render_request):
if line:
yield indent+indent+line
else:
yield ''
yield ''
def _header(title):
yield title
yield '=' * len(title)
yield ''
def openapihttpdomain(spec, **options):
generators = []
# OpenAPI spec may contain JSON references, common properties, etc.
# Trying to render the spec "As Is" will require to put multiple
# if-s around the code. In order to simplify flow, let's make the
# spec to have only one (expected) schema, i.e. normalize it.
utils.normalize_spec(spec, **options)
# Paths list to be processed
paths = []
# If 'paths' are passed we've got to ensure they exist within an OpenAPI
# spec; otherwise raise error and ask user to fix that.
if 'paths' in options:
if not set(options['paths']).issubset(spec['paths']):
raise ValueError(
'One or more paths are not defined in the spec: %s.' % (
', '.join(set(options['paths']) - set(spec['paths'])),
)
)
paths = options['paths']
# Check against regular expressions to be included
if 'include' in options:
for i in options['include']:
ir = re.compile(i)
for path in spec['paths']:
if ir.match(path):
paths.append(path)
# If no include nor paths option, then take full path
if 'include' not in options and 'paths' not in options:
paths = spec['paths']
# Remove paths matching regexp
if 'exclude' in options:
_paths = []
for e in options['exclude']:
er = re.compile(e)
for path in paths:
if not er.match(path):
_paths.append(path)
paths = _paths
render_request = False
if 'request' in options:
render_request = True
convert = utils.get_text_converter(options)
# https://github.com/OAI/OpenAPI-Specification/blob/3.0.2/versions/3.0.0.md#paths-object
if 'group' in options:
groups = collections.OrderedDict(
[(x['name'], []) for x in spec.get('tags', {})]
)
for endpoint in paths:
for method, properties in spec['paths'][endpoint].items():
key = properties.get('tags', [''])[0]
groups.setdefault(key, []).append(_httpresource(
endpoint,
method,
properties,
convert,
render_examples='examples' in options,
render_request=render_request))
for key in groups.keys():
if key:
generators.append(_header(key))
else:
generators.append(_header('default'))
generators.extend(groups[key])
else:
for endpoint in paths:
for method, properties in spec['paths'][endpoint].items():
generators.append(_httpresource(
endpoint,
method,
properties,
convert,
render_examples='examples' in options,
render_request=render_request))
return iter(itertools.chain(*generators))
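# A minimal sketch of driving this renderer directly; the spec dict is
# illustrative, and the options mirror the checks above:
#
#   lines = openapihttpdomain(spec, examples=True, group=True)
#   print('\n'.join(lines))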

# ===== the-stack_0_175 =====
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
def send_email(name,receiver):
# Creating message subject and sender
subject = 'Welcome to Our Neighbourhood'
sender = '[email protected]'
    # passing in the context variables
text_content = render_to_string('email/notification.txt',{"name": name})
html_content = render_to_string('email/notification.html',{"name": name})
msg = EmailMultiAlternatives(subject,text_content,sender,[receiver])
msg.attach_alternative(html_content,'text/html')
msg.send()

# ===== the-stack_0_176 =====
"""Project: Eskapade - A python-based package for data analysis.
Module: spark_analysis.data_conversion
Created: 2017/05/30
Description:
Converters between Spark, Pandas, and Python data formats
Authors:
KPMG Advanced Analytics & Big Data team, Amstelveen, The Netherlands
Redistribution and use in source and binary forms, with or without
modification, are permitted according to the terms listed in the file
LICENSE.
"""
import uuid
import pyspark
from eskapadespark.helpers import apply_transform_funcs
from eskapade.logger import Logger
SPARK_SQL_TYPES = pyspark.sql.types._type_mappings
logger = Logger()
def create_spark_df(spark, data, schema=None, process_methods=None, **kwargs):
"""Create a Spark data frame from data in a different format.
A Spark data frame is created with either a specified schema or a schema
inferred from the input data. The schema can be specified with the
keyword argument "schema".
Functions to transform the data frame after creation can be specified by
the keyword argument "process_methods". The value of this argument is
an iterable of (function, arguments, keyword arguments) tuples to apply.
The data frame is created with the createDataFrame function of the
SparkSession. Remaining keyword arguments are passed to this function.
>>> spark = pyspark.sql.SparkSession.builder.getOrCreate()
>>> df = create_spark_df(spark,
>>> [[1, 1.1, 'one'], [2, 2.2, 'two']],
>>> schema=['int', 'float', 'str'],
>>> process_methods=[('repartition', (), {'numPartitions': 6})])
>>> df.show()
+---+-----+---+
|int|float|str|
+---+-----+---+
| 2| 2.2|two|
| 1| 1.1|one|
+---+-----+---+
:param pyspark.sql.SparkSession spark: SparkSession instance
:param data: input dataset
:param schema: schema of created data frame
:param iterable process_methods: methods to apply on the data frame after creation
:returns: created data frame
:rtype: pyspark.sql.DataFrame
"""
# check if data-frame schema was provided
if isinstance(schema, int):
# infer schema from a single row (prevents Spark >= 1.6.1 from checking schema of all rows)
def get_row(data, ind):
"""Get row."""
try:
return data.iloc[ind].tolist()
except AttributeError:
pass
try:
row = data.first()
if ind > 0:
logger.warning('Inferring data-frame schema from first row, instead of row with index {i:d}', i=ind)
return row
except AttributeError:
pass
try:
return data[ind]
except TypeError:
raise TypeError('Unable to get row from data of type "{!s}" to infer schema.'.format(type(data)))
row = get_row(data, schema)
def to_python_type(var):
"""Get item."""
try:
return var.item()
except AttributeError:
return var
schema = pyspark.sql.types._infer_schema(tuple(to_python_type(it) for it in row))
try:
for t, n in zip(schema.fields, data.columns):
t.name = str(n)
except AttributeError:
pass
elif isinstance(schema, dict):
# create schema from dictionary of (name, data type) pairs
schema = df_schema(schema)
kwargs['schema'] = schema
# check if input is a data frame
if isinstance(data, pyspark.sql.DataFrame):
if not kwargs['schema']:
kwargs['schema'] = data.schema
data = data.rdd
# create and transform data frame
df = spark.createDataFrame(data, **kwargs)
if process_methods:
df = apply_transform_funcs(df, process_methods)
return df
def df_schema(schema_spec):
"""Create Spark data-frame schema.
Create a schema for a Spark data frame from a dictionary of (name, data
type) pairs, describing the columns. Data types are specified by Python
types or by Spark-SQL types from the pyspark.sql.types module.
>>> from collections import OrderedDict as odict
>>> schema_dict = odict()
>>> schema_dict['foo'] = pyspark.sql.types.IntegerType()
>>> schema_dict['bar'] = odict([('descr', str), ('val', float)])
>>> print(schema_dict)
OrderedDict([('foo', IntegerType), ('bar', OrderedDict([('descr', <class 'str'>), ('val', <class 'float'>)]))])
>>> spark = pyspark.sql.SparkSession.builder.getOrCreate()
>>> df = spark.createDataFrame([(1, ('one', 1.1)), (2, ('two', 2.2))], schema=df_schema(schema_dict))
>>> df.show()
+---+---------+
|foo| bar|
+---+---------+
| 1|[one,1.1]|
| 2|[two,2.2]|
+---+---------+
:param dict schema_spec: schema specification
:returns: data-frame schema
:rtype: pyspark.sql.types.StructType
:raises: TypeError if data type is specified incorrectly
"""
def get_field(name, data_type):
"""Return a struct field for specified data type."""
# treat dictionaries as struct types
if isinstance(data_type, dict):
data_type = pyspark.sql.types.StructType([get_field(*spec) for spec in data_type.items()])
# convert Python types to Spark-SQL types
data_type = SPARK_SQL_TYPES.get(data_type, data_type)
# convert Spark-SQL type classes to Spark-SQL types
if isinstance(data_type, type) and issubclass(data_type, pyspark.sql.types.DataType):
data_type = data_type()
# check and return data type
if not isinstance(data_type, pyspark.sql.types.DataType):
raise TypeError('Type specifications for data-frame schemas must be DataTypes or dictionaries')
return pyspark.sql.types.StructField(str(name), data_type)
# return a struct type with a list of struct fields for specified data types
return pyspark.sql.types.StructType([get_field(*spec) for spec in schema_spec.items()])
def hive_table_from_df(spark, df, db, table):
"""Create a Hive table from a Spark data frame.
:param pyspark.sql.SparkSession spark: SparkSession instance
:param pyspark.sql.DataFrame df: input data frame
:param str db: database for table
:param str table: name of table
"""
# register temporary table
temp_name = '{0:s}_{1:s}'.format(table, uuid.uuid4().hex)
df.createOrReplaceTempView(temp_name)
# create table
table_spec = '.'.join(s for s in (db, table) if s)
create_table_query = 'CREATE TABLE {spec} AS SELECT {cols} FROM {name}'\
.format(name=temp_name, spec=table_spec, cols=', '.join(c for c in df.columns))
logger.debug(create_table_query)
spark.sql(create_table_query)
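# For illustration: for a df with columns ['a', 'b'], db='mydb' and
# table='t', the generated query looks like
#   CREATE TABLE mydb.t AS SELECT a, b FROM t_<uuid-hex>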

# ===== the-stack_0_177 =====
from sympy.combinatorics import Permutation as Perm
from sympy.combinatorics.perm_groups import PermutationGroup
from sympy.core import Basic, Tuple
from sympy.core.compatibility import as_int
from sympy.sets import FiniteSet
from sympy.utilities.iterables import (minlex, unflatten, flatten)
rmul = Perm.rmul
class Polyhedron(Basic):
"""
Represents the polyhedral symmetry group (PSG).
Explanation
===========
The PSG is one of the symmetry groups of the Platonic solids.
There are three polyhedral groups: the tetrahedral group
of order 12, the octahedral group of order 24, and the
icosahedral group of order 60.
All doctests have been given in the docstring of the
constructor of the object.
References
==========
.. [1] http://mathworld.wolfram.com/PolyhedralGroup.html
"""
_edges = None
def __new__(cls, corners, faces=[], pgroup=[]):
"""
The constructor of the Polyhedron group object.
Explanation
===========
It takes up to three parameters: the corners, faces, and
allowed transformations.
The corners/vertices are entered as a list of arbitrary
expressions that are used to identify each vertex.
The faces are entered as a list of tuples of indices; a tuple
of indices identifies the vertices which define the face. They
should be entered in a cw or ccw order; they will be standardized
        by reversal and rotation to give the lowest lexical ordering.
If no faces are given then no edges will be computed.
>>> from sympy.combinatorics.polyhedron import Polyhedron
>>> Polyhedron(list('abc'), [(1, 2, 0)]).faces
FiniteSet((0, 1, 2))
>>> Polyhedron(list('abc'), [(1, 0, 2)]).faces
FiniteSet((0, 1, 2))
The allowed transformations are entered as allowable permutations
        of the vertices for the polyhedron. Instances of Permutation
        (as with faces) should refer to the supplied vertices by index.
        These permutations are stored as a PermutationGroup.
Examples
========
>>> from sympy.combinatorics.permutations import Permutation
>>> from sympy.interactive import init_printing
>>> from sympy.abc import w, x, y, z
>>> init_printing(pretty_print=False, perm_cyclic=False)
Here we construct the Polyhedron object for a tetrahedron.
>>> corners = [w, x, y, z]
>>> faces = [(0, 1, 2), (0, 2, 3), (0, 3, 1), (1, 2, 3)]
Next, allowed transformations of the polyhedron must be given. This
is given as permutations of vertices.
Although the vertices of a tetrahedron can be numbered in 24 (4!)
different ways, there are only 12 different orientations for a
physical tetrahedron. The following permutations, applied once or
twice, will generate all 12 of the orientations. (The identity
permutation, Permutation(range(4)), is not included since it does
not change the orientation of the vertices.)
>>> pgroup = [Permutation([[0, 1, 2], [3]]), \
Permutation([[0, 1, 3], [2]]), \
Permutation([[0, 2, 3], [1]]), \
Permutation([[1, 2, 3], [0]]), \
Permutation([[0, 1], [2, 3]]), \
Permutation([[0, 2], [1, 3]]), \
Permutation([[0, 3], [1, 2]])]
The Polyhedron is now constructed and demonstrated:
>>> tetra = Polyhedron(corners, faces, pgroup)
>>> tetra.size
4
>>> tetra.edges
FiniteSet((0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3))
>>> tetra.corners
(w, x, y, z)
It can be rotated with an arbitrary permutation of vertices, e.g.
the following permutation is not in the pgroup:
>>> tetra.rotate(Permutation([0, 1, 3, 2]))
>>> tetra.corners
(w, x, z, y)
An allowed permutation of the vertices can be constructed by
repeatedly applying permutations from the pgroup to the vertices.
Here is a demonstration that applying p and p**2 for every p in
pgroup generates all the orientations of a tetrahedron and no others:
>>> all = ( (w, x, y, z), \
(x, y, w, z), \
(y, w, x, z), \
(w, z, x, y), \
(z, w, y, x), \
(w, y, z, x), \
(y, z, w, x), \
(x, z, y, w), \
(z, y, x, w), \
(y, x, z, w), \
(x, w, z, y), \
(z, x, w, y) )
>>> got = []
>>> for p in (pgroup + [p**2 for p in pgroup]):
... h = Polyhedron(corners)
... h.rotate(p)
... got.append(h.corners)
...
>>> set(got) == set(all)
True
The make_perm method of a PermutationGroup will randomly pick
permutations, multiply them together, and return the permutation that
can be applied to the polyhedron to give the orientation produced
by those individual permutations.
Here, 3 permutations are used:
>>> tetra.pgroup.make_perm(3) # doctest: +SKIP
Permutation([0, 3, 1, 2])
To select the permutations that should be used, supply a list
of indices to the permutations in pgroup in the order they should
be applied:
>>> use = [0, 0, 2]
>>> p002 = tetra.pgroup.make_perm(3, use)
>>> p002
Permutation([1, 0, 3, 2])
Apply them one at a time:
>>> tetra.reset()
>>> for i in use:
... tetra.rotate(pgroup[i])
...
>>> tetra.vertices
(x, w, z, y)
>>> sequentially = tetra.vertices
Apply the composite permutation:
>>> tetra.reset()
>>> tetra.rotate(p002)
>>> tetra.corners
(x, w, z, y)
>>> tetra.corners in all and tetra.corners == sequentially
True
Notes
=====
Defining permutation groups
---------------------------
        It is not necessary to enter any permutations, nor is it necessary to
enter a complete set of transformations. In fact, for a polyhedron,
all configurations can be constructed from just two permutations.
For example, the orientations of a tetrahedron can be generated from
an axis passing through a vertex and face and another axis passing
through a different vertex or from an axis passing through the
midpoints of two edges opposite of each other.
For simplicity of presentation, consider a square --
not a cube -- with vertices 1, 2, 3, and 4:
1-----2 We could think of axes of rotation being:
| | 1) through the face
| | 2) from midpoint 1-2 to 3-4 or 1-3 to 2-4
3-----4 3) lines 1-4 or 2-3
To determine how to write the permutations, imagine 4 cameras,
one at each corner, labeled A-D:
A B A B
1-----2 1-----3 vertex index:
| | | | 1 0
| | | | 2 1
3-----4 2-----4 3 2
C D C D 4 3
original after rotation
along 1-4
A diagonal and a face axis will be chosen for the "permutation group"
from which any orientation can be constructed.
>>> pgroup = []
Imagine a clockwise rotation when viewing 1-4 from camera A. The new
orientation is (in camera-order): 1, 3, 2, 4 so the permutation is
given using the *indices* of the vertices as:
>>> pgroup.append(Permutation((0, 2, 1, 3)))
Now imagine rotating clockwise when looking down an axis entering the
center of the square as viewed. The new camera-order would be
3, 1, 4, 2 so the permutation is (using indices):
>>> pgroup.append(Permutation((2, 0, 3, 1)))
The square can now be constructed:
** use real-world labels for the vertices, entering them in
camera order
** for the faces we use zero-based indices of the vertices
in *edge-order* as the face is traversed; neither the
direction nor the starting point matter -- the faces are
only used to define edges (if so desired).
>>> square = Polyhedron((1, 2, 3, 4), [(0, 1, 3, 2)], pgroup)
To rotate the square with a single permutation we can do:
>>> square.rotate(square.pgroup[0])
>>> square.corners
(1, 3, 2, 4)
To use more than one permutation (or to use one permutation more
than once) it is more convenient to use the make_perm method:
>>> p011 = square.pgroup.make_perm([0, 1, 1]) # diag flip + 2 rotations
>>> square.reset() # return to initial orientation
>>> square.rotate(p011)
>>> square.corners
(4, 2, 3, 1)
Thinking outside the box
------------------------
Although the Polyhedron object has a direct physical meaning, it
actually has broader application. In the most general sense it is
just a decorated PermutationGroup, allowing one to connect the
permutations to something physical. For example, a Rubik's cube is
not a proper polyhedron, but the Polyhedron class can be used to
represent it in a way that helps to visualize the Rubik's cube.
>>> from sympy.utilities.iterables import flatten, unflatten
>>> from sympy import symbols
>>> from sympy.combinatorics import RubikGroup
>>> facelets = flatten([symbols(s+'1:5') for s in 'UFRBLD'])
>>> def show():
... pairs = unflatten(r2.corners, 2)
... print(pairs[::2])
... print(pairs[1::2])
...
>>> r2 = Polyhedron(facelets, pgroup=RubikGroup(2))
>>> show()
[(U1, U2), (F1, F2), (R1, R2), (B1, B2), (L1, L2), (D1, D2)]
[(U3, U4), (F3, F4), (R3, R4), (B3, B4), (L3, L4), (D3, D4)]
>>> r2.rotate(0) # cw rotation of F
>>> show()
[(U1, U2), (F3, F1), (U3, R2), (B1, B2), (L1, D1), (R3, R1)]
[(L4, L2), (F4, F2), (U4, R4), (B3, B4), (L3, D2), (D3, D4)]
Predefined Polyhedra
====================
For convenience, the vertices and faces are defined for the following
standard solids along with a permutation group for transformations.
When the polyhedron is oriented as indicated below, the vertices in
a given horizontal plane are numbered in ccw direction, starting from
the vertex that will give the lowest indices in a given face. (In the
net of the vertices, indices preceded by "-" indicate replication of
the lhs index in the net.)
tetrahedron, tetrahedron_faces
------------------------------
4 vertices (vertex up) net:
0 0-0
1 2 3-1
4 faces:
(0, 1, 2) (0, 2, 3) (0, 3, 1) (1, 2, 3)
cube, cube_faces
----------------
8 vertices (face up) net:
0 1 2 3-0
4 5 6 7-4
6 faces:
(0, 1, 2, 3)
(0, 1, 5, 4) (1, 2, 6, 5) (2, 3, 7, 6) (0, 3, 7, 4)
(4, 5, 6, 7)
octahedron, octahedron_faces
----------------------------
6 vertices (vertex up) net:
0 0 0-0
1 2 3 4-1
5 5 5-5
8 faces:
(0, 1, 2) (0, 2, 3) (0, 3, 4) (0, 1, 4)
(1, 2, 5) (2, 3, 5) (3, 4, 5) (1, 4, 5)
dodecahedron, dodecahedron_faces
--------------------------------
20 vertices (vertex up) net:
0 1 2 3 4 -0
5 6 7 8 9 -5
14 10 11 12 13-14
15 16 17 18 19-15
12 faces:
(0, 1, 2, 3, 4) (0, 1, 6, 10, 5) (1, 2, 7, 11, 6)
(2, 3, 8, 12, 7) (3, 4, 9, 13, 8) (0, 4, 9, 14, 5)
(5, 10, 16, 15, 14) (6, 10, 16, 17, 11) (7, 11, 17, 18, 12)
(8, 12, 18, 19, 13) (9, 13, 19, 15, 14)(15, 16, 17, 18, 19)
icosahedron, icosahedron_faces
------------------------------
12 vertices (face up) net:
0 0 0 0 -0
1 2 3 4 5 -1
6 7 8 9 10 -6
11 11 11 11 -11
20 faces:
(0, 1, 2) (0, 2, 3) (0, 3, 4)
(0, 4, 5) (0, 1, 5) (1, 2, 6)
(2, 3, 7) (3, 4, 8) (4, 5, 9)
(1, 5, 10) (2, 6, 7) (3, 7, 8)
(4, 8, 9) (5, 9, 10) (1, 6, 10)
(6, 7, 11) (7, 8, 11) (8, 9, 11)
(9, 10, 11) (6, 10, 11)
>>> from sympy.combinatorics.polyhedron import cube
>>> cube.edges
FiniteSet((0, 1), (0, 3), (0, 4), (1, 2), (1, 5), (2, 3), (2, 6), (3, 7), (4, 5), (4, 7), (5, 6), (6, 7))
If you want to use letters or other names for the corners you
can still use the pre-calculated faces:
>>> corners = list('abcdefgh')
>>> Polyhedron(corners, cube.faces).corners
(a, b, c, d, e, f, g, h)
References
==========
.. [1] www.ocf.berkeley.edu/~wwu/articles/platonicsolids.pdf
"""
faces = [minlex(f, directed=False, is_set=True) for f in faces]
corners, faces, pgroup = args = \
[Tuple(*a) for a in (corners, faces, pgroup)]
obj = Basic.__new__(cls, *args)
obj._corners = tuple(corners) # in order given
obj._faces = FiniteSet(*faces)
if pgroup and pgroup[0].size != len(corners):
raise ValueError("Permutation size unequal to number of corners.")
# use the identity permutation if none are given
obj._pgroup = PermutationGroup(
pgroup or [Perm(range(len(corners)))] )
return obj
@property
def corners(self):
"""
Get the corners of the Polyhedron.
The method ``vertices`` is an alias for ``corners``.
Examples
========
>>> from sympy.combinatorics import Polyhedron
>>> from sympy.abc import a, b, c, d
>>> p = Polyhedron(list('abcd'))
>>> p.corners == p.vertices == (a, b, c, d)
True
See Also
========
array_form, cyclic_form
"""
return self._corners
vertices = corners
@property
def array_form(self):
"""Return the indices of the corners.
The indices are given relative to the original position of corners.
Examples
========
>>> from sympy.combinatorics.polyhedron import tetrahedron
>>> tetrahedron = tetrahedron.copy()
>>> tetrahedron.array_form
[0, 1, 2, 3]
>>> tetrahedron.rotate(0)
>>> tetrahedron.array_form
[0, 2, 3, 1]
>>> tetrahedron.pgroup[0].array_form
[0, 2, 3, 1]
See Also
========
corners, cyclic_form
"""
corners = list(self.args[0])
return [corners.index(c) for c in self.corners]
@property
def cyclic_form(self):
"""Return the indices of the corners in cyclic notation.
The indices are given relative to the original position of corners.
See Also
========
corners, array_form
"""
return Perm._af_new(self.array_form).cyclic_form
@property
def size(self):
"""
Get the number of corners of the Polyhedron.
"""
return len(self._corners)
@property
def faces(self):
"""
Get the faces of the Polyhedron.
"""
return self._faces
@property
def pgroup(self):
"""
Get the permutations of the Polyhedron.
"""
return self._pgroup
@property
def edges(self):
"""
Given the faces of the polyhedra we can get the edges.
Examples
========
>>> from sympy.combinatorics import Polyhedron
>>> from sympy.abc import a, b, c
>>> corners = (a, b, c)
>>> faces = [(0, 1, 2)]
>>> Polyhedron(corners, faces).edges
FiniteSet((0, 1), (0, 2), (1, 2))
"""
if self._edges is None:
output = set()
for face in self.faces:
for i in range(len(face)):
edge = tuple(sorted([face[i], face[i - 1]]))
output.add(edge)
self._edges = FiniteSet(*output)
return self._edges
def rotate(self, perm):
"""
Apply a permutation to the polyhedron *in place*. The permutation
may be given as a Permutation instance or an integer indicating
which permutation from pgroup of the Polyhedron should be
applied.
This is an operation that is analogous to rotation about
an axis by a fixed increment.
Notes
=====
When a Permutation is applied, no check is done to see if that
is a valid permutation for the Polyhedron. For example, a cube
could be given a permutation which effectively swaps only 2
vertices. A valid permutation (that rotates the object in a
physical way) will be obtained if one only uses
permutations from the ``pgroup`` of the Polyhedron. On the other
hand, allowing arbitrary rotations (applications of permutations)
gives a way to follow named elements rather than indices since
Polyhedron allows vertices to be named while Permutation works
only with indices.
Examples
========
>>> from sympy.combinatorics import Polyhedron, Permutation
>>> from sympy.combinatorics.polyhedron import cube
>>> cube = cube.copy()
>>> cube.corners
(0, 1, 2, 3, 4, 5, 6, 7)
>>> cube.rotate(0)
>>> cube.corners
(1, 2, 3, 0, 5, 6, 7, 4)
A non-physical "rotation" that is not prohibited by this method:
>>> cube.reset()
>>> cube.rotate(Permutation([[1, 2]], size=8))
>>> cube.corners
(0, 2, 1, 3, 4, 5, 6, 7)
Polyhedron can be used to follow elements of set that are
identified by letters instead of integers:
>>> shadow = h5 = Polyhedron(list('abcde'))
>>> p = Permutation([3, 0, 1, 2, 4])
>>> h5.rotate(p)
>>> h5.corners
(d, a, b, c, e)
>>> _ == shadow.corners
True
>>> copy = h5.copy()
>>> h5.rotate(p)
>>> h5.corners == copy.corners
False
"""
if not isinstance(perm, Perm):
perm = self.pgroup[perm]
# and we know it's valid
else:
if perm.size != self.size:
raise ValueError('Polyhedron and Permutation sizes differ.')
a = perm.array_form
corners = [self.corners[a[i]] for i in range(len(self.corners))]
self._corners = tuple(corners)
def reset(self):
"""Return corners to their original positions.
Examples
========
>>> from sympy.combinatorics.polyhedron import tetrahedron as T
>>> T = T.copy()
>>> T.corners
(0, 1, 2, 3)
>>> T.rotate(0)
>>> T.corners
(0, 2, 3, 1)
>>> T.reset()
>>> T.corners
(0, 1, 2, 3)
"""
self._corners = self.args[0]
def _pgroup_calcs():
"""Return the permutation groups for each of the polyhedra and the face
definitions: tetrahedron, cube, octahedron, dodecahedron, icosahedron,
tetrahedron_faces, cube_faces, octahedron_faces, dodecahedron_faces,
icosahedron_faces
Explanation
===========
(This author didn't find and didn't know of a better way to do it though
there likely is such a way.)
Although only 2 permutations are needed for a polyhedron in order to
generate all the possible orientations, a group of permutations is
provided instead. A set of permutations is called a "group" if::
a*b = c (for any pair of permutations in the group, a and b, their
product, c, is in the group)
a*(b*c) = (a*b)*c (for any 3 permutations in the group associativity holds)
there is an identity permutation, I, such that I*a = a*I for all elements
in the group
a*b = I (the inverse of each permutation is also in the group)
None of the polyhedron groups defined follow these definitions of a group.
Instead, they are selected to contain those permutations whose powers
alone will construct all orientations of the polyhedron, i.e. for
permutations ``a``, ``b``, etc... in the group, ``a, a**2, ..., a**o_a``,
``b, b**2, ..., b**o_b``, etc... (where ``o_i`` is the order of
permutation ``i``) generate all permutations of the polyhedron instead of
mixed products like ``a*b``, ``a*b**2``, etc....
Note that for a polyhedron with n vertices, the valid permutations of the
vertices exclude those that do not maintain its faces. e.g. the
permutation BCDE of a square's four corners, ABCD, is a valid
permutation while CBDE is not (because this would twist the square).
Examples
========
    The is_group checks for: closure, the presence of the Identity permutation,
    and the presence of the inverse for each of the elements in the group.
    Because PermutationGroup stores the closure generated by the supplied
    permutations, is_group returns True for each polyhedron even though the
    supplied permutations alone do not form a group:
>>> from sympy.combinatorics.polyhedron import (
... tetrahedron, cube, octahedron, dodecahedron, icosahedron)
...
>>> polyhedra = (tetrahedron, cube, octahedron, dodecahedron, icosahedron)
>>> [h.pgroup.is_group for h in polyhedra]
...
[True, True, True, True, True]
Although tests in polyhedron's test suite check that powers of the
permutations in the groups generate all permutations of the vertices
of the polyhedron, here we also demonstrate the powers of the given
permutations create a complete group for the tetrahedron:
>>> from sympy.combinatorics import Permutation, PermutationGroup
>>> for h in polyhedra[:1]:
... G = h.pgroup
... perms = set()
... for g in G:
... for e in range(g.order()):
... p = tuple((g**e).array_form)
... perms.add(p)
...
... perms = [Permutation(p) for p in perms]
... assert PermutationGroup(perms).is_group
In addition to doing the above, the tests in the suite confirm that the
faces are all present after the application of each permutation.
References
==========
.. [1] http://dogschool.tripod.com/trianglegroup.html
"""
def _pgroup_of_double(polyh, ordered_faces, pgroup):
n = len(ordered_faces[0])
        # the vertices of the double which sits inside a given polyhedron
# can be found by tracking the faces of the outer polyhedron.
# A map between face and the vertex of the double is made so that
# after rotation the position of the vertices can be located
fmap = dict(zip(ordered_faces,
range(len(ordered_faces))))
flat_faces = flatten(ordered_faces)
new_pgroup = []
for i, p in enumerate(pgroup):
h = polyh.copy()
h.rotate(p)
c = h.corners
# reorder corners in the order they should appear when
# enumerating the faces
reorder = unflatten([c[j] for j in flat_faces], n)
# make them canonical
reorder = [tuple(map(as_int,
minlex(f, directed=False, is_set=True)))
for f in reorder]
# map face to vertex: the resulting list of vertices are the
# permutation that we seek for the double
new_pgroup.append(Perm([fmap[f] for f in reorder]))
return new_pgroup
tetrahedron_faces = [
(0, 1, 2), (0, 2, 3), (0, 3, 1), # upper 3
(1, 2, 3), # bottom
]
# cw from top
#
_t_pgroup = [
Perm([[1, 2, 3], [0]]), # cw from top
Perm([[0, 1, 2], [3]]), # cw from front face
Perm([[0, 3, 2], [1]]), # cw from back right face
Perm([[0, 3, 1], [2]]), # cw from back left face
Perm([[0, 1], [2, 3]]), # through front left edge
Perm([[0, 2], [1, 3]]), # through front right edge
Perm([[0, 3], [1, 2]]), # through back edge
]
tetrahedron = Polyhedron(
range(4),
tetrahedron_faces,
_t_pgroup)
cube_faces = [
(0, 1, 2, 3), # upper
(0, 1, 5, 4), (1, 2, 6, 5), (2, 3, 7, 6), (0, 3, 7, 4), # middle 4
(4, 5, 6, 7), # lower
]
# U, D, F, B, L, R = up, down, front, back, left, right
_c_pgroup = [Perm(p) for p in
[
[1, 2, 3, 0, 5, 6, 7, 4], # cw from top, U
[4, 0, 3, 7, 5, 1, 2, 6], # cw from F face
[4, 5, 1, 0, 7, 6, 2, 3], # cw from R face
[1, 0, 4, 5, 2, 3, 7, 6], # cw through UF edge
[6, 2, 1, 5, 7, 3, 0, 4], # cw through UR edge
[6, 7, 3, 2, 5, 4, 0, 1], # cw through UB edge
[3, 7, 4, 0, 2, 6, 5, 1], # cw through UL edge
[4, 7, 6, 5, 0, 3, 2, 1], # cw through FL edge
[6, 5, 4, 7, 2, 1, 0, 3], # cw through FR edge
[0, 3, 7, 4, 1, 2, 6, 5], # cw through UFL vertex
[5, 1, 0, 4, 6, 2, 3, 7], # cw through UFR vertex
[5, 6, 2, 1, 4, 7, 3, 0], # cw through UBR vertex
[7, 4, 0, 3, 6, 5, 1, 2], # cw through UBL
]]
cube = Polyhedron(
range(8),
cube_faces,
_c_pgroup)
octahedron_faces = [
(0, 1, 2), (0, 2, 3), (0, 3, 4), (0, 1, 4), # top 4
(1, 2, 5), (2, 3, 5), (3, 4, 5), (1, 4, 5), # bottom 4
]
octahedron = Polyhedron(
range(6),
octahedron_faces,
_pgroup_of_double(cube, cube_faces, _c_pgroup))
dodecahedron_faces = [
(0, 1, 2, 3, 4), # top
(0, 1, 6, 10, 5), (1, 2, 7, 11, 6), (2, 3, 8, 12, 7), # upper 5
(3, 4, 9, 13, 8), (0, 4, 9, 14, 5),
(5, 10, 16, 15, 14), (6, 10, 16, 17, 11), (7, 11, 17, 18,
12), # lower 5
(8, 12, 18, 19, 13), (9, 13, 19, 15, 14),
(15, 16, 17, 18, 19) # bottom
]
def _string_to_perm(s):
rv = [Perm(range(20))]
p = None
for si in s:
if si not in '01':
count = int(si) - 1
else:
count = 1
if si == '0':
p = _f0
elif si == '1':
p = _f1
rv.extend([p]*count)
return Perm.rmul(*rv)
# top face cw
_f0 = Perm([
1, 2, 3, 4, 0, 6, 7, 8, 9, 5, 11,
12, 13, 14, 10, 16, 17, 18, 19, 15])
# front face cw
_f1 = Perm([
5, 0, 4, 9, 14, 10, 1, 3, 13, 15,
6, 2, 8, 19, 16, 17, 11, 7, 12, 18])
# the strings below, like 0104 are shorthand for F0*F1*F0**4 and are
# the remaining 4 face rotations, 15 edge permutations, and the
# 10 vertex rotations.
_dodeca_pgroup = [_f0, _f1] + [_string_to_perm(s) for s in '''
0104 140 014 0410
010 1403 03104 04103 102
120 1304 01303 021302 03130
0412041 041204103 04120410 041204104 041204102
10 01 1402 0140 04102 0412 1204 1302 0130 03120'''.strip().split()]
dodecahedron = Polyhedron(
range(20),
dodecahedron_faces,
_dodeca_pgroup)
icosahedron_faces = [
(0, 1, 2), (0, 2, 3), (0, 3, 4), (0, 4, 5), (0, 1, 5),
(1, 6, 7), (1, 2, 7), (2, 7, 8), (2, 3, 8), (3, 8, 9),
(3, 4, 9), (4, 9, 10), (4, 5, 10), (5, 6, 10), (1, 5, 6),
(6, 7, 11), (7, 8, 11), (8, 9, 11), (9, 10, 11), (6, 10, 11)]
icosahedron = Polyhedron(
range(12),
icosahedron_faces,
_pgroup_of_double(
dodecahedron, dodecahedron_faces, _dodeca_pgroup))
return (tetrahedron, cube, octahedron, dodecahedron, icosahedron,
tetrahedron_faces, cube_faces, octahedron_faces,
dodecahedron_faces, icosahedron_faces)
# -----------------------------------------------------------------------
# Standard Polyhedron groups
#
# These are generated using _pgroup_calcs() above. However to save
# import time we encode them explicitly here.
# -----------------------------------------------------------------------
tetrahedron = Polyhedron(
Tuple(0, 1, 2, 3),
Tuple(
Tuple(0, 1, 2),
Tuple(0, 2, 3),
Tuple(0, 1, 3),
Tuple(1, 2, 3)),
Tuple(
Perm(1, 2, 3),
Perm(3)(0, 1, 2),
Perm(0, 3, 2),
Perm(0, 3, 1),
Perm(0, 1)(2, 3),
Perm(0, 2)(1, 3),
Perm(0, 3)(1, 2)
))
cube = Polyhedron(
Tuple(0, 1, 2, 3, 4, 5, 6, 7),
Tuple(
Tuple(0, 1, 2, 3),
Tuple(0, 1, 5, 4),
Tuple(1, 2, 6, 5),
Tuple(2, 3, 7, 6),
Tuple(0, 3, 7, 4),
Tuple(4, 5, 6, 7)),
Tuple(
Perm(0, 1, 2, 3)(4, 5, 6, 7),
Perm(0, 4, 5, 1)(2, 3, 7, 6),
Perm(0, 4, 7, 3)(1, 5, 6, 2),
Perm(0, 1)(2, 4)(3, 5)(6, 7),
Perm(0, 6)(1, 2)(3, 5)(4, 7),
Perm(0, 6)(1, 7)(2, 3)(4, 5),
Perm(0, 3)(1, 7)(2, 4)(5, 6),
Perm(0, 4)(1, 7)(2, 6)(3, 5),
Perm(0, 6)(1, 5)(2, 4)(3, 7),
Perm(1, 3, 4)(2, 7, 5),
Perm(7)(0, 5, 2)(3, 4, 6),
Perm(0, 5, 7)(1, 6, 3),
Perm(0, 7, 2)(1, 4, 6)))
octahedron = Polyhedron(
Tuple(0, 1, 2, 3, 4, 5),
Tuple(
Tuple(0, 1, 2),
Tuple(0, 2, 3),
Tuple(0, 3, 4),
Tuple(0, 1, 4),
Tuple(1, 2, 5),
Tuple(2, 3, 5),
Tuple(3, 4, 5),
Tuple(1, 4, 5)),
Tuple(
Perm(5)(1, 2, 3, 4),
Perm(0, 4, 5, 2),
Perm(0, 1, 5, 3),
Perm(0, 1)(2, 4)(3, 5),
Perm(0, 2)(1, 3)(4, 5),
Perm(0, 3)(1, 5)(2, 4),
Perm(0, 4)(1, 3)(2, 5),
Perm(0, 5)(1, 4)(2, 3),
Perm(0, 5)(1, 2)(3, 4),
Perm(0, 4, 1)(2, 3, 5),
Perm(0, 1, 2)(3, 4, 5),
Perm(0, 2, 3)(1, 5, 4),
Perm(0, 4, 3)(1, 5, 2)))
dodecahedron = Polyhedron(
Tuple(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19),
Tuple(
Tuple(0, 1, 2, 3, 4),
Tuple(0, 1, 6, 10, 5),
Tuple(1, 2, 7, 11, 6),
Tuple(2, 3, 8, 12, 7),
Tuple(3, 4, 9, 13, 8),
Tuple(0, 4, 9, 14, 5),
Tuple(5, 10, 16, 15, 14),
Tuple(6, 10, 16, 17, 11),
Tuple(7, 11, 17, 18, 12),
Tuple(8, 12, 18, 19, 13),
Tuple(9, 13, 19, 15, 14),
Tuple(15, 16, 17, 18, 19)),
Tuple(
Perm(0, 1, 2, 3, 4)(5, 6, 7, 8, 9)(10, 11, 12, 13, 14)(15, 16, 17, 18, 19),
Perm(0, 5, 10, 6, 1)(2, 4, 14, 16, 11)(3, 9, 15, 17, 7)(8, 13, 19, 18, 12),
Perm(0, 10, 17, 12, 3)(1, 6, 11, 7, 2)(4, 5, 16, 18, 8)(9, 14, 15, 19, 13),
Perm(0, 6, 17, 19, 9)(1, 11, 18, 13, 4)(2, 7, 12, 8, 3)(5, 10, 16, 15, 14),
Perm(0, 2, 12, 19, 14)(1, 7, 18, 15, 5)(3, 8, 13, 9, 4)(6, 11, 17, 16, 10),
Perm(0, 4, 9, 14, 5)(1, 3, 13, 15, 10)(2, 8, 19, 16, 6)(7, 12, 18, 17, 11),
Perm(0, 1)(2, 5)(3, 10)(4, 6)(7, 14)(8, 16)(9, 11)(12, 15)(13, 17)(18, 19),
Perm(0, 7)(1, 2)(3, 6)(4, 11)(5, 12)(8, 10)(9, 17)(13, 16)(14, 18)(15, 19),
Perm(0, 12)(1, 8)(2, 3)(4, 7)(5, 18)(6, 13)(9, 11)(10, 19)(14, 17)(15, 16),
Perm(0, 8)(1, 13)(2, 9)(3, 4)(5, 12)(6, 19)(7, 14)(10, 18)(11, 15)(16, 17),
Perm(0, 4)(1, 9)(2, 14)(3, 5)(6, 13)(7, 15)(8, 10)(11, 19)(12, 16)(17, 18),
Perm(0, 5)(1, 14)(2, 15)(3, 16)(4, 10)(6, 9)(7, 19)(8, 17)(11, 13)(12, 18),
Perm(0, 11)(1, 6)(2, 10)(3, 16)(4, 17)(5, 7)(8, 15)(9, 18)(12, 14)(13, 19),
Perm(0, 18)(1, 12)(2, 7)(3, 11)(4, 17)(5, 19)(6, 8)(9, 16)(10, 13)(14, 15),
Perm(0, 18)(1, 19)(2, 13)(3, 8)(4, 12)(5, 17)(6, 15)(7, 9)(10, 16)(11, 14),
Perm(0, 13)(1, 19)(2, 15)(3, 14)(4, 9)(5, 8)(6, 18)(7, 16)(10, 12)(11, 17),
Perm(0, 16)(1, 15)(2, 19)(3, 18)(4, 17)(5, 10)(6, 14)(7, 13)(8, 12)(9, 11),
Perm(0, 18)(1, 17)(2, 16)(3, 15)(4, 19)(5, 12)(6, 11)(7, 10)(8, 14)(9, 13),
Perm(0, 15)(1, 19)(2, 18)(3, 17)(4, 16)(5, 14)(6, 13)(7, 12)(8, 11)(9, 10),
Perm(0, 17)(1, 16)(2, 15)(3, 19)(4, 18)(5, 11)(6, 10)(7, 14)(8, 13)(9, 12),
Perm(0, 19)(1, 18)(2, 17)(3, 16)(4, 15)(5, 13)(6, 12)(7, 11)(8, 10)(9, 14),
Perm(1, 4, 5)(2, 9, 10)(3, 14, 6)(7, 13, 16)(8, 15, 11)(12, 19, 17),
Perm(19)(0, 6, 2)(3, 5, 11)(4, 10, 7)(8, 14, 17)(9, 16, 12)(13, 15, 18),
Perm(0, 11, 8)(1, 7, 3)(4, 6, 12)(5, 17, 13)(9, 10, 18)(14, 16, 19),
Perm(0, 7, 13)(1, 12, 9)(2, 8, 4)(5, 11, 19)(6, 18, 14)(10, 17, 15),
Perm(0, 3, 9)(1, 8, 14)(2, 13, 5)(6, 12, 15)(7, 19, 10)(11, 18, 16),
Perm(0, 14, 10)(1, 9, 16)(2, 13, 17)(3, 19, 11)(4, 15, 6)(7, 8, 18),
Perm(0, 16, 7)(1, 10, 11)(2, 5, 17)(3, 14, 18)(4, 15, 12)(8, 9, 19),
Perm(0, 16, 13)(1, 17, 8)(2, 11, 12)(3, 6, 18)(4, 10, 19)(5, 15, 9),
Perm(0, 11, 15)(1, 17, 14)(2, 18, 9)(3, 12, 13)(4, 7, 19)(5, 6, 16),
Perm(0, 8, 15)(1, 12, 16)(2, 18, 10)(3, 19, 5)(4, 13, 14)(6, 7, 17)))
icosahedron = Polyhedron(
Tuple(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11),
Tuple(
Tuple(0, 1, 2),
Tuple(0, 2, 3),
Tuple(0, 3, 4),
Tuple(0, 4, 5),
Tuple(0, 1, 5),
Tuple(1, 6, 7),
Tuple(1, 2, 7),
Tuple(2, 7, 8),
Tuple(2, 3, 8),
Tuple(3, 8, 9),
Tuple(3, 4, 9),
Tuple(4, 9, 10),
Tuple(4, 5, 10),
Tuple(5, 6, 10),
Tuple(1, 5, 6),
Tuple(6, 7, 11),
Tuple(7, 8, 11),
Tuple(8, 9, 11),
Tuple(9, 10, 11),
Tuple(6, 10, 11)),
Tuple(
Perm(11)(1, 2, 3, 4, 5)(6, 7, 8, 9, 10),
Perm(0, 5, 6, 7, 2)(3, 4, 10, 11, 8),
Perm(0, 1, 7, 8, 3)(4, 5, 6, 11, 9),
Perm(0, 2, 8, 9, 4)(1, 7, 11, 10, 5),
Perm(0, 3, 9, 10, 5)(1, 2, 8, 11, 6),
Perm(0, 4, 10, 6, 1)(2, 3, 9, 11, 7),
Perm(0, 1)(2, 5)(3, 6)(4, 7)(8, 10)(9, 11),
Perm(0, 2)(1, 3)(4, 7)(5, 8)(6, 9)(10, 11),
Perm(0, 3)(1, 9)(2, 4)(5, 8)(6, 11)(7, 10),
Perm(0, 4)(1, 9)(2, 10)(3, 5)(6, 8)(7, 11),
Perm(0, 5)(1, 4)(2, 10)(3, 6)(7, 9)(8, 11),
Perm(0, 6)(1, 5)(2, 10)(3, 11)(4, 7)(8, 9),
Perm(0, 7)(1, 2)(3, 6)(4, 11)(5, 8)(9, 10),
Perm(0, 8)(1, 9)(2, 3)(4, 7)(5, 11)(6, 10),
Perm(0, 9)(1, 11)(2, 10)(3, 4)(5, 8)(6, 7),
Perm(0, 10)(1, 9)(2, 11)(3, 6)(4, 5)(7, 8),
Perm(0, 11)(1, 6)(2, 10)(3, 9)(4, 8)(5, 7),
Perm(0, 11)(1, 8)(2, 7)(3, 6)(4, 10)(5, 9),
Perm(0, 11)(1, 10)(2, 9)(3, 8)(4, 7)(5, 6),
Perm(0, 11)(1, 7)(2, 6)(3, 10)(4, 9)(5, 8),
Perm(0, 11)(1, 9)(2, 8)(3, 7)(4, 6)(5, 10),
Perm(0, 5, 1)(2, 4, 6)(3, 10, 7)(8, 9, 11),
Perm(0, 1, 2)(3, 5, 7)(4, 6, 8)(9, 10, 11),
Perm(0, 2, 3)(1, 8, 4)(5, 7, 9)(6, 11, 10),
Perm(0, 3, 4)(1, 8, 10)(2, 9, 5)(6, 7, 11),
Perm(0, 4, 5)(1, 3, 10)(2, 9, 6)(7, 8, 11),
Perm(0, 10, 7)(1, 5, 6)(2, 4, 11)(3, 9, 8),
Perm(0, 6, 8)(1, 7, 2)(3, 5, 11)(4, 10, 9),
Perm(0, 7, 9)(1, 11, 4)(2, 8, 3)(5, 6, 10),
Perm(0, 8, 10)(1, 7, 6)(2, 11, 5)(3, 9, 4),
Perm(0, 9, 6)(1, 3, 11)(2, 8, 7)(4, 10, 5)))
tetrahedron_faces = list(tuple(arg) for arg in tetrahedron.faces)
cube_faces = list(tuple(arg) for arg in cube.faces)
octahedron_faces = list(tuple(arg) for arg in octahedron.faces)
dodecahedron_faces = list(tuple(arg) for arg in dodecahedron.faces)
icosahedron_faces = list(tuple(arg) for arg in icosahedron.faces)
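# Illustrative sanity check (an addition, not part of the original
# module). It relies only on the *_faces lists built above: every face
# is stored as a cyclic vertex tuple, so vertices, edges and faces must
# satisfy Euler's formula V - E + F = 2 for each of the five solids.
if __name__ == '__main__':
    for faces in (tetrahedron_faces, cube_faces, octahedron_faces,
                  dodecahedron_faces, icosahedron_faces):
        V = len({i for face in faces for i in face})
        E = len({frozenset((face[k], face[(k + 1) % len(face)]))
                 for face in faces for k in range(len(face))})
        F = len(faces)
        assert V - E + F == 2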
|
the-stack_0_179 | ################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from pyflink.java_gateway import get_gateway
from pyflink.common import Configuration
from pyflink.table import EnvironmentSettings
from pyflink.testing.test_case_utils import PyFlinkTestCase
class EnvironmentSettingsTests(PyFlinkTestCase):
def test_mode_selection(self):
builder = EnvironmentSettings.new_instance()
# test the default behaviour to make sure it is consistent with the python doc
environment_settings = builder.build()
self.assertTrue(environment_settings.is_streaming_mode())
# test in_streaming_mode
environment_settings = builder.in_streaming_mode().build()
self.assertTrue(environment_settings.is_streaming_mode())
environment_settings = EnvironmentSettings.in_streaming_mode()
self.assertTrue(environment_settings.is_streaming_mode())
# test in_batch_mode
environment_settings = builder.in_batch_mode().build()
self.assertFalse(environment_settings.is_streaming_mode())
environment_settings = EnvironmentSettings.in_batch_mode()
self.assertFalse(environment_settings.is_streaming_mode())
def test_with_built_in_catalog_name(self):
gateway = get_gateway()
DEFAULT_BUILTIN_CATALOG = gateway.jvm.TableConfigOptions.TABLE_CATALOG_NAME.defaultValue()
builder = EnvironmentSettings.new_instance()
# test the default behaviour to make sure it is consistent with the python doc
environment_settings = builder.build()
self.assertEqual(environment_settings.get_built_in_catalog_name(), DEFAULT_BUILTIN_CATALOG)
environment_settings = builder.with_built_in_catalog_name("my_catalog").build()
self.assertEqual(environment_settings.get_built_in_catalog_name(), "my_catalog")
def test_with_built_in_database_name(self):
gateway = get_gateway()
DEFAULT_BUILTIN_DATABASE = gateway.jvm.TableConfigOptions.TABLE_DATABASE_NAME.defaultValue()
builder = EnvironmentSettings.new_instance()
# test the default behaviour to make sure it is consistent with the python doc
environment_settings = builder.build()
self.assertEqual(environment_settings.get_built_in_database_name(),
DEFAULT_BUILTIN_DATABASE)
environment_settings = builder.with_built_in_database_name("my_database").build()
self.assertEqual(environment_settings.get_built_in_database_name(), "my_database")
def test_to_configuration(self):
expected_settings = EnvironmentSettings.new_instance().in_batch_mode().build()
config = expected_settings.to_configuration()
self.assertEqual("BATCH", config.get_string("execution.runtime-mode", "stream"))
def test_from_configuration(self):
config = Configuration()
config.set_string("execution.runtime-mode", "batch")
actual_setting = EnvironmentSettings.from_configuration(config)
self.assertFalse(actual_setting.is_streaming_mode(), "Use batch mode.")
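    # Note (illustrative, an addition): the string key exercised above is
    # Flink's runtime-mode option, so settings round-trip through a
    # Configuration as, e.g.:
    #   settings = EnvironmentSettings.in_batch_mode()
    #   config = settings.to_configuration()
    #   config.get_string("execution.runtime-mode", "stream")  # -> "BATCH"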
|
the-stack_0_180 | #
# This file is part of ravstack. Ravstack is free software available under
# the terms of the MIT license. See the file "LICENSE" that was provided
# together with this source file for the licensing terms.
#
# Copyright (c) 2015 the ravstack authors. See the file "AUTHORS" for a
# complete list.
"""Ravello Ironic command-line utility.
Usage:
ravstack [options] setup
ravstack [options] proxy-create
ravstack [options] node-create [-c <cpus>] [-m <memory>]
[-D <disk>] [-n <count>]
ravstack [options] node-dump
ravstack [options] node-list [--all [--cached]]
ravstack [options] node-start <node>
ravstack [options] node-stop <node>
ravstack [options] node-reboot <node>
ravstack [options] node-get-boot-device <node>
ravstack [options] node-set-boot-device <node> <bootdev>
ravstack [options] node-get-macs <node> [--cached]
ravstack [options] fixup
ravstack [options] endpoint-resolve <port> [-t <timeout>]
[--start-port <base>] [--num-ports <count>]
ravstack --help
Command help:
setup Create ravstack directories and config file.
proxy-create Create SSH -> Ravello API proxy.
node-create Create a new node.
node-dump Dump node definitions to specified file.
node-list List powered on nodes. (--all lists all nodes)
node-start Start a node.
node-stop Stop a node.
node-reboot Reboot a node.
node-get-boot-device Return boot device for <node>.
node-set-boot-device Set boot device for <node> to <bootdev>.
The boot device may be "hd" or "network".
node-get-macs Return MAC addresses for <node>.
fixup Fix Ravello and OS config after one or
more nodes were deployed.
endpoint-resolve Resolve an endpoint for a local service using
a public IP address or under portmapping.
Options:
-d, --debug Enable debugging.
-v, --verbose Be verbose.
--log-stderr Show logs on standard error.
-u <username>, --username=<username>
Ravello API username.
-p <password>, --password=<password>
Ravello API password.
-a <application>, --application=<application>
The Ravello application name.
--all List all nodes.
--cached Allow use of cached information.
Options for `node-create`:
-c <cpus>, --cpus=<cpus>
The number of CPUs. [default: 2]
-m <memory>, --memory=<memory>
The amount of memory in MB. [default: 8192]
-D <disk>, --disk=<disk>
The size of the disk in GB. [default: 60]
-n <count>, --count=<count>
The number of nodes to create. [default: 1]
Options for `endpoint-resolve`:
-t <timeout>, --timeout <timeout>
Timeout. [default: 2]
--start-port <port> Starting port for endpoint resolution with
portmapping. [default: 10000]
  --num-ports <count>   Number of ports to scan for endpoint resolution
with portmapping. [default: 50]
"""
from __future__ import absolute_import, print_function
import docopt
from . import factory, setup, node, proxy, fixup, endpoint, runtime
from .runtime import CONF
def main():
"""Ravstack main entry point."""
args = docopt.docopt(__doc__)
CONF.update_from_args(args)
CONF.update_to_env()
runtime.setup_logging() # logging configuration might have changed
env = factory.get_environ(args)
if args['setup']:
setup.do_setup(env)
elif args['proxy-create']:
proxy.do_create(env)
elif args['node-create']:
node.do_create(env)
elif args['node-dump']:
node.do_dump(env)
elif args['node-list'] and not args.get('--all'):
node.do_list_running(env, False)
elif args['node-list']:
node.do_list_all(env)
elif args['node-start']:
node.do_start(env, args['<node>'])
elif args['node-stop']:
node.do_stop(env, args['<node>'])
elif args['node-reboot']:
node.do_reboot(env, args['<node>'])
elif args['node-get-boot-device']:
node.do_get_boot_device(env, args['<node>'])
elif args['node-set-boot-device']:
node.do_set_boot_device(env, args['<node>'], args['<bootdev>'])
elif args['node-get-macs']:
node.do_get_macs(env, args['<node>'], False)
elif args['fixup']:
fixup.do_fixup(env)
elif args['endpoint-resolve']:
endpoint.do_resolve(env, args['<port>'])
def run_main():
"""Setuptools entry point."""
runtime.run_main(main)
if __name__ == '__main__':
run_main()
|
the-stack_0_182 | from __future__ import print_function
import numpy as np
def q(y_true, y_pred):
'''q value as described in Tropsha, Gramatica, Gombar:
The Importance of Being Earnest'''
y_true = np.array(y_true)
y_pred = np.array(y_pred)
y_mean = np.mean(y_true)
return 1 - np.sum((y_true - y_pred) ** 2) / np.sum((y_true - y_mean) ** 2)
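# Worked example (illustrative): with y_true = [2, 4, 6] a perfect
# prediction y_pred = y_true gives q == 1, while the constant mean
# predictor y_pred = [4, 4, 4] gives q == 0; negative values mean the
# model predicts worse than the mean.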
def linreg(x, y):
'''Computes a linear regression through the origin using OLS'''
x = np.array(x)
y = np.array(y)
x = x[:, np.newaxis]
a, _, _, _ = np.linalg.lstsq(x, y, rcond=-1.)
r2 = q(y, (x * a)[:, 0])
    return (r2, a)
 |
the-stack_0_183 | import os
from dateutil.parser import parse as date_parser
from flask import request, current_app
from flask_restful.fields import Integer, List, Nested, Raw, String
from werkzeug.utils import secure_filename
from analysisweb.api import db
from analysisweb_user.models import Measurement, MeasurementFile
from . import (
ResourceBase,
MetaResource,
ResourceInvalidInputException,
ResourceForbiddenActionException,
ResourceNotFoundException,
IDField,
)
class MeasurementResource(ResourceBase):
db_table = Measurement
measurement_file = {
"label": String,
"path": String(
attribute=lambda x: "files/measurement/{}/{}".format(
x.measurement_id, x.path
)
),
}
fields = {
"id": Integer,
"label": String,
"start_date": String,
"end_date": String,
"meta_data": Raw,
"files": List(Nested(measurement_file)),
"jobs": List(IDField),
}
def get(self, id_):
"""
Receive a measurement
---
summary: Find a measurement by ID
tags:
- measurements
parameters:
- name: id
in: path
description: ID of measurement to return
required: true
schema:
type: integer
responses:
200:
description: successful operation
content:
application/json:
schema:
$ref: "#/components/schemas/Measurement"
400:
description: Invalid ID supplied
404:
description: Measurement not found
"""
try:
resource = self.get_resource(id_)
except (ResourceInvalidInputException, ResourceNotFoundException) as e:
return {"status": str(e)}, e.response_code
return self.dump_resource(resource), 200
def delete(self, id_):
"""
Delete a measurement
---
summary: Deletes a measurement
tags:
- measurements
parameters:
- name: id
in: path
description: ID of measurement to return
required: true
schema:
type: integer
responses:
200:
description: Measurement deleted and returned
content:
application/json:
schema:
$ref: "#/components/schemas/Measurement"
400:
description: Invalid ID supplied
404:
description: Measurement not found
405:
description: Cannot delete measurement associated with a job
"""
try:
resource = self.get_resource(id_)
except (ResourceInvalidInputException, ResourceNotFoundException) as e:
return {"status": str(e)}, e.response_code
try:
return self.delete_resource(
current_app.config["MEASUREMENT_FILES_FOLDER"], resource
)
except ResourceForbiddenActionException as e:
return {"status": str(e)}, e.response_code
def put(self, id_):
"""
Update the basic information about a measurement
---
summary: Updates a measurement with new data
tags:
- measurements
parameters:
- name: id
in: path
description: ID of measurement to return
required: true
schema:
type: integer
requestBody:
content:
multipart/form-data:
schema:
properties:
start_date:
type: string
format: date-time
end_date:
type: string
format: date-time
label:
type: string
meta_data:
type: string
responses:
200:
description: Measurement updated and returned
content:
application/json:
schema:
$ref: "#/components/schemas/Measurement"
400:
description: Invalid ID supplied or invalid input
404:
description: Measurement not found
"""
try:
resource = self.get_resource(id_)
except (ResourceInvalidInputException, ResourceNotFoundException) as e:
return {"status": str(e)}, e.response_code
try:
self._update_measurement(resource)
except ResourceInvalidInputException as e:
return {"status": str(e)}, e.response_code
return self.dump_resource(resource), 200
def _update_measurement(self, resource):
start_date, end_date = self.parse_dates(
str(resource.start_date), str(resource.end_date)
)
resource.start_date = start_date
resource.end_date = end_date
resource.label = request.form.get("label", resource.label)
self.load_metadata(request.form.get("meta_data", "{}"), resource)
db.session.commit()
@staticmethod
def parse_dates(start=None, end=None):
try:
start_date = date_parser(request.form.get("start_date", start))
end_date = date_parser(request.form.get("end_date", end))
except ValueError as e:
raise ResourceInvalidInputException(str(e))
if end_date < start_date:
raise ResourceInvalidInputException("end date < start date")
return start_date, end_date
class MeasurementListResource(ResourceBase):
db_table = Measurement
fields = MeasurementResource.fields
def get(self):
"""
Obtain a list of measurements
---
summary: Retrieve a list of measurements
tags:
- measurements
responses:
200:
description: OK
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Measurement"
"""
return self.get_all(), 200
def post(self):
"""
Add a new measurement
---
summary: Add a new measurement
tags:
- measurements
requestBody:
content:
multipart/form-data:
schema:
properties:
start_date:
type: string
format: date-time
end_date:
type: string
format: date-time
label:
type: string
meta_data:
type: string
files:
type: array
items:
$ref: "#/components/schemas/File"
responses:
201:
description: Measurement created
400:
description: Invalid input
"""
try:
measurement_id = self._add_measurement()
except ResourceInvalidInputException as e:
return {"status": str(e)}, e.response_code
return {"status": "success", "id": measurement_id}, 201
def _add_measurement(self):
self._validate_form_data()
start_date, end_date = MeasurementResource.parse_dates()
m = Measurement(
start_date=start_date, end_date=end_date, label=request.form["label"]
)
db.session.add(m)
db.session.flush()
measurement_id = m.id
self.load_metadata(request.form.get("meta_data", "{}"), m)
file_folder = os.path.join(
current_app.config["MEASUREMENT_FILES_FOLDER"], str(measurement_id)
)
os.makedirs(file_folder)
print(request.files)
self._add_measurement_files(m, request.files.items(), file_folder)
db.session.commit()
return measurement_id
@staticmethod
def _add_measurement_files(measurement, file_list, path):
"""
Add files to a measurement
Parameters
----------
measurement: Measurement
the measurement to which add the files
file_list: list of werkzeug.Files
the given list of files
path: str
the folder in which to upload the files to
"""
for label, file in file_list:
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(path, filename))
f = MeasurementFile(label=label, path=filename, measurement=measurement)
db.session.add(f)
@staticmethod
def _validate_form_data():
if (
"start_date" not in request.form
or "end_date" not in request.form
or "label" not in request.form
or not request.files
):
raise ResourceInvalidInputException("Missing input")
class MeasurementMetaResource(MetaResource):
def get(self):
return self.load_meta("user_meta.json")
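# Hedged wiring sketch (an addition; the real routing lives elsewhere in
# the application and is an assumption here). With Flask-RESTful, the
# resources above would typically be registered along these lines:
#   from flask_restful import Api
#   api = Api(app)
#   api.add_resource(MeasurementListResource, "/measurements")
#   api.add_resource(MeasurementResource, "/measurements/<int:id_>")
#   api.add_resource(MeasurementMetaResource, "/measurements/meta")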
|
the-stack_0_186 | # -*- coding: utf-8 -*-
"""
pygments.lexers.haskell
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for Haskell and related languages.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
default, include
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
from pygments import unistring as uni
__all__ = ['HaskellLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer',
'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer',
'LiterateCryptolLexer', 'KokaLexer']
line_re = re.compile('.*?\n')
class HaskellLexer(RegexLexer):
"""
A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
.. versionadded:: 0.8
"""
name = 'Haskell'
aliases = ['haskell', 'hs']
filenames = ['*.hs']
mimetypes = ['text/x-haskell']
flags = re.MULTILINE | re.UNICODE
reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
'family', 'if', 'in', 'infix[lr]?', 'instance',
'let', 'newtype', 'of', 'then', 'type', 'where', '_')
ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
tokens = {
'root': [
# Whitespace:
(r'\s+', Text),
# (r'--\s*|.*$', Comment.Doc),
(r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
(r'\{-', Comment.Multiline, 'comment'),
# Lexemes:
# Identifiers
(r'\bimport\b', Keyword.Reserved, 'import'),
(r'\bmodule\b', Keyword.Reserved, 'module'),
(r'\berror\b', Name.Exception),
(r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
(r"'[^\\]'", String.Char), # this has to come before the TH quote
(r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
(r"'?[_" + uni.Ll + r"][\w']*", Name),
(r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
(r"(')[" + uni.Lu + r"][\w\']*", Keyword.Type),
(r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC
(r"(')\([^)]*\)", Keyword.Type), # ..
# Operators
(r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
(r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
(r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
(r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
# Numbers
(r'\d+[eE][+-]?\d+', Number.Float),
(r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
(r'0[oO][0-7]+', Number.Oct),
(r'0[xX][\da-fA-F]+', Number.Hex),
(r'\d+', Number.Integer),
# Character/String Literals
(r"'", String.Char, 'character'),
(r'"', String, 'string'),
# Special
(r'\[\]', Keyword.Type),
(r'\(\)', Name.Builtin),
(r'[][(),;`{}]', Punctuation),
],
'import': [
# Import statements
(r'\s+', Text),
(r'"', String, 'string'),
# after "funclist" state
(r'\)', Punctuation, '#pop'),
(r'qualified\b', Keyword),
# import X as Y
(r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)',
bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
# import X hiding (functions)
(r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()',
bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
# import X (functions)
(r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
# import X
(r'[\w.]+', Name.Namespace, '#pop'),
],
'module': [
(r'\s+', Text),
(r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
(r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'),
],
'funclist': [
(r'\s+', Text),
(r'[' + uni.Lu + r']\w*', Keyword.Type),
(r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function),
(r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
(r'\{-', Comment.Multiline, 'comment'),
(r',', Punctuation),
(r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
# (HACK, but it makes sense to push two instances, believe me)
(r'\(', Punctuation, ('funclist', 'funclist')),
(r'\)', Punctuation, '#pop:2'),
],
# NOTE: the next four states are shared in the AgdaLexer; make sure
# any change is compatible with Agda as well or copy over and change
'comment': [
# Multiline Comments
(r'[^-{}]+', Comment.Multiline),
(r'\{-', Comment.Multiline, '#push'),
(r'-\}', Comment.Multiline, '#pop'),
(r'[-{}]', Comment.Multiline),
],
'character': [
# Allows multi-chars, incorrectly.
(r"[^\\']'", String.Char, '#pop'),
(r"\\", String.Escape, 'escape'),
("'", String.Char, '#pop'),
],
'string': [
(r'[^\\"]+', String),
(r"\\", String.Escape, 'escape'),
('"', String, '#pop'),
],
'escape': [
(r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
(r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
('|'.join(ascii), String.Escape, '#pop'),
(r'o[0-7]+', String.Escape, '#pop'),
(r'x[\da-fA-F]+', String.Escape, '#pop'),
(r'\d+', String.Escape, '#pop'),
(r'\s+\\', String.Escape, '#pop'),
],
}
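# Illustrative usage (an addition, not part of this module): every lexer
# defined here plugs into the standard pygments pipeline, e.g.
#   from pygments import highlight
#   from pygments.formatters import TerminalFormatter
#   code = 'main :: IO ()\nmain = putStrLn "hi"'
#   print(highlight(code, HaskellLexer(), TerminalFormatter()))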
class IdrisLexer(RegexLexer):
"""
A lexer for the dependently typed programming language Idris.
Based on the Haskell and Agda Lexer.
.. versionadded:: 2.0
"""
name = 'Idris'
aliases = ['idris', 'idr']
filenames = ['*.idr']
mimetypes = ['text/x-idris']
reserved = ('case', 'class', 'data', 'default', 'using', 'do', 'else',
'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto',
'namespace', 'codata', 'mutual', 'private', 'public', 'abstract',
'total', 'partial',
'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with',
'pattern', 'term', 'syntax', 'prefix',
'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit',
'tactics', 'intros', 'intro', 'compute', 'refine', 'exact', 'trivial')
ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
directives = ('lib', 'link', 'flag', 'include', 'hide', 'freeze', 'access',
'default', 'logging', 'dynamic', 'name', 'error_handlers', 'language')
tokens = {
'root': [
# Comments
(r'^(\s*)(%%%s)' % '|'.join(directives),
bygroups(Text, Keyword.Reserved)),
(r'(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$', bygroups(Text, Comment.Single)),
(r'(\s*)(\|{3}.*?)$', bygroups(Text, Comment.Single)),
(r'(\s*)(\{-)', bygroups(Text, Comment.Multiline), 'comment'),
# Declaration
(r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
bygroups(Text, Name.Function, Text, Operator.Word, Text)),
# Identifiers
(r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
(r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
(r"('')?[A-Z][\w\']*", Keyword.Type),
(r'[a-z][\w\']*', Text),
# Special Symbols
(r'(<-|::|->|=>|=)', Operator.Word), # specials
(r'([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
# Numbers
(r'\d+[eE][+-]?\d+', Number.Float),
(r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
(r'0[xX][\da-fA-F]+', Number.Hex),
(r'\d+', Number.Integer),
# Strings
(r"'", String.Char, 'character'),
(r'"', String, 'string'),
(r'[^\s(){}]+', Text),
(r'\s+?', Text), # Whitespace
],
'module': [
(r'\s+', Text),
(r'([A-Z][\w.]*)(\s+)(\()',
bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
(r'[A-Z][\w.]*', Name.Namespace, '#pop'),
],
'funclist': [
(r'\s+', Text),
(r'[A-Z]\w*', Keyword.Type),
(r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
(r'--.*$', Comment.Single),
(r'\{-', Comment.Multiline, 'comment'),
(r',', Punctuation),
(r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
# (HACK, but it makes sense to push two instances, believe me)
(r'\(', Punctuation, ('funclist', 'funclist')),
(r'\)', Punctuation, '#pop:2'),
],
# NOTE: the next four states are shared in the AgdaLexer; make sure
# any change is compatible with Agda as well or copy over and change
'comment': [
# Multiline Comments
(r'[^-{}]+', Comment.Multiline),
(r'\{-', Comment.Multiline, '#push'),
(r'-\}', Comment.Multiline, '#pop'),
(r'[-{}]', Comment.Multiline),
],
'character': [
# Allows multi-chars, incorrectly.
(r"[^\\']", String.Char),
(r"\\", String.Escape, 'escape'),
("'", String.Char, '#pop'),
],
'string': [
(r'[^\\"]+', String),
(r"\\", String.Escape, 'escape'),
('"', String, '#pop'),
],
'escape': [
(r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
(r'\^[][A-Z@^_]', String.Escape, '#pop'),
('|'.join(ascii), String.Escape, '#pop'),
(r'o[0-7]+', String.Escape, '#pop'),
(r'x[\da-fA-F]+', String.Escape, '#pop'),
(r'\d+', String.Escape, '#pop'),
(r'\s+\\', String.Escape, '#pop')
],
}
class AgdaLexer(RegexLexer):
"""
For the `Agda <http://wiki.portal.chalmers.se/agda/pmwiki.php>`_
dependently typed functional programming language and proof assistant.
.. versionadded:: 2.0
"""
name = 'Agda'
aliases = ['agda']
filenames = ['*.agda']
mimetypes = ['text/x-agda']
reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data',
'field', 'forall', 'hiding', 'in', 'inductive', 'infix',
'infixl', 'infixr', 'instance', 'let', 'mutual', 'open',
'pattern', 'postulate', 'primitive', 'private',
'quote', 'quoteGoal', 'quoteTerm',
'record', 'renaming', 'rewrite', 'syntax', 'tactic',
'unquote', 'unquoteDecl', 'using', 'where', 'with']
tokens = {
'root': [
# Declaration
(r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
bygroups(Text, Name.Function, Text, Operator.Word, Text)),
# Comments
(r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
(r'\{-', Comment.Multiline, 'comment'),
# Holes
(r'\{!', Comment.Directive, 'hole'),
# Lexemes:
# Identifiers
(r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
(r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
(r'\b(Set|Prop)\b', Keyword.Type),
# Special Symbols
(r'(\(|\)|\{|\})', Operator),
(u'(\\.{1,3}|\\||\u039B|\u2200|\u2192|:|=|->)', Operator.Word),
# Numbers
(r'\d+[eE][+-]?\d+', Number.Float),
(r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
(r'0[xX][\da-fA-F]+', Number.Hex),
(r'\d+', Number.Integer),
# Strings
(r"'", String.Char, 'character'),
(r'"', String, 'string'),
(r'[^\s(){}]+', Text),
(r'\s+?', Text), # Whitespace
],
'hole': [
# Holes
(r'[^!{}]+', Comment.Directive),
(r'\{!', Comment.Directive, '#push'),
(r'!\}', Comment.Directive, '#pop'),
(r'[!{}]', Comment.Directive),
],
'module': [
(r'\{-', Comment.Multiline, 'comment'),
(r'[a-zA-Z][\w.]*', Name, '#pop'),
(r'[\W0-9_]+', Text)
],
'comment': HaskellLexer.tokens['comment'],
'character': HaskellLexer.tokens['character'],
'string': HaskellLexer.tokens['string'],
'escape': HaskellLexer.tokens['escape']
}
class CryptolLexer(RegexLexer):
"""
FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.
.. versionadded:: 2.0
"""
name = 'Cryptol'
aliases = ['cryptol', 'cry']
filenames = ['*.cry']
mimetypes = ['text/x-cryptol']
reserved = ('Arith', 'Bit', 'Cmp', 'False', 'Inf', 'True', 'else',
'export', 'extern', 'fin', 'if', 'import', 'inf', 'lg2',
'max', 'min', 'module', 'newtype', 'pragma', 'property',
'then', 'type', 'where', 'width')
ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
tokens = {
'root': [
# Whitespace:
(r'\s+', Text),
# (r'--\s*|.*$', Comment.Doc),
(r'//.*$', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
# Lexemes:
# Identifiers
(r'\bimport\b', Keyword.Reserved, 'import'),
(r'\bmodule\b', Keyword.Reserved, 'module'),
(r'\berror\b', Name.Exception),
(r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
(r'^[_a-z][\w\']*', Name.Function),
(r"'?[_a-z][\w']*", Name),
(r"('')?[A-Z][\w\']*", Keyword.Type),
# Operators
(r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
(r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
(r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
(r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
# Numbers
(r'\d+[eE][+-]?\d+', Number.Float),
(r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
(r'0[oO][0-7]+', Number.Oct),
(r'0[xX][\da-fA-F]+', Number.Hex),
(r'\d+', Number.Integer),
# Character/String Literals
(r"'", String.Char, 'character'),
(r'"', String, 'string'),
# Special
(r'\[\]', Keyword.Type),
(r'\(\)', Name.Builtin),
(r'[][(),;`{}]', Punctuation),
],
'import': [
# Import statements
(r'\s+', Text),
(r'"', String, 'string'),
# after "funclist" state
(r'\)', Punctuation, '#pop'),
(r'qualified\b', Keyword),
# import X as Y
(r'([A-Z][\w.]*)(\s+)(as)(\s+)([A-Z][\w.]*)',
bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
# import X hiding (functions)
(r'([A-Z][\w.]*)(\s+)(hiding)(\s+)(\()',
bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
# import X (functions)
(r'([A-Z][\w.]*)(\s+)(\()',
bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
# import X
(r'[\w.]+', Name.Namespace, '#pop'),
],
'module': [
(r'\s+', Text),
(r'([A-Z][\w.]*)(\s+)(\()',
bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
(r'[A-Z][\w.]*', Name.Namespace, '#pop'),
],
'funclist': [
(r'\s+', Text),
(r'[A-Z]\w*', Keyword.Type),
(r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
# TODO: these don't match the comments in docs, remove.
#(r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
#(r'{-', Comment.Multiline, 'comment'),
(r',', Punctuation),
(r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
# (HACK, but it makes sense to push two instances, believe me)
(r'\(', Punctuation, ('funclist', 'funclist')),
(r'\)', Punctuation, '#pop:2'),
],
'comment': [
# Multiline Comments
(r'[^/*]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
'character': [
# Allows multi-chars, incorrectly.
(r"[^\\']'", String.Char, '#pop'),
(r"\\", String.Escape, 'escape'),
("'", String.Char, '#pop'),
],
'string': [
(r'[^\\"]+', String),
(r"\\", String.Escape, 'escape'),
('"', String, '#pop'),
],
'escape': [
(r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
(r'\^[][A-Z@^_]', String.Escape, '#pop'),
('|'.join(ascii), String.Escape, '#pop'),
(r'o[0-7]+', String.Escape, '#pop'),
(r'x[\da-fA-F]+', String.Escape, '#pop'),
(r'\d+', String.Escape, '#pop'),
(r'\s+\\', String.Escape, '#pop'),
],
}
EXTRA_KEYWORDS = set(('join', 'split', 'reverse', 'transpose', 'width',
'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
'trace'))
def get_tokens_unprocessed(self, text):
stack = ['root']
for index, token, value in \
RegexLexer.get_tokens_unprocessed(self, text, stack):
if token is Name and value in self.EXTRA_KEYWORDS:
yield index, Name.Builtin, value
else:
yield index, token, value
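    # Illustrative effect of the override above: names listed in
    # EXTRA_KEYWORDS are retagged on the fly, so lexing "reverse xs"
    # yields (approximately) (0, Name.Builtin, 'reverse') and
    # (8, Name, 'xs') instead of two plain Name tokens.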
class LiterateLexer(Lexer):
"""
Base class for lexers of literate file formats based on LaTeX or Bird-style
(prefixing each code line with ">").
Additional options accepted:
`litstyle`
If given, must be ``"bird"`` or ``"latex"``. If not given, the style
is autodetected: if the first non-whitespace character in the source
is a backslash or percent character, LaTeX is assumed, else Bird.
"""
bird_re = re.compile(r'(>[ \t]*)(.*\n)')
def __init__(self, baselexer, **options):
self.baselexer = baselexer
Lexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
style = self.options.get('litstyle')
if style is None:
style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird'
code = ''
insertions = []
if style == 'bird':
# bird-style
for match in line_re.finditer(text):
line = match.group()
m = self.bird_re.match(line)
if m:
insertions.append((len(code),
[(0, Comment.Special, m.group(1))]))
code += m.group(2)
else:
insertions.append((len(code), [(0, Text, line)]))
else:
# latex-style
from pygments.lexers.markup import TexLexer
lxlexer = TexLexer(**self.options)
codelines = 0
latex = ''
for match in line_re.finditer(text):
line = match.group()
if codelines:
if line.lstrip().startswith('\\end{code}'):
codelines = 0
latex += line
else:
code += line
elif line.lstrip().startswith('\\begin{code}'):
codelines = 1
latex += line
insertions.append((len(code),
list(lxlexer.get_tokens_unprocessed(latex))))
latex = ''
else:
latex += line
insertions.append((len(code),
list(lxlexer.get_tokens_unprocessed(latex))))
for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
yield item
class LiterateHaskellLexer(LiterateLexer):
"""
For Literate Haskell (Bird-style or LaTeX) source.
Additional options accepted:
`litstyle`
If given, must be ``"bird"`` or ``"latex"``. If not given, the style
is autodetected: if the first non-whitespace character in the source
is a backslash or percent character, LaTeX is assumed, else Bird.
.. versionadded:: 0.9
"""
name = 'Literate Haskell'
aliases = ['lhs', 'literate-haskell', 'lhaskell']
filenames = ['*.lhs']
mimetypes = ['text/x-literate-haskell']
def __init__(self, **options):
hslexer = HaskellLexer(**options)
LiterateLexer.__init__(self, hslexer, **options)
class LiterateIdrisLexer(LiterateLexer):
"""
For Literate Idris (Bird-style or LaTeX) source.
Additional options accepted:
`litstyle`
If given, must be ``"bird"`` or ``"latex"``. If not given, the style
is autodetected: if the first non-whitespace character in the source
is a backslash or percent character, LaTeX is assumed, else Bird.
.. versionadded:: 2.0
"""
name = 'Literate Idris'
aliases = ['lidr', 'literate-idris', 'lidris']
filenames = ['*.lidr']
mimetypes = ['text/x-literate-idris']
def __init__(self, **options):
hslexer = IdrisLexer(**options)
LiterateLexer.__init__(self, hslexer, **options)
class LiterateAgdaLexer(LiterateLexer):
"""
For Literate Agda source.
Additional options accepted:
`litstyle`
If given, must be ``"bird"`` or ``"latex"``. If not given, the style
is autodetected: if the first non-whitespace character in the source
is a backslash or percent character, LaTeX is assumed, else Bird.
.. versionadded:: 2.0
"""
name = 'Literate Agda'
aliases = ['lagda', 'literate-agda']
filenames = ['*.lagda']
mimetypes = ['text/x-literate-agda']
def __init__(self, **options):
agdalexer = AgdaLexer(**options)
LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options)
class LiterateCryptolLexer(LiterateLexer):
"""
For Literate Cryptol (Bird-style or LaTeX) source.
Additional options accepted:
`litstyle`
If given, must be ``"bird"`` or ``"latex"``. If not given, the style
is autodetected: if the first non-whitespace character in the source
is a backslash or percent character, LaTeX is assumed, else Bird.
.. versionadded:: 2.0
"""
name = 'Literate Cryptol'
aliases = ['lcry', 'literate-cryptol', 'lcryptol']
filenames = ['*.lcry']
mimetypes = ['text/x-literate-cryptol']
def __init__(self, **options):
crylexer = CryptolLexer(**options)
LiterateLexer.__init__(self, crylexer, **options)
class KokaLexer(RegexLexer):
"""
Lexer for the `Koka <http://koka.codeplex.com>`_
language.
.. versionadded:: 1.6
"""
name = 'Koka'
aliases = ['koka']
filenames = ['*.kk', '*.kki']
mimetypes = ['text/x-koka']
keywords = [
'infix', 'infixr', 'infixl',
'type', 'cotype', 'rectype', 'alias',
'struct', 'con',
'fun', 'function', 'val', 'var',
'external',
'if', 'then', 'else', 'elif', 'return', 'match',
'private', 'public', 'private',
'module', 'import', 'as',
'include', 'inline',
'rec',
'try', 'yield', 'enum',
'interface', 'instance',
]
# keywords that are followed by a type
typeStartKeywords = [
'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
]
# keywords valid in a type
typekeywords = [
'forall', 'exists', 'some', 'with',
]
# builtin names and special names
builtin = [
'for', 'while', 'repeat',
'foreach', 'foreach-indexed',
'error', 'catch', 'finally',
'cs', 'js', 'file', 'ref', 'assigned',
]
# symbols that can be in an operator
symbols = r'[$%&*+@!/\\^~=.:\-?|<>]+'
# symbol boundary: an operator keyword should not be followed by any of these
sboundary = '(?!'+symbols+')'
# name boundary: a keyword should not be followed by any of these
boundary = '(?![\w/])'
# koka token abstractions
tokenType = Name.Attribute
tokenTypeDef = Name.Class
tokenConstructor = Generic.Emph
# main lexer
tokens = {
'root': [
include('whitespace'),
# go into type mode
(r'::?' + sboundary, tokenType, 'type'),
(r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
'alias-type'),
(r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
'struct-type'),
((r'(%s)' % '|'.join(typeStartKeywords)) +
r'(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
'type'),
# special sequences of tokens (we use ?: for non-capturing group as
# required by 'bygroups')
(r'(module)(\s+)(interface\s+)?((?:[a-z]\w*/)*[a-z]\w*)',
bygroups(Keyword, Text, Keyword, Name.Namespace)),
(r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
r'(?:(\s*)(=)(\s*)((?:qualified\s*)?)'
r'((?:[a-z]\w*/)*[a-z]\w*))?',
bygroups(Keyword, Text, Name.Namespace, Text, Keyword, Text,
Keyword, Name.Namespace)),
(r'(^(?:(?:public|private)\s*)?(?:function|fun|val))'
r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
bygroups(Keyword, Text, Name.Function)),
(r'(^(?:(?:public|private)\s*)?external)(\s+)(inline\s+)?'
r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
bygroups(Keyword, Text, Keyword, Name.Function)),
# keywords
(r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
(r'(%s)' % '|'.join(keywords) + boundary, Keyword),
(r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
(r'::?|:=|\->|[=.]' + sboundary, Keyword),
# names
(r'((?:[a-z]\w*/)*)([A-Z]\w*)',
bygroups(Name.Namespace, tokenConstructor)),
(r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
(r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
bygroups(Name.Namespace, Name)),
(r'_\w*', Name.Variable),
# literal string
(r'@"', String.Double, 'litstring'),
# operators
(symbols + "|/(?![*/])", Operator),
(r'`', Operator),
(r'[{}()\[\];,]', Punctuation),
# literals. No check for literal characters with len > 1
(r'[0-9]+\.[0-9]+([eE][\-+]?[0-9]+)?', Number.Float),
(r'0[xX][0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r"'", String.Char, 'char'),
(r'"', String.Double, 'string'),
],
# type started by alias
'alias-type': [
(r'=', Keyword),
include('type')
],
# type started by struct
'struct-type': [
(r'(?=\((?!,*\)))', Punctuation, '#pop'),
include('type')
],
# type started by colon
'type': [
(r'[(\[<]', tokenType, 'type-nested'),
include('type-content')
],
# type nested in brackets: can contain parameters, comma etc.
'type-nested': [
(r'[)\]>]', tokenType, '#pop'),
(r'[(\[<]', tokenType, 'type-nested'),
(r',', tokenType),
(r'([a-z]\w*)(\s*)(:)(?!:)',
bygroups(Name, Text, tokenType)), # parameter name
include('type-content')
],
# shared contents of a type
'type-content': [
include('whitespace'),
# keywords
(r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
(r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
Keyword, '#pop'), # need to match because names overlap...
# kinds
(r'[EPHVX]' + boundary, tokenType),
# type names
(r'[a-z][0-9]*(?![\w/])', tokenType),
(r'_\w*', tokenType.Variable), # Generic.Emph
(r'((?:[a-z]\w*/)*)([A-Z]\w*)',
bygroups(Name.Namespace, tokenType)),
(r'((?:[a-z]\w*/)*)([a-z]\w+)',
bygroups(Name.Namespace, tokenType)),
# type keyword operators
(r'::|->|[.:|]', tokenType),
# catchall
default('#pop')
],
# comments and literals
'whitespace': [
(r'\n\s*#.*$', Comment.Preproc),
(r'\s+', Text),
(r'/\*', Comment.Multiline, 'comment'),
(r'//.*$', Comment.Single)
],
'comment': [
(r'[^/*]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
'litstring': [
(r'[^"]+', String.Double),
(r'""', String.Escape),
(r'"', String.Double, '#pop'),
],
'string': [
(r'[^\\"\n]+', String.Double),
include('escape-sequence'),
(r'["\n]', String.Double, '#pop'),
],
'char': [
(r'[^\\\'\n]+', String.Char),
include('escape-sequence'),
(r'[\'\n]', String.Char, '#pop'),
],
'escape-sequence': [
(r'\\[nrt\\"\']', String.Escape),
(r'\\x[0-9a-fA-F]{2}', String.Escape),
(r'\\u[0-9a-fA-F]{4}', String.Escape),
# Yes, \U literals are 6 hex digits.
(r'\\U[0-9a-fA-F]{6}', String.Escape)
]
}
|
the-stack_0_188 | from disco import util
from discodb import DiscoDB, Q
from disco.worker.task_io import task_output_stream
def Open(url, task=None):
if task:
disco_data = task.disco_data
ddfs_data = task.ddfs_data
else:
from disco.settings import DiscoSettings
settings = DiscoSettings()
disco_data = settings['DISCO_DATA']
ddfs_data = settings['DDFS_DATA']
scheme, netloc, rest = util.urlsplit(url)
path, rest = rest.split('!', 1) if '!' in rest else (rest, '')
discodb = DiscoDB.load(open(util.localize(path, disco_data=disco_data,
ddfs_data=ddfs_data)))
if rest:
method_name, arg = rest.split('/', 1) if '/' in rest else (rest, None)
method = getattr(discodb, method_name)
if method_name in ('metaquery', 'query'):
return method(Q.urlscan(arg))
        return method(*filter(None, (arg, )))
return discodb
def input_stream(fd, size, url, params):
return Open(url, task=globals().get('Task')), size, url
class DiscoDBOutput(object):
def __init__(self, stream, params):
from discodb import DiscoDBConstructor
self.discodb_constructor = DiscoDBConstructor()
self.stream = stream
self.params = params
self.path = stream.path
def add(self, key, val):
self.discodb_constructor.add(key, val)
def close(self):
def flags():
return dict((flag, getattr(self.params, flag))
for flag in ('unique_items', 'disable_compression')
if hasattr(self.params, flag))
self.discodb_constructor.finalize(**flags()).dump(self.stream)
def discodb_output(stream, partition, url, params):
return DiscoDBOutput(stream, params), 'discodb:{0}'.format(url.split(':', 1)[1])
discodb_stream = (task_output_stream, discodb_output)
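# Illustrative URL shapes accepted by Open() above, inferred from its
# parsing logic rather than from external docs (host and paths are
# hypothetical):
#   disco://host/path/to/db              -> the loaded DiscoDB
#   disco://host/path/to/db!keys         -> discodb.keys()
#   disco://host/path/to/db!query/<urlq> -> discodb.query(Q.urlscan(<urlq>))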
|
the-stack_0_189 | """
Python interface module for OSQP solver v0.6.2.post5
"""
from __future__ import print_function
from builtins import object
import osqp._osqp as _osqp # Internal low level module
import numpy as np
import scipy.sparse as spa
from warnings import warn
from platform import system
import osqp.codegen as cg
import osqp.utils as utils
import sys
import qdldl
class OSQP(object):
def __init__(self):
self._model = _osqp.OSQP()
def version(self):
return self._model.version()
def setup(self, P=None, q=None, A=None, l=None, u=None, **settings):
"""
Setup OSQP solver problem of the form
minimize 1/2 x' * P * x + q' * x
subject to l <= A * x <= u
solver settings can be specified as additional keyword arguments
"""
# TODO(bart): this will be unnecessary when the derivative will be in C
self._derivative_cache = {'P': P, 'q': q, 'A': A, 'l': l, 'u': u}
unpacked_data, settings = utils.prepare_data(P, q, A, l, u, **settings)
self._model.setup(*unpacked_data, **settings)
def update(self, q=None, l=None, u=None,
Px=None, Px_idx=np.array([]), Ax=None, Ax_idx=np.array([])):
"""
Update OSQP problem arguments
"""
# get problem dimensions
(n, m) = self._model.dimensions()
# check consistency of the input arguments
if q is not None and len(q) != n:
raise ValueError("q must have length n")
if l is not None:
if not isinstance(l, np.ndarray):
raise TypeError("l must be numpy.ndarray, not %s" %
type(l).__name__)
elif len(l) != m:
raise ValueError("l must have length m")
# Convert values to -OSQP_INFTY
l = np.maximum(l, -_osqp.constant('OSQP_INFTY'))
if u is not None:
if not isinstance(u, np.ndarray):
raise TypeError("u must be numpy.ndarray, not %s" %
type(u).__name__)
elif len(u) != m:
raise ValueError("u must have length m")
# Convert values to OSQP_INFTY
u = np.minimum(u, _osqp.constant('OSQP_INFTY'))
if Ax is None:
if len(Ax_idx) > 0:
raise ValueError("Vector Ax has not been specified")
else:
if len(Ax_idx) > 0 and len(Ax) != len(Ax_idx):
raise ValueError("Ax and Ax_idx must have the same lengths")
if Px is None:
if len(Px_idx) > 0:
raise ValueError("Vector Px has not been specified")
else:
if len(Px_idx) > 0 and len(Px) != len(Px_idx):
raise ValueError("Px and Px_idx must have the same lengths")
if q is None and l is None and u is None and Px is None and Ax is None:
raise ValueError("No updatable data has been specified")
# update linear cost
if q is not None:
self._model.update_lin_cost(q)
# update lower bound
if l is not None and u is None:
self._model.update_lower_bound(l)
# update upper bound
if u is not None and l is None:
self._model.update_upper_bound(u)
# update bounds
if l is not None and u is not None:
self._model.update_bounds(l, u)
# update matrix P
if Px is not None and Ax is None:
self._model.update_P(Px, Px_idx, len(Px))
# update matrix A
if Ax is not None and Px is None:
self._model.update_A(Ax, Ax_idx, len(Ax))
# update matrices P and A
if Px is not None and Ax is not None:
self._model.update_P_A(Px, Px_idx, len(Px), Ax, Ax_idx, len(Ax))
# TODO(bart): this will be unnecessary when the derivative will be in C
# update problem data in self._derivative_cache
if q is not None:
self._derivative_cache["q"] = q
if l is not None:
self._derivative_cache["l"] = l
if u is not None:
self._derivative_cache["u"] = u
if Px is not None:
if Px_idx.size == 0:
self._derivative_cache["P"].data = Px
else:
self._derivative_cache["P"].data[Px_idx] = Px
if Ax is not None:
if Ax_idx.size == 0:
self._derivative_cache["A"].data = Ax
else:
self._derivative_cache["A"].data[Ax_idx] = Ax
# delete results from self._derivative_cache to prohibit
# taking the derivative of unsolved problems
if "results" in self._derivative_cache.keys():
del self._derivative_cache["results"]
def update_settings(self, **kwargs):
"""
Update OSQP solver settings
It is possible to change: 'max_iter', 'eps_abs', 'eps_rel',
                                  'eps_prim_inf', 'eps_dual_inf', 'rho',
                                  'alpha', 'delta', 'polish',
                                  'polish_refine_iter',
                                  'verbose', 'scaled_termination',
                                  'check_termination', 'warm_start',
                                  'time_limit'
"""
# get arguments
max_iter = kwargs.pop('max_iter', None)
eps_abs = kwargs.pop('eps_abs', None)
eps_rel = kwargs.pop('eps_rel', None)
eps_prim_inf = kwargs.pop('eps_prim_inf', None)
eps_dual_inf = kwargs.pop('eps_dual_inf', None)
rho = kwargs.pop('rho', None)
alpha = kwargs.pop('alpha', None)
delta = kwargs.pop('delta', None)
polish = kwargs.pop('polish', None)
polish_refine_iter = kwargs.pop('polish_refine_iter', None)
verbose = kwargs.pop('verbose', None)
scaled_termination = kwargs.pop('scaled_termination', None)
check_termination = kwargs.pop('check_termination', None)
warm_start = kwargs.pop('warm_start', None)
time_limit = kwargs.pop('time_limit', None)
# update them
if max_iter is not None:
self._model.update_max_iter(max_iter)
if eps_abs is not None:
self._model.update_eps_abs(eps_abs)
if eps_rel is not None:
self._model.update_eps_rel(eps_rel)
if eps_prim_inf is not None:
self._model.update_eps_prim_inf(eps_prim_inf)
if eps_dual_inf is not None:
self._model.update_eps_dual_inf(eps_dual_inf)
if rho is not None:
self._model.update_rho(rho)
if alpha is not None:
self._model.update_alpha(alpha)
if delta is not None:
self._model.update_delta(delta)
if polish is not None:
self._model.update_polish(polish)
if polish_refine_iter is not None:
self._model.update_polish_refine_iter(polish_refine_iter)
if verbose is not None:
self._model.update_verbose(verbose)
if scaled_termination is not None:
self._model.update_scaled_termination(scaled_termination)
if check_termination is not None:
self._model.update_check_termination(check_termination)
if warm_start is not None:
self._model.update_warm_start(warm_start)
if time_limit is not None:
self._model.update_time_limit(time_limit)
if max_iter is None and \
eps_abs is None and \
eps_rel is None and \
eps_prim_inf is None and \
eps_dual_inf is None and \
rho is None and \
alpha is None and \
delta is None and \
polish is None and \
polish_refine_iter is None and \
verbose is None and \
scaled_termination is None and \
check_termination is None and \
           warm_start is None and \
           time_limit is None:
            raise ValueError("No updatable settings have been specified!")
def solve(self):
"""
Solve QP Problem
"""
# Solve QP
results = self._model.solve()
# TODO(bart): this will be unnecessary when the derivative will be in C
self._derivative_cache['results'] = results
return results
def warm_start(self, x=None, y=None):
"""
Warm start primal or dual variables
"""
# get problem dimensions
(n, m) = self._model.dimensions()
if x is not None:
if len(x) != n:
raise ValueError("Wrong dimension for variable x")
if y is None:
self._model.warm_start_x(x)
if y is not None:
if len(y) != m:
raise ValueError("Wrong dimension for variable y")
if x is None:
self._model.warm_start_y(y)
if x is not None and y is not None:
self._model.warm_start(x, y)
if x is None and y is None:
raise ValueError("Unrecognized fields")
def codegen(self, folder, project_type='', parameters='vectors',
python_ext_name='emosqp', force_rewrite=False, compile_python_ext=True,
FLOAT=False, LONG=True):
"""
Generate embeddable C code for the problem
"""
# Check parameters arguments
if parameters == 'vectors':
embedded = 1
elif parameters == 'matrices':
embedded = 2
else:
raise ValueError("Unknown value of 'parameters' argument.")
# Set float and long flags
if FLOAT:
float_flag = 'ON'
else:
float_flag = 'OFF'
if LONG:
long_flag = 'ON'
else:
long_flag = 'OFF'
# Check project_type argument
expectedProject = ('', 'Makefile', 'MinGW Makefiles',
'Unix Makefiles', 'CodeBlocks', 'Xcode')
if project_type not in expectedProject:
raise ValueError("Unknown value of 'project_type' argument.")
if project_type == 'Makefile':
if system() == 'Windows':
project_type = 'MinGW Makefiles'
elif system() == 'Linux' or system() == 'Darwin':
project_type = 'Unix Makefiles'
# Convert workspace to Python
sys.stdout.write("Getting workspace from OSQP object... \t\t\t\t")
sys.stdout.flush()
work = self._model._get_workspace()
print("[done]")
# Generate code with codegen module
cg.codegen(work, folder, python_ext_name, project_type, compile_python_ext,
embedded, force_rewrite, float_flag, long_flag)
def derivative_iterative_refinement(self, rhs, max_iter=20, tol=1e-12):
M = self._derivative_cache['M']
# Prefactor
solver = self._derivative_cache['solver']
sol = solver.solve(rhs)
for k in range(max_iter):
delta_sol = solver.solve(rhs - M @ sol)
sol = sol + delta_sol
if np.linalg.norm(M @ sol - rhs) < tol:
break
if k == max_iter - 1:
warn("max_iter iterative refinement reached.")
return sol
def adjoint_derivative(self, dx=None, dy_u=None, dy_l=None,
P_idx=None, A_idx=None, eps_iter_ref=1e-04):
"""
Compute adjoint derivative after solve.
"""
P, q = self._derivative_cache['P'], self._derivative_cache['q']
A = self._derivative_cache['A']
l, u = self._derivative_cache['l'], self._derivative_cache['u']
try:
results = self._derivative_cache['results']
except KeyError:
raise ValueError("Problem has not been solved. "
"You cannot take derivatives. "
"Please call the solve function.")
if results.info.status != "solved":
raise ValueError("Problem has not been solved to optimality. "
"You cannot take derivatives")
m, n = A.shape
x = results.x
y = results.y
y_u = np.maximum(y, 0)
y_l = -np.minimum(y, 0)
if A_idx is None:
A_idx = A.nonzero()
if P_idx is None:
P_idx = P.nonzero()
if dy_u is None:
dy_u = np.zeros(m)
if dy_l is None:
dy_l = np.zeros(m)
# Make sure M matrix exists
if 'M' not in self._derivative_cache:
# Multiply second-third row by diag(y_u)^-1 and diag(y_l)^-1
# to make the matrix symmetric
inv_dia_y_u = spa.diags(np.reciprocal(y_u + 1e-20))
inv_dia_y_l = spa.diags(np.reciprocal(y_l + 1e-20))
M = spa.bmat([
[P, A.T, -A.T],
[A, spa.diags(A @ x - u) @ inv_dia_y_u, None],
[-A, None, spa.diags(l - A @ x) @ inv_dia_y_l]
], format='csc')
delta = spa.bmat([[eps_iter_ref * spa.eye(n), None],
[None, -eps_iter_ref * spa.eye(2 * m)]],
format='csc')
self._derivative_cache['M'] = M
self._derivative_cache['solver'] = qdldl.Solver(M + delta)
rhs = - np.concatenate([dx, dy_u, dy_l])
r_sol = self.derivative_iterative_refinement(rhs)
r_x, r_yu, r_yl = np.split(r_sol, [n, n+m])
# Extract derivatives for the constraints
rows, cols = A_idx
dA_vals = (y_u[rows] - y_l[rows]) * r_x[cols] + \
(r_yu[rows] - r_yl[rows]) * x[cols]
dA = spa.csc_matrix((dA_vals, (rows, cols)), shape=A.shape)
du = - r_yu
dl = r_yl
# Extract derivatives for the cost (P, q)
rows, cols = P_idx
dP_vals = .5 * (r_x[rows] * x[cols] + r_x[cols] * x[rows])
dP = spa.csc_matrix((dP_vals, P_idx), shape=P.shape)
dq = r_x
return (dP, dq, dA, dl, du)
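if __name__ == '__main__':
    # Minimal illustrative solve (an addition, not part of the original
    # module), using the small QP from the OSQP documentation:
    # minimize 1/2 x'Px + q'x  subject to  l <= Ax <= u.
    P = spa.csc_matrix([[4.0, 1.0], [1.0, 2.0]])
    q = np.array([1.0, 1.0])
    A = spa.csc_matrix([[1.0, 1.0], [1.0, 0.0], [0.0, 1.0]])
    l = np.array([1.0, 0.0, 0.0])
    u = np.array([1.0, 0.7, 0.7])
    prob = OSQP()
    prob.setup(P, q, A, l, u, verbose=False)
    res = prob.solve()
    print(res.info.status, res.x)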
|
the-stack_0_190 | """
Base interface for a reader class
"""
import logging
from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import PyFLOTRAN.utils.SubFishModule as subfish
logger = logging.getLogger(__name__)
class BaseReader:
data: np.ndarray # Hint of self.data array
info: dict
def __init__(self, filename=None, header=False, **kwargs):
self.filename = Path(filename)
self.info = {"reader": {}}
self.data = None
self.header = header
self.__dict__.update(kwargs)
self.open_file(filename)
def read_file(self, opened_file):
"""
Reads the data and stores it inside the class
:return:
"""
pass
def open_file(self, filename):
with open(filename) as opened_file:
if self.header:
opened_file.readline() # For now, skips the header if it has
self.read_file(opened_file)
self.build_info()
def read_header(self, opened_file):
"""
Reads the header of the file
:return:
"""
pass
def get_data(self) -> np.ndarray:
"""
Outputs the read data
:return:
"""
return np.array(0)
def build_info(self):
"""
Generates a dictionary containing the basic info of the read data
:return:
"""
self.info = {}
def global_coords_to_local(self, x_local_to_global, y_local_to_global):
"""Converts global data coordinates into local"""
assert len(self.data.shape) >= 2 and self.data.shape[1] >= 2, "Error in data shape"
self.data[:, 0] -= x_local_to_global
self.data[:, 1] -= y_local_to_global
def local_coords_to_global(self, x_local_to_global, y_local_to_global):
"""Converts local data coordinates into global"""
assert len(self.data.shape) >= 2 and self.data.shape[1] >= 2, "Error in data shape"
self.data[:, 0] += x_local_to_global
self.data[:, 1] += y_local_to_global
def dump_to_csv(self, output_file, delimiter=","):
"""
Writes the data into a csv file
:param output_file:
:return:
"""
print(f"Starting dump into {output_file}")
np.savetxt(output_file, self.get_data(), delimiter=delimiter)
print(f"The data has been properly exported to the {output_file} file")
def create_postprocess_dict(self):
self.postprocessing_dict = Path().cwd() / "postprocess"
self.postprocessing_dict.mkdir(exist_ok=True)
def generate_subfish_data(self, subfish_dict: dict, unit_factor=1/(365 * 24), unit_name='d') -> pd.DataFrame:
"""
This method reads a given subfish dict and returns the calculated results
Args:
subfish_dict: dictionary containing subfish module parameters
            unit_factor: factor multiplying the results (originally computed in seconds)
            unit_name: unit label used in the time column header
        Returns:
            DataFrame containing the times and computed values
"""
if not subfish_dict:
raise AttributeError('Please, provide a suitable subfish dict object')
tang_sol = subfish.calculate_tang(subfish_dict)
tang_sol[0] *= unit_factor
tang_sol_pd = pd.DataFrame({f'Time [{unit_name}]': tang_sol[0],
'Result [M]': tang_sol[1]
}
)
return tang_sol_pd
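class CSVReader(BaseReader):
    """Illustrative subclass (an addition, not part of the original
    module) showing the intended extension points: open_file() hands
    read_file() an open text handle, then build_info() summarises it."""
    def read_file(self, opened_file):
        self.data = np.loadtxt(opened_file, delimiter=',')
    def get_data(self) -> np.ndarray:
        return self.data
    def build_info(self):
        self.info = {'rows': int(self.data.shape[0])}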
|
the-stack_0_194 | # get human_ebv_tpms.py
import pandas as pd
import argparse
import os
import math
import datetime
import subprocess
# get basename from a file and path string
def get_basename(filepath):
import os
return os.path.basename(os.path.splitext(filepath)[0])
# get and format output directory
def format_odir(odir):
import os
cwd = os.getcwd()
if odir != '':
# if first character is not /, use cwd to make this an absolute path
if odir[0] != '/' and odir[0] != '~':
odir = cwd+odir
if odir[-1] != '/':
odir += '/'
return odir
# make a dated output directory for the files used for the tracks
def make_dated_folder(odir, bname):
date = datetime.datetime.now()
date = date.strftime('%y%m%d')
odir = odir+date+'_'+bname+'_figures/'
if not os.path.isdir(odir):
print('Making output directory '+odir)
os.makedirs(odir)
return odir
# get value associated with keyword in the 9th column of gtf
def get_field_value(key, fields):
if key not in fields:
return None
else:
return fields.split(key+' "')[1].split()[0].replace('";','')
# calculate tpm for a column in the abundance table
def get_tpm(df, col):
new_col = 'TPM_'+col
total_reads = df[d].sum()
df[new_col] = df.apply(lambda x: float(x[d]*1000000)/total_reads, axis=1)
return new_col, df
# calculate tpm for a column in the abundance table
def get_log_tpm(df, col, gene):
tpm_col = 'TPM_'+col
if not gene:
new_col = 'log_'+tpm_col
else:
new_col = 'gene_log_'+TPM_col
df[new_col] = df.apply(lambda x: math.log2(x[tpm_col]+1), axis=1)
return new_col, df
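# Worked example (illustrative): counts of [10, 30, 60] in a dataset
# column sum to 100 reads, so the TPM column becomes [1e5, 3e5, 6e5]
# and the log column is log2(TPM + 1) ~ [16.61, 18.19, 19.19].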
# get gtf file name
parser = argparse.ArgumentParser(description='computes human and EBV transcript TPMs from TALON abundance files')
parser.add_argument('--human_gtf', help='GTF with human and EBV data')
parser.add_argument('--human_filt_ab', help='Filtered abundance file with human and EBV data')
parser.add_argument('--human_ab', help='Unfiltered abundance file with human and EBV data')
parser.add_argument('--ebv_filt_ab', help='EBV only filtered abundance file')
parser.add_argument('--ebv_ab', help='EBV only unfiltered abundance file')
parser.add_argument('--datasets', help='Comma-separated list of dataset names to use for human+ebv data')
parser.add_argument('--o', help='Prefix for output file')
args = parser.parse_args()
full_gtf = args.human_gtf
full_ab = args.human_filt_ab
full_unf_ab = args.human_ab
ebv_ab = args.ebv_filt_ab
ebv_unf_ab = args.ebv_ab
my_datasets = args.datasets.split(',')
oprefix = args.o
# get all human transcript ids
infile = open(full_gtf, 'r')
human_tids = []
ebv_tids = []
for i, line in enumerate(infile):
line = line.replace('\n', '')
temp = line.split('\t')
fields = temp[-1]
if temp[0] != 'chrEBV' and temp[2] == 'transcript':
human_tids.append(get_field_value('talon_transcript', fields))
elif temp[0] == 'chrEBV' and temp[2] == 'transcript':
ebv_tids.append(get_field_value('talon_transcript', fields))
full_df = pd.read_csv(full_ab, sep='\t')
ebv_df = pd.read_csv(ebv_ab, sep='\t')
# reformat human table
# dc_datasets = ['D4', 'D5', 'D10', 'D11']
datasets = my_datasets
# full_df.drop(dc_datasets, inplace=True, axis=1) # drop datasets we don't want
full_df = full_df.loc[full_df[datasets].sum(axis=1) != 0] # drop transcripts with no reads in datasets we do want
full_df = full_df.loc[full_df['transcript_ID'].isin(human_tids)] # drop ebv transcripts
full_df['ebv'] = 'Human' # give human/ebv designation
full_df = full_df.loc[full_df.transcript_novelty != 'Genomic']
# drop genomic transcripts from the ebv dataset (b/c it's not pre-filtered)
ebv_df = ebv_df.loc[ebv_df.transcript_novelty != 'Genomic']
ebv_df['ebv'] = 'EBV' # human/ebv designation
# merge transcript df so TPMs can be calculated correctly
t_df = pd.concat([full_df, ebv_df])
# combine datasets
combine_datasets = True
if combine_datasets:
t_df['combined'] = t_df[my_datasets].sum(axis=1)
datasets = ['combined']
# # make sure the concatenation worked
# print(t_df.loc[t_df['transcript_ID'] == 121].head())
# print(ebv_df.loc[ebv_df['transcript_ID'] == 121].head())
# print(full_df.loc[full_df['transcript_ID'] == 121].head())
# get tpms and number of human transcripts for
# each dataset and for full/ebv
for d in datasets:
# raw TPM
tpm, t_df = get_tpm(t_df, d)
# log2TPM
log, t_df = get_log_tpm(t_df, d, 0)
# sanity check - sum of all TPMs for each sample
print('TPM total for {}: {}'.format(d, str(t_df[tpm].sum())))
human_df = t_df.loc[(t_df[d] != 0) & (t_df['ebv'] == 'Human')]
ebv_df = t_df.loc[(t_df[d] != 0) & (t_df['ebv'] == 'EBV')]
n_human_transcripts = len(human_df.index)
n_ebv_transcripts = len(ebv_df.index)
print('Number of human transcripts in {}: {}'.format(d, str(n_human_transcripts)))
print('Number of EBV transcripts in {}: {}'.format(d, str(n_ebv_transcripts)))
# add columns for number of dataset human/ebv transcripts
n_transcripts_col = 'n_'+d
t_df[n_transcripts_col] = t_df.apply(lambda x:\
n_human_transcripts if x['ebv'] == 'Human' else n_ebv_transcripts, axis=1)
# add heights geom_text locations for dataset/human/ebv transcripts
human_height = t_df.loc[t_df.ebv == 'Human'][log].max()+1
ebv_height = t_df.loc[t_df.ebv == 'EBV'][log].max()+1
height_col = d+'_height'
t_df[height_col] = t_df.apply(lambda x:\
human_height if x.ebv == 'Human' else ebv_height, axis=1)
# print(human_height)
# print(ebv_height)
# print(t_df.head())
# print(t_df.tail())
# write gene and transcript tables to a csv
t_df['dot_size'] = t_df.apply(lambda x: 1 if x['ebv'] == 'EBV' else 0.6, axis=1)
t_df['alpha'] = t_df.apply(lambda x: 0.5 if x['ebv'] == 'EBV' else 0.2, axis=1)
# bname = get_basename(ebv_ab)
# odir = format_odir(os.path.dirname(ebv_ab))
# odir = make_dated_folder(odir,bname)
to = oprefix+'_ebv_human_transcript_abundance.csv'
t_df.to_csv(to, sep=',', index=False)
## get transcript tpms without filtering for bioreps to use for gene tpms
# read in the unfiltered datasets
full_df = pd.read_csv(full_unf_ab, sep='\t')
ebv_df = pd.read_csv(ebv_unf_ab, sep='\t')
# reformat human table
# dc_datasets = ['D4', 'D5', 'D10', 'D11']
datasets = my_datasets
# full_df.drop(dc_datasets, inplace=True, axis=1) # drop datasets we don't want
full_df = full_df.loc[full_df['transcript_ID'].isin(human_tids)] # drop ebv transcripts
full_df['ebv'] = 'Human' # give human/ebv designation
full_df = full_df.loc[full_df.transcript_novelty != 'Genomic']
# drop genomic transcripts from the ebv dataset (b/c it's not pre-filtered)
ebv_df = ebv_df.loc[ebv_df.transcript_novelty != 'Genomic']
ebv_df['ebv'] = 'EBV' # human/ebv designation
# merge transcript df so TPMs can be calculated correctly
t_df = pd.concat([full_df, ebv_df])
# combine datasets
combine_datasets = True
if combine_datasets:
t_df['combined'] = t_df[datasets].sum(axis=1)
datasets = ['combined']
# # make sure the concatenation worked
# print(t_df.loc[t_df['transcript_ID'] == 121].head())
# print(ebv_df.loc[ebv_df['transcript_ID'] == 121].head())
# print(full_df.loc[full_df['transcript_ID'] == 121].head())
# get tpms and number of human transcripts for
# each dataset and for full/ebv
for d in datasets:
# raw TPM
tpm, t_df = get_tpm(t_df, d)
# log2TPM
log, t_df = get_log_tpm(t_df, d, 0)
# sanity check - sum of all TPMs for each sample
print('TPM total for {}: {}'.format(d, str(t_df[tpm].sum())))
human_df = t_df.loc[(t_df[d] != 0) & (t_df['ebv'] == 'Human')]
ebv_df = t_df.loc[(t_df[d] != 0) & (t_df['ebv'] == 'EBV')]
n_human_transcripts = len(human_df.index)
n_ebv_transcripts = len(ebv_df.index)
print('Number of human transcripts in {}: {}'.format(d, str(n_human_transcripts)))
print('Number of EBV transcripts in {}: {}'.format(d, str(n_ebv_transcripts)))
# add columns for number of dataset human/ebv transcripts
n_transcripts_col = 'n_'+d
t_df[n_transcripts_col] = t_df.apply(lambda x:\
n_human_transcripts if x['ebv'] == 'Human' else n_ebv_transcripts, axis=1)
# add heights geom_text locations for dataset/human/ebv transcripts
human_height = t_df.loc[t_df.ebv == 'Human'][log].max()+1
ebv_height = t_df.loc[t_df.ebv == 'EBV'][log].max()+1
height_col = d+'_height'
t_df[height_col] = t_df.apply(lambda x:\
human_height if x.ebv == 'Human' else ebv_height, axis=1)
# get gene tpms
cols = []
for d in datasets:
cols.append(d)
cols.append('TPM_'+d)
g_df = t_df.groupby(['gene_ID', 'gene_novelty', 'ebv'])[cols].sum()
g_df.reset_index(inplace=True)
# # make sure the groupby worked
# print(g_df.loc[g_df['gene_ID'] == 16].head())
# print(t_df.loc[t_df['gene_ID'] == 16].head())
# print(t_df.loc[t_df['gene_ID'] == 16].head())
# get tpms, heights, and numbers for gene
for d in datasets:
# log2TPM
log, g_df = get_log_tpm(g_df, d, 0)
human_df = g_df.loc[(g_df[d] != 0) & (g_df['ebv'] == 'Human')]
ebv_df = g_df.loc[(g_df[d] != 0) & (g_df['ebv'] == 'EBV')]
n_human_genes = len(human_df.index)
n_ebv_genes = len(ebv_df.index)
print('Number of human genes in {}: {}'.format(d, str(n_human_genes)))
print('Number of EBV genes in {}: {}'.format(d, str(n_ebv_genes)))
# add columns for number of dataset human/ebv genes
n_genes_col = 'n_'+d
g_df[n_genes_col] = g_df.apply(lambda x:\
n_human_genes if x['ebv'] == 'Human' else n_ebv_genes, axis=1)
# add heights geom_text locations for dataset/human/ebv transcripts
human_height = g_df.loc[g_df.ebv == 'Human'][log].max()+0.4
ebv_height = g_df.loc[g_df.ebv == 'EBV'][log].max()+0.4
height_col = d+'_height'
g_df[height_col] = g_df.apply(lambda x:\
human_height if x.ebv == 'Human' else ebv_height, axis=1)
print(human_height)
print(ebv_height)
print(g_df.head())
print(g_df.tail())
# add different dot sizes for human/ebv
# t_df['dot_size'] = t_df.apply(lambda x: 1 if x['ebv'] == 'EBV' else 0.6, axis=1)
g_df['dot_size'] = g_df.apply(lambda x: 1 if x['ebv'] == 'EBV' else 0.6, axis=1)
# t_df['alpha'] = t_df.apply(lambda x: 0.5 if x['ebv'] == 'EBV' else 0.2, axis=1)
g_df['alpha'] = g_df.apply(lambda x: 0.5 if x['ebv'] == 'EBV' else 0.2, axis=1)
# # rename gene/transcript novelty columns
# t_df.rename(index=str, columns={\
# 'transcript_novelty':'Isoform Type'}, inplace=True)
# g_df.rename(index=str, columns={\
# 'gene_novelty':'Gene Type'}, inplace=True)
# write gene table to a csv
go = oprefix+'_ebv_human_gene_abundance.csv'
g_df.to_csv(go, sep=',', index=False)
# make graphs
cmd = 'Rscript plot_ebv_v_human_abundances.R --gene_csv {} --transcript_csv {}'\
' --datasets {}'.format(go, to, ','.join(datasets))
# print(cmd)
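# The Rscript command above is only assembled, never dispatched; a minimal
# sketch of actually running it with the already-imported subprocess module
# (left commented so behaviour is unchanged) would be:
#
# subprocess.run(cmd, shell=True, check=True)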
|
the-stack_0_195 | #!/usr/bin/env python
"""
Normalize site observed ancestry by genome-wide average.
2*ID_average - ExpectedCopiesOfPop1Ancestry.
@Author: [email protected]
Usage:
HapmixEPop1NormalizedByIDAverage.py -a IDaverage
HapmixEPop1NormalizedByIDAverage.py -h | --help | -v | --version | -f | --format
Notes:
1. Read ExpectedCopiesOfPop1Ancestry from stdin, and output to stdout.
Options:
-a IDaverage Individual average for pop1, one line one person.
-h --help Show this screen.
-v --version Show version.
-f --format Show input/output file format example.
"""
import sys
from docopt import docopt
from signal import signal, SIGPIPE, SIG_DFL
def ShowFormat():
'''File format example'''
print('''
#ExpectedCopiesOfPop1Ancestry, one column each person.
------------------------
1 1 2 1 2 2
1 2 2 1 2 2
#id average:
------------------------
0.8
0.5
0.1
#output:
------------------------
0.40 0.00 -0.20
-0.60 0.00 -0.20
''');
if __name__ == '__main__':
args = docopt(__doc__, version='1.0')
# print(args)
# sys.exit(0)
if(args['--format']):
ShowFormat()
sys.exit(-1)
idav = [] # 2*ID_average
with open(args['-a'],'r') as ifile:
for line in ifile:
line = line.strip()
if line:
idav.append(2 * float(line))
checkLen = True
for line in sys.stdin:
line = line.strip()
if line:
ss = line.split()
ss = [float(x) for x in ss] #number of pop1 copy for each person.
if checkLen:
if len(ss) != len(idav):
sys.stderr.write('Error: number of individuals in ID_average file is not the same as that in sys.stdin.\n')
sys.exit(-1)
else:
checkLen = False
out = [ '%.2f'%(y-x) for x,y in zip(idav, ss)]
sys.stdout.write('%s\n'%('\t'.join(out)))
sys.stdout.flush()
sys.stdout.close()
sys.stderr.flush()
sys.stderr.close()
|
the-stack_0_196 | # -*- coding: utf-8 -*-
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for execution_util.py."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import subprocess
from unittest import mock
from gslib import exception
from gslib.tests import testcase
from gslib.utils import execution_util
class TestExecutionUtil(testcase.GsUtilUnitTestCase):
"""Test execution utils."""
@mock.patch.object(subprocess, 'Popen')
def testExternalCommandReturnsNoOutput(self, mock_Popen):
mock_command_process = mock.Mock()
mock_command_process.returncode = 0
mock_command_process.communicate.return_value = (None, None)
mock_Popen.return_value = mock_command_process
stdout, stderr = execution_util.ExecuteExternalCommand(['fake-command'])
self.assertIsNone(stdout)
self.assertIsNone(stderr)
mock_Popen.assert_called_once_with(['fake-command'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@mock.patch.object(subprocess, 'Popen')
def testExternalCommandReturnsStringOutput(self, mock_Popen):
mock_command_process = mock.Mock()
mock_command_process.returncode = 0
mock_command_process.communicate.return_value = ('a', 'b')
mock_Popen.return_value = mock_command_process
stdout, stderr = execution_util.ExecuteExternalCommand(['fake-command'])
self.assertEqual(stdout, 'a')
self.assertEqual(stderr, 'b')
mock_Popen.assert_called_once_with(['fake-command'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@mock.patch.object(subprocess, 'Popen')
def testExternalCommandReturnsBytesOutput(self, mock_Popen):
mock_command_process = mock.Mock()
mock_command_process.returncode = 0
mock_command_process.communicate.return_value = (b'a', b'b')
mock_Popen.return_value = mock_command_process
stdout, stderr = execution_util.ExecuteExternalCommand(['fake-command'])
self.assertEqual(stdout, 'a')
self.assertEqual(stderr, 'b')
mock_Popen.assert_called_once_with(['fake-command'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@mock.patch.object(subprocess, 'Popen')
def testExternalCommandRaisesOnNonZeroReturnCode(self, mock_Popen):
mock_command_process = mock.Mock()
mock_command_process.returncode = 1
mock_command_process.communicate.return_value = (None, b'error')
mock_Popen.return_value = mock_command_process
with self.assertRaises(exception.ExternalBinaryError):
execution_util.ExecuteExternalCommand(['fake-command'])
mock_Popen.assert_called_once_with(['fake-command'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@mock.patch.object(subprocess, 'Popen')
def testExternalCommandRaisesFormattedStderr(self, mock_Popen):
mock_command_process = mock.Mock()
mock_command_process.returncode = 1
mock_command_process.communicate.return_value = (None, b'error.\n')
mock_Popen.return_value = mock_command_process
with self.assertRaisesRegexp(exception.ExternalBinaryError, 'error'):
execution_util.ExecuteExternalCommand(['fake-command'])
|
the-stack_0_197 | """Collection of services."""
from typing import Any, Dict, List, Optional
from fastapi import HTTPException, status
from tortoise import QuerySet
from teached.users.models import Teacher
from .models import ( # noqa I202
Announcement,
Assignment,
BookMark,
Category,
Course,
CourseDetailPydantic,
Enrollment,
Language,
Lecture,
Requirement,
Review,
Section,
)
from .schema import CourseDetail
from .utils import unique_slug
async def create_course(*, data: Dict, teacher: Teacher) -> str:
"""Create new course.
Args:
data: Dict of new user info.
teacher: Teacher model instance.
Returns:
Slug of the course
"""
languages = data.pop("languages")
categories = data.pop("categories")
requirements = data.pop("requirements")
course = Course(**data, teacher=teacher)
# TODO: change this to signal
course.slug = unique_slug(title=data.get("title"))
await course.save()
for language in languages:
value, created = await Language.get_or_create(name=language.capitalize())
await course.languages.add(value)
for category in categories:
value, created = await Category.get_or_create(name=category.capitalize())
await course.categories.add(value)
for requirement in requirements:
await Requirement.create(name=requirement.capitalize(), course=course)
return course.slug
async def get_published_courses(
*,
search: Optional[str] = None,
category: Optional[str] = None,
language: Optional[str] = None,
level: Optional[str] = None,
price: Optional[str] = None,
discount: Optional[str] = None,
) -> QuerySet[Course]:
"""Return all published courses.
Args:
search: Search courses by title.
category: Filter by category.
language: Filter by language.
level: Filter by level.
price: Filter by price.
discount: Filter by discount.
Returns:
Query set of courses.
"""
courses = Course.filter(is_drift=False, is_active=True)
if search:
courses = courses.filter(title=search)
if category:
courses = courses.filter(categories__name=category)
if language:
courses = courses.filter(languages__name=language)
if level:
courses = courses.filter(level=level)
if price:
courses = courses.filter(price=price)
if discount:
courses = courses.filter(discount=discount)
return courses
async def get_published_course(*, slug: str, user: Any) -> CourseDetail:
"""Return a published courses.
Args:
slug: The slug of course.
user: Current authenticated user.
Returns:
CourseDetail object for the course.
"""
course = await Course.get(is_drift=False, is_active=True, slug=slug)
pydantic_data = await CourseDetailPydantic.from_tortoise_orm(course)
data = pydantic_data.dict()
data.update(
{
"is_authenticated": user is not None,
"has_enroll": False,
"is_owner": False,
"enrollments": await course.enrollments.all().count(),
"reviews": await course.reviews.all().count(),
}
)
if user:
user_student = await user.students.first()
user_teacher = await user.teachers.first()
if user_student:
data.update(
{
"has_enroll": await course.enrollments.filter(
student=user_student
).first()
is not None
}
)
author = await course.teacher
if user_teacher == author:
data.update({"is_owner": True})
# TODO: change it to computed method.
reviews = course.reviews.all()
try:
rate = sum(review.rate for review in await reviews) / await reviews.count()
except ZeroDivisionError:
rate = 0
data.update({"rate": rate})
return CourseDetail(**data)
async def enroll_to_published_course(*, slug: str, student: Any) -> Dict[str, str]:
"""Enroll new student to a published course.
Args:
slug: The slug of course.
student: Student instances.
Returns:
Dict.
Raises:
HTTPException: If the user has already enrolled.
"""
course = await Course.get(is_drift=False, is_active=True, slug=slug)
if await course.enrollments.filter(student=student):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"You already enrolled to {course}",
)
if course.price > 0:
print("Payment")
# TODO: add the stripe payment
# stripe()
# TODO: add payment process to the payment model
# Payment()
await Enrollment.create(course=course, student=student)
return {
"detail": f"Yea! you have enrolled to {course}, go and enjoy the course now :)"
}
async def bookmark_a_published_course(*, slug: str, student: Any) -> Dict[str, str]:
"""Bookmark a published course.
Args:
slug: The slug of course.
student: Student instances.
Returns:
Dict.
Raises:
HTTPException: If the user has already bookmarked it.
"""
course = await Course.get(is_drift=False, is_active=True, slug=slug)
if await course.book_marks.filter(course=course, student=student):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"You already bookmark {course}",
)
await BookMark.create(course=course, student=student)
return {"detail": f"{course} has been bookmarked :)"}
async def get_bookmarks(*, student: Any) -> List[Dict]:
"""Get list of bookmark.
Args:
student: Student instances.
Returns:
List of bookmarked course.
"""
course_list = []
for bookmark in await BookMark.filter(student=student):
course = await bookmark.course
course_list.append(
{"title": f"{course.title}", "cover": {course.cover}, "slug": course.slug}
)
return course_list
async def create_review_for_published_course(
*, slug: str, data: Dict, student: Any
) -> Dict[str, str]:
"""Create review for a published course.
Args:
slug: The slug of course.
data: Dict of data for review creation.
student: Student instances.
Returns:
Dict.
Raises:
HTTPException: If the user has not enrolled in the course or
has already reviewed it.
"""
course = await Course.get(is_drift=False, is_active=True, slug=slug)
if not await course.enrollments.filter(student=student):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="You need to enroll to the course first",
)
if await course.reviews.filter(student=student):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="You already review this course",
)
await Review.create(**data, course=course, student=student)
return {"detail": "review has been created."}
async def reviews_course_list(*, slug: str) -> List[Dict]:
"""Get all reviews.
Args:
slug: The slug of course.
Returns:
List of reviews.
"""
course = await Course.get(is_drift=False, is_active=True, slug=slug)
review_list = []
for review in await course.reviews.all():
student = await review.student
user = await student.user
review_list.append(
{
"review": f"{review.review}",
"rate": {review.rate},
"user": {"username": user.username},
}
)
return review_list
async def create_course_section(*, data: Dict, course: Course) -> Dict:
"""Create course section.
Args:
data: Dict of data for section creation.
course: Course instance.
Returns:
The created section info.
Raises:
HTTPException: if the same section was created before.
"""
section, created = await Section.get_or_create(**data, course=course)
if not created:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="This section was been created before",
)
section.slug = unique_slug(title=section.title)
await section.save()
return {
"title": section.title,
"objective": section.objective,
"order": section.order,
"slug": section.slug,
}
async def create_course_announcement(
*, data: Dict, course: Course, teacher: Teacher
) -> Dict:
"""Create course announcement.
Args:
data: Dict of data for section creation.
course: Course instance.
teacher: Teacher instance.
Returns:
The created announcement info.
Raises:
HTTPException: if the same section was created before.
"""
announcement, created = await Announcement.get_or_create(
**data, course=course, teacher=teacher
)
if not created:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="This announcement was been created before",
)
announcement.slug = unique_slug(title=announcement.title)
await announcement.save()
return {
"title": announcement.title,
"description": announcement.description,
"slug": announcement.slug,
}
async def create_section_lecture(*, data: Dict, section_slug: str) -> Dict:
"""Create section lecture.
Args:
data: Dict of data for section creation.
section_slug: The slug of the section.
Returns:
The created lecture info.
Raises:
HTTPException: if the same lecture was created before.
"""
section = await Section.get(slug=section_slug)
lecture, created = await Lecture.get_or_create(**data, section=section)
if not created:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="This lecture was been created before",
)
lecture.slug = unique_slug(title=lecture.title)
await lecture.save()
return {
"title": lecture.title,
"text": lecture.text,
"video": lecture.video,
"order": section.order,
"slug": section.slug,
}
async def create_section_assignment(*, data: Dict, section_slug: str) -> Dict:
"""Create section assignment.
Args:
data: Dict of data for section creation.
section_slug: The slug of the section.
Returns:
The created assignment info.
Raises:
HTTPException: if the same assignment was created before.
"""
section = await Section.get(slug=section_slug)
assignment, created = await Assignment.get_or_create(**data, section=section)
if not created:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="This assignment was been created before",
)
assignment.slug = unique_slug(title=assignment.title)
await assignment.save()
return {
"title": assignment.title,
"text": assignment.description,
"file": assignment.file,
"slug": section.slug,
}
async def update_course_settings(*, data: Dict, teacher: Teacher, slug: str) -> Dict:
"""Update course settings.
Args:
data: Dict of data for section creation.
teacher: Teacher instance.
slug: Course slug.
Returns:
The updated settings info.
"""
courses = Course.filter(slug=slug, teacher=teacher)
await courses.update(**data)
course = await courses.first()
return {
"is_drift": course.is_drift,
"price": course.price,
"discount": course.discount,
"is_active": course.is_active,
}
|
the-stack_0_198 | from __future__ import absolute_import, division, print_function
# LIBTBX_SET_DISPATCHER_NAME phenix.helix_sheet_recs_as_pdb_files
import sys
import iotbx.pdb
from libtbx.utils import Sorry
legend = """phenix.helix_sheet_recs_as_pdb_files:
Given PDB file with HELIX/SHEET records output PDB files corresponding to
each individual HELIX/SHEET record.
How to run:
phenix.helix_sheet_recs_as_pdb_files model.pdb
Feedback:
[email protected]
[email protected]"""
def run(args):
if(len(args)!=1): raise Sorry("PDB file must be provided.")
pdb_inp = iotbx.pdb.input(file_name = args[0])
h = pdb_inp.construct_hierarchy()
asc = h.atom_selection_cache()
sso = pdb_inp.extract_secondary_structure()
for rec in sso.sheets+sso.helices:
file_name = "_".join(rec.as_pdb_str().split())
file_name = file_name[:min(36, len(file_name))]
file_name += ".pdb"
sel_list = rec.as_atom_selections()
assert type(sel_list) == list
if(len(sel_list) == 1):
sel = asc.selection(string=sel_list[0])
else:
sel_str=" or ".join( ["(%s)"%s for s in rec.as_atom_selections()] )
sel = asc.selection(string=sel_str)
h_selected = h.select(sel)
h_selected.write_pdb_file(file_name=file_name)
if (__name__ == "__main__"):
run(args=sys.argv[1:])
|
the-stack_0_199 | # Author: Jacek Komorowski
# Warsaw University of Technology
import os
import configparser
import time
import numpy as np
class ModelParams:
def __init__(self, model_params_path):
config = configparser.ConfigParser()
config.read(model_params_path)
params = config['MODEL']
self.model_params_path = model_params_path
self.model = params.get('model')
self.output_dim = params.getint('output_dim', 256) # Size of the final descriptor
# Add gating as the last step
if 'vlad' in self.model.lower():
self.cluster_size = params.getint('cluster_size', 64) # Size of NetVLAD cluster
self.gating = params.getboolean('gating', True) # Use gating after the NetVlad
#######################################################################
# Model dependent
#######################################################################
if 'MinkFPN' in self.model:
# Models using MinkowskiEngine
self.mink_quantization_size = params.getfloat('mink_quantization_size')
# Size of the local features from backbone network (only for MinkNet based models)
# For PointNet-based models we always use 1024 intermediary features
self.feature_size = params.getint('feature_size', 256)
if 'planes' in params:
self.planes = [int(e) for e in params['planes'].split(',')]
else:
self.planes = [32, 64, 64]
if 'layers' in params:
self.layers = [int(e) for e in params['layers'].split(',')]
else:
self.layers = [1, 1, 1]
self.num_top_down = params.getint('num_top_down', 1)
self.conv0_kernel_size = params.getint('conv0_kernel_size', 5)
def print(self):
print('Model parameters:')
param_dict = vars(self)
for e in param_dict:
print('{}: {}'.format(e, param_dict[e]))
print('')
def get_datetime():
return time.strftime("%Y%m%d_%H%M")
def xyz_from_depth(depth_image, depth_intrinsic, depth_scale=1000.):
# Return X, Y, Z coordinates from a depth map.
# This mimics OpenCV cv2.rgbd.depthTo3d() function
fx = depth_intrinsic[0, 0]
fy = depth_intrinsic[1, 1]
cx = depth_intrinsic[0, 2]
cy = depth_intrinsic[1, 2]
# Construct (y, x) array with pixel coordinates
y, x = np.meshgrid(range(depth_image.shape[0]), range(depth_image.shape[1]), sparse=False, indexing='ij')
X = (x - cx) * depth_image / (fx * depth_scale)
Y = (y - cy) * depth_image / (fy * depth_scale)
xyz = np.stack([X, Y, depth_image / depth_scale], axis=2)
xyz[depth_image == 0] = np.nan
return xyz
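# Illustrative example (assumed values): a 2x2 millimetre depth map with a
# toy pinhole intrinsic matrix. The pixel at the principal point (cx, cy)
# maps to X = Y = 0 with Z = depth / depth_scale, and zero-depth pixels
# become NaN.
#
# K = np.array([[500., 0., 1.], [0., 500., 1.], [0., 0., 1.]])
# depth = np.array([[1000., 0.], [2000., 1500.]])
# pts = xyz_from_depth(depth, K) # shape (2, 2, 3), in metres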
class MinkLocParams:
"""
Params for training MinkLoc models on Oxford dataset
"""
def __init__(self, params_path, model_params_path):
"""
Configuration files
:param path: General configuration file
:param model_params: Model-specific configuration
"""
assert os.path.exists(params_path), 'Cannot find configuration file: {}'.format(params_path)
assert os.path.exists(model_params_path), 'Cannot find model-specific configuration file: {}'.format(model_params_path)
self.params_path = params_path
self.model_params_path = model_params_path
# self.model_params_path = model_params_path
config = configparser.ConfigParser()
config.read(self.params_path)
params = config['DEFAULT']
self.num_points = params.getint('num_points', 4096)
self.dataset_folder = params.get('dataset_folder')
self.queries_folder = params.get('queries_folder')
params = config['TRAIN']
self.num_workers = params.getint('num_workers', 0)
self.batch_size = params.getint('batch_size', 128)
# Set batch_expansion_th to turn on dynamic batch sizing
# When number of non-zero triplets falls below batch_expansion_th, expand batch size
self.batch_expansion_th = params.getfloat('batch_expansion_th', None)
if self.batch_expansion_th is not None:
assert 0. < self.batch_expansion_th < 1., 'batch_expansion_th must be between 0 and 1'
self.batch_size_limit = params.getint('batch_size_limit', 256)
# Batch size expansion rate
self.batch_expansion_rate = params.getfloat('batch_expansion_rate', 1.5)
assert self.batch_expansion_rate > 1., 'batch_expansion_rate must be greater than 1'
else:
self.batch_size_limit = self.batch_size
self.batch_expansion_rate = None
self.lr = params.getfloat('lr', 1e-3)
self.scheduler = params.get('scheduler', 'MultiStepLR')
if self.scheduler is not None:
if self.scheduler == 'CosineAnnealingLR':
self.min_lr = params.getfloat('min_lr')
elif self.scheduler == 'MultiStepLR':
scheduler_milestones = params.get('scheduler_milestones')
self.scheduler_milestones = [int(e) for e in scheduler_milestones.split(',')]
else:
raise NotImplementedError('Unsupported LR scheduler: {}'.format(self.scheduler))
self.epochs = params.getint('epochs', 20)
self.weight_decay = params.getfloat('weight_decay', None)
self.normalize_embeddings = params.getboolean('normalize_embeddings', True) # Normalize embeddings during training and evaluation
self.loss = params.get('loss')
if 'Contrastive' in self.loss:
self.pos_margin = params.getfloat('pos_margin', 0.2)
self.neg_margin = params.getfloat('neg_margin', 0.65)
elif 'Triplet' in self.loss:
self.margin = params.getfloat('margin', 0.4) # Margin used in loss function
else:
raise NotImplementedError('Unsupported loss function: {}'.format(self.loss))
self.aug_mode = params.getint('aug_mode', 1) # Augmentation mode (1 is default)
self.train_file = params.get('train_file')
self.val_file = params.get('val_file', None)
self.eval_database_files = ['kitti_evaluation_database.pickle']
self.eval_query_files = ['kitti_evaluation_query.pickle']
# self.eval_database_files = ['oxford_evaluation_database.pickle', 'business_evaluation_database.pickle',
# 'residential_evaluation_database.pickle', 'university_evaluation_database.pickle']
#
# self.eval_query_files = ['oxford_evaluation_query.pickle', 'business_evaluation_query.pickle',
# 'residential_evaluation_query.pickle', 'university_evaluation_query.pickle']
assert len(self.eval_database_files) == len(self.eval_query_files)
# Read model parameters
self.model_params = ModelParams(self.model_params_path)
self._check_params()
def _check_params(self):
assert os.path.exists(self.dataset_folder), 'Cannot access dataset: {}'.format(self.dataset_folder)
assert os.path.exists(self.queries_folder), 'Cannot access dataset: {}'.format(self.queries_folder)
def print(self):
print('Parameters:')
param_dict = vars(self)
for e in param_dict:
if e != 'model_params':
print('{}: {}'.format(e, param_dict[e]))
self.model_params.print()
print('')
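# Hedged usage sketch (file paths are hypothetical): both files use
# configparser INI syntax, with [DEFAULT]/[TRAIN] sections in the general
# config and a [MODEL] section in the model-specific config.
#
# params = MinkLocParams('config/config_kitti.txt', 'models/minkloc3d.txt')
# params.print()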
|
the-stack_0_200 | from typing import Any
from pyparsing import Word,nums,CaselessLiteral,ParseException
from subprocess import Popen,PIPE,STDOUT,CREATE_NO_WINDOW
from json import loads
import os
import errno
def is_executable()->bool:
"""
Determine if the current script is packaged as an executable\n
(EG: If packed into a .exe with PyInstaller)\n
returns : True/False, if the script is an executable
"""
import sys
return getattr(sys,'frozen',False)
def script_dir()->str:
"""
Get the path to the current script's directory, whether running as an executable or in an interpreter.\n
returns : A string containing the path to the script directory.
"""
from os import path
import sys
return path.dirname(sys.executable) if is_executable() else os.path.join(path.dirname(path.realpath(sys.argv[0])),'app')
def local_path(dir_name:str='')->str:
"""
Get the absolute path to a local file/directory (under sys._MEIPASS when packaged, or under the script directory otherwise), whether running as an executable or in an interpreter.\n
returns : A string containing the path to the local file/directory
"""
from os import path
import sys
return path.join(sys._MEIPASS, dir_name) if is_executable() else path.join(script_dir(),dir_name)
def convert_size_to_bytes(size_str:str):
"""
Converts a size string (eg: "12gb") to bytes.
"""
multipliers={"kb":1024,"mb":1024000,"gb":1024000000,"tb":1024000000000} #god help whoever converts a tb file
expr=Word(nums+','+'.').setParseAction(lambda toks:float(toks[0])).setResultsName('size')+(CaselessLiteral('kb')^ CaselessLiteral('mb') ^ CaselessLiteral('gb') ^ CaselessLiteral('tb')).setParseAction(lambda toks:multipliers[toks[0]]).setResultsName('mult')
result=None
try:
result=expr.parseString(size_str.replace(',',''))
except ParseException:
return None
return result.size*result.mult
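# Illustrative behaviour, given the multipliers defined above:
# convert_size_to_bytes('12gb') -> 12 * 1024000000 = 12288000000.0
# convert_size_to_bytes('1,024 kb') -> 1024 * 1024 = 1048576.0
# convert_size_to_bytes('fast') -> None (parse failure)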
def is_int(s:str):
"""
Return whether or not the str `s` is an int.
"""
try:
int(s)
return True
except ValueError:
return False
def get_video_info(filename:str,ffprobe:str=os.path.join(local_path('resources'),'ffprobe.exe')):
"""
Get the video info from a video file.\n
Returns a JSON object
"""
command = [ffprobe,
"-loglevel", "quiet",
"-print_format", "json",
"-show_format",
"-show_streams",
filename
]
pipe = Popen(command, stdout=PIPE, stderr=STDOUT,creationflags = CREATE_NO_WINDOW)
out, err = pipe.communicate()
return loads(out)
def get_free_space_b(dirname):
"""Return folder/drive free space (in bytes)."""
import ctypes
import os
import platform
import sys
if platform.system() == 'Windows':
free_bytes = ctypes.c_ulonglong(0)
ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(dirname), None, None, ctypes.pointer(free_bytes))
return free_bytes.value
else:
st = os.statvfs(dirname)
return st.f_bavail * st.f_frsize
def vid_time_to_ms(time_str:str):
h,m,s=time_str.split(':')
s,ms=s.split('.')
ms=ms.rstrip('0')
if not ms: ms = '0'
h,m,s,ms=map(float,(h,m,s,ms))
ms+=(s*1000)
ms+=(m*60*1000)
ms+=(h*60*60*1000)
return ms
class Custom_Zip():
"""Same basic functionality as zip() builtin, but can be used with differently size iterators
"""
def __init__(self,*args,default:Any=None):
"""Same as zip() builtin, but returns default from any iters that are exhausted until all are exhausted
Args:
default (Any, optional): [description]. Defaults to None.
"""
self.args=[iter(c) for c in args]
self.default=default
def __iter__(self):
return self
def __next__(self):
yields=[]
for arg in self.args:
yields.append(next(arg,self.default))
if all(c is self.default for c in yields): # every input exhausted
raise StopIteration
return tuple(yields)
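# Example (illustrative): unlike the zip() builtin, exhausted inputs keep
# yielding the default until every input is exhausted. The identity check
# against the default acts as a sentinel, so prefer a unique object if the
# inputs may legitimately yield the default value.
#
# list(Custom_Zip([1, 2, 3], ['a'], default='-'))
# -> [(1, 'a'), (2, '-'), (3, '-')]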
def make_dirs(path:str):
"""Make the directory path specified.
Args:
path (str): The path to create. Should either be a file (eg: /foo/bar/baz.txt), or a directory ending in / (/foo/bar/)
"""
if not os.path.exists(os.path.dirname(path)):
try:
os.makedirs(os.path.dirname(path))
except OSError as exc: # Guard against race condition
if exc.errno != errno.EEXIST:
raise
def normalize_extension(ext:str,dot=True):
if dot: return '.'+ext.lstrip('.')
else: return ext.lstrip('.')
class file_str(str):
"""Empty subclass of str builtin, for use with type requiring
"""
pass
def list_get(l:list, idx:int, default:Any=None):
try:
return l[idx]
except IndexError:
return default
|
the-stack_0_201 | import functools
import json
import logging
import math
import os
import time
from functools import cached_property
from typing import Callable, Dict, List, Tuple, Type
# https://github.com/prius/python-leetcode
import leetcode.api.default_api # type: ignore
import leetcode.api_client # type: ignore
import leetcode.auth # type: ignore
import leetcode.configuration # type: ignore
import leetcode.models.graphql_query # type: ignore
import leetcode.models.graphql_query_get_question_detail_variables # type: ignore
import leetcode.models.graphql_query_problemset_question_list_variables # type: ignore
import leetcode.models.graphql_query_problemset_question_list_variables_filter_input # type: ignore
import leetcode.models.graphql_question_detail # type: ignore
import urllib3 # type: ignore
from tqdm import tqdm # type: ignore
CACHE_DIR = "cache"
def _get_leetcode_api_client() -> leetcode.api.default_api.DefaultApi:
"""
Leetcode API instance constructor.
This is a singleton, because we don't need to create a separate client
each time
"""
configuration = leetcode.configuration.Configuration()
session_id = os.environ["LEETCODE_SESSION_ID"]
csrf_token = leetcode.auth.get_csrf_cookie(session_id)
configuration.api_key["x-csrftoken"] = csrf_token
configuration.api_key["csrftoken"] = csrf_token
configuration.api_key["LEETCODE_SESSION"] = session_id
configuration.api_key["Referer"] = "https://leetcode.com"
configuration.debug = False
api_instance = leetcode.api.default_api.DefaultApi(
leetcode.api_client.ApiClient(configuration)
)
return api_instance
def retry(times: int, exceptions: Tuple[Type[Exception]], delay: float) -> Callable:
"""
Retry Decorator
Retries the wrapped function/method `times` times if the exceptions listed
in `exceptions` are thrown
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
for attempt in range(times - 1):
try:
return func(*args, **kwargs)
except exceptions:
logging.exception(
"Exception occured, try %s/%s", attempt + 1, times
)
time.sleep(delay)
logging.error("Last try")
return func(*args, **kwargs)
return wrapper
return decorator
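# Hedged usage sketch (function body is illustrative): retry a flaky
# network call up to 3 times, pausing 5 seconds between attempts.
#
# @retry(times=3, exceptions=(urllib3.exceptions.ProtocolError,), delay=5)
# def fetch_page():
#     ...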
class LeetcodeData:
"""
Retrieves and caches the data for problems, acquired from the leetcode API.
This data can be later accessed using provided methods with corresponding
names.
"""
def __init__(self, start: int, stop: int) -> None:
"""
Initialize leetcode API and disk cache for API responses
"""
if start < 0:
raise ValueError(f"Start must be non-negative: {start}")
if stop < 0:
raise ValueError(f"Stop must be non-negative: {start}")
if start > stop:
raise ValueError(f"Start (){start}) must be not greater than stop ({stop})")
self._start = start
self._stop = stop
@cached_property
def _api_instance(self) -> leetcode.api.default_api.DefaultApi:
return _get_leetcode_api_client()
@cached_property
def _cache(
self,
) -> Dict[str, leetcode.models.graphql_question_detail.GraphqlQuestionDetail]:
"""
Cached method to return dict (problem_slug -> question details)
"""
problems = self._get_problems_data()
return {problem.title_slug: problem for problem in problems}
@retry(times=3, exceptions=(urllib3.exceptions.ProtocolError,), delay=5)
def _get_problems_count(self) -> int:
api_instance = self._api_instance
graphql_request = leetcode.models.graphql_query.GraphqlQuery(
query="""
query problemsetQuestionList($categorySlug: String, $limit: Int, $skip: Int, $filters: QuestionListFilterInput) {
problemsetQuestionList: questionList(
categorySlug: $categorySlug
limit: $limit
skip: $skip
filters: $filters
) {
totalNum
}
}
""",
variables=leetcode.models.graphql_query_problemset_question_list_variables.GraphqlQueryProblemsetQuestionListVariables(
category_slug="",
limit=1,
skip=0,
filters=leetcode.models.graphql_query_problemset_question_list_variables_filter_input.GraphqlQueryProblemsetQuestionListVariablesFilterInput(
tags=[],
# difficulty="MEDIUM",
# status="NOT_STARTED",
# list_id="7p5x763", # Top Amazon Questions
# premium_only=False,
),
),
operation_name="problemsetQuestionList",
)
time.sleep(2) # Leetcode has a rate limiter
data = api_instance.graphql_post(body=graphql_request).data
return data.problemset_question_list.total_num or 0
@retry(times=3, exceptions=(urllib3.exceptions.ProtocolError,), delay=5)
def _get_problems_data_page(
self, offset: int, page_size: int, page: int
) -> List[leetcode.models.graphql_question_detail.GraphqlQuestionDetail]:
api_instance = self._api_instance
graphql_request = leetcode.models.graphql_query.GraphqlQuery(
query="""
query problemsetQuestionList($categorySlug: String, $limit: Int, $skip: Int, $filters: QuestionListFilterInput) {
problemsetQuestionList: questionList(
categorySlug: $categorySlug
limit: $limit
skip: $skip
filters: $filters
) {
questions: data {
questionFrontendId
title
titleSlug
categoryTitle
freqBar
content
isPaidOnly
difficulty
likes
dislikes
topicTags {
name
slug
}
stats
hints
}
}
}
""",
variables=leetcode.models.graphql_query_problemset_question_list_variables.GraphqlQueryProblemsetQuestionListVariables(
category_slug="",
limit=page_size,
skip=offset + page * page_size,
filters=leetcode.models.graphql_query_problemset_question_list_variables_filter_input.GraphqlQueryProblemsetQuestionListVariablesFilterInput(),
),
operation_name="problemsetQuestionList",
)
time.sleep(2) # Leetcode has a rate limiter
data = api_instance.graphql_post(
body=graphql_request
).data.problemset_question_list.questions
return data
def _get_problems_data(
self,
) -> List[leetcode.models.graphql_question_detail.GraphqlQuestionDetail]:
problem_count = self._get_problems_count()
if self._start > problem_count:
raise ValueError(
"Start ({self._start}) is greater than problems count ({problem_count})"
)
start = self._start
stop = min(self._stop, problem_count)
page_size = min(3000, stop - start + 1)
problems: List[
leetcode.models.graphql_question_detail.GraphqlQuestionDetail
] = []
logging.info(f"Fetching {stop - start + 1} problems {page_size} per page")
for page in tqdm(
range(math.ceil((stop - start + 1) / page_size)),
unit="problem",
unit_scale=page_size,
):
data = self._get_problems_data_page(start, page_size, page)
problems.extend(data)
return problems
async def all_problems_handles(self) -> List[str]:
"""
Get all problem handles known.
Example: ["two-sum", "three-sum"]
"""
return list(self._cache.keys())
def _get_problem_data(
self, problem_slug: str
) -> leetcode.models.graphql_question_detail.GraphqlQuestionDetail:
"""
TODO: Legacy method. Needed in the old architecture. Can be replaced
with direct cache calls later.
"""
cache = self._cache
if problem_slug in cache:
return cache[problem_slug]
# Fail loudly on an unknown slug instead of implicitly returning None.
raise ValueError(f"Unknown problem slug: {problem_slug}")
async def _get_description(self, problem_slug: str) -> str:
"""
Problem description
"""
data = self._get_problem_data(problem_slug)
return data.content or "No content"
async def _stats(self, problem_slug: str) -> Dict[str, str]:
"""
Various stats about problem. Such as number of accepted solutions, etc.
"""
data = self._get_problem_data(problem_slug)
return json.loads(data.stats)
async def submissions_total(self, problem_slug: str) -> int:
"""
Total number of submissions of the problem
"""
return int((await self._stats(problem_slug))["totalSubmissionRaw"])
async def submissions_accepted(self, problem_slug: str) -> int:
"""
Number of accepted submissions of the problem
"""
return int((await self._stats(problem_slug))["totalAcceptedRaw"])
async def description(self, problem_slug: str) -> str:
"""
Problem description
"""
return await self._get_description(problem_slug)
async def difficulty(self, problem_slug: str) -> str:
"""
Problem difficulty. Returns colored HTML version, so it can be used
directly in Anki
"""
data = self._get_problem_data(problem_slug)
diff = data.difficulty
if diff == "Easy":
return "<font color='green'>Easy</font>"
if diff == "Medium":
return "<font color='orange'>Medium</font>"
if diff == "Hard":
return "<font color='red'>Hard</font>"
raise ValueError(f"Incorrect difficulty: {diff}")
async def paid(self, problem_slug: str) -> bool:
"""
Problem's "available for paid subsribers" status
"""
data = self._get_problem_data(problem_slug)
return data.is_paid_only
async def problem_id(self, problem_slug: str) -> str:
"""
Numerical id of the problem
"""
data = self._get_problem_data(problem_slug)
return data.question_frontend_id
async def likes(self, problem_slug: str) -> int:
"""
Number of likes for the problem
"""
data = self._get_problem_data(problem_slug)
likes = data.likes
if not isinstance(likes, int):
raise ValueError(f"Likes should be int: {likes}")
return likes
async def dislikes(self, problem_slug: str) -> int:
"""
Number of dislikes for the problem
"""
data = self._get_problem_data(problem_slug)
dislikes = data.dislikes
if not isinstance(dislikes, int):
raise ValueError(f"Dislikes should be int: {dislikes}")
return dislikes
async def tags(self, problem_slug: str) -> List[str]:
"""
List of the tags for this problem (string slugs)
"""
data = self._get_problem_data(problem_slug)
return list(map(lambda x: x.slug, data.topic_tags))
async def freq_bar(self, problem_slug: str) -> float:
"""
Returns percentage for frequency bar
"""
data = self._get_problem_data(problem_slug)
return data.freq_bar or 0
async def title(self, problem_slug: str) -> str:
"""
Returns problem title
"""
data = self._get_problem_data(problem_slug)
return data.title
async def category(self, problem_slug: str) -> str:
"""
Returns problem category title
"""
data = self._get_problem_data(problem_slug)
return data.category_title
|
the-stack_0_203 | import pytest
import saltext.credstash.sdbs.credstash as credstash_sdb
@pytest.fixture
def configure_loader_modules():
module_globals = {
"__salt__": {"this_does_not_exist.please_replace_it": lambda: True},
}
return {
credstash_sdb: module_globals,
}
def test_replace_this_this_with_something_meaningful():
assert "this_does_not_exist.please_replace_it" in credstash_sdb.__salt__
assert credstash_sdb.__salt__["this_does_not_exist.please_replace_it"]() is True
|
the-stack_0_204 | import itertools
import networkx as nx
from spacy.tokens import Token
def syntactic_depth(token):
'''Finds token depth in the syntactic tree'''
if token._._syntactic_depth is None:
depth = 0
current_word = token
while not current_word == current_word.head:
depth += 1
current_word = current_word.head
token._._syntactic_depth = depth
return token._._syntactic_depth
Token.set_extension('_syntactic_depth', default=None, force=True)
Token.set_extension('syntactic_depth', getter=syntactic_depth, force=True)
def filter_by_depth(depths, tokens):
if isinstance(depths, int):
depths = set([depths])
return [t for t in tokens if t._.syntactic_depth in depths]
def shallowest_token(tokens):
tokens = sort_by_depth(tokens)
return tokens[0]
def sort_by_depth(tokens):
return sorted(tokens, key=lambda w: (w._.syntactic_depth, w.i))
def sort_by_idx(tokens):
return sorted(tokens, key=lambda w: w.i)
def siblings(token, side=None):
try:
siblings = token.head.children
except:
return []
if side == 'left':
siblings = [s for s in siblings if s.i < token.i]
elif side == 'right':
siblings = [s for s in siblings if s.i > token.i]
return siblings
def doc_to_nx_graph(doc):
edges = []
for token in doc:
for child in token.children:
edges.append(('{0}-{1}'.format(token.text, token.i),
'{0}-{1}'.format(child.text, child.i)))
graph = nx.Graph(edges)
return graph
def shortest_dependency_path(nx_graph, doc, source, target):
source = '{0}-{1}'.format(source.text, source.i)
target = '{0}-{1}'.format(target.text, target.i)
try:
path = nx.shortest_path(nx_graph, source=source, target=target)
except nx.exception.NetworkXNoPath:
path = []
dep_path = []
for node in path:
idx = int(node.split('-')[-1])
token = doc[idx]
dep_path.append(token)
dep_path = sorted(dep_path, key=lambda t: t._.syntactic_depth)
return dep_path
def smallest_connected_subgraph(with_tokens, doc, nx_graph=None):
# Find root nodes
if not nx_graph:
nx_graph = doc_to_nx_graph(doc)
min_depth = min([t._.syntactic_depth for t in with_tokens])
roots = [t for t in with_tokens if t._.syntactic_depth == min_depth]
non_roots = [t for t in with_tokens if t not in roots]
tokens_touched = roots + non_roots
# For each non-root token, trace paths to each root. This will touch every non-root token we're looking for
for token in non_roots:
for root in roots:
path = shortest_dependency_path(nx_graph, doc, token, root)
for t in path:
if t not in tokens_touched:
tokens_touched.append(t)
tokens_touched = sorted(tokens_touched, key=lambda t: t.i)
# Trace paths between roots
for root_x, root_y in itertools.combinations(roots, 2):
path = shortest_dependency_path(nx_graph, doc, root_x, root_y)
for t in path:
if t not in tokens_touched:
tokens_touched.append(t)
return tokens_touched
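# Hedged usage sketch (requires a loaded spaCy pipeline such as
# en_core_web_sm; not part of the original module):
#
# import spacy
# nlp = spacy.load('en_core_web_sm')
# doc = nlp('The quick brown fox jumps over the lazy dog')
# graph = doc_to_nx_graph(doc)
# path = shortest_dependency_path(graph, doc, doc[1], doc[7])
# subtree = smallest_connected_subgraph([doc[1], doc[7]], doc, nx_graph=graph)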
def idxs_to_tokens(doc, idxs):
return [doc[idx] for idx in idxs]
def token_idxs(tokens):
return [t.i for t in tokens]
def de_duplicate_list(list_):
unique_list = []
for item in list_:
if item not in unique_list:
unique_list.append(item)
return unique_list
def list_contains_duplicates(list_):
unique_list = de_duplicate_list(list_)
if len(list_) > len(unique_list):
return True
return False
def features_are_in_pattern(features, pattern):
for pattern_element in pattern:
for feature in features:
if feature not in pattern_element['PATTERN']:
return False
return True
def flatten_list(list_):
return list(itertools.chain(*list_))
|
the-stack_0_207 | # Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import hashlib
import os
import random
import re
import sys
from abc import ABCMeta, abstractmethod
from threading import Thread
from time import time, sleep
import os.path
from os.path import dirname, exists, isdir, join
import mycroft.util
from mycroft.client.enclosure.api import EnclosureAPI
from mycroft.configuration import Configuration
from mycroft.messagebus.message import Message
from mycroft.metrics import report_timing, Stopwatch
from mycroft.util import (
play_wav, play_mp3, check_for_signal, create_signal, resolve_resource_file
)
from mycroft.util.log import LOG
from queue import Queue, Empty
def send_playback_metric(stopwatch, ident):
"""
Send playback metrics in a background thread
"""
def do_send(stopwatch, ident):
report_timing(ident, 'speech_playback', stopwatch)
t = Thread(target=do_send, args=(stopwatch, ident))
t.daemon = True
t.start()
class PlaybackThread(Thread):
"""
Thread class for playing back tts audio and sending
visime data to enclosure.
"""
def __init__(self, queue):
super(PlaybackThread, self).__init__()
self.queue = queue
self._terminated = False
self._processing_queue = False
self._clear_visimes = False
def init(self, tts):
self.tts = tts
def clear_queue(self):
"""
Remove all pending playbacks.
"""
while not self.queue.empty():
self.queue.get()
try:
self.p.terminate()
except Exception:
pass
def run(self):
"""
Thread main loop. get audio and visime data from queue
and play.
"""
while not self._terminated:
try:
snd_type, data, visimes, ident = self.queue.get(timeout=2)
self.blink(0.5)
if not self._processing_queue:
self._processing_queue = True
self.tts.begin_audio()
stopwatch = Stopwatch()
with stopwatch:
if snd_type == 'wav':
self.p = play_wav(data)
elif snd_type == 'mp3':
self.p = play_mp3(data)
if visimes:
if self.show_visimes(visimes):
self.clear_queue()
else:
self.p.communicate()
self.p.wait()
send_playback_metric(stopwatch, ident)
if self.queue.empty():
self.tts.end_audio()
self._processing_queue = False
self.blink(0.2)
except Empty:
pass
except Exception as e:
LOG.exception(e)
if self._processing_queue:
self.tts.end_audio()
self._processing_queue = False
def show_visimes(self, pairs):
"""
Send visime data to enclosure
Args:
pairs(list): Visime and timing pair
Returns:
True if button has been pressed.
"""
start = time()
for code, duration in pairs:
if self._clear_visimes:
self._clear_visimes = False
return True
if self.enclosure:
# Include time stamp to assist with animation timing
self.enclosure.mouth_viseme(code, start + duration)
delta = time() - start
if delta < duration:
sleep(duration - delta)
return False
def clear_visimes(self):
self._clear_visimes = True
def blink(self, rate=1.0):
""" Blink mycroft's eyes """
if self.enclosure and random.random() < rate:
self.enclosure.eyes_blink("b")
def stop(self):
""" Stop thread """
self._terminated = True
self.clear_queue()
class TTS(object):
"""
TTS abstract class to be implemented by all TTS engines.
It aggregates the minimum required parameters and exposes
``execute(sentence)`` and ``validate_ssml(sentence)`` functions.
Args:
lang (str):
config (dict): Configuration for this specific tts engine
validator (TTSValidator): Used to verify proper installation
phonetic_spelling (bool): Whether to spell certain words phonetically
ssml_tags (list): Supported ssml properties. Ex. ['speak', 'prosody']
"""
__metaclass__ = ABCMeta
def __init__(self, lang, config, validator, audio_ext='wav',
phonetic_spelling=True, ssml_tags=None):
super(TTS, self).__init__()
self.lang = lang or 'en-us'
self.config = config
self.validator = validator
self.phonetic_spelling = phonetic_spelling
self.audio_ext = audio_ext
self.ssml_tags = ssml_tags or []
self.voice = config.get("voice")
self.filename = '/tmp/tts.wav'
self.enclosure = None
random.seed()
self.queue = Queue()
self.playback = PlaybackThread(self.queue)
self.playback.start()
self.clear_cache()
self.spellings = self.load_spellings()
def load_spellings(self):
"""Load phonetic spellings of words as dictionary"""
path = join('text', self.lang, 'phonetic_spellings.txt')
spellings_file = resolve_resource_file(path)
if not spellings_file:
return {}
try:
with open(spellings_file) as f:
lines = filter(bool, f.read().split('\n'))
lines = [i.split(':') for i in lines]
return {key.strip(): value.strip() for key, value in lines}
except ValueError:
LOG.exception('Failed to load phonetic spellings.')
return {}
def begin_audio(self):
"""Helper function for child classes to call in execute()"""
# Create signals informing start of speech
self.ws.emit(Message("recognizer_loop:audio_output_start"))
def end_audio(self):
"""
Helper function for child classes to call in execute().
Sends the recognizer_loop:audio_output_end message, indicating
that speaking is done for the moment. It also checks if cache
directory needs cleaning to free up disk space.
"""
self.ws.emit(Message("recognizer_loop:audio_output_end"))
# Clean the cache as needed
cache_dir = mycroft.util.get_cache_directory("tts")
mycroft.util.curate_cache(cache_dir, min_free_percent=100)
# This check will clear the "signal"
check_for_signal("isSpeaking")
def init(self, ws):
self.ws = ws
self.playback.init(self)
self.enclosure = EnclosureAPI(self.ws)
self.playback.enclosure = self.enclosure
def get_tts(self, sentence, wav_file):
"""
Abstract method that a tts implementation needs to implement.
Should get data from tts.
Args:
sentence(str): Sentence to synthesize
wav_file(str): output file
Returns:
tuple: (wav_file, phoneme)
"""
pass
def modify_tag(self, tag):
"""Override to modify each supported ssml tag"""
return tag
@staticmethod
def remove_ssml(text):
return re.sub('<[^>]*>', '', text).replace('  ', ' ')
def validate_ssml(self, utterance):
"""
Check if engine supports ssml, if not remove all tags
Remove unsupported / invalid tags
Args:
utterance(str): Sentence to validate
Returns: validated_sentence (str)
"""
# if ssml is not supported by TTS engine remove all tags
if not self.ssml_tags:
return self.remove_ssml(utterance)
# find ssml tags in string
tags = re.findall('<[^>]*>', utterance)
for tag in tags:
if any(supported in tag for supported in self.ssml_tags):
utterance = utterance.replace(tag, self.modify_tag(tag))
else:
# remove unsupported tag
utterance = utterance.replace(tag, "")
# return text with supported ssml tags only
return utterance.replace(" ", " ")
def execute(self, sentence, ident=None):
"""
Convert sentence to speech, preprocessing out unsupported ssml
The method caches results if possible using the hash of the
sentence.
Args:
sentence: Sentence to be spoken
ident: Id reference to current interaction
"""
sentence = self.validate_ssml(sentence)
create_signal("isSpeaking")
if self.phonetic_spelling:
for word in re.findall(r"[\w']+", sentence):
if word.lower() in self.spellings:
sentence = sentence.replace(word,
self.spellings[word.lower()])
key = str(hashlib.md5(sentence.encode('utf-8', 'ignore')).hexdigest())
wav_file = os.path.join(mycroft.util.get_cache_directory("tts"),
key + '.' + self.audio_ext)
if os.path.exists(wav_file):
LOG.debug("TTS cache hit")
phonemes = self.load_phonemes(key)
else:
wav_file, phonemes = self.get_tts(sentence, wav_file)
if phonemes:
self.save_phonemes(key, phonemes)
vis = self.visime(phonemes)
self.queue.put((self.audio_ext, wav_file, vis, ident))
def visime(self, phonemes):
"""
Create visimes from phonemes. Needs to be implemented by each
TTS backend.
Args:
phonemes(str): String with phoneme data
"""
return None
def clear_cache(self):
""" Remove all cached files. """
if not os.path.exists(mycroft.util.get_cache_directory('tts')):
return
for f in os.listdir(mycroft.util.get_cache_directory("tts")):
file_path = os.path.join(mycroft.util.get_cache_directory("tts"),
f)
if os.path.isfile(file_path):
os.unlink(file_path)
def save_phonemes(self, key, phonemes):
"""
Cache phonemes
Args:
key: Hash key for the sentence
phonemes: phoneme string to save
"""
cache_dir = mycroft.util.get_cache_directory("tts")
pho_file = os.path.join(cache_dir, key + ".pho")
try:
with open(pho_file, "w") as cachefile:
cachefile.write(phonemes)
except Exception:
LOG.debug("Failed to write .PHO to cache")
def load_phonemes(self, key):
"""
Load phonemes from cache file.
Args:
Key: Key identifying phoneme cache
"""
pho_file = os.path.join(mycroft.util.get_cache_directory("tts"),
key + ".pho")
if os.path.exists(pho_file):
try:
with open(pho_file, "r") as cachefile:
phonemes = cachefile.read().strip()
return phonemes
            except Exception:
LOG.debug("Failed to read .PHO from cache")
return None
def __del__(self):
self.playback.stop()
self.playback.join()
class TTSValidator(object):
"""
TTS Validator abstract class to be implemented by all TTS engines.
    It exposes and implements the ``validate(tts)`` function as a template
    for validating TTS engines.
"""
__metaclass__ = ABCMeta
def __init__(self, tts):
self.tts = tts
def validate(self):
self.validate_dependencies()
self.validate_instance()
self.validate_filename()
self.validate_lang()
self.validate_connection()
def validate_dependencies(self):
pass
def validate_instance(self):
clazz = self.get_tts_class()
if not isinstance(self.tts, clazz):
raise AttributeError('tts must be instance of ' + clazz.__name__)
def validate_filename(self):
filename = self.tts.filename
if not (filename and filename.endswith('.wav')):
raise AttributeError('file: %s must be in .wav format!' % filename)
dir_path = dirname(filename)
if not (exists(dir_path) and isdir(dir_path)):
raise AttributeError('filename: %s is not valid!' % filename)
@abstractmethod
def validate_lang(self):
pass
@abstractmethod
def validate_connection(self):
pass
@abstractmethod
def get_tts_class(self):
pass
class TTSFactory(object):
from mycroft.tts.espeak_tts import ESpeak
from mycroft.tts.fa_tts import FATTS
from mycroft.tts.google_tts import GoogleTTS
from mycroft.tts.mary_tts import MaryTTS
from mycroft.tts.mimic_tts import Mimic
from mycroft.tts.spdsay_tts import SpdSay
from mycroft.tts.ibm_tts import WatsonTTS
from mycroft.tts.polly_tts import PollyTTS
from mycroft.tts.bing_tts import BingTTS
from mycroft.tts.beepspeak_tts import BeepSpeak
from mycroft.tts.responsive_voice_tts import ResponsiveVoice
CLASSES = {
"mimic": Mimic,
"google": GoogleTTS,
"marytts": MaryTTS,
"fatts": FATTS,
"espeak": ESpeak,
"spdsay": SpdSay,
"polly": PollyTTS,
"watson": WatsonTTS,
"bing": BingTTS,
"beep_speak": BeepSpeak,
"responsive_voice": ResponsiveVoice
}
@staticmethod
def create():
"""
Factory method to create a TTS engine based on configuration.
The configuration file ``mycroft.conf`` contains a ``tts`` section with
the name of a TTS module to be read by this method.
"tts": {
"module": <engine_name>
}
"""
config = Configuration.get()
lang = config.get("lang", "en-us")
tts_module = config.get('tts', {}).get('module', 'mimic')
tts_config = config.get('tts', {}).get(tts_module, {})
tts_lang = tts_config.get('lang', lang)
clazz = TTSFactory.CLASSES.get(tts_module)
tts = clazz(tts_lang, tts_config)
tts.validator.validate()
return tts
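# Example usage (sketch; assumes a connected message bus client `ws` and a
# valid "tts" section in mycroft.conf):
#   tts = TTSFactory.create()
#   tts.init(ws)
#   tts.execute("Hello world", ident="demo-1")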
|
the-stack_0_208 | import os
# cur_directory = os.getcwd() # Return the current working directory
# print(cur_directory)
# os.chdir('/server/accesslogs') # Change current working directory
# os.system('mkdir today') # Run the command mkdir in the system shell.
# There was today folder created in the root folder.
# print(dir(os)) # returns a list of all module functions.
# print(help(os)) # returns an extensive manual page created from the module's docstrings.
# import shutil
# shutil.copyfile('data.db', 'archive.db')
# shutil.move('/build/executables', 'installdir')
# import glob
# file_list = glob.glob('*.py')
# print(file_list)
# import sys
# print(sys.argv)
# # The argparse module provides more sophisticated mechanisms to process command line arguments
# import argparse
# parser = argparse.ArgumentParser(prog = 'top', description = 'Show top lines from each file')
# parser.add_argument('filenames', nargs='+')
# parser.add_argument('-l', '--lines', type=int, default=10)
# args = parser.parse_args()
# print(args)
import re
# f_words = re.findall(r'\bf[a-z]*', 'which foot or hand fell fastest')
# print(f_words)
# sub = re.sub(r'(\b[a-z]+) \1', r'\1', 'cat in the the hat')
# print(sub)
a = 'tea for too'.replace('too', 'two')
print(a)
# Mathematics
import math
cos = math.cos(math.pi / 4)
print(cos)
log = math.log(1024, 2)
print(log)
# The random module provides tools for making random selections.
# import random
# fruit = random.choice(['apple', 'pear', 'banana'])
# print(fruit)
# sample = random.sample(range(100), 10) # sampling without replacement
# print(sample)
# random_float = random.random()
# print(random_float)
# integer_number = random.randrange(6) # random integer chosen from range(6)
# print(integer_number)
# The statistics module calculates basic statistical properties
# (the mean, median, variance, etc.) of numeric data.
import statistics
data = [2.75, 1.75, 1.25, 0.25, 0.5, 1.25, 3.5]
print("mean", statistics.mean(data))
print("median :", statistics.median(data))
print("variance :", statistics.variance(data))
#
# from urllib.request import urlopen
# with urlopen('http://tycho.usno.navy.mil/cgi-bin/timer.pl') as response:
# for line in response:
# line = line.decode('utf-8') # Decoding the binary data to text.
# if 'EST' in line or 'EDT' in line: # Look for Eastern Time
# print(line)
#
# import smtplib
#
# server = smtplib.SMTP('localhost')
# server.sendmail('[email protected]', '[email protected]',
# """To : [email protected]
# From : [email protected]
#
# Beware the Ides of March.
# """)
# server.quit()
#
# import zlib
# s = b'witch which has which witches wrist watch'
# print(len(s))
#
# t = zlib.compress(s)
# print(len(t))
#
# u = zlib.decompress(t)
# print(u)
#
# print(zlib.crc32(s))
from timeit import Timer
a = Timer('t=a; a=b; b=t', 'a=1;b=2').timeit()
print(a)
b = Timer('a,b = b,a', 'a=1; b=2').timeit()
print(b)
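# Timer.timeit() runs the statement one million times by default; pass
# number=... for a shorter run, e.g. (sketch):
# Timer('a,b = b,a', 'a=1; b=2').timeit(number=100000)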
#
#
# def for_number(number):
# for_list = []
# for i in range(0, 2000000):
# for_list.append(i)
#
# def while_number(number):
# while_list = []
# i = 0
# while i < 2000000:
# while_list.append(i)
# i = i + 1
#
# d = Timer(while_number(2000000)).timeit()
#
# print(d)
#
# c = Timer(for_number(2000000)).timeit()
# print(c)
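# Note: the commented-out Timer calls above pass the *result* of calling
# for_number/while_number (which is None) instead of a timeable statement.
# A working version (sketch) would pass a statement plus a setup import:
# Timer('for_number(2000000)', 'from __main__ import for_number').timeit(number=1)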
def average(values):
"""Computes the arithmetic mean of a list of numbers.
>>> print(average([20, 30, 70]))
40.0
"""
return sum(values) / len(values)
import doctest
z = doctest.testmod()
print(z)
import unittest
class TestStatisticalFunctions(unittest.TestCase):
def test_average(self):
self.assertEqual(average([20,30,70]), 40.0)
self.assertEqual(round(average([1, 5, 7]), 1), 4.3)
with self.assertRaises((ZeroDivisionError)):
average([])
with self.assertRaises(TypeError):
average(20, 30, 70)
unittest.main() |
the-stack_0_210 | #!/usr/local/bin/python3
from os import system, path, getcwd
filePath = "assets/alexa-20180320.csv"
print("read file: " + filePath)
with open(filePath) as f:
content = f.readlines()
lines = [x.strip() for x in content]
for i in range(0, 10):
siteName = lines[i].split(",")[1]
print("Dealing with " + str(i) + " " + siteName)
script = "scripts/extractArchive.sh"
siteName = siteName.replace(".", "_")
command = "sh %s %s" % (script, siteName)
print("run command %s" % command)
system(command)
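# Note: os.system interpolates siteName into a shell command string; the
# subprocess module sidesteps shell quoting issues, e.g. (sketch):
# from subprocess import run
# run(["sh", script, siteName], check=True)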
|
the-stack_0_211 | #!/usr/bin/python3
import sys
def safe_print_integer_err(value):
is_int = True
try:
print("{:d}".format(value))
except Exception as e:
print("Exception:", e, file=sys.stderr)
is_int = False
return is_int
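# Example (sketch):
# safe_print_integer_err(98) # prints "98" and returns True
# safe_print_integer_err("Hi") # reports the exception on stderr, returns False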
|
the-stack_0_213 | import pyspark.sql.functions as psf
from pyspark.sql import SparkSession, DataFrame
from .data_link import DataLink
class JdbcDataLink(DataLink):
def __init__(
self,
environment: str,
session: SparkSession,
url: str,
username: str,
password: str,
driver: str,
table: str,
save_mode: str = "error",
number_of_partitions: int = 1,
partition_column: str = "",
):
super().__init__(environment, session)
self.number_of_partitions = number_of_partitions
self.partition_column = partition_column
self.save_mode = save_mode
self.connection_properties = {
"url": url,
"user": username,
"password": password,
"driver": driver,
"dbtable": table,
}
def read(self) -> DataFrame:
        # Partition parameters are only applicable to the read operation.
        # Note: the option order matters, as later options can overwrite earlier ones.
reader = self.spark.read.format("jdbc").options(**self.connection_properties)
if self.number_of_partitions == 1:
return reader.load()
else:
# We need a partition column
if self.partition_column == "":
raise AssertionError("Partitioning column should not be empty.")
col = psf.col(self.partition_column)
# Retrieve lower and upper bound first to determine the degree of parallelism
lower_bound, upper_bound = (
reader.load()
.select(
psf.min(col).alias("mmin"),
psf.max(col).alias("mmax"),
)
.collect()[0]
)
return (
reader.option("partitionColumn", self.partition_column)
.option("numPartitions", str(self.number_of_partitions))
.option("lowerBound", str(lower_bound))
.option("upperBound", str(upper_bound))
.load()
)
def write(self, frame: DataFrame) -> None:
(
frame.write.format("jdbc")
.mode(self.save_mode)
.options(**self.connection_properties)
.save()
)
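# Example (sketch; assumes a live SparkSession `spark`, a reachable
# PostgreSQL instance, and its JDBC driver on the Spark classpath):
# link = JdbcDataLink(
#     environment="dev", session=spark,
#     url="jdbc:postgresql://localhost:5432/mydb",
#     username="user", password="secret",
#     driver="org.postgresql.Driver", table="events",
#     number_of_partitions=4, partition_column="id",
# )
# df = link.read()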
|
the-stack_0_219 | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2021
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains an object that represents a Telegram Location."""
from typing import Any
from telegram import TelegramObject
class Location(TelegramObject):
"""This object represents a point on the map.
Objects of this class are comparable in terms of equality. Two objects of this class are
    considered equal, if their :attr:`longitude` and :attr:`latitude` are equal.
Args:
longitude (:obj:`float`): Longitude as defined by sender.
latitude (:obj:`float`): Latitude as defined by sender.
horizontal_accuracy (:obj:`float`, optional): The radius of uncertainty for the location,
measured in meters; 0-1500.
live_period (:obj:`int`, optional): Time relative to the message sending date, during which
the location can be updated, in seconds. For active live locations only.
heading (:obj:`int`, optional): The direction in which user is moving, in degrees; 1-360.
For active live locations only.
proximity_alert_radius (:obj:`int`, optional): Maximum distance for proximity alerts about
approaching another chat member, in meters. For sent live locations only.
**kwargs (:obj:`dict`): Arbitrary keyword arguments.
Attributes:
longitude (:obj:`float`): Longitude as defined by sender.
latitude (:obj:`float`): Latitude as defined by sender.
horizontal_accuracy (:obj:`float`): Optional. The radius of uncertainty for the location,
measured in meters.
live_period (:obj:`int`): Optional. Time relative to the message sending date, during which
the location can be updated, in seconds. For active live locations only.
heading (:obj:`int`): Optional. The direction in which user is moving, in degrees.
For active live locations only.
proximity_alert_radius (:obj:`int`): Optional. Maximum distance for proximity alerts about
approaching another chat member, in meters. For sent live locations only.
"""
def __init__(
self,
longitude: float,
latitude: float,
horizontal_accuracy: float = None,
live_period: int = None,
heading: int = None,
proximity_alert_radius: int = None,
**_kwargs: Any,
):
# Required
self.longitude = float(longitude)
self.latitude = float(latitude)
# Optionals
self.horizontal_accuracy = float(horizontal_accuracy) if horizontal_accuracy else None
self.live_period = int(live_period) if live_period else None
self.heading = int(heading) if heading else None
self.proximity_alert_radius = (
int(proximity_alert_radius) if proximity_alert_radius else None
)
self._id_attrs = (self.longitude, self.latitude)
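# Equality sketch: since _id_attrs only covers the coordinates,
# Location(13.4, 52.52) == Location(13.4, 52.52, horizontal_accuracy=10.0)
# evaluates True even though the optional fields differ.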
|
the-stack_0_221 |
#from app import app, db
from app import db
from sqlalchemy.orm import relationship
import datetime
class Post(db.Model):
"""
Table schema
"""
__tablename__ = "posts"
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
post_id = db.Column(db.Text(), nullable=False)
origin_blog = db.Column(db.Integer, db.ForeignKey('blogs.id'))
date = db.Column(db.DateTime(), nullable=False)
content = db.Column(db.Text(), nullable=False)
title = db.Column(db.Text())
post_url = db.Column(db.Text())
blog = relationship("Blog", back_populates="posts")
def __repr__(self):
return "Post: {}.".format(self.title)
@staticmethod
def get_posts():
return Post.query.order_by(Post.date.desc()).limit(10).all()
class Blog(db.Model):
"""
Blog model
"""
__tablename__ = "blogs"
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
url = db.Column(db.Text())
feed_url = db.Column(db.Text())
last_update = db.Column(db.DateTime())
name = db.Column(db.Text())
posts = relationship("Post", order_by=Post.id, back_populates="blog")
|
the-stack_0_222 | # -*- coding: utf-8 -*-
"""setup.py"""
import os
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
class Tox(TestCommand):
user_options = [('tox-args=', 'a', 'Arguments to pass to tox')]
def initialize_options(self):
TestCommand.initialize_options(self)
self.tox_args = None
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import tox
import shlex
if self.tox_args:
errno = tox.cmdline(args=shlex.split(self.tox_args))
else:
errno = tox.cmdline(self.test_args)
sys.exit(errno)
def read_content(filepath):
with open(filepath) as fobj:
return fobj.read()
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
]
long_description = (
read_content("README.rst") +
read_content(os.path.join("docs/source", "CHANGELOG.rst")))
requires = [
'pyyaml',
'setuptools',
'click',
'sphinx-click',
'texttable',
'coverage'
]
extras_require = {
'reST': ['Sphinx'],
}
if os.environ.get('READTHEDOCS', None):
extras_require['reST'].append('recommonmark')
setup(name='pymerit',
version='0.1.0',
description='Standardized metadata',
long_description=long_description,
author='Venkata Pingali',
author_email='[email protected]',
url='https://github.com/pingali/pymerit',
classifiers=classifiers,
packages=['pymerit'],
data_files = [("", ["LICENSE.txt"])],
install_requires=requires,
include_package_data=True,
extras_require=extras_require,
tests_require=['tox'],
cmdclass={'test': Tox},
entry_points = {
'console_scripts': ['merit=pymerit.cli:main'],
}
)
|
the-stack_0_223 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.training.evaluation."""
import os
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.layers import layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import metrics as metrics_module
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops.losses import losses
from tensorflow.python.platform import test
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import evaluation
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver
from tensorflow.python.training import training
_USE_GLOBAL_STEP = 0
def logistic_classifier(inputs):
return layers.dense(inputs, 1, activation=math_ops.sigmoid)
def local_variable(init_value, name):
return variable_scope.get_variable(
name,
dtype=dtypes.float32,
initializer=init_value,
trainable=False,
collections=[ops.GraphKeys.LOCAL_VARIABLES])
class EvaluateOnceTest(test.TestCase):
def setUp(self):
super(EvaluateOnceTest, self).setUp()
# Create an easy training set:
np.random.seed(0)
self._inputs = np.zeros((16, 4))
self._labels = np.random.randint(0, 2, size=(16, 1)).astype(np.float32)
for i in range(16):
j = int(2 * self._labels[i] + np.random.randint(0, 2))
self._inputs[i, j] = 1
def _train_model(self, checkpoint_dir, num_steps):
"""Trains a simple classification model.
Note that the data has been configured such that after around 300 steps,
    the model has memorized the dataset (e.g. we can expect 100% accuracy).
Args:
checkpoint_dir: The directory where the checkpoint is written to.
num_steps: The number of steps to train for.
"""
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = logistic_classifier(tf_inputs)
loss_op = losses.log_loss(labels=tf_labels, predictions=tf_predictions)
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = optimizer.minimize(loss_op,
training.get_or_create_global_step())
with monitored_session.MonitoredTrainingSession(
checkpoint_dir=checkpoint_dir,
hooks=[basic_session_run_hooks.StopAtStepHook(num_steps)]) as session:
loss = None
while not session.should_stop():
_, loss = session.run([train_op, loss_op])
if num_steps >= 300:
assert loss < .015
def testEvaluatePerfectModel(self):
checkpoint_dir = os.path.join(self.get_temp_dir(),
'evaluate_perfect_model_once')
# Train a Model to completion:
self._train_model(checkpoint_dir, num_steps=300)
# Run
inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
labels = constant_op.constant(self._labels, dtype=dtypes.float32)
logits = logistic_classifier(inputs)
predictions = math_ops.round(logits)
accuracy, update_op = metrics_module.accuracy(labels, predictions)
checkpoint_path = saver.latest_checkpoint(checkpoint_dir)
final_ops_values = evaluation._evaluate_once(
checkpoint_path=checkpoint_path,
eval_ops=update_op,
final_ops={'accuracy': (accuracy, update_op)},
hooks=[
evaluation._StopAfterNEvalsHook(1),
])
self.assertGreater(final_ops_values['accuracy'], .99)
def testEvaluateWithFiniteInputs(self):
checkpoint_dir = os.path.join(self.get_temp_dir(),
'evaluate_with_finite_inputs')
# Train a Model to completion:
self._train_model(checkpoint_dir, num_steps=300)
# Run evaluation. Inputs are fed through input producer for one epoch.
all_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
all_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
single_input, single_label = training.slice_input_producer(
[all_inputs, all_labels], num_epochs=1)
inputs, labels = training.batch([single_input, single_label], batch_size=6,
allow_smaller_final_batch=True)
logits = logistic_classifier(inputs)
predictions = math_ops.round(logits)
accuracy, update_op = metrics_module.accuracy(labels, predictions)
checkpoint_path = saver.latest_checkpoint(checkpoint_dir)
final_ops_values = evaluation._evaluate_once(
checkpoint_path=checkpoint_path,
eval_ops=update_op,
final_ops={
'accuracy': (accuracy, update_op),
'eval_steps': evaluation._get_or_create_eval_step()
},
hooks=[
evaluation._StopAfterNEvalsHook(None),
])
self.assertTrue(final_ops_values['accuracy'] > .99)
# Runs evaluation for 4 iterations. First 2 evaluate full batch of 6 inputs
# each; the 3rd iter evaluates the remaining 4 inputs, and the last one
# triggers an error which stops evaluation.
self.assertEqual(final_ops_values['eval_steps'], 4)
def testEvalOpAndFinalOp(self):
checkpoint_dir = os.path.join(self.get_temp_dir(), 'eval_ops_and_final_ops')
# Train a model for a single step to get a checkpoint.
self._train_model(checkpoint_dir, num_steps=1)
checkpoint_path = saver.latest_checkpoint(checkpoint_dir)
# Create the model so we have something to restore.
inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
logistic_classifier(inputs)
num_evals = 5
final_increment = 9.0
my_var = local_variable(0.0, name='MyVar')
eval_ops = state_ops.assign_add(my_var, 1.0)
final_ops = array_ops.identity(my_var) + final_increment
final_hooks = [evaluation._StopAfterNEvalsHook(num_evals),]
initial_hooks = list(final_hooks)
final_ops_values = evaluation._evaluate_once(
checkpoint_path=checkpoint_path,
eval_ops=eval_ops,
final_ops={'value': final_ops},
hooks=final_hooks)
self.assertEqual(final_ops_values['value'], num_evals + final_increment)
self.assertEqual(initial_hooks, final_hooks)
def testMultiEvalStepIncrements(self):
checkpoint_dir = os.path.join(self.get_temp_dir(), 'eval_ops_and_final_ops')
# Train a model for a single step to get a checkpoint.
self._train_model(checkpoint_dir, num_steps=1)
checkpoint_path = saver.latest_checkpoint(checkpoint_dir)
# Create the model so we have something to restore.
inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
logistic_classifier(inputs)
num_evals = 6
my_var = local_variable(0.0, name='MyVar')
# In eval ops, we also increase the eval step one more time.
eval_ops = [state_ops.assign_add(my_var, 1.0),
state_ops.assign_add(
evaluation._get_or_create_eval_step(), 1, use_locking=True)]
expect_eval_update_counts = num_evals // 2
final_ops = array_ops.identity(my_var)
final_ops_values = evaluation._evaluate_once(
checkpoint_path=checkpoint_path,
eval_ops=eval_ops,
final_ops={'value': final_ops},
hooks=[evaluation._StopAfterNEvalsHook(num_evals),])
self.assertEqual(final_ops_values['value'], expect_eval_update_counts)
def testOnlyFinalOp(self):
checkpoint_dir = os.path.join(self.get_temp_dir(), 'only_final_ops')
# Train a model for a single step to get a checkpoint.
self._train_model(checkpoint_dir, num_steps=1)
checkpoint_path = saver.latest_checkpoint(checkpoint_dir)
# Create the model so we have something to restore.
inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
logistic_classifier(inputs)
final_increment = 9.0
my_var = local_variable(0.0, name='MyVar')
final_ops = array_ops.identity(my_var) + final_increment
final_ops_values = evaluation._evaluate_once(
checkpoint_path=checkpoint_path, final_ops={'value': final_ops})
self.assertEqual(final_ops_values['value'], final_increment)
if __name__ == '__main__':
test.main()
|
the-stack_0_224 | class Solution:
def decodeCiphertext(self, encodedText: str, rows: int) -> str:
n = len(encodedText)
cols = n // rows
ans = []
matrix = [[' '] * cols for _ in range(rows)]
for i in range(rows):
for j in range(cols):
matrix[i][j] = encodedText[i * cols + j]
for col in range(cols):
i = 0
j = col
while i < rows and j < cols:
ans.append(matrix[i][j])
i += 1
j += 1
return ''.join(ans).rstrip()
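    # Worked example (sketch): encodedText = "ch   ie   pr" with rows = 3
    # fills a 3 x 4 matrix
    #   c h . .
    #   . i e .
    #   . . p r
    # (dots marking spaces); reading each diagonal top-left to bottom-right
    # and stripping trailing spaces restores "cipher".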
|
the-stack_0_226 | import os
import re
import numpy as np
from PIL import Image
from keras.models import load_model
from keras.preprocessing.image import img_to_array
from sklearn.preprocessing import LabelEncoder
class ModelManager():
def __init__(self, db, s3, graph, backup_model, backup_label_encoder, collection_name='models', bucket_name='models'):
self.conn = db[collection_name]
self.s3_bucket = s3.Bucket(bucket_name)
self.label_encoder = LabelEncoder()
self.model = None
self.backup_model = backup_model
self.backup_label_encoder = backup_label_encoder
self.graph = graph
def predict(self, image_array):
"""Function that accepts a model and image_array and returns a prediction."""
if self.model:
with self.graph.as_default():
out = self.model.predict_classes(image_array)
return self.label_encoder.inverse_transform(out)
with self.graph.as_default():
out = self.backup_model.predict_classes(image_array)
return self.backup_label_encoder.inverse_transform(out)
def preprocess(self, image_file):
"""Preprocess the given image file by resizing and turning it into an array."""
img = Image.open(image_file)
image_resize = img.resize((40, 24))
image = img_to_array(image_resize)
x = image.reshape(1, 40, 24, 1)
return x / 255
def load(self, model_path, label_encoder_path):
"""Load a fitted model from the local filesystem into memory. "./models/classes.npy"""
self.label_encoder.classes_ = np.load(label_encoder_path)
self.model = load_model(model_path)
def load_latest_model(self):
"""Load a fitted model from a remote filesystem into memory."""
latest_model = list(self.conn.find({'stage': 'PRODUCTION'}).sort([("version", -1)]).limit(1))
if len(latest_model) == 0:
return
latest_model = latest_model[0]
if 'MODEL_NAME' in os.environ and latest_model['name'] == os.environ['MODEL_NAME']:
if 'MODEL_VERSION' in os.environ and str(latest_model['version']) == os.environ['MODEL_VERSION']:
return
for f in os.listdir('./models'):
if not f.endswith(".h5") and not f.endswith(".npy"):
continue
os.remove(os.path.join('./models', f))
# Download model h5 file
model = re.sub('name', latest_model['name'], latest_model['naming_format'])
model = re.sub('0', str(latest_model['version']), model)
self.s3_bucket.download_file(model, f'./models/{model}')
# Download model classes npy file
classes = re.sub('name', latest_model['name'], latest_model['naming_format'])
classes = re.sub('0', str(latest_model['version']), classes)
classes = re.sub('h5', 'npy', classes)
self.s3_bucket.download_file(classes, f'./models/{classes}')
os.environ['MODEL_NAME'] = latest_model['name']
os.environ['MODEL_VERSION'] = str(latest_model['version'])
def get_latest_online_model_version(self):
        initial_model = list(self.conn.find({"name": 'online-model'}).sort([("version", -1)]).limit(1))
        return initial_model[0]['version'] if initial_model else 0
def _insert(self, name, version, metrics_str, naming_format, stage):
model_doc = {
"name": name,
"version": version,
"naming_format": naming_format,
"metrics": metrics_str,
"stage": stage
}
self.conn.insert_one(model_doc)
def publish_model(self, model_path, classes_path, name, metrics):
latest_model = list(self.conn.find({'stage': 'PRODUCTION', 'name': name}).sort([("version", -1)]).limit(1))
# Save model file to AWS S3 bucket
version = 1
if len(latest_model) > 0:
            version = latest_model[0]['version'] + 1
self.s3_bucket.upload_file(Filename=model_path, Key=f'{name}-v{version}.h5')
self.s3_bucket.upload_file(Filename=classes_path, Key=f'{name}-v{version}.npy')
# Create and save a new model doc with related information
self._insert(name, version, metrics, 'name-v0.h5', 'PRODUCTION')
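# Example (sketch; assumes MongoDB/S3 handles plus a fallback model):
# manager = ModelManager(db, s3, graph, backup_model, backup_label_encoder)
# manager.publish_model("./models/online-model.h5", "./models/classes.npy",
#                       name="online-model", metrics='{"accuracy": 0.97}')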
|
the-stack_0_230 | import numpy as np
import tensorflow as tf
from mlagents.envs import UnityEnvironment
initKernelAndBias={
'kernel_initializer' : tf.random_normal_initializer(0., .1),
'bias_initializer' : tf.constant_initializer(0.1)
}
class Actor(object):
def __init__(self, sess, observationDim, actionDim, learning_rate=0.001, update_frequency=10):
self.sess=sess
self.s=tf.placeholder(tf.float32, [1,observationDim],"state")
self.a=tf.placeholder(tf.float32, [1,actionDim],"action")
self.advantage=tf.placeholder(tf.float32,[1,1],"advantage")
self.update_frequency=update_frequency
with tf.variable_scope("ActorMain"):
layer1 = tf.layers.dense(
inputs=self.s,
units=20,
activation=tf.nn.relu,
name='layer1',
**initKernelAndBias
)
layer2=tf.layers.dense(
inputs=layer1,
units=20,
activation=tf.nn.relu,
name='layer2',
**initKernelAndBias
)
self.mu = tf.layers.dense(
inputs=layer2,
units=actionDim,
activation=None,
name='mu',
**initKernelAndBias
)
self.norm_dist = tf.distributions.Normal(loc=self.mu,scale=[1.]*actionDim)
self.var1=tf.get_variable_scope().global_variables()
with tf.variable_scope("Actor2"):
layer1 = tf.layers.dense(
inputs=self.s,
units=20,
activation=tf.nn.relu,
name='layer1',
**initKernelAndBias,
trainable=False
)
layer2=tf.layers.dense(
inputs=layer1,
units=20,
activation=tf.nn.relu,
name='layer2',
**initKernelAndBias,
trainable=False
)
self.mu = tf.layers.dense(
inputs=layer2,
units=actionDim,
activation=None,
name='mu',
**initKernelAndBias,
trainable=False
)
self.norm_dist_behavior = tf.distributions.Normal(loc=self.mu,scale=[1.]*actionDim)
self.sample_op = self.norm_dist_behavior.sample()
self.var2=tf.get_variable_scope().global_variables()
with tf.variable_scope('exp_v'):
self.log_prob = self.norm_dist.log_prob(self.a)
self.exp_v = tf.reduce_mean(self.log_prob*self.advantage)
with tf.variable_scope('train'):
self.train_op = tf.train.AdamOptimizer(learning_rate).minimize(-self.exp_v)
with tf.variable_scope('assign'):
self.assign_target_to_behavior=[tf.assign(r, v) for r, v in zip(self.var2, self.var1)]
def choose_action(self, s):
return self.sess.run(self.sample_op,feed_dict={
self.s:s
})
def learn(self, s, a, advantage, step):
if step % self.update_frequency == 0:
self.sess.run([self.train_op, self.assign_target_to_behavior],feed_dict={
self.s:s,
self.a:a,
self.advantage:advantage
})
else:
self.sess.run(self.train_op,feed_dict={
self.s:s,
self.a:a,
self.advantage:advantage
})
class Critic(object):
def __init__(self, sess, observationDim, learning_rate=0.01, gamma=0.95):
self.sess= sess
self.s = tf.placeholder(tf.float32, [1,observationDim],"state")
self.r = tf.placeholder(tf.float32, [1,1],"reward")
self.v_ = tf.placeholder(tf.float32, [1,1], "value_of_next")
with tf.variable_scope('Critic'):
layer1 = tf.layers.dense(
inputs=self.s,
units=30,
activation=tf.nn.relu,
name='layer1',
**initKernelAndBias
)
layer2 = tf.layers.dense(
inputs=layer1,
units=10,
activation=tf.nn.relu,
name='layer2',
**initKernelAndBias
)
self.v = tf.layers.dense(
inputs=layer2,
units=1,
activation=None,
name='Value',
**initKernelAndBias
)
with tf.variable_scope('square_advantage'):
self.advantage = tf.reduce_mean(self.r + gamma*self.v_-self.v)
self.loss = tf.square(self.advantage)
with tf.variable_scope('train'):
self.train_op = tf.train.AdamOptimizer(learning_rate).minimize(self.loss)
def learn(self, s, r, s_):
v_ = self.sess.run(self.v, feed_dict={
self.s: s_
})
advantage, _ = self.sess.run([self.advantage, self.train_op], feed_dict={
self.s: s,
self.v_: v_,
self.r: r
})
return advantage
env = UnityEnvironment()
brain_name = env.brain_names[0]
brain = env.brains[brain_name]
print(brain.vector_observation_space_size)
print(brain.vector_action_space_size)
sess = tf.Session()
actor = Actor(
sess=sess,
observationDim=brain.vector_observation_space_size,
actionDim=brain.vector_action_space_size[0],
learning_rate=0.02,
update_frequency=10
)
critic = Critic(
sess=sess,
observationDim=brain.vector_observation_space_size,
learning_rate=0.01,
gamma=0.95
)
sess.run(tf.global_variables_initializer())
time=0
gamma=0.9
for i_episode in range(5000):
step=0
discounted_reward=0
observation = env.reset(train_mode=True)[brain_name]
s=observation.vector_observations
while True:
time+=1
step+=1
action = np.squeeze(actor.choose_action(s), axis=0)
# print(action)
observation=env.step(action)[brain_name]
reward=np.array(observation.rewards)
        discounted_reward*=gamma # wrong -- see note below
discounted_reward+=reward[0]
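        # Note on the "wrong" flag above: scaling the running total by gamma
        # each step discounts the *earlier* rewards rather than the later
        # ones; a conventional discounted return would accumulate
        # discounted_reward += gamma ** (step - 1) * reward[0] instead.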
advantage = critic.learn(s,reward[np.newaxis,:],observation.vector_observations)
advantage=[[advantage]]
# print(advantage)
actor.learn(s,action[np.newaxis,:],advantage, time)
s=observation.vector_observations
if observation.local_done[0]:
print("episode:", i_episode," steps:", step," rewards:", discounted_reward)
break |
the-stack_0_231 | # Resource object code (Python 3)
# Created by: object code
# Created by: The Resource Compiler for Qt version 6.2.0
# WARNING! All changes made in this file will be lost!
from PySide6 import QtCore
qt_resource_data = b"\
\x00\x00\x0b\x1b\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<svg\x0a xm\
lns:dc=\x22http://p\
url.org/dc/eleme\
nts/1.1/\x22\x0a xml\
ns:cc=\x22http://cr\
eativecommons.or\
g/ns#\x22\x0a xmlns:\
rdf=\x22http://www.\
w3.org/1999/02/2\
2-rdf-syntax-ns#\
\x22\x0a xmlns:svg=\x22\
http://www.w3.or\
g/2000/svg\x22\x0a x\
mlns=\x22http://www\
.w3.org/2000/svg\
\x22\x0a xmlns:sodip\
odi=\x22http://sodi\
podi.sourceforge\
.net/DTD/sodipod\
i-0.dtd\x22\x0a xmln\
s:inkscape=\x22http\
://www.inkscape.\
org/namespaces/i\
nkscape\x22\x0a vers\
ion=\x221.1\x22\x0a wid\
th=\x2224\x22\x0a heigh\
t=\x2224\x22\x0a viewBo\
x=\x220 0 24 24\x22\x0a \
id=\x22svg845\x22\x0a \
sodipodi:docname\
=\x22content-paste-\
func_own.svg\x22\x0a \
inkscape:versio\
n=\x221.0.2-2 (e86c\
870879, 2021-01-\
15)\x22>\x0a <path\x0a \
id=\x22path1499\x22\
\x0a style=\x22str\
oke-width:1.0568\
3\x22\x0a d=\x22M 5.2\
636719 1.8613281\
A 2.2202235 2.0\
122117 0 0 0 3.0\
449219 3.8730469\
L 3.0449219 6.8\
925781 L 5.26367\
19 6.8925781 L 5\
.2636719 3.87304\
69 L 18.585938 3\
.8730469 L 18.58\
5938 9.8671875 L\
20.806641 9.867\
1875 L 20.806641\
3.8730469 C 20.\
806641 2.7562694\
19.807059 1.861\
3281 18.585938 1\
.8613281 L 5.263\
6719 1.8613281 z\
M 9.7050781 5.8\
867188 L 9.70507\
81 8.9042969 L 0\
.82421875 8.9042\
969 L 0.82421875\
10.916016 L 9.7\
050781 10.916016\
L 9.7050781 13.\
935547 L 14.1445\
31 9.9101562 L 9\
.7050781 5.88671\
88 z M 3.0449219\
12.927734 L 3.0\
449219 15.947266\
A 2.2202235 2.0\
122117 0 0 0 5.2\
636719 17.958984\
L 9.1777344 17.\
958984 L 9.17773\
44 15.947266 L 5\
.2636719 15.9472\
66 L 5.2636719 1\
2.927734 L 3.044\
9219 12.927734 z\
\x22 />\x0a <metadat\
a\x0a id=\x22metad\
ata851\x22>\x0a <rd\
f:RDF>\x0a <cc\
:Work\x0a r\
df:about=\x22\x22>\x0a \
<dc:format>\
image/svg+xml</d\
c:format>\x0a \
<dc:type\x0a \
rdf:resour\
ce=\x22http://purl.\
org/dc/dcmitype/\
StillImage\x22 />\x0a \
<dc:title\
></dc:title>\x0a \
</cc:Work>\x0a \
</rdf:RDF>\x0a <\
/metadata>\x0a <de\
fs\x0a id=\x22defs\
849\x22 />\x0a <sodip\
odi:namedview\x0a \
pagecolor=\x22#f\
fffff\x22\x0a bord\
ercolor=\x22#666666\
\x22\x0a borderopa\
city=\x221\x22\x0a ob\
jecttolerance=\x221\
0\x22\x0a gridtole\
rance=\x2210\x22\x0a \
guidetolerance=\x22\
10\x22\x0a inkscap\
e:pageopacity=\x220\
\x22\x0a inkscape:\
pageshadow=\x222\x22\x0a \
inkscape:win\
dow-width=\x221559\x22\
\x0a inkscape:w\
indow-height=\x2292\
7\x22\x0a id=\x22name\
dview847\x22\x0a s\
howgrid=\x22false\x22\x0a\
inkscape:zo\
om=\x2223.208333\x22\x0a \
inkscape:cx=\
\x2212\x22\x0a inksca\
pe:cy=\x2212\x22\x0a \
inkscape:window-\
x=\x220\x22\x0a inksc\
ape:window-y=\x220\x22\
\x0a inkscape:w\
indow-maximized=\
\x220\x22\x0a inkscap\
e:current-layer=\
\x22svg845\x22\x0a in\
kscape:document-\
rotation=\x220\x22 />\x0a\
<text\x0a xml\
:space=\x22preserve\
\x22\x0a style=\x22fo\
nt-size:8.23937p\
x;line-height:1.\
25;font-family:s\
ans-serif;stroke\
-width:1.02992\x22\x0a\
x=\x2212.07681\
8\x22\x0a y=\x2220.66\
8371\x22\x0a id=\x22t\
ext881\x22\x0a tra\
nsform=\x22scale(1.\
0118915,0.988248\
24)\x22><tspan\x0a \
sodipodi:role\
=\x22line\x22\x0a i\
d=\x22tspan879\x22\x0a \
x=\x2212.076818\
\x22\x0a y=\x2220.6\
68371\x22\x0a st\
yle=\x22font-style:\
italic;font-vari\
ant:normal;font-\
weight:bold;font\
-stretch:normal;\
font-size:12.359\
1px;font-family:\
Cambria;-inkscap\
e-font-specifica\
tion:'Cambria Bo\
ld Italic';strok\
e-width:1.02992\x22\
>fx</tspan></tex\
t>\x0a</svg>\x0a\
\x00\x00\x02\xf3\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?><!DOCTYPE s\
vg PUBLIC \x22-//W3\
C//DTD SVG 1.1//\
EN\x22 \x22http://www.\
w3.org/Graphics/\
SVG/1.1/DTD/svg1\
1.dtd\x22><svg xmln\
s=\x22http://www.w3\
.org/2000/svg\x22 x\
mlns:xlink=\x22http\
://www.w3.org/19\
99/xlink\x22 versio\
n=\x221.1\x22 width=\x222\
4\x22 height=\x2224\x22 v\
iewBox=\x220 0 24 2\
4\x22><path d=\x22M14.\
73,20.83L17.58,1\
8L14.73,15.17L16\
.15,13.76L19,16.\
57L21.8,13.76L23\
.22,15.17L20.41,\
18L23.22,20.83L2\
1.8,22.24L19,19.\
4L16.15,22.24L14\
.73,20.83M13,19.\
88C13.04,20.18 1\
2.94,20.5 12.71,\
20.71C12.32,21.1\
11.69,21.1 11.3\
,20.71L7.29,16.7\
C7.06,16.47 6.96\
,16.16 7,15.87V1\
0.75L2.21,4.62C1\
.87,4.19 1.95,3.\
56 2.38,3.22C2.5\
7,3.08 2.78,3 3,\
3V3H17V3C17.22,3\
17.43,3.08 17.6\
2,3.22C18.05,3.5\
6 18.13,4.19 17.\
79,4.62L13,10.75\
V19.88M5.04,5L9,\
10.06V15.58L11,1\
7.58V10.05L14.96\
,5H5.04Z\x22 /></sv\
g>\
\x00\x00\x09\xfd\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0d\x0a<svg\x0d\x0a \
xmlns:dc=\x22http:/\
/purl.org/dc/ele\
ments/1.1/\x22\x0d\x0a \
xmlns:cc=\x22http:/\
/creativecommons\
.org/ns#\x22\x0d\x0a xm\
lns:rdf=\x22http://\
www.w3.org/1999/\
02/22-rdf-syntax\
-ns#\x22\x0d\x0a xmlns:\
svg=\x22http://www.\
w3.org/2000/svg\x22\
\x0d\x0a xmlns=\x22http\
://www.w3.org/20\
00/svg\x22\x0d\x0a xmln\
s:sodipodi=\x22http\
://sodipodi.sour\
ceforge.net/DTD/\
sodipodi-0.dtd\x22\x0d\
\x0a xmlns:inksca\
pe=\x22http://www.i\
nkscape.org/name\
spaces/inkscape\x22\
\x0d\x0a version=\x221.\
1\x22\x0d\x0a width=\x2224\
\x22\x0d\x0a height=\x2224\
\x22\x0d\x0a viewBox=\x220\
0 24 24\x22\x0d\x0a id\
=\x22svg871\x22\x0d\x0a so\
dipodi:docname=\x22\
invert_own.svg\x22\x0d\
\x0a inkscape:ver\
sion=\x221.0.2-2 (e\
86c870879, 2021-\
01-15)\x22>\x0d\x0a <met\
adata\x0d\x0a id=\x22\
metadata877\x22>\x0d\x0a \
<rdf:RDF>\x0d\x0a \
<cc:Work\x0d\x0a \
rdf:about\
=\x22\x22>\x0d\x0a <d\
c:format>image/s\
vg+xml</dc:forma\
t>\x0d\x0a <dc:\
type\x0d\x0a \
rdf:resource=\x22h\
ttp://purl.org/d\
c/dcmitype/Still\
Image\x22 />\x0d\x0a \
<dc:title />\x0d\
\x0a </cc:Work\
>\x0d\x0a </rdf:RDF\
>\x0d\x0a </metadata>\
\x0d\x0a <defs\x0d\x0a \
id=\x22defs875\x22 />\x0d\
\x0a <sodipodi:nam\
edview\x0d\x0a pag\
ecolor=\x22#ffffff\x22\
\x0d\x0a bordercol\
or=\x22#666666\x22\x0d\x0a \
borderopacity\
=\x221\x22\x0d\x0a objec\
ttolerance=\x2210\x22\x0d\
\x0a gridtolera\
nce=\x2210\x22\x0d\x0a g\
uidetolerance=\x221\
0\x22\x0d\x0a inkscap\
e:pageopacity=\x220\
\x22\x0d\x0a inkscape\
:pageshadow=\x222\x22\x0d\
\x0a inkscape:w\
indow-width=\x22135\
8\x22\x0d\x0a inkscap\
e:window-height=\
\x22927\x22\x0d\x0a id=\x22\
namedview873\x22\x0d\x0a \
showgrid=\x22fa\
lse\x22\x0d\x0a inksc\
ape:zoom=\x2212.521\
682\x22\x0d\x0a inksc\
ape:cx=\x2222.20088\
3\x22\x0d\x0a inkscap\
e:cy=\x2212.69278\x22\x0d\
\x0a inkscape:w\
indow-x=\x22476\x22\x0d\x0a \
inkscape:win\
dow-y=\x2255\x22\x0d\x0a \
inkscape:window\
-maximized=\x220\x22\x0d\x0a\
inkscape:cu\
rrent-layer=\x22svg\
871\x22\x0d\x0a inksc\
ape:document-rot\
ation=\x220\x22 />\x0d\x0a \
<path\x0d\x0a id=\x22\
path1451\x22\x0d\x0a \
style=\x22stroke-wi\
dth:0.812468\x22\x0d\x0a \
d=\x22m 5.90861\
81,2.0811647 c -\
2.2861249,0 -4.1\
421232,1.7863062\
-4.1421232,3.98\
58465 v 6.375866\
8 c 0,2.199539 1\
.8559983,3.98398\
7 4.1421232,3.98\
3987 H 7.321002 \
v -4.621946 c 0,\
-2.1995404 1.854\
135,-3.9858464 4\
.14026,-3.985846\
4 h 5.215386 V 6\
.0670112 c 0,-2.\
1995403 -1.85599\
8,-3.9858465 -4.\
142123,-3.985846\
5 z\x22 />\x0d\x0a <path\
\x0d\x0a d=\x22m 12.5\
45747,9.1785429 \
h 5.921856 c 2.0\
43043,0 3.70116,\
1.6002091 3.7011\
6,3.5718951 v 5.\
715032 c 0,1.971\
686 -1.658117,3.\
571897 -3.70116,\
3.571897 h -5.92\
1856 c -2.043034\
,0 -3.70116,-1.6\
00211 -3.70116,-\
3.571897 v -5.71\
5032 c 0,-1.9716\
86 1.658126,-3.5\
718951 3.70116,-\
3.5718951 m 0,1.\
4287581 c -1.228\
78,0 -2.220696,0\
.957267 -2.22069\
6,2.143137 v 5.7\
15032 c 0,1.1858\
71 0.991916,2.14\
3138 2.220696,2.\
143138 h 5.92185\
6 c 1.22879,0 2.\
220696,-0.957267\
2.220696,-2.143\
138 v -5.715032 \
c 0,-1.18587 -0.\
991906,-2.143137\
-2.220696,-2.14\
3137 z\x22\x0d\x0a id\
=\x22path1462\x22\x0d\x0a \
style=\x22stroke-\
width:0.727191\x22 \
/>\x0d\x0a</svg>\x0d\x0a\
\x00\x00\x01\x94\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?><!DOCTYPE s\
vg PUBLIC \x22-//W3\
C//DTD SVG 1.1//\
EN\x22 \x22http://www.\
w3.org/Graphics/\
SVG/1.1/DTD/svg1\
1.dtd\x22><svg xmln\
s=\x22http://www.w3\
.org/2000/svg\x22 x\
mlns:xlink=\x22http\
://www.w3.org/19\
99/xlink\x22 versio\
n=\x221.1\x22 width=\x222\
4\x22 height=\x2224\x22 v\
iewBox=\x220 0 24 2\
4\x22><path d=\x22M19,\
6.41L17.59,5L12,\
10.59L6.41,5L5,6\
.41L10.59,12L5,1\
7.59L6.41,19L12,\
13.41L17.59,19L1\
9,17.59L13.41,12\
L19,6.41Z\x22 /></s\
vg>\
\x00\x00\x05\xf8\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?><!DOCTYPE s\
vg PUBLIC \x22-//W3\
C//DTD SVG 1.1//\
EN\x22 \x22http://www.\
w3.org/Graphics/\
SVG/1.1/DTD/svg1\
1.dtd\x22><svg xmln\
s=\x22http://www.w3\
.org/2000/svg\x22 x\
mlns:xlink=\x22http\
://www.w3.org/19\
99/xlink\x22 versio\
n=\x221.1\x22 width=\x222\
4\x22 height=\x2224\x22 v\
iewBox=\x220 0 24 2\
4\x22><path d=\x22M12,\
8A4,4 0 0,1 16,1\
2A4,4 0 0,1 12,1\
6A4,4 0 0,1 8,12\
A4,4 0 0,1 12,8M\
12,10A2,2 0 0,0 \
10,12A2,2 0 0,0 \
12,14A2,2 0 0,0 \
14,12A2,2 0 0,0 \
12,10M10,22C9.75\
,22 9.54,21.82 9\
.5,21.58L9.13,18\
.93C8.5,18.68 7.\
96,18.34 7.44,17\
.94L4.95,18.95C4\
.73,19.03 4.46,1\
8.95 4.34,18.73L\
2.34,15.27C2.21,\
15.05 2.27,14.78\
2.46,14.63L4.57\
,12.97L4.5,12L4.\
57,11L2.46,9.37C\
2.27,9.22 2.21,8\
.95 2.34,8.73L4.\
34,5.27C4.46,5.0\
5 4.73,4.96 4.95\
,5.05L7.44,6.05C\
7.96,5.66 8.5,5.\
32 9.13,5.07L9.5\
,2.42C9.54,2.18 \
9.75,2 10,2H14C1\
4.25,2 14.46,2.1\
8 14.5,2.42L14.8\
7,5.07C15.5,5.32\
16.04,5.66 16.5\
6,6.05L19.05,5.0\
5C19.27,4.96 19.\
54,5.05 19.66,5.\
27L21.66,8.73C21\
.79,8.95 21.73,9\
.22 21.54,9.37L1\
9.43,11L19.5,12L\
19.43,13L21.54,1\
4.63C21.73,14.78\
21.79,15.05 21.\
66,15.27L19.66,1\
8.73C19.54,18.95\
19.27,19.04 19.\
05,18.95L16.56,1\
7.95C16.04,18.34\
15.5,18.68 14.8\
7,18.93L14.5,21.\
58C14.46,21.82 1\
4.25,22 14,22H10\
M11.25,4L10.88,6\
.61C9.68,6.86 8.\
62,7.5 7.85,8.39\
L5.44,7.35L4.69,\
8.65L6.8,10.2C6.\
4,11.37 6.4,12.6\
4 6.8,13.8L4.68,\
15.36L5.43,16.66\
L7.86,15.62C8.63\
,16.5 9.68,17.14\
10.87,17.38L11.\
24,20H12.76L13.1\
3,17.39C14.32,17\
.14 15.37,16.5 1\
6.14,15.62L18.57\
,16.66L19.32,15.\
36L17.2,13.81C17\
.6,12.64 17.6,11\
.37 17.2,10.2L19\
.31,8.65L18.56,7\
.35L16.15,8.39C1\
5.38,7.5 14.32,6\
.86 13.12,6.62L1\
2.75,4H11.25Z\x22 /\
></svg>\
\x00\x00\x09\x8b\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<svg\x0a xm\
lns:dc=\x22http://p\
url.org/dc/eleme\
nts/1.1/\x22\x0a xml\
ns:cc=\x22http://cr\
eativecommons.or\
g/ns#\x22\x0a xmlns:\
rdf=\x22http://www.\
w3.org/1999/02/2\
2-rdf-syntax-ns#\
\x22\x0a xmlns:svg=\x22\
http://www.w3.or\
g/2000/svg\x22\x0a x\
mlns=\x22http://www\
.w3.org/2000/svg\
\x22\x0a xmlns:sodip\
odi=\x22http://sodi\
podi.sourceforge\
.net/DTD/sodipod\
i-0.dtd\x22\x0a xmln\
s:inkscape=\x22http\
://www.inkscape.\
org/namespaces/i\
nkscape\x22\x0a vers\
ion=\x221.1\x22\x0a wid\
th=\x2224\x22\x0a heigh\
t=\x2224\x22\x0a viewBo\
x=\x220 0 24 24\x22\x0a \
id=\x22svg845\x22\x0a \
sodipodi:docname\
=\x22content-import\
-fx_own.svg\x22\x0a \
inkscape:version\
=\x221.0.2-2 (e86c8\
70879, 2021-01-1\
5)\x22>\x0a <metadata\
\x0a id=\x22metada\
ta851\x22>\x0a <rdf\
:RDF>\x0a <cc:\
Work\x0a rd\
f:about=\x22\x22>\x0a \
<dc:format>i\
mage/svg+xml</dc\
:format>\x0a \
<dc:type\x0a \
rdf:resourc\
e=\x22http://purl.o\
rg/dc/dcmitype/S\
tillImage\x22 />\x0a \
<dc:title \
/>\x0a </cc:Wo\
rk>\x0a </rdf:RD\
F>\x0a </metadata>\
\x0a <defs\x0a id\
=\x22defs849\x22 />\x0a \
<sodipodi:namedv\
iew\x0a pagecol\
or=\x22#ffffff\x22\x0a \
bordercolor=\x22#\
666666\x22\x0a bor\
deropacity=\x221\x22\x0a \
objecttolera\
nce=\x2210\x22\x0a gr\
idtolerance=\x2210\x22\
\x0a guidetoler\
ance=\x2210\x22\x0a i\
nkscape:pageopac\
ity=\x220\x22\x0a ink\
scape:pageshadow\
=\x222\x22\x0a inksca\
pe:window-width=\
\x221559\x22\x0a inks\
cape:window-heig\
ht=\x22927\x22\x0a id\
=\x22namedview847\x22\x0a\
showgrid=\x22f\
alse\x22\x0a inksc\
ape:zoom=\x2223.208\
333\x22\x0a inksca\
pe:cx=\x2212\x22\x0a \
inkscape:cy=\x2212\x22\
\x0a inkscape:w\
indow-x=\x220\x22\x0a \
inkscape:window\
-y=\x220\x22\x0a inks\
cape:window-maxi\
mized=\x220\x22\x0a i\
nkscape:current-\
layer=\x22svg845\x22\x0a \
inkscape:doc\
ument-rotation=\x22\
0\x22 />\x0a <path\x0a \
id=\x22path843\x22\x0a\
d=\x22M 12 0 C\
10.7 0 9.599687\
5 0.84 9.1796875\
2 L 5 2 A 2 2 0\
0 0 3 4 L 3 20 \
A 2 2 0 0 0 5 22\
L 7.109375 22 L\
7.109375 20 L 5\
20 L 5 4 L 7 4 \
L 7 7 L 17 7 L 1\
7 4 L 19 4 L 19 \
10.642578 L 21 1\
0.642578 L 21 4 \
A 2 2 0 0 0 19 2\
L 14.820312 2 C\
14.400315 0.84 \
13.3 0 12 0 z M \
12 2 A 1 1 0 0 1\
13 3 A 1 1 0 0 \
1 12 4 A 1 1 0 0\
1 11 3 A 1 1 0 \
0 1 12 2 z \x22 />\x0a\
<text\x0a xml\
:space=\x22preserve\
\x22\x0a style=\x22fo\
nt-size:8.23937p\
x;line-height:1.\
25;font-family:s\
ans-serif;stroke\
-width:1.02992\x22\x0a\
x=\x2211.35293\
1\x22\x0a y=\x2220.84\
2772\x22\x0a id=\x22t\
ext881\x22\x0a tra\
nsform=\x22scale(1.\
0118915,0.988248\
24)\x22><tspan\x0a \
sodipodi:role\
=\x22line\x22\x0a i\
d=\x22tspan879\x22\x0a \
x=\x2211.352931\
\x22\x0a y=\x2220.8\
42772\x22\x0a st\
yle=\x22font-style:\
italic;font-vari\
ant:normal;font-\
weight:bold;font\
-stretch:normal;\
font-size:12.359\
1px;font-family:\
Cambria;-inkscap\
e-font-specifica\
tion:'Cambria Bo\
ld Italic';strok\
e-width:1.02992\x22\
>fx</tspan></tex\
t>\x0a</svg>\x0a\
\x00\x00\x0b\xd2\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0d\x0a<svg\x0d\x0a \
xmlns:dc=\x22http:/\
/purl.org/dc/ele\
ments/1.1/\x22\x0d\x0a \
xmlns:cc=\x22http:/\
/creativecommons\
.org/ns#\x22\x0d\x0a xm\
lns:rdf=\x22http://\
www.w3.org/1999/\
02/22-rdf-syntax\
-ns#\x22\x0d\x0a xmlns:\
svg=\x22http://www.\
w3.org/2000/svg\x22\
\x0d\x0a xmlns=\x22http\
://www.w3.org/20\
00/svg\x22\x0d\x0a xmln\
s:sodipodi=\x22http\
://sodipodi.sour\
ceforge.net/DTD/\
sodipodi-0.dtd\x22\x0d\
\x0a xmlns:inksca\
pe=\x22http://www.i\
nkscape.org/name\
spaces/inkscape\x22\
\x0d\x0a version=\x221.\
1\x22\x0d\x0a width=\x2224\
\x22\x0d\x0a height=\x2224\
\x22\x0d\x0a viewBox=\x220\
0 24 24\x22\x0d\x0a id\
=\x22svg1501\x22\x0d\x0a s\
odipodi:docname=\
\x22import-csv_own.\
svg\x22\x0d\x0a inkscap\
e:version=\x221.0.2\
-2 (e86c870879, \
2021-01-15)\x22>\x0d\x0a \
<metadata\x0d\x0a \
id=\x22metadata150\
7\x22>\x0d\x0a <rdf:RD\
F>\x0d\x0a <cc:Wo\
rk\x0d\x0a rdf\
:about=\x22\x22>\x0d\x0a \
<dc:format>i\
mage/svg+xml</dc\
:format>\x0d\x0a \
<dc:type\x0d\x0a \
rdf:resou\
rce=\x22http://purl\
.org/dc/dcmitype\
/StillImage\x22 />\x0d\
\x0a <dc:tit\
le></dc:title>\x0d\x0a\
</cc:Work>\
\x0d\x0a </rdf:RDF>\
\x0d\x0a </metadata>\x0d\
\x0a <defs\x0d\x0a i\
d=\x22defs1505\x22 />\x0d\
\x0a <sodipodi:nam\
edview\x0d\x0a pag\
ecolor=\x22#ffffff\x22\
\x0d\x0a bordercol\
or=\x22#666666\x22\x0d\x0a \
borderopacity\
=\x221\x22\x0d\x0a objec\
ttolerance=\x2210\x22\x0d\
\x0a gridtolera\
nce=\x2210\x22\x0d\x0a g\
uidetolerance=\x221\
0\x22\x0d\x0a inkscap\
e:pageopacity=\x220\
\x22\x0d\x0a inkscape\
:pageshadow=\x222\x22\x0d\
\x0a inkscape:w\
indow-width=\x22172\
0\x22\x0d\x0a inkscap\
e:window-height=\
\x22952\x22\x0d\x0a id=\x22\
namedview1503\x22\x0d\x0a\
showgrid=\x22f\
alse\x22\x0d\x0a inks\
cape:zoom=\x2230.70\
8333\x22\x0d\x0a inks\
cape:cx=\x228.61032\
64\x22\x0d\x0a inksca\
pe:cy=\x2212.194623\
\x22\x0d\x0a inkscape\
:window-x=\x220\x22\x0d\x0a \
inkscape:win\
dow-y=\x220\x22\x0d\x0a \
inkscape:window-\
maximized=\x220\x22\x0d\x0a \
inkscape:cur\
rent-layer=\x22svg1\
501\x22 />\x0d\x0a <path\
\x0d\x0a d=\x22M 14.0\
51328,8.6485752 \
9.6108804,4.6241\
52 V 7.6424694 H\
0.72998644 V 9.\
6546808 H 9.6108\
804 V 12.672999 \
M 20.711998,14.6\
85211 V 2.611940\
3 c 0,-1.1167775\
-0.9991,-2.0122\
1167 -2.220222,-\
2.01221167 H 5.1\
704335 A 2.22022\
35,2.0122117 0 0\
0 2.9502099,2.6\
119403 V 5.63025\
78 H 5.1704335 V\
2.6119403 H 18.\
491776 V 14.6852\
11 H 5.1704335 V\
11.666893 H 2.9\
502099 v 3.01831\
8 a 2.2202235,2.\
0122117 0 0 0 2.\
2202236,2.012212\
H 18.491776 a 2\
.2202235,2.01221\
17 0 0 0 2.22022\
2,-2.012212 z\x22\x0d\x0a\
id=\x22path149\
9\x22\x0d\x0a style=\x22\
stroke-width:1.0\
5683\x22 />\x0d\x0a <tex\
t\x0d\x0a xml:spac\
e=\x22preserve\x22\x0d\x0a \
style=\x22font-s\
tyle:normal;font\
-weight:normal;f\
ont-size:5.33333\
333px;line-heigh\
t:1.25;font-fami\
ly:sans-serif;fi\
ll:#000000;fill-\
opacity:1;stroke\
:none;\x22\x0d\x0a x=\
\x229.1831751\x22\x0d\x0a \
y=\x2219.082767\x22\x0d\
\x0a id=\x22text15\
11\x22><tspan\x0d\x0a \
sodipodi:role\
=\x22line\x22\x0d\x0a \
id=\x22tspan1509\x22\x0d\x0a\
x=\x229.1831\
751\x22\x0d\x0a y=\x22\
19.082767\x22></tsp\
an></text>\x0d\x0a <t\
ext\x0d\x0a xml:sp\
ace=\x22preserve\x22\x0d\x0a\
style=\x22font\
-size:9.19055px;\
line-height:0;fo\
nt-family:sans-s\
erif;stroke-widt\
h:3.44647\x22\x0d\x0a \
x=\x220.19669378\x22\x0d\
\x0a y=\x2227.3268\
41\x22\x0d\x0a id=\x22te\
xt1519\x22\x0d\x0a tr\
ansform=\x22scale(1\
.1595229,0.86242\
367)\x22><tspan\x0d\x0a \
sodipodi:ro\
le=\x22line\x22\x0d\x0a \
id=\x22tspan1517\x22\
\x0d\x0a x=\x220.19\
669378\x22\x0d\x0a \
y=\x2227.326841\x22\x0d\x0a \
style=\x22fon\
t-style:normal;f\
ont-variant:norm\
al;font-weight:b\
old;font-stretch\
:normal;line-hei\
ght:1.25;font-fa\
mily:sans-serif;\
-inkscape-font-s\
pecification:'sa\
ns-serif Bold';s\
troke-width:3.44\
647\x22>CSV</tspan>\
</text>\x0d\x0a</svg>\x0d\
\x0a\
\x00\x00\x01\x8a\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?><!DOCTYPE s\
vg PUBLIC \x22-//W3\
C//DTD SVG 1.1//\
EN\x22 \x22http://www.\
w3.org/Graphics/\
SVG/1.1/DTD/svg1\
1.dtd\x22><svg xmln\
s=\x22http://www.w3\
.org/2000/svg\x22 x\
mlns:xlink=\x22http\
://www.w3.org/19\
99/xlink\x22 versio\
n=\x221.1\x22 width=\x222\
4\x22 height=\x2224\x22 v\
iewBox=\x220 0 24 2\
4\x22><path d=\x22M6,1\
9A2,2 0 0,0 8,21\
H16A2,2 0 0,0 18\
,19V7H6V19M8,9H1\
6V19H8V9M15.5,4L\
14.5,3H9.5L8.5,4\
H5V6H19V4H15.5Z\x22\
/></svg>\
\x00\x00\x01\xb2\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?><!DOCTYPE s\
vg PUBLIC \x22-//W3\
C//DTD SVG 1.1//\
EN\x22 \x22http://www.\
w3.org/Graphics/\
SVG/1.1/DTD/svg1\
1.dtd\x22><svg xmln\
s=\x22http://www.w3\
.org/2000/svg\x22 x\
mlns:xlink=\x22http\
://www.w3.org/19\
99/xlink\x22 versio\
n=\x221.1\x22 width=\x222\
4\x22 height=\x2224\x22 v\
iewBox=\x220 0 24 2\
4\x22><path d=\x22M5,4\
H19A2,2 0 0,1 21\
,6V18A2,2 0 0,1 \
19,20H5A2,2 0 0,\
1 3,18V6A2,2 0 0\
,1 5,4M5,8V12H11\
V8H5M13,8V12H19V\
8H13M5,14V18H11V\
14H5M13,14V18H19\
V14H13Z\x22 /></svg\
>\
\x00\x00\x01\xe2\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?><!DOCTYPE s\
vg PUBLIC \x22-//W3\
C//DTD SVG 1.1//\
EN\x22 \x22http://www.\
w3.org/Graphics/\
SVG/1.1/DTD/svg1\
1.dtd\x22><svg xmln\
s=\x22http://www.w3\
.org/2000/svg\x22 x\
mlns:xlink=\x22http\
://www.w3.org/19\
99/xlink\x22 versio\
n=\x221.1\x22 width=\x222\
4\x22 height=\x2224\x22 v\
iewBox=\x220 0 24 2\
4\x22><path d=\x22M19,\
3H5A2,2 0 0,0 3,\
5V19A2,2 0 0,0 5\
,21H19A2,2 0 0,0\
21,19V5A2,2 0 0\
,0 19,3M19,19H5V\
5H19V19M17,8.4L1\
3.4,12L17,15.6L1\
5.6,17L12,13.4L8\
.4,17L7,15.6L10.\
6,12L7,8.4L8.4,7\
L12,10.6L15.6,7L\
17,8.4Z\x22 /></svg\
>\
\x00\x00\x02g\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?><!DOCTYPE s\
vg PUBLIC \x22-//W3\
C//DTD SVG 1.1//\
EN\x22 \x22http://www.\
w3.org/Graphics/\
SVG/1.1/DTD/svg1\
1.dtd\x22><svg xmln\
s=\x22http://www.w3\
.org/2000/svg\x22 x\
mlns:xlink=\x22http\
://www.w3.org/19\
99/xlink\x22 versio\
n=\x221.1\x22 width=\x222\
4\x22 height=\x2224\x22 v\
iewBox=\x220 0 24 2\
4\x22><path d=\x22M15,\
19.88C15.04,20.1\
8 14.94,20.5 14.\
71,20.71C14.32,2\
1.1 13.69,21.1 1\
3.3,20.71L9.29,1\
6.7C9.06,16.47 8\
.96,16.16 9,15.8\
7V10.75L4.21,4.6\
2C3.87,4.19 3.95\
,3.56 4.38,3.22C\
4.57,3.08 4.78,3\
5,3V3H19V3C19.2\
2,3 19.43,3.08 1\
9.62,3.22C20.05,\
3.56 20.13,4.19 \
19.79,4.62L15,10\
.75V19.88M7.04,5\
L11,10.06V15.58L\
13,17.58V10.05L1\
6.96,5H7.04Z\x22 />\
</svg>\
\x00\x00\x01n\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?><!DOCTYPE s\
vg PUBLIC \x22-//W3\
C//DTD SVG 1.1//\
EN\x22 \x22http://www.\
w3.org/Graphics/\
SVG/1.1/DTD/svg1\
1.dtd\x22><svg xmln\
s=\x22http://www.w3\
.org/2000/svg\x22 x\
mlns:xlink=\x22http\
://www.w3.org/19\
99/xlink\x22 versio\
n=\x221.1\x22 width=\x222\
4\x22 height=\x2224\x22 v\
iewBox=\x220 0 24 2\
4\x22><path d=\x22M11,\
4H13V16L18.5,10.\
5L19.92,11.92L12\
,19.84L4.08,11.9\
2L5.5,10.5L11,16\
V4Z\x22 /></svg>\
\x00\x00\x92\xc2\
\x89\
PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\
\x00\x02\x00\x00\x00\x02\x00\x08\x02\x00\x00\x00{\x1aC\xad\
\x00\x00\x00\x01sRGB\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x04gAMA\x00\x00\xb1\x8f\x0b\xfca\x05\x00\x00\x00\
\x09pHYs\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7o\
\xa8d\x00\x00\x92WIDATx^\xed\x9d\x07`\
\x1c\xc5\xd5\xc7wv\xaf\xaa7\xdb*\x96+\xee\xb8\x00\
\xee\xc6\x06SM\x87PC \x10 \x01\x02\xa1\x84\x12\
\xc2G\x80@\x12j\x80$\x94@ \x94\x00!\xf4b\
\xc0\x06\xe3\x0666\x06\x83\x1b\xb87\xf5^O\xd7\xb6\
|\xef\xcd\x8e\x84e\xe9\xa4k\xaa\xfb~:\xddm\x99\
\x9d\x9d\x9d\xf2\xfe3\xb3\xb3\xb3\xcc0\x0c\x89 \x08\x82\
\xb0\x1e\xb2\xf8%\x08\x82 ,\x06\x09\x00A\x10\x84E\
!\x01 \x08\x82\xb0($\x00\x04A\x10\x16\x85\x04\x80\
\x08\xc2\xa2\x90\x00\x10\x04AX\x14\x12\x00\x82 \x08\
\x8bB\x02@\x10\x04aQH\x00\x08\x82 ,\x0a\x09\
\x00A\x10\x84E!\x01 \x08\x82\xb0($\x00\x04A\
\x10\x16\x85\x04\x80 \x08\xc2\xa2\x90\x00\x10\x04AX\x14\
\x12\x00\x82 \x08\x8bB\x02@\x10\x04aQH\x00\x08\
\x82 ,\x0a\x09\x00A\x10\x84E!\x01 \x08\x82\xb0\
($\x00\x04A\x10\x16\x85\x04\x80 \x08\xc2\xa2\x90\x00\
]\xcdG\xfb\xbd{\x1at\xb0\x16\xcd9\x01\xeb\x1a\xf0\
\x09h\xd2\xc6\x1a\x8d\xdfB\x0bw\xf8\x99\xe9\xaa!\xa0\
\xd5\x05c\x9d\xc9\x1cLg|\xf3d\x17\x09\x00X\xfe\
\x80\xff\x87\x97\xd4\x15W\x06?\xbb8\xf8\xc9\x05\xea\xa7\
\x17i\xb0\xfc\xfd\xbfUU\x8b\xe6\x09\xa0\xd0\xac-\xf5\
\x5c\xffyY\x13\x9au\x88\x9e\x90>C\x9cy\x82\xd2\
U_T-\xde]\xc3\xd7\xc2%\xc9a{t\xde\xc0\
KG'\xc88\x18\x04\xf2\x82y\xfbE\xdfZ\xd9\xc0\
\xfd\x89\xe7\xb5D\x0a\xcb9\xb2n\xf85b%J\x98\
\xb1\xed%\xadvgw^\x07\x8f}\x9c\xd8\xe2\x99\xa3\
s\x14|F&\xf6amL\xe7\x0f\x96y5\xe3\xff\
\xd6V\x9d\xf1\xfe\xbe\x1bW\x16o\xac\xf4\x06\xb1\x83\x8b\
\xd77Z'8\x14\xa1\x96RT\x5c\xeb\xf9\xf3\xba\xca\
s?.\xbcnuuS\x80'.FEO&+\
\xd1\xf5\xf0Z\x01\xd8\xfd\xc6\x86\xffm\xab\xfe\xf5\xb2\xa2\
s?*<\xe1\xfd\xfd\xb7\xac\xad\xdaP\x1d\x08\x88\xd6\
0|\xb7\xce7\x00\xc3\x97\xb6\x7f\xb4\xaf\x91w&\xb7\
\xd9\xdb\x01\x86\xf4\xd1\x8e\x9a\x9a@\xac\xbd\x94\x0ac6\
\xcc\xd2p\xeaH\xce\x1e\x9ax\x0a\x00\xd7R\xdd\xd0\xb4\
`\xdd>}\xd1\xe9\xca7wJ\x85\x8b\x95\xaa\xf5\x8a\
g\x9b\x5c\xfb\xad\x5c\xbcD\xfa\xe6\x1e\xe3\xfdYF\xd5\
f\xacv\xc52\x896\xa4\x0f\x9c\xc80\xf6\xd7\xfb.\
\xfa\xac\xac\xdc\x8b\xf5=s{h\xc0@\x18\xd5\x01\xfd\
\x8eo\xea\x1a\x22\xea\xc2c,\xd1\xe9\xf8\xbf\x19\xb9\x0b\
\x86%\xa1q\xc0S\xa0\x08\x8f\x1f\x90\xc8W\xe2\x93\x0c\
Q\xa2\xf9S\xca\xdf\x13\xcb\x91\x03u\x1a|\xb2,P\
\xaf\x7f~%\xber\xa0\xdb\xe0\xd9\x97I\xf2\xa8t\xe7\
q\xd9n\xc8\xd5b{\xb4\xa0_\x06\xe6d\xfc\x95\xa4\
}>\xe9\xd5\x1d\x8d'\xbcWt\xc4k{\xee\xfd\xaa\
r\xc9\xe6\xbd{\xeb\xbc\xc5\x9e`y\x93Z\xd1\xa4\x96\
{\x82E\x8d\xc1\x8d\xc55/\xaf\xdd1\xff\xad\xfd\xd3\
\xdf*\xfa\xdb\x86\xea\x8d\xd5\x01\xcc\x8e\xdc,\xf0O\x8f\
&k\x0f\xd1\xff/\x1b\xbb$to0X\xea\x09,\
\xf9\xa1\xf8\x82E\x05\x87\xbd^r\xc3\x17Uo\xedn\
\xdaX\x13\xf0\xa8P\x9e\xb1s\x82G\x82\x99E!J\
Z\x03E\xdf\x90>-\xf0\xac.\xf6p\xf3\xc5-^\
\xe8\x01\x08\xe6^p\xb8\xad\xc6\xf7\xd8\xf7M\x90\xc3\xc4\
\x8eHA\x8b\x87\xdf6\x19>\x10.\xb4~bWl\
\xc4\xb9\x05\x00\x81\xf2Wl4\x96]$Um\x850\
c=\x8b\xff\xf1\xf6\x13\x8fTO\xb9\xbe\xfc\x0a\xad\x84\
\xbf\xdf*\xdak\xe0\x87\xa1\x87\xcfo\xa9n\xf0r\xcb\
\xc5\xebt\x00.\xb7\x0b\x06\x01\xcf_\xdc\x18\xb8sM\
\xb9\xaaa\xeb/\x1cLo\x93\x1c\xf2\xdf\xe6e\xffz\
|R\xa2]\xb1+\xec\xa7\xc3\x9c\xbf\x99\x92\x19uj\
\xc6\x0b\xa3|-\xab\xdf-V\xa2\x00\xe3\x03\xfe\x0c\xa9\
a\x7f`\xd7\xfbbc\xd7\x83)\x81H\x8a\xcc~6\
&\x19\xb2\x89\xd8\x11\x03\xe8\x05\xbf\x9c\x16 \x93\x94\xfa\
\xf4\x7fl\xaa\xbah\x8dw\xf6\xeb\x05G\xbe\xb9\xeb\xb8\
w\xf7\x9e\xf0\xee\xdec\xde\xd9;\xfb\x8d\xdd'|T\
v\xf3&ms\x8d\xaf\xf9\xb1J<\xd8\xcc\xa8f\xd8\
\x88\xfe\x07d\xb4W\xb7T\xfc|q\xe1q\xef\x16\x5c\
\xbc\xbaqi\x91\xd7\xc3{\xe51\xd5\xcdt\xe7\xdf\x22\
\x0b\xf0\x05q\xa4\x80oa\x0cZ\xac\xbfZZ\xb2\x1e\
*\x9e\xb0\x0ds/\x8e\x14\x0f\x01\xf6\x18|S\xd6p\
\xe5\xd2\x12\x8f\xae\xc5\x94\xd3\xf9\xb1i\x0e%\xdd\xd9\x8e\
0EM\x9c\x05@\xf5\xd4\xc9k\xafc\x0d\xfb\xf9\xa5\
\xb6w\xb9\xbad4\x95\xea+\xaeP\x1bJc\x88\x0e\
\xd0?C\xd5\x8c\xf7\xf6b[\x8c\xfb\x13v\x940\xf6\
q\x81wOC\x10\xe5'\x92xLv\xc8\x7f\x98\x99\
\xbb\xed\xa2\x91\xbb/:\xe4\xd1\xf9C\xd3\xdd\xf68&\
C\x14@\xe8\xfd\xfb\x96\x1b\xc18\xccQ\xc14\x95\xed\
\xff\xc0h~\x9cXl\xedz \xf3\x1d7\x22}A\
~\x02_\x83\xf3\xc6\xed\xd4\xe8\x11\xfc\xf3G8t\x83\
A\xb3\x1d\x12\xbc\xa4I-\xf2\xa8\xe5^\xcd\xa3\x99\xdd\
\xb726\x80\xf8P\xdf\xe8s\x22\xd1\x870\x8c\x12\x9f\
\xb4\xb2\xd4W\xee\x0dB\xf2c{\x8f\xdb\xf9\xc81*\
\xfc\xfaU\xcbKwV5\xf951z\x02\xbe\xf9G\
,\xf2\x7f\xa3\xde\x1f|i}\xd1\xd9\x8bK\xbf\xafU\
1\xbf\xf3\xdd\xd1\x02\xed\x07}d\xb2\x9cj\xc7`\xb7\
k\x5c\xa3 \x9e\x02\x80\xcf\xe3\xec|Q\xaa\xdb\xc7\xd7\
\xc0,\xb7w\xb9\x0a6\xb1\x98\xda\xa8m\x7f%\xa6\xd1\
\xf4\x8c}SRo\xbe !\xfc\xa80\x95\xa21\xa0\
o.\xf7\x80\x12\x99)\x17\x1ep(c\xb2\xec\xb0\xc9\
N\x9b,\xf3{BQf\x9ex\xa1\x06\xed\x85\x0bc\
\xcaQ\x1cL\x0f\xb8\x9e\xba\x1d\x86\xafBl\xea6\x18\
\x0e\x07zl\xce\x80\x5c'\x0e\xc3\x88\xadx\xb4\xc2L\
\x17\xb4\xee\x90; \x99\xb8\xcf\xb8\x91\xa7\x1b\xef-\xe2\
9\x9f\x97\xa3\x9eL\xc4^F\xfcR\xa0w\xc2&d\
:mXjE\xbe03F\xa4@\xbe\x82\xef}\x0d\
\xea)\x0bKn\xfd\xa2\xe4\xad\x1f\xca\xab<~s\x90\
\x0bD!d\xe5\x80\xa6\xad/\xa9\xff\xc7w\x95\x97,\
.\xfc\xbf\x0d\x8d\xbe \x1aC\xfcD\x9b\xd5\x9a\x83)\
\x1f38Q\xb1\xd9\xc4Z<\x88\xa7\x00H\xfeZ\xa3\
\xe8\x13\x1eZn\x1b\xdb^.n\xe0[\xc1I\xc9\x0a\
\xa8x\xf2\xadQ\x80\x9e|\xb4\xb5\x04\xaa\xf1\xcdg\x0a\
+j\xcd\x14Puc7T\x08#)\xf8\xfc\x1cx\
\xb0\xf8\x15\xeb|\xa1\x87\x084\x96\xe9\xfe*\x0cC\xec\
@\xce\xf5\x95K\x81\x06\x8c!\xb1\xa9\xfb\xc8Hr^\
=e\x10\xaf\xad\xc7\xef\xe4<\xa9\xf0\xd7\x8c H\xa8\
\x96M-\xb4^#\xac\x10\x1f\xd3\x07\xb92\x5c\xe6s\
}\xed\xe4\x88\xf0\xc1C\x99T\x17\xd4\xff\xbb\xd3s\xcd\
\xaa\xdaI\xff\xdb3\xe9\x95=S_\xdd5\xf5\xbf\xbb\
&\xbf\xbak\xcc\x8b;N^Xz\xef\xd7\xd5\xab\xcb\
qH:66\xf0D1\x9c\x0f\x0a\xa6a\xa4\xd9\x8d\
\xe3r\x9c1x\xd2\x0e\xf1\x14\x00\xe6\xafP\x9a\xcaq\
\xc1\x5c\xef\x98\xc6}*Z\x9c\xe8@E\xdcZ\xd1\xd8\
\xa2\x8c\xe1\x82\x12\x0d\x07\x19EMZ<c\xb1\x03\xe0\
\x84\xcd\x81\xe4\x15\x04\xb3\xe1a6\x10\xf9\x17\x0fR\xa4\
\xc0Q\xacq\x9fl\xe81\xb6*9xkJ1\xd4\
\x9a2h\xbaEW%\x8a\x1aQ$.\x1a\x932?\
\xc7\x0d\x0b\x18;F\xb7>\x91@\xb4\xa2\xbfG|\x86\
\xdb\xf9\x93a8\x9b\x16\xcfi\x80\xd8\x1e-x<\xfc\
\x07uV\x11\xd0\x0b\xbc\xda\xfeF\xb5\xd4\xa75\x1a\x0c\
\xfc\xc7\x92\x85\x1f \x86\xae\x8ef@E\xce\x1b\x9e\x90\
\x91\x8aCQ\x80x\x99\xaf\xb8\xb6\x004\xaf\xa4z\xc2\
\xa9DB\xb5L1\xfc\xbezT\x8b\xa8\xd1\x5c\xc9\x11\
\xc7\x03\xb6\xf9 E\x98\xaab\x84vC~G\xe1\xc6\
G\x88\x01|\xd1\x00\xaeH-\xaf\x16\xc4\xdf\xa8\x132\
XW\x04\xd7\xc2\x9f\xea\x8a\x09\xac#Ce\xc60\x1a\
k*\xc4rwa\x9e\x0eH\xb0\xcb\xaf,\xc8\xcfK\
\xb4\xe1\xfd\x1cX\x8f>V\x88\x98\xe8\xdf\xf1\xce\xf3\x9a\
t\xe3a\xd0\xe64\xfb\x06\xb9=\x88\x01\xd3C\xfe\xcd\
i\xd9p\xc06A\x8cQ\xcbX\x8a\x83\x9d3:\x9d\
/r\xff\xe2\x94Vq\x15\x00&\x1b2\xefa\xeb\x0c\
\x94E\xc6lv\xf3\xee_\x14\xe09\x86\xc8\xf8r\x9c\
\xc8\xc0\xca.\x060\xc3\x09\x9a\x1c\xb3\xfc\x87\x830\xd2\
\x86\xe6\xad\xaf.\xd8X\xbeue\xd5\xf6/\x1bJ\xb7\
\xeb\xaa9OQ\xf4\x12\xc43A<\x80\x10`r\xa0\
\x8f\xcd+\xdd\x0d\xa4\x84MV\x1e\x9c\x95\xe9T\xf8P\
L\xa2\xfb\xe1\xd1\xde\xcf\x95\x97\xd7\xf8S\xdc\xce\xb3\x87\
%\xe0\x856\xd7\xcf\xfb\x00\x86>w\x90sL\x06\x0f\
v\x5c\x89k\x17\x90-Y\xb6%\x9b\x19\x89o\xe8\x00\
C\x95R\x9c)Yb-b\xd0\xffc'\x0e\xe5\x1d\
\xc7\xfc\xa1\x820S\x12\xcd\x9c\xa40\xe3\x90D\xb4\x9f\
\xb1\xcar\x08\xcc\xc0`\x98x\xe8\x9a\xca6\x05\x16\x9d\
\xa9\xbf3-y\xc5\xe9\xe9__\x9c\xfa\xd5\xcf\x5cK\
NT\xdf<\xbc\xfa\xe3k\x03\x8d\x15x\x09\xa6\xbb\x08\
\xb1\xa7\xe4q\x9b\x1d\xeb%`\x1b\x05\x0b\x82\xecN\xcb\
4\x97\xba\x1f\xb3Js\xfc\xb0\xd4\x17f\xa5\xf2\x89\xb8\
u\x9c\x84;\x1e\x0dg\x82h\xc1\x1c\x96\xce$\xf9\xc2\
q\x19\x8e\xe6\xc7\xbd\xc5\xbe\xde\x09\x96G|yj\x82\
\xddv\xdb\xe1in\x9cJ(\xce\x01\x8e\xa7\x00\xe8\xae\
\x81j\xcaH\x0cug`GL\xc6\x18\x899\xc4z\
4\x18\xd3\x068\xd3\x9d\x90\x8c\x06N\x0d\x1cvB\x82\
;\xa7M\x19\x9d\x9d\x1e\xb9\xc9\x0d\x0bn\xce\x11]\x0b\
j\x85\x8b\xbd\x8b/p|v\x06\xab\xf8V\xd2|\xb0\
\xd3L@\x9c\x9b3\xd8\x90R\xb1P\xfa`\x9e\x7f\xe5\
MZ\xe5\xb7\x86\x1e\x99\xb1\x83\x9c\xac'\x0e\xe1\xf1\x18\
\xf3c\xb4\xd8\x17\x06\x85B\xc9\x1c\x90/\xb6t;(\
\x00\x98\x82\xc6\xfc1\x83\xfe\xef\xb04\x19\x1b\xe8pi\
\xe1\xa6i\x9f\x00\xb2D\xcbwo\xa4_Ev\xfb`\
\xd1\xe3\xa6bb\xa6\xeb\xaa\xf1\xc9(\x01\xbd55L\
\xa0\x0c\x80\x89v*\x0c\x0a\xc5\xe8\x01)]\x11\xd8x\
\x0a\x80\xecH\x0c\x0e=\x1foy\x8b\x0d!\xd1%f\
o\xd8\xa65\xc54\xeep@\x92\xfb\xa8\x1c\x07\xcey\
\xcc\xbb\xf3\xc3\x01CfHc\xd2l\x87\xa6\xc7\xa2=\
\x9d`\xda\xb2\xc0\x96\xa7\xf5\xcf\x7fm\xafX-\xe1\xe4\
\xb1\x07\x07\x10\xd6uI\x97U\x9f\xb2\xe7mc\xd9%\
j\xe1\x0a\xb1#l\xec)\x83\x0c[\xb2\x1e\xb6\xf2u\
\x00\x8a\x8f;Kv\xa7\x9a\xab=\xcb\x95\x87\x0d\xba{\
Z\x96\xce\xc7\xcd\xf5'x\xae z\x05\x90\x14\xb7N\
\x1b0k\x90\x1d\xcdAo\x06\xabf\xc6\x919\xae\x9f\
\x8dK\x13!\x8d\xb7d\xc5\xf5\x1e\x00\x98\x91\xe1\xc7\xb3\
Q\x17JL1k\xc2bk\x1b\x183\x8c\xa6r\xe9\
\xd3\xb3\x82E_\xe8:\xb4q\xa2h\xecc\x9d\xf1g\
\xa3\xd3\xdd6\xac-r\xa9\xec\x08\xfe\xec\xb4\x0e\x8e\x92\
\xec\xd2=\xd3\x07%8pz7\xb1/\xae\xe0iT\
\xbfo\xdd\xfd\xca\xc6\xbfJ*\x0e6e\xd8@9\xb8\
\xfc\x83f\x0185\x1e\x04\xdeW/\xad\xbe\xce\xbf\xeb\
]\x0d_\xd9\x15nT0\xc5.\x0d?\xd7|\x0a.\
6\xf0\x0e\xb0\x9a4VwdA\x18{\xd2La\x94\
0\xbb\xcc~uh\xc6_\x8fHJ\x10\x99(\xfa\xd9\
B{\x0f\x98\xf3\x0c\xc3\xd6U\x99\x8e\x88\x0cH\x04\x9b\
\xac\xfc\xdf\xd4A\xb9\x89\x0a\xda\x8e\xde\x97\xc1\xb8\xb5\xc2\
F\xf0\xf84\xfb_f\x0dL\xb0c\xd6\xe1\xa53\xce\
\x19(\xce\x02\xa0\xd8\xec\xec\xb0\xdb\x8c\xec\xf9\x10\xa5\x1d\
\x84\x14\xed5\xfcyJ\x8d\xd5\xd7k\xf8\xe0XdW\
\x85q\xc1K\xd2\xdc\xc1IwNJ\xb0\x851n\x90\
\x8b\x84,\xcb\xc6\xb5\x87\xa6O\xc3\xf9g\x10\xd3\x93\xf8\
\x02V]\xdf\xf9\xbam\xebS\xd8\xcf\xd3\x9c^mO\
\xc3\x13\x13\x81}X\xd9\x0d\xd4I\xeb\xfeO\xaf\xfc&\
\xfcz/\x1c'\x0f\x9eo\xd8\x9db=z\xe0\x94\x8a\
/\xfbd\xc5\xe6\xe2!\x8d\x7f\x9c\x84Is\x94\x80b\
\x1a\x17M\xce}\xe7\xf8\x01n\x9b\xcct\x99\xbf\x95\xb3\
o\xc3\x8b\x19s\xdbl}_\xcb\xfa\x07X\xf2\x0f\x1b\
\xe4z\xf6\x98\x1c\x97\x0d\xd6zc\xb2@\x08G&\x19\
\x10\xc2a)\x0e,\xa4\x1c\xb1/~\xc4U\x00x\xf0\
l\x8ed\xdbQ\x8f\xcb\x93o\xd4SF\x19L\x06)\
\xe3\x0a\x8b\x8a\xa0'\x0c\x86\xeb\x805|\x94\x0e\x22\x1d\
\xbe\xbc\xe5\xd2\xd2\x0b\xf5\x8aoy]\x0f\xe1\x1e\x85\x85\
\x19#\x97\x1e\x96{\xf5\xa4\x0cG\xf3\x04I\xfcd\xf8\
%\xcein\xe2\xef\xc6r\xc8\xc6\xe5cR/\x9f\x90\
\x8e&7\xde\xe0y\xf8)\xf5\xa0\xcf\xd8\xfe\x02.\x86\
\x87y\x1c8g\x81z\xe3\x87g\xf1ij\x1e|\xf8\
\x17.B\xc3\xb2\x0e\xd73&7{\x11\xf6)\x9b1\
\x8f\x82\xd8\x08\xa6OJ\x1d\xff\x13\xb1\xb5W\x80\xad\xa6\
)y\x19o.\xc8\x9d\x9e\x03\xb2\x04am\xfe\x8a\xf8\
*\xe3\x03O\x13\xf8\xe2\x8b\x91\xc3{\xb4\x0c\xb7\xd3\xd1\
S\xe1'\xda\x22K\xf2\x11\x83\x12\x1e\x9e=0I\xe1\
\x8f\xd3\x98\xc9\xdb\xc3\x98%\x19\x032<Yy\xf4\xa8\
!#2\x12\xb8\xd9\x8f\xbf\xc92\x89\xa7\x00\x9858\
(\xbc\xb2\xdd\xadL\xbc\xdev\xe2\x1b\xd2\x89\xef\xab\xd3\
\x1f\xf5\x8c\xb9Y\x9d\xfa\xa8q\xe2B\xfbI\xef\x18\xd3\
\x1e\x96pBS.\xbaP\xb1\x83b\xe1)\xd6\xbf\xb8\
V\xad\xde\x16]\xec\x83\xa5\xb8\xf5\x88\xcc\x7f\x1e9 \
\xcd\xc5\xdf\xb9\x8c\xad\x01\xee\x13\xc3%\x9d\xd7\xa8\xe1\x5c\
\x89\x8a\xf2\xd7Y\x03\xef\x9e\x99\x9d\xea\x8a\xe5e\xb4\x1d\
\x80'\x05\x1bQ\xfb\xf53R\xfd\xae\xf0\xeb\x14\x98\xba\
\xcd\x09l\x14}\x1a(\xfa\x92w\x18`\x9fA\xa7\xc8\
\x8e$i\xfa\x9f\xe1\xc7\xcc\xbe\x91\x02B\xac\xc3\xc7\xee\
v\x1e\xf9\xa0\xac\xc4:%g\x5c\x81l\x89\x113u\
P\xc2\xab'\x0e\xbeyb\x9a\x0d\xa7\x8d\xc6\x8bl)\
\x1e\xdd\x0c\xc3\xd9F!;\x85\x95.mA\x03cH\
\xc9\x90C\x89\xde\x82Y\x0fdg\x1f\x92\xf2\xcc\xfcA\
`<\xc2\xef}\xedB0wC\xb0\x8c1)\xf2\x7f\
O\xc8\x9b\x91\xed\x82\x15nU\xbb\x8a\xb8\xb6\x00\x103\
\xb4(\x03\xcc\x95a\xcf\x9a\xe8\x1esv\xea\xf4\xdf8\
\xc7\x9cm\x1b0Ar\x0f\xb0\x8f:\xc7\x98z\x97\xa4\
8\xb0\x80Ck\x80?\xf3c4\x16\x18\xcb~\xae\x97\
\xae\x13\xb6;B\x1c6\xe5\xd4\xd1\xa9\x9f\x9c\x9a\xf7\xeb\
\x89iG\xa4+Y.\x19\x14\x1d\xc2`\x97\xa5L\x97\
mr:\xbbj\x5c\xf2\xa2S\x07\x9e?6\xcd\xce\xc7\
~\x99\x98\xc7\xc6\x0f\xf4Pm\xaaN.|\x06\x0d\x05\
\x0b\xdf\x9ebX\xf0\x03Q\xa6i\xf2\xd6' \x12\xcc\
\xc9F:\x03o\xb7;\xd2F\xf9\x0f\xbf\xdf\xb0%\xf1\
\xe9\x90#C\xc6\xa6\x98a\x8c\xbdNN\x1d)6\xf5\
\x0ex\x84\x98H\xc9\x0e\xe5\xc6i\x83\x9e\x9c7p\xe6\
\x00{\xf3\x0b\xf5\xe2\x9ev\xe1\xa0\x0dp\xc1\xf9q\xc4\
AT@\x98\xd9\xc0\xc4x\xce\xe2BD\x8d\xc8[<\
#)\x8c\x1d;$\xf9\x95\xe3rF\xa78\xcd\xf1\x10\
=\x0a\xb3\xc9\xc6\xf1\xb9\xae\x17\x8e\xcf\x1f\x96\xe6\xe4\x81\
\xc4\x8dX$\xba\x86\xb8\x0b\xc0\x8f\x1c\x10d3\xbe\xf1\
\x03\x7f\xf6Q\x17\xe8\x87\xdf\xc5/\x09\xed=\xaf\xd01\
\xa9\xa9T_s\x93Z\xb3\x83\xb7\xb4#\x18\xda\xd8|\
\x16yh\x9a\xfb\xce\xe9\x03^>i\xc8;'\x0d~\
\xef\xa4\xbc\xf7N\xce\x7fgA\xee;'\xe5\xbd\xba`\
\xe8]\xb3sFg%\xb7$y\x97\xa1{\x8a60\
\x9f\xc7\x5c\xe6\xdf\x91b\x18u\xbb\x9aj\xc3|@\x9a\
G)c\xee\xd1\xa7\x19\xe3\xae\xc0x\xc4O\xe7\x12\x8a\
7U\xb1i\x84\x1e4\x1er\xad}\xc2e\x92\x8c\xc2\
(v\xf7.0T6\xc6\xce\x18\x99\xfc\xc2q\x83o\
\x9f\x98\xe8\xc4\xc7\x04 nu^\x15\x0f\xe7r\xa3\x04\
\xbdF\xdf\xc1\x7f\xf8bs\x07\xda\x93\xedbW\x14\xf0\
P\x1aY\x8a\x86\x05\xa0\xb7\x829B,Z\x04\x9e,\
\xbc\x18\xcd\xceM\xfc\xcf\x89y\xc7\x0e\xc4\xd7\xbf\xf2N\
Xh\xeb\xf1\xf4\xefR\xf0\x0c\xd8\x0d\xc2O\xc8O\xca\
\xa4\xdf\x1c\x9a\xf6\xcfc\xf2F\xa6\xbb\xe4\xae4\xce-\
t\xc99\xd0,\x1d\x8c\xd8(\xc3\xc7\xe6\xb4\x8f\xfd\x99\
>\xf9wPG\x87\x0c\xc7\x8b\x04F4k\xd8k,\
\xbfT-\xff:|\xe3iz\x89/\xee\xe3\xc8\xb2\x9c\
\x95\xe4\x1a\x9b\x958=7yVN\xc2\x8c\xbc\x94\xf1\
Y\x09\x03\x92\x9c\x0a:\x11\xae\xc4\x91]\x02K\xf4o\
\xc5\xeb\xc1\xd3D|\x22\x88\x02\x8c\x85`\x83\xd2\xb4\xd3\
\x5c\xec\x18~5\xfc\xa2\x14\xbb}\xf2\xf5\x0c4\xd5\x9e\
bf\xa3\x8e\xc1\x87m\x0d\xd9\xb0'\x06&\xde\x9c2\
\xed\x06\xd9\xee\x94\xbb#r\xa2\xe1\x80P\xb1\xcc\x04\xc7\
\xaf\xa7\xe5\xad\xf9\xc9\x90KG%\x0fM\x02Q\x80+\
\x85]p-\xf0\xe9<\xba\x22\xc2T\x17\x5c2\x0c\x9b\
\xcc\xe6\x0f\xb2\xdd<%\xa3\xdc\xc7\xdf\x04\x8b[#\x06\
\xdb\x0dL\x1a\x86C\xcf\xe3\x1cT\x22j\xcc\xdc\x85\x80\
\x1ddlX\xaa\xe3\x99\x13\x86\xdf89='\x01\x1a\
j`\x8a\xa3(\xc4\x91\x81M\xf8\xe6\x11\x0ep\xaa\xc9\
Y\xceW\x8f\x1d\xf4\xbbi\x03\x93\xf9\x8c\xa5\xcd\x85\xb2\
k\x03\xd1\x1d\x22\xd3\x168\xab}\xfc\xe5\xfa\x11wC\
\x22\x08\x8b\x05\xc5\x18b\xc1S$}~\xad^\xfd\x03\
w\x15\x13]\x1dqm\x013\xa4\xf9\xaa\xc5J\x14p\
\xe5\x90\x0dUQ\x1b\x22\xb5\x12 }\xca\xb8K\xd8\xf1\
\xff5r\x8eF\x93\xd8\xa1A\x84\xca\xbf\x969U>\
\xf6\x0d\xf7\xa4\xab\xf8\xd4\xb2\xdd\x1dQ\x91\xc2K\x01\x06\
\x12b(/\xcd}\xff\xbc\xdc\xb7O\x1e\xf2\xf0\xcc\xac\
t\x07\x83\xf6\x00\x1fme:\x8c\x1b\xdc?L\x8f$\
E\xf9\xdb\x9c\x81/\x9e4d\x9fO\xf2\xaa\xb2\x19\x88\
(\x80P&\xdb\xe4\xdc\xb4$\xb1N\xf4.\xb03\x14\
\x0cP\xb2S\xb9\xe5\xf0\xac7\x17\xe4.\x18\xec\x82\xa6\
@\xa4\xc50\x0app\x80$\xa5:\xd9c3\xd3\xfe\
{b\xdeq\xc3R\xcdN\xc6\xee87\xa7g\x04\x00\
.\x9c)\x0e\xe7\xb8\x8b\xf4)wH\x8a\x13/\x96a\
\xeb\x18/\xdbS\xac\xad\xb8\x5c\xad\xda\x12\xed\xf3\x01=\
\x06\x06^\x8f\xfe\xb9\x5c\x1c\x1d\x85\x807ZDV\x06\
;\xa6!>e\x9b-\xf3P\xfb\xfc\x7f\x07g>\xcd\
r\xe7\x06\xddC%\xc5\xcd\xa7\xbfF=\xc0\xe9\x88\x14\
\x97\xdf1X\xcf\x99\x1b\x9c\xf9\xb8s\xc1k\xf2\x80\xf1\
p\x08\xaf\xfa\x08\x7fz9B\x05 \xe309?\xc5\
\xf9\xf3C\xb3\xd6\xfft\xc4\x133S\x8e\xc9K\xcas\
I\x0eY\xc6\x91\xd3\x18{(\x7f\x10\x95\xf81\x17\xcc\
xm\x03w\x0c{\xcd\xb6\xbe\x18|\xc5=\x91\x92\xec\
lR\x86\xfd\xf6\xc3\xd2\xbf\xbfx\xe49\xa3S\x9c6\
e\xd1\xae\x1a\xcd\xf4/\x84o\x1d\x03\x89{X:s\
\xda\xf0\x8d\x95\xbd\x99^\x1e\xbc.\xc2\xac^\xf0B'\
CUjt\x86\xfb\xdf'\x0cy\xfb\x94\xbc\xf9\xb9\xae\
d\xbck\xc3's\xfc\xb1\xcf\x91g\xb3\xa8\xe0Y\xcc\
\xf4\x05\xbd\x84\xda\xef\xd0D\xe5\xec\x11)K\xcf\xc8\xff\
\xe9\x84A\x99n>\x8c\x05m2\x94\xe8n\xb2\xcc\xf1\
o>G\x84\xae\xfa\x82\xdb^c\xeb\xefff\x04\xf3\
\x98\x85K7\x92\x87J\xb3\xff\xa6\x0c8\x5c\xe6\xb5\xae\
\xde\x0f\xa6\xa9\xa1\xab\x1b\x1ee\x9b\xff.6E\x08\xcf\
\x10`\xa6\xdd\xf2\xdc'\xd8\xe0c\xd0\xa6Gb\x9b\x0f\
HG|-\xb3\xea\xa9\x90\xfd%\xba\xafN\xd6\x03:\
T\xfa\x99Sv\xa5\x04\xed\x03\x9d\xa9\xb9\x12\xda}\xc8\
h\x91\xf9\xdf\x0b\xc1(\x97\x0cU\x97\x8a\xea\xbc\xfb\x1a\
\xb5e;J\x97\xd6\xbb\xf6T{\xfd\x1aT\xe8\x00\x1e\
%p\x89<^\x0fD\xc4\x14\xecFG\xe0\x04\xea\x19\
\x98\xe9dY\x1a\x97\xa4\x9c\x9c\xaf\x1c5|\xe0\xd8t\
Gr\xf3\xd3\x82;K+\x8f]T\xe3Su\xdeL\
\x8d&GB8\x1f?\xc2y\xde\x94a\xb0\xdcq\xb4\
\x7fYT\x7f\xce\xa2\x92`KbF\x86~B~\xd2\
\xbf\x8e\xc9u\xe3h\x87hX\xbc\xb7\xfe\x17\x9f\x95\xa8\
Q\x9e]:&\xcf\xfd\xdf\x05\xf9\x1d_`\x07,\xdf\
_w\xde'\xa5f\xa2E\x06?f\xa6\xa3\xfa\xfd\x9f\
\xcfjY\x8d\x1aQ\x94\x0c\xc9\xa7j\xdfW\xfb\x16\xed\
\xa8|\xb3@+\xf2\xe0\xabgq\x07\xaf\xa1Gg\xa0\
\xf1H\xf4\xdcP\x18\x9b\x94\xc2~59k\xfa w\
^\x0a\xbe\xa0\x00jp\xa6\x9bn\xa6\xa7\x05\xc0\xd0!\
>\x03[\x9ec\x9b\x1e\x92T/\x04\xc5L8(\x95\
rR\x9e1\xef\x19{\xe6\x04s[\xd4\xb9\xaa{\xe0\
\xb6\xc8\x08\xec|\x9d\xad\xbd\x15C\x8f\xff\x91\x05\xd8L\
\x08\xdd\x91\xae\xcf\x7f\xd5=\x10\xae:\xb2K\xe6G\x8b\
\xa4\x84e8\xd2\x1ca\xf5c\x8c\x22\xb8\x19\xbe\xcd*\
\x8c\xdcCy.^\xf0\x18\xc3\xcbi\xf9V5\xa3\xda\
\xe3\xfdjg\xf1f=\xed\x87j\x7f\xb97\xd0\xe4S\
}\xaa\x1aT\x9c\xa0\x13\xf8\x985\x9f8\x0a\x8a\x9fC\
\x96d\xd5\xef\xb6\xdb\x9c\x0e%\xc3\xe5\x98\x90\xe9\x9a\xe4\
\xf6\xcf\x1c\x9e\x99\x99\xe0\xb0\xc9\x8a\x88$\x1eu~U\
;\xfb\xa3\xfd_U\x04\xf0\xce9/\xf6\x91eE\x08\
%c\xc9v\xf9\xbbs\x87$\xbbq\x02\x92p\x04 \
\x00Z\x03g\x8f\xe8<\xf8g\x9c8\x04\x05\xc0e\x8b\
<\x9c<:?\x01\x01XZ\x1a\x9d\x00\xc0\xe9\x8f\xcd\
s\xbf\xba`\x08W\xc8\x88N-\xe0\x02P\x12q\xc9\
13\x02\x93f9j\xde\xfb\xf9LX\xe7I\x17M\
\x00L\xc0;\x8c\x00\xee'\x16dIj\xf4\xab\xab\xf6\
T\xbe_j|_\xd9T\x11`\xd5^\x15\x1a\xe9\xe8\
\x0aN\x82\xfb\xb9C\xfc\xe1\xc7\x00\xb0\xcf\xdc\xc0]p\
\xef$\xb7M\x1e\xe00\x06$9\xe6d*\xe7\x0cO\
\x1c\x99\x9df\xfb\xb1z\x0by\xc4\x92\x02 \xa2Z\xf5\
\x06\xb6\xbe\xac|\xfbgn\x980\x121\xda\xe0'e\
\x98|\xe4\xe3r\xc6\xa1\x98\x97cH\xd1n\x00/\x04\
\xaa\x0c\x95;\x95O\x8e\xc1\x1bH\xd81\x1da\x8ar\
1\xd4SF\xdbN\xfe\xd0fsa\xde\x89\xf0\x921\
\xd7\x99\xf7T\xcc$\xe5G\x9b\xc6\xde\xf4\xc8\x8cY\x9e\
oqs/\x8f\xd2\xcei\xc9\xbb?^)\x968\xb8\
DhN\xea\xba\xeeW\xf5\xc6&oSP\xf3+n\
\xbf\x86]\x8a\x9a\xaeC#\xdf\xae\xc8.E\xb2\x05\xbc\
\x89N[\x82\xcb\xe9\xb4\xd9\x14~\x1f\x9c\xfb\x85\xc0\x92\
\xf0Y\x92>\xdcSw\xd9\xd22<\x95\xb9\xd5<C\
\xb8`4\x1bL\x9f\x95\xe5x\xf7\xf4\xe1f\x84w\x1c\
\xed\xab\x8a\x1a~\xba\xa8\x10\xe4\x0a\x09\xff<&\x86t\
|~\xe2S`\x86#\x17\x00\x8c<.\x00\xbf\x5c\x0a\
-\x80\x96\xab\x8f\x04C:zp\xc2\xcb\x0b\x86\xc0Y\
\xa3\xcaZ\xc6\xf2\xfd\xf5\x17..\xc6\xc5\xc8/\x1c\x98\
\xee\xac{\xfb\xe2\xe9p0>u\x11\x8b=\xe5qa\
^\xc1\x019\x0c2\x8fQ\xdb\xe4+\xf0\xb2\x82z_\
Au\xfd\x9e&es\xbd\xbe\xb7\xd6_\x17\xc0\x82k\
\xba\xc2\x0cb\x1e\x82\x87\xb3L\xb7ml\x86c\xbc[\
\x1b\x92\xe6\x1a\x92\x9e8$A\xcaKv\x1d8\x0f\x0d\
w\x05g\x89*\xc2\xe2A\x0f\x0b\x80\x09\x0f\x83\xe1\xdf\
\xf4\x9c\xbc\xe9A>k&\x02\xc5\x0cc=)\x9f\x1d\
\xf3\x82\x92zH\x8f\xc5Px\x18\x98\x03\x18\xce\x00\xfa\
\xe9\xb9R\xc57\x98\x01#\x0c\xb0H\x88\x09\xd7\xd8\x0f\
\xff\x1d\xac\x81o\xbd\xfc\x92{?\xcdy\x9b\x7fcf\
\xe2f\x11\xfbp\x01X\xc6$\xe3\xdb\xf9\xcf\x01\x91\xdd\
R(\x9aT\xfd\xbc\x85{\xbf\xaa\xc2w\x97F\x93\x18\
\xdc#H\xc7;\xa7f\x5c3\x99O~\xdeY\xa2z\
\xfd\xfe\x92\x9a&\x11\xb6\x88\xe0\xce]\x0e\xfb\xa0\xd4D\
3\xefE\x94\x7fx\x114<\x81`Em\xa3\xb8\xf8\
\xc8q;m\xd9i\xc9\xb0\x10E\xd6\x05\x9b\xeb\xf1\x07\
+j\x1a\xf8\xd9#>\x1cp\xd8\xe5\xbc\x8c\x140\xdd\
]Tt \x8aP\x18\xcc\xec\x04!\xe435\x1b\xba\
\xae\x1arS \xe0\x0d\x04T\x15j\x198*\xd1n\
S\x5c.{\xa2\xc3\x0emIX\xe5\x15}\x11 ~\
0\xd4\x10q\x88\x0f\xdf\xd0\xc3\xf4\x22\x01\xd0\xd5@\xe0\
\x87\xe7\x95\x0d\x0f\x1cp?\x00\xa2[6\x12\xf3\xe4\xb9\
\x8f+\x03\x0e\x87M\xbd$\xd6\xda\xd2\x12\x8d\xde\xca\xed\
\xf6ON6\xb4@\xb8A\xc5\x03\x9b\xab\xe9\xeel\xf9\
\xb4\xa5\x8a3\x91_{\xd7\xe4b+q`\xde\x86\xc8\
\x84\xb5\x03#\x94\x17f\xf1\x8f\xdb\xb1d\xa3{S\x0a\
\xe0X(\xd1\xbfXV\xfaY\xa1\x07o\x10cb\x1c\
xtx`\x08\x0c\x87b{{A\xf6\xf4\x1cs\x08\
P'\xa9j\x96\x053\x14\x11\x9d\xf1\xa0\x8b\x15Ka\
\xd3|8\x9e:\x8a\xc3\x01\xe1\x03\x1cj\xc6V\xa4\xfc\
x\xe5\xd1\x1d\x8e\xff<\x04@tW\xd0\x09\xdcw\xfc\
\xe2\xe9b\x9e\x8a\xaf\x1dD\xcbN\x93VNp\x05\x0e\
\xc7\xbc\xd7\x15A\x8c\x9c\x18\x1aJ\xf1\x03\xe2\x821Y\
\xb1\xbb\x1c\x13\x7f\xa5M\xbeC\x92\xed\x18OX,\xb1\
9ex\x0a\x8c/~\x13\xac/\xe0\xb5\xec^\x0a\xbf\
\x04\xc4\x9d5J\x1bzV$E\x17.\x13\xaeK\x96\
\x14E\x9a\xf8[\xc5\x91\xc0\xbd\xe9-\x15\x84>\x0d\x8f\
I\x01_\x85\xff\x1f?\xb8\xb1\xf9\xff\xc7\xe4\x82\xa2\xc9\
Q\x0d\xed\xc1\xefj\x96\x14x\xb85\xc3\xbbt\xc2A\
$p;\xc0\x06\xba\xe4\xc3\x06\xba\xc1\x17\x0e\xdf\x11\x1a\
\xee\x06S?\xd23\xf2\x03\x05bS$\x88#c\xc8\
x\xe6\xf1<>\xa3\xf2\x81\x1f\xcd\xaf<\xba\xc3\x0f\x8c\
\x02\xb1-\xbe\x98\xd7&\xd2F`\x86\xb75\x07\x84\x03\
i\xe5\x84G/w!<\xediz\x85\x00\xb4 K\
\xcc1\xeeg\xda\xc4\x9b \x8a\xf8\x88\x0b(C\xd8\x08\
\x90\x1a\x0b\xd8\xd2\x9fiU\x9b\xb8\xab^\x0d\x04\xda>\
\xf1\x1a=)W\xacw\x06\x18\x1c\x1d\xdfQ\xa6I\xb9\
\xc7\xdb\x86/\xc0\x5cF\xf4\x10X4\x01C\xf2\xf9\x83\
\xf7\xac-\x7f\xfa\xfb\xda\xd6m\x86h\x80v\xc5]G\
\xa4\xdb\xf8\xe4$\x04\xd1\xdb\xe8\x15]@-4\x07\xc6\
\xf0mz\x06\xe7\xd3\xd7|f\x01\xc4\x8e \xd8\x95\x9c\
/\x1f\xfb\xaa\x92\x92\x8fb\xdck\x0d%\xbf\x06\xbf\xa7\
Z^v\x81T\xb3\x03\xbb\xb3p4\x0e\xaa\xd9\x81A\
\xe6\xae`\x9d\xefb\xb2\x94\x7f\x82m\xee\x93\xd0\x0a\x82\
]\xbd\xf7\xd2\xfa#-Y\x8e\x7f\xe3o\xb9W\xbb\xf5\
\x8b\x92\x8f\xf77\xe1j\xb4ia@\xab\x8e\xb7\x1dF\
\xa58\x96\xffd\x88MQ(Y\x89^H\xef\xaa\x98\
\xa0]Gd\xc7\xb8K\xf4)\xbf\x83\xd0\xf1*\x18|\
t\xb4\x95\x8d\xfb\xb5%\xe7\xe9\x15\xdf\xfcX\x5c{!\
\xbc\x9c;\x12\xd2\x94y\xff\x0a\x0c\xbd\xc0P\x14\x088\
v\xf9\x1d\x1cfp\x87\xf7\x91t\xbb+8\xf1\x16e\
\xe6\x03\xb2\x82c\x03\x00\xb1\x9f\xe8F\xcc\xbe\xc5\xc6\xa0\
\xfa\xe4\x86\xaa\xd3>,\xf8\xa8\xa0\x89\xa7O\x0ci\xc1\
e\xdf&K\x17\x8fN\xb2A\x03Ol%\x88\xdeE\
\xefj\x01\xb4\x80\xa12$\xef\x96\xe7m\x1b\xeea\x9a\
n\xc8\x06\xd3dC\xc6\xf1\x95F\xca\x10\xdb\x89\x1f1\
W\x0a\x94OnU{\x97\xc5\xc4\xd8\xe4\xa3\xbat\x1c\
\xcam\xf8\xf6\xaf\xb0}w\x87\xd1P.I\x01\xbe\xd3\
T\x03\x08\xb3l(.)k2\x9b\xfag{\xfa\x08\
\xd8\xc3d\xea%\x88\x193\xf6Q]\x0f\xce\x15\x98\xa3\
0\xbf@\xa2\x98\xf1\xcf\xdb^\xf0\xc3\x9f8\xf7\x04\xb4\
\xc5{k\xef\xfe\xa6\xbe\xca\x17\x94\xf0\xd1\x14\x1c\xc5e\
\x1e\x15\x9d$\xf3\x16\x00\xcbK\x90?<9;7\xda\
\x811\x04\xd1\xd5\xf4V\x010\xff4\x7f`\xf3\xbf\x94\
M\x0f\xe3\xa0nh\x17\x98\xc5\x9bIr\xcapi\xd6\
#\xca\xc0\xc3y\xb1\xea]\xe5\x0am\x87\xb9\xd0\x1c\xb1\
\x86\x1a\xf0Ul\xb15l\x91|\xd5\x9a\xbf\x11\x1f\xbf\
\x02\xf5r\x0d42\x8ep\xa4\xe5\xf3\xf9\x18\x102\x10\
\xb1\xc3\xe3\x5c/\xab\xac\x95\xa0=eN\x01\xa8\xc86\
E\x81\x05XS\xf8;\x0f4]\xf7\xf9\x03\x0dM\x9e\
\x8a&m\x9f\xea\xdaT\x15\xd8W\xe7[U\x1a\xac\xf1\
\x07[\xe6\xf1hI\x8b\x18\x04\x00\x8e\x95\x9e9*\xeb\
\xccC2\xc0\x03\xf3\x8f z\x1b\xbdT\x00L l\
\x10:\xdf\xb6W\x95o\xee\x905\xfep\x8c(I\x86\
\x94\x94\xcfN|\xd7\x960\xa0\x97\xdb\xcd\xe6\xe8\xc5o\
nL\xf8\x02^\x87\xf9\xa6x\xa8\xf5\xc3F\xb2\x0d\xf1\
A7\xb4:op\xda\x7f\xf7\xf8\xf11g\xcc'\x8a\
\xc4p\xd05\xb6\xba \xa21\xba!A4hO\xea\
F\x00*\xff\x86\xb9\x0bRE\x83\x9d\xe6~L\x96\x98\
\x81\x16\xc0\xfcL\xe5\xb53Ga\xb3\x8e\xd2\x97\xe8\xad\
\xf4\xf6n\x07(;\xaeQ\xe7\x18\x13\xae\xe3\x05\x18*\
\xcf\xb0\x81\x9b\xd4\xc6\x02\xe3\x933\xf5\xaaM|\xa2\xa6\
f3\xdb{A\x13\xc0\x1fN\x84\x05\xb8\x04\xf8\xc6\x89\
\xd8\xcc\xedD\xcc`{\xd1\xcc\x17/~_Wg0\
?\xd4\x1b4\xc3\xab\x19\x8d\x9a\xee\x0d\xea\x1eMoR\
uO\xd0\xa8\x0f\x18\x8dAX\xd6\xfc\xfc\x85\x02\x90\x10\
85\x04>\x98cN\x9e\x13\xeb8c<\x1e\xc2b\
\x18\x896\xdb\xf5\xd3s\xc0\xeb\xde\x9e1\x09k\xd3\xab\
\x05\x00\xeb\xc6`\xf4mN\xc7\xe4\xeb\x83S\xfe\x88%\
\x15\xca,\x14Y^v\x8d\xc6}\xfa\xca_i\x81J\
^\xfa{)\xe6%pp,yk\xc4\x08a\xe1\x94\
\x88\x16\xa8\xb4\xab\x0cg\xfa\xf1\x04\xd9s\xdb\x1b\xf1N\
\x11\x87W\xfa\xf9\x17|\xe3\xc7\x5cl\x06\x97\xf8\x0eL\
\x08s\x85\xaf\xc7\x80\x0c\xad\x0a\x9c\xa2Z>ux\xd2\
\xac\xdcDSc\xc4>\x82\xe8}\xf4\xf6\x16\x80\x09\x14\
L\xd7\xf8\x0b\xd5\x897\xc3\x82\xc1\xf8DY\xba\xa4\xe8\
6\xa3\xb1HZx\x92Q\xb6\x8ejZ\x96\xc6\x90l\
\x90'$\xe9\xf6e{\xcb\x9a|=8\xe72Z|\
\xdd8:\xdb\xf1\xf0\xec,\xa8\xa6@\x98zs\xed\x84\
\xfa\x86\x00\xa0\x04(N\xe7\xa4\xab\xd5C\x7f\xc7\xeb\
i\x0a\xef\xb7\xe5\x0dvO\x99\xf6\xe5\xadZS\x95\xc1\
\xa7\xd8\xc6\xb99\xc4\x02\x14<\xf1C\xf4w \x95\xd9\
\x97\xbb\xcb\xfeW\x100\x0c>\x97g\xf7\x02'\xe4y\
\x0e\xf2\xa9\x9c\xeebwL\xcfr\xdal\x98O\xb1\x05\
@\x10\xbd\x97\xbe\xd2\x02\xc0V:N\x161\xf1\x0a}\
\xc2\xf5\x06\xe3\xe35xs\x00k~\xf5\xbb\x8dO~\
\xa2\x95\x7f\x83E\x10,\x01\x96\x7f\xf8GE\x80_.\
\x0c\xb1\xf6\xed\x12\xbd\x1cU\xd7\x9f\xdf\x8d\xf7\xd5M\xc4\
\xd6\xee\x02\xce\xc7oM\x19C\xdd\xfa\x9d\x87gN\xcc\
J\xc0\x8dfH\xba=0\x04\x11>\xbdz\x14P[\
\xb0~\xaf\xa9\xbe\xcd\xcf\xda6\xdd\x07e\x8b\x1bx.\
\x04\xb0'q\x98q\xdc\x1bZ0\xd0\xb4\xf5\xedT}\
\x07\x93\xa5\xa6\xa4\xc3\x13F\x9fcs\xa7`c\x9cF\
\xd9\xf7_tC\xddZX}\xcc\xe2\x9a\x1e\xd3y\xec\
\xed\xc1\xbb\xcaO\xcc\x1b\xf4\x93Q\xa9xK\x01\xa7{\
\x14;\x09\xa2\xd7\xd2\xd7\x04\x80\x87\xd6\xd0\x83\x81\x0d\x7f\
\xb3mz\x9c?\xb2\x83m\x00\x03\x9b2\xba\xee\xca\x94\
\x03\x8d\x92\x11\x80u\x00K\xa0-A\xcd;\xd9>\xf1\
*%m\x14\x1e\x8d\xcf\xf7P\xb9\xec'\x98\xed;H\
goP_\xf0\xde\xbe\xefk\xfd\xdd\xff\x8a\x1b\xcc\x90\
f\xfec\xec\xe7\x87$\xdc93;\xc9i\xc3\x0d\xdd\
\xdf\x0c!\x88\xc8\xe9c\xf5b\xb3Y\xcdd\xbb}\xe2\
o\xfc\x93~\x87w\x83y\x97\x0f\xbe\x19\x1c.\xc6W\
\xc5\x8c\x00,@\xe1\x13\xe5/\xd8\xa4\xec}\xc3\xf8\xf4\
\x82@\xc1r.\x0aD?\xc2LQC\xfa\xe7\xa6\xaa\
\x1f\xea\xfc\x98\x17z\x00\x9e\xdd\x98\xf4\xabC\x5c\xf7\xcc\
\xc9A\xeb\xcf3_\x8f\x04\x85 \x22\xa5\xafv\x8c\xc8\
6\xa7{\xf2U\xfa\xb8k\x0dY\x81\xc2\x86\xa6\x80\x97\
9\xd1D\xe0\xdf\xb8\x80\xbdD\xcc\xf0U\xb0\xb5\xb7\xa8\
\x95\xdf\x9b\x0e\x89~\x02\xb7\xbd\xb5\x01\xf5\xfd\xbd\x8df\
\xb2\x9b\x9b\xbb\x03l}\xe0\x17f/I\xbfpD\xc2\
\xcd3\xc4\x8b\x18\x09\xa2\x0f\xd1\x87\xb3,4\x04\x1c\x93\
\xae\x09\x8c\xbd\xe1\xc7r\xdf\xa6\xee\x85\xb5B\xbc\x03\xc0\
\x98\xb7\x5c\xdb\xf4\x04\xb7\x19D\x7f\x01m\xb0\xfe\xca\xf7\
U?\xd4\x04\xf8\x13\x82\xdd\x99\x99q42\xd8\x7f\x99\
\xb1_\x8fM\xbewNN\x8a\xcbF\xb9\x8b\xe8s\xf4\
I\x01\x00{\xce\x91d{\x82\x9c>\x0a\xb7\x98\x9b\x9b\
K \xeek\x86;\xc4f\x80R\xb2$PS \xb6\
\x12\xfd\x01VP\xa7\xde\xff]-\x7f\xa8\xb7%\xf1\xbb\
\x03\xa8S\xe8\xb2\x94l\x93o?,\xf5\xb6\x19\xd9\x89\
v1\x8f7\xcfk\x04\xd1g\xe8\xdb\x8dV]\xd7\xb5\
\x1f\x9e\x87\x05~'\xaeC\xc0Fh>i\xff\xdbb\
\x95\xe8\xd3`\xe5[W5\xedo\xdf\x95\xfb5~'\
\xa8\x9b\xe0\x1d?`\xeb%6$\xd1\xfe\xe2q\xb9\xd7\
N\x19\xe8\xb4A3\x80\xec>\xd1'\xe9\xdb\x02\xc04\
\x9f\xcb\xb7\x13\x17x\xb1\xec\x00\xd3F\xe8u{\xccU\
\xa2O\xc3;\xdf\xd9\xa6\x8a\xa6\x0f\x0a\xfc\x98\xb6\x9d\xa5\
~\xdc\xc0\x8e\x1f\xdd\xa9\xc8'\x0fu/<%g\xf6\
\xe0$\x9ch\x94\xa6\xf4 \xfa,}[\x00\xf8C^\
a\x15<0\x19`)\x0c\xa3e\xc6_\xa2/\xc3[\
|\x7f\xf9\xba\xa2\xc6\xa7ri\xef6\xe3k\x0cp;\
\x1e\x9e\x9e\xf6\xc4\xd19\xd9\xc9n2\xf9D_\xa7\x8f\
\xb7\x00l.\xc3\x9e\x06\x0b\x9dw\x01!\x86\xec\x1a$\
\x16\x89\xbe\x0c\x88\xf9\xbb\xdf\x17\xaf(\x0dH\x0c\xdf\x1d\
\x8dc\xbd\xba\x06\xf0\x97\xbf\xda\x05\x87\x95\xa59\xe5\x0b\
F:\x97\x9e\x96s\xee\xf8L\xb7\x1d\xc7\x9e\xf1\x0f\xa9\
\x00\xd1\x87\xe9\xe3\x02 \xdb\xd8\x88s\xf9bg&\x00\
$\x82\xc9\xc6\xe0\x13\xc5*\xd1\x97\xa9\x0fh\xff\xdc\x11\
\xc0\xa7\xaf\xb8\x09\xeeB+lN\x22\x22K\xe7\x0cO\
x\xfd\x84\x9c\x87\xe6\x0e\x19\x94\x02\x15\x7flu\x8an\
\x1f\xb2\xffD_\xa6o\x0b\x00\xc0\x0e9\xd7\xb0\xa7\xc0\
\x02\x8e\x04\xe7\xa3\xc1\x0f\x02ko\xb0\x99IZ\xdax\
g\xce\xe1b+\xd1'\xe1\x89,I\xab\x0a\xeb\xbf\xab\
\xf43sJ\xa88\x81\x9d\x84\xe0}s6\x82\x0f\x94\
\x8d\x0c\x87|\xe2\xe0\x84\x95\xa7\x0f\xfe\xc7\xfc\xdc\x89\x03\
\x12\x1c6\x94\x9c~Pj\x08\xc2\xa4\x8fM\x05q\x10\
\xdc\xb8\x1b\xda\x86\xc7\xa4M\x7f\x83\xa6:\xf6\x04\x98%\
\xb4\x05,\xd1\xfcm\x92J\x821\xe7o\x8e!'\xb0\
\x83\x1c\x10}\x07LL|\x7f\xaf>\xe3\x8d\xdd\x15>\
\xb4\xd21\xd6\xc0\xc1C\xa8\xce\x83G\xd8\x91\xc4\xbd\x93\
\xb1G\xc9\xb0\xdb\xf4\xf1\xe9\x09\x17\x8cL\x98\x95\x930\
*\xdd\xec\xef\x81|\xc3\x8b\x0a,\xe2\x0f\xb6\x00\xf0\x87\
\xfa2}]\x00t(\x8dj\xb0Q\xff\xfc\xd7\xac\
t\x95\xa4\xab\x07\x15K\x9c\x10\x14\x0a\xae\xcd\xad\x8e\xbe\
\xdc5\xe5\xb7\xb2\xc2\x9f\xd4'\xfa&(\xf6\xba\xf1\x87\
\xb5\xa5\xcf~_\xc7@\xee\xb1\xf7'\xd6\xd4\xc4\xdc\x8f\
\xdd\x83\xbaK\xb6%\xc9jN\x8asf\xb6\xfb\xfca\
\xaeq\xd9\xa98\xa7\x0f\x02\x19H\x97d\x10\x00\xca9\
D\x7f\xa3\xcf\xb7\x00\xcc_\xcd\xdf\xa0}\xff\x1c\xfb\xe1\
\x09I\x0b@9\x85?\xa8\xdca\xe16$-!G\
\x9at\x8b}\xc4\x19\xb2\xcd\x0e%\x98\x04\xa0\xef\x02r\
\xbe\xa3\xce\xf7\xd3E\x05\x85\x8d8\xf33&0OM\
\xf8\xe7\x89}`\xca\x8a\xed\x98G\x84\x03\x81\xe9\x08\xdc\
\xdb\x98<(\xc1>$I\x19f\xf3\x1d20uH\
\xaak\xb8K\x1b\x9a\x99\x94\xe4\xb4\x9b\xadD~\xa0\xe9\
\xdc\x5cj^&\x88\xfeB\xdf\x16\x80\x16\xf8xP\xc9\
\xdfPj\xdf\xfa\xa4T\xb4\xcc\xd0\x83Xz\xed\xc9\xde\
\xfcs\x13\xc6] ;Sd\x9a\x0e\xba\x1f`HA\
Mk\x0c\x1a*\xd8o\xfeR_U\xc3E\xc9`\x9a\
\x86wk\x03\x9a\xc6W\x0dU\xc7\xa1;\xd0\x5c\xc0\xf7\
\xb4@\xfd@3\x9dK\x9a\xae9\xed\x0e\xa7\xdd\x96\x9e\
\xe4Nr\xd8\x14I\x87/\x80\xfb\x8eP\xfd\x80\xb0\x14\
\xfdG\x00\xb0\x92\x86KP\xe8\x83\xbc/\x08\xcc\x82\x03\
[\xeeL\x01\x13\xd0\xfd\x13\x05\x13q\x07\xad8&\xb2\
\x98\xd3\xfb\xc0\x8c\xdb\x81\xd9nq\x06nZ\x9a\x8c\xe6\
\x1a\xff6\x17\x04$\x00\x84\xa5\xe8/\x02\x80\x7fXv\
\xc5\x90\xf0\x96k\xe2\x9b\xf0\x97\xcau\xdf\xc7LUP\
v\x9e\xaeQr@v\x87\xcc\xc2;\x0bq\x91\xb2\x08\
aE\xfa\x89\x00\x10\x04A\x10\x91B\x1d#\x04A\x10\
\x16\x85\x04\x80 \x08\xc2\xa2\x90\x00\x10\x04AX\x14\x12\
\x00\x82 \x08\x8bB\x02@\x10\x04aQH\x00\x08\x82\
,\x0a\x09\x00A\x10\x84E!\x01 \x08\x82\xb0(\
$\x00\x04A\x10\x16\x85\x04\x80 \x08\xc2\xa2\x90\x00\x10\
\x04AX\x14\x12\x00\x82 \x08\x8bB\x02@\x10\x04a\
QH\x00\x08\x82 ,\x0a\x09\x00A\x10\x84E!\x01\
\x08\x82\xb0($\x00\x04A\x10\x16\x85\x04\x80 \x08\
\xc2\xa2\x90\x00\x10\x04AX\x14\x12\x00\x82 \x08\x8bB\
\x02@\x10\x04aQH\x00\x08\x82 ,\x0a\x09\x00A\
\x10\x84E!\x01 \x08\x82\xb0($\x00\x04A\x10\x16\
\x85\x04\x80 \x08\xc2\xa2\x90\x00\x10\x04AX\x14\x12\x00\
\x82 \x08\x8bB\x02@\x10\x04aQH\x00\x08\x82 \
,\x0a\x09\x00A\x10\x84E!\x01 \x08\x82\xb0($\
\x00\x04A\x10\x16\x85\x04\x80 \x08\xc2\xa2\x90\x00\x10\x04\
AX\x14\x12\x00\x82 \x08\x8bB\x02@\x10\x04aQ\
H\x00\x08\x82 ,\x0a\x09\x00A\x10\x84E!\x01 \
\x08\x82\xb0(\xcc0\x0c\xb1H\x10]\xc3\x9a5k<\
\x1e\x8f\xdf\xefOLL\xcc\xc9\xc9\x19=z\xb4\xd8A\
\xf4n|>_K\xc2\xa5\xa6\xa6\x8a\xadD?\x82\x04\
\x80\xe8B^}\xf5\xd5_\xfd\xeaW`D\xc4:g\
\xfe\xfc\xf9K\x97.\x15+D\xaf\xe4\xcb/\xbf\xbc\xf9\
\xe6\x9bW\xaf^-\xd6%\x09\x04\xe0\xac\xb3\xcez\xfe\
\xf9\xe7\xc5:\xd1/\xa0. \xa2\x0b\xf9\xea\xab\xaf\x0e\
\xb2\xfe\xc0\xb2e\xcb\xbe\xf9\xe6\x1b\xb1B\xf4J\xdex\
\xe3\x8d\x03\xad?PWW\xf7\xc2\x0b/\xac[\xb7N\
\xac\x13\xfd\x02\x12\x00\x82 \x08\x8bB\x02@\x10\x04a\
QH\x00\x08\x82 ,\x0a\x09\x00A\x10\x84E!\x01\
\x08\x82\xb0($\x00\x04A\x10\x16\x85\x04\x80 \x08\
\xc2\xa2\x90\x00\x10\x04AX\x14\x12\x00\x82 \x08\x8bB\
\x02@\x10\x04aQH\x00\x08\x82 ,\x0a\x09\x00A\
\x10\x07\x13\x0c\x06\xc5\x12\xd1\xaf!\x01 \x08\xe2`T\
U\x15KD\xbf\x86\x04\x80 \x88\x83!\x01\xb0\x08$\
\x00\x04A\x1c\x0cu\x01Y\x04\x12\x00\x82 \x0e\x86Z\
\x00\x16\x81\x04\x80 \x88\x83!\x01\xb0\x08\xf4JHK\
SPP\xb0u\xeb\xd6\x92\x92\x92\x8a\x8a\x8a\xb4\xb4\xb4\
\xc1\x83\x07\xe7\xe7\xe7\xc3wJJ\x8ap\x11\x1b7\xdc\
p\xc3\xdf\xfe\xf67\xb1r\x00_\x7f\xfd\xf5\x11G\x1c\
!V\xe2Daaa%\x07\xae\x05\xbe!c\x0f8\
\x80\xdc\xdc\x5c\xe1\xae\xbb(---..\x86\xb8\x85\
\xef\xba\xba:\x88\xd2\xd4\xd4T\xf3{\xe6\xcc\x99\xb2\xdc\
}u\xaf\xa6\xa6&\x08\x06\x84\xc7\xfc\xce\xce\xce6S\
9//O\xb8h\xc39\xe7\x9c\xf3\xd6[o\x89\x95\
\x03\xf8\xea\xab\xaf\xa6M\x9b&V\xe2AMMM\xd9\
\x010\xc6\x06\x1e@VV\x96p\xd7\xed\xac_\xbf~\
\x1fGQ\x14\xc8?\xa3F\x8d\x8a{\x8e\xed\x15@9\
!\xc2\x072\xc4u\xd7]w\xec\xb1\xc7B\x19\x86\x0c\
1o\xde\xbc\x9f\xfe\xf4\xa7\xcf?\xff\xbc\xd8\x1d-\xcf\
=\xf7\x1c\xf8\x03\xbe\x81\x9f3f\xcc\x00\xff\xe1,`\
\xd1\xc4\xee0x\xf1\xc5\x17\xcf<\xf3\xcc\xe9\xd3\xa7O\
\x9a4i\xf6\xec\xd9\x17]t\xd1\xca\x95+\xc5\xbe6\
\xec\xda\xb5\xeb\xee\xbb\xef\x1e9r\xa4\xc8\x04m\x98<\
y\xf2_\xfe\xf2\x17p&\x0e\x88\x96\xeb\xaf\xbf^\xf8\
\xd8\x1a\x10\x00\xe1\x226\xa0\xa2\xfa\xee\xbb\xef\xfe\xf2\x97\
\xbf\xcc\xc9\xc9\x11^\x87\x00\x04\x00\xe2\x04R\x0aRP\
\x1c\xdc\x05,[\xb6\x0c\x22v\xfe\xfc\xf9\x1d\xdbw\x9b\
\xcd6w\xee\xdc[n\xb9\xe5\x8b/\xbe\x10Gv\x01\
EEEO>\xf9\xe4\x09'\x9c \xce\xda\x86!C\
\x86\xdcv\xdbm\x1b7n\x14\x07\x1c\xc0\xe9\xa7\x9f.\
\x1c\xb5\x06\x04@\xb8\x88\x8dM\x9b6=\xf8\xe0\x83\x10\
Q\xc2\xdf\x10@N\xbe\xef\xbe\xfb\xc0\xb18,\x06\xa0\
\xa2s\xcd5\xd7\x80\x87\x90\xb7\xa7N\x9d\x0aE\xec\xfe\
\xfb\xef\x17\xfbZ\x03\x01\x1b1b\x84\x08\xc1\x01$%\
%]x\xe1\x85q\x09L\xef\x81\x04 2~\xf6\xb3\
\x9f\x89\xec\xd0\x9a\xaa\xaa*\xe1\x22r\xa0^&|i\
\xcd\xd9g\x9f-\x5c\x84\x81\xcb\xe5\x12\x8753g\xce\
\x1c\xb1\xef\x00\xc0\xe2\x9cz\xea\xa9\xc2E\x18\x1c\x7f\xfc\
\xf1;w\xee\x14\x07GN\xd7\x09\xc0\xf6\xed\xdb\xc1\xa0\
\xb7\xbd\xeap8\xed\xb4\xd3V\xadZ%<\x8a\x07\x8b\
\x17/\x06\xd3\xe0p8\xc4\x09\x22\xe1\x98c\x8ey\xfd\
\xf5\xd7\x85Gq\x02r\x14\xd4'\xc4\x09\xc2\x00\xb2\x04\
\xd8Gq0\xe7\xe4\x93O\x16\xfbZ\x13\xbb\x00,\x5c\
\xb80\x8a\xaa4\xd4\x8a>\xfe\xf8c\xe1ET\xb4+\
i\xabW\xaf\x16\xbb9k\xd7\xae\x1d=z\xb4\xd8\x17\
\x02\xa8\xa5\x09\xd7\xfd\x02\x12\x80\xc8\x00\xa3,2Bk\
\xa0e-\x5cD\xce\xfe\xfd\xfb\x85/\xad9\xee\xb8\xe3\
\x84\x8b0\x10\xc7\x1c\xc0\x981c\xc4\xbefn\xbd\xf5\
V\xb1/\x12\x92\x93\x93_~\xf9e\xe1E\x84t\x85\
\x00\xf8|\xbe\xdbo\xbf]x\x14\x03\xa0\x1f[\xb6l\
\x11\x9eF\x0b\xd4\x16'L\x98 |\x8c\x81K.\xb9\
D\xd7u\xe1il\xbc\xf0\xc2\x0b\x19\x19\x19\xc2\xdf\xb0\
Q\x14\xe5\xd1G\x1f\x15^\x18F\xa8vC,\x02\x00\
\x1as\xfe\xf9\xe7\x0b\x8f\xa2\x02T\xed \xa1\x0a\x9f\x13\
O<Q\xf8r\x00\x1f~\xf8\xa1\xd8m\x18\xd0\x94\x84\
\x96\x99\xd8\x11\x1ahk\x8a\x03\xfa\x05$\x00\x91\xd1\x87\
\x04 //O\xec3\x0c\xa8\xe9\xc4\xd8u{\xed\xb5\
\xd7\x0a\xbf\x22!\xee\x02\xf0\xce;\xeft\xda\xdb\x13\x11\
Q\xd7\xbe\xbf\xfc\xf2\xcb\xf8\xf6\x86O\x9d:u\xcf\x9e\
=\xc2\xf7h\xb9\xf2\xca+\x85wQ\x01M\x01\xbf\xdf\
\x0f\xfe@\xbbDljM\xd4\x02\xf0\xfc\xf3\xcf\x0b/\
b\x03\xda|\x9f|\xf2\x89\xf04\x12\xda\x15\x80\xd7^\
{\xcd\xdc\xfb\xdf\xff\xfeWl\xea\x8c\xac\xac,\xf3\x90\
\xfe\x01\x09@d\xf4!\x01HII1w\xbd\xf5\xd6\
[q\xb9\xe5\x18\xaa\xcf\xb4\x03\xe2+\x00P\xb7\x15\xc7\
\xc7\x95g\x9eyF\x9c l\x8a\x8a\x8a\x12\x12\x12\xc4\
\xf1\xf1\xe3\xf8\xe3\x8f\x17'\x88\x8a\xdf\xfe\xf6\xb7\xc2\xa3\
\x18\x18>|\xf8\xae]\xbb\xe6\xce\x9d+\xd6[\x13\x9d\
\x00<\xf4\xd0C\xe2\xf8x\x00\x99y\xe1\xc2\x85\xc2\xeb\
\xb0iW\x00^|\xf1E\xd8\x05\xed\x00\xb1\x1e\x06$\
\x00\x96\xa6\x0f\x09\x00c8\xc4\xeb\xb9\xe7\x9e\x13\xeb\xf1\
\xe0\x95W^1O\x17&q\x14\x80vG\x13\xc5\x8b\
\x07\x1f|P\x9c&<~\xf3\x9b\xdf\x88#;\x04\xaa\
\xab\x83\x06\x0d\x1a=zt~~\xfe\x80\x01\x03\xc4\xd6\
\x0e\xf9\xfd\xef\x7f/\xce\x11!\x7f\xf8\xc3\x1f\x84\x17\xf1\
T\xbfV\x14\x02\xd0i\xc7cjj*\xb4\xa5N\
9\xe5\x94\xcb.\xbb\xec\xdcs\xcf\x05\x15\x1c3f\x8c\
\xd8\x17\x9a%K\x96\x88\x13\x84G(\x01\xf8\xf6\xdbo\
#\xd2\xf2\xcc\xccL\xe1c\xbf\x80\x04 2\xfa\x90\x00\
\x00\x8f?\xfe\xb8XjCRR\xd2%\x97\x5c\x02\x95\
\xfa\xf7\xde{o\xdd\xbau\xd0\x04\xbe\xf7\xde{\xa1\xec\
\x89\xdd!\x80\xcaWDc\x93\xe2%\x00\x9dv \xb8\
\xdd\xee\x0b.\xb8\x00\xae\xe2\xf3\xcf?\x87\x0alSS\
SYY\xd9\xc6\x8d\x1b?\xf9\xe4\x93\x07\x1ex`\xca\
\x94)\xc2]h\xe0@q\xb208\xf3\xcc3\xc5a\
m\x985k\xd6}\xf7\xdd\xb7x\xf1\xe2\x8a\x8a\x0a\xe1\
\xba\x99\xea\xea\xea\xf7\xdf\x7f\xff\xa2\x8b.\x12NC\x10\
\xc58%\xf0V\x1c\x1c\x82\xe4\xe4\xe4\x993gB~\
\x00\x97\xabW\xaf\x86\xe0A\xd2\x9ft\xd2Ibw\xd8\
D*\x00\x1d\xd7?.\xbc\xf0\xc2\xb7\xdf~[8m\
\xcd\xb6m\xdb\xce;\xef\xbc\x0enfL\x9a4I8\
\x0d\x8fP\x020n\xdc8\xb1\xd2\x06\xd0\xef\xb1c\xc7\
:\x9dN\xb1\xce\x01\xf7\xc2\xc7~\x01\x09@d\xf4-\
\x01h\x17\xa8\x93>\xfa\xe8\xa3\x81@@\x1c\xd9\x9a\x15\
+V\xfc\xe4'?\x11N\xdb\xe3\xba\xeb\xae\x13N\xc3\
.\x02PTT\x94\x96\x96&\x8el\x03T\xdf\xc0\
\xaei\x9a&\x5c\x87\xe0\xcb/\xbf\xecx\xf8\x13\x88\x84\
p\x1a\x06PQ\x15\x875\x03\x15C\xa8\xbco\xdf\xbe\
]\xb8\xe8\x10\x10\xdd\xc3\x0e;L\x1c\xd9\x86\xdbo\xbf\
]\xb8\x0b\x9b\xc9\x93'\x8b\x83\xdb\x03\xda7\xa0\x88\xc2\
ik@\xfb\xcf:\xeb,\xe1.\x0c\x22\x12\x80\xdd\xbb\
w'&&\x8a#[3|\xf8\xf07\xdexC\xb8\
\x0bMMM\xcd5\xd7\x5c#\x8ei\xc3-\xb7\xdc\x22\
\xdc\x85A\xbb\x02p\xe8\xa1\x87\x8a\xa5\xd6\xdct\xd3M\
\xd02\x10G\x1a\xc6\x96-[\x9ex\xe2\x09\xf3\xc68\
\xec\x12[\xfb\x05$\x00\x91\xd1\xd7\x05\x00Z\xd9\x8d\x8d\
\x8d\xe2\x98\xd0tP\xea\x800\xcd\x1c\x10\x17\x01\x80\x9a\
\xa08\xac\x0d\xe7\x9f\x7f~D\xf5\xe5\xc7\x1e{L\x1c\
\xd9\x1e\x10Z\xe1\xae3\x0e\xbc\xd7\x0am\xa9?\xfd\xe9\
O\xa1\x045\x14^\xafw\xfa\xf4\xe9\xc2\x8b\xd6@\xb5\
W8\x0a\x0f\xa8\xcb\x8b#\xdb\x00a;p\xa0K(\
^z\xe9%q@gD$\x00\xa1\x14\x17\xc4\xaf\xb8\
\xb8X8\x0a\x030\xf4\xe2\xc86\xac_\xbf^8\xea\
\x8cv\x05\xa0-\x83\x07\x0f\xfe\xf4\xd3O\xc51m\x80\
T\x13K\xfd\x05\x12\x80\xc8\xe8\xd3\x02\xf0\xf7\xbf\xff]\
\xb8\x0e\x83\xab\xae\xbaJ\x1c\xd6\x86\xcb/\xbf\x5c8\xea\
\x8c\xd8\x05\xe0_\xff\xfa\x978\xa6\x0d\xa7\x9cr\x8ap\
\x14\x09\xaf\xbd\xf6\x9a8\xbe=\x0a\x0a\x0a\x84\xbb\x0e\xf9\
\xf8\xe3\x8f\xc1\xb1,\xcb\xd0\x1e*//\x17[#\x04\
\x8c\xa9y\xd2\xb6\x84o\xd7@\xce;x\xfe`\xcd\x9a\
5\xc2]g\xac\x5c\xb92??_\x1c\x16\x9a\xf0\x05\
\xd4\xb8\x9a1c\xc6@\x93N8\x0a\x9b\xb6M.\
\x93\x1bn\xb8A\xb8\xe8\x8cp\x04`\xdc\xb8qQ\x0f\
3\xed\xa3\x90\x00DF\xdf\x15\x80H\xef\xdf655\
A;]\x1c\xdc\x9a\xf0\xef\x83\xc5.\x00S\xa7N\x15\
\xc7\xb4&%%\xc5\x1c\xb0\x18\x05\x0f>\xf8\xa0\xf0\xa5\
\x0d\xf7\xde{\xafp\xd4\x19`\xc5B\xf5\xab\x84O\xa8\
:2\xc8\x9ep\xd1\x19\x1d\xdc\x1d\xb9\xef\xbe\xfb\x84\xa3\
\xf0\x80\xb6Tjj\xaa88\x04\xe1\x0b\x00\xe4^q\
Lk\xa2\x1b\xc4\xb9s\xe7NEQ\x84\x17\x07\x10~\
k\xa9S\x01\xc8\xcb\xcb\xdb\xbd{\xb7pm\x19h2\
8K\x00m\xfc\x0b/\xbcP\xac\x84\x87\xdb\xed\x0e5\
z\xaf\xaa\xaaj\xc9\x92%b\xa5+Y\xbat)H\
\x85Xi\xcd\xdbo\xbf\x1d\xdd\x93\xb7\xc0-\xb7\xdc\x12\
j\x9c{\xf8\xc3\xd5sss!\x8a\xc4J\xb4\x84\xaa\
t\x87\xba\xea\xb6\xbc\xf1\xc6\x1bb\xa953g\xce\xbc\
\xed\xb6\xdb\xc4Jx\x0c\x192d\xfb\xf6\xedqy\xae\
\x0dZ0\xed\xe6\x90\xdbo\xbf\xbd\xd3\x81\x06\xed2r\
\xe4\xc8k\xaf\xbdV\xac\x1c@uuu\xf8\xfdW\x1d\
\xf3\xea\xab\xaf\x86\xaa\xf1\xf4cH\x00\xfa?O=\xf5\
\xd4\xc5\x17_,V\x22\x01\x9a;\xa1\xe6P[\xb8p\
\xa1X\xeaJB\x8d!\xb9\xf4\xd2K\x8f=\xf6X\xb1\
\x12\x15\xa1\xba\x95\xa1\x0e\xf8\xce;\xef\x88\x95\xae\xa7]\
\xa3\x06\x94\x95\x95\x89\xa5\x0e)))\xf9\xe8\xa3\x8f\xc4\
JkB\xf9\xdc1\x03\x07\x0e|\xeb\xad\xb7\x06\x0d\x1a\
$\xd6\xa3%T\xc2]q\xc5\x15b)rB\xe5\xe1\
\x15+V\x88\xa5\x18x\xec\xb1\xc7\xe6\xcd\x9b'V\xac\
\x04\x09@?\xe7\xd6[o\xed\xa07\xbfSB\x95\xba\
\x88\x9e\x9d\x89\x8e\xa2\xa2\x22\xa8\x94\x89\x95\xd6\xdc|\xf3\
\xcdb)Z\x16,X\x10j&\xb2\x95+W\x8a\xa5\
\xaeg\xfc\xf8\xf1\xed\x0eB\xaf\xad\xad\x15K\x1d\x12J\
\xab\xc6\x8e\x1d\x1bj\xd2\xaaN\x193f\x0cx\x1b\xe3\
\x93\x83\xed&\xdc\x05\x17\x5c\x10K\x15\xfb\x88#\x8e\x80\
\xe8\x12+\x07\xd0\xc1\xad\x9409\xef\xbc\xf3B\xf5U\
\xf6{H\x00\xfa33f\xccx\xe0\x81\x07\xc4JT\
\x84\x1a*\xbes\xe7\xce\xa6\xa6&\xb1\xd25\xac^\xbd\
Z,\xb5f\xda\xb4iq\xe9\xa6\x08\xd5\x17\xb1n\xdd\
:\xb1\xd4-\xb4;\xd4=L\x01\xd8\xb0a\x83Xj\
M\x073\x80\x86\xc3\xacY\xb3:\x1e\x06\xd61\xeb\xd7\
\xafo7\xfc\xa1\xba\xdd\xc2\xa7\xdd\xe796o\xde\x5c\
__/V\x22\xc7\xe9tvpO\xa8\xdfC\x02@\
t\x04\xb4\x8b\x0fz\x10\xa6\x85\xef\xbf\xff^,u\x0d\
_~\xf9\xa5XjM\xa8\xfb\xf0\x91\x12\xea.e\xec\
5\xca\x88HOO\x17K\x07\x10\xa3\x00\x1c}\xf4\xd1\
b\xa9'\xf8\xe2\x8b/\xc4Rk@W\xc4R\xb4\x84\
zx\x22\x16\xcd\xbe\xff\xfe\xfb\x87\x0e\x1d*V\xac\x07\
\x09\x00\xd1\x11\x8c\xb1P}\xa3[\xb6l\x11K]\xc3\
\x9a5k\xc4Rk\x8e:\xea(\xb1\x14\x1b\xd0\x92h\
\xd7\xf8\x06\x83\xc1\xeel\x04\x843\x03e(\xbe\xfb\xee\
;\xb1\xd4\x9axEQt\xb4+\x00\x90\x91B=u\
\x15>\xa1\xa6\xd3\xa8\xab\xab\x13K\x91\xf3\xcb_\xfeR\
,Y\x12\x12\x00\xa2\x13B\xf5\xdbv\xa9\x00\x04\x02\x81\
P-\x80\xd8\xedH\x0b\xa1&\x7f\xefjm;\x90\xc6\
\xc6F\xb1\x14!\x9b6m\xf2\xfb\xfdb\xe5\x00\x86\x0c\
\x19\x12\xc5t\xd0q\xa4]\x01\x88K\x90B\xbd\xa8.\
\x16\x01\xb08\xfd\xea\x95\x90\xdb\xb6m\xbb\xec\xb2\xcb\xc2\
\x1cA\xd1\x16\xa8\xa4\xfc\xeaW\xbf\xea\xe0\xb1C \xd4\
\xab\xf2JKK\xa3\x1e;QPP\x00\x85V\xac\x1c\
\xc0q\xc7\x1d\xf7\xe9\xa7\x9f\x8a\x95\xce\x80\xc0\x8b\xa5\x03\
\x981cF\xa8zt\xf8\xdcs\xcf=w\xddu\x97\
X9\x80+\xae\xb8\xa2\x83\xa7\xb4L\xa2~%$\xd4\
m\xdbm\xef\x1fr\xc8!;v\xec\x10+1s\xc9\
%\x97\xb4;\x88\xf0\xd1G\x1f\x85\x90\x8b\x95.&7\
7\xb7\xed\x1b\x81\x86\x0d\x1b\xb6g\xcf\x1e\xb1\x12\x82\x0f\
>\xf8\xa0\xdd\x97\x9c,X\xb0\xc0|N-\x16\xae\xbb\
\xee\xba\x7f\xfc\xe3\x1fb\xe5\x00\xbe\xea\xec\x95\x90\xa0\xdc\
\xed\xf6\x19\x8e\x1a5\xea\xc0\x97\x01\xb4\xcd\xae-[:\
\xb0H\xdb\xb7o\xff\xdf\xff\xfe'V\x0e\xe0\x91G\x1e\
\xb9\xf1\xc6\x1b\xc5J\x08 Z\x16/^,V\x0e\x00\
\x048\xd4|\x15\x96\x80?\x0d\xd0O\x88\xe5\xce\x95I\
\xa7s\xbd\xf6\xad\x07\xc1@\x00\xc4\xee\x18x\xf6\xd9g\
\x85w\xad9\xef\xbc\xf3\x84\x8b\xd0D\xfd \xd8\xb2e\
\xcb\x84\xd3\xd6\xc48a\xf2A\xfc\xe9O\x7f\x12\xfe\xb6\
\x06\x04O\xb8\x88\x8a\xea\xea\xea\x95+W>\xf5\xd4S\
\x90!\x8f>\xfa\xe8I\x93&\x99\x13\x82B\xeeJJ\
J\x0a\xe7\xf1\x85\xa1C\x87\x0a\xbfB\x13j\xfc{\xf8\
\x0f\xc7v@\xa8\xe9N;}\x10,\xea\xeaW,\x84\
\x93^\xa1\x1e\x04\x0bgf\x94~L\xbf\xea\x02\x8a\xbd\
nXYYYXX(V\x08N\xa8\x89\xd8\x1a\x1a\
\x1a\xc4R\x17\x10\xea.h|;7\xc0\x1c\x8b\xa5\xd6\
\x84y\x0f\xf6 \xa0\xb1u\xef\xbd\xf7\xce\x9b7\x0f\x02\
\x09\xdfW_}\xf5\x13O<\xb1|\xf9\xf2\x8d\x1b7\
B\xd5\x15\xday\x90\xbb\xc0\xdc@\x1dY\x1c\x10\x1b\xa1\
\xfa=\x92\x93\x93\xc5RO\x10]\xd4\x11=E\xbf\x12\
\x00\x9f\xcf'\x96b\x00\xaaob\x89\xe0\x842(\xb1\
\x8c\xbd\xeb\x94\x9a\x9a\x1a\xb1\xd4\x9avo\xdbFM\xbc\
\x04\xe0\xe5\x97_>\xfc\xf0\xc3g\xcd\x9au\xe7\x9dw\
~\xfe\xf9\xe7bk\x17\x13*\x90\xa1:\xca\xbb\x87\x1e\
\x11\x80\x81\x03\x07\x8a%\x22B\xfa\x95\x00\xc4\xa5\xee\x93\
\x97\x97'\x96\x08N\xa8X\xd5u],u\x01\xa1\xec\
H\xf7\xb4\x00\xc2\xafI|\xf8\xe1\x87`\xfa/\xbe\xf8\
\xe2o\xbf\xfdVl\xea.BEQ\xcf\xf6h\xf7\x88\
\x00t\xfa&w\x22\x14\xfdJ\x00f\xcf\x9e-\x96\xa2\
e\xca\x94)\x99\x99\x99b\xa5=\xda\xbd\xd7\xda\xbf1\
B\xdc`\xe8`\x8e\xfe\xd8\x09\x06\x83b\xa95\xf1}\
\xfaLUU\xb1\xd4\x9a0/\xed\xf1\xc7\x1f?\xf5\xd4\
S\xbb\xdf\xf4\x9b\x84R\xa9.\xed\x9a\xeb\x94v\xa7l\
\xebR\xae\xbc\xf2\xcaP\x8ft\x10\x9d\xd2\xafF\x01\x01\
\x0f=\xf4PEE\x85X\x89\x9c3\xcf<\xb3c\x15\
9\xf7\xdcs\xdf|\xf3M\xb1r\x00\xfdx\x14\xd0\xa2\
E\x8b\xda}\x1e\xd8|\x03\x97X\x09A\xd4\xa3\x80\x9e\
y\xe6\x99v\xdfo\x1e\xce\xd0\xa3\xf0y\xfa\xe9\xa7\xdb\
\x9d'\xe3\xb6\xdbn\xbb\xef\xbe\xfb\xc4J\x08n\xba\xe9\
\xa6G\x1eyD\xact\xc8\xfc\xf9\xf3AQR8f\
2A\xa1\x0b\x04\x02~\xbf\x1f\xbe\x17.\x5c\xd8\xb63\
m\xe8\xd0\xa1{\xf7\xee\x15+!\xb8\xfd\xf6\xdb\xdb\x0d\
\xe4\x1dw\xdcq\xef\xbd\xf7\x8a\x95h\x89z\x14\x10\xa4\
l\xbb\x0e&M\x9a\xf4\xe7?\xffY\xacD\x08D\x1a\
\xcbr\xcb\xb7\xb9\x00\x1cr\xc8!a6\xd9i\x14\
P\xfb\xe0\x9d`\x22l\xce9\xe7\x1c\x11q\xad\xe9\xc7\
\xa3\x80\xda\x1dx\x07\x80\xe9\x14.B\x13\xf5(\xa0\xd7\
_\x7f]8m\x0d\xa8\x8ep\x11\x0f\x1e~\xf8a\xe1\
ok:}\xfd}\xc7\xca\x07\xb5\xe0\x8b/\xbe\xf8\xed\
\xb7\xdf\xae\xa9\xa9\x11\x07\x84\xa6\xdd\xd6F8\xa3\x80\xfe\
\xf2\x97\xbf\x08\xd7\xad\xe9\xd9Q@\xa1\x06b\x1cz\xe8\
\xa1\xc2E\x0fA\xa3\x80\xda\x85\x1e\x04\x8b\x0f\x9a\xa6\x89\
\xa5~G(}\xea\xd2.\xa0P\x9e\x17\x15\x15\x89\xa5\
xPYY)\x96Z\xd3\xf1\xa5\xed\xd9\xb3\xa7\x83\xf9\
\xf5@\xf3\xa0I\xf7\xd2K/\x9du\xd6Y\xe1DQ\
\xd4}&\xa1n\xf6\x16\x17\x17\x8b\xa5\x9e \xd4\x1b\x05\
\xba\xf4\x8e\x11\x115$\x00\x91\x01\xadN\xb1\xd4\x1a\xa8\
G\x88\xa5~G\xa89\x7f\xbat\xe8E(\xd3\xb9}\
\xfbv\xb1\x14\x0f6o\xde,\x96Z3x\xf0`\xb1\
\xd4\x1e\x8f?\xfex\xa8!\x98\xcf=\xf7\xdcc\x8f=\
\x96\x93\x93#\xd6\xc3 \xea\xa9 B\x09@|52\
RB\x09@\xbc\x06\xbf\x12\xf1\x85\x04 2\xda\x9d\xbc\
\x17\xe8\x9d\x02\x00M<\xb1\x14\x03\xa1\x04\xa0\xe3\x17\x91\
\xc7H\xa8\x8e\xdd\xb2\xb2\xb2P#D\xa3 \xd4\x94\x0f\
3g\xce\x14K\xed\x11\xaa\xff\xe7\xaf\x7f\xfd\xebe\x97\
]&V\xc2\xa6\xdd\x16@8\x097r\xe4H\xb1\xd4\
\x9a\x9e\x15\x00\x87\xc3\xd1n\xcd\xa0\x1b\xa6\x8f%\xa2\x80\
\x04 2B\xbd\x04\xaa\xbf\xb6\x00\xea\xeb\xeb\xd7\xae]\
+VZ\xd3\xa5\x02\x90\x9b\x9b\x1bj\x0e\xa2P\xe1\x89\
\x94\x8a\x8a\x8av\xa7[8\xf4\xd0C;\x18\x09\xf6\xde\
{\xef\xb5\x9d\xb9\x01\x98>}\xfao\x7f\xfb[\xb1\x12\
\x09Qw\x01\x85\x9a\x13{\xef\xde\xbd\xfb\xf6\xed\x13+\
=\xc1\x8c\x193\xc4RkBM]G\xf4 $\x00\
\x91\x11\xaa\x05\xb0u\xebV\xb1\x149q\xa9\xa7w\x11\
\x9f|\xf2\x89Xj\xcd\xd0\xa1C;\x1e/\x1b;\xa1\
F\x9b|\xf0\xc1\x07b)6B\xbdM\xa5\xe3Y\x8b\
\xb7m\xdb&\x96Z\x13\xea\xad\xe5\x9d\x12\xb5\x00\xa4\xa6\
\xa6\x86\x9a\xc78\xf6w\xda\xb0\x18\x86;\x87\x12\x80P\
\x93W\x13=\x08\x09@d\x84\x1a1\x16\xca.\x84C\
\xa8\xd7\xba\xc6\x0e\xd4pc\x9c\x1e#\x94\x00ti\xf5\
\xdf\x04\xea\xd4b\xa95\xf1z\x1bet\x02\x10\xea\x96\
x\xa8\xa9\xea;%j\x01\x00B5\x02b\x14\x80\xca\
\xca\xca\xf7\xdf\x7f_\xacDN(\x01\x085\xb2\x8b\xe8\
AH\x00\x22c\xe2\xc4\x89b\xa95_\x87\xfd\x16\xef\
\x83x\xed\xb5\xd7b\x7f\xc1a(***\xce<\xf3\
\xcc\xaa\xaa*\xb1\x1e!px\xa8\x99\xe0b|%o\
8\x84j\x01\x80\x09\xee\xf4\xf9\x83N\xf9\xfe\xfb\xef\x17\
-Z$V\x0e\xc0\xe9tv\x5c\x97\x0f\xd5\xbb\x92\x9d\
\x9d-\x96\x22$\x16\x018\xf2\xc8#\xc5Rk^|\
\xf1\xc5X\x1e\x07\xbb\xf8\xe2\x8b;}\x0a\xa1\x03B\x09\
\xc0\xf2\xe5\xcb\xe3{\x0f\x9f\x88\x1d\x12\x80\xc8\x085\x19\
=\xd4\xb9JKK\xc5J\xd8,]\xba\xf4\xa7?\xfd\
\xa9X\xe9\x1a\xc0\xd2\x9d}\xf6\xd9\xd1\x0d\xc2{\xea\xa9\
\xa7BuOE\xdd\xe3\x11>\xf3\xe6\xcd\x0bU\xc3}\
\xe8\xa1\x87\xc4R\xb4\xfc\xe1\x0f\x7f\x10K\xad\xb9\xfc\xf2\
\xcbC\xcd\x0fa\xd2\xee\x14\xfc@\xd4*\x1b\x8b\x00\x9c\
r\xca)b\xa95\xc1`\xf0\xf9\xe7\x9f\x17+\x11r\
\xf5\xd5W\xb7+\x8d\xe1\x93\x9c\x9c\xdc\xee<\xd5\xc0\xd3\
O?-\x96\x88\xde\x01\x09@d\x8c\x1d;6\xd4}\
\xe0\x7f\xff\xfb\xdfb)<\xd6\xaf_\xdf\x0df\x14X\
\xb1bE\xa8\xe7\xd7:\x00\xea\x80\xa1\x9e\x93:\xf9\xe4\
\x93#\x1a\xe9\x185\x97^z\xa9Xj\xcd\xb7\xdf~\
\xfb\xc4\x13O\x88\x95\xc8y\xfd\xf5\xd7\xdf~\xfbm\xb1\
\xd2\x1a\x10\x00\xb1\x14\x82P/\xa5\x8a\xaen\xbb`\xc1\
\x82\x1f~\xf8A\xacD\xce\xa4I\x93\xc6\x8c\x19#V\
Z\xf3\xb7\xbf\xfd-\xd4D\x17\x1dp\xe3\x8d7\xfe\xf3\
\x9f\xff\x14+1\x10\xea5[\x8f<\xf2H\xbc\xee\xe1\
\x13\xf1\x01\xaaxDD\x84z\xa4\x10\xaar{\xf6\xec\
\x11\x8e:c\xf1\xe2\xc5\xa1FL\xb7\x10\xfb\x93\xc0\x07\
r\xec\xb1\xc7\x96\x97\x97\x0b\xd7a\x10\xaa\x12\x07@\xed\
R8\xea\x8c\xa8\x9f\x046\x09\xf5\xa0\x96\xc9\xfb\xef\xbf\
/\xdcE\x02\xc8\xa1\xddn\x17^\xb4&\x9c\x08\x0fu\
E\xd0\xcc\x12.\xc2\x06tT\x1c\xdc\x86!C\x86\x08\
G\x9d\xd1\xc1\xd0#\xd8%\x1c\x85\xc7%\x97\x5c\x22\x8e\
\x0cM\xa7O\x02\xb7\x10\xaa\xad\x0c\xf9\x10ZQ\xc2Q\
7BO\x02\xb7\x0b\x09@\xc4<\xf9\xe4\x93\x22\xef\xb4\
\x012\xb7p\xd4!\x1d\xf8p \xf1\x15\x00\x00\x9a/\
\xdf}\xf7\x9d8\xa0C:0+\xa3F\x8d\x12\x8e\xc2\
F\x01\x00B5\x02\x80\xe4\xe4\xe4O>\xf9D\xb8\
\x0b\x8f\xff\xfd\xef\x7fYYY\xe2\xf86\x84c\xdd^\
~\xf9e\xe1\xba\x0d\xabV\xad\x12\x8e:\xa3\xa4\xa4\xe4\
\xb4\xd3N\x13\x87\xb5G\xf8\x02\xd0q\x03\xe2\xb9\xe7\x9e\
\x13\xee:d\xeb\xd6\xadaN\xa4\x18\xbe\x00\xb4;\x07\
\x94\xc9\x91G\x1eYVV&\xdcE\x85\xcf\xe7{\xe5\
\x95W\xae\xb9\xe6\x9a\xf0\xa7`!\x01h\x17\x12\x80\x88\
\xe9\xf8A\x1b\xa8\x09\x16\x14\x14\x08\xa7m\xa8\xae\xae\xee\
\xc0\xa2\x1dD\xdc\x05\x00HII\x816\xbe8\xa6=\
\xa0vv\xe1\x85\x17\x0a\xd7\xed\x016T8\x0d\x83\xd8\
\x05\xa0\xa6\xa6\xa6\xe3\xe7ro\xbd\xf5V\xe1\xb43\xee\
\xb9\xe7\x1eqL{\xfc\xf9\xcf\x7f\x16\xee:\xa4\x83\xe9\
\x8e'O\x9e\x0c\x86I\xb8\x0b\xcdG\x1f}\xd4\xe9\xfc\
e\xe1\x0b\x00\xf0\xeb_\xffZ\x1c\xd6\x1e\x7f\xfd\xeb_\
\x85\xbb\x10\xfc\xe7?\xff\xe9\xb41\xdaB\xf8\x02\x00\xcc\
\x9b7O\x1c\xd6\x86\x09\x13&\xbc\xf6\xdak\xc2]$\
@\xce\xb9\xf9\xe6\x9b[\x9e\x82\xfe\xdd\xef~'vt\
\x06\x09@\xbb\x90\x00D\xc3\x15W\x5c!\xb2O{@\
q\xba\xfb\xee\xbb7o\xde,\x5cs`\xf5\xce;\xef\
\x0c5}B\xbbC\x0f\xbbB\x00L\xa6M\x9b\x062\
P\x5c\x5c,\x0e\xe6\xacY\xb3\xe6\xb6\xdbn\xeb\xd8\x16\
D\xda\xd1\x11\xbb\x00\x00\xedN\xbfz `Oo\xba\
\xe9\xa6P~\x82\xcd\xba\xe3\x8e;B\x0d\xdf29\xfa\
\xe8\xa3\x85\xeb0\xe8\xa0sl\xca\x94)\xe6\x9d\xf3v\
\xd9\xb0aC\x98\xaf\x1a\xce\xcf\xcf\x17\xc7\x84A\xa7\x8f\
}Ar\xff\xfb\xdf\xffnjj\x12\x07p \xf5\xa1\
}\x00\x17.\x1c\xb5\xa1\xdd\x9b\xe1\x11\x09\x00\xe4\xf9P\
\xbdm&\x0b\x16,x\xfb\xed\xb7\xeb\xea\xea\xc4\x01!\
\xf0x<\x1f~\xf8!\xb4J\xc7\x8f\x1f/\x8el\xe6\
\x8c3\xce\x10\x8e:\x83\x04\xa0]\xfa\xdbt\xd0\xdd\x03\
4`G\x8d\x1a\xd5\xe9H;\xa8\xa7@\xedUQ\x14\
h4t\xf0\xa2\xb1\xfb\xee\xbb\x0f\xcaI\xdb\xc1\xa0 \
\x001N\x07\xdd)#G\x8e\x84@B\x09\xdc\xbf\x7f\
\x7f\xa7\xf7\x0cA\x1b@$\xc6\x8e\x1d+\xd6\xc3 \xea\
\xe9\xa0\x0f\x02*\xb9`X\xc5Jh@_\xb3\xb3\xb3\
\x07\x0d\x1a\x04\xdf\x90F%%%`\xe6:\x1d\x9f\x03\
W\xf4\xd6[o\xb55.\xa1\xd8\xb8qc\xc7\x8fA\
\x9c|\xf2\xc9s\xe7\xce\x05\xc9\x81\x90\xe8\xba\xbe\x9b\xb3\
b\xc5\x8aP\x0fU\xb4\x05\x04 \xd4\x03\x07\xedr\xff\
\xfd\xf7\xff\xfe\xf7\xbf\x17+\xa1\x81\xf8\x81\xb6\x85\xd7\xeb\
\xdd\xb3gO\xc7\x133\xfc\xf0\xc3\x0f\xc7\x1f\x7f|\xdb\
\xd7\xa3\x82\x00\x84\x1a\x9e\xdb.\xcf>\xfbl\xa8\x1b\xc2\
\x07r\xe4\x91GB\xfcgdd\xa4\xa7\xa7\xc37\x94\
,\xc8\x93\xd0\xf83\x9fj\xee\xe0\x09\xb2c\x8f=v\
\xc9\x92%b\xa5Ch:\xe8\xf61u\x80\x88\x94\xbf\
\xff\xfd\xef\x22\x06c\xe3\x91G\x1e\x01\xdf\xda\x1do\x13\
\x97\x16\xc0\xbb\xef\xbe+\x96b\xc3\xedv\x83\x09\x13'\
\x0b\x9b\xb8\xb4\x00L:\x98\x803\x16f\xcc\x98\x01f\
N\x9c#l~\xf7\xbb\xdf\x89\xe3c\xe6\xe9\xa7\x9fn\
[M\x8e\xa8\x05`\xd2qGP\xf8@\x9d`\xe7\xce\
\x9d\xe0\xe1\xb8q\xe3\xc4\xa6\x03\x88\xa8\x05`\xd2\xe9\x9b\
\x15b\xe1\xfc\xf3\xcf\x17\xa7\xe9\x0cj\x01\xb4\x0b\x09@\
\xf4\xc4n\x05Z\x86\xd3\xb4[M\x8e\x8b\x00\xc0\xaeu\
\xeb\xd6\x8d\x181B\xacG\x85\xcdf\x83z\x96y\xa2\
\x88\x88\xa3\x00\x00w\xdey\xa78>N\x9cp\xc2\x09\
P\xd9\x14\xbeGH8cf:&99\x19Zx\
\xe0U\xdb\xa1\xa5Q\x08\x00\xd0\xf1\x8d\xe5p\x987o\
\x1eT\xbaM\xdf\xda}\x12;\x0a\x01\x00^}\xf5\xd5\
X\xe6\x96\xe8\x00\xc8\x12\xe2\x1c\x9dA\x02\xd0.$\x00\
1q\xf5\xd5W\x8b|\x14!S\xa7N\x05\xbb,|\
1\x8cv\x9f!\x88\x97\x00\x00\xe5\xe5\xe5\xe7\x9dw\x9e\
\xd8\x14!PG\xfe\xfc\xf3\xcfM\x7f\x22%\xbe\x02\x00\
<\xf1\xc4\x13\xb9\xb9\xb9\xc2\x97\x18\x80\x06\xcd\x03\x0f<\
<\x8d\x96\x8e\xef\x96w\xcc\xa9\xa7\x9e\xbae\xcb\x16\
\xd3\x9f\xb6\xaf\xb4\x8dN\x00\x800\xef1\xb4\xcb=\xf7\
\xdc#|\xe1\xb4\xfb\xb0wt\x02\x00\xacZ\xb5j\xca\
\x94)\xc2\x978q\xe6\x99g\x06\x83Aq\x82\xce \
\x01h\x17\x12\x80Xy\xe1\x85\x17\x22z\x19\xa4\xa2(\
\x7f\xfa\xd3\x9f\xc4\xc1\xcd\xb4;\x1dP\xec\x02 \xcb\xb2\
\xd8\xcd\xf9\xd7\xbf\xfe\xd5\xf1c\xaem9\xc8(DJ\
\xdc\x05\x00\x802\xff\xc7?\xfe\xd1\xe1p\x08\xbf\x22\xe7\
\xaa\xab\xae*))\x11\xde\xc5\x06Di\xa8\x07\x03C\
1p\xe0\xc0\x83Fg\xb6\xadkG-\x00\xc0;\xef\
\xbc\x13\xe6[\x12[\x986mZ\xdb1\xacg\x9cq\
\x86\xd8}\x00Q\x0b\x80\xc9+\xaf\xbc\x12\xd1-\x84v\
ILL\xbc\xf4\xd2K\x17-Z$<\x0d\x0f\x12\x80\
v!\x01\x88\x03555P\xf3\x0a5}q\x0b\xe3\
\xc7\x8f\x7f\xf0\xc1\x07\xdb\x1d\x01\xbdm\xdb\xb6\xb6\x93k\
\x82\x99\x13\xbb\xc3@\x1c\xd3\x1a\xa7\xd3)v7\xb3g\
\xcf\x9e\x9bn\xba)\xd4`\xa4\x16\xc6\x8e\x1d\xfb\xfb\xdf\
\xff\xfe\x9bo\xbe\x11\x87E\xcb\x7f\xfe\xf3\x9f\xb6\xf61\
\xa2!7\xa1\x00\x0b~\xff\xfd\xf7\xb7[K\x0d\xc5\xdc\
\xb9s\xff\xf2\x97\xbf\xac_\xbf^x\x11'\xea\xea\xea\
\x1ez\xe8\xa1C\x0e9D\x9c&4\xb3f\xcdz\xf8\
\xe1\x87\xdb\xbe'\xb2\xed\xfd\xff+\xaf\xbcR\xec\x8b\x0a\
0j\xcf>\xfbl8\x91\x03\xe6\xf8\xe5\x97_\x16\x87\
\xb5\xa6m\xdf\xfd\x84\x09\x13\xa2\xee1;\x10\xa8\xee\xfc\
\xe2\x17\xbf\x186l\x98\xf07<\xa0\x01q\xddu\xd7\
\xbd\xf7\xde{\xba\xae\x0b\x8f\x22\x01\xf2s\xdb\xb79\xfd\
\xe4'?\x11\xbb\xad\x0a\x8d\x02\x8a'\xabW\xaf\x86\x0c\
ZZZZYYYQQQ]]\x0dU9`\
\xf0\xe0\xc1`\xf5N8\xe1\x04\xe1.\x04P\x0b\x83\xa2\
\xab\xaa*\xd4\xd3'O\x9e\x1c\xd1;\x17\xdb\xedc\x85\
\xba\x12x(VZ\xf3\xfa\xeb\xaf\x7f\xf1\xc5\x17\x10T\
\x10$\xc0\x1c?\x93\x93\x93\x03\xdf\xd0\xf2\x88\xbd\x9av\
\x10-\x1e\x8f'\x10\x08@x\xc0\xff\x88\xc6\x11u\
\x0a\x5c\xe0\xa7\x9f~\x0aQ\x07\x86\xb5\xb6\xb6\xd6\xfc\x86\
8\xcc:\x00\xb0\xce'\x9dtR\xa8Y\x1c\xe2\x054\
k\xbe\xe4@\xd2\x83\xa1\x04\x92\x93\x93!\xe9\xa1>\x0e\
\xdfP\xa1n\xdb\xd5\xd3BAA\x819`)==\
=777\xd4<\xcf\x91\xb2c\xc7\x8e\x8f>\xfa\x08\
<7),,\x04\xff\xa1\xaa1j\xd4(H\x85\x05\
\x0b\x16\xb4{\xa7\xb7\x05\xbf\xdf\xff\xc3\x0f?@\x0c\xdb\
l\xb6\x94\x94\x94\xf0\x07J\x85\x09x\xbet\xe9\xd2\xed\
\xdb\xb7C\xaa\xb5$\x1f\xd4Z222 \x90\xf0\x0d\
@\xf2\x1dv\xd8a`\xfd\xc3\x7fX!\x14\xd0v\x84\
4jjj\xd24-!!\x01jl\x916\x95\xfa\
\x1f$\x00\xfd\x84v\x05\x00$\x04J\x94X!\x08\x82\
h\x0dM\x06\xd7\x9f\xe9\xf81\x1c\x82 ,\x0e\x09@\
\x7f\x86\x04\x80 \x88\x0e \x01\xe8\xcf\xd8l6\xb1D\
\x10\x04\xd1\x06\x12\x80\xfe\x0c\xb5\x00\x08\x82\xe8\x00\x12\x80\
\xfe\x0c\x09\x00A\x10\x1d@\x02\xd0\x9f!\x01 \x08\xa2\
\x03H\x00\xfa3t\x0f\x80 \x88\x0e \x01\xe8\xcfP\
\x0b\x80 \x88\x0e \x01\xe8\xcf\x90\x00\x10\x04\xd1\x01$\
\x00\x04A\x10\x16\x85\x04\x80 \x08\xc2\xa2\x90\x00\x10\x04\
AX\x14\x12\x00\x82 \x08\x8bB\x02@\x10\x04aQ\
H\x00\xfa\x09\xf7\xde{\xef\x84\x09\x13Z\xde\x93\x95\x94\
\x944}\xfa\xf4\xdbn\xbb\xcd\x5c%\x08\x82h\x0b\xbd\
\x0f\xa0\xbfQ[[\x0b2\x90\x90\x90 \xd6\x09\x82 \
B@\x02@\x10\x04aQ\xa8\x0b\x88 \x08\xc2\xa2\x90\
\x00\x10\x04AX\x14\x12\x00\x82 \x08\x8bB\x02@\x10\
\x04aQH\x00\x08\x82 ,\x0a\x09\x00A\x10\x84E\
!\x01 \x08\x82\xb0($\x00\x04A\x10\x16\x85\x04\x80\
\x08\xc2\xa2\x90\x00\x10\x04AX\x14\x12\x00\x82 \x08\
\x8bB\x02@\x10\x04aQH\x00\x08\x82 ,\x0a\x09\
\x00A\x10\x84E!\x01 \x08\x82\xb0($\x00\x04A\
\x10\x16\x85\x04\x80 \x08\xc2\xa2\x90\x00\x10\x04AX\x14\
\x12\x00\x82 \x08\x8bB\x02@\x10\x04aQH\x00\x08\
\x82 ,\x0a\x09\x00A\x10\x84E!\x01 \x08\x82\xb0\
($\x00\x04A\x10\x16\x85\x04\x80 \x08\xc2\xa2\x90\x00\
\xe1ib\xb3~\xa1J\x82\xa7\x88\x0d\xfa\xe26\x05\x96\
\xd6\xa9\xa0i\xda\x94\xe82|\x7f\x7f\xef\x01\xf0\x96\xdc\
\xc8\xe2JK&\xc9\xdb\x970\xbb\xd4S\xb8\x00\x86\xbc\
k\x93:W\x94cE\x0e@\xe7\xee\xdf\x90\xbf\x7f\x15\
=\x8fz\xd3\x9fI\xf8\xd1\xea\xe7L]\xe1t\xd0g\
\x96\xf0\xad\xbf\x1f\x1eJ\xb1>g\xb5:\x98\xb3S?\
\xacg\x0d\xa7\x8bpN\x82\x87A\xf8\xa8_6`(\
\xa0\xd3\x5cPM\xd7m\xfd\xf1\xd3\xaf\xcb\x04\xb7\xf2\xb5\
\xd7\x82\x87\xf0\x06\xaa\xd3'\xe3\x87\xc4\x9f)\x9c\x0bC\
\xcd;\x08wQ\xe3\xce\xf3\xb6C\x95\xfd\xd0\xb9\xe5\xfb\
\xe4}\xaf\x84\x86\x82\x994\xaf\xcc<\x0fr\xf4\xed{\
\xdd\xd1\xf9\xdav\xe09\xee\x22\x1c\xe6\xe6\xe5\xa9\xe6.\
\xac\x86\xef\xed\x13\x1ahF$\xab8]\xd4\x01L\xd7\
\xd3\xbe7\x22R-xF\x9a\xa3b\xf6\xf3\x08\x0c\xb5\
@\xfb\x96\xef\x12\xae|-\xe3(K\x99\xac\x09\x9f\xa2\
\xd0\xf9\xde\xc2k\xa1e\xfd7@\x1a:\xc7\xea\xa8\xd0\
sMm\xe7\xf8j\x1ah\xeeg\xc8\x07\x1d\xbb\x88>\
G9\xa71\xa9\x19p\x8e\xfc\x8dsK\x18\x7fiY\
\xf7,\xcd)\xa3\xf7\xd6q\x11B\x06q-z\x98\xc9\
9\xf6\xee\x99C\xe7\xaei6\x9f\xdb\xe4s[\xed>\
\x17\xe1\xc7\xc0<\x87\xdf\x1c\x90\xa9>\x07\x888z=\
\xb2\x01\xf8li\xaa\x1f\x02}\xc7\xa0s\xef\x9b\xd0\xbc\
b1x>\xb9\x1a\xea\x0b\xae\xa2u\x1c\xcd\x15wC\
\xe7\xbe?\xc1x\xf7\x01P$\x1f\x7f>\x13\xf3\x0f\xa1\
l\xc8\xd28tl\xfe\x1e\xab\xf3\xe3:\x1c\xb9\x8b\x97\
\xbcz\x89\xbc7\xad\xfa\x02\x04\xc7:S|\x95\xa9\x87\
\xf0\xe1\xd6v\xeap\xdb\x0a{\xad\xa4\xd5G\x96\xf56\
\xd1\xe3\x92u\x1a\x8f\x22?\x7f-y\x8f\x0f\xcfj \
gam\xb5S!\xe6\xf5Z\xb1\xf3\x08\xa8\xaa\x0a\xfe\
\xf1Q\xc2\xa7e\xdb\xbc\xa1\xe8\x18\xfb\xa4\xc1\xfd\x91\x11\
\xfaL\x85\xa0\x8b\xf0s\x0f\xb5C\xad\xf8+\xf3\x7f\xce\
\x84\xe6\x9a\xaf\x90\xbf;D\xedr\xa7\xc2\xb4\xa9\xc9\x9a\
\xed\xeb\xd5\xe1\xc6\x0a\xcd\xaco2\xfb\xbe\x13\xdeqc\
\xb9\x06\xef\x9f\xd2\xe0\xc9\x0d2\xaf\xe1\x88\xeeSf\xf5\
N\xb5lPZ\x0bX\xac\xc3\xe3\xeb58\xd6\xe5\xa7\
\xeb\xacg\x09\x8f\xcb\x06D\x04\x87\xb2\xd5\x8f\x8a\x7fv\
?el\x9cg\xb2\x10\xcf\x08\xee\x09\x9d[\xbfO\xf5\
9\x95m\xca\xcf\xa7Q;\x00\xf9K\xcb\xea'!4\
\xde\x9b\x92\xdc\xc3t\x02u9\xe6\xd7l!\xfa\x1cm\
F\x8b\xb3Xug\xf3JT\x9a\x17\xaf\x11\xee\x11P\
t8\xd8\xa3\xc2/v\x87`Q\x85\xcc\xe5]1\x7f\
/\x9f\xd7\x87/ |\xe5';\x15\xd8\xd5\xa5\xc0\xa8\
\x1c\xa6qD\x11as\xf0r\xb8\xb0\x81\xee\x13\x84{\
wn\x7f\x85p\x95\x99\x9c\x9fOg\xb5\xdbh\x1b\x10\
\xbb\xb4\xa5\xe6\x09\x08\x05\xc7\xb2B\x7f\xa9\x06\xfa\xd2u\
XXn\xf5_\x9b\xc3\x0f\x8c%\xddL\xf4\xfc\xd66\
\xab\xb7\xa6}?\xc4\xbe1\x9e\xfe\x10,;\xd4\x07\x7f\
\xdc\xd2\x07o\xec\xf1\xc1\xd2\x03}p\xba'\x08\x11]\
7\xfd\xcc\xd9\xe2ku\xc1\x10\xe1v\xb7\x22\x8fCW\
\xed\xcf\xac>\x1c6\xdeRO\xf5\xf9W!0\xd2\xee\
h\x9f\x8b}\xdf\xdb\xd4\x815H6]n\xab\x83\x9a\
_\xaa@\xb5\x97\xf5e<\xcf\x17\x11\xdb+%\xe6\x80\
\x0b|\xcf\x91\x88s_\xbf\x1c@\xfb\xbc\xa9\x12\xe1\xe7\
\xbf\xa3\xbe\x15\xf4\x9b{\x97\xda};3\xa8\x1d\x1a\xf0\
\x0df\xfaT/\x890\xf7\xb7\x1c\xef\x0f\x139Wi\
\xaf{\xd6\x9f\xcb\xb2Eo\xaaP`kK\x0a\xeb\x0d\
\x1c\x0a\x11KpaA\x91\xfc\xd0\xb5\xf9\xdb\x84\xa3\x5c\
\xc5s\xdd\xa7\x99z\x1de\xbe\xb9\xe6I\x08\x8cvg\
\x85\x8f\x01\xcf\xb1\xa6\xc5 v\xa8n\xf3\x09\x0a\x0e\xa3\
\xc2\xb5%\x1a\xacoUh\xbf\xa2\x5c\x87\xc8\xab\xba\xdc\
!\xf6_\xcc\x91\xc1\x1e2\xf5\x22\xa7\x80\xd6\xfa1y\
\xf7,\x99\x09\xde\xca\x07\xc17\xdc\xcd\xea\xf6\x1d*\xea\
\x11\x1bw>3l\x10\xbb\xd2\xf2\x99\xd3\xbaR.\xeb\
\xa8\xe77\xb7\xca\x9cs\xe4~/\xd9l\x89e\xa7\x1b\
\xd8\xa3\x8f\xc6Cw\xbeB\xf3}=<N$8K\
C\xc14h]\xfb\x0c\x04}\xbd\x99>\xd5K\x82\xc9\
\xb9\x01\xeb\xdb\x0d\xea3d\xba\xdc\xaa\x1d\xc6\xcf\xaf#\
\xfa|]\x93L\xf9\xcd\xe5\x82\xcbZ\xd6m\xbeJ\x9d\
\xf0\xf3\xce=\xaf\x99\xb5\x1c^\x9b-J\xf9\xf9\xca\x07\
\xc1O\xf8\xb9\x93\xed.q=x?\x0f\xf5\xeb\xb0\xa8\
\x0ccA2\xef[\xa70\x9dN>F\xff\xf9\xbe\x8e\
\x14\xcd\xe0\xc8\x22\x5cn\xd7k\x87\x90\x0bM\x09\x10;\
\xf4\xd7Dw\xe7\x99\xf9[fN\xd9\xc7\xd3\xa1i\xf5\
\xd3\xe0\x1f\xed\xe0\xfe|gB\xf8K\x90\x8fR\x7fK\
\x99\x8d\x97\xdbc\xf7D\x9fW7\xca\xd4F3c\x11\
\x97\x09.g\xbd\xcer\x96U\xe8\xde\xfb[\xab^i\
)\xcf\xab\x119\x8be\xf7\xc0\xf8h\x9f\xe3\xf5\x81\x88\
\x87\xd6\x8f\xe8\xcc\xafX\x18\xed?\xc7\x1eU\x0b\xcb4\
8\xd4\xed\xdc<\xf2\xb4#2\xf5=\xde2\x0b+\x8e\
\xaa+A\xe8E9'\xfc\xfc\x1c\xcfm7\xf59\xb1\
C\x9bW?C\xf4y\x8f\xa3\xe7C\xd8\xe3\xfe\xeb\xda\
D\x9c\x88\xcd\xb8\xc8+\xb4\xf89\xf2\xf6\xf5-lF\
\xb2S\xaf%\xed\xb8\xccd]\xf8X5C\x83\xee]\
\xbf\xa4\xfe\x16\x0f\xef_]/\xf4:\xea\xf3\x92\x9b\x88\
\x9c;\xdf\x0e\xc59\xe0\x18\x0f83\xa4\xc3\x02\xca\xcf\
\xf5\x98\x5cE\xe6W\xdc\xd7\x99|?\xbc\x5c@\xc4\x01\
\xbd\x80\xa6\x0a\x22\xee\xdf\xb3\xffuh\xf8d\xd6y\xf5\
M\xa8\xd3\x9bj\xbe\x0a\xfe1\xe7\xf3\x16\x04>\xb7\x1b\
\xdaQ\x9f\xdbr\xb8\x0a-\xff\xf9\xf5\xa5\x1a\xac\xc5\xfe\
\xa6\xae_\x99\xed\x81\x97\xc1:\x98{\xbd\xaeB\xdf\x9e\
\xd7h.\xae5\x97`\x86U\xdbD\xf4\xb9o\xa4\x0f\
\xd0\x15\x97h\xef\xdb\xb4\xc3\x96\xd3V?,\xecP\x9d\
\xce.\x98U,\xfa\x9ec\xdc_\x83\xb3=!\xd3n\
M\xffy\x81\xc9\x0fA\xf4\xb3\x01\xf1\xa1=\xaf\x00\xac\
<\xb9)\x5cbj\x9b\xea\xfa\xd4\xfd\xc1\x0c\x81\xc6\xfd\
5\x19\xfa\x0f\xfc\x89\xf9[luD\xa2\xb6\xa3\xb1\xea\
K0:\xdc\x93\xe9S\xbd$\x04\x0f\xa3\xfe\x96\x0a>\
\xc7\x84\xe7\xd6\xe6\xf1Y&\x0bKU\xa8j\x9aZ\xde\
b\xe5P\xf3^\x0a\x84_\xe9\x9a\x04\xa1\xb1.\xf0\x0f\
6\x81<\xde\x0b\xba\xae\x10\xddj\x98\xf5eS\xeb\x17\
!\x7f\xd3\xc8\xfd\x98\x99\xaek\xd0\xbb\xe7\xdf\xa1\xae\xc0\
\xb2A\xeb9O\xc7\xfe\x1b\xde\xb2\xdb@\xf6\xb5d\x05\
o\xc1\x9c\xfdc\xfd\x06\xdcXf\xcf\xaf\xb5z\xdf#\
??\xde\xaf\xd0\xfe\x1dSz^\x116\xffL'\xb6\
\xd0\xc0\xb9jh\xac\xfc<x\x8b\x17\xd2ZI\xef\xb2\
|\xf0\x16^\x03M\xc57@s\xf5\x17!\xd0}\x90\
\xec\xb1\xfa\xd4\xaew\x84\xf5=\xcaE\x08\x7f*\xc6\x89\
\xfa\x0e\xff\x17x>\xc9\xe3v\xa8\x95\xaf\xe8]2\x13\
\xda\xd6|\x05t\x7f7\xdfS\x9d\xc9\xe7\x04\x0fA}\
\xbe\xb1C\xa7s\x15\xf3m5Dy4o\x91\xcd\xaf\
F\x7f\xcbT\xf9\xce\xed\xf9\x08\xd8#g\xe0\xd4Rh\
$\xf2,\xfa6\xd0z\xf6%3\xf8L\x9f\xe8\xba\xe0\
6\xb2\x8f\x0e\xd4/\x07U\x0e\xc2\x94\xf4_\xcbqY\
\xd75\x05zv\xbd\xcc\xf2\xcf\x97N\x03\xd1\xb7]\xc8\
zS\xe9-\xa0\xfa\xdb3}\xaa\x97\x84\x90\xf5\x9d\xdd\
:\x9b\x0dP\xc8fk\xb0\xde\xb2:\x8d\x8b\xa2\x1dz\
\xb0#\x94\xda\xde<\x93:\xaf\x08\x04\xc7\x06\xa1\xa3\xea\
!\xc2\x0f\xaf\xe4\xeb|\xf1Z\xf7s\xfc\x19\xc0z\xc9\
\xae\x1d?!<zjzj\xe4\x92\xac\xdbm \x94\
\xf3\xfeC\xffEkP\xcf-\x9d\xce\xedQ\xa6\xd7Q\
\xce\xdbV?\x06\xcax\xbb#zRO\x08q-a\
\xcc?'r^!r\xb7X\xfe\xb9\xf01\xdeD\xbe\
\xbe\xd2#\xf3\xfe\x08\xe9\xbd\x1e\xfb\x1a\xe3\xbe)\x05\xc7\
\xa0e\xcd\xd7L;\xdf3\x09Y\x17\xfa\xa6\x9e\xfb\x05\
\xba\xb6~\x17\xb4\xd0`z\xe3\xf84v\x98;\xb6\xa9\
X\x7f\xf4\xb7\xf4\xecc\xf5\xfet&\xd8\x92if,\
\xb4\x9e\xdc\x8b\xe6\x8a\xdbAE~\xee\xf08y\x84\xcb\
\xee\xde\x1e\x9d\xce\x92\x16u\x9eyEVN\x17\xd6\xcd\
\xed\xe9\x96\xac\xf9\xa6S\xd4\xcbA\xacs{\xcd\xe3Q\
=a'\xd3\x8f\xad\xc1\xb6\xbf\xb29)WB\xd7\xbe\
7\xd2\x1a\xb7\xcb\xb5\x9c^j\x1f\xa1>?\xf8&\xf5\
\xb7\x88\x99\xa7\xf6\xfe`m\xd5\x8f\x82:\xde\x96\x15v\
(\xf6VY\xdb\xaa\xb3^\xf8\x5c\xc6\xc5\xdc\xd8\xd9<\
\x8fkc\xcb\xd4\xd6\xeb\x0b?5\xce\xde\x1c\xac+\xb1\
\xd5\x98'\xd0C\x99\xf3z6\x7f9\x0f|]{\xd3\
\xb6\xcfF 7\xf2a\xec\xfe\xf3\x9e\xbd\xbf\xa1y\xb9\
\xf6\xdaP\x9a\xbb\x88\xfd\xedV|\x0e\xa4\x91\x06G\xe7\
\xbc\xd9\xfd\xd1\xb5\x9d\x06\xd1\xe7\x96\x8fe\x16\xb7A\xf1\
c\xaci\xde\xd6*\x81n\xf0\x9e7Su~\xdc\x16\
\xd5\x95\x00\xb4T}!:\xb7\x22\x01Y?W`\xf9\
~\x9b\xab\x1f\x02E\x0d\xa5\xef\xbc\x1dz\xcf\xe3\x81\xd8\
O\x07\x8e\xbd\x07\xdee\xb3LNn\x8f\x87\xb6U=\
\x0c\xf2XS\xa6O\xf5\x92\xc0}\x5c\x0b\x1b\xb0\xbd\xcb\
\x80\x85\xdc\xaf\x18[7\x87\xfa|}sf\xea\xe6\x04\
\x17\x18\xeb9B\xd6vf\xfc\xf2}\x91\xbe\xb2tf\
[\xef\xa1\xb4\x9d\xb7S\xf5[<\xc0\x99\xed\xfd\x07\xdf\
v\xe8,3\xc7E\xf0G\xda\xaft\xf9gx\x0f\
j\xe7_+\x9e\xe3\xfa6\xc2\xcf\x97k\x17\xa8\x9bc\
\xb3\xc3\xb6\xb6\xc8\xa0f\xc8\xa7 8L\xe3\xea\xe7\x93\
\x9e_e?\xd8\xfe0\x13z\x8f/M\xdfyg\xc1\
\xfd\xbf Dm\x02\xd1\x81\x03'>\x00\xcf\xb2Y\xa6\
\x1f\x00{\xb8P[\x94\x1c\xad\x95\x8b!4\x5cG\xed\
p\xa7\xeea\xc2FF\x9d~\xa0\x17\xe5\x9c\xf5\x9d3\
{\x10q\xff\x22\xfa[\xaa\x9b\x14r)F\xc6lj\
\xfc\xbb:\xd9C\xbd\x85\xf3S0\xe7$\xda^\xf5\x92\
\xd7\xba\x0d\xaf\xa4\xe7\xbc\xb3X\xd6#\xdc\x87\xd4\x7f\xe8\
\x0d\x1a'\xaa\xe7=\xd9\xe9\x1c\x02\xae\xdb\x9bW|\x16\
\xe4\x91s\x8e\xbf\xc6\x08\xefy\xb8\xb5\xc3\xa0\xfd\xb8\xac\
\xfe-\xcc\xaf\x88s|\xe7\x15k\xb0\xb5]\xcbx\xdd\
\x1c\xae\xa5&\xfb\xc9\x9a\xe7\xa7X\xd6\x99\x9d\xda\xb9\xf1\
\x1b\xe99o\x98\xa2\xbc\xa04@\xd7u\x18\x22\xfa\xdc\
[0\x8b\xe6\x9f7\xf0\xf8\x84\xe0\xe8\xcd\xa8\xcfG\xea\
\x1d/\xe7\x08\xd6GT\x83\xf9\xa5:\xf3\x9d\x17\x8b\xb8\
?\xfb\xfc\xa6\xe5:\xd4\xb6)\x0e\xd1M\x11P%\x1f\
Y\xf7\xd9)\x93sK\xd6\x89]\xb5\xf6\xa94\x9dv\
\xe6\xd7N\xe4\x01\x89\xe7\x0ey\xc6\xf8\xd8\x10\xb4z\x8f\
\x82\xe7\xd4\x1ehk<\x0e\xa1\xc0\x985\x17\x0e\xc2\xb4\
\xde\xbf\xff\xf0\x7fB\xe3\xa7\xf9\xe0\xb1\xc98\xe5}\xc8\
[V\xde\x07\xd2\xe0)\x87\xc8\xc6\xc4\x10\xbd\x9a7\xb7\
\x1bp\xebr\x1b_1\xf3\xcfu\xb8\x8e\xf0\xf3\xd5M\
\x12\xedk\xe1\x8cx\x00\xcb\x8bnZ6/\xa5|\xdd\
\xc3}f\x9d[\xbf\x99\x9e\xb3v\x00\x87\xa5\x1d\x1b\x0c\
\x0d\x94\xb1fh?\xfc\x11\xb4U\xdeg\xc6\xd3\x04\x8f\
\xc3\xfe\xa0\xadU\x8f@\xfb\xd1b\x90\xc7Z\xa9>\xf7\
\x14\x5c\xc9mP\xab\x7f\x0b\xd5\xe7\x15\xb7\x81<\x9c-\
\xfa<\x0cG\xfbU3_\x91\xf5\x02\xd5M\xbf\x22\xda\
\xa7\xdb\xdadp\xd2\xa5\xe0\xb9\x84\xc9\xfdj(\xb9)\
\xe9\x19\x9c\xb1z\x1d\x9f\x9d\xd3\x9b^K\xd3yg^\
\xef\xe9\xb2\x0f\xba\xf7\xfc\x0a\xbc\xc5\x8b\xac\xb8\x82\xc8Y\
\xb1}\xcc\xea\xfbg\x92\x9f\xbb\x01\xbc\xcb\xe6\xd09\xed\
\xf5Q~\xc5i\x84\x9f\xdfG\xe4\xfcl\xd6\xf4\xb6\xdf\
\x8c\xfc\xbc\x9c\xe9\xf1<\xdeCt\x0e\x8f\xfb\xa3>\xdf\
\xd4\xa6\x99\xf1P\xa7@\xec\xc3\xdd\x87\xdeN-_\xa7\
\xf7{&\x0c7T\xa6\xf1\xbc\xa7~\x1d\xc5\xdf\x0d\xfa\
\xba\xa0\xb5\xf2~6\x87\x85\xf7\xa7\x98\xec\xdaxcz\
C7\x96,\x02i\xe0\xa4\xa3\xe4\xe2B\x08\xf3\x99R\
\xa7\x86tXX\xa6\xb2x?\xea\xf1b\xab\xef9\xd6\
_l!\xfc\xdc\x91\xe0\xfa14\xd2B\xf6\xd6<\xb3\
\xff_*8LS\xd1|\x90\x86\xcfd\xfa\x0aS\x8e\
\xe0\xa0\x17\x9aW=\x02f\xde\xa1\xd8\xc7\xe2\xd0\x03\xe2\
h]u?\x84\xfa\x8f;b\x9f\xba\x14\x0c\xea?7\
h^b\x1e\xed\xdf\xc2k\xfd\xb9\x0f\xfdZ\xf2\xf5u\
\xcd*\xed#\xeaDX\xb90\x12tl\xf9.\xd5\xc5\
\x93\x9e\xe5sQ\x0eC\xee\xe3\x86o\x82\x9e#\xf9Y\
V\x0e\xa2\x04-k\x9e0c>l\xb6\xd6d\xf2\xe3\
b\xd7\x87\xfcN\xc1l\x08\xf4\x1e\xa39~\x197\xdb\
&\x80\xc8\xc9B\x19\xa9\x1b6\xa8>\x9fm\xda\xa0\x8a\
\x19/\xc2\xfc\xae\xbd\xedAG?\xb3\xa2\x86\x0e\xf7\xa7\
@\xd7.\xb2\xfe\xd8G\xe7\xfc\xfc\xf4\xc9\xca7{N\
f\xd0\xbcwE\x0a8z^C<\xa03y\x0d\x03\
z\x0f\xbf\xc5\xeb\x82\xa6'i\xcbO\xa7\xb9\xe9\xc3\xe7\
\x8a\x99\xef\xd4\xa1\xcb\xc4\xea,\x22P\xd5\xac\xc3\xf5\xe5\
V\xac\x9f\xf5>g\xfd\x5c\xae+Q\xa9>\xc7>\xff\
\x8e\xees-r\x8f4\x05\xfav\xff\xc2\x8cS'\xa2\
\xd7\xa9\xac\x93\xe7\x04s\x03F=+\xa7\xa6fc\x8a\
\x80\xb1e%\xd0\x03M+\xee\x85:\xbe>\xe7\x12\xb5\
o\xcc\xba\x17\xc2\xd5\xab\xbf\x02*\xce\xd0\x9d\xc2\x1c\xa8\
x\x10\xa1\xfa\x5c\xa3\xfd\x88h=Q!\x9bK!z\
\xa1/(\xd5aw\x97|^n\xb8\x13\xc1j\x94U\
\xe8\xde\xf5ofn\xae\xa8=JDWa?\x87\x81\
\xa3o\xb3\xfaS\x9c\x01\x1e\xc9\x8dz\x0a\xbc\x9e\xd1\xe6\
\x0d4oE\xf8X\x92\xe5xt\x0f\xfct\x1e\xf8\xba\
\x0f9\xcc7\xc7\xeb\xe6\x22\xacO\x11\xd6\xcd1\x1bT\
\xe8tf\x87^G\xf8\xb9\xdd\xdf\x228B\xe6\xfd\xe8\
\xd10\x9fA\xec\xc5\xb0\xfbU\x1e\x9f\x9en\xf6\xd5\x11\
z\xc7^\xe3h\xc6=DM\x9e-\x1f\x12ykS\
\xd9\xed0\xe2\xad\x89z\xc6\x1du\x13\x93\x00\xde\xbf\xee\
}\xbfO\xda\x8e9\xdfN\x9d\x09\x8d[_s\xd6:\
\xf1\xdc\xd7#}\x1a\x9d\xab(\xea\xe6\x04O\xc7\xb8?\
\xe6\xa5\x1f\xeb\x96\x1c\xab\xc3\xed`v\x96\x0c\x83{_\
\xa1\xf1\xba\xfa\x8f\xa7G\xd7\xdb\x11YnZ\xf1 t\
lx\x06<\xcbf\xd3\x19;\xecgX\x9eK#\xe6\
D\xd2X\xc8UD\x97\xcf\x83\xb6m\xbf\x8c\xda\xc3r\
\x85\xbb\x08\xa0\x9e;[\xfeX\x8a\xe3m3x\x0f\xa3\
\xa7\x1d\xb5^x\xff6v\x18\xb0\xa0\x8c\xdb\xa0\xbcw\
\x8b\xe8\x81\x8e~\x98U^\x85\xd5\xe28L\x87_\x08\
\xba\x16\x84\xbe=\xafZz\xfa\xe3\x19Q\xfbr\xdb\xea\
/\x822\xe2\xa5{\xf7X_=\xb1\xa1\x96A\xc7\xee\
\xd7\xe0L\xcd\x8bp\xa2\xf2y\xa8[\xfb}\xe8\xd9\xff\
\x06\x8c6\x14\xc3H\x7f\xb3\x99\x07\x1f\xceQY\xc7\x1e\
-\xcd\xe5w\xa64\xde&t\xca\xe9\xc2{\x1ca\x9b\
R\x7f\x1c9N\x0e\xeb\xb4\xae?\xb6\xef\xdc\x9c\xa20\
\xcc#r\xee\x1dLOMB*Am\xc50\xcbW\
\xef\xdd\xf9\x12\xe5)gE<[\xd4\x08\x14\xcc\x80\xf6\
5_!\xdcFr\xd6\xbe\x9aa(r\x10Z\xca\xef\
H\xb9\xac\xe3\xfb\x9d^v\xa7#\xd6\x1a\xfd(5-\
\xbc_\xae\x99\xdf\x22\xe6\x96k\xac\xcf\x7f\x8b\xe6\x88s\
\xbd\x14hN\xa3:\x0e}{\x7fm\xf6\xc1\xb0\xebr\
\x8c\x87\xb4\xd5<F\xf3;\xc0\xce\xb9]\xd0\x9eO\xcd\
\xcb\xefI=\x87A\x9bh\xf9\xc3\x8e\xe8Kw\xb4\xdf\
<\x5c\xb1\xe5oY\x07\xf2\x96\xc3\xbdh\x87NM\
\x8f\x88d\x81\xfa\x1c\xe5\x1ck\x9f\xebM\xdb\xd3\xe2\xe8\
\xad\xab\x1e\x00]\x0f\x99\xb3\x1eY\x1d\x87\xf3\xafk*\
\x80k\xd1\xb4\xf1\xfbQ\xeb\x95*Yo\xaf})c\
\xf2#\xfc\xe7\x1b\xdb\x18?\xa7\xbd\xb8D\x0d4\xef\xb5\
\xb8\xb0\x14\xfb\x1494\xeeo\x83\xe9?W\xfc\xd0\xb7\
\xffW`\x8fk3\xbe\xc8>o\xa9~\x94\xf0\xf3\x86\
h\x1f\x8a\x0b\x0bd9\xfa\xcf\x14\xa6\xb4N\x91\xca:\
\xfah=\xeb2\xb7\xde\xe4\xef\x1e\xe9\xe7s\xcfyN\
\x0b\xd6\xcb\xd1\x98\x11\xd6Y\xa0>\xef\x0e\x82a8_\
\x1eD\xbf\xc8\xbe\x03\xbfe=G\x96\xda\xfd\x8a<\x96\
_\xf5(\x18Z\x08\xec\xb3O]D\x03\x97$4t\
\x0e\xbcE\x8b\xac\xba\x8a\x04}\xec\xe6\xfc\x0br4\x96\
\xdf\x03\xb2\xafkJ\xd7\x5c\xf4\xa4E;tk'\xca\
\xb9UK\x94Wd\xf5r\xb9\xa5\x1c\xeb\xe6T\x9aw\
\xefd\x99\x10\xbe?\x8d\xe8so\xed\xafX_.\xd1\
sd\xa9U\xab\xde\xba\xea!\xaa\xcf\xa9\xefD\xfc.\
8\xc2-\xe0(\x84iO=\x05:7\xbd\x105\x7f\
+1\xdf\x8b\xe8\xa73\x0dz\xf7\xfe\x9e\xf6+\x99R\
Y\x8f0\xff\xc4\xfe>\x83\xd5\x87\x8a\x1e\xa2\x85\xa2G\
\xb4B\xed\xd0C=,N\xe4t\x84\xb9\x0f\xb0\xb1\xf6\
\xd7|\xb6k\xb4\x1e\xa2r\xbe\xfa+\xa0\xcb#\xae.\
\x9f\x04\xc2\xdcv\x91F\x9b\xc0St\x0b\x97\xf3\xc4\xb8\
;\xb5G?\x9e\x06-\x15w\x83<\xde\xc9do\x0a\
\xb5\x0b\xd6\xcd\xa1>G\xbf\x22\xab\x0b\x15\xba\x9c\xd5D\
\xd3\xbe\x16\xadD\xce!\x9c\x15\xfes\xac!m\xda\xfe\
\x1f<\xbe)jv\xad\xf5n\xad\xfa\x02\xc8c-Q\
\xfc<\x1b\xae+S`\xfb>\xd3\x89#\x8d\xd5\xb4O\
\xb1\xbd\xbf\xc5\xa5\xe2\xa9V\xdd\x06\xe7?\x85\xf3\xc0\xcf\
s\x03\x22\xfc\xfd\xd3\x8d0\x95\xdd0\xec\xe8\x16us\
:\xe3\xe7f\xfe9\xcb\xcb\xdd\xda\xaad\x81>\x8f\xf0\
\xf8n\x18Zv\xbfE\xd6x6\xed\x19%\xe2E\x82\
g6\xaf|\x08\xb4`7\xe3\xe7\xe27]Y\x9f4\
\xb0ft\xe8\xcc\x12\x22\xef\xd7\xf3u\xe5yD\x17\xd5\
\xe7\xd3i?\x0c&\xe7\x0ba\xa4\xbed\xcak\xc3\xd1\
\x7f\xbe\xb3\x9b\xd5\x13\x89Z9\xda/\x97\xe7+b\xdd\
\xdc\xe6\x16\xc5\xa1Yh\xd1@yU\x88>o\xde\xfe\
:\x93q\xb2\xc7\xd6\xc7\xf0\x96\xb6\x9aGi<\xd4\xf5\
\xb7$\x0ef\xef\x1b\xe0\xeb:@l\xcb\xbb\xcc\xb9\xb9\
\x97\xe2\xe8x?ZW}\x01\xa4\xa1\x06\xfa\xfbS\xdd\
gwW\xb7\x0e7W\xf0x\xbf\xadn\x8e\xcem)\
Sag\x97\xcc\xfb\xce9_&\xf0\x1e4\xefx\xc3\
\xec\x09\x18UsA\xf4O{\xf5c\xa0\x85\xfa\xcd\x18\
\xbe\x93\xf3/\xb3\x01\xb8~\xb24\x0e\x03G\xde\xa2>\
[\xef'\xb3\xc9\x9a_i\xfaX\x18\xb7\xb9\x12\xea\xc9\
\xd7[V?\x09\x83G\xff\x02\x86\x1a\x9aR\xfdB\xfd\
\x13D\x9fon\xe3\xfd[\xccZ\x22>W\x91\xe8\xf3\
\x9b\xcbu\xd8\xd2\xe1\xfc\x9a\x1a!\xaf\xaa\xe4\x87\xd6\xdd\
o\xb2:\xf6X\xff9Yw\xec\xd5\xa8\xf0Y#\x8e\
\xee\xdb\x9dE0s_1\xb7=8\x08\xa1\xbe\xc3\xe0\
o\xdf\x01}'\x97@\xe7\xc1w\xa0\xff\xf42\xf0\xb5\
\xef\x82`\xffQP\xa41\xca-#S\xec\xe5\xc2\xbf\
\x87\xf3\xe60/\x97\xe9r^\xef\xcfs\x00\xd0\x7f\xbe\
\x89\xf0s\xcd\xf1\xfc\x9c\xf9}\xb1\x86\xab\x8d\xc89\xd6\
\x8b6\x14Lc\xf5\x16\xa6M\x84\xbd\x8cQ\xce\xdb\x5c\
\xde\x92b\xc4\xd6'\xe0\xb2\x86\xc3\xa2\x17\x16\x08\xf3\x89\
~`\xfaZ\xa6\x22\xcfY\xc4\x0f\xc9I\xec&\xfc\xdc\
\x9c\xabX\xcc\xea+D\xbd\xff-\xc4>\xddF\xfb\x14\
\x81\xa3eB\xc4\x034%H\xf4\xf9\xffe\xf1\xbc%\
\xacF\x9d\xc6\xb1\x85\xff\xbc\xea\x11Pq\x06C8l\
\xc5\x04\x1c|].\x92\x87x\xf6j\x9aY\x9c\x08\xfd\
\x89\xf9\x85\xd1us\xd8\xe7\x7f\x0b\xd1\xe7\x99\xee;7\
\x19\x88\xb8W\xeb\x9e7\x19?\xbf@\xec\xbf\xb9\xf2A\
\x22\xe7\x1d zR9\xf9\xd9u\x91:\xa0\xfc\xee\xeb\
VaA\xb9\xad^\xae\xd0\xaa\x9b\xc3>\xa2\x1b[\xe4\
\xac\xd9\xe3i=\xfb\xbew\xb8\x8cO\xb3\xf9\xbe\x98\xac\
cozy\xb4\x95\xc6?\x5c\xeery\x80\xf6\xd0#\
zm]\xab\xce\xfaq\x15\xb1\x1a\xd1\xbc\x22\x11\xff\xc7\
\xb9-\x1al\xa6}D\x9d\x9d\x0fb\x97\xd9\xae\x03\x7f\
\xa65A\xde\x8fg\x80\x98\x91\xe6\xe1\xb6?\xeasy\
\xd4\x13\xe5o\xc9\x027\x92\x8b$\x81\xf2\xbb\xbbG\xa5\
}\xce\x19/\x8f\xae\x9b\xbb\x89\xc8\xff\xaeN5+|\
ox\x8e8\xa7\xa2m\xdf\xdb6\xdbsZ\x14oi\
E\xde\x12\xe8\x05\xdd\xd5\xe5\x97\x1dP\x9f\xe3|s\x16\
\xf7Wh\x1eW^!\xd3\xef\xf8\xf5\x15^\x99\xf6)\
\xca\x06\x99@Y\xef:\xf86\xd4\x7f\xc2\xf8\xb9\xd7^\
\x0b\x8d\xf5\xa1\xe5\xf7Bp\xe0\x0c\xf5\x81\x89\x9a\xb8l\
\xb8.\x17\x89\x83\xf6\x9d\x0b\x1bpj@c\xf1P^\
\xeb?;jn\x8b\x0a;;\xe4L\x9f\xea%!\xe6\
\xa4\xe3<\xf4\xb6\xc3\x1f\xb1:\xe8%\xd1\xba\xdcK>\
o\xc4\xd9Q\xbe\x96\x0c\xccJw\x91I oY\xd7\
\xca\xfbZ\xd8\xe7\xb6p[\x14\xfd-\xeb[\xd5\xac\xf1\
\xb7\x84\x0d\x05z\x0e\xbdg\xf6\xb0\xf0\xc4\xf8[\x9a\x88\
\x9c\x87\x06\xcfq9we=\xd7!\xfc\xcdx\x9c!\
\xfa|Q\x85\x15\xeb\x9fc\x9b\xad\x88\xfd\xe8vu\xa9\
\xbc\xe7\xa83\xe3\x87Q1\x0aC\x87\xee\x13\xcbh\x1f\
o\xb3\xce\xc2\xd6\xf3\xb8\xa9\xec\x0e\x90\xc7{h\xdc\xc0\
\xb0\xd5\xf2\xbb\xc8]\xb0|m\x03\xd6P~\xcelP\
V7\xc7\xeb\x89x_\xe8\xb5-\xce\xcf?\x17q|\
\x9c\xa9\xd3{\xe4\xaf4Nt~]\x00\xf9\xbc\xe2\x1e\
\x08\xf4\x1dg\xbd\x06];\xf4\xb2\x01\xca\xef\x89A\xec\
Sd\x9b7\x87>E\xecYD^\x17\x91\xaf\xef\xef\
T\xb2B&h\x9f\x22C\x83\x8ec\x054\xc6\x7f\x8e\
\xf0qoL\xfdzc\xf9\xdd\xa0K>\xeao\x09S\
\x9d\x9e\x1d\xf6\xf5\xe5\x0a\xbbO\xcc\xf4\x05Ct\xec\xe3\
BG\xec\xef\xe3\xbd\xde\xd0\xa63;\xb4H5g\x9e\
\x8bc>\xd1\xe7+\x1b\x15S\x1e\x9c*\x13f\x1d4\
\xf6\x9d;\xfe\x919s\xc8\xcc\xf3\xe7\xbc\xc5[v\x1b\
\xf8\xfb\x0eG]\x8f\x1b\xfbw6\xa2d\x98\xeb&\xa1\
\xa3\x84\xec\xdb_\xed=\xa5\xec97g\x06Us\xee\
\xf9\x9cB[\xec\x1f\xeb\xe6\x88\x9c\x9f\x1dT\xb2\x82\xc3\
\x8a\xeb\xeb9SNsA\xeb>\x8e\xae\x9b\xf3\x16L\
\x87\xe6\xf2\xdb\xc1\x08\x0dZk\xe1L\x93\xc3E\x0c,\
\xbd\x1c\x86\x86!\x0d\xfe^\xa7\xc3\xcfv\x04\xe0\x81\xca\
\x00\xdc\xb2B\x81\x1b+\x14\xb8\x81\x1c\x9fY\xa9\xc0\x0b\
\xeb\xc7\xe1\xdd\x93*\xec\xe8TA\xd343\x8fk}\
\x9b\x067\x94\xdb\xe6o\x15Z\xfa\x1c\xfb\xb7\xacm\xd5\
\xb2&O\x9b\xceH;\xf97:[\xa7a\xa9U\xaf\
%d\xbd\xa9\xfcV\x08\x0d\x9cd\xfc\xdc\xe5-\x8e\x87\
\xb0\xbb\x84\x9c\xcb\xaa\x06?\xdc\xa9\xc2\xdcb\x85\xc75\
Y\xef\xdb\xd8\x03}\xe4\xac\xdeY\x85{V\xaap\xaa\
W\x82\xbd=,Nd\xfa\xce\x0b\xf9,Q\xec\xc7E\
\xbe^7\xe4|\xff\xb9\xc8\xa7\xc4\xa3\xe7T\x19\xd1\xdd\
3bzZ\xcc\xa05\xbc\xcd\xe5\xb7\x11\xf9\xd6\xcc\x9a\
\xe9\xf8\xfe\x86}\xef\xd4\xcd:a\xb0\xed\x8d\xd8;:\
\x1c\xb1\xbe\xe6>C\xc9\x83\xe5\xe7\x85\xa9\x0f|y\x83\
\x04\xb7\xae\xd0\xa2zj]H\xceMy/\xb2\xd7\x80\
2\xbd\x1d='\x97\x1d\xe8W_\xd5\xa8f\xc5L\x07\
<ECW\xa0\xe7\xd8\xdf\x00\xe7\xb7\xd4\xf3Y\x80V\
\xdd\xdc4\xda\xeb9\xd8{\x90\xe7+\x86\xe3\xaa\xb5\x88\
\xb5u4M\x01\x7f\xff\x19\x18i(\x87\x81\x13\x1fA\
\xf7\x91\xff\x81\xc1SK`\xace-\x04G\xdb\xa2x\
\xa3\x8b\xe4@\xd7[7\xe0\xcd\xfd>\xb8\xa6D1m\
\xc99\x97\x90s\xbb\xbe\x17>DV7\x17c\x87\x96\
+p\xb4[\xe1\xf5|\xce\xbf_\xb8\x1e}gJ\x89\
>\xbf*jf\x94\x99\xafXv+\xe8\xd2\x10\xcb?\
O Vd\xe7\x89\xc3\xcd\xdb\xc8s\xf3Y\xf0|2\
\x87\xf0\xa4+y\x0d\x13\xef\xa5N\xfe\xbe\xf7\xd3k\xa1\
}\xdd\xb30>\xdc\xee\xcaz\x12\x10\xb6\x94fD\xe0\
\x83\x132\xe5\x22\x13\xf1\x95D\x0e\x94{\xec;\xb7\xae\
U\xcd\xf4\xa5^\x12\xa6\xadM\xe4o\xa8\xbe\x9c\xce\xcd\
\x89\xae\xc3e<\xbd\x89\xd8\xa1R\xff1\xf2\xb3\xf1?\
\xb7l\xa6\x05{.B\xa3-\xd0\xb6\xfde\x22\xe3\xb3\
/\xde\xc3\x87\xef)\x8d\xc57\xc2\xc0\xc9\x0fi\x1d\xc8\
T\xe8\x0b\x91w\x1c[\x07{\xbe?\x22;\x0cq\xa1\
\x8f\x0e\xb6\xfbh\x0f\x96\xfcB^\xf3\x96\x22YG\x7f\
\xcb\xee.-{\xe2\xfe\x11^S\xb4\xf69:w'\
Z\xfe\xa6Sy\x93}\xed\x09\xc7\xfd\xc3\x5c6d\x7f\
\x1f\xb4\xd6<\xc6\xe3Q3.!\xeb3\xa0n\xc94\
6\xfb\xaa\xe0J\xe8\xda\xf7:\x8d\xdb\xa6\x1b\x96<\x87\
i\x1d\xf0xP\x81\x1d\xcd\xe3\xb0\xe2\xe4(\x1c\xef\x0a\
\x80\xa6\xea\xe6\xcfd\x03P\xcft\x8f\xabp\xdb\x0a\x16\
\xd3\xcc+N\xad^G\x1f\xfa\xde\x8e@v\xac\x07\xb7\
GU%\x04\xed\xeb\xbfn\xd6\xcb\x89\xd7\xe6\xf2; \
\xd4s\x90\xc6\x81\x13\x8d\x89\xd2x\x94*A\xc7\xe6\xef\
\xd0^H\xf5f_\xb5\x89\xe7~\xa1\x1d\xec]j\xfb\
\xb9\x82\xaba\xf0lQt\x1e|\xca\x96 \xda\x8e\x18\
\x0f\xa9\xb0\xc2#\xc1\xe3\xeb\x88\x1e,\x91\x19'-f\
5\x92h\x9b\xbdX\x1b\x82\xfd\xbdFV\xd4\x9d`\x9e\
\xe9;'\xb4K\xda\x9f\xc9p\x98\x87jT\x904\xe7\
\xefs\xc2'BgVx\x97s]\xcb\xe6\x155\x11\
~\xae\xfa\xbb\xe8z%\x93\x93\x8b\xcf\xc8\xc0\xb1\xbf\x9c\
\xcf\x8f\xe2\xe8\xc3F\x8f\x82<\x18h;\x94r{\xd5\
\xe2)8K1\x02\xdf\xda\xa6@~\xb12\xe1\xbd\x9d\
Cg\xae\xa9\xf0\xdeIb\x8b9<\x07\x1be\xf0\xa9\
u\x12\xcd\x1dO\x87\xac\xe3^q\x0dY\xab\x9d\x1d\xd9\
\xd2':lr\xf6\xde\xbaU\xd0\xb5\xe5\xdb\xd0\xb3\xe3\
\x87 \x0d\xd7\xb394I\xc6x\xd5`?4U\xde\
Gg\x1b%\xda_S\xcc\x05\xeb\xdc\xf5\x0b\x1a\xcbM\
\xb5\xac\xe3\x11T4\xf8\xee\x0e\x16\xff\xa3>\xe7\x89d\
\x9d\xcfa\x9b_&C\x85Wst_x\xcf\x90\x02\
\xd7\x95]\xda\xaf\x98\xb0^\xe7s\xd1\xdf;\x9e\x0d\xb3\
\x5c\xce\xcf{\x08\xd3y\x05\xe1\x0b~/\x91\xf7\xf6w\
\xd4\x12\x9d<+\xe1y\xeb\x0d\xa2/\x1b\xce\xe0,\xbf\
\x8b<;\x83)\x95u\xc1\xbf\xcb<\x0a\xcc-\xb6b\
\x81\x13\xc9\xba\xdd\xd7v3\xe1\xc1>\xc9\xb9>\x88\x82\
:\x99\xc7\x83\xd2\xa0\xd3\x8b\xd9Z\xcc\x22\xef\xfd\xf5-\
\xce\x8f\x1f\xa5\x0b\xf6\xe7\xa3i\xc7\x9f\xcc\xfa\xd3\x84\xf4\
\xba\xfd(\xb8\x0a\xfa[\x0f\xa5T\x97R\xdf3y\xbf\
g6\xaa\xf1\xf9(\xb8\xbc\xbf\xbcOs\xacm\xf6\xbd\
u\xfd<_%M\x1c\x86\xcb\xfb\xe7V:_\xaf\xa7\
\x0bv_z\xf3\xba\xaf\xb3\xb9\xca)\x90u\xccW\xe8\
=U\x92\xd2~ax\x9eCA\xa2\xd3K\xe2\xdc\xeb\
\xb9\x8e\x7ft\x8dBx\xb1s\xfa\xb2\xd1\xd6G|\xaf\
zd\xf9HZe]\xecs7\x97\x8c\xa7\xe7Zb\
|\xbf\x90$\xd7H\xe79\x1a\x9a\x0a\xed\xab\xbf\xccb\
EI\xceWf\xb2>\x0d\x06O\xbc\x9fb\xbe\x0ep\
\xbc_\x9eT\xbc\xfcBz\xfd\xde\x0a\x09Z\x87\x9d\xb5\
\x87\xb3\x1c\xad0,\xae\xe6\xe7Y\x98.\x9d\xce\xebH\
K\x82i\xbb\x8e\x0b\xf2l\x07\xcaz\xd80\xa0s\xdd\
\xd3\x9c\xbf\xa4@\xd6\x09g\x1f=S\x90R\xce\x80o\
\xb5\xa5%h\xc6\xbf\xe3\x95\xf5\xdb\xcb$h\xe8w\xce\
\x1e.v<\x5c\xff/\x97\xf7\x99u\x9f\xe9\xe2/\x98\
'sk\x99?\xe9\xf3\x8e\x95gY3\xa0\xd5\xa7\xc3\
\xbeN\x09\xde\xd9\xd1\x05?]\xd9\x08?^\xe1\x85w\
w\xf5\xc2\x91^\x19Z\xc7\x0dPu\xdd\xf6;\xd1\xef\
3\x95\x10~\xeb\xa3\xd5?5\xe7\xaa'\xaf\xd7gB\
\xe3\xc9\xcd)\xe7\xeb\xcd\xc3\xba9\x9b'\xae\xfbL\x8e\
\xc5\x952t\xfb\x9c\xe9s{u\xdb\x08\xcf)O\x97\
\xcf\x91\xf9b\x1e\xa9\x96\x92>W\x91K\x88\xfbQm\
\x9b\x06\xdf$\xf6\xee\xad\xe5\x92\xd5/\x8c\xf78\xc5\xcf\
\xe7\x92\xbf}\xebr\x05\xfe\xb56\x04\xab\x9b\xe4(\xdf\
o&\xf5}\xff\xd9\xe5\xd0P0\x93\xce\xa3NV\xd6\
\xbd\x85\xd7\x814\xd6\x9ab\xbd\x1e\x81\xa0\xac\xc3\xbc\xd2\
I\xe4B\xd9\x8fB\xd6w\xf3\xc9\xf5*h\xba3c\
)8\x1f\xf1b~\xa5\x94\xe8u\xf2\xfa\xa3\x9d\xc9?\
\xebx\x1f\x06C\x06\xfcd\x8f\xc2{\xca\x88\xb9I*\
\x8di\x08]$\xfe&\x9d\xabT\xc4r|^\xd9\x19\
\x82\xa1P\x98\xda\x87\x99Dh\xf8\x1cx\x8ao\x88\xca\
\x0fN\xf4h\xdf\xf8\x02h\x9a?\xe5>\xc7\x10\xd9/\
\x9f\xdd\x18\xdf^/\xb8\xc1_O;\xd7\x0f\xd3\x17\xc0\
\x9e\xfeJZe\x1de\xed\xd3\xba\xc4d\xdd\xca\xe3\x8e\
@\xbbO\x83\xa76h<?\xcd\xd2\xe3B\xa7\xb3\xcf\
\xf9^\xc29\xa45\xcbZ\x81\xa7\xc9\xfd\xeb\xf7\xab\x19\
\xcd\x8d\xd55\x05Z7\xff\x90\xf3\x98\xe9\x93\x9csb\
\x1d\xf5\xdc&E\x0e4\xd8P\x99\x92\xbeJ\xa6}C\
\xd4\xc0\xa8d\xc0\xf3\x9b\x15\xaa'&\xb3g\xcf\x11\xba\
\x86\xbc>I\xd6W\xd1\x9d\x1b;\xc5s\xfb\xf6\xd6P\
\x1a9\x8cJ\xeb6N\xf4%'\xeb\xc3A\xdc\x1fQ\
G+\xf1\xed\xad1|\xf2\xd1\xb5:\x04\x95\xcc\xcd\xb3\
\xa6\xf3\x07\xfc}\xd0\x84\xf38\x13\x98QHs\x16\x08\
Oo[\xf3U\x96\xaf\x10I\xfeZD\xed\x82\xa2\x85\
\xe1g\xbb\xc4~xi\xdb\x94\xd5\xe1(\xb4\xf7&\xd6\
\xe9\x9c\x1cp\xf6\x9cp\xdc\xd3+\x9bt3\xd6\x9b\x0e\
Y\x7fq\x8bDs\xe3\x13\x81\xa8\xe9{\xe7H\x90r\
\xf0\xb8}aQ\x07\xd3?\xaf\xef\xf7\x83fd\x86\xcb\
\xb0>1\x06\x8c\xb7o\x85\xc6\xe2\x05\xe7\xf5\xf1\x9d\x8c\
\xef\xa5u\xf5\x13 \x07\x87\xa2rk\x93\x01\xae\xf1\x08\
\xd1\xe7\xdf\xdb\xc6\xe6i2}\xad_\x92\xc3\xcc\xe6\xb9\
\x82\xff\xb4I\x87\xa3\x03\x1a\xeb\xe1\xe7\xe0\x1c\x01\x5c'\
IQ\xe1+\xebY\xbf\x96\xbc\xc2\xe4\xb9\xbbU\xab\xa1\
\xc1\xdd+\x15\x18\xf5\x07\x13\x8ew\xe0}\xf0\x0c\xe1\xf9\
(I\xe7\x1a\x8b\x9a\xc0\x9b+B\xd0:\xa6&T7\
\x97*`\xde\xc1X\xdbF\xc2\xdd\x17\x99\xf2.\xfa)\
\x89\x19yt\x86\x18\xbe~\xccf\x154\x14\x5c\x05\xad\
+\xee\x03\xc5\xdfOd*\xf9g\x95q9\x03\x02Z\
\x04^\xdc\xa6\x99={\xec\x1c\x05e\xff\xc1\xd5:\xbc\
~@\x82\xc5U2\xdc\xbe\x5c\x85[\x09\xe7\xbds\x85\
\x02\x0f\xadQ\xa0\xb8.\x08\xb2\xa6\xa5`E\xd2\x0f\x91\
\xef\xd46\xa2\xc2\x83\xd5*\xb3\xf7\x92\x96u\xd1\xb3\x0e\
{\x90*tfz\xa2\xba\x07\xf3\xa6_\xd9\xc7uM\
q\xf2\xe7\x85z\x08\xe5\xfd\x0f\xbb\xc62\xca\xdbYN\
e\x18\x02C\x1eh\xdf\xfe*x\x0b\xaf\xe5\x9c\xe6\x7f\
\x9b\xbd\xdb\x1bD\xce:9ZJo\x83\xbe\xe3\x1f\xd0\
\x99W\xa9\xaaI\xc7\xf7\x18\x93\x0d\xf8\xd1n\xcd\xea\xc3\
\x86=\xd9L\x8e\xa2\xc27k\x15\x18\x0ajT'H\
\xc4fm\x1fS\xa0a@\x81>\xbfF\xfd-z8\
\xf3\xf6\xfeda\xcf\xe1\xc4\x9e\xa1\xf3J\x14\xcb\xceK\
H\xc6\xd9\xbaa\xde\xd0\xeb\x87d\xba\xaf\x19\x89\xe6J\
\x91\x7f\xbd\xe3*\xf5W\xce*L\xec\x9c\xce;\xe8\xde\
\xac\xc0-\x15*\xf8\x03R\xc6jj\x22\x22o\x96\xdb\
\xdd\xa1\xd0(t\x1f\xf9+\xb4\xaf\xfe\x124\x97\xdc\x04\
\x9e\xc2y\xd0R~\x07tm\xfc\x06\xf4\xd7\xd7@\xd8\
\xd0x=\x13\xab\xf9K\xb4f7b\xab\x19\x0f\xaaa\
xi\x17\xe6+\xa9\xa6\xef\xca\xb4\xf9\xc9\xfd{p\x8d\
\x0aAY\xb5\xe5wB\x14o\xb2\xe2\x16\xd9!\xeb\x02\
\x22\x8e\xba\xafS\x85\x07\xaa\x95\xa8\xde\xb9\x93\x96\xf5B\
\xf6{8#\xe3\x83\xd3*\xe1\xc4\x89\xdb*b-\xf7\
w+pCY\x1a\xfc\xa2\xe4\xbd\x8e\xf4\xc8\x8e\x98=\
g\x97\x19\xcc\xab\xc4\x1e\x1dX\xd7\xad\x1b:\xcf\xb3L\
u\x1ec\x98\xf1\xf3\x1d\xccWk\xf7\xb9\xcc\xe63\xc1\
\x9f\xdf*\xc1hH3\xfb\x80\xe4\x12\xcc\xf5&\xffZ\
F\x15\xb8\xbf\x1a\xed\xb8\xf8|\x1e\xb4wz\x99\x06{\
\xdaC\x84\xb7\x18\xacgg\x82\xf7\xc9\xe0qN\xec\x03\
9\xb7XMx\xaf\x99X\xd65(o\x90\x1d\xe17\
\x988\xc7!\xf5\xfd\xd0\xf0\xbd\xd0\xf7\xf6\xe3\x9dLw\
\xcc\x15\xfeY\x9b\x0e\xb8\xa3R\x83\xf1\x10\x9bO%\xea\
\xc6s\x09\x96\xac\xb3g_\xd6t\xea\x9fy\xb1V\x86\
\x05\xa5\x8a\xd9\x17\xc0\xf2Y[\xfb\xdd\xb5%*<\xb1\
F\x82wO\xa8\xd0\xe9\xd3\xa2\xf7\xb8\x04\x87\xa4\x88\xfb\
]\xd9\xa8F\xfb\xd3S\xa8\xd7\xff~:\xe4\x88\xfbh\
\xe7\x15\x13\x1d\xc9\xbd\xbf\x15\xa3\x18\x0f\xc9\xf0\xab\xddA\
\xc8/V\xa3\xe2\x0f,\xde\xac\x11}\xae\x80_\xd6\xc0\
>\xb7\xcc\x09\xfa ]\x88\xd5-\x01Y\x87M\x0d#\
\xf0\xad\xea\x1eX\xbcZ\x85;W\xe9pw\x8d\x06O\
\xd5\x8c\xc3\x87\xfb\xfa``\x5c\xa7\xbd0\x22\x11\xeb\xbe\
%\x7f\x12\xec}V5\xa1l\xf2^L\xa9\xe40\xc4\
\xeeZZ'\xc1\xe50\xb0\xc8\xcc\x1fR4\xf8\xd1\xb6\
]\xcf<\x9b\x8f\x19\xebG\xd1f\xbfw\x95\x06!\
\x22\xe7`\xd3U\xb9\x8e\x08\xd7\x01\xd4v\xb2\xd5\x10#\
oC\xdb[!\xf6\xb8\xa2\x1b\xcc\x06\x8fy.R\xd5\
S_\xbc\xef\xd6v\x0d\xae+\xb6\xe5\xbb\xa4\x90\xc3l\
\xeaHm\xfd\x9aS\x81\xd7\xe8\x0bJ\xf0\xdb=\xe3D\
\x9f3\xbfr\xac\xdexa\x8b\x04>I6{\x8f\x89\
\x1ep\xb9\x0e;w\x8f~\x85\x0b\xe6\x87G\xef\xc1\xa9\
\xd1\xeb\xe2o\xd6\x0fcLNcz(\x05\xfeu1\
g\x0f\xfb\x1b\xb4\xf4\x0e\xe5\xf4\xfd\x14\x5c\x12\xe3'?\
\xda\xe6\xa79DV,\xce\xe20\x9f#\xb6\xd9HP\
1c\xb0N\xca\xfd\xbf\x5c\x80k\x8d\xdc\xf1\xab\x1bd\
\xcb\xff\x9b$G\x17\xbc\xff\xa9\x0d\xe8\xe7\xc8\x8e\x1ed\
\x89\x02\xaf\xcd\x17\x92\xe0\xd7\xbb|\xcc\xbe\x8f\xf1\xdbb\
\xdd\xf4\xd77\xcb0.)Q\xfb\xb3\x8b\xa9\x85]\xb7\
\x944\x88\x1c\x8d\xe4y\x8b\xb0\xa5\xd7{\xc6s~\xae\
\x0b\xfa\xc6\xbf\xbf-D\xd6N\x8f\xf2+\x88\xf5xx\
\x8d\xc8\x85\x13\xfe\x9e\xcb\x83\xb78\x15\xb8\xf6!\xc5\x80\
/\xad\xb5\xe5\xec&X/8\x9b\xe7y<Mt\xba\
_\xcd\xcd{*\xb8\xb6\x9f\xe8\xea\xdf\xee\x0dP}\x9e\
o\xdb\x0f\x85\xcc?G\xf4\xb9/$\x9b\xf6\x98\x8b\xcc\
C\xec\xab\x87\xfb0\x8fE\xa7<3a\xfdN\xe3\xb9\
:\xec\xee\xca]\x9b\x14\xaf\x0b9\xc9\x0f\xb7\x05X\x1f\
\xb7\x18]\x8e\x9f?\xbcV\x82!\xbfb\xf6\xc1qe\
\xdd!\xe0\xb2\x8e>\x9f\x82z\x0d\x16\x94\x0a\xdf\xe3\xc5\
e\xde\xec\xc5f\xeba\x82\xb3\xb0K\xeb\xe4\x9c\xf3\xa7\
\xd9\xf9\x1e\xfa\xcf\xff}\x97\x1f\xe6\x0a\x19\xb7\xe5\xf7\xa3\
O\xfd\x1bD\x9f\x8f\x04\xa5(\xffZ\xa21\x10\x17\xe9\
\x83nD\xe8|\xd4\x1b\xf8\xfc\xa5\xfc\x8b\xd4\x0e\x8a:\
\x99Y<\xb7\x11\xefsE\xa3BsyS\x91\xf3\xed\
$\x98\xf9-\x8a\x0a?\xde\x1e\xa0=\xe9b\xfd\xe7\xb8\
\x1e\x8f\xadQ\xa0g\x5c\xc9\xaa~\xa3\x97+\x22\x5c\xbf\
\x1f\xebS\xe1\xeb\x9bU\x98\x7f\x91~e\xf9|\x86*\
\xf64{~\x8b\x0cg\x87u\xd3\x16\x0d\x87s\xcb\x06\
\xc3k\x09\xc8\x0a\xfcn\xcf8\x8b\xf9_\xc0g\xf5\xb5\
\x0d2\x8c\x06eS\xce\x9d:C\xd6\x05C\xc4\xd6C\
6\xa0\x86\xe1\xc4\x80\x06\x7f\xde?\x06\xf7V\xc94F\
2\xa7\x88\xe5\xec]S\xaa\x92\xaf\xe9\xf0\xd6\x91 \x9c\
\x1e\xd2i>_\xaa\xe3\xeeN\x00\x8b\xdb\x19T\xce\xbf\
]+\x915\xd0\xa3\xfc\xaa\xe8\xbb\xc2\xbd\xef\xcbke\
\xe8#\xfa<\xd7\xae\xffr\x81\xe8\xf3Ok\xda4\x1d\
\xc6B\x1a\xb4\x8eH\xf4\xc0\x8f\x15\xde+\x03r\xf8\x9e\
\xa2\x8f0Dx\xcb\xef\xf71\xde\x12\x9b\x1f\x87\xfe\x97\
\xa7\xd7\x13;t<\x18\x95\xa3\xe4\x22\xbb\x10\x9d\xb3\x13\
1g@D}-\xc7e}\x9cp\x92\x97v0\xbf\
\x22\xb5\xd5cb\xffO\xafW\xa0\x17\xf9yX\xc4\xfd\
]Yw\x91=\x10\xfa\x19\xed\xd0_\xed\x09\xd0\xfc\xf3\
<\xbb\xff\xbc\x90\xe9\xf3\xc7\xd7)0\x12\x08\xb9|\xc5\
E\xd6\x82\xca\xb9\xac\x12}\x1e\xb2\xc5\xfdmv(\xf9\
\xfc\x19\xc2[\xda\xc7d\xb3\x9f\x86\xebsq\xe1D\xd8\
\xb9\xd7\x85\xeci\x85\xd8&\xbf\xdf\x1f4\xeb\x9e\xcd\xde\
8\x18\x1f%\xc7\x13\xebd\x18\xf6Kf\xdc\xdf\xe5-\
.\x9c\x8a\xe8\x9a$\x96#M\xebL\x89\x8e\xc6\xb8\xff\
\xcb;C\xac_t\xa1\x1e\x15K\xc8#\xaf\xcfn\x94\
\xa1{\x5c\xbel\xf2q]d70~0\x22\x1b0\
\x10\x0c\xc3\x80\x14\x86\xd1\xa0J\xe3_\xd8_\xe7\xf7\xfb\
\xfc\xc0\xeaV\xd4(\x8e\x8e:\x1ek\x84Yo\x12\xab\
\xf6\xc0\x85\x0b' \xd6w\x84u\xe8\x1b\xdbu\xf8\xb7\
\xdd\x0a<R\x1d\x82\xbb*Uz<Z1\x06/\xef\
\x96\xe0\xd5\xdd\x01\xb3\x17\x94\xf0\xa1\x8b\xbeE_[/\
C\xafO\xb6\xf2\x14s\xdc\xff\xe4\x22\xbb`\xfaG\xb9\
\x1e\xff\xd9\xce\x909\x9f(\x9f\xd7\x93\xd8g\x11E\xd7\
YX\xfa\xfc\xfej\x19\x86\xc6%\x1eo\x08\xbb\x22\xee\
\xc2q\x88p\x9e1*\xe9\xf0\x9d\xed\xaa\x99\x9fo\xca\
3\xfa\xca\x0b\xad\xcf\xf3\x8al\xf5\xed\x5c\xd6\xffy\xa3\
\x04\xddc\xd2y=[\x5c\xb8p\x12D\xfd\xeeGG\
Y\x0d\xe8\xe4s\xf0\x99\xee_\xbcJ%\xfa<\xe4\xe6\
q\xb9p<P\xd6\x1b\x87$\x9e\xbf\x16\x9f\xac__\
\xaaB\xd7\xa8\x14\xd5C\xc6\x85\x0b\xa7\x02u\xfa\x1f\x8f\
\x5c8'\xf1Ru\x84\xe8\x8bY{f\xc0\xdc\x1b\x5c\
Yw\xe1d\xa0\x1f\xf1\xd9MJ\xdc\xb3\xaaD\xec\xe8\
\xbb;\x9c3\x7f\xcb\x85\x8b\x8b!\xa4\x1a\xf0\xe4\xba\x04\
g+\x16b|\xd4\xb9\xf3\xa1]\xb8\xb0\x03{*>\
\xbd\x81\xcf\xda\x8c\xab?\xacJ{\xc4>W\x9b\x1d=\
\xcf]\xb8\xc0Y!?\xd8\xa9\xf1\x9a\xd8\xf8\xf4:>\
\x1b\xefok\xc9\xf4%\xb8p1)\xa0=\xb9\xb3\x8b\
\xcd\x1a\x8aW\xd6\x17\x96j\xb4n\xd4\x85\x8bl\x00\xad\
\x9d\xd5\x0dx\xb1V\xe2\xf3*'\xe6\xedy\xdc\xff\x22\
z#c>\xafSg\x88\xbap\x11\x0b\x91\xcf\xe8\x19\
\xd6\xe0\xb3U\x0a\xcd[\x9c\xb8\xc7\x93\x88\xa5jp\xdb\
\x0a\x05\x0e\xf7\x1b\x8e\x9e\xc1\xe5\xc2\x85\x1d\x22G\x00\xfd\
\xe3\xc7z$\xb8q\xf9E\xfc1T\xa7+4VZ\
?\xac\xa5\xac\x17\xb2\x0b\x17S\x0d\x94{\x9cS\xfc\xbb\
\xfd2|\xbeF\x8f\x9e9\x8fs\x8b\xaad\xf8\xc5~\
\x05\xda\x87%7n\xe4\x22\xab!\xf8\x0c\xf6>\xe8\xf4\
\x1bp\xa0O\x87\xda\x0e\x03\xb6t\x1ap\xb0\xcf\xa0\xb3\
Jt\xc3\xea\x17\xed\xc2E\xb6\x22\x12\x93\xa7\x88\xff\x04\
O1s^\xc2\xd6\x0c)\x17.\x5c\xb8p\xe1\xc2\x85\
\x0b\x17\xe9\xc1\x15.\x5c\xb8p\xe1\xc2\x85\x0b\x17.\x5c\
\xb8p\xe1\xc2\x85\x0b\x17.\x5c\xd8\xf0\xff\x01S\xd9\xcc\
\xc2\
\x00\x00\x01\xe9\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22?><!DOCTYPE s\
vg PUBLIC \x22-//W3\
C//DTD SVG 1.1//\
EN\x22 \x22http://www.\
w3.org/Graphics/\
SVG/1.1/DTD/svg1\
1.dtd\x22><svg xmln\
s=\x22http://www.w3\
.org/2000/svg\x22 x\
mlns:xlink=\x22http\
://www.w3.org/19\
99/xlink\x22 versio\
n=\x221.1\x22 width=\x222\
4\x22 height=\x2224\x22 v\
iewBox=\x220 0 24 2\
4\x22><path d=\x22M17 \
3H5C3.89 3 3 3.9\
3 5V19C3 20.1 3\
.89 21 5 21H19C2\
0.1 21 21 20.1 2\
1 19V7L17 3M19 1\
9H5V5H16.17L19 7\
.83V19M12 12C10.\
34 12 9 13.34 9 \
15S10.34 18 12 1\
8 15 16.66 15 15\
13.66 12 12 12M\
6 6H15V10H6V6Z\x22 \
/></svg>\
\x00\x00\x0d\x03\
<\
?xml version=\x221.\
0\x22 encoding=\x22UTF\
-8\x22 standalone=\x22\
no\x22?>\x0a<svg\x0a ve\
rsion=\x221.1\x22\x0a w\
idth=\x2224\x22\x0a hei\
ght=\x2224\x22\x0a view\
Box=\x220 0 24 24\x22\x0a\
id=\x22svg4\x22\x0a \
sodipodi:docname\
=\x22folder-open-ou\
tline-dna_mdi.sv\
g\x22\x0a inkscape:v\
ersion=\x221.1.1 (3\
bf5ae0d25, 2021-\
09-20)\x22\x0a xmlns\
:inkscape=\x22http:\
//www.inkscape.o\
rg/namespaces/in\
kscape\x22\x0a xmlns\
:sodipodi=\x22http:\
//sodipodi.sourc\
eforge.net/DTD/s\
odipodi-0.dtd\x22\x0a \
xmlns=\x22http://\
www.w3.org/2000/\
svg\x22\x0a xmlns:sv\
g=\x22http://www.w3\
.org/2000/svg\x22>\x0a\
<defs\x0a id=\
\x22defs8\x22 />\x0a <so\
dipodi:namedview\
\x0a id=\x22namedv\
iew6\x22\x0a pagec\
olor=\x22#ffffff\x22\x0a \
bordercolor=\
\x22#666666\x22\x0a b\
orderopacity=\x221.\
0\x22\x0a inkscape\
:pageshadow=\x222\x22\x0a\
inkscape:pa\
geopacity=\x220.0\x22\x0a\
inkscape:pa\
gecheckerboard=\x22\
0\x22\x0a showgrid\
=\x22false\x22\x0a in\
kscape:zoom=\x2234.\
666667\x22\x0a ink\
scape:cx=\x227.6298\
077\x22\x0a inksca\
pe:cy=\x2214.4375\x22\x0a\
inkscape:wi\
ndow-width=\x221920\
\x22\x0a inkscape:\
window-height=\x221\
017\x22\x0a inksca\
pe:window-x=\x22-8\x22\
\x0a inkscape:w\
indow-y=\x22-8\x22\x0a \
inkscape:windo\
w-maximized=\x221\x22\x0a\
inkscape:cu\
rrent-layer=\x22svg\
4\x22 />\x0a <path\x0a \
id=\x22path2\x22\x0a \
d=\x22M 3.019531\
2 1.1054688 A 2 \
2 0 0 0 1.019531\
2 3.1054688 L 1.\
0195312 15.10546\
9 A 2 2 0 0 0 3.\
0195312 17.10546\
9 L 9.1445312 17\
.105469 L 9.1445\
312 15.105469 L \
5.0195312 15.105\
469 L 6.6191406 \
9.1054688 L 19.6\
19141 9.1054688 \
L 18.939453 11.6\
54297 L 20.98828\
1 11.654297 L 22\
.21875 7.1054688\
L 5.1191406 7.1\
054688 L 3.01953\
12 15.105469 L 3\
.0195312 5.10546\
88 L 20.019531 5\
.1054688 A 2 2 0\
0 0 18.019531 3\
.1054688 L 11.01\
9531 3.1054688 L\
9.0195312 1.105\
4688 L 3.0195312\
1.1054688 z \x22 /\
>\x0a <path\x0a i\
d=\x22path18\x22\x0a \
style=\x22stroke-wi\
dth:0.603887\x22\x0a \
d=\x22m 10.05859\
4,12.818359 v 1.\
00586 c 0,1.0549\
12 0.776328,1.91\
8934 1.814453,2.\
617187 0.588029,\
0.396848 1.28520\
6,0.757182 2.003\
906,1.09375 L 15\
.205078,16.9375 \
c -0.871154,-0.3\
96849 -1.8288,-0\
.885793 -2.45312\
5,-1.302734 0,0 \
0.04152,0.0293 0\
,0 h -0.02344 v \
-0.01758 c -0.77\
673,-0.553241 -1\
.05543,-1.11269 \
-1.045673,-1.792\
969 l 0.01443,-1\
.00586 z m 9.931\
339,0 1e-6,1.034\
706 c 0,0.687102\
-0.218218,1.223\
105 -1.009465,1.\
781701 h -0.002 \
c -0.04135,0.029\
17 -0.07983,0.05\
873 -0.123047,0.\
08789 -0.878413,\
0.587737 -2.1123\
8,1.134646 -3.38\
2813,1.697266 -1\
.270432,0.557597\
-2.576002,1.130\
991 -3.599609,1.\
824219 -1.038125\
,0.698253 -1.814\
453,1.562273 -1.\
814453,2.617187 \
v 1.003906 l 1.7\
2521,-0.01442 4e\
-6,-0.97506 c 3e\
-6,-0.723369 0.2\
20042,-1.327079 \
1.091196,-1.9148\
13 0.878413,-0.5\
87742 2.112379,-\
1.134646 3.38281\
2,-1.697266 1.27\
0434,-0.557598 2\
.576004,-1.12903\
7 3.59961,-1.822\
266 1.038124,-0.\
698253 1.816406,\
-1.562275 1.8164\
06,-2.617187 v -\
1.00586 z m -2.1\
34464,5.330079 -\
1.330078,0.59765\
6 c 0.871154,0.3\
96848 1.705751,0\
.797899 2.330078\
,1.214844 0.0289\
3,0.01952 0.0539\
3,0.03906 0.0820\
3,0.05859 h 0.06\
445 v 0.04687 c \
0.776774,0.55339\
6 0.892039,0.869\
797 0.915866,1.5\
4973 l -3e-6,1.2\
49098 1.756009,-\
3e-6 V 21.86132 \
c 0,-1.054914 -0\
.778282,-1.91893\
4 -1.816406,-2.6\
17187 -0.58803,-\
0.396847 -1.2832\
51,-0.759138 -2.\
001953,-1.095703\
z\x22\x0a sodipod\
i:nodetypes=\x22csc\
ccccccscccsccccc\
sccscccscccccccc\
sccsccc\x22 />\x0a <r\
ect\x0a style=\x22\
fill:#000000\x22\x0a \
id=\x22rect2651-\
5\x22\x0a width=\x226\
.4471159\x22\x0a h\
eight=\x221.3990391\
\x22\x0a x=\x2212.612\
979\x22\x0a y=\x2220.\
646629\x22\x0a ry=\
\x220.57692307\x22 />\x0a\
<rect\x0a sty\
le=\x22fill:#000000\
\x22\x0a id=\x22rect2\
651-5-8\x22\x0a wi\
dth=\x226.4471159\x22\x0a\
height=\x221.3\
990391\x22\x0a x=\x22\
12.526443\x22\x0a \
y=\x2213.492789\x22\x0a \
ry=\x220.5769230\
7\x22\x0a rx=\x220.57\
692307\x22 />\x0a</svg\
>\x0a\
\x00\x00\x01\x8c\
\x89\
PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\
\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0w=\xf8\
\x00\x00\x00\x09pHYs\x00\x00\x0e\xc3\x00\x00\x0e\xc3\
\x01\xc7o\xa8d\x00\x00\x00\x19tEXtSof\
tware\x00www.inksca\
pe.org\x9b\xee<\x1a\x00\x00\x01\x19ID\
ATH\x89\xed\x94=N\xc3@\x10F\xdfDH)\
S\xd2\x84\xce\x1c$-\x07\xa0@\x89 \x8d\x8f@*\
\xc8\x9a[\xa4\x85\xc6\x90\x03p\x13\x0ab*6\x0d%\
\xb5\xf1P,X\xd8\xf87\xb6h\xc8\xd7yw\xf6=\
\xcdx\xb5\xb0OM$\xf3e^N\x90d\x05\x8cw\
\xe4Y\x10\x9f\xa5\xf7\xf8\xbd0\xc8\xea:\xc1qgu\
\xf5sa\xf0\xbb\xa0s\x8e\xaa\x04\xbd\xa7\xbb@5D\
\x19\x01w\xfd\x0bTC\xd8\xce0\xc7\xef\xa0IQ\xc9\
A\x07\xfc\xda\xc1'1f\x13\x00\xe7EE\xbbv\xb0\
F\xedY\x0a\x17\xae\xcb\x0a\x8b\x05\xca-\xca\xc8\x8d \
\xbf\xa7aSx\xb9\x00I\xdc\x5c\xb7\xb3\x9c\xe4>3\
\x96\x1a8\x94\xfd\x03\xd1\x0bL\xf4\x86\xf1\x16<\xe8\x94\
\xa7(F\x18\xa2v\x8a\x99\xc4\x04\x1b\x03\xf5\xf0\x8a\x0e\
\x00\xd1K\x82\xe8\x86S\xf9\x00;O\xc7\xe2\xe0\xcb&\
\xf0\xf2\x0e\xd2\xe8\x15&\x1ab\xbc\x05@[x\x03\x01\
_\x9d<\x1f\x82$\xc0\xbc\x0d\xbc\x99\xc0Y\x0a\xefx\
\x93\xfc\xf9[d{`\xbeV\x08\xc4\xcf\x17\xb4\x86\xab\
\xf8\x1d\xce\xff\xc7|\x02\xfeFi\xc4QH\xcf\xf9\x00\
\x00\x00\x00IEND\xaeB`\x82\
\x00\x00\x01`\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M20 3h-1V2c\
0-.55-.45-1-1-1s\
-1 .45-1 1v1H7V2\
c0-.55-.45-1-1-1\
s-1 .45-1 1v1H4c\
-1.1 0-2 .9-2 2v\
16c0 1.1.9 2 2 2\
h16c1.1 0 2-.9 2\
-2V5c0-1.1-.9-2-\
2-2zm-1 18H5c-.5\
5 0-1-.45-1-1V8h\
16v12c0 .55-.45 \
1-1 1z\x22/></svg>\
\x00\x00\x01\x0c\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M8.71 12.29\
L11.3 9.7c.39-.3\
9 1.02-.39 1.41 \
0l2.59 2.59c.63.\
63.18 1.71-.71 1\
.71H9.41c-.89 0-\
1.33-1.08-.7-1.7\
1z\x22/></svg>\
\x00\x00\x01\x0c\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M8.71 11.71\
l2.59 2.59c.39.3\
9 1.02.39 1.41 0\
l2.59-2.59c.63-.\
63.18-1.71-.71-1\
.71H9.41c-.89 0-\
1.33 1.08-.7 1.7\
1z\x22/></svg>\
\x00\x00\x01Q\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#4\
d5157\x22><g><rect \
fill=\x22none\x22 fill\
-rule=\x22evenodd\x22 \
height=\x2224\x22 widt\
h=\x2224\x22/><g><path\
d=\x22M13,5v14c0,0\
.6-0.4,1-1,1l0,0\
c-0.6,0-1-0.4-1-\
1V5c0-0.5,0.4-1,\
1-1l0,0C12.6,4,1\
3,4.4,13,5z\x22 fil\
l-rule=\x22evenodd\x22\
/></g></g></svg>\
\
\x00\x00\x02\xc9\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#4\
d5157\x22><g><path \
d=\x22M0,0h24v24H0V\
0z\x22 fill=\x22none\x22/\
></g><g><g><path\
d=\x22M12,5V3.21c0\
-0.45-0.54-0.67-\
0.85-0.35L8.35,5\
.65c-0.2,0.2-0.2\
,0.51,0,0.71l2.7\
9,2.79C11.46,9.4\
6,12,9.24,12,8.7\
9V7 c3.31,0,6,2.\
69,6,6c0,2.72-1.\
83,5.02-4.31,5.7\
5C13.27,18.87,13\
,19.27,13,19.7v0\
c0,0.65,0.62,1.1\
6,1.25,0.97 C17.\
57,19.7,20,16.64\
,20,13C20,8.58,1\
6.42,5,12,5z\x22/><\
path d=\x22M6,13c0-\
1.34,0.44-2.58,1\
.19-3.59c0.3-0.4\
,0.26-0.95-0.09-\
1.31l0,0C6.68,7.\
68,5.96,7.72,5.6\
,8.2C4.6,9.54,4,\
11.2,4,13 c0,3.6\
4,2.43,6.7,5.75,\
7.67C10.38,20.86\
,11,20.35,11,19.\
7v0c0-0.43-0.27-\
0.83-0.69-0.95C7\
.83,18.02,6,15.7\
2,6,13z\x22/></g></\
g></svg>\
\x00\x00\x00\xfb\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M4.41 22H20\
c1.1 0 2-.9 2-2V\
4.41c0-.89-1.08-\
1.34-1.71-.71L3.\
71 20.29c-.63.63\
-.19 1.71.7 1.71\
z\x22/></svg>\
\x00\x00\x02\x98\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#4\
d5157\x22><g><rect \
fill=\x22none\x22 heig\
ht=\x2224\x22 width=\x222\
4\x22/><rect fill=\x22\
none\x22 height=\x2224\
\x22 width=\x2224\x22/></\
g><g><g><path d=\
\x22M20.08,11.42l-4\
.04-5.65C15.7,5.\
29,15.15,5,14.56\
,5h0c-1.49,0-2.3\
5,1.68-1.49,2.89\
L16,12l-2.93,4.1\
1 c-0.87,1.21,0,\
2.89,1.49,2.89h0\
c0.59,0,1.15-0.2\
9,1.49-0.77l4.04\
-5.65C20.33,12.2\
3,20.33,11.77,20\
.08,11.42z\x22/><pa\
th d=\x22M13.08,11.\
42L9.05,5.77C8.7\
,5.29,8.15,5,7.5\
6,5h0C6.07,5,5.2\
,6.68,6.07,7.89L\
9,12l-2.93,4.11C\
5.2,17.32,6.07,1\
9,7.56,19h0 c0.5\
9,0,1.15-0.29,1.\
49-0.77l4.04-5.6\
5C13.33,12.23,13\
.33,11.77,13.08,\
11.42z\x22/></g></g\
></svg>\
\x00\x00\x00\xfb\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M4.41 22H20\
c1.1 0 2-.9 2-2V\
4.41c0-.89-1.08-\
1.34-1.71-.71L3.\
71 20.29c-.63.63\
-.19 1.71.7 1.71\
z\x22/></svg>\
\x00\x00\x01Z\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#b\
abdc2\x22><g><rect \
fill=\x22none\x22 fill\
-rule=\x22evenodd\x22 \
height=\x2224\x22 widt\
h=\x2224\x22/><g><path\
d=\x22M19,13H5c-0.\
55,0-1-0.45-1-1v\
0c0-0.55,0.45-1,\
1-1h14c0.55,0,1,\
0.45,1,1v0 C20,1\
2.55,19.55,13,19\
,13z\x22 fill-rule=\
\x22evenodd\x22/></g><\
/g></svg>\
\x00\x00\x01\xc2\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M11 18c0 1.\
1-.9 2-2 2s-2-.9\
-2-2 .9-2 2-2 2 \
.9 2 2zm-2-8c-1.\
1 0-2 .9-2 2s.9 \
2 2 2 2-.9 2-2-.\
9-2-2-2zm0-6c-1.\
1 0-2 .9-2 2s.9 \
2 2 2 2-.9 2-2-.\
9-2-2-2zm6 4c1.1\
0 2-.9 2-2s-.9-\
2-2-2-2 .9-2 2 .\
9 2 2 2zm0 2c-1.\
1 0-2 .9-2 2s.9 \
2 2 2 2-.9 2-2-.\
9-2-2-2zm0 6c-1.\
1 0-2 .9-2 2s.9 \
2 2 2 2-.9 2-2-.\
9-2-2-2z\x22/></svg\
>\
\x00\x00\x01D\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M9 16.17L5.\
53 12.7c-.39-.39\
-1.02-.39-1.41 0\
-.39.39-.39 1.02\
0 1.41l4.18 4.1\
8c.39.39 1.02.39\
1.41 0L20.29 7.\
71c.39-.39.39-1.\
02 0-1.41-.39-.3\
9-1.02-.39-1.41 \
0L9 16.17z\x22/></s\
vg>\
\x00\x00\x01D\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M11.29 8.71\
L6.7 13.3c-.39.3\
9-.39 1.02 0 1.4\
1.39.39 1.02.39 \
1.41 0L12 10.83l\
3.88 3.88c.39.39\
1.02.39 1.41 0 \
.39-.39.39-1.02 \
0-1.41L12.7 8.71\
c-.38-.39-1.02-.\
39-1.41 0z\x22/></s\
vg>\
\x00\x00\x01h\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M20 6h-8l-1\
.41-1.41C10.21 4\
.21 9.7 4 9.17 4\
H4c-1.1 0-1.99.9\
-1.99 2L2 18c0 1\
.1.9 2 2 2h16c1.\
1 0 2-.9 2-2V8c0\
-1.1-.9-2-2-2zm-\
1 12H5c-.55 0-1-\
.45-1-1V9c0-.55.\
45-1 1-1h14c.55 \
0 1 .45 1 1v8c0 \
.55-.45 1-1 1z\x22/\
></svg>\
\x00\x00\x01\x1b\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M12 2C6.48 \
2 2 6.48 2 12s4.\
48 10 10 10 10-4\
.48 10-10S17.52 \
2 12 2zm0 18c-4.\
42 0-8-3.58-8-8s\
3.58-8 8-8 8 3.5\
8 8 8-3.58 8-8 8\
z\x22/></svg>\
\x00\x00\x01\xcc\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#eff\
1f1\x22><path d=\x22M1\
8,13c1.1,0,2,0.9\
,2,2s-0.9,2-2,2s\
-2-0.9-2-2S16.9,\
13,18,13z M10,15\
c0,1.1,0.9,2,2,2\
s2-0.9,2-2s-0.9-\
2-2-2S10,13.9,10\
,15z M4,15c0,1.1\
,0.9,2,2,2s2-0.9\
,2-2s-0.9-2-2-2S\
4,13.9,4,15z M8,\
9c0-1.1-0.9-2-2-\
2S4,7.9,4,9s0.9,\
2,2,2S8,10.1,8,9\
z M10,9 c0,1.1,0\
.9,2,2,2s2-0.9,2\
-2s-0.9-2-2-2S10\
,7.9,10,9z M16,9\
c0,1.1,0.9,2,2,2\
s2-0.9,2-2s-0.9-\
2-2-2S16,7.9,16,\
9z\x22/></svg>\
\x00\x00\x01d\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M5 13h11.17\
l-4.88 4.88c-.39\
.39-.39 1.03 0 1\
.42.39.39 1.02.3\
9 1.41 0l6.59-6.\
59c.39-.39.39-1.\
02 0-1.41l-6.58-\
6.6c-.39-.39-1.0\
2-.39-1.41 0-.39\
.39-.39 1.02 0 1\
.41L16.17 11H5c-\
.55 0-1 .45-1 1s\
.45 1 1 1z\x22/></s\
vg>\
\x00\x00\x01a\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M13 19V7.83\
l4.88 4.88c.39.3\
9 1.03.39 1.42 0\
.39-.39.39-1.02\
0-1.41l-6.59-6.\
59c-.39-.39-1.02\
-.39-1.41 0l-6.6\
6.58c-.39.39-.3\
9 1.02 0 1.41.39\
.39 1.02.39 1.41\
0L11 7.83V19c0 \
.55.45 1 1 1s1-.\
45 1-1z\x22/></svg>\
\
\x00\x00\x01<\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M18 19H6c-.\
55 0-1-.45-1-1V6\
c0-.55.45-1 1-1h\
12c.55 0 1 .45 1\
1v12c0 .55-.45 \
1-1 1zm1-16H5c-1\
.1 0-2 .9-2 2v14\
c0 1.1.9 2 2 2h1\
4c1.1 0 2-.9 2-2\
V5c0-1.1-.9-2-2-\
2z\x22/></svg>\
\x00\x00\x01D\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M11.29 8.71\
L6.7 13.3c-.39.3\
9-.39 1.02 0 1.4\
1.39.39 1.02.39 \
1.41 0L12 10.83l\
3.88 3.88c.39.39\
1.02.39 1.41 0 \
.39-.39.39-1.02 \
0-1.41L12.7 8.71\
c-.38-.39-1.02-.\
39-1.41 0z\x22/></s\
vg>\
\x00\x00\x01\x95\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M19 3H5c-1.\
1 0-2 .9-2 2v14c\
0 1.1.9 2 2 2h14\
c1.1 0 2-.9 2-2V\
5c0-1.1-.9-2-2-2\
zm-8.29 13.29c-.\
39.39-1.02.39-1.\
41 0L5.71 12.7c-\
.39-.39-.39-1.02\
0-1.41.39-.39 1\
.02-.39 1.41 0L1\
0 14.17l6.88-6.8\
8c.39-.39 1.02-.\
39 1.41 0 .39.39\
.39 1.02 0 1.41l\
-7.58 7.59z\x22/></\
svg>\
\x00\x00\x01\x89\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M12 2C6.48 \
2 2 6.48 2 12s4.\
48 10 10 10 10-4\
.48 10-10S17.52 \
2 12 2zM9.29 16.\
29L5.7 12.7c-.39\
-.39-.39-1.02 0-\
1.41.39-.39 1.02\
-.39 1.41 0L10 1\
4.17l6.88-6.88c.\
39-.39 1.02-.39 \
1.41 0 .39.39.39\
1.02 0 1.41l-7.\
59 7.59c-.38.39-\
1.02.39-1.41 0z\x22\
/></svg>\
\x00\x00\x01\xcc\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M1\
8,13c1.1,0,2,0.9\
,2,2s-0.9,2-2,2s\
-2-0.9-2-2S16.9,\
13,18,13z M10,15\
c0,1.1,0.9,2,2,2\
s2-0.9,2-2s-0.9-\
2-2-2S10,13.9,10\
,15z M4,15c0,1.1\
,0.9,2,2,2s2-0.9\
,2-2s-0.9-2-2-2S\
4,13.9,4,15z M8,\
9c0-1.1-0.9-2-2-\
2S4,7.9,4,9s0.9,\
2,2,2S8,10.1,8,9\
z M10,9 c0,1.1,0\
.9,2,2,2s2-0.9,2\
-2s-0.9-2-2-2S10\
,7.9,10,9z M16,9\
c0,1.1,0.9,2,2,2\
s2-0.9,2-2s-0.9-\
2-2-2S16,7.9,16,\
9z\x22/></svg>\
\x00\x00\x02\x98\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#b\
abdc2\x22><g><rect \
fill=\x22none\x22 heig\
ht=\x2224\x22 width=\x222\
4\x22/><rect fill=\x22\
none\x22 height=\x2224\
\x22 width=\x2224\x22/></\
g><g><g><path d=\
\x22M20.08,11.42l-4\
.04-5.65C15.7,5.\
29,15.15,5,14.56\
,5h0c-1.49,0-2.3\
5,1.68-1.49,2.89\
L16,12l-2.93,4.1\
1 c-0.87,1.21,0,\
2.89,1.49,2.89h0\
c0.59,0,1.15-0.2\
9,1.49-0.77l4.04\
-5.65C20.33,12.2\
3,20.33,11.77,20\
.08,11.42z\x22/><pa\
th d=\x22M13.08,11.\
42L9.05,5.77C8.7\
,5.29,8.15,5,7.5\
6,5h0C6.07,5,5.2\
,6.68,6.07,7.89L\
9,12l-2.93,4.11C\
5.2,17.32,6.07,1\
9,7.56,19h0 c0.5\
9,0,1.15-0.29,1.\
49-0.77l4.04-5.6\
5C13.33,12.23,13\
.33,11.77,13.08,\
11.42z\x22/></g></g\
></svg>\
\x00\x00\x01\x18\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#4\
d5157\x22><g><rect \
fill=\x22none\x22 fill\
-rule=\x22evenodd\x22 \
height=\x2224\x22 widt\
h=\x2224\x22/><g><rect\
fill-rule=\x22even\
odd\x22 height=\x2224\x22\
width=\x221\x22 x=\x2211\
\x22 y=\x220\x22/></g></g\
></svg>\
\x00\x00\x00\xfb\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#2e4\
65e\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M4.41 22H20\
c1.1 0 2-.9 2-2V\
4.41c0-.89-1.08-\
1.34-1.71-.71L3.\
71 20.29c-.63.63\
-.19 1.71.7 1.71\
z\x22/></svg>\
\x00\x00\x029\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M12 2C6.48 \
2 2 6.48 2 12s4.\
48 10 10 10 10-4\
.48 10-10S17.52 \
2 12 2zm1 17h-2v\
-2h2v2zm2.07-7.7\
5l-.9.92c-.5.51-\
.86.97-1.04 1.69\
-.08.32-.13.68-.\
13 1.14h-2v-.5c0\
-.46.08-.9.22-1.\
31.2-.58.53-1.1.\
95-1.52l1.24-1.2\
6c.46-.44.68-1.1\
.55-1.8-.13-.72-\
.69-1.33-1.39-1.\
53-1.11-.31-2.14\
.32-2.47 1.27-.1\
2.37-.43.65-.82.\
65h-.3C8.4 9 8 8\
.44 8.16 7.88c.4\
3-1.47 1.68-2.59\
3.23-2.83 1.52-\
.24 2.97.55 3.87\
1.8 1.18 1.63.8\
3 3.38-.19 4.4z\x22\
/></svg>\
\x00\x00\x01\xae\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M18.3 5.71c\
-.39-.39-1.02-.3\
9-1.41 0L12 10.5\
9 7.11 5.7c-.39-\
.39-1.02-.39-1.4\
1 0-.39.39-.39 1\
.02 0 1.41L10.59\
12 5.7 16.89c-.\
39.39-.39 1.02 0\
1.41.39.39 1.02\
.39 1.41 0L12 13\
.41l4.89 4.89c.3\
9.39 1.02.39 1.4\
1 0 .39-.39.39-1\
.02 0-1.41L13.41\
12l4.89-4.89c.3\
8-.38.38-1.02 0-\
1.4z\x22/></svg>\
\x00\x00\x01\xc2\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#eff\
1f1\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M11 18c0 1.\
1-.9 2-2 2s-2-.9\
-2-2 .9-2 2-2 2 \
.9 2 2zm-2-8c-1.\
1 0-2 .9-2 2s.9 \
2 2 2 2-.9 2-2-.\
9-2-2-2zm0-6c-1.\
1 0-2 .9-2 2s.9 \
2 2 2 2-.9 2-2-.\
9-2-2-2zm6 4c1.1\
0 2-.9 2-2s-.9-\
2-2-2-2 .9-2 2 .\
9 2 2 2zm0 2c-1.\
1 0-2 .9-2 2s.9 \
2 2 2 2-.9 2-2-.\
9-2-2-2zm0 6c-1.\
1 0-2 .9-2 2s.9 \
2 2 2 2-.9 2-2-.\
9-2-2-2z\x22/></svg\
>\
\x00\x00\x01`\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M20 3h-1V2c\
0-.55-.45-1-1-1s\
-1 .45-1 1v1H7V2\
c0-.55-.45-1-1-1\
s-1 .45-1 1v1H4c\
-1.1 0-2 .9-2 2v\
16c0 1.1.9 2 2 2\
h16c1.1 0 2-.9 2\
-2V5c0-1.1-.9-2-\
2-2zm-1 18H5c-.5\
5 0-1-.45-1-1V8h\
16v12c0 .55-.45 \
1-1 1z\x22/></svg>\
\x00\x00\x01\x18\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#b\
abdc2\x22><g><rect \
fill=\x22none\x22 fill\
-rule=\x22evenodd\x22 \
height=\x2224\x22 widt\
h=\x2224\x22/><g><rect\
fill-rule=\x22even\
odd\x22 height=\x2224\x22\
width=\x221\x22 x=\x2211\
\x22 y=\x220\x22/></g></g\
></svg>\
\x00\x00\x01:\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M12 2C6.48 \
2 2 6.48 2 12s4.\
48 10 10 10 10-4\
.48 10-10S17.52 \
2 12 2zm0 18c-4.\
42 0-8-3.58-8-8s\
3.58-8 8-8 8 3.5\
8 8 8-3.58 8-8 8\
z\x22/><circle cx=\x22\
12\x22 cy=\x2212\x22 r=\x225\
\x22/></svg>\
\x00\x00\x01\x0c\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M8.71 11.71\
l2.59 2.59c.39.3\
9 1.02.39 1.41 0\
l2.59-2.59c.63-.\
63.18-1.71-.71-1\
.71H9.41c-.89 0-\
1.33 1.08-.7 1.7\
1z\x22/></svg>\
\x00\x00\x01d\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M19 11H7.83\
l4.88-4.88c.39-.\
39.39-1.03 0-1.4\
2-.39-.39-1.02-.\
39-1.41 0l-6.59 \
6.59c-.39.39-.39\
1.02 0 1.41l6.5\
9 6.59c.39.39 1.\
02.39 1.41 0 .39\
-.39.39-1.02 0-1\
.41L7.83 13H19c.\
55 0 1-.45 1-1s-\
.45-1-1-1z\x22/></s\
vg>\
\x00\x00\x01d\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M19 11H7.83\
l4.88-4.88c.39-.\
39.39-1.03 0-1.4\
2-.39-.39-1.02-.\
39-1.41 0l-6.59 \
6.59c-.39.39-.39\
1.02 0 1.41l6.5\
9 6.59c.39.39 1.\
02.39 1.41 0 .39\
-.39.39-1.02 0-1\
.41L7.83 13H19c.\
55 0 1-.45 1-1s-\
.45-1-1-1z\x22/></s\
vg>\
\x00\x00\x01W\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M2\
4 24H0V0h24v24z\x22\
fill=\x22none\x22 opa\
city=\x22.87\x22/><pat\
h d=\x22M15.88 9.29\
L12 13.17 8.12 9\
.29c-.39-.39-1.0\
2-.39-1.41 0-.39\
.39-.39 1.02 0 1\
.41l4.59 4.59c.3\
9.39 1.02.39 1.4\
1 0l4.59-4.59c.3\
9-.39.39-1.02 0-\
1.41-.39-.38-1.0\
3-.39-1.42 0z\x22/>\
</svg>\
\x00\x00\x01*\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#008\
1db\x22><path d=\x22M0\
0h24v24H0z\x22 fil\
l=\x22none\x22/><path \
d=\x22M19 3H5c-1.1 \
0-2 .9-2 2v14c0 \
1.1.9 2 2 2h14c1\
.1 0 2-.9 2-2V5c\
0-1.1-.9-2-2-2zm\
-3 10H8c-.55 0-1\
-.45-1-1s.45-1 1\
-1h8c.55 0 1 .45\
1 1s-.45 1-1 1z\
\x22/></svg>\
\x00\x00\x01*\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0z\x22 fil\
l=\x22none\x22/><path \
d=\x22M19 3H5c-1.1 \
0-2 .9-2 2v14c0 \
1.1.9 2 2 2h14c1\
.1 0 2-.9 2-2V5c\
0-1.1-.9-2-2-2zm\
-3 10H8c-.55 0-1\
-.45-1-1s.45-1 1\
-1h8c.55 0 1 .45\
1 1s-.45 1-1 1z\
\x22/></svg>\
\x00\x00\x01a\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M13 19V7.83\
l4.88 4.88c.39.3\
9 1.03.39 1.42 0\
.39-.39.39-1.02\
0-1.41l-6.59-6.\
59c-.39-.39-1.02\
-.39-1.41 0l-6.6\
6.58c-.39.39-.3\
9 1.02 0 1.41.39\
.39 1.02.39 1.41\
0L11 7.83V19c0 \
.55.45 1 1 1s1-.\
45 1-1z\x22/></svg>\
\
\x00\x00\x01G\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M9.29 6.71c\
-.39.39-.39 1.02\
0 1.41L13.17 12\
l-3.88 3.88c-.39\
.39-.39 1.02 0 1\
.41.39.39 1.02.3\
9 1.41 0l4.59-4.\
59c.39-.39.39-1.\
02 0-1.41L10.7 6\
.7c-.38-.38-1.02\
-.38-1.41.01z\x22/>\
</svg>\
\x00\x00\x01W\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M2\
4 24H0V0h24v24z\x22\
fill=\x22none\x22 opa\
city=\x22.87\x22/><pat\
h d=\x22M15.88 9.29\
L12 13.17 8.12 9\
.29c-.39-.39-1.0\
2-.39-1.41 0-.39\
.39-.39 1.02 0 1\
.41l4.59 4.59c.3\
9.39 1.02.39 1.4\
1 0l4.59-4.59c.3\
9-.39.39-1.02 0-\
1.41-.39-.38-1.0\
3-.39-1.42 0z\x22/>\
</svg>\
\x00\x00\x02\x98\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#b\
abdc2\x22><g><rect \
fill=\x22none\x22 heig\
ht=\x2224\x22 width=\x222\
4\x22/><rect fill=\x22\
none\x22 height=\x2224\
\x22 width=\x2224\x22/></\
g><g><g><path d=\
\x22M20.08,11.42l-4\
.04-5.65C15.7,5.\
29,15.15,5,14.56\
,5h0c-1.49,0-2.3\
5,1.68-1.49,2.89\
L16,12l-2.93,4.1\
1 c-0.87,1.21,0,\
2.89,1.49,2.89h0\
c0.59,0,1.15-0.2\
9,1.49-0.77l4.04\
-5.65C20.33,12.2\
3,20.33,11.77,20\
.08,11.42z\x22/><pa\
th d=\x22M13.08,11.\
42L9.05,5.77C8.7\
,5.29,8.15,5,7.5\
6,5h0C6.07,5,5.2\
,6.68,6.07,7.89L\
9,12l-2.93,4.11C\
5.2,17.32,6.07,1\
9,7.56,19h0 c0.5\
9,0,1.15-0.29,1.\
49-0.77l4.04-5.6\
5C13.33,12.23,13\
.33,11.77,13.08,\
11.42z\x22/></g></g\
></svg>\
\x00\x00\x01Z\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#4\
d5157\x22><g><rect \
fill=\x22none\x22 fill\
-rule=\x22evenodd\x22 \
height=\x2224\x22 widt\
h=\x2224\x22/><g><path\
d=\x22M19,13H5c-0.\
55,0-1-0.45-1-1v\
0c0-0.55,0.45-1,\
1-1h14c0.55,0,1,\
0.45,1,1v0 C20,1\
2.55,19.55,13,19\
,13z\x22 fill-rule=\
\x22evenodd\x22/></g><\
/g></svg>\
\x00\x00\x01\xb0\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M3 13h2v-2H\
3v2zm0 4h2v-2H3v\
2zm2 4v-2H3c0 1.\
1.89 2 2 2zM3 9h\
2V7H3v2zm12 12h2\
v-2h-2v2zm4-18H9\
c-1.11 0-2 .9-2 \
2v10c0 1.1.89 2 \
2 2h10c1.1 0 2-.\
9 2-2V5c0-1.1-.9\
-2-2-2zm-1 12h-8\
c-.55 0-1-.45-1-\
1V6c0-.55.45-1 1\
-1h8c.55 0 1 .45\
1 1v8c0 .55-.45\
1-1 1zm-7 6h2v-\
2h-2v2zm-4 0h2v-\
2H7v2z\x22/></svg>\
\x00\x00\x01<\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M18 19H6c-.\
55 0-1-.45-1-1V6\
c0-.55.45-1 1-1h\
12c.55 0 1 .45 1\
1v12c0 .55-.45 \
1-1 1zm1-16H5c-1\
.1 0-2 .9-2 2v14\
c0 1.1.9 2 2 2h1\
4c1.1 0 2-.9 2-2\
V5c0-1.1-.9-2-2-\
2z\x22/></svg>\
\x00\x00\x01\xa0\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M20 6h-8l-1\
.41-1.41C10.21 4\
.21 9.7 4 9.17 4\
H4c-1.11 0-1.99.\
89-1.99 2L2 18c0\
1.11.89 2 2 2h1\
6c1.1 0 2-.9 2-2\
V8c0-1.1-.9-2-2-\
2zm-2 8h-2v2c0 .\
55-.45 1-1 1s-1-\
.45-1-1v-2h-2c-.\
55 0-1-.45-1-1s.\
45-1 1-1h2v-2c0-\
.55.45-1 1-1s1 .\
45 1 1v2h2c.55 0\
1 .45 1 1s-.45 \
1-1 1z\x22/></svg>\
\x00\x00\x01\x95\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#008\
1db\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M19 3H5c-1.\
1 0-2 .9-2 2v14c\
0 1.1.9 2 2 2h14\
c1.1 0 2-.9 2-2V\
5c0-1.1-.9-2-2-2\
zm-8.29 13.29c-.\
39.39-1.02.39-1.\
41 0L5.71 12.7c-\
.39-.39-.39-1.02\
0-1.41.39-.39 1\
.02-.39 1.41 0L1\
0 14.17l6.88-6.8\
8c.39-.39 1.02-.\
39 1.41 0 .39.39\
.39 1.02 0 1.41l\
-7.58 7.59z\x22/></\
svg>\
\x00\x00\x01:\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#008\
1db\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M12 2C6.48 \
2 2 6.48 2 12s4.\
48 10 10 10 10-4\
.48 10-10S17.52 \
2 12 2zm0 18c-4.\
42 0-8-3.58-8-8s\
3.58-8 8-8 8 3.5\
8 8 8-3.58 8-8 8\
z\x22/><circle cx=\x22\
12\x22 cy=\x2212\x22 r=\x225\
\x22/></svg>\
\x00\x00\x01Q\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#b\
abdc2\x22><g><rect \
fill=\x22none\x22 fill\
-rule=\x22evenodd\x22 \
height=\x2224\x22 widt\
h=\x2224\x22/><g><path\
d=\x22M13,5v14c0,0\
.6-0.4,1-1,1l0,0\
c-0.6,0-1-0.4-1-\
1V5c0-0.5,0.4-1,\
1-1l0,0C12.6,4,1\
3,4.4,13,5z\x22 fil\
l-rule=\x22evenodd\x22\
/></g></g></svg>\
\
\x00\x00\x01\xae\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M18.3 5.71c\
-.39-.39-1.02-.3\
9-1.41 0L12 10.5\
9 7.11 5.7c-.39-\
.39-1.02-.39-1.4\
1 0-.39.39-.39 1\
.02 0 1.41L10.59\
12 5.7 16.89c-.\
39.39-.39 1.02 0\
1.41.39.39 1.02\
.39 1.41 0L12 13\
.41l4.89 4.89c.3\
9.39 1.02.39 1.4\
1 0 .39-.39.39-1\
.02 0-1.41L13.41\
12l4.89-4.89c.3\
8-.38.38-1.02 0-\
1.4z\x22/></svg>\
\x00\x00\x01\xc7\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#0\
081db\x22><rect fil\
l=\x22none\x22 height=\
\x2224\x22 width=\x2224\x22/\
><path d=\x22M14.29\
,5.71L14.29,5.71\
c-0.39,0.39-0.39\
,1.02,0,1.41L18.\
17,11H3c-0.55,0-\
1,0.45-1,1v0c0,0\
.55,0.45,1,1,1h1\
5.18l-3.88,3.88 \
c-0.39,0.39-0.39\
,1.02,0,1.41l0,0\
c0.39,0.39,1.02,\
0.39,1.41,0l5.59\
-5.59c0.39-0.39,\
0.39-1.02,0-1.41\
L15.7,5.71 C15.3\
2,5.32,14.68,5.3\
2,14.29,5.71z\x22/>\
</svg>\
\x00\x00\x01\x0c\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M8.71 12.29\
L11.3 9.7c.39-.3\
9 1.02-.39 1.41 \
0l2.59 2.59c.63.\
63.18 1.71-.71 1\
.71H9.41c-.89 0-\
1.33-1.08-.7-1.7\
1z\x22/></svg>\
\x00\x00\x01\xb0\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M3 13h2v-2H\
3v2zm0 4h2v-2H3v\
2zm2 4v-2H3c0 1.\
1.89 2 2 2zM3 9h\
2V7H3v2zm12 12h2\
v-2h-2v2zm4-18H9\
c-1.11 0-2 .9-2 \
2v10c0 1.1.89 2 \
2 2h10c1.1 0 2-.\
9 2-2V5c0-1.1-.9\
-2-2-2zm-1 12h-8\
c-.55 0-1-.45-1-\
1V6c0-.55.45-1 1\
-1h8c.55 0 1 .45\
1 1v8c0 .55-.45\
1-1 1zm-7 6h2v-\
2h-2v2zm-4 0h2v-\
2H7v2z\x22/></svg>\
\x00\x00\x01\x1b\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#bab\
dc2\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M12 2C6.48 \
2 2 6.48 2 12s4.\
48 10 10 10 10-4\
.48 10-10S17.52 \
2 12 2zm0 18c-4.\
42 0-8-3.58-8-8s\
3.58-8 8-8 8 3.5\
8 8 8-3.58 8-8 8\
z\x22/></svg>\
\x00\x00\x01G\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M9.29 6.71c\
-.39.39-.39 1.02\
0 1.41L13.17 12\
l-3.88 3.88c-.39\
.39-.39 1.02 0 1\
.41.39.39 1.02.3\
9 1.41 0l4.59-4.\
59c.39-.39.39-1.\
02 0-1.41L10.7 6\
.7c-.38-.38-1.02\
-.38-1.41.01z\x22/>\
</svg>\
\x00\x00\x01p\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M6 19c0 1.1\
.9 2 2 2h8c1.1 0\
2-.9 2-2V9c0-1.\
1-.9-2-2-2H8c-1.\
1 0-2 .9-2 2v10z\
M18 4h-2.5l-.71-\
.71c-.18-.18-.44\
-.29-.7-.29H9.91\
c-.26 0-.52.11-.\
7.29L8.5 4H6c-.5\
5 0-1 .45-1 1s.4\
5 1 1 1h12c.55 0\
1-.45 1-1s-.45-\
1-1-1z\x22/></svg>\
\x00\x00\x01\xc7\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#0\
e527c\x22><rect fil\
l=\x22none\x22 height=\
\x2224\x22 width=\x2224\x22/\
><path d=\x22M14.29\
,5.71L14.29,5.71\
c-0.39,0.39-0.39\
,1.02,0,1.41L18.\
17,11H3c-0.55,0-\
1,0.45-1,1v0c0,0\
.55,0.45,1,1,1h1\
5.18l-3.88,3.88 \
c-0.39,0.39-0.39\
,1.02,0,1.41l0,0\
c0.39,0.39,1.02,\
0.39,1.41,0l5.59\
-5.59c0.39-0.39,\
0.39-1.02,0-1.41\
L15.7,5.71 C15.3\
2,5.32,14.68,5.3\
2,14.29,5.71z\x22/>\
</svg>\
\x00\x00\x01\xfe\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22 opaci\
ty=\x22.87\x22/><path \
d=\x22M12 2C6.47 2 \
2 6.47 2 12s4.47\
10 10 10 10-4.4\
7 10-10S17.53 2 \
12 2zm4.3 14.3c-\
.39.39-1.02.39-1\
.41 0L12 13.41 9\
.11 16.3c-.39.39\
-1.02.39-1.41 0-\
.39-.39-.39-1.02\
0-1.41L10.59 12\
7.7 9.11c-.39-.\
39-.39-1.02 0-1.\
41.39-.39 1.02-.\
39 1.41 0L12 10.\
59l2.89-2.89c.39\
-.39 1.02-.39 1.\
41 0 .39.39.39 1\
.02 0 1.41L13.41\
12l2.89 2.89c.3\
8.38.38 1.02 0 1\
.41z\x22/></svg>\
\x00\x00\x01\x9a\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M17.59 3.59\
c-.38-.38-.89-.5\
9-1.42-.59H5c-1.\
11 0-2 .9-2 2v14\
c0 1.1.9 2 2 2h1\
4c1.1 0 2-.9 2-2\
V7.83c0-.53-.21-\
1.04-.59-1.41l-2\
.82-2.83zM12 19c\
-1.66 0-3-1.34-3\
-3s1.34-3 3-3 3 \
1.34 3 3-1.34 3-\
3 3zm1-10H7c-1.1\
0-2-.9-2-2s.9-2\
2-2h6c1.1 0 2 .\
9 2 2s-.9 2-2 2z\
\x22/></svg>\
\x00\x00\x02\x98\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 enable-ba\
ckground=\x22new 0 \
0 24 24\x22 height=\
\x2224px\x22 viewBox=\x22\
0 0 24 24\x22 width\
=\x2224px\x22 fill=\x22#4\
d5157\x22><g><rect \
fill=\x22none\x22 heig\
ht=\x2224\x22 width=\x222\
4\x22/><rect fill=\x22\
none\x22 height=\x2224\
\x22 width=\x2224\x22/></\
g><g><g><path d=\
\x22M20.08,11.42l-4\
.04-5.65C15.7,5.\
29,15.15,5,14.56\
,5h0c-1.49,0-2.3\
5,1.68-1.49,2.89\
L16,12l-2.93,4.1\
1 c-0.87,1.21,0,\
2.89,1.49,2.89h0\
c0.59,0,1.15-0.2\
9,1.49-0.77l4.04\
-5.65C20.33,12.2\
3,20.33,11.77,20\
.08,11.42z\x22/><pa\
th d=\x22M13.08,11.\
42L9.05,5.77C8.7\
,5.29,8.15,5,7.5\
6,5h0C6.07,5,5.2\
,6.68,6.07,7.89L\
9,12l-2.93,4.11C\
5.2,17.32,6.07,1\
9,7.56,19h0 c0.5\
9,0,1.15-0.29,1.\
49-0.77l4.04-5.6\
5C13.33,12.23,13\
.33,11.77,13.08,\
11.42z\x22/></g></g\
></svg>\
\x00\x00\x01d\
<\
svg xmlns=\x22http:\
//www.w3.org/200\
0/svg\x22 height=\x222\
4px\x22 viewBox=\x220 \
0 24 24\x22 width=\x22\
24px\x22 fill=\x22#4d5\
157\x22><path d=\x22M0\
0h24v24H0V0z\x22 f\
ill=\x22none\x22/><pat\
h d=\x22M5 13h11.17\
l-4.88 4.88c-.39\
.39-.39 1.03 0 1\
.42.39.39 1.02.3\
9 1.41 0l6.59-6.\
59c.39-.39.39-1.\
02 0-1.41l-6.58-\
6.6c-.39-.39-1.0\
2-.39-1.41 0-.39\
.39-.39 1.02 0 1\
.41L16.17 11H5c-\
.55 0-1 .45-1 1s\
.45 1 1 1z\x22/></s\
vg>\
"
qt_resource_name = b"\
\x00\x05\
\x00o\xa6S\
\x00i\
\x00c\x00o\x00n\x00s\
\x00\x03\
\x00\x00p7\
\x00i\
\x00m\x00g\
\x00\x09\
\x07i\x93X\
\x00i\
\x00m\x00p\x00o\x00r\x00t\x00-\x00f\x00x\
\x00\x0d\
\x0e\x8f\x9f\xc5\
\x00f\
\x00i\x00l\x00t\x00e\x00r\x00-\x00r\x00e\x00m\x00o\x00v\x00e\
\x00\x06\
\x07\x05\xcc\x94\
\x00i\
\x00n\x00v\x00e\x00r\x00t\
\x00\x05\
\x00j6\x95\
\x00c\
\x00l\x00o\x00s\x00e\
\x00\x03\
\x00\x00jW\
\x00c\
\x00o\x00g\
\x00\x08\
\x08\xaa\x8d\x18\
\x00p\
\x00a\x00s\x00t\x00e\x00-\x00f\x00x\
\x00\x0a\
\x06\x992F\
\x00i\
\x00m\x00p\x00o\x00r\x00t\x00-\x00c\x00s\x00v\
\x00\x05\
\x00j+\x82\
\x00c\
\x00l\x00e\x00a\x00r\
\x00\x05\
\x00zy%\
\x00t\
\x00a\x00b\x00l\x00e\
\x00\x09\
\x06\x99\x17\xc9\
\x00c\
\x00l\x00o\x00s\x00e\x00-\x00m\x00d\x00i\
\x00\x06\
\x06\xd0:\xc2\
\x00f\
\x00i\x00l\x00t\x00e\x00r\
\x00\x0a\
\x06\xae\xa5\xfe\
\x00a\
\x00r\x00r\x00o\x00w\x00-\x00d\x00o\x00w\x00n\
\x00\x0e\
\x00R\x02\x87\
\x00g\
\x00m\x00s\x00-\x00s\x00p\x00l\x00a\x00s\x00h\x00.\x00p\x00n\x00g\
\x00\x0c\
\x0cf\x9f'\
\x00G\
\x00M\x00S\x00-\x00l\x00o\x00g\x00o\x00.\x00p\x00n\x00g\
\x00\x0f\
\x0f\xda\xd2\xf3\
\x00S\
\x00t\x00y\x00l\x00e\x00L\x00i\x00g\x00h\x00t\x00I\x00c\x00o\x00n\x00s\
\x00\x08\
\x07O\xafS\
\x00A\
\x00p\x00p\x00I\x00c\x00o\x00n\x00s\
\x00\x1b\
\x05\xed\x82\x07\
\x00f\
\x00o\x00l\x00d\x00e\x00r\x00-\x00o\x00p\x00e\x00n\x00-\x00o\x00u\x00t\x00l\x00i\
\x00n\x00e\x00_\x00m\x00d\x00i\x00.\x00s\x00v\x00g\
\x00\x0c\
\x0b\xdf,\xc7\
\x00s\
\x00e\x00t\x00t\x00i\x00n\x00g\x00s\x00.\x00s\x00v\x00g\
\x00\x15\
\x0d\xf0,\x07\
\x00t\
\x00r\x00a\x00y\x00-\x00a\x00r\x00r\x00o\x00w\x00-\x00u\x00p\x00_\x00m\x00d\x00i\
\x00.\x00s\x00v\x00g\
\x00 \
\x0b9J\x87\
\x00c\
\x00h\x00e\x00c\x00k\x00_\x00b\x00o\x00x\x00_\x00o\x00u\x00t\x00l\x00i\x00n\x00e\
\x00_\x00b\x00l\x00a\x00n\x00k\x00_\x002\x004\x00d\x00p\x00.\x00p\x00n\x00g\
\x00\x0b\
\x03~\xd4\x07\
\x00d\
\x00n\x00a\x00_\x00m\x00d\x00i\x00.\x00s\x00v\x00g\
\x00\x16\
\x02\x1e2\x07\
\x00g\
\x00o\x00o\x00g\x00l\x00e\x00-\x00s\x00p\x00r\x00e\x00a\x00d\x00s\x00h\x00e\x00e\
\x00t\x00.\x00s\x00v\x00g\
\x00\x0c\
\x0cf\x87\xff\
\x00G\
\x00M\x00S\x00-\x00l\x00o\x00g\x00o\x00.\x00i\x00c\x00o\
\x00\x1c\
\x00\xed\x17\xc7\
\x00c\
\x00o\x00n\x00t\x00e\x00n\x00t\x00-\x00s\x00a\x00v\x00e\x00-\x00o\x00u\x00t\x00l\
\x00i\x00n\x00e\x00_\x00m\x00d\x00i\x00.\x00s\x00v\x00g\
\x00\x1f\
\x05\xd2W\x87\
\x00f\
\x00o\x00l\x00d\x00e\x00r\x00-\x00o\x00p\x00e\x00n\x00-\x00o\x00u\x00t\x00l\x00i\
\x00n\x00e\x00-\x00d\x00n\x00a\x00_\x00m\x00d\x00i\x00.\x00s\x00v\x00g\
\x00\x12\
\x0b\xe4\xfb\x87\
\x00c\
\x00h\x00e\x00c\x00k\x00_\x00b\x00o\x00x\x00_\x002\x004\x00d\x00p\x00.\x00p\x00n\
\x00g\
\x00 \
\x04\x16\x94g\
\x00c\
\x00a\x00l\x00e\x00n\x00d\x00a\x00r\x00_\x00t\x00o\x00d\x00a\x00y\x00_\x00d\x00i\
\x00s\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x16\
\x0d\x097G\
\x00a\
\x00r\x00r\x00o\x00w\x00_\x00d\x00r\x00o\x00p\x00_\x00u\x00p\x00_\x002\x004\x00d\
\x00p\x00.\x00s\x00v\x00g\
\x00\x18\
\x0e\xc6\xf1\xc7\
\x00a\
\x00r\x00r\x00o\x00w\x00_\x00d\x00r\x00o\x00p\x00_\x00d\x00o\x00w\x00n\x00_\x002\
\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x16\
\x02jS\x07\
\x00v\
\x00e\x00r\x00t\x00i\x00c\x00a\x00l\x00_\x00r\x00u\x00l\x00e\x00_\x002\x004\x00d\
\x00p\x00.\x00s\x00v\x00g\
\x00\x14\
\x02\x1e\x04\x87\
\x00r\
\x00e\x00s\x00t\x00a\x00r\x00t\x00_\x00a\x00l\x00t\x00_\x002\x004\x00d\x00p\x00.\
\x00s\x00v\x00g\
\x00\x16\
\x0e\xd3\x0d\xe7\
\x00c\
\x00o\x00r\x00n\x00e\x00r\x00_\x00b\x00u\x00t\x00t\x00o\x00n\x00_\x002\x004\x00d\
\x00p\x00.\x00s\x00v\x00g\
\x00\x1d\
\x05\xe3\xd5G\
\x00d\
\x00o\x00u\x00b\x00l\x00e\x00_\x00a\x00r\x00r\x00o\x00w\x00_\x00s\x00i\x00d\x00e\
\x00b\x00a\x00r\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1f\
\x03\xf9#\x87\
\x00c\
\x00o\x00r\x00n\x00e\x00r\x00_\x00b\x00u\x00t\x00t\x00o\x00n\x00_\x00d\x00i\x00s\
\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00!\
\x07\xa7\xcd\xa7\
\x00h\
\x00o\x00r\x00i\x00z\x00o\x00n\x00t\x00a\x00l\x00_\x00r\x00u\x00l\x00e\x00_\x00d\
\x00i\x00s\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\
\x00\x22\
\x05Vv'\
\x00d\
\x00r\x00a\x00g\x00_\x00i\x00n\x00d\x00i\x00c\x00a\x00t\x00o\x00r\x00_\x00h\x00o\
\x00r\x00i\x00z\x00o\x00n\x00t\x00a\x00l\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\
\x00g\
\x00\x0e\
\x07\xc7d\x07\
\x00c\
\x00h\x00e\x00c\x00k\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x14\
\x02\x87a\x87\
\x00e\
\x00x\x00p\x00a\x00n\x00d\x00_\x00l\x00e\x00s\x00s\x00_\x002\x004\x00d\x00p\x00.\
\x00s\x00v\x00g\
\x00\x14\
\x0e0\xe6G\
\x00f\
\x00o\x00l\x00d\x00e\x00r\x00_\x00o\x00p\x00e\x00n\x00_\x002\x004\x00d\x00p\x00.\
\x00s\x00v\x00g\
\x00\x1f\
\x0dx\xe8'\
\x00r\
\x00a\x00d\x00i\x00o\x00_\x00b\x00u\x00t\x00t\x00o\x00n\x00_\x00u\x00n\x00c\x00h\
\x00e\x00c\x00k\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00(\
\x09e\x9a\xe7\
\x00d\
\x00r\x00a\x00g\x00_\x00i\x00n\x00d\x00i\x00c\x00a\x00t\x00o\x00r\x00_\x00v\x00e\
\x00r\x00t\x00i\x00c\x00a\x00l\x00_\x00s\x00i\x00d\x00e\x00b\x00a\x00r\x00_\x002\
\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1f\
\x03P'\x07\
\x00a\
\x00r\x00r\x00o\x00w\x00_\x00f\x00o\x00r\x00w\x00a\x00r\x00d\x00_\x00d\x00i\x00s\
\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x15\
\x04\x1bkG\
\x00a\
\x00r\x00r\x00o\x00w\x00_\x00u\x00p\x00w\x00a\x00r\x00d\x00_\x002\x004\x00d\x00p\
\x00.\x00s\x00v\x00g\
\x00 \
\x0b9G\x07\
\x00c\
\x00h\x00e\x00c\x00k\x00_\x00b\x00o\x00x\x00_\x00o\x00u\x00t\x00l\x00i\x00n\x00e\
\x00_\x00b\x00l\x00a\x00n\x00k\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1d\
\x0ev6\x87\
\x00e\
\x00x\x00p\x00a\x00n\x00d\x00_\x00l\x00e\x00s\x00s\x00_\x00d\x00i\x00s\x00a\x00b\
\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1b\
\x0c\x84\xeeg\
\x00c\
\x00h\x00e\x00c\x00k\x00_\x00b\x00o\x00x\x00_\x00d\x00i\x00s\x00a\x00b\x00l\x00e\
\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x15\
\x05p\x09\x07\
\x00c\
\x00h\x00e\x00c\x00k\x00_\x00c\x00i\x00r\x00c\x00l\x00e\x00_\x002\x004\x00d\x00p\
\x00.\x00s\x00v\x00g\
\x00 \
\x0490g\
\x00d\
\x00r\x00a\x00g\x00_\x00i\x00n\x00d\x00i\x00c\x00a\x00t\x00o\x00r\x00_\x00v\x00e\
\x00r\x00t\x00i\x00c\x00a\x00l\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00&\
\x04\xc0\xfa\x87\
\x00d\
\x00o\x00u\x00b\x00l\x00e\x00_\x00a\x00r\x00r\x00o\x00w\x00_\x00d\x00i\x00s\x00a\
\x00b\x00l\x00e\x00d\x00_\x00s\x00i\x00d\x00e\x00b\x00a\x00r\x00_\x002\x004\x00d\
\x00p\x00.\x00s\x00v\x00g\
\x00\x16\
\x04\xe6P'\
\x00v\
\x00e\x00r\x00t\x00i\x00c\x00a\x00l\x00_\x00l\x00i\x00n\x00e\x00_\x002\x004\x00d\
\x00p\x00.\x00s\x00v\x00g\
\x00\x1e\
\x0f9l\xc7\
\x00c\
\x00o\x00r\x00n\x00e\x00r\x00_\x00b\x00u\x00t\x00t\x00o\x00n\x00_\x00p\x00r\x00e\
\x00s\x00s\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x0d\
\x075\x9f\xc7\
\x00h\
\x00e\x00l\x00p\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x17\
\x06\xd9\x8fG\
\x00c\
\x00l\x00o\x00s\x00e\x00_\x00d\x00i\x00s\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\
\x00d\x00p\x00.\x00s\x00v\x00g\
\x00*\
\x07\xe9\x1fG\
\x00d\
\x00r\x00a\x00g\x00_\x00i\x00n\x00d\x00i\x00c\x00a\x00t\x00o\x00r\x00_\x00h\x00o\
\x00r\x00i\x00z\x00o\x00n\x00t\x00a\x00l\x00_\x00s\x00i\x00d\x00e\x00b\x00a\x00r\
\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x17\
\x00\x0c\xb2\x87\
\x00c\
\x00a\x00l\x00e\x00n\x00d\x00a\x00r\x00_\x00t\x00o\x00d\x00a\x00y\x00_\x002\x004\
\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1f\
\x08C\xae\xc7\
\x00v\
\x00e\x00r\x00t\x00i\x00c\x00a\x00l\x00_\x00l\x00i\x00n\x00e\x00_\x00d\x00i\x00s\
\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00&\
\x03\x16\xa1\xa7\
\x00r\
\x00a\x00d\x00i\x00o\x00_\x00b\x00u\x00t\x00t\x00o\x00n\x00_\x00c\x00h\x00e\x00c\
\x00k\x00e\x00d\x00_\x00d\x00i\x00s\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\
\x00p\x00.\x00s\x00v\x00g\
\x00!\
\x0c}&\xe7\
\x00a\
\x00r\x00r\x00o\x00w\x00_\x00d\x00r\x00o\x00p\x00_\x00d\x00o\x00w\x00n\x00_\x00d\
\x00i\x00s\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\
\x00\x13\
\x08*\xc8G\
\x00a\
\x00r\x00r\x00o\x00w\x00_\x00b\x00a\x00c\x00k\x00_\x002\x004\x00d\x00p\x00.\x00s\
\x00v\x00g\
\x00\x1c\
\x0bL\x9d\xe7\
\x00a\
\x00r\x00r\x00o\x00w\x00_\x00b\x00a\x00c\x00k\x00_\x00d\x00i\x00s\x00a\x00b\x00l\
\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x14\
\x07\xc3!\x87\
\x00e\
\x00x\x00p\x00a\x00n\x00d\x00_\x00m\x00o\x00r\x00e\x00_\x002\x004\x00d\x00p\x00.\
\x00s\x00v\x00g\
\x00 \
\x01\x0a:'\
\x00i\
\x00n\x00d\x00e\x00t\x00e\x00r\x00m\x00i\x00n\x00a\x00t\x00e\x00_\x00c\x00h\x00e\
\x00c\x00k\x00_\x00b\x00o\x00x\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00)\
\x05\x01U\xc7\
\x00i\
\x00n\x00d\x00e\x00t\x00e\x00r\x00m\x00i\x00n\x00a\x00t\x00e\x00_\x00c\x00h\x00e\
\x00c\x00k\x00_\x00b\x00o\x00x\x00_\x00d\x00i\x00s\x00a\x00b\x00l\x00e\x00d\x00_\
\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1e\
\x0f/\x91\x87\
\x00a\
\x00r\x00r\x00o\x00w\x00_\x00u\x00p\x00w\x00a\x00r\x00d\x00_\x00d\x00i\x00s\x00a\
\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1f\
\x0c\x02\xce\x87\
\x00c\
\x00h\x00e\x00v\x00r\x00o\x00n\x00_\x00r\x00i\x00g\x00h\x00t\x00_\x00d\x00i\x00s\
\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1d\
\x06w\xe7\x87\
\x00e\
\x00x\x00p\x00a\x00n\x00d\x00_\x00m\x00o\x00r\x00e\x00_\x00d\x00i\x00s\x00a\x00b\
\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1e\
\x0c'\xe2\x87\
\x00d\
\x00o\x00u\x00b\x00l\x00e\x00_\x00a\x00r\x00r\x00o\x00w\x00_\x00d\x00i\x00s\x00a\
\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x18\
\x07k\xaf\x07\
\x00h\
\x00o\x00r\x00i\x00z\x00o\x00n\x00t\x00a\x00l\x00_\x00r\x00u\x00l\x00e\x00_\x002\
\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x16\
\x01\xe2Q'\
\x00f\
\x00l\x00i\x00p\x00_\x00t\x00o\x00_\x00f\x00r\x00o\x00n\x00t\x00_\x002\x004\x00d\
\x00p\x00.\x00s\x00v\x00g\
\x00)\
\x0a\xa4\xd9\x07\
\x00c\
\x00h\x00e\x00c\x00k\x00_\x00b\x00o\x00x\x00_\x00o\x00u\x00t\x00l\x00i\x00n\x00e\
\x00_\x00b\x00l\x00a\x00n\x00k\x00_\x00d\x00i\x00s\x00a\x00b\x00l\x00e\x00d\x00_\
\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1a\
\x09\x1b\x9bG\
\x00c\
\x00r\x00e\x00a\x00t\x00e\x00_\x00n\x00e\x00w\x00_\x00f\x00o\x00l\x00d\x00e\x00r\
\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x12\
\x0b\xe4\xf6\x07\
\x00c\
\x00h\x00e\x00c\x00k\x00_\x00b\x00o\x00x\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\
\x00g\
\x00\x1d\
\x00\xdb\x8a\x87\
\x00r\
\x00a\x00d\x00i\x00o\x00_\x00b\x00u\x00t\x00t\x00o\x00n\x00_\x00c\x00h\x00e\x00c\
\x00k\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1f\
\x08&\x0d\xc7\
\x00v\
\x00e\x00r\x00t\x00i\x00c\x00a\x00l\x00_\x00r\x00u\x00l\x00e\x00_\x00d\x00i\x00s\
\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x0e\
\x0c`$\xe7\
\x00c\
\x00l\x00o\x00s\x00e\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x0d\
\x0d\xa6\x1f\xc7\
\x00e\
\x00a\x00s\x00t\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1f\
\x04\xacU\x07\
\x00a\
\x00r\x00r\x00o\x00w\x00_\x00d\x00r\x00o\x00p\x00_\x00u\x00p\x00_\x00d\x00i\x00s\
\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x1f\
\x08ao\xc7\
\x00f\
\x00l\x00i\x00p\x00_\x00t\x00o\x00_\x00f\x00r\x00o\x00n\x00t\x00_\x00d\x00i\x00s\
\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00(\
\x0f@Ig\
\x00r\
\x00a\x00d\x00i\x00o\x00_\x00b\x00u\x00t\x00t\x00o\x00n\x00_\x00u\x00n\x00c\x00h\
\x00e\x00c\x00k\x00e\x00d\x00_\x00d\x00i\x00s\x00a\x00b\x00l\x00e\x00d\x00_\x002\
\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x16\
\x03gr'\
\x00c\
\x00h\x00e\x00v\x00r\x00o\x00n\x00_\x00r\x00i\x00g\x00h\x00t\x00_\x002\x004\x00d\
\x00p\x00.\x00s\x00v\x00g\
\x00\x0f\
\x0f'\x95g\
\x00d\
\x00e\x00l\x00e\x00t\x00e\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x16\
\x01\xbc~\xc7\
\x00e\
\x00a\x00s\x00t\x00_\x00d\x00i\x00s\x00a\x00b\x00l\x00e\x00d\x00_\x002\x004\x00d\
\x00p\x00.\x00s\x00v\x00g\
\x00\x0f\
\x03\x8a\xb8g\
\x00c\
\x00a\x00n\x00c\x00e\x00l\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x0d\
\x09\xaa?\x87\
\x00s\
\x00a\x00v\x00e\x00_\x002\x004\x00d\x00p\x00.\x00s\x00v\x00g\
\x00\x15\
\x07\xd7s\x07\
\x00d\
\x00o\x00u\x00b\x00l\x00e\x00_\x00a\x00r\x00r\x00o\x00w\x00_\x002\x004\x00d\x00p\
\x00.\x00s\x00v\x00g\
\x00\x16\
\x0a\xc1\x08\xa7\
\x00a\
\x00r\x00r\x00o\x00w\x00_\x00f\x00o\x00r\x00w\x00a\x00r\x00d\x00_\x002\x004\x00d\
\x00p\x00.\x00s\x00v\x00g\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x0e\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00v\x00\x00\x00\x00\x00\x01\x00\x00\x19\xaf\
\x00\x00\x01|\x92\x17g\xb0\
\x00\x00\x00\x10\x00\x02\x00\x00\x00\x02\x00\x00\x00W\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\xb2\x00\x00\x00\x00\x00\x01\x00\x005\x10\
\x00\x00\x01|\x92\x17g\xb1\
\x00\x00\x00f\x00\x00\x00\x00\x00\x01\x00\x00\x18\x17\
\x00\x00\x01|\x92\x17g\xb0\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x0f\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\xc2\x00\x00\x00\x00\x00\x01\x00\x006\x9e\
\x00\x00\x01|\x92\x17g\xb5\
\x00\x00\x00\xd2\x00\x00\x00\x00\x00\x01\x00\x008T\
\x00\x00\x01|\x92\x17g\xaf\
\x00\x00\x00\x98\x00\x00\x00\x00\x00\x01\x00\x00):\
\x00\x00\x01|\x92\x17g\xb4\
\x00\x00\x00\xfc\x00\x00\x00\x00\x00\x01\x00\x00<\xa5\
\x00\x00\x01|\x92\x17g\xaf\
\x00\x00\x00\xea\x00\x00\x00\x00\x00\x01\x00\x00::\
\x00\x00\x01|\x92\x17g\xb2\
\x00\x00\x00T\x00\x00\x00\x00\x00\x01\x00\x00\x0e\x16\
\x00\x00\x01|\x92\x17g\xb5\
\x00\x00\x00\x1c\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01|x\x8d\xb7\x92\
\x00\x00\x00\x82\x00\x00\x00\x00\x00\x01\x00\x00\x1f\xab\
\x00\x00\x01|x\x8f\x19P\
\x00\x00\x004\x00\x00\x00\x00\x00\x01\x00\x00\x0b\x1f\
\x00\x00\x01|\x92\x17g\xb3\
\x00\x00\x01z\x00\x02\x00\x00\x00\x0a\x00\x00\x00M\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x01V\x00\x02\x00\x00\x00<\x00\x00\x00\x11\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x0a\x06\x00\x00\x00\x00\x00\x01\x00\x01\x8b\xc1\
\x00\x00\x01|K\xb5\xff\xdb\
\x00\x00\x0ez\x00\x00\x00\x00\x00\x01\x00\x01\xa5L\
\x00\x00\x01|K\xb5\xff\xf3\
\x00\x00\x0b\xb0\x00\x00\x00\x00\x00\x01\x00\x01\x94\xba\
\x00\x00\x01|K\xb5\xff\xf2\
\x00\x00\x10t\x00\x00\x00\x00\x00\x01\x00\x01\xb1\xfe\
\x00\x00\x01|K\xb5\xff\xeb\
\x00\x00\x0d\x8c\x00\x00\x00\x00\x00\x01\x00\x01\x9f\x1b\
\x00\x00\x01|K\xb5\xff\xee\
\x00\x00\x04X\x00\x00\x00\x00\x00\x01\x00\x01e\x94\
\x00\x00\x01|K\xb5\xff\xf6\
\x00\x00\x04&\x00\x00\x00\x00\x00\x01\x00\x01d?\
\x00\x00\x01|K\xb5\xff\xf8\
\x00\x00\x05\xf0\x00\x00\x00\x00\x00\x01\x00\x01qg\
\x00\x00\x01|K\xb5\xff\xeb\
\x00\x00\x0a~\x00\x00\x00\x00\x00\x01\x00\x01\x8eA\
\x00\x00\x01|K\xb5\xff\xf4\
\x00\x00\x06\xe6\x00\x00\x00\x00\x00\x01\x00\x01w\x0a\
\x00\x00\x01|K\xb5\xff\xd9\
\x00\x00\x10\x1e\x00\x00\x00\x00\x00\x01\x00\x01\xaf?\
\x00\x00\x01|K\xb5\xff\xe0\
\x00\x00\x10\xa6\x00\x00\x00\x00\x00\x01\x00\x01\xb3\xc9\
\x00\x00\x01|K\xb5\xff\xdc\
\x00\x00\x04\xf8\x00\x00\x00\x00\x00\x01\x00\x01k\xfc\
\x00\x00\x01|K\xb5\xff\xe3\
\x00\x00\x03x\x00\x00\x00\x00\x00\x01\x00\x01`\xbb\
\x00\x00\x01|K\xb5\xff\xdc\
\x00\x00\x07*\x00\x00\x00\x00\x00\x01\x00\x01xr\
\x00\x00\x01|K\xb5\xff\xda\
\x00\x00\x08L\x00\x00\x00\x00\x00\x01\x00\x01\x7f\x85\
\x00\x00\x01|K\xb5\xff\xe9\
\x00\x00\x0f@\x00\x00\x00\x00\x00\x01\x00\x01\xab\x5c\
\x00\x00\x01|K\xb5\xff\xd8\
\x00\x00\x08\x92\x00\x00\x00\x00\x00\x01\x00\x01\x81U\
\x00\x00\x01|K\xb5\xff\xe7\
\x00\x00\x08\xe4\x00\x00\x00\x00\x00\x01\x00\x01\x83\xf1\
\x00\x00\x01|K\xb5\xff\xf7\
\x00\x00\x0b\xf6\x00\x00\x00\x00\x00\x01\x00\x01\x95\xe8\
\x00\x00\x01|K\xb5\xff\xf3\
\x00\x00\x05\x84\x00\x00\x00\x00\x00\x01\x00\x01nY\
\x00\x00\x01|K\xb5\xff\xe8\
\x00\x00\x08\x1c\x00\x00\x00\x00\x00\x01\x00\x01}\xf8\
\x00\x00\x01|K\xb5\xff\xdf\
\x00\x00\x04\xb8\x00\x00\x00\x00\x00\x01\x00\x01i`\
\x00\x00\x01|K\xb5\xff\xe7\
\x00\x00\x0c\xd4\x00\x00\x00\x00\x00\x01\x00\x01\x99\xc6\
\x00\x00\x01|K\xb5\xff\xed\
\x00\x00\x09x\x00\x00\x00\x00\x00\x01\x00\x01\x88I\
\x00\x00\x01|K\xb5\xff\xe2\
\x00\x00\x09X\x00\x00\x00\x00\x00\x01\x00\x01\x86\x0c\
\x00\x00\x01|K\xb5\xff\xf1\
\x00\x00\x0dV\x00\x00\x00\x00\x00\x01\x00\x01\x9d\xbd\
\x00\x00\x01|K\xb5\xff\xf1\
\x00\x00\x05<\x00\x00\x00\x00\x00\x01\x00\x01l\xfb\
\x00\x00\x01|K\xb5\xff\xf2\
\x00\x00\x0b\x82\x00\x00\x00\x00\x00\x01\x00\x01\x93_\
\x00\x00\x01|K\xb5\xff\xed\
\x00\x00\x05\xce\x00\x00\x00\x00\x00\x01\x00\x01p\x1f\
\x00\x00\x01|K\xb5\xff\xdd\
\x00\x00\x10\xea\x00\x00\x00\x00\x00\x01\x00\x01\xb7i\
\x00\x00\x01|K\xb5\xff\xe5\
\x00\x00\x09\xac\x00\x00\x00\x00\x00\x01\x00\x01\x89\xfb\
\x00\x00\x01|K\xb5\xff\xe8\
\x00\x00\x0e\xba\x00\x00\x00\x00\x00\x01\x00\x01\xa6\x8a\
\x00\x00\x01|K\xb5\xff\xf9\
\x00\x00\x0b\x18\x00\x00\x00\x00\x00\x01\x00\x01\x90\x8f\
\x00\x00\x01|K\xb5\xff\xd3\
\x00\x00\x0a:\x00\x00\x00\x00\x00\x01\x00\x01\x8d%\
\x00\x00\x01|K\xb5\xff\xf7\
\x00\x00\x0f\x84\x00\x00\x00\x00\x00\x01\x00\x01\xacl\
\x00\x00\x01|K\xb5\xff\xee\
\x00\x00\x0e\x16\x00\x00\x00\x00\x00\x01\x00\x01\xa2\x0f\
\x00\x00\x01|K\xb5\xff\xe4\
\x00\x00\x06\x90\x00\x00\x00\x00\x00\x01\x00\x01u:\
\x00\x00\x01|K\xb5\xff\xea\
\x00\x00\x10\xca\x00\x00\x00\x00\x00\x01\x00\x01\xb5\xcb\
\x00\x00\x01|K\xb5\xff\xf6\
\x00\x00\x0d\xbe\x00\x00\x00\x00\x00\x01\x00\x01\xa0\xcf\
\x00\x00\x01|K\xb5\xff\xdf\
\x00\x00\x11\x1a\x00\x00\x00\x00\x00\x01\x00\x01\xba\x05\
\x00\x00\x01|K\xb5\xff\xd9\
\x00\x00\x07Z\x00\x00\x00\x00\x00\x01\x00\x01y\xd7\
\x00\x00\x01|K\xb5\xff\xde\
\x00\x00\x0bD\x00\x00\x00\x00\x00\x01\x00\x01\x91\xf7\
\x00\x00\x01|K\xb5\xff\xd4\
\x00\x00\x0eP\x00\x00\x00\x00\x00\x01\x00\x01\xa3\xb3\
\x00\x00\x01|K\xb5\xff\xdd\
\x00\x00\x0c\x90\x00\x00\x00\x00\x00\x01\x00\x01\x98{\
\x00\x00\x01|K\xb5\xff\xe0\
\x00\x00\x0d\x14\x00\x00\x00\x00\x00\x01\x00\x01\x9b!\
\x00\x00\x01|K\xb5\xff\xe6\
\x00\x00\x0e\xfe\x00\x00\x00\x00\x00\x01\x00\x01\xa7\xdf\
\x00\x00\x01|K\xb5\xff\xe1\
\x00\x00\x0a\xd0\x00\x00\x00\x00\x00\x01\x00\x01\x8f\x7f\
\x00\x00\x01|K\xb5\xff\xd6\
\x00\x00\x07\xe0\x00\x00\x00\x00\x00\x01\x00\x01|_\
\x00\x00\x01|K\xb5\xff\xde\
\x00\x00\x03\xbe\x00\x00\x00\x00\x00\x01\x00\x01b\x1f\
\x00\x00\x01|K\xb5\xff\xd6\
\x00\x00\x06L\x00\x00\x00\x00\x00\x01\x00\x01t\x1b\
\x00\x00\x01|K\xb5\xff\xf4\
\x00\x00\x0f \x00\x00\x00\x00\x00\x01\x00\x01\xa9\x91\
\x00\x00\x01|K\xb5\xff\xea\
\x00\x00\x06\x1e\x00\x00\x00\x00\x00\x01\x00\x01r\xaf\
\x00\x00\x01|K\xb5\xff\xf0\
\x00\x00\x07\xa0\x00\x00\x00\x00\x00\x01\x00\x01{\x17\
\x00\x00\x01|K\xb5\xff\xec\
\x00\x00\x03\xf0\x00\x00\x00\x00\x00\x01\x00\x01c/\
\x00\x00\x01|K\xb5\xff\xd5\
\x00\x00\x04\x86\x00\x00\x00\x00\x00\x01\x00\x01ha\
\x00\x00\x01|K\xb5\xff\xe2\
\x00\x00\x10P\x00\x00\x00\x00\x00\x01\x00\x01\xb0\x8a\
\x00\x00\x01|K\xb5\xff\xe4\
\x00\x00\x0cN\x00\x00\x00\x00\x00\x01\x00\x01\x97\x16\
\x00\x00\x01|K\xb5\xff\xda\
\x00\x00\x09\x16\x00\x00\x00\x00\x00\x01\x00\x01\x85\x0d\
\x00\x00\x01|K\xb5\xff\xe3\
\x00\x00\x0f\xc8\x00\x00\x00\x00\x00\x01\x00\x01\xae \
\x00\x00\x01|K\xb5\xff\xf5\
\x00\x00\x02\xcc\x00\x00\x00\x00\x00\x01\x00\x01P7\
\x00\x00\x01|\x92\x17g\xb1\
\x00\x00\x02|\x00\x00\x00\x00\x00\x01\x00\x01\x1a9\
\x00\x00\x01|\x92\x17g\xb4\
\x00\x00\x02`\x00\x00\x00\x00\x00\x01\x00\x01\x15\xd8\
\x00\x00\x01|\x92\x17g\xb2\
\x00\x00\x03\x0a\x00\x00\x00\x00\x00\x01\x00\x01R$\
\x00\x00\x01|z\x1a\x99\xb6\
\x00\x00\x01\x90\x00\x00\x00\x00\x00\x01\x00\x01\x0c\x99\
\x00\x00\x01|\x92\x17g\xb3\
\x00\x00\x02\x1a\x00\x00\x00\x00\x00\x01\x00\x01\x14\xb3\
\x00\x00\x01|x\xc9R\x9c\
\x00\x00\x01\xcc\x00\x00\x00\x00\x00\x01\x00\x01\x0eT\
\x00\x00\x01|\x92\x17g\xb5\
\x00\x00\x03N\x00\x00\x00\x00\x00\x01\x00\x01_+\
\x00\x00\x01|x\xc8\xac\x18\
\x00\x00\x02\xae\x00\x01\x00\x00\x00\x01\x00\x01\x1cA\
\x00\x00\x01|\x92\x17g\xae\
\x00\x00\x01\xea\x00\x00\x00\x00\x00\x01\x00\x01\x12\xf9\
\x00\x00\x01|\x92\x17g\xb6\
\x00\x00\x01\x16\x00\x00\x00\x00\x00\x01\x00\x00>\x17\
\x00\x00\x01|\x92\x17g\xb7\
\x00\x00\x018\x00\x00\x00\x00\x00\x01\x00\x00\xd0\xdd\
\x00\x00\x01|\x92\x17g\xb7\
"
def qInitResources():
QtCore.qRegisterResourceData(0x03, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x03, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
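# --- Hedged usage note (not part of the pyrcc-generated output) ---
# Assuming this module was generated for PyQt5, importing it runs
# qInitResources() and makes the embedded images reachable through Qt's
# ':/' resource scheme. The exact prefix nesting is encoded in
# qt_resource_struct above, but usage looks roughly like:
#
#     from PyQt5 import QtGui
#     icon = QtGui.QIcon(':/icons/close_24dp.svg')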
|
the-stack_0_234 |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class lsp_admin_group_include_all(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls - based on the path /brocade_mpls_rpc/show-mpls-lsp-debug/output/lsp/show-mpls-lsp-debug-info/show-mpls-lsp-common-info/lsp-config-frr-admin-groups/lsp-admin-group/lsp-admin-group-include-all. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__lsp_admin_group_include_all_group_id',)
_yang_name = 'lsp-admin-group-include-all'
_rest_name = 'lsp-admin-group-include-all'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__lsp_admin_group_include_all_group_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-admin-group-include-all-group-id", rest_name="lsp-admin-group-include-all-group-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'brocade_mpls_rpc', u'show-mpls-lsp-debug', u'output', u'lsp', u'show-mpls-lsp-debug-info', u'show-mpls-lsp-common-info', u'lsp-config-frr-admin-groups', u'lsp-admin-group', u'lsp-admin-group-include-all']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'show-mpls-lsp-debug', u'output', u'lsp', u'lsp-config-frr-admin-groups', u'lsp-admin-group-include-all']
def _get_lsp_admin_group_include_all_group_id(self):
"""
Getter method for lsp_admin_group_include_all_group_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_debug/output/lsp/show_mpls_lsp_debug_info/show_mpls_lsp_common_info/lsp_config_frr_admin_groups/lsp_admin_group/lsp_admin_group_include_all/lsp_admin_group_include_all_group_id (uint32)
YANG Description: Include all admin group id
"""
return self.__lsp_admin_group_include_all_group_id
def _set_lsp_admin_group_include_all_group_id(self, v, load=False):
"""
Setter method for lsp_admin_group_include_all_group_id, mapped from YANG variable /brocade_mpls_rpc/show_mpls_lsp_debug/output/lsp/show_mpls_lsp_debug_info/show_mpls_lsp_common_info/lsp_config_frr_admin_groups/lsp_admin_group/lsp_admin_group_include_all/lsp_admin_group_include_all_group_id (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_admin_group_include_all_group_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_admin_group_include_all_group_id() directly.
YANG Description: Include all admin group id
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-admin-group-include-all-group-id", rest_name="lsp-admin-group-include-all-group-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_admin_group_include_all_group_id must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-admin-group-include-all-group-id", rest_name="lsp-admin-group-include-all-group-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__lsp_admin_group_include_all_group_id = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_admin_group_include_all_group_id(self):
self.__lsp_admin_group_include_all_group_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-admin-group-include-all-group-id", rest_name="lsp-admin-group-include-all-group-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
lsp_admin_group_include_all_group_id = __builtin__.property(_get_lsp_admin_group_include_all_group_id, _set_lsp_admin_group_include_all_group_id)
_pyangbind_elements = {'lsp_admin_group_include_all_group_id': lsp_admin_group_include_all_group_id, }
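# Hedged usage sketch (not generated by pyangbind; assumes a Python 2
# environment with pyangbind installed). The leaf is populated through the
# generated setter, as its docstring above recommends:
if __name__ == '__main__':
    binding = lsp_admin_group_include_all()
    binding._set_lsp_admin_group_include_all_group_id(42)
    print(binding.lsp_admin_group_include_all_group_id)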
|
the-stack_0_235 | from __future__ import absolute_import
import pynndescent
from ann_benchmarks.algorithms.base import BaseANN
class PyNNDescent(BaseANN):
def __init__(self, metric, n_neighbors=10, n_trees=8, leaf_size=20):
self._n_neighbors = int(n_neighbors)
self._n_trees = int(n_trees)
self._leaf_size = int(leaf_size)
        self._queue_size = None
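        # Note: queue_size is only provided later via set_query_arguments();
        # query() and __str__() assume it has been set by then.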
self._pynnd_metric = {'angular': 'cosine',
'euclidean': 'euclidean',
'hamming': 'hamming',
'jaccard': 'jaccard'}[metric]
def fit(self, X):
self._index = pynndescent.NNDescent(X,
n_neighbors=self._n_neighbors,
n_trees=self._n_trees,
leaf_size=self._leaf_size,
metric=self._pynnd_metric)
def set_query_arguments(self, queue_size):
self._queue_size = float(queue_size)
def query(self, v, n):
ind, dist = self._index.query(v.reshape(1, -1).astype('float32'), k=n, queue_size=self._queue_size)
return ind[0]
def use_threads(self):
return False
def __str__(self):
        return 'PyNNDescent(n_neighbors=%d, n_trees=%d, leaf_size=%d, queue_size=%.2f)' % (
            self._n_neighbors, self._n_trees, self._leaf_size, self._queue_size)
|
the-stack_0_236 | import os
import json
import uuid
import boto3
from datasources.stac.query import STACQuery
from datasources.sources.base import Datasource
client = boto3.client('s3')
bucket = 'usgs-lidar-public'
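# Public USGS 3DEP Entwine Point Tiles (EPT) bucket; each project prefix
# contains an ept.json metadata file next to the point data.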
class USGS3DEP(Datasource):
stac_compliant = False
tags = ['Elevation', 'Raster']
def __init__(self, manifest):
super().__init__(manifest)
def search(self, spatial, temporal=None, properties=None, limit=10, **kwargs):
from db import Database
names = []
stac_query = STACQuery(spatial, temporal, properties)
# projects = stac_query.check_spatial(self.__class__.__name__)[:limit]
with Database.load(read_only=True, deployed=True) as db:
projects = db.spatial_query({"type": "Feature", "geometry": stac_query.spatial})
        searches = 0
        for item in projects:
            if item['name'] not in names:
                if temporal and item['year']:
                    if stac_query.temporal[0].year != item['year'] or stac_query.temporal[1].year != item['year']:
                        continue
                if properties:
                    item.update({'properties': stac_query})
                if searches < limit:
                    self.manifest.searches.append([self, item])
                    # Record the project name so repeated hits are skipped;
                    # without this the 'not in names' check above is a no-op.
                    names.append(item['name'])
                    searches += 1
def execute(self, query):
# Download metadata from query item
response = client.get_object(Bucket=bucket, Key=os.path.join(query['name'], 'ept.json'))
metadata = json.loads(response['Body'].read().decode('utf-8'))
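        # The EPT metadata supplies the CRS, point count, schema and other
        # fields used to build the STAC item below.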
xvals = [x[0] for x in query['geometry']['coordinates'][0]]
yvals = [y[1] for y in query['geometry']['coordinates'][0]]
stac_item = {
'id': str(uuid.uuid4()),
'type': 'Feature',
'bbox': [min(xvals), min(yvals), max(xvals), max(yvals)],
'geometry': query['geometry'],
'properties': {
'datetime': f"{query['year']}-01-01T00:00:00.00Z",
'eo:epsg': metadata['srs']['horizontal'],
'pc:count': metadata['points'],
'pc:type': 'lidar',
'pc:encoding': metadata['dataType'],
'pc:schema': metadata['schema'],
'legacy:span': metadata['span'],
'legacy:version': metadata['version'],
},
'assets': {
's3path': {
'href': f"s3://{bucket}/{query['name']}",
'title': 'EPT data'
}
},
}
if "properties" in list(query):
if query['properties'].check_properties(stac_item['properties']):
return [stac_item]
else:
            return [stac_item]
|
the-stack_0_238 | from __future__ import absolute_import
# Copyright (c) 2010-2018 openpyxl
"""Write the shared string table."""
from io import BytesIO
# package imports
from openpyxl2.xml.constants import SHEET_MAIN_NS
from openpyxl2.xml.functions import Element, xmlfile, SubElement
PRESERVE_SPACE = '{%s}space' % "http://www.w3.org/XML/1998/namespace"
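# xml:space="preserve" tells consumers (e.g. Excel) to keep significant
# leading/trailing whitespace in a shared string.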
def write_string_table(string_table):
"""Write the string table xml."""
out = BytesIO()
with xmlfile(out) as xf:
with xf.element("sst", xmlns=SHEET_MAIN_NS, uniqueCount="%d" % len(string_table)):
for key in string_table:
el = Element('si')
text = SubElement(el, 't')
text.text = key
if key.strip() != key:
text.set(PRESERVE_SPACE, 'preserve')
xf.write(el)
return out.getvalue()
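# Hedged usage sketch (not part of the library): any sized iterable of
# strings works as the string table; the padded entry exercises the
# preserve-space branch above.
if __name__ == '__main__':
    xml = write_string_table(['plain', 'padded '])
    print(xml.decode('utf-8'))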
|
the-stack_0_239 | """
Building Block
==============
"""
from __future__ import annotations
import logging
import os
import typing
import warnings
from collections.abc import Collection
from functools import partial
import numpy as np
import rdkit.Chem.AllChem as rdkit
import vabene
from ...utilities import OneOrMany, flatten, remake
from ..atoms import Atom
from ..bonds import Bond
from ..functional_groups import FunctionalGroup, FunctionalGroupFactory
from .molecule import Molecule
logger = logging.getLogger(__name__)
class BuildingBlock(Molecule):
"""
Represents a building block of a :class:`.ConstructedMolecule`.
A :class:`BuildingBlock` can represent either an entire molecule or
a molecular fragments used to construct a
:class:`.ConstructedMolecule`. The building block uses
:class:`.FunctionalGroup` instances to identify which atoms are
modified during construction.
"""
# Maps file extensions to functions which can be used to
# create an rdkit molecule from that file type.
_init_funcs = {
'.mol': partial(
rdkit.MolFromMolFile,
sanitize=False,
removeHs=False
),
'.sdf': partial(
rdkit.MolFromMolFile,
sanitize=False,
removeHs=False
),
'.pdb': partial(
rdkit.MolFromPDBFile,
sanitize=False,
removeHs=False,
proximityBonding=False,
),
}
_placer_ids: frozenset[int]
_core_ids: frozenset[int]
def __init__(
self,
smiles: str,
functional_groups: typing.Iterable[
typing.Union[FunctionalGroup, FunctionalGroupFactory]
] = (),
placer_ids: typing.Optional[tuple[int, ...]] = None,
position_matrix: typing.Optional[np.ndarray] = None,
) -> None:
"""
Initialize a :class:`.BuildingBlock`.
Notes:
The molecule is given 3D coordinates with
:func:`rdkit.ETKDGv2`.
Parameters:
smiles:
A SMILES string of the molecule.
functional_groups:
An :class:`iterable` of :class:`.FunctionalGroup` or
:class:`.FunctionalGroupFactory` or both.
:class:`.FunctionalGroup` instances are added to the
building block and :class:`.FunctionalGroupFactory`
instances are used to create :class:`.FunctionalGroup`
instances the building block should hold.
:class:`.FunctionalGroup` instances are used to
identify which atoms are modified during
:class:`.ConstructedMolecule` construction.
placer_ids:
The ids of *placer* atoms. These are the atoms which
should be used for calculating the position of the
building block. Depending on the values passed to
`placer_ids`, and the functional groups in the building
block, different *placer* ids will be used by the
building block.
#. `placer_ids` is passed to the initializer: the
passed *placer* ids will be used by the building
block.
#. `placer_ids` is ``None`` and the building block has
functional groups: The *placer* ids of the
functional groups will be used as the *placer* ids
of the building block.
#. `placer_ids` is ``None`` and `functional_groups` is
empty. All atoms of the molecule will be used for
*placer* ids.
position_matrix:
The position matrix the building block should use. If
``None``, :func:`rdkit.ETKDGv2` will be used to
calculate it.
Raises:
:class:`RuntimeError`
If embedding the molecule fails.
"""
molecule = rdkit.AddHs(rdkit.MolFromSmiles(smiles))
if position_matrix is None:
params = rdkit.ETKDGv2()
random_seed = 4
params.randomSeed = random_seed
if rdkit.EmbedMolecule(molecule, params) == -1:
raise RuntimeError(
f'Embedding with seed value of {random_seed} '
'failed.'
)
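            # Kekulize so aromatic bonds carry explicit integer orders.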
rdkit.Kekulize(molecule)
else:
# Make sure the position matrix always holds floats.
position_matrix = np.array(
position_matrix,
dtype=np.float64,
)
conformer = rdkit.Conformer(molecule.GetNumAtoms())
for atom_id, position in enumerate(position_matrix):
conformer.SetAtomPosition(atom_id, position)
molecule.AddConformer(conformer)
self._init_from_rdkit_mol(
molecule=molecule,
functional_groups=functional_groups,
placer_ids=placer_ids,
)
@classmethod
def init_from_molecule(
cls,
molecule: Molecule,
functional_groups: typing.Iterable[
typing.Union[FunctionalGroup, FunctionalGroupFactory]
] = (),
placer_ids: typing.Optional[tuple[int, ...]] = None,
) -> BuildingBlock:
"""
Initialize from a :class:`.Molecule`.
Parameters:
molecule:
The molecule to initialize from.
functional_groups:
An :class:`iterable` of :class:`.FunctionalGroup` or
:class:`.FunctionalGroupFactory` or both.
:class:`.FunctionalGroup` instances are added to the
building block and :class:`.FunctionalGroupFactory`
instances are used to create :class:`.FunctionalGroup`
instances the building block should hold.
:class:`.FunctionalGroup` instances are used to
identify which atoms are modified during
:class:`.ConstructedMolecule` construction.
placer_ids:
The ids of *placer* atoms. These are the atoms which
should be used for calculating the position of the
building block. Depending on the values passed to
`placer_ids`, and the functional groups in the
building block, different *placer* ids will be used by
the building block.
#. `placer_ids` is passed to the initializer: the
passed *placer* ids will be used by the building
block.
#. `placer_ids` is ``None`` and the building block has
functional groups: The *placer* ids of the
functional groups will be used as the *placer* ids
of the building block.
#. `placer_ids` is ``None`` and `functional_groups` is
empty. All atoms of the molecule will be used for
*placer* ids.
Returns:
The building block. It will have the same atoms, bonds and
atomic positions as `molecule`.
"""
return cls.init(
atoms=tuple(molecule.get_atoms()),
bonds=tuple(molecule.get_bonds()),
position_matrix=molecule.get_position_matrix(),
functional_groups=functional_groups,
placer_ids=placer_ids,
)
@classmethod
def init_from_vabene_molecule(
cls,
molecule: vabene.Molecule,
functional_groups: typing.Iterable[
typing.Union[FunctionalGroup, FunctionalGroupFactory]
] = (),
placer_ids: typing.Optional[tuple[int, ...]] = None,
position_matrix: typing.Optional[np.ndarray] = None,
) -> BuildingBlock:
"""
Initialize from a :mod:`vabene.Molecule`.
Notes:
The molecule is given 3D coordinates with
:func:`rdkit.ETKDGv2()`.
Parameters:
molecule:
The :class:`vabene.Molecule` from which to initialize.
functional_groups:
An :class:`iterable` of :class:`.FunctionalGroup` or
:class:`.FunctionalGroupFactory` or both.
:class:`.FunctionalGroup` instances are added to the
building block and :class:`.FunctionalGroupFactory`
instances are used to create :class:`.FunctionalGroup`
instances the building block should hold.
:class:`.FunctionalGroup` instances are used to
identify which atoms are modified during
:class:`.ConstructedMolecule` construction.
placer_ids:
The ids of *placer* atoms. These are the atoms which
should be used for calculating the position of the
building block. Depending on the values passed to
`placer_ids`, and the functional groups in the building
block, different *placer* ids will be used by the
building block.
#. `placer_ids` is passed to the initializer: the
passed *placer* ids will be used by the building
block.
#. `placer_ids` is ``None`` and the building block has
functional groups: The *placer* ids of the
functional groups will be used as the *placer* ids
of the building block.
#. `placer_ids` is ``None`` and `functional_groups` is
empty. All atoms of the molecule will be used for
*placer* ids.
position_matrix:
The position matrix the building block should use. If
``None``, :func:`rdkit.ETKDGv2` will be used to
calculate it.
Returns:
The building block.
Raises:
:class:`RuntimeError`
If embedding the molecule fails.
"""
editable = rdkit.EditableMol(rdkit.Mol())
for atom in molecule.get_atoms():
rdkit_atom = rdkit.Atom(atom.get_atomic_number())
rdkit_atom.SetFormalCharge(atom.get_charge())
editable.AddAtom(rdkit_atom)
for bond in molecule.get_bonds():
editable.AddBond(
beginAtomIdx=bond.get_atom1_id(),
endAtomIdx=bond.get_atom2_id(),
order=rdkit.BondType(bond.get_order()),
)
rdkit_molecule = editable.GetMol()
rdkit.SanitizeMol(rdkit_molecule)
rdkit_molecule = rdkit.AddHs(rdkit_molecule)
if position_matrix is None:
params = rdkit.ETKDGv2()
random_seed = 4
params.randomSeed = random_seed
if rdkit.EmbedMolecule(rdkit_molecule, params) == -1:
raise RuntimeError(
f'Embedding with seed value of {random_seed} '
'failed.'
)
else:
# Make sure the position matrix always holds floats.
position_matrix = np.array(
position_matrix,
dtype=np.float64,
)
conformer = rdkit.Conformer(rdkit_molecule.GetNumAtoms())
for atom_id, position in enumerate(position_matrix):
conformer.SetAtomPosition(atom_id, position)
rdkit_molecule.AddConformer(conformer)
rdkit.Kekulize(rdkit_molecule)
return cls.init_from_rdkit_mol(
molecule=rdkit_molecule,
functional_groups=functional_groups,
placer_ids=placer_ids,
)
@classmethod
def init(
cls,
atoms: tuple[Atom, ...],
bonds: tuple[Bond, ...],
position_matrix: np.ndarray,
functional_groups: typing.Iterable[
typing.Union[FunctionalGroup, FunctionalGroupFactory]
] = (),
placer_ids: typing.Optional[tuple[int, ...]] = None,
) -> BuildingBlock:
"""
Initialize a :class:`.BuildingBlock` from its components.
Parameters:
atoms:
The atoms of the building block.
bonds:
The bonds of the building block.
position_matrix:
An ``(n, 3)`` position matrix of the building block.
functional_groups:
An :class:`iterable` holding the
:class:`.FunctionalGroup` instances the building block
should have, and / or :class:`.FunctionalGroupFactory`
instances used for creating them.
placer_ids:
The ids of *placer* atoms. These are the atoms which
should be used for calculating the position of the
building block. Depending on the values passed to
`placer_ids`, and the functional groups in the building
block, different *placer* ids will be used by the
building block.
#. `placer_ids` is passed to the initializer: the
passed *placer* ids will be used by the building
block.
#. `placer_ids` is ``None`` and the building block has
functional groups: The *placer* ids of the
functional groups will be used as the *placer* ids
of the building block.
#. `placer_ids` is ``None`` and `functional_groups` is
empty. All atoms of the molecule will be used for
*placer* ids.
Returns:
The building block.
"""
building_block = cls.__new__(cls)
Molecule.__init__(
self=building_block,
atoms=atoms,
bonds=bonds,
position_matrix=position_matrix,
)
functional_groups = building_block._extract_functional_groups(
functional_groups=functional_groups,
)
building_block._with_functional_groups(functional_groups)
building_block._placer_ids = (
building_block._normalize_placer_ids(
placer_ids=placer_ids,
functional_groups=building_block._functional_groups,
)
)
building_block._core_ids = frozenset(
building_block._get_core_ids(
functional_groups=building_block._functional_groups,
)
)
return building_block
@classmethod
def init_from_file(
cls,
path: str,
functional_groups: typing.Iterable[
typing.Union[FunctionalGroup, FunctionalGroupFactory]
] = (),
placer_ids: typing.Optional[tuple[int, ...]] = None,
):
"""
Initialize from a file.
Parameters:
path:
The path to a molecular structure file. Supported file
types are:
#. ``.mol``, ``.sdf`` - MDL V3000 MOL file
functional_groups:
An :class:`iterable` of :class:`.FunctionalGroup` or
:class:`.FunctionalGroupFactory` or both.
:class:`.FunctionalGroup` instances are added to the
building block and :class:`.FunctionalGroupFactory`
instances are used to create :class:`.FunctionalGroup`
instances the building block should hold.
:class:`.FunctionalGroup` instances are used to
identify which atoms are modified during
:class:`.ConstructedMolecule` construction.
placer_ids:
The ids of *placer* atoms. These are the atoms which
should be used for calculating the position of the
building block. Depending on the values passed to
`placer_ids`, and the functional groups in the building
block, different *placer* ids will be used by the
building block.
#. `placer_ids` is passed to the initializer: the
passed *placer* ids will be used by the building
block.
#. `placer_ids` is ``None`` and the building block has
functional groups: The *placer* ids of the
functional groups will be used as the *placer* ids
of the building block.
#. `placer_ids` is ``None`` and `functional_groups` is
empty. All atoms of the molecule will be used for
*placer* ids.
Returns:
The building block.
Raises:
:class:`ValueError`
If the file type cannot be used for initialization.
"""
_, extension = os.path.splitext(path)
if extension == '.pdb':
warnings.warn(
'Loading from .pdb files is deprecated and will be '
'removed from stk versions released after 1st Nov '
'2022. Please use .mol files for loading molecules '
'instead.',
category=FutureWarning,
)
if extension not in cls._init_funcs:
raise ValueError(
f'Unable to initialize from "{extension}" files.'
)
# This remake needs to be here because molecules loaded
# with rdkit often have issues, because rdkit tries to do
# bits of structural analysis like stereocenters. Remake
# gets rid of all this problematic metadata.
molecule = remake(cls._init_funcs[extension](path))
return cls.init_from_rdkit_mol(
molecule=molecule,
functional_groups=functional_groups,
placer_ids=placer_ids,
)
@classmethod
def init_from_rdkit_mol(
cls,
molecule: rdkit.Mol,
functional_groups: typing.Iterable[
typing.Union[FunctionalGroup, FunctionalGroupFactory]
] = (),
placer_ids: typing.Optional[tuple[int, ...]] = None,
) -> BuildingBlock:
"""
Initialize from an :mod:`rdkit` molecule.
Warnings:
For :mod:`rdkit` molecules with non-integer bond orders,
such as 1.5, the molecule should be kekulized prior to
calling this method. Otherwise, all bond orders will be
set to an integer value in the building block.
Parameters:
molecule:
The molecule.
functional_groups:
An :class:`iterable` of :class:`.FunctionalGroup` or
:class:`.FunctionalGroupFactory` or both.
:class:`.FunctionalGroup` instances are added to the
building block and :class:`.FunctionalGroupFactory`
instances are used to create :class:`.FunctionalGroup`
instances the building block should hold.
:class:`.FunctionalGroup` instances are used to
identify which atoms are modified during
:class:`.ConstructedMolecule` construction.
placer_ids:
The ids of *placer* atoms. These are the atoms which
should be used for calculating the position of the
building block. Depending on the values passed to
`placer_ids`, and the functional groups in the building
block, different *placer* ids will be used by the
building block.
#. `placer_ids` is passed to the initializer: the
passed *placer* ids will be used by the building
block.
#. `placer_ids` is ``None`` and the building block has
functional groups: The *placer* ids of the
functional groups will be used as the *placer* ids
of the building block.
#. `placer_ids` is ``None`` and `functional_groups` is
               empty: all atoms of the molecule will be used for
*placer* ids.
Returns:
            The building block.
"""
building_block = cls.__new__(cls)
building_block._init_from_rdkit_mol(
molecule=molecule,
functional_groups=functional_groups,
placer_ids=placer_ids,
)
return building_block
def _init_from_rdkit_mol(
self,
molecule: rdkit.Mol,
functional_groups: typing.Iterable[
typing.Union[FunctionalGroup, FunctionalGroupFactory]
],
placer_ids: typing.Optional[tuple[int, ...]],
) -> None:
"""
Initialize from an :mod:`rdkit` molecule.
Parameters:
molecule:
The molecule.
functional_groups:
An :class:`iterable` of :class:`.FunctionalGroup` or
:class:`.FunctionalGroupFactory` or both.
:class:`.FunctionalGroup` instances are added to the
building block and :class:`.FunctionalGroupFactory`
instances are used to create :class:`.FunctionalGroup`
instances the building block should hold.
:class:`.FunctionalGroup` instances are used to
identify which atoms are modified during
:class:`.ConstructedMolecule` construction.
placer_ids:
The ids of *placer* atoms. These are the atoms which
should be used for calculating the position of the
building block. Depending on the values passed to
`placer_ids`, and the functional groups in the building
block, different *placer* ids will be used by the
building block.
#. `placer_ids` is passed to the initializer: the
passed *placer* ids will be used by the building
block.
#. `placer_ids` is ``None`` and the building block has
functional groups: The *placer* ids of the
functional groups will be used as the *placer* ids
of the building block.
#. `placer_ids` is ``None`` and `functional_groups` is
               empty: all atoms of the molecule will be used for
*placer* ids.
"""
atoms = tuple(
Atom(a.GetIdx(), a.GetAtomicNum(), a.GetFormalCharge())
for a in molecule.GetAtoms()
)
bonds = tuple(
Bond(
atom1=atoms[b.GetBeginAtomIdx()],
atom2=atoms[b.GetEndAtomIdx()],
order=(
9 if b.GetBondType() == rdkit.BondType.DATIVE
else b.GetBondTypeAsDouble()
)
)
for b in molecule.GetBonds()
)
position_matrix = molecule.GetConformer().GetPositions()
super().__init__(atoms, bonds, position_matrix)
self._with_functional_groups(self._extract_functional_groups(
functional_groups=functional_groups,
))
self._placer_ids = self._normalize_placer_ids(
placer_ids=placer_ids,
functional_groups=self._functional_groups,
)
self._core_ids = frozenset(self._get_core_ids(
functional_groups=self._functional_groups,
))
def _normalize_placer_ids(
self,
placer_ids: typing.Optional[tuple[int, ...]],
functional_groups: Collection[FunctionalGroup],
) -> frozenset[int]:
"""
Get the final *placer* ids.
Parameters:
placer_ids: The ids of *placer* atoms or ``None``.
functional_groups:
The :class:`.FunctionalGroup` instances of the building
block.
Returns:
Depending on the input values, this function will return
different things.
            #. `placer_ids` is a :class:`tuple` of :class:`int`: the
`placer_ids` will be returned.
#. `placer_ids` is ``None`` and `functional_groups` is not
empty: The *placer* ids of the functional groups will
be returned.
#. `placer_ids` is ``None`` and `functional_groups` is
               empty: the ids of all atoms in the building block will
be returned.
"""
if placer_ids is not None:
return frozenset(placer_ids)
if functional_groups:
return frozenset(flatten(
functional_group.get_placer_ids()
for functional_group in functional_groups
))
return frozenset(atom.get_id() for atom in self._atoms)
def _get_core_ids(
self,
functional_groups: typing.Iterable[FunctionalGroup],
) -> typing.Iterator[int]:
"""
Get the final *core* ids.
        This method may yield duplicate ids.
Parameters:
functional_groups:
The :class:`.FunctionalGroup` instances of the building
block.
Yields:
The id of an atom defining the core of the molecule.
"""
functional_group_atom_ids = {
atom_id
for functional_group in functional_groups
for atom_id in functional_group.get_atom_ids()
}
for atom in self._atoms:
atom_id = atom.get_id()
if atom_id not in functional_group_atom_ids:
yield atom_id
for functional_group in functional_groups:
for atom_id in functional_group.get_core_atom_ids():
yield atom_id
def _extract_functional_groups(
self,
functional_groups: typing.Iterable[
typing.Union[FunctionalGroup, FunctionalGroupFactory]
],
) -> typing.Iterator[FunctionalGroup]:
"""
Yield functional groups.
The input can be a mixture of :class:`.FunctionalGroup` and
:class:`.FunctionalGroupFactory`. The output yields
:class:`.FunctionalGroup` instances only. Either those
held directly in `functional_groups` or created by the
factories in `functional_groups`.
Parameters:
functional_groups:
Can be an :class:`iterable` of both
:class:`.FunctionalGroup` and
:class:`.FunctionalGroupFactory`.
Yields:
A functional group from `functional_groups`, or created
by a factory in `functional_groups`.
"""
for functional_group in functional_groups:
if isinstance(functional_group, FunctionalGroup):
yield functional_group
else:
# Else it's a factory.
yield from functional_group.get_functional_groups(self)
def _with_functional_groups(
self,
functional_groups: typing.Iterable[FunctionalGroup],
) -> BuildingBlock:
"""
Modify the molecule.
"""
self._functional_groups = tuple(functional_groups)
return self
def with_functional_groups(
self,
functional_groups: typing.Iterable[FunctionalGroup],
) -> BuildingBlock:
"""
Return a clone with specific functional groups.
Parameters:
functional_groups:
:class:`.FunctionalGroup` instances which the clone
should have.
Returns:
The clone.
"""
return self.clone()._with_functional_groups(functional_groups)
def _with_canonical_atom_ordering(self) -> BuildingBlock:
ordering = rdkit.CanonicalRankAtoms(self.to_rdkit_mol())
super()._with_canonical_atom_ordering()
id_map = {
old_id: new_id
for old_id, new_id in enumerate(ordering)
}
self._functional_groups = tuple(
functional_group.with_ids(id_map)
for functional_group in self._functional_groups
)
self._placer_ids = frozenset(
id_map[placer_id]
for placer_id in self._placer_ids
)
self._core_ids = frozenset(
id_map[core_id]
for core_id in self._core_ids
)
return self
def get_num_functional_groups(self) -> int:
"""
Return the number of functional groups.
Returns:
The number of functional groups in the building block.
"""
return len(self._functional_groups)
def get_functional_groups(
self,
fg_ids: typing.Optional[OneOrMany[int]] = None,
) -> typing.Iterator[FunctionalGroup]:
"""
Yield the functional groups, ordered by id.
Parameters:
fg_ids:
The ids of functional groups yielded. If ``None``, then
all functional groups are yielded. Can be a single
:class:`int`, if a single functional group is
desired.
Yields:
A functional group of the building block.
"""
if fg_ids is None:
fg_ids = range(len(self._functional_groups))
elif isinstance(fg_ids, int):
fg_ids = (fg_ids, )
for fg_id in fg_ids:
yield self._functional_groups[fg_id]
def clone(self) -> BuildingBlock:
clone = self._clone()
clone._functional_groups = self._functional_groups
clone._placer_ids = self._placer_ids
clone._core_ids = self._core_ids
return clone
def get_num_placers(self) -> int:
"""
Return the number of *placer* atoms in the building block.
Returns:
The number of *placer* atoms in the building block.
"""
return len(self._placer_ids)
def get_placer_ids(self) -> typing.Iterator[int]:
"""
Yield the ids of *placer* atoms.
        *Placer* atoms are those which should be used to calculate
        the position of the building block.
See Also:
:meth:`.FunctionalGroup.get_placer_ids`
Yields:
The id of a *placer* atom.
"""
yield from self._placer_ids
def get_core_atom_ids(self) -> typing.Iterator[int]:
"""
Yield ids of atoms which form the core of the building block.
        This includes all atoms in the building block not part of a
        functional group, as well as any atoms in a functional group
        specifically labelled as core atoms.
See Also:
:meth:`.FunctionalGroup.get_core_atom_ids`
Yields:
The id of a *core* atom.
"""
yield from self._core_ids
def with_canonical_atom_ordering(self) -> BuildingBlock:
return self.clone()._with_canonical_atom_ordering()
def with_centroid(
self,
position: np.ndarray,
atom_ids: typing.Optional[OneOrMany[int]] = None,
) -> BuildingBlock:
return self.clone()._with_centroid(position, atom_ids)
def with_displacement(
self,
displacement: np.ndarray,
) -> BuildingBlock:
return self.clone()._with_displacement(displacement)
def with_position_matrix(
self,
position_matrix: np.ndarray,
) -> BuildingBlock:
return self.clone()._with_position_matrix(position_matrix)
def with_rotation_about_axis(
self,
angle: float,
axis: np.ndarray,
origin: np.ndarray,
) -> BuildingBlock:
return self.clone()._with_rotation_about_axis(
angle=angle,
axis=axis,
origin=origin,
)
def with_rotation_between_vectors(
self,
start: np.ndarray,
target: np.ndarray,
origin: np.ndarray,
) -> BuildingBlock:
return self.clone()._with_rotation_between_vectors(
start=start,
target=target,
origin=origin,
)
def with_rotation_to_minimize_angle(
self,
start: np.ndarray,
target: np.ndarray,
axis: np.ndarray,
origin: np.ndarray,
) -> BuildingBlock:
return self.clone()._with_rotation_to_minimize_angle(
start=start,
target=target,
axis=axis,
origin=origin,
)
def with_structure_from_file(
self,
path: str,
extension: typing.Optional[str] = None,
) -> BuildingBlock:
return typing.cast(
BuildingBlock,
super().with_structure_from_file(
path=path,
extension=extension,
)
)
def write(
self,
path: str,
atom_ids: typing.Optional[OneOrMany[int]] = None,
) -> BuildingBlock:
return typing.cast(
BuildingBlock,
super().write(path, atom_ids)
)
def __str__(self) -> str:
if self._functional_groups:
fg_repr = f', {self._functional_groups!r}'
else:
fg_repr = ''
smiles = rdkit.MolToSmiles(
mol=rdkit.RemoveHs(self.to_rdkit_mol()),
)
return f'{self.__class__.__name__}({smiles!r}{fg_repr})'
def __repr__(self) -> str:
return str(self)
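# A minimal usage sketch (illustrative only; assumes the public stk API
# with BromoFactory available):
#
#     import stk
#
#     bb = stk.BuildingBlock('BrCCBr', [stk.BromoFactory()])
#     # No explicit placer_ids were given and functional groups exist, so
#     # the bromo groups' placer ids become the building block's.
#     placers = sorted(bb.get_placer_ids())
#     # Core atoms: everything outside the functional groups, plus atoms a
#     # functional group labels as core.
#     core = sorted(bb.get_core_atom_ids())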
|
the-stack_0_240 | #!/usr/bin/env python
""" Entry point for starting Galaxy without starting as part of a web server.
Example Usage: Start a job/workflow handler without a web server and with
a given name:
galaxy-main --server-name handler0
Start as a daemon (requires the daemonize package - install with 'pip install daemonize'):
galaxy-main -d --daemon-log-file=handler0-daemon.log --pid-file handler0.pid --server-name handler0
In daemon mode logging of Galaxy (as opposed to this script) is configured via
a loggers section in Galaxy's ini file - this can be overridden with sensible
defaults logging to a single file with the following:
galaxy-main -d --server-name handler0 --daemon-log-file=handler0-daemon.log --pid-file handler0.pid --log-file handler0.log
This can also be used to start Galaxy as a uWSGI mule, e.g. for job handling:
uwsgi ... --py-call-osafterfork --mule=lib/galaxy/main.py --mule=lib/galaxy/main.py --farm=job-handlers:1,2
The --py-call-osafterfork allows for proper shutdown on SIGTERM/SIGINT.
"""
import functools
import logging
import os
import signal
import sys
import threading
from argparse import ArgumentParser
from configparser import ConfigParser
from logging.config import fileConfig
try:
from daemonize import Daemonize
except ImportError:
Daemonize = None
try:
import uwsgi
except ImportError:
uwsgi = None
log = logging.getLogger(__name__)
real_file = os.path.realpath(__file__)
GALAXY_ROOT_DIR_ = os.path.abspath(os.path.join(os.path.dirname(real_file), os.pardir))
if not os.path.exists(os.path.join(GALAXY_ROOT_DIR_, 'run.sh')):
# Galaxy is installed
GALAXY_ROOT_DIR = None
else:
GALAXY_ROOT_DIR = GALAXY_ROOT_DIR_
GALAXY_LIB_DIR = os.path.join(GALAXY_ROOT_DIR_, "lib")
try:
sys.path.insert(1, GALAXY_LIB_DIR)
except Exception:
log.exception("Failed to add Galaxy to sys.path")
raise
from galaxy.main_config import (
absolute_config_path,
config_is_ini,
DEFAULT_CONFIG_SECTION,
DEFAULT_INI_APP,
find_config,
)
from galaxy.util import unicodify
from galaxy.web_stack import get_app_kwds
REQUIRES_DAEMONIZE_MESSAGE = "Attempted to use Galaxy in daemon mode, but daemonize is unavailable."
DEFAULT_PID = "galaxy.pid"
DEFAULT_VERBOSE = True
DESCRIPTION = "Daemonized entry point for Galaxy."
SHUTDOWN_MSG = '__SHUTDOWN__'
UWSGI_FARMS_VAR = '_GALAXY_UWSGI_FARM_NAMES'
exit = threading.Event()
def load_galaxy_app(
config_builder,
config_env=False,
log=None,
attach_to_pools=None,
**kwds
):
# Allow specification of log so daemon can reuse properly configured one.
if log is None:
log = logging.getLogger(__name__)
# If called in daemon mode, set the ROOT directory and ensure Galaxy is on
# sys.path.
if config_env:
try:
os.chdir(GALAXY_ROOT_DIR)
except Exception:
log.exception("Failed to chdir")
raise
config_builder.setup_logging()
from galaxy.util.properties import load_app_properties
kwds = config_builder.app_kwds()
kwds = load_app_properties(**kwds)
from galaxy.app import UniverseApplication
app = UniverseApplication(
global_conf=config_builder.global_conf(),
attach_to_pools=attach_to_pools,
**kwds
)
app.database_heartbeat.start()
app.application_stack.log_startup()
return app
def handle_signal(signum, frame):
log.info('Received signal %d, exiting', signum)
if uwsgi and 'mule_id' in dir(uwsgi) and uwsgi.mule_id() > 0:
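        # Running as a uWSGI mule: relay the shutdown message to every farm
        # this mule belongs to so sibling mules also exit; a lone mule
        # messages itself instead.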
farms = os.environ.get(UWSGI_FARMS_VAR, None)
if farms:
for farm in farms.split(','):
uwsgi.farm_msg(farm, SHUTDOWN_MSG)
else:
uwsgi.mule_msg(SHUTDOWN_MSG, uwsgi.mule_id())
exit.set()
def register_signals():
for name in ('TERM', 'INT', 'HUP'):
sig = getattr(signal, 'SIG%s' % name)
signal.signal(sig, handle_signal)
def app_loop(args, log):
try:
config_builder = GalaxyConfigBuilder(args)
config_env = GALAXY_ROOT_DIR is not None
galaxy_app = load_galaxy_app(
config_builder,
config_env=config_env,
log=log,
attach_to_pools=args.attach_to_pool,
)
except BaseException:
log.exception("Failed to initialize Galaxy application")
raise
try:
# A timeout is required or the signals won't be handled
while not exit.wait(20):
pass
except (KeyboardInterrupt, SystemExit):
pass
try:
galaxy_app.shutdown()
except Exception:
log.exception("Failed to shutdown Galaxy application")
raise
class GalaxyConfigBuilder:
""" Generate paste-like configuration from supplied command-line arguments.
"""
def __init__(self, args=None, **kwds):
self.config_file = None
self.config_section = None
self.app_name = kwds.get("app") or (args and args.app) or DEFAULT_CONFIG_SECTION
config_file = kwds.get("config_file", None) or (args and args.config_file)
        # If a config file was supplied, use it - otherwise we need to find
        # and resolve a config file path ourselves.
if not config_file and 'config_file' in self.app_kwds():
config_file = self.app_kwds()['config_file']
if not config_file:
galaxy_root = kwds.get("galaxy_root", GALAXY_ROOT_DIR)
config_file = find_config(config_file, galaxy_root)
config_file = absolute_config_path(config_file, galaxy_root=galaxy_root)
self.config_file = unicodify(config_file)
# FIXME: this won't work for non-Paste ini configs
if self.config_is_ini:
self.config_section = "app:%s" % unicodify(kwds.get("app") or (args and args.app) or DEFAULT_INI_APP)
else:
self.config_section = self.app_name
self.log_file = (args and args.log_file)
@classmethod
def populate_options(cls, arg_parser):
arg_parser.add_argument("-c", "--config-file", default=None, help="Galaxy config file (defaults to config/galaxy.ini)")
arg_parser.add_argument("--ini-path", default=None, help="DEPRECATED: use -c/--config-file")
arg_parser.add_argument("--app", default=None, help="app section in config file (defaults to 'galaxy' for YAML/JSON, 'main' (w/ 'app:' prepended) for INI")
arg_parser.add_argument("-d", "--daemonize", default=False, help="Daemonize process", action="store_true")
arg_parser.add_argument("--daemon-log-file", default=None, help="log file for daemon script ")
arg_parser.add_argument("--log-file", default=None, help="Galaxy log file (overrides log configuration in config_file if set)")
arg_parser.add_argument("--pid-file", default=DEFAULT_PID, help="pid file (default is %s)" % DEFAULT_PID)
arg_parser.add_argument("--server-name", default=None, help="set a galaxy server name")
arg_parser.add_argument("--attach-to-pool", action="append", default=['job-handlers'], help="attach to asynchronous worker pool (specify multiple times for multiple pools)")
@property
def config_is_ini(self):
return config_is_ini(self.config_file)
def app_kwds(self):
kwds = get_app_kwds(self.app_name, app_name=self.app_name)
if 'config_file' not in kwds:
kwds['config_file'] = self.config_file
if 'config_section' not in kwds:
kwds['config_section'] = self.config_section
return kwds
def global_conf(self):
conf = {}
if self.config_is_ini:
conf["__file__"] = self.config_file
return conf
def setup_logging(self):
        # Galaxy will attempt to set up logging itself if a [loggers] section
        # is not present in the ini config file - this handles the [loggers]
        # block if it is present (the way paste normally would).
if not self.config_file:
return
if self.config_is_ini:
raw_config = ConfigParser()
raw_config.read([self.config_file])
if raw_config.has_section('loggers'):
config_file = os.path.abspath(self.config_file)
fileConfig(
config_file,
dict(__file__=config_file, here=os.path.dirname(config_file))
)
def main(func=app_loop):
arg_parser = ArgumentParser(description=DESCRIPTION)
GalaxyConfigBuilder.populate_options(arg_parser)
args = arg_parser.parse_args()
if args.ini_path and not args.config_file:
args.config_file = args.ini_path
if args.log_file:
os.environ["GALAXY_CONFIG_LOG_DESTINATION"] = os.path.abspath(args.log_file)
if args.server_name:
os.environ["GALAXY_CONFIG_SERVER_NAME"] = args.server_name
pid_file = args.pid_file
log.setLevel(logging.DEBUG)
log.propagate = False
register_signals()
if args.daemonize:
if Daemonize is None:
raise ImportError(REQUIRES_DAEMONIZE_MESSAGE)
keep_fds = []
if args.daemon_log_file:
fh = logging.FileHandler(args.daemon_log_file, "w")
fh.setLevel(logging.DEBUG)
log.addHandler(fh)
keep_fds.append(fh.stream.fileno())
else:
fh = logging.StreamHandler(sys.stderr)
fh.setLevel(logging.DEBUG)
log.addHandler(fh)
daemon = Daemonize(
app="galaxy",
pid=pid_file,
action=functools.partial(func, args, log),
verbose=DEFAULT_VERBOSE,
logger=log,
keep_fds=keep_fds,
)
daemon.start()
else:
func(args, log)
if __name__ == "__main__":
main()
|
the-stack_0_241 | # -*- coding: utf-8 -*-
import sugartensor as tf
import matplotlib.pyplot as plt
# set log level to debug
tf.sg_verbosity(10)
#
# hyper parameters
#
batch_size = 25
z_dim = 50
#
# create generator
#
# random uniform seed
z = tf.random_uniform((batch_size, z_dim))
with tf.sg_context(name='generator', size=5, stride=2, act='relu', bn=True):
# generator network
gen = (z.sg_dense(dim=1024)
.sg_dense(dim=7*7*128)
.sg_reshape(shape=(-1, 7, 7, 128))
.sg_upconv(dim=64)
.sg_upconv(dim=1, act='sigmoid', bn=False)
.sg_squeeze())
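# NOTE: this generator must mirror the architecture used at training time;
# otherwise the checkpoint restore below cannot map the saved variables.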
#
# draw samples
#
with tf.Session() as sess:
tf.sg_init(sess)
# restore parameters
saver = tf.train.Saver()
saver.restore(sess, tf.train.latest_checkpoint('asset/train/ckpt'))
# run generator
imgs = sess.run(gen)
# plot result
_, ax = plt.subplots(5, 5, sharex=True, sharey=True)
for i in range(5):
for j in range(5):
ax[i][j].imshow(imgs[i * 5 + j], 'gray')
ax[i][j].set_axis_off()
plt.savefig('asset/train/sample.png', dpi=600)
tf.sg_info('Sample image saved to "asset/train/sample.png"')
plt.close()
|
the-stack_0_246 | import turtle
t = turtle.Pen()
#t.speed(0)
t.shape("turtle")
t.left(90)
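# Draw a six-armed snowflake: walk out along each spoke, add three short
# branches at the tip, retrace to the centre, then rotate 60 degrees.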
for i in range(6):
t.forward(100)
t.right(60)
for j in range(3):
t.forward(20)
t.backward(20)
t.left(60)
t.right(120)
t.backward(100)
t.left(60)
os.system("Pause") |
the-stack_0_247 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Xbacklight(AutotoolsPackage):
"""Xbacklight is used to adjust the backlight brightness where supported.
It uses the RandR extension to find all outputs on the X server
supporting backlight brightness control and changes them all in the
same way."""
homepage = "http://cgit.freedesktop.org/xorg/app/xbacklight"
url = "https://www.x.org/archive/individual/app/xbacklight-1.2.1.tar.gz"
version('1.2.1', 'e8e4c86b0f867e23aa3532618a697609')
depends_on('libxcb')
depends_on('xcb-util')
depends_on('pkgconfig', type='build')
depends_on('util-macros', type='build')
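    # A minimal usage sketch once this file is placed in a Spack package
    # repository (command is illustrative):
    #
    #     spack install xbacklight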
|
the-stack_0_248 | import os
def align(step):
    """Find the cheapest alignment position for the values in whales.txt.
    With ``step`` True the fuel cost grows triangularly with distance
    (1 + 2 + ... + n); otherwise it is linear.
    """
    file_path = os.path.join(os.getcwd(), 'whales.txt')
    with open(file_path, 'r') as f:
        lines = f.readlines()
    for line in lines:
        positions = [int(p) for p in line.strip().split(",")]
        best_fuel = None
        # Brute force: evaluate every candidate alignment position.
        for candidate in range(max(positions) + 1):
            fuel = 0
            for pos in positions:
                n = abs(pos - candidate)
                fuel += n * (n + 1) // 2 if step else n
            if best_fuel is None or fuel < best_fuel:
                best_fuel = fuel
        print(best_fuel)
if __name__ == "__main__":
align(False)
align(True)
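# Worked shortcut (an observation about this puzzle, not used above): with
# linear cost the optimum is the median; with triangular cost it lies within
# 0.5 of the mean, so only two candidates need checking.
#
#     import statistics
#     positions = [16, 1, 2, 0, 4, 2, 7, 1, 2, 14]
#     median = int(statistics.median(positions))
#     sum(abs(p - median) for p in positions)             # 37 (linear)
#     mean = statistics.mean(positions)                   # 4.9
#     def tri(c):
#         return sum(abs(p - c) * (abs(p - c) + 1) // 2 for p in positions)
#     min(tri(4), tri(5))                                 # 168 (triangular)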
|
the-stack_0_250 | #!/usr/bin/env python3
"""
Convert $1 -- a markdown file to an HTML file in `/tmp/vim/<parent>/<basename>.html`.
"""
# Standard Library
from os.path import realpath, basename, isfile, isdir, dirname, abspath
import os
import shutil
import subprocess
import sys
import re
import logging
from logging import Logger
from typing import Dict, Iterable, Optional, Set, Text, List
from subprocess import DEVNULL, PIPE, run
from meta import MATHJAX_CONFS, LongOpt, CodeStyle, FromFmt, Ext, ToFmt
from utils import validate_executables, require_exists, vimdir_path, ensure_exists
# initialise logging with a sane configuration
logging.basicConfig(level=logging.WARN,
format="%(levelname)s:%(asctime)s %(message)s")
log: Logger = logging.getLogger()
validate_executables(["pandoc"])
# NOTE this will try to include a lua filter from `./task-list.lua`.
class PandocCmd:
def __init__(
self,
input_file: Text,
stylesheet: Text = vimdir_path("css", "styles.css"),
javascript: Text = vimdir_path("js", "script.js"),
from_fmt: FromFmt = FromFmt.MARKDOWN,
to_fmt: ToFmt = ToFmt.HTML5,
exts: List[Ext] = [
Ext.ASCII_IDENTIFIERS,
Ext.AUTOLINK_BARE_URIS,
Ext.COMPACT_DEFINITION_LISTS,
Ext.FENCED_DIVS,
Ext.GFM_AUTO_IDENTIFIERS,
Ext.INTRAWORD_UNDERSCORES,
Ext.MARKDOWN_ATTRIBUTE,
Ext.MMD_HEADER_IDENTIFIERS,
Ext.MMD_LINK_ATTRIBUTES,
Ext.SMART,
Ext.TEX_MATH_DOLLARS,
Ext.TEX_MATH_DOUBLE_BACKSLASH,
Ext.TEX_MATH_SINGLE_BACKSLASH,
],
no_exts: List[Ext] = [Ext.ALL_SYMBOLS_ESCAPABLE, Ext.ESCAPED_LINE_BREAKS],
long_opts: Dict[LongOpt, Optional[Text]] = {
LongOpt.ATX_HEADERS: None,
LongOpt.REFERENCE_LOCATION: "document",
LongOpt.SECTION_DIVS: None,
LongOpt.EMAIL_OBFUSCATION: "javascript",
},
code_style: CodeStyle = CodeStyle.TANGO,
mathjax_version: Text = "2.7.5",
mathjax_conf: Text = "TeX-AMS_HTML",
width: int = 80,
toc_depth: int = 3,
) -> None:
log.debug("initalising a PandocCmd object")
self.exts: List[Ext] = []
self.no_exts: List[Ext] = []
self.long_opts: Dict[LongOpt, Optional[Text]] = dict()
self.set_opts(long_opts).set_exts(exts).set_no_exts(no_exts).set_input_file(
input_file
).set_opt(LongOpt.STANDALONE).set_to_fmt(to_fmt).set_from_fmt(
from_fmt
).set_mathjax(
mathjax_version, mathjax_conf
).set_width(
width
).set_stylesheet(
stylesheet
).set_code_style(
code_style
).set_javascript(
javascript
)
if toc_depth:
self.set_toc_depth(toc_depth).set_opt(LongOpt.TOC)
lua_filter: str = os.path.join(os.path.dirname(
os.path.abspath(__file__)), 'task-list.lua')
if isfile(lua_filter):
self.set_opt(LongOpt.LUA_FILTER, lua_filter)
else:
log.error(f'failed to find lua filter ./{lua_filter}')
def set_from_fmt(self, fmt: FromFmt) -> object:
assert fmt in FromFmt, f"from format '{fmt}' invalid"
self.from_fmt: FromFmt = fmt
return self
def set_opt(self, key: LongOpt, val: Optional[Text] = None) -> object:
self.long_opts[key] = val
return self
def set_opts(self, pairs: Dict[LongOpt, Optional[Text]]) -> object:
for (k, v) in pairs.items():
self.set_opt(k, v)
return self
def set_to_fmt(self, fmt: ToFmt) -> object:
self.to_fmt = fmt
return self
def set_input_file(self, file_path: Text) -> object:
require_exists(file_path)
self.input_file = file_path
return self
def set_width(self, n: int) -> object:
assert n and n >= 0, f"invalid value {str(n)}"
return self.set_opt(LongOpt.COLUMNS, str(n))
def set_stylesheet(self, css_path: Text) -> object:
require_exists(css_path)
return self.set_opt(LongOpt.CSS, css_path)
def set_javascript(self, js_path: Text) -> object:
require_exists(js_path)
self.javascript = js_path
return self
def set_toc_depth(self, n: int) -> object:
assert n and n >= 0, f"invalid value {n}"
return self.set_opt(LongOpt.TOC_DEPTH, str(n))
def set_code_style(self, style: CodeStyle) -> object:
return self.set_opt(LongOpt.HIGHLIGHT_STYLE, style._name_.lower())
def set_mathjax(self, version: Text, cfg: Text) -> object:
        assert cfg and cfg in MATHJAX_CONFS, f"unrecognised MathJax config {cfg}"
assert (
version
and len(version) >= 3
and version[0] == "2"
and version[1] == "."
and str.isdigit(version[2])
), f"unrecognised MathJax version {version}"
return self.set_opt(
LongOpt.MATHJAX,
f"https://cdnjs.cloudflare.com/ajax/libs/mathjax/{version}/MathJax.js?config={cfg}",
)
def set_exts(self, exts: Iterable[Ext]) -> object:
for ext in exts:
self.set_ext(ext)
return self
def set_ext(self, ext: Ext) -> object:
self.exts.append(ext)
return self
def set_no_ext(self, ext: Ext) -> object:
self.no_exts.append(ext)
return self
def set_no_exts(self, exts: Iterable[Ext]) -> object:
for ext in exts:
self.set_no_ext(ext)
return self
@property
def args(self) -> List[Text]:
arguments = ["pandoc", "--from"]
from_fmt = self.from_fmt._name_.lower()
if len(self.exts) > 0:
for ext in self.exts:
from_fmt += f"+{ext._name_.lower()}"
if len(self.no_exts) > 0:
for ext in self.no_exts:
from_fmt += f"-{ext._name_.lower()}"
arguments.append(from_fmt)
arguments.extend(["--to", self.to_fmt._name_.lower()])
for opt in self.long_opts.keys():
maybe_val: Optional[Text] = self.long_opts[opt]
opt_name: Text = opt._name_.lower().replace("_", "-")
if maybe_val:
arguments.append(f"--{opt_name}={maybe_val}")
else:
arguments.append(f"--{opt_name}")
log.debug(f"args: {arguments}")
return arguments
def before(self, text: Text) -> Text:
"""Fix badly formatted markdown where heading marker `#` is not
followed by space.
NOTE: This method is pure.
:param text: input text before transformations
:return: output text after transformations
"""
log.debug("preprocessing [before hook]")
return re.sub(r"(#+)([A-Z])", "\1 \2", text, re.MULTILINE)
def after(self, text: Text) -> Text:
"""Transform relative links and references, Inject JavaScript.
NOTE: This method is pure.
Match on either src or href e.g.:
`src="./script.js"` and `href="styles.css"`
skip over whitespace e.g.:
`src=" address/file.png"`
match if relative e.g.:
`./`
or match if not an external link with a protocol e.g.:
`https://stackoverflow.com`
or match if not a valid directory reference e.g.:
`/srv/log/log.txt` and `~/file.txt`
:param text: input text
:return: output after transformations
"""
pat = re.compile(r'(href|src)="\s*(\./|(?![a-z]{2,10}://|~|\#|/))')
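        # Illustrative behaviour of the pattern (paths are hypothetical):
        #   href="./styles.css"   -> rewritten below to href="<dir>/styles.css"
        #   src="  img/pic.png"   -> rewritten (relative, no protocol)
        #   href="https://a.io/x" -> untouched (has a protocol)
        #   src="/srv/log/x.txt"  -> untouched (absolute path)
        #   href="#section"       -> untouched (fragment)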
d: Text = dirname(self.input_file)
with open(self.javascript) as f:
log.debug("postprocessing [after hook]")
return re.sub(pat, f'\\1="{d}/', text).replace(
"</body>", f"<script>{f.read()}</script></body>"
)
def execute(self) -> Text:
log.debug("executing")
with open(self.input_file, encoding="utf-8") as input:
return self.after(
run(
self.args,
encoding="utf-8",
input=self.before(input.read()),
stderr=DEVNULL,
stdout=PIPE,
).stdout
)
if __name__ == "__main__":
INPUT_FILE = sys.argv[1]
log.debug(f"input file: {INPUT_FILE}")
OUTPUT_FILE: Text = os.path.join(
"/tmp/vim",
basename(dirname(INPUT_FILE)),
re.sub(
r"^(.*)\.(?:r?md|m(?:ark)?down)$",
r"\1.html",
basename(INPUT_FILE),
            flags=re.IGNORECASE | re.MULTILINE,
),
)
log.debug(f"output file: {OUTPUT_FILE}")
ensure_exists(OUTPUT_FILE)
with open(OUTPUT_FILE, "w", encoding="utf-8") as output:
cmd = PandocCmd(INPUT_FILE)
output.write(cmd.execute())
print(f"Cmd: {' '.join(cmd.args)}")
print(f'Output: {output.name}')
# vim:foldmethod=manual:
|
the-stack_0_251 | import logging
from typing import Any, Dict, List, Optional
from starlette.applications import Starlette
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import HTMLResponse, JSONResponse
from starlette.routing import Mount, Route
from pait.api_doc.html import get_redoc_html as _get_redoc_html
from pait.api_doc.html import get_swagger_ui_html as _get_swagger_ui_html
from pait.api_doc.open_api import PaitOpenApi
from pait.field import Depends, Query
from pait.g import config
from pait.model.core import PaitCoreModel
from pait.model.status import PaitStatus
from ._load_app import load_app
from ._pait import Pait
def add_doc_route(
app: Starlette,
scheme: Optional[str] = None,
prefix: str = "/",
pin_code: str = "",
title: str = "Pait Doc",
open_api_tag_list: Optional[List[Dict[str, Any]]] = None,
) -> None:
if pin_code:
logging.info(f"doc route start pin code:{pin_code}")
doc_pait: Pait = Pait(
author=config.author or ("so1n",),
status=config.status or PaitStatus.release,
tag=("pait_doc",),
group="pait_doc",
)
def _get_request_pin_code(r_pin_code: str = Query.i("", alias="pin_code")) -> Optional[str]:
if pin_code:
if r_pin_code != pin_code:
raise HTTPException(
status_code=404,
detail=(
"The requested URL was not found on the server. If you entered"
" the URL manually please check your spelling and try again."
),
)
return r_pin_code
def _get_open_json_url(request: Request, r_pin_code: str) -> str:
openapi_json_url: str = f"{'/'.join(request.url.path.split('/')[:-1])}/openapi.json"
if r_pin_code:
openapi_json_url += f"?pin_code={r_pin_code}"
return openapi_json_url
@doc_pait()
def get_redoc_html(request: Request, r_pin_code: str = Depends.i(_get_request_pin_code)) -> HTMLResponse:
return HTMLResponse(_get_redoc_html(_get_open_json_url(request, r_pin_code), title))
@doc_pait()
def get_swagger_ui_html(request: Request, r_pin_code: str = Depends.i(_get_request_pin_code)) -> HTMLResponse:
return HTMLResponse(_get_swagger_ui_html(_get_open_json_url(request, r_pin_code), title))
@doc_pait(pre_depend_list=[_get_request_pin_code])
def openapi_route(request: Request) -> JSONResponse:
pait_dict: Dict[str, PaitCoreModel] = load_app(request.app)
_scheme: str = scheme or request.url.scheme
pait_openapi: PaitOpenApi = PaitOpenApi(
pait_dict,
title=title,
open_api_server_list=[{"url": f"{_scheme}://{request.url.hostname}:{request.url.port}", "description": ""}],
open_api_tag_list=open_api_tag_list,
)
return JSONResponse(pait_openapi.open_api_dict)
route: Mount = Mount(
prefix,
name="api doc",
routes=[
Route("/redoc", get_redoc_html, methods=["GET"]),
Route("/swagger", get_swagger_ui_html, methods=["GET"]),
Route("/openapi.json", openapi_route, methods=["GET"]),
],
)
app.routes.append(route)
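# A minimal usage sketch (prefix and pin code are illustrative):
#
#     from starlette.applications import Starlette
#
#     app = Starlette()
#     add_doc_route(app, prefix="/doc", pin_code="6666")
#     # Serves /doc/redoc, /doc/swagger and /doc/openapi.json; each route
#     # requires ?pin_code=6666 when a pin code is set.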
|
the-stack_0_252 | import hashlib
import io
import json
import tempfile
from random import randint
from unittest.mock import Mock
import pytest
import requests
from .shared import (
guess_mimetype,
HashWrap,
MogileFile,
encode_int,
future_waiter,
make_bucket_map,
maybe_update_max,
md5_fileobj_b64,
md5_fileobj_hex,
sha1_fileobj_b64,
sha1_fileobj_hex,
)
# pylint: disable=C0115,C0116,R0201
class TestMigrate:
MD5_HEX = "5eb63bbbe01eeed093cb22bb8f5acdc3"
SHA1_HEX = "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed"
MD5_B64 = "XrY7u+Ae7tCTyyK7j1rNww=="
SHA1_B64 = "Kq5sNclPz7QV2+lfQIuc6R7oRu0="
@pytest.fixture
def mogile_client(self):
mogile_client = Mock()
paths = Mock()
mogile_client.get_paths.return_value = paths
paths.data = {"paths": {1: "http://example.org/1", 2: "http://example.org/2"}}
return mogile_client
@pytest.fixture
def gcs_client(self):
gcs_client = Mock()
bucket = Mock()
blob = Mock()
gcs_client.get_bucket.return_value = bucket
bucket.get_blob.return_value = blob
bucket.blob.return_value = blob
return gcs_client
@pytest.fixture
def row(self):
# hello world sha1sum: 2aae6c35c94fcfb415dbe95f408b9ce91ee846ed
# (fid, dmid, dkey, length, classid, devcount)
return [1, 1, f"{self.SHA1_HEX}-mogilefs-prod-repo", 1593790, 0, 2]
    @pytest.fixture
def my_tempfile(self):
with tempfile.TemporaryFile() as tmp:
tmp.write(b"hello world")
yield tmp
@pytest.fixture
def mogile_file(self):
return MogileFile(dkey=f"{self.SHA1_HEX}-repo", fid=564879786, length=1)
def test_parse_row(self, row):
file = MogileFile.parse_row(row)
assert file.sha1sum == self.SHA1_HEX
assert file.fid == 1
assert file.length == 1593790
assert file.dkey == f"{self.SHA1_HEX}-mogilefs-prod-repo"
assert file.skip is False
assert file.mogile_bucket == "mogilefs-prod-repo"
def test_parse_bad_dmid(self, row):
row[1] = 2
with pytest.raises(AssertionError, match="Bad domain"):
MogileFile.parse_row(row)
def test_parse_temp_file(self, row):
row[2] = "8d26b4da-bd3e-47eb-888a-13bb3579c7e9.tmp"
file = MogileFile.parse_row(row)
assert file.skip is True
assert file.mogile_bucket is None
assert file.sha1sum is None
def test_parse_bad_class(self, row):
row[4] = 1
with pytest.raises(AssertionError, match="Bad class"):
MogileFile.parse_row(row)
def test_make_intermediary_key(self, mogile_file):
assert mogile_file.make_intermediary_key() == "0/564/879/0564879786.fid"
def test_make_contentrepo_key(self, mogile_file):
assert mogile_file.make_contentrepo_key() == self.SHA1_HEX
def test_exists_in_bucket(self, mogile_file, gcs_client):
blob = gcs_client.get_bucket().get_blob()
blob.size = 1
blob.md5_hash = self.MD5_HEX
assert (
mogile_file.exists_in_bucket(gcs_client, "my-bucket", "my-key")
== self.MD5_HEX
)
def test_does_not_exist_in_bucket(self, gcs_client, mogile_file):
gcs_client.get_bucket().get_blob.return_value = None
assert mogile_file.exists_in_bucket(gcs_client, "my-bucket", "my-file") is False
def test_mogile_file_to_json(self, mogile_file):
assert mogile_file == MogileFile.from_json(json.loads(mogile_file.to_json()))
def test_md5_fileobj(self, my_tempfile):
assert md5_fileobj_hex(my_tempfile) == self.MD5_HEX
assert md5_fileobj_b64(my_tempfile) == self.MD5_B64
def test_sha1_fileobj(self, my_tempfile):
assert sha1_fileobj_hex(my_tempfile) == self.SHA1_HEX
assert sha1_fileobj_b64(my_tempfile) == self.SHA1_B64
def test_put(
self, mogile_file: MogileFile, mogile_client, gcs_client, requests_mock
):
requests_mock.get("http://example.org/1", content=b"hello world")
requests_mock.get("http://example.org/2", content=b"hello world")
blob = gcs_client.get_bucket().get_blob()
def upload_from_file(fileobj, *args, **kwargs):
fileobj.read()
blob.upload_from_file.side_effect = upload_from_file
blob.upload_from_file.return_value = "xyz"
blob.md5_hash = self.MD5_B64
md5 = mogile_file.put(mogile_client, gcs_client, "my-bucket")
assert md5 == self.MD5_B64
def test_make_bucket_map(self):
assert make_bucket_map("a:b,c:d") == {"a": "b", "c": "d"}
def test_future_waiter(self):
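        # Build 100 mock futures whose done() becomes True only after a
        # varying number of polls, exercising future_waiter's polling loop
        # with a concurrency window of 10.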
futures_list = []
called_times = {}
for i in range(0, 100):
future = Mock()
future.exception.return_value = None
def mk_done(counter):
called_times[counter] = 0
return_after = counter % 3
def done():
called_times[counter] += 1
return called_times[counter] > return_after
return done
future.done = mk_done(i)
future.result.return_value = None
futures_list.append(future)
passthrough = future_waiter((f for f in futures_list), 10)
leftovers = list(iter(passthrough))
assert leftovers == [None] * 100
def test_future_waiter_exception(self):
with pytest.raises(Exception, match="huh"):
futures_list = []
for i in range(0, 10):
future = Mock()
future.exception.return_value = None
future.done.return_value = True
future.result.return_value = None
if i == 5:
future.exception.return_value = Exception("huh")
futures_list.append(future)
passthrough = future_waiter((f for f in futures_list), 10)
leftovers = list(iter(passthrough))
def test_hash_wrap(self):
bio = io.BytesIO(b"hello world")
md5 = hashlib.md5()
with HashWrap(bio, md5) as pipe:
assert pipe.read() == b"hello world"
assert md5.hexdigest() == "5eb63bbbe01eeed093cb22bb8f5acdc3"
def test_hash_wrap_requests(self, requests_mock):
requests_mock.get("http://example.org/1", content=b"hello world")
with requests.get("http://example.org/1", stream=True) as req:
req.raise_for_status()
sha1 = hashlib.sha1()
req.raw.decode_content = True
with HashWrap(req.raw, sha1) as pipe:
assert "hello world" == pipe.read().decode("utf-8")
assert sha1.hexdigest() == "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed"
def test_encode_int(self):
assert b"100" == encode_int(100)
i = randint(0, 100000000000)
assert i == int(encode_int(i))
def test_maybe_update_max(self):
db = {}
maybe_update_max(db, "last", 1)
assert db["last"] == b"1"
db = {"last": b"1"}
maybe_update_max(db, "last", 2)
assert db["last"] == b"2"
db = {"last": b"2"}
maybe_update_max(db, "last", 1)
assert db["last"] == b"2"
def test_guess_mimetype(self):
assert "image/png" == guess_mimetype(
"corpus-dev-0242ac130003/10.1371/image.pbio.v01.i01/1/image.pbio.v01.i01.g001.PNG_I"
)
assert "image/png" == guess_mimetype(
"gs:///corpus-dev-0242ac130003/10.1371/image.pbio.v01.i01/1/image.pbio.v01.i01.g001.PNG_I"
)
assert "image/png" == guess_mimetype("image.pbio.v01.i01.g001.PNG_I")
assert "image/png" == guess_mimetype("image.pbio.v01.i01.g001.PNG")
assert "image/png" == guess_mimetype("image.pbio.v01.i01.g001.PNG_I")
assert "application/octet-stream" == guess_mimetype("foo")
assert "text/csv" == guess_mimetype("foo.csv")
assert "text/html" == guess_mimetype("foo.html")
|
the-stack_0_255 | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import itertools
from pants.backend.python.goals import lockfile
from pants.backend.python.goals.lockfile import GeneratePythonLockfile
from pants.backend.python.subsystems.python_tool_base import PythonToolBase
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.target_types import ConsoleScript, InterpreterConstraintsField
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.core.goals.generate_lockfiles import GenerateToolLockfileSentinel
from pants.engine.rules import Get, collect_rules, rule
from pants.engine.target import AllTargets, AllTargetsRequest
from pants.engine.unions import UnionRule
from pants.util.docutil import git_url
from pants.util.logging import LogLevel
class IPython(PythonToolBase):
options_scope = "ipython"
help = "The IPython enhanced REPL (https://ipython.org/)."
default_version = "ipython==7.16.1" # The last version to support Python 3.6.
default_main = ConsoleScript("ipython")
register_lockfile = True
default_lockfile_resource = ("pants.backend.python.subsystems", "ipython_lockfile.txt")
default_lockfile_path = "src/python/pants/backend/python/subsystems/ipython_lockfile.txt"
default_lockfile_url = git_url(default_lockfile_path)
@classmethod
def register_options(cls, register):
super().register_options(register)
register(
"--ignore-cwd",
type=bool,
advanced=True,
default=True,
help="Whether to tell IPython not to put the CWD on the import path.\n\n"
"Normally you want this to be True, so that imports come from the hermetic "
"environment Pants creates.\n\nHowever IPython<7.13.0 doesn't support this option, "
"so if you're using an earlier version (e.g., because you have Python 2.7 code) "
"then you will need to set this to False, and you may have issues with imports "
"from your CWD shading the hermetic environment.",
)
class IPythonLockfileSentinel(GenerateToolLockfileSentinel):
resolve_name = IPython.options_scope
@rule(
desc=(
"Determine all Python interpreter versions used by iPython in your project (for lockfile "
"usage)"
),
level=LogLevel.DEBUG,
)
async def setup_ipython_lockfile(
_: IPythonLockfileSentinel, ipython: IPython, python_setup: PythonSetup
) -> GeneratePythonLockfile:
if not ipython.uses_lockfile:
return GeneratePythonLockfile.from_tool(ipython)
# IPython is often run against the whole repo (`./pants repl ::`), but it is possible to run
# on subsets of the codebase with disjoint interpreter constraints, such as
# `./pants repl py2::` and then `./pants repl py3::`. Still, even with those subsets possible,
# we need a single lockfile that works with all possible Python interpreters in use.
#
# This ORs all unique interpreter constraints. The net effect is that every possible Python
# interpreter used will be covered.
all_tgts = await Get(AllTargets, AllTargetsRequest())
unique_constraints = {
InterpreterConstraints.create_from_compatibility_fields(
[tgt[InterpreterConstraintsField]], python_setup
)
for tgt in all_tgts
if tgt.has_field(InterpreterConstraintsField)
}
constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
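    # For example, targets constrained to CPython==2.7.* and CPython>=3.6
    # respectively yield one lockfile request whose constraints are the OR
    # ["CPython==2.7.*", "CPython>=3.6"].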
return GeneratePythonLockfile.from_tool(
ipython, constraints or InterpreterConstraints(python_setup.interpreter_constraints)
)
def rules():
return (
*collect_rules(),
*lockfile.rules(),
UnionRule(GenerateToolLockfileSentinel, IPythonLockfileSentinel),
)
|
the-stack_0_256 | import os
from acres.model import topic_list
def test_parse():
topics = topic_list.parse("tests/resources/test_topics.tsv")
types = topic_list.unique_types(topics)
assert 'EKG' in types
def test_create_topic_list(ngramstat):
filename = "tests/resources/ngram_topics.tsv"
topic_list.create(filename, 1.0, 3)
assert os.path.exists(filename)
assert os.path.getsize(filename) > 10
|
the-stack_0_257 | # Copyright 2011 10gen
#
# Modified by Antonin Amand <[email protected]> to work with gevent.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pymongo.connection
from gevent.hub import getcurrent
import gevent.queue
import gevent.greenlet
import gevent.local
import gevent.coros
from gevent import socket
import weakref
class Pool(object):
""" A greenlet safe connection pool for gevent (non-thread safe).
"""
DEFAULT_TIMEOUT = 3.0
def __init__(self, pool_size, network_timeout=None):
self.network_timeout = network_timeout or self.DEFAULT_TIMEOUT
self.pool_size = pool_size
self._bootstrap(os.getpid())
self._lock = gevent.coros.RLock()
def _bootstrap(self, pid):
self._count = 0
self._pid = pid
self._used = {}
self._queue = gevent.queue.Queue(self.pool_size)
def connect(self, host, port):
"""Connect to Mongo and return a new (connected) socket.
"""
try:
# Prefer IPv4. If there is demand for an option
# to specify one or the other we can add it later.
s = socket.socket(socket.AF_INET)
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
s.settimeout(self.network_timeout or
pymongo.connection._CONNECT_TIMEOUT)
s.connect((host, port))
s.settimeout(self.network_timeout)
return s
except socket.gaierror:
# If that fails try IPv6
s = socket.socket(socket.AF_INET6)
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
s.settimeout(self.network_timeout or
pymongo.connection._CONNECT_TIMEOUT)
s.connect((host, port))
s.settimeout(self.network_timeout)
return s
def get_socket(self, host, port):
pid = os.getpid()
if pid != self._pid:
self._bootstrap(pid)
greenlet = getcurrent()
from_pool = True
sock = self._used.get(greenlet)
if sock is None:
with self._lock:
if self._count < self.pool_size:
self._count += 1
from_pool = False
sock = self.connect(host, port)
if sock is None:
from_pool = True
sock = self._queue.get(timeout=self.network_timeout)
if isinstance(greenlet, gevent.Greenlet):
greenlet.link(self._return)
self._used[greenlet] = sock
else:
ref = weakref.ref(greenlet, self._return)
self._used[ref] = sock
return sock, from_pool
def return_socket(self):
greenlet = getcurrent()
self._return(greenlet)
def _return(self, greenlet):
try:
sock = self._used.get(greenlet)
if sock is not None:
del self._used[greenlet]
self._queue.put(sock)
        except Exception:
with self._lock:
self._count -= 1
def patch():
import pymongo.connection
pymongo.connection._Pool = Pool
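# A minimal usage sketch (module name is hypothetical; this targets the
# pymongo 1.x internals where pymongo.connection._Pool is the pool hook):
#
#     import gevent.monkey; gevent.monkey.patch_all()
#     import gevent_mongo_pool  # this module
#     gevent_mongo_pool.patch()
#     import pymongo
#     conn = pymongo.Connection()  # connections now pooled per-greenlet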
|
the-stack_0_259 | from PyQt5 import QtWidgets
from PyQt5.QtCore import QObject, pyqtSlot, pyqtSignal, QLocale, Qt
from PyQt5.QtWidgets import QPushButton, QLabel
from PyQt5.QtGui import QIcon, QPixmap
import sys
from pymodaq.daq_utils.parameter import utils as putils
from pymodaq.daq_measurement.daq_measurement_main import DAQ_Measurement
from collections import OrderedDict
from pymodaq.daq_utils.plotting.crosshair import Crosshair
import pyqtgraph as pg
import numpy as np
from pymodaq.daq_utils import daq_utils as utils
from pymodaq.daq_utils.gui_utils import QAction
from pymodaq.daq_utils.plotting.viewer1D.viewer1Dbasic import Viewer1DBasic
from pymodaq.daq_utils.managers.roi_manager import ROIManager
import datetime
logger = utils.set_logger(utils.get_module_name(__file__))
class Viewer1D(QtWidgets.QWidget, QObject):
"""this plots 1D data on a plotwidget. Math and measurement can be done on it. Datas and measurements are then exported with the signal
data_to_export_signal
"""
data_to_export_signal = pyqtSignal(OrderedDict) # self.data_to_export=edict(data0D=None,data1D=None,data2D=None)
math_signal = pyqtSignal(OrderedDict) # OrderedDict:=[x_axis=...,data=...,ROI_bounds=...,operation=]
ROI_changed = pyqtSignal()
ROI_changed_finished = pyqtSignal()
def __init__(self, parent=None):
QLocale.setDefault(QLocale(QLocale.English, QLocale.UnitedStates))
super(Viewer1D, self).__init__()
self.viewer_type = 'Data1D'
self.title = 'viewer1D'
if parent is None:
parent = QtWidgets.QWidget()
self.parent = parent
self.roi_manager = ROIManager('1D')
self.roi_manager.new_ROI_signal.connect(self.add_lineout)
self.roi_manager.remove_ROI_signal.connect(self.remove_ROI)
# self.roi_manager.ROI_changed_finished.connect(self.update_lineouts)
self.setupUI()
self.wait_time = 3000
self.measurement_module = None
self.math_module = Viewer1D_math()
if DAQ_Measurement is None: # pragma: no cover
self.ui.do_measurements_pb.setVisible(False)
self._labels = []
self.plot_channels = None
self.plot_colors = utils.plot_colors
self.color_list = ROIManager.color_list
self.lo_items = OrderedDict([])
self.lo_data = OrderedDict([])
self.ROI_bounds = []
self._x_axis = None
self.datas = [] # datas on each channel. list of 1D arrays
self.data_to_export = None
self.measurement_dict = OrderedDict(x_axis=None, datas=[], ROI_bounds=[], operations=[], channels=[])
        # OrderedDict to be sent to the daq_measurement module
self.measure_data_dict = OrderedDict()
        # dictionary with data to be put in the table in the form: key="Meas.{}:".format(ind)
        # and value is the result of a given lineout or measurement
def setupUI(self):
self.ui = QObject()
self.parent.setLayout(QtWidgets.QVBoxLayout())
splitter_hor = QtWidgets.QSplitter(Qt.Horizontal)
# self.ui.statusbar = QtWidgets.QStatusBar()
# self.ui.statusbar.setMaximumHeight(15)
self.parent.layout().addWidget(splitter_hor)
#self.parent.layout().addWidget(self.ui.statusbar)
splitter_ver = QtWidgets.QSplitter(Qt.Vertical)
splitter_hor.addWidget(splitter_ver)
splitter_hor.addWidget(self.roi_manager.roiwidget)
self.roi_manager.roiwidget.hide()
self.ui.button_widget = QtWidgets.QToolBar()
splitter_ver.addWidget(self.ui.button_widget)
self.ui.Graph_Lineouts = pg.PlotWidget()
widg = QtWidgets.QWidget()
self.viewer = Viewer1DBasic(widg)
splitter_ver.addWidget(widg)
splitter_ver.addWidget(self.ui.Graph_Lineouts)
self.ui.Graph1D = self.viewer # for backcompatibility
self.roi_manager.viewer_widget = self.viewer.plotwidget
self.setup_buttons(self.ui.button_widget)
self.setup_zoom()
self.legend = None
self.axis_settings = dict(orientation='bottom', label='x axis', units='pxls')
self.ui.xaxis_item = self.viewer.plotwidget.plotItem.getAxis('bottom')
self.ui.Graph_Lineouts.hide()
self.ui.aspect_ratio_pb.clicked.connect(self.lock_aspect_ratio)
# #crosshair
self.ui.crosshair = Crosshair(self.viewer.plotwidget.plotItem, orientation='vertical')
self.ui.crosshair.crosshair_dragged.connect(self.update_crosshair_data)
self.ui.crosshair_pb.clicked.connect(self.crosshairClicked)
self.crosshairClicked()
# self.ui.Measurement_widget=Dock("Measurement Module", size=(300, 100), closable=True)
# self.dockarea.addDock(self.ui.Measurement_widget)
self.ui.Measurement_widget = QtWidgets.QWidget()
self.ui.Measurement_widget.setVisible(False)
# #Connecting buttons:
self.ui.Do_math_pb.clicked.connect(self.do_math_fun)
self.ui.do_measurements_pb.clicked.connect(self.open_measurement_module)
self.ui.zoom_pb.clicked.connect(self.enable_zoom)
self.ui.scatter.clicked.connect(self.do_scatter)
self.ui.xyplot_action.triggered.connect(self.do_xy)
def setup_buttons(self, button_widget):
self.ui.zoom_pb = QAction(QIcon(QPixmap(":/icons/Icon_Library/Zoom_to_Selection.png")), 'Zoom Widget')
self.ui.zoom_pb.setCheckable(True)
button_widget.addAction(self.ui.zoom_pb)
self.ui.Do_math_pb = QAction(QIcon(QPixmap(":/icons/Icon_Library/Calculator.png")), 'Do Math using ROI')
self.ui.Do_math_pb.setCheckable(True)
button_widget.addAction(self.ui.Do_math_pb)
self.ui.do_measurements_pb = QAction(QIcon(QPixmap(":/icons/Icon_Library/MeasurementStudio_32.png")),
'Do Advanced measurements (fits,...)')
self.ui.do_measurements_pb.setCheckable(True)
button_widget.addAction(self.ui.do_measurements_pb)
self.ui.crosshair_pb = QAction(QIcon(QPixmap(":/icons/Icon_Library/reset.png")),
'Show data cursor')
self.ui.crosshair_pb.setCheckable(True)
button_widget.addAction(self.ui.crosshair_pb)
self.ui.aspect_ratio_pb = QAction(QIcon(QPixmap(":/icons/Icon_Library/zoomReset.png")),
'Fix the aspect ratio')
self.ui.aspect_ratio_pb.setCheckable(True)
button_widget.addAction(self.ui.aspect_ratio_pb)
self.ui.scatter = QAction(QIcon(QPixmap(":/icons/Icon_Library/Marker.png")),
'Switch between line or scatter plots')
self.ui.scatter.setCheckable(True)
button_widget.addAction(self.ui.scatter)
self.ui.xyplot_action = QAction(QIcon(QPixmap(":/icons/Icon_Library/2d.png")),
'Switch between normal or XY representation (valid for 2 channels)')
self.ui.xyplot_action.setCheckable(True)
button_widget.addAction(self.ui.xyplot_action)
self.ui.xyplot_action.setVisible(False)
self.ui.x_label = QAction('x:')
button_widget.addAction(self.ui.x_label)
self.ui.y_label = QAction('y:')
button_widget.addAction(self.ui.y_label)
def setup_zoom(self):
# create and set the zoom widget
# self.ui.zoom_widget=Dock("1DViewer zoom", size=(300, 100), closable=True)
self.ui.zoom_widget = QtWidgets.QWidget()
layout = QtWidgets.QHBoxLayout()
self.ui.Graph_zoom = pg.PlotWidget()
layout.addWidget(self.ui.Graph_zoom)
self.ui.zoom_widget.setLayout(layout)
self.ui.zoom_region = pg.LinearRegionItem()
self.ui.zoom_region.setZValue(-10)
self.ui.zoom_region.setBrush('r')
self.ui.zoom_region.setOpacity(0.2)
self.ui.Graph_zoom.addItem(self.ui.zoom_region)
self.zoom_plot = []
# self.dockarea.addDock(self.ui.zoom_widget)
self.ui.zoom_widget.setVisible(False)
def do_scatter(self):
self.update_graph1D(self.datas)
def do_xy(self):
if self.ui.xyplot_action.isChecked():
axis = self.viewer.plotwidget.plotItem.getAxis('bottom')
axis.setLabel(text=self.labels[0], units='')
axis = self.viewer.plotwidget.plotItem.getAxis('left')
axis.setLabel(text=self.labels[1], units='')
self.legend.setVisible(False)
else:
self.set_axis_label(dict(orientation='bottom', label=self.axis_settings['label'],
units=self.axis_settings['units']))
axis = self.viewer.plotwidget.plotItem.getAxis('left')
axis.setLabel(text='', units='')
self.legend.setVisible(True)
self.update_graph1D(self.datas)
def update_lineouts(self):
try:
operations = []
channels = []
for ind, key in enumerate(self.roi_manager.ROIs):
operations.append(self.roi_manager.settings.child('ROIs', key, 'math_function').value())
channels.append(
self.roi_manager.settings.child('ROIs', key,
'use_channel').opts['limits'].index(
self.roi_manager.settings.child('ROIs',
key, 'use_channel').value()))
self.lo_items[key].setPen(self.roi_manager.settings.child('ROIs', key,
'Color').value())
self.measurement_dict['datas'] = self.datas
self.measurement_dict['ROI_bounds'] = [self.roi_manager.ROIs[item].getRegion() for item in
self.roi_manager.ROIs]
self.measurement_dict['channels'] = channels
self.measurement_dict['operations'] = operations
data_lo = self.math_module.update_math(self.measurement_dict)
self.show_math(data_lo)
except Exception as e:
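            # Errors here are deliberately swallowed: lineouts can fire
            # while ROIs and channels are still being initialised.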
pass
@pyqtSlot(str)
def remove_ROI(self, roi_name):
item = self.lo_items.pop(roi_name)
self.ui.Graph_Lineouts.plotItem.removeItem(item)
self.measure_data_dict.pop("Lineout_{:s}:".format(roi_name))
self.update_lineouts()
@pyqtSlot(int, str)
def add_lineout(self, index, roi_type=''):
try:
item = self.roi_manager.ROIs['ROI_{:02d}'.format(index)]
item_param = self.roi_manager.settings.child('ROIs', 'ROI_{:02d}'.format(index))
item_param.child(('use_channel')).setOpts(limits=self.labels)
if len(self.labels) == 0: # pragma: no cover
lab = ''
else:
lab = self.labels[0]
item_param.child(('use_channel')).setValue(lab)
item.sigRegionChanged.connect(self.update_lineouts)
item.sigRegionChangeFinished.connect(self.ROI_changed_finished.emit)
for child in putils.iter_children_params(item_param, childlist=[]):
if child.type() != 'group':
child.sigValueChanged.connect(self.update_lineouts)
item_lo = self.ui.Graph_Lineouts.plot()
item_lo.setPen(item_param.child(('Color')).value())
self.lo_items['ROI_{:02d}'.format(index)] = item_lo
self.lo_data = OrderedDict([])
for k in self.lo_items:
self.lo_data[k] = np.zeros((1,))
self.update_lineouts()
except Exception as e:
logger.exception(str(e))
def clear_lo(self):
self.lo_data = [[] for ind in range(len(self.lo_data))]
self.update_lineouts()
def crosshairClicked(self):
if self.ui.crosshair_pb.isChecked():
self.ui.crosshair.setVisible(True)
self.ui.x_label.setVisible(True)
self.ui.y_label.setVisible(True)
range = self.viewer.plotwidget.plotItem.vb.viewRange()
self.ui.crosshair.set_crosshair_position(xpos=np.mean(np.array(range[0])))
else:
self.ui.crosshair.setVisible(False)
self.ui.x_label.setVisible(False)
self.ui.y_label.setVisible(False)
def do_math_fun(self):
try:
if self.ui.Do_math_pb.isChecked():
self.roi_manager.roiwidget.show()
self.ui.Graph_Lineouts.show()
else:
self.ui.Graph_Lineouts.hide()
self.roi_manager.roiwidget.hide()
except Exception as e:
logger.exception(str(e))
def do_zoom(self):
bounds = self.ui.zoom_region.getRegion()
self.viewer.plotwidget.setXRange(bounds[0], bounds[1])
def enable_zoom(self):
try:
if not (self.ui.zoom_pb.isChecked()):
if self.zoom_plot != []:
for plot in self.zoom_plot:
self.ui.Graph_zoom.removeItem(plot)
self.ui.zoom_widget.hide()
self.ui.zoom_region.sigRegionChanged.disconnect(self.do_zoom)
else:
self.zoom_plot = []
for ind, data in enumerate(self.datas):
channel = self.ui.Graph_zoom.plot()
channel.setPen(self.plot_colors[ind])
self.zoom_plot.append(channel)
self.update_graph1D(self.datas)
self.ui.zoom_region.setRegion([np.min(self._x_axis), np.max(self._x_axis)])
self.ui.zoom_widget.show()
self.ui.zoom_region.sigRegionChanged.connect(self.do_zoom)
except Exception as e:
logger.exception(str(e))
def ini_data_plots(self, Nplots):
try:
self.plot_channels = []
# if self.legend is not None:
# self.viewer.plotwidget.plotItem.removeItem(self.legend)
self.legend = self.viewer.plotwidget.plotItem.legend
flag = True
while flag:
items = [item[1].text for item in self.legend.items]
if len(items) == 0:
flag = False
else:
self.legend.removeItem(items[0])
channels = []
for ind in range(Nplots):
channel = self.viewer.plotwidget.plot()
channel.setPen(self.plot_colors[ind])
self.legend.addItem(channel, self._labels[ind])
channels.append(ind)
self.plot_channels.append(channel)
except Exception as e:
logger.exception(str(e))
def update_labels(self, labels=[]):
try:
labels_tmp = labels[:]
if self.labels == labels:
if self.labels == [] or len(self.labels) < len(self.datas):
self._labels = ["CH{}".format(ind) for ind in range(len(self.datas))]
else:
if self.legend is not None:
flag = True
while flag:
items = [item[1].text for item in self.legend.items]
if len(items) == 0:
flag = False
else:
self.legend.removeItem(items[0])
if len(labels) < len(self.plot_channels):
for ind in range(len(labels), len(self.plot_channels)):
labels_tmp.append('CH{:02d}'.format(ind))
if len(labels_tmp) == len(self.plot_channels):
for ind, channel in enumerate(self.plot_channels):
self.legend.addItem(channel, labels_tmp[ind])
self._labels = labels_tmp
if self.labels != labels:
for ind in range(len(self.roi_manager.ROIs)):
val = self.roi_manager.settings.child('ROIs', 'ROI_{:02d}'.format(ind), 'use_channel').value()
self.roi_manager.settings.child('ROIs', 'ROI_{:02d}'.format(ind), 'use_channel').setOpts(
limits=self.labels)
if val not in self.labels:
self.roi_manager.settings.child('ROIs', 'ROI_{:02d}'.format(ind), 'use_channel').setValue(
self.labels[0])
self.ui.xyplot_action.setVisible(len(self.labels) == 2)
except Exception as e:
logger.exception(str(e))
@property
def labels(self):
return self._labels
@labels.setter
def labels(self, labels):
self.update_labels(labels)
self._labels = labels
def lock_aspect_ratio(self):
if self.ui.aspect_ratio_pb.isChecked():
self.viewer.plotwidget.plotItem.vb.setAspectLocked(lock=True, ratio=1)
else:
self.viewer.plotwidget.plotItem.vb.setAspectLocked(lock=False)
def open_measurement_module(self):
if not (self.ui.Do_math_pb.isChecked()):
self.ui.Do_math_pb.setChecked(True)
QtWidgets.QApplication.processEvents()
self.ui.Do_math_pb.clicked.emit()
QtWidgets.QApplication.processEvents()
self.ui.Measurement_widget.setVisible(True)
if self.ui.do_measurements_pb.isChecked():
Form = self.ui.Measurement_widget
self.measurement_module = DAQ_Measurement(Form)
# self.ui.Measurement_widget.addWidget(Form)
self.measurement_module.measurement_signal[list].connect(self.show_measurement)
self.update_measurement_module()
elif self.measurement_module is not None:
self.measurement_module.Quit_fun()
def remove_plots(self):
if self.plot_channels is not None:
for channel in self.plot_channels:
self.viewer.plotwidget.removeItem(channel)
self.plot_channels = None
if self.legend is not None:
self.viewer.plotwidget.removeItem(self.legend)
def set_axis_label(self, axis_settings=dict(orientation='bottom', label='x axis', units='pxls')):
axis = self.viewer.plotwidget.plotItem.getAxis(axis_settings['orientation'])
axis.setLabel(text=axis_settings['label'], units=axis_settings['units'])
self.axis_settings = axis_settings
@pyqtSlot(list)
def show_data(self, datas, labels=None, x_axis=None):
try:
self.datas = datas
self.update_labels(self.labels)
self.data_to_export = OrderedDict(name=self.title, data0D=OrderedDict(), data1D=OrderedDict(), data2D=None)
for ind, data in enumerate(datas):
self.data_to_export['data1D']['CH{:03d}'.format(ind)] = utils.DataToExport()
if self.plot_channels == [] or self.plot_channels is None: # initialize data and plots
self.ini_data_plots(len(datas))
elif len(self.plot_channels) != len(datas):
self.remove_plots()
self.ini_data_plots(len(datas))
self.update_graph1D(datas)
if x_axis is not None:
self.x_axis = x_axis
if labels is not None:
self.update_labels(labels)
if self.ui.do_measurements_pb.isChecked():
self.update_measurement_module()
except Exception as e:
logger.exception(str(e))
@pyqtSlot(list)
def show_data_temp(self, datas):
"""f
to plot temporary data, for instance when all pixels are not yet populated...
"""
try:
self.update_labels(self.labels)
self.datas = datas
if self.plot_channels is None: # initialize data and plots
self.ini_data_plots(len(datas))
elif len(self.plot_channels) != len(datas):
self.remove_plots()
self.ini_data_plots(len(datas))
for ind_plot, data in enumerate(datas):
if self.x_axis is None:
self.x_axis = np.linspace(0, len(data), len(data), endpoint=False)
x_axis = self.x_axis
elif len(self.x_axis) != len(data):
x_axis = np.linspace(0, len(data), len(data), endpoint=False)
else:
x_axis = self.x_axis
self.plot_channels[ind_plot].setData(x=x_axis, y=data)
except Exception as e:
logger.exception(str(e))
@pyqtSlot(list)
def show_math(self, data_lo):
# self.data_to_export=OrderedDict(x_axis=None,y_axis=None,z_axis=None,data0D=None,data1D=None,data2D=None)
if len(data_lo) != 0:
for ind, key in enumerate(self.lo_items):
self.measure_data_dict["Lineout_{:s}:".format(key)] = data_lo[ind]
self.data_to_export['data0D']['Measure_{:03d}'.format(ind)] = utils.DataToExport(name=self.title,
data=data_lo[ind],
source='roi')
self.roi_manager.settings.child(('measurements')).setValue(self.measure_data_dict)
for ind, key in enumerate(self.lo_items):
self.lo_data[key] = np.append(self.lo_data[key], data_lo[ind])
self.lo_items[key].setData(y=self.lo_data[key])
if not (self.ui.do_measurements_pb.isChecked()): # otherwise you export data from measurement
self.data_to_export['acq_time_s'] = datetime.datetime.now().timestamp()
self.data_to_export_signal.emit(self.data_to_export)
@pyqtSlot(list)
def show_measurement(self, data_meas):
ind_offset = len(self.data_to_export['data0D'])
for ind, res in enumerate(data_meas):
self.measure_data_dict["Meas.{}:".format(ind)] = res
self.data_to_export['data0D']['Measure_{:03d}'.format(ind + ind_offset)] = \
utils.DataToExport(name=self.title, data=res, source='roi')
self.roi_manager.settings.child('measurements').setValue(self.measure_data_dict)
self.data_to_export['acq_time_s'] = datetime.datetime.now().timestamp()
self.data_to_export_signal.emit(self.data_to_export)
def update_crosshair_data(self, posx, posy, name=""):
try:
indx = utils.find_index(self._x_axis, posx)[0][0]
string = "y="
for data in self.datas:
string += "{:.6e} / ".format(data[indx])
self.ui.y_label.setText(string)
self.ui.x_label.setText("x={:.6e} ".format(posx))
except Exception as e:
pass
def update_graph1D(self, datas):
# self.data_to_export=OrderedDict(data0D=OrderedDict(),data1D=OrderedDict(),data2D=None)
try:
pens = []
symbolBrushs = []
symbolSize = 5
for ind, ch in enumerate(self.plot_channels):
if self.ui.scatter.isChecked():
pens.append(None)
symbol = 'o'
symbolBrushs.append(self.plot_colors[ind])
else:
pens.append(self.plot_colors[ind])
symbol = None
symbolBrushs.append(None)
if self.x_axis is None:
self._x_axis = np.linspace(0, len(datas[0]), len(datas[0]), endpoint=False)
elif len(self.x_axis) != len(datas[0]):
self._x_axis = np.linspace(0, len(datas[0]), len(datas[0]), endpoint=False)
for ind_plot, data in enumerate(datas):
if not self.ui.xyplot_action.isChecked() or len(datas) == 0:
self.plot_channels[ind_plot].setData(x=self.x_axis, y=data, pen=pens[ind_plot], symbol=symbol,
symbolBrush=symbolBrushs[ind_plot], symbolSize=symbolSize,
pxMode=True)
else:
self.plot_channels[ind_plot].setData(x=np.array([]), y=np.array([]), pen=pens[ind_plot], symbol=symbol,
symbolBrush=symbolBrushs[ind_plot], symbolSize=symbolSize,
pxMode=True)
if self.ui.zoom_pb.isChecked():
self.zoom_plot[ind_plot].setData(x=self.x_axis, y=data)
x_axis = utils.Axis(data=self.x_axis, units=self.axis_settings['units'],
label=self.axis_settings['label'])
self.data_to_export['data1D']['CH{:03d}'.format(ind_plot)].update(
OrderedDict(name=self.title, data=data, x_axis=x_axis, source='raw')) # to be saved or exported
if self.ui.xyplot_action.isChecked() and len(datas) > 1:
self.plot_channels[0].setData(x=datas[0], y=datas[1], pen=pens[0], symbol=symbol,
symbolBrush=symbolBrushs[0], symbolSize=symbolSize,
pxMode=True)
if not self.ui.Do_math_pb.isChecked(): # otherwise math is done and then data is exported
self.data_to_export['acq_time_s'] = datetime.datetime.now().timestamp()
self.data_to_export_signal.emit(self.data_to_export)
else:
self.measurement_dict['datas'] = datas
if self.measurement_dict['x_axis'] is None:
self.measurement_dict['x_axis'] = self._x_axis
data_lo = self.math_module.update_math(self.measurement_dict)
self.show_math(data_lo)
except Exception as e:
logger.exception(str(e))
def update_measurement_module(self):
xdata = self.measurement_dict['x_axis']
ydata = self.measurement_dict['datas'][0]
if xdata is None:
self.measurement_module.update_data(ydata=ydata)
else:
self.measurement_module.update_data(xdata=xdata, ydata=ydata)
def update_status(self, txt):
logger.info(txt)
@property
def x_axis(self):
return self._x_axis
@x_axis.setter
def x_axis(self, x_axis):
label = 'Pxls'
units = ''
if isinstance(x_axis, dict):
if 'data' in x_axis:
xdata = x_axis['data']
if 'label' in x_axis:
label = x_axis['label']
if 'units' in x_axis:
units = x_axis['units']
else:
xdata = x_axis
self._x_axis = xdata
self.measurement_dict['x_axis'] = self._x_axis
if self.datas != []:
self.show_data_temp(self.datas)
self.set_axis_label(dict(orientation='bottom', label=label, units=units))
class Viewer1D_math(QObject):
status_sig = pyqtSignal(list)
def __init__(self):
super(QObject, self).__init__()
self.datas = []
self.ROI_bounds = []
self.x_axis = None
self.operations = []
self.channels = []
def update_math(self, measurement_dict):
try:
if 'datas' in measurement_dict:
self.datas = measurement_dict['datas']
if 'ROI_bounds' in measurement_dict:
self.ROI_bounds = measurement_dict['ROI_bounds']
if 'x_axis' in measurement_dict:
self.x_axis = measurement_dict['x_axis']
if 'operations' in measurement_dict:
self.operations = measurement_dict['operations']
if 'channels' in measurement_dict:
self.channels = measurement_dict['channels']
# self.status_sig.emit(["Update_Status","doing math"])
data_lo = []
for ind_meas in range(len(self.operations)):
indexes = utils.find_index(self.x_axis, self.ROI_bounds[ind_meas])
ind1 = indexes[0][0]
ind2 = indexes[1][0]
sub_data = self.datas[self.channels[ind_meas]][ind1:ind2]
sub_xaxis = self.x_axis[ind1:ind2]
if self.operations[ind_meas] == "Mean":
data_lo.append(float(np.mean(sub_data)))
elif self.operations[ind_meas] == "Sum":
data_lo.append(float(np.sum(sub_data)))
elif self.operations[ind_meas] == 'half-life' or self.operations[ind_meas] == 'expotime':
ind_x0 = utils.find_index(sub_data, np.max(sub_data))[0][0]
x0 = sub_xaxis[ind_x0]
sub_xaxis = sub_xaxis[ind_x0:]
sub_data = sub_data[ind_x0:]
offset = sub_data[-1]
N0 = np.max(sub_data) - offset
if self.operations[ind_meas] == 'half-life':
time = sub_xaxis[utils.find_index(sub_data - offset, 0.5 * N0)[0][0]] - x0
elif self.operations[ind_meas] == 'expotime':
time = sub_xaxis[utils.find_index(sub_data - offset, 0.37 * N0)[0][0]] - x0
data_lo.append(time)
return data_lo
except Exception as e:
logger.exception(str(e))
return []
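# Illustrative sketch, not part of the original module: the decay-time math used
# in update_math() above. For a signal N(t) = N0 * exp(-t / tau) + offset,
# 'half-life' is the time at which the signal falls to 0.5 * N0 above the offset
# (i.e. tau * ln(2)) and 'expotime' the time at which it falls to about
# 0.37 * N0 (i.e. 1/e, reached at t = tau). The synthetic check below relies on
# the same utils.find_index(array, value) semantics used elsewhere in this file.
def _check_decay_times(tau=10.0):
    t = np.linspace(0, 10 * tau, 1001)
    signal = np.exp(-t / tau)  # N0 = 1, offset = 0
    t_half = t[utils.find_index(signal, 0.5)[0][0]]
    t_e = t[utils.find_index(signal, 0.37)[0][0]]
    return t_half, t_e  # approximately (tau * np.log(2), tau)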
if __name__ == '__main__': # pragma: no cover
app = QtWidgets.QApplication(sys.argv)
Form = QtWidgets.QWidget()
prog = Viewer1D(Form)
from pymodaq.daq_utils.daq_utils import gauss1D
x = np.linspace(0, 200, 201)
y1 = gauss1D(x, 75, 25)
y2 = gauss1D(x, 120, 50, 2)
tau_half = 27
tau2 = 100
x0 = 50
dx = 20
ydata_expodec = np.zeros((len(x)))
ydata_expodec[:50] = 1 * gauss1D(x[:50], x0, dx, 2)
ydata_expodec[50:] = 1 * np.exp(-(x[50:] - x0) / (tau_half / np.log(2))) # +1*np.exp(-(x[50:]-x0)/tau2)
ydata_expodec += 0.1 * np.random.rand(len(x))
# x = np.sin(np.linspace(0,6*np.pi,201))
# y = np.sin(np.linspace(0, 6*np.pi, 201)+np.pi/2)
Form.show()
prog.ui.Do_math_pb.click()
QtWidgets.QApplication.processEvents()
prog.x_axis = x
# prog.show_data([y, y+2])
prog.show_data([y1, y2, ydata_expodec])
QtWidgets.QApplication.processEvents()
prog.update_labels(['coucou', 'label2'])
sys.exit(app.exec_())
|
the-stack_0_260 | # Author: Phyllipe Bezerra (https://github.com/pmba)
clothes = {
0: "underwear",
1: "pants",
2: "belt",
3: "suit",
4: "shoe",
5: "socks",
6: "shirt",
7: "tie",
8: "clock",
}
graph = [[1, 4], [2, 4], [3], [], [], [4], [2, 7], [3], []]  # graph[u] lists the items that must be put on after item u
visited = [0 for x in range(len(graph))]
stack = []
def print_stack(stack, clothes):
order = 1
while stack:
cur_clothe = stack.pop()
print(order, clothes[cur_clothe])
order += 1
def dfs(u, visited, graph):
    # depth-first search: u is pushed onto the stack only after every item
    # that must be worn after it has been pushed
    visited[u] = 1
    for v in graph[u]:
        if not visited[v]:
            dfs(v, visited, graph)
    stack.append(u)
def top_sort(graph, visited):
for v in range(len(graph)):
if not visited[v]:
dfs(v, visited, graph)
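# Illustrative check, not part of the original script: a dressing order is valid
# when, for every edge u -> v, item u is put on before item v. After top_sort()
# the order is the stack read from top to bottom, i.e. list(reversed(stack)).
def is_valid_order(order, graph):
    position = {node: idx for idx, node in enumerate(order)}
    return all(position[u] < position[v] for u in range(len(graph)) for v in graph[u])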
if __name__ == "__main__":
top_sort(graph, visited)
print(stack)
print_stack(stack, clothes)
|
the-stack_0_261 | # Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Actions related to codesigning."""
load(
"@build_bazel_rules_apple//apple/internal/utils:defines.bzl",
"defines",
)
load(
"@build_bazel_rules_apple//apple/internal/utils:legacy_actions.bzl",
"legacy_actions",
)
load(
"@build_bazel_rules_apple//apple/internal:intermediates.bzl",
"intermediates",
)
load(
"@build_bazel_rules_apple//apple/internal:platform_support.bzl",
"platform_support",
)
load(
"@build_bazel_rules_apple//apple/internal:rule_support.bzl",
"rule_support",
)
load(
"@bazel_skylib//lib:paths.bzl",
"paths",
)
load(
"@bazel_skylib//lib:shell.bzl",
"shell",
)
def _double_quote(raw_string):
"""Add double quotes around the string and preserve existing quote characters.
Args:
raw_string: A string that might have shell-syntaxed environment variables.
Returns:
The string with double quotes.
"""
return "\"" + raw_string.replace("\"", "\\\"") + "\""
def _no_op(x):
    """Helper that does nothing but return its argument unchanged."""
    return x
def _codesign_args_for_path(
ctx,
path_to_sign,
provisioning_profile,
entitlements_file,
shell_quote = True):
"""Returns a command line for the codesigning tool wrapper script.
Args:
ctx: The Starlark context.
path_to_sign: A struct indicating the path that should be signed and its
optionality (see `_path_to_sign`).
provisioning_profile: The provisioning profile file. May be `None`.
entitlements_file: The entitlements file to pass to codesign. May be `None`
for non-app binaries (e.g. test bundles).
shell_quote: Sanitizes the arguments to be evaluated in a shell.
Returns:
The codesign command invocation for the given directory as a list.
"""
if not path_to_sign.is_directory and path_to_sign.signed_frameworks:
fail("Internal Error: Received a list of signed frameworks as exceptions " +
"for code signing, but path to sign is not a directory.")
for x in path_to_sign.signed_frameworks:
if not x.startswith(path_to_sign.path):
fail("Internal Error: Signed framework does not have the current path " +
"to sign (%s) as its prefix (%s)." % (path_to_sign.path, x))
cmd_codesigning = [
"--codesign",
"/usr/bin/codesign",
]
is_device = platform_support.is_device_build(ctx)
# Add quotes for sanitizing inputs when they're invoked directly from a shell script, for
# instance when using this string to assemble the output of codesigning_command.
maybe_quote = shell.quote if shell_quote else _no_op
maybe_double_quote = _double_quote if shell_quote else _no_op
# First, try to use the identity passed on the command line, if any. If it's a simulator build,
# use an ad hoc identity.
identity = ctx.fragments.objc.signing_certificate_name if is_device else "-"
if not identity:
if provisioning_profile:
cmd_codesigning.extend([
"--mobileprovision",
maybe_quote(provisioning_profile.path),
])
else:
identity = "-"
if identity:
cmd_codesigning.extend([
"--identity",
maybe_quote(identity),
])
# The entitlements rule ensures that entitlements_file is None or a file
# containing only "com.apple.security.get-task-allow" when building for the
# simulator.
if path_to_sign.use_entitlements and entitlements_file:
cmd_codesigning.extend([
"--entitlements",
maybe_quote(entitlements_file.path),
])
if is_device:
cmd_codesigning.append("--force")
else:
cmd_codesigning.extend([
"--force",
"--disable_timestamp",
])
if path_to_sign.is_directory:
cmd_codesigning.append("--directory_to_sign")
else:
cmd_codesigning.append("--target_to_sign")
    # Because the path does include environment variables which need to be expanded, the path
    # has to be quoted using double quotes; this means that it can't be quoted using shell.quote.
cmd_codesigning.append(maybe_double_quote(path_to_sign.path))
if path_to_sign.signed_frameworks:
for signed_framework in path_to_sign.signed_frameworks:
# Signed frameworks must also be double quoted, as they too have an environment
# variable to be expanded.
cmd_codesigning.extend([
"--signed_path",
maybe_double_quote(signed_framework),
])
return cmd_codesigning
def _path_to_sign(path, is_directory = False, signed_frameworks = [], use_entitlements = True):
"""Returns a "path to sign" value to be passed to `_signing_command_lines`.
Args:
path: The path to sign, relative to wherever the code signing command lines
are being executed.
is_directory: If `True`, the path is a directory and not a bundle, indicating
that the contents of each item in the directory should be code signed
except for the invisible files prefixed with a period.
signed_frameworks: If provided, a list of frameworks that have already been signed.
        use_entitlements: If provided, indicates if the entitlements on the bundling
            target should be used for signing this path (useful to disable their use
            when signing frameworks within an iOS app).
Returns:
A `struct` that can be passed to `_signing_command_lines`.
"""
return struct(
path = path,
is_directory = is_directory,
signed_frameworks = signed_frameworks,
use_entitlements = use_entitlements,
)
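# Illustrative only, not part of the original file: a hypothetical value as
# produced by `_path_to_sign` for a Frameworks directory, mirroring how
# `_codesigning_command` below assembles its list of paths to sign. The
# App.app and Dep.framework names are made up for the example.
_EXAMPLE_PATH_TO_SIGN = _path_to_sign(
    "$WORK_DIR/Payload/App.app/Frameworks/",
    is_directory = True,
    signed_frameworks = ["$WORK_DIR/Payload/App.app/Frameworks/Dep.framework"],
    use_entitlements = False,
)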
def _provisioning_profile(ctx):
# Verify that a provisioning profile was provided for device builds on
# platforms that require it.
is_device = platform_support.is_device_build(ctx)
provisioning_profile = getattr(ctx.file, "provisioning_profile", None)
rule_descriptor = rule_support.rule_descriptor(ctx)
if (is_device and
rule_descriptor.requires_signing_for_device and
not provisioning_profile):
fail("The provisioning_profile attribute must be set for device " +
"builds on this platform (%s)." %
platform_support.platform_type(ctx))
return provisioning_profile
def _signing_command_lines(
ctx,
paths_to_sign,
entitlements_file):
"""Returns a multi-line string with codesign invocations for the bundle.
For any signing identity other than ad hoc, the identity is verified as being
valid in the keychain and an error will be emitted if the identity cannot be
used for signing for any reason.
Args:
ctx: The Starlark context.
paths_to_sign: A list of values returned from `path_to_sign` that indicate
paths that should be code-signed.
entitlements_file: The entitlements file to pass to codesign.
Returns:
A multi-line string with codesign invocations for the bundle.
"""
provisioning_profile = _provisioning_profile(ctx)
commands = []
# Use of the entitlements file is not recommended for the signing of frameworks. As long as
# this remains the case, we do have to split the "paths to sign" between multiple invocations
# of codesign.
for path_to_sign in paths_to_sign:
codesign_command = [ctx.executable._codesigningtool.path]
codesign_command.extend(_codesign_args_for_path(
ctx,
path_to_sign,
provisioning_profile,
entitlements_file,
))
commands.append(" ".join(codesign_command))
return "\n".join(commands)
def _should_sign_simulator_bundles(ctx):
"""Check if a main bundle should be codesigned.
    The Frameworks/* bundles should *always* be signed; this check only applies
    to the other bundles.
Args:
ctx: The Starlark context.
Returns:
True/False for if the bundle should be signed.
"""
rule_descriptor = rule_support.rule_descriptor(ctx)
if not rule_descriptor.skip_simulator_signing_allowed:
return True
# Default is to sign.
return defines.bool_value(
ctx,
"apple.codesign_simulator_bundles",
True,
)
def _should_sign_bundles(ctx):
should_sign_bundles = True
rule_descriptor = rule_support.rule_descriptor(ctx)
codesigning_exceptions = rule_descriptor.codesigning_exceptions
if (codesigning_exceptions ==
rule_support.codesigning_exceptions.sign_with_provisioning_profile):
# If the rule doesn't have a provisioning profile, do not sign the binary or its
# frameworks.
provisioning_profile = getattr(ctx.file, "provisioning_profile", None)
if not provisioning_profile:
should_sign_bundles = False
elif codesigning_exceptions == rule_support.codesigning_exceptions.skip_signing:
should_sign_bundles = False
elif codesigning_exceptions != rule_support.codesigning_exceptions.none:
fail("Internal Error: Encountered unsupported state for codesigning_exceptions.")
return should_sign_bundles
def _codesigning_args(
ctx,
entitlements,
full_archive_path,
is_framework = False):
"""Returns a set of codesigning arguments to be passed to the codesigning tool.
Args:
ctx: The rule context.
entitlements: The entitlements file to sign with. Can be None.
full_archive_path: The full path to the codesigning target.
is_framework: If the target is a framework. False by default.
Returns:
A list containing the arguments to pass to the codesigning tool.
"""
if not _should_sign_bundles(ctx):
return []
is_device = platform_support.is_device_build(ctx)
if not is_framework and not is_device and not _should_sign_simulator_bundles(ctx):
return []
return _codesign_args_for_path(
ctx,
_path_to_sign(full_archive_path),
provisioning_profile = _provisioning_profile(ctx),
entitlements_file = entitlements,
shell_quote = False,
)
def _codesigning_command(
ctx,
entitlements,
frameworks_path,
signed_frameworks,
bundle_path = ""):
"""Returns a codesigning command that includes framework embedded bundles.
Args:
ctx: The rule context.
entitlements: The entitlements file to sign with. Can be None.
frameworks_path: The location of the Frameworks directory, relative to the archive.
signed_frameworks: A depset containing each framework that has already been signed.
bundle_path: The location of the bundle, relative to the archive.
Returns:
A string containing the codesigning commands.
"""
if not _should_sign_bundles(ctx):
return ""
paths_to_sign = []
# The command returned by this function is executed as part of a bundling shell script.
# Each directory to be signed must be prefixed by $WORK_DIR, which is the variable in that
# script that contains the path to the directory where the bundle is being built.
if frameworks_path:
framework_root = paths.join("$WORK_DIR", frameworks_path) + "/"
full_signed_frameworks = []
for signed_framework in signed_frameworks.to_list():
full_signed_frameworks.append(paths.join(framework_root, signed_framework))
paths_to_sign.append(
_path_to_sign(
framework_root,
is_directory = True,
signed_frameworks = full_signed_frameworks,
use_entitlements = False,
),
)
is_device = platform_support.is_device_build(ctx)
if is_device or _should_sign_simulator_bundles(ctx):
path_to_sign = paths.join("$WORK_DIR", bundle_path)
paths_to_sign.append(
_path_to_sign(path_to_sign),
)
return _signing_command_lines(
ctx,
paths_to_sign = paths_to_sign,
entitlements_file = entitlements,
)
def _post_process_and_sign_archive_action(
ctx,
archive_codesigning_path,
frameworks_path,
input_archive,
output_archive,
output_archive_root_path,
signed_frameworks,
entitlements = None):
"""Post-processes and signs an archived bundle.
Args:
ctx: The target's rule context.
archive_codesigning_path: The codesigning path relative to the archive.
frameworks_path: The Frameworks path relative to the archive.
input_archive: The `File` representing the archive containing the bundle
that has not yet been processed or signed.
output_archive: The `File` representing the processed and signed archive.
output_archive_root_path: The `string` path to where the processed, uncompressed archive
should be located.
signed_frameworks: Depset containing each framework that has already been signed.
entitlements: Optional file representing the entitlements to sign with.
"""
input_files = [input_archive]
processing_tools = []
signing_command_lines = _codesigning_command(
ctx,
entitlements,
frameworks_path,
signed_frameworks,
bundle_path = archive_codesigning_path,
)
if signing_command_lines:
processing_tools.append(ctx.executable._codesigningtool)
if entitlements:
input_files.append(entitlements)
provisioning_profile = getattr(ctx.file, "provisioning_profile", None)
if provisioning_profile:
input_files.append(provisioning_profile)
ipa_post_processor = ctx.executable.ipa_post_processor
ipa_post_processor_path = ""
if ipa_post_processor:
processing_tools.append(ipa_post_processor)
ipa_post_processor_path = ipa_post_processor.path
# Only compress the IPA for optimized (release) builds or when requested.
# For debug builds, zip without compression, which will speed up the build.
compression_requested = defines.bool_value(ctx, "apple.compress_ipa", False)
should_compress = (ctx.var["COMPILATION_MODE"] == "opt") or compression_requested
# TODO(b/163217926): These are kept the same for the three different actions
# that could be run to ensure anything keying off these values continues to
# work. After some data is collected, the values likely can be revisited and
# changed.
mnemonic = "ProcessAndSign"
progress_message = "Processing and signing %s" % ctx.label.name
# If there is no work to be done, skip the processing/signing action, just
# copy the file over.
has_work = any([signing_command_lines, ipa_post_processor_path, should_compress])
if not has_work:
ctx.actions.run_shell(
inputs = [input_archive],
outputs = [output_archive],
mnemonic = mnemonic,
progress_message = progress_message,
command = "cp -p '%s' '%s'" % (input_archive.path, output_archive.path),
)
return
process_and_sign_template = intermediates.file(
ctx.actions,
ctx.label.name,
"process-and-sign-%s.sh" % hash(output_archive.path),
)
ctx.actions.expand_template(
template = ctx.file._process_and_sign_template,
output = process_and_sign_template,
is_executable = True,
substitutions = {
"%ipa_post_processor%": ipa_post_processor_path or "",
"%output_path%": output_archive.path,
"%should_compress%": "1" if should_compress else "",
"%signing_command_lines%": signing_command_lines,
"%unprocessed_archive_path%": input_archive.path,
"%work_dir%": output_archive_root_path,
},
)
# Build up some arguments for the script to allow logging to tell what work
# is being done within the action's script.
arguments = []
if signing_command_lines:
arguments.append("should_sign")
if ipa_post_processor_path:
arguments.append("should_process")
if should_compress:
arguments.append("should_compress")
run_on_darwin = any([signing_command_lines, ipa_post_processor_path])
if run_on_darwin:
legacy_actions.run(
ctx,
inputs = input_files,
outputs = [output_archive],
executable = process_and_sign_template,
arguments = arguments,
mnemonic = mnemonic,
progress_message = progress_message,
execution_requirements = {
# Added so that the output of this action is not cached remotely, in case multiple
# developers sign the same artifact with different identities.
"no-cache": "1",
# Unsure, but may be needed for keychain access, especially for files that live in
# $HOME.
"no-sandbox": "1",
},
tools = processing_tools,
)
else:
ctx.actions.run(
inputs = input_files,
outputs = [output_archive],
executable = process_and_sign_template,
arguments = arguments,
mnemonic = mnemonic,
progress_message = progress_message,
)
def _sign_binary_action(ctx, input_binary, output_binary):
"""Signs the input binary file, copying it into the given output binary file.
Args:
ctx: The target's rule context.
input_binary: The `File` representing the binary to be signed.
output_binary: The `File` representing signed binary.
"""
# It's not hermetic to sign the binary that was built by the apple_binary
# target that this rule takes as an input, so we copy it and then execute the
# code signing commands on that copy in the same action.
path_to_sign = _path_to_sign(output_binary.path)
signing_commands = _signing_command_lines(
ctx,
[path_to_sign],
None,
)
legacy_actions.run_shell(
ctx,
inputs = [input_binary],
outputs = [output_binary],
command = [
"/bin/bash",
"-c",
"cp {input_binary} {output_binary}".format(
input_binary = input_binary.path,
output_binary = output_binary.path,
) + "\n" + signing_commands,
],
mnemonic = "SignBinary",
execution_requirements = {
# Added so that the output of this action is not cached remotely, in case multiple
# developers sign the same artifact with different identities.
"no-cache": "1",
# Unsure, but may be needed for keychain access, especially for files that live in
# $HOME.
"no-sandbox": "1",
},
tools = [
ctx.executable._codesigningtool,
],
)
codesigning_support = struct(
codesigning_args = _codesigning_args,
codesigning_command = _codesigning_command,
post_process_and_sign_archive_action = _post_process_and_sign_archive_action,
provisioning_profile = _provisioning_profile,
sign_binary_action = _sign_binary_action,
)
|
the-stack_0_264 | import os.path as osp
import time
import joblib
import numpy as np
import tensorflow as tf
from baselines import logger
from collections import deque
from baselines.common import set_global_seeds, explained_variance
from baselines.common.runners import AbstractEnvRunner
from baselines.common import tf_util
from baselines.a2c.utils import discount_with_dones
from baselines.a2c.utils import Scheduler, make_path, find_trainable_variables
from baselines.a2c.utils import cat_entropy, mse
class Model(object):
def __init__(self, policy, ob_space, ac_space, nenvs, nsteps,
ent_coef=0.01, vf_coef=0.5, max_grad_norm=0.5, lr=7e-4,
alpha=0.99, epsilon=1e-5, total_timesteps=int(80e6), lrschedule='linear'):
sess = tf_util.make_session()
nbatch = nenvs*nsteps
A = tf.placeholder(tf.int32, [nbatch])
ADV = tf.placeholder(tf.float32, [nbatch])
R = tf.placeholder(tf.float32, [nbatch])
LR = tf.placeholder(tf.float32, [])
step_model = policy(sess, ob_space, ac_space, nenvs, 1, reuse=False)
train_model = policy(sess, ob_space, ac_space, nenvs*nsteps, nsteps, reuse=True)
neglogpac = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=train_model.pi, labels=A)
pg_loss = tf.reduce_mean(ADV * neglogpac)
vf_loss = tf.reduce_mean(mse(tf.squeeze(train_model.vf), R))
entropy = tf.reduce_mean(cat_entropy(train_model.pi))
loss = pg_loss - entropy*ent_coef + vf_loss * vf_coef
params = find_trainable_variables("model")
grads = tf.gradients(loss, params)
if max_grad_norm is not None:
grads, grad_norm = tf.clip_by_global_norm(grads, max_grad_norm)
grads = list(zip(grads, params))
trainer = tf.train.RMSPropOptimizer(learning_rate=LR, decay=alpha, epsilon=epsilon)
_train = trainer.apply_gradients(grads)
lr = Scheduler(v=lr, nvalues=total_timesteps, schedule=lrschedule)
def train(obs, states, rewards, masks, actions, values):
advs = rewards - values
for step in range(len(obs)):
cur_lr = lr.value()
td_map = {train_model.X:obs, A:actions, ADV:advs, R:rewards, LR:cur_lr}
if states is not None:
td_map[train_model.S] = states
td_map[train_model.M] = masks
policy_loss, value_loss, policy_entropy, _ = sess.run(
[pg_loss, vf_loss, entropy, _train],
td_map
)
return policy_loss, value_loss, policy_entropy
def save(save_path):
ps = sess.run(params)
make_path(osp.dirname(save_path))
joblib.dump(ps, save_path)
def load(load_path):
loaded_params = joblib.load(load_path)
restores = []
for p, loaded_p in zip(params, loaded_params):
restores.append(p.assign(loaded_p))
sess.run(restores)
self.train = train
self.train_model = train_model
self.step_model = step_model
self.step = step_model.step
self.value = step_model.value
self.initial_state = step_model.initial_state
self.save = save
self.load = load
tf.global_variables_initializer().run(session=sess)
class Runner(AbstractEnvRunner):
def __init__(self, env, model, nsteps=5, gamma=0.99):
super().__init__(env=env, model=model, nsteps=nsteps)
self.gamma = gamma
self.episodes_count = 0
def run(self):
mb_obs, mb_rewards, mb_actions, mb_values, mb_dones = [],[],[],[],[]
mb_states = self.states
epinfos = []
for n in range(self.nsteps):
actions, values, states, _ = self.model.step(self.obs, self.states, self.dones)
mb_obs.append(np.copy(self.obs))
mb_actions.append(actions)
mb_values.append(values)
mb_dones.append(self.dones)
obs, rewards, dones, infos = self.env.step(actions)
for info in infos:
maybeepinfo = info.get('episode')
if maybeepinfo:
self.episodes_count += 1
epinfos.append(maybeepinfo)
self.states = states
self.dones = dones
for n, done in enumerate(dones):
if done:
self.obs[n] = self.obs[n]*0
self.obs = obs
mb_rewards.append(rewards)
mb_dones.append(self.dones)
#batch of steps to batch of rollouts
mb_obs = np.asarray(mb_obs, dtype=np.uint8).swapaxes(1, 0).reshape(self.batch_ob_shape)
mb_rewards = np.asarray(mb_rewards, dtype=np.float32).swapaxes(1, 0)
mb_actions = np.asarray(mb_actions, dtype=np.int32).swapaxes(1, 0)
mb_values = np.asarray(mb_values, dtype=np.float32).swapaxes(1, 0)
        mb_dones = np.asarray(mb_dones, dtype=bool).swapaxes(1, 0)  # the np.bool alias was removed in NumPy 1.24
mb_masks = mb_dones[:, :-1]
mb_dones = mb_dones[:, 1:]
last_values = self.model.value(self.obs, self.states, self.dones).tolist()
#discount/bootstrap off value fn
for n, (rewards, dones, value) in enumerate(zip(mb_rewards, mb_dones, last_values)):
rewards = rewards.tolist()
dones = dones.tolist()
if dones[-1] == 0:
rewards = discount_with_dones(rewards+[value], dones+[0], self.gamma)[:-1]
else:
rewards = discount_with_dones(rewards, dones, self.gamma)
mb_rewards[n] = rewards
mb_rewards = mb_rewards.flatten()
mb_actions = mb_actions.flatten()
mb_values = mb_values.flatten()
mb_masks = mb_masks.flatten()
return mb_obs, mb_states, mb_rewards, mb_masks, mb_actions, mb_values, epinfos
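# Illustrative sketch, not part of the original file: how discount_with_dones
# bootstraps returns while resetting at episode boundaries. The numbers are
# hypothetical; within an episode R_t = r_t + gamma * R_{t+1}, and a done flag
# stops rewards from leaking backwards into the previous episode.
def _example_discounted_returns(gamma=0.99):
    rewards = [0.0, 0.0, 1.0, 5.0]
    dones = [0, 0, 1, 0]  # episode ends at step 2; step 3 starts a new one
    # -> [0.9801, 0.99, 1.0, 5.0]: the 5.0 reward does not bleed into steps 0-2
    return discount_with_dones(rewards, dones, gamma)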
def learn(policy, env, seed, nsteps=5, total_timesteps=int(80e6), vf_coef=0.5, ent_coef=0.01, max_grad_norm=0.5, lr=7e-4, lrschedule='linear', epsilon=1e-5, alpha=0.99, gamma=0.99, log_interval=100):
set_global_seeds(seed)
nenvs = env.num_envs
ob_space = env.observation_space
ac_space = env.action_space
model = Model(policy=policy, ob_space=ob_space, ac_space=ac_space, nenvs=nenvs, nsteps=nsteps, ent_coef=ent_coef, vf_coef=vf_coef,
max_grad_norm=max_grad_norm, lr=lr, alpha=alpha, epsilon=epsilon, total_timesteps=total_timesteps, lrschedule=lrschedule)
runner = Runner(env, model, nsteps=nsteps, gamma=gamma)
epinfobuf = deque(maxlen=100)
nbatch = nenvs*nsteps
tstart = time.time()
for update in range(1, total_timesteps//nbatch+1):
obs, states, rewards, masks, actions, values, epinfos = runner.run()
epinfobuf.extend(epinfos)
policy_loss, value_loss, policy_entropy = model.train(obs, states, rewards, masks, actions, values)
nseconds = time.time()-tstart
fps = int((update*nbatch)/nseconds)
if update % log_interval == 0 or update == 1:
ev = explained_variance(values, rewards)
logger.record_tabular("nupdates", update)
logger.record_tabular("total_timesteps", update*nbatch)
logger.record_tabular("fps", fps)
logger.logkv('eprewmean', safemean([epinfo['r'] for epinfo in epinfobuf]))
logger.logkv('eplenmean', safemean([epinfo['l'] for epinfo in epinfobuf]))
logger.record_tabular("policy_entropy", float(policy_entropy))
logger.record_tabular("value_loss", float(value_loss))
logger.record_tabular("explained_variance", float(ev))
logger.logkv('time_elapsed', nseconds)
logger.dump_tabular()
logger.logkv('episodes', runner.episodes_count)
env.close()
return model
def safemean(xs):
    # avoid a RuntimeWarning from np.mean on an empty episode buffer
    return np.nan if len(xs) == 0 else np.mean(xs)
|
the-stack_0_267 | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch BERT model."""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import copy
import json
import math
import logging
import tarfile
import tempfile
import shutil
import torch
from torch import nn
import torch.nn.functional as F
from torch.nn import CrossEntropyLoss
from torch.utils.checkpoint import checkpoint
from olfmlm.data_utils.file_utils import cached_path
logger = logging.getLogger(__name__)
PRETRAINED_MODEL_ARCHIVE_MAP = {
'bert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased.tar.gz",
'bert-large-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased.tar.gz",
'bert-base-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased.tar.gz",
'bert-large-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased.tar.gz",
'bert-base-multilingual-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-uncased.tar.gz",
'bert-base-multilingual-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-cased.tar.gz",
'bert-base-chinese': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-chinese.tar.gz",
}
CONFIG_NAME = 'bert_config.json'
WEIGHTS_NAME = 'pytorch_model.bin'
TF_WEIGHTS_NAME = 'model.ckpt'
def load_tf_weights_in_bert(model, tf_checkpoint_path):
""" Load tf checkpoints in a pytorch model
"""
try:
import re
import numpy as np
import tensorflow as tf
except ImportError:
print("Loading a TensorFlow models in PyTorch, requires TensorFlow to be installed. Please see "
"https://www.tensorflow.org/install/ for installation instructions.")
raise
tf_path = os.path.abspath(tf_checkpoint_path)
print("Converting TensorFlow checkpoint from {}".format(tf_path))
# Load weights from TF model
init_vars = tf.train.list_variables(tf_path)
names = []
arrays = []
for name, shape in init_vars:
print("Loading TF weight {} with shape {}".format(name, shape))
array = tf.train.load_variable(tf_path, name)
names.append(name)
arrays.append(array)
for name, array in zip(names, arrays):
name = name.split('/')
# adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v
# which are not required for using pretrained model
if any(n in ["adam_v", "adam_m"] for n in name):
print("Skipping {}".format("/".join(name)))
continue
pointer = model
for m_name in name:
if re.fullmatch(r'[A-Za-z]+_\d+', m_name):
l = re.split(r'_(\d+)', m_name)
else:
l = [m_name]
if l[0] == 'kernel' or l[0] == 'gamma':
pointer = getattr(pointer, 'weight')
elif l[0] == 'output_bias' or l[0] == 'beta':
pointer = getattr(pointer, 'bias')
elif l[0] == 'output_weights':
pointer = getattr(pointer, 'weight')
else:
pointer = getattr(pointer, l[0])
if len(l) >= 2:
num = int(l[1])
pointer = pointer[num]
if m_name[-11:] == '_embeddings':
pointer = getattr(pointer, 'weight')
elif m_name == 'kernel':
array = np.transpose(array)
try:
assert pointer.shape == array.shape
except AssertionError as e:
e.args += (pointer.shape, array.shape)
raise
print("Initialize PyTorch weight {}".format(name))
pointer.data = torch.from_numpy(array)
return model
def gelu(x):
"""Implementation of the gelu activation function.
For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
"""
return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
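# Illustrative only, not part of the original file: the tanh-based GPT-style
# approximation quoted in the docstring above, for comparison with the exact
# erf formulation used by gelu().
def gelu_gpt_approx(x):
    return 0.5 * x * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))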
def swish(x):
return x * torch.sigmoid(x)
ACT2FN = {"gelu": gelu, "relu": torch.nn.functional.relu, "swish": swish}
class BertConfig(object):
"""Configuration class to store the configuration of a `BertModel`.
"""
def __init__(self,
vocab_size_or_config_json_file,
hidden_size=768,
num_hidden_layers=12,
num_attention_heads=12,
intermediate_size=3072,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_position_embeddings=512,
type_vocab_size=2,
initializer_range=0.02,
fp32_layernorm=True,
fp32_embedding=True,
fp32_tokentypes=False,
layernorm_epsilon=1e-12):
"""Constructs BertConfig.
Args:
vocab_size_or_config_json_file: Vocabulary size of `inputs_ids` in `BertModel`.
hidden_size: Size of the encoder layers and the pooler layer.
num_hidden_layers: Number of hidden layers in the Transformer encoder.
num_attention_heads: Number of attention heads for each attention layer in
the Transformer encoder.
intermediate_size: The size of the "intermediate" (i.e., feed-forward)
layer in the Transformer encoder.
hidden_act: The non-linear activation function (function or string) in the
encoder and pooler. If string, "gelu", "relu" and "swish" are supported.
            hidden_dropout_prob: The dropout probability for all fully connected
layers in the embeddings, encoder, and pooler.
attention_probs_dropout_prob: The dropout ratio for the attention
probabilities.
max_position_embeddings: The maximum sequence length that this model might
ever be used with. Typically set this to something large just in case
(e.g., 512 or 1024 or 2048).
type_vocab_size: The vocabulary size of the `token_type_ids` passed into
`BertModel`.
            initializer_range: The stddev of the truncated_normal_initializer for
initializing all weight matrices.
"""
if isinstance(vocab_size_or_config_json_file, str):
with open(vocab_size_or_config_json_file, "r", encoding='utf-8') as reader:
json_config = json.loads(reader.read())
for key, value in json_config.items():
self.__dict__[key] = value
elif isinstance(vocab_size_or_config_json_file, int):
self.vocab_size = vocab_size_or_config_json_file
self.hidden_size = hidden_size
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.hidden_act = hidden_act
self.intermediate_size = intermediate_size
self.hidden_dropout_prob = hidden_dropout_prob
self.attention_probs_dropout_prob = attention_probs_dropout_prob
self.max_position_embeddings = max_position_embeddings
self.type_vocab_size = type_vocab_size
self.initializer_range = initializer_range
self.fp32_layernorm = fp32_layernorm
self.fp32_embedding = fp32_embedding
self.layernorm_epsilon = layernorm_epsilon
self.fp32_tokentypes = fp32_tokentypes
else:
raise ValueError("First argument must be either a vocabulary size (int)"
"or the path to a pretrained model config file (str)")
@classmethod
def from_dict(cls, json_object):
"""Constructs a `BertConfig` from a Python dictionary of parameters."""
config = BertConfig(vocab_size_or_config_json_file=-1)
for key, value in json_object.items():
config.__dict__[key] = value
return config
@classmethod
def from_json_file(cls, json_file):
"""Constructs a `BertConfig` from a json file of parameters."""
with open(json_file, "r", encoding='utf-8') as reader:
text = reader.read()
return cls.from_dict(json.loads(text))
def __repr__(self):
return str(self.to_json_string())
def to_dict(self):
"""Serializes this instance to a Python dictionary."""
output = copy.deepcopy(self.__dict__)
return output
def to_json_string(self):
"""Serializes this instance to a JSON string."""
return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n"
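# Illustrative sketch, not part of the original file: the two usual ways to
# build a config -- explicit arguments or a JSON round-trip. 30522 is the
# standard bert-base-uncased vocabulary size.
def _example_configs():
    config = BertConfig(vocab_size_or_config_json_file=30522, hidden_size=768,
                        num_hidden_layers=12, num_attention_heads=12)
    same_config = BertConfig.from_dict(json.loads(config.to_json_string()))
    return config, same_config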
# try:
# from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm
# except ImportError:
# print("Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.")
# class BertLayerNorm(nn.Module):
# def __init__(self, hidden_size, eps=1e-12):
# """Construct a layernorm module in the TF style (epsilon inside the square root).
# """
# super(BertLayerNorm, self).__init__()
# self.weight = nn.Parameter(torch.ones(hidden_size))
# self.bias = nn.Parameter(torch.zeros(hidden_size))
# self.variance_epsilon = eps
# def forward(self, x):
# u = x.mean(-1, keepdim=True)
# s = (x - u).pow(2).mean(-1, keepdim=True)
# x = (x - u) / torch.sqrt(s + self.variance_epsilon)
# return self.weight * x + self.bias
class BertLayerNorm(nn.Module):
def __init__(self, hidden_size, eps=1e-12):
"""Construct a layernorm module in the TF style (epsilon inside the square root).
"""
super(BertLayerNorm, self).__init__()
self.weight = nn.Parameter(torch.ones(hidden_size))
self.bias = nn.Parameter(torch.zeros(hidden_size))
self.variance_epsilon = eps
def forward(self, x):
u = x.mean(-1, keepdim=True)
s = (x - u).pow(2).mean(-1, keepdim=True)
x = (x - u) / torch.sqrt(s + self.variance_epsilon)
return self.weight * x + self.bias
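# Illustrative check, not part of the original file: BertLayerNorm normalizes
# over the last axis as y = (x - mean) / sqrt(var + eps) * weight + bias, which
# matches torch.nn.LayerNorm with the same epsilon and default affine init.
def _check_layernorm_equivalence():
    x = torch.randn(2, 4, 8)
    return torch.allclose(BertLayerNorm(8)(x), nn.LayerNorm(8, eps=1e-12)(x), atol=1e-6)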
class BertEmbeddings(nn.Module):
"""Construct the embeddings from word, position and token_type embeddings.
"""
def __init__(self, config):
super(BertEmbeddings, self).__init__()
self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=0)
self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)
self.task_type_embeddings = nn.Embedding(config.num_tasks, config.hidden_size)
# self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
# any TensorFlow checkpoint file
        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layernorm_epsilon)  # this file's BertConfig defines layernorm_epsilon, not layer_norm_eps
self.dropout = nn.Dropout(config.hidden_dropout_prob)
def forward(self, input_ids, token_type_ids=None, task_ids=None):
seq_length = input_ids.size(1)
position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device)
position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
if token_type_ids is None:
token_type_ids = torch.zeros_like(input_ids)
if task_ids is None:
task_ids = torch.zeros_like(input_ids)
words_embeddings = self.word_embeddings(input_ids)
position_embeddings = self.position_embeddings(position_ids)
token_type_embeddings = self.token_type_embeddings(token_type_ids)
task_embeddings = self.task_type_embeddings(task_ids)
embeddings = words_embeddings + position_embeddings + token_type_embeddings + task_embeddings
embeddings = self.LayerNorm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings
class BertSelfAttention(nn.Module):
def __init__(self, config):
super(BertSelfAttention, self).__init__()
if config.hidden_size % config.num_attention_heads != 0:
raise ValueError(
"The hidden size (%d) is not a multiple of the number of attention "
"heads (%d)" % (config.hidden_size, config.num_attention_heads))
self.num_attention_heads = config.num_attention_heads
self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
self.all_head_size = self.num_attention_heads * self.attention_head_size
self.query = nn.Linear(config.hidden_size, self.all_head_size)
self.key = nn.Linear(config.hidden_size, self.all_head_size)
self.value = nn.Linear(config.hidden_size, self.all_head_size)
self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
def transpose_for_scores(self, x):
new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
x = x.view(*new_x_shape)
return x.permute(0, 2, 1, 3)
def forward(self, hidden_states, attention_mask):
mixed_query_layer = self.query(hidden_states)
mixed_key_layer = self.key(hidden_states)
mixed_value_layer = self.value(hidden_states)
query_layer = self.transpose_for_scores(mixed_query_layer)
key_layer = self.transpose_for_scores(mixed_key_layer)
value_layer = self.transpose_for_scores(mixed_value_layer)
# Take the dot product between "query" and "key" to get the raw attention scores.
attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
attention_scores = attention_scores / math.sqrt(self.attention_head_size)
# Apply the attention mask is (precomputed for all layers in BertModel forward() function)
attention_scores = attention_scores + attention_mask
# Normalize the attention scores to probabilities.
attention_probs = nn.Softmax(dim=-1)(attention_scores)
# This is actually dropping out entire tokens to attend to, which might
# seem a bit unusual, but is taken from the original Transformer paper.
attention_probs = self.dropout(attention_probs)
previous_type = attention_probs.type()
context_layer = torch.matmul(attention_probs, value_layer)
context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
context_layer = context_layer.view(*new_context_layer_shape)
return context_layer
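# Illustrative sketch, not part of the original file: the tensor shapes flowing
# through BertSelfAttention.forward for a small hypothetical configuration. The
# additive attention_mask (0.0 to keep a token, a large negative value to drop
# it) must broadcast to (batch, heads, seq, seq).
def _attention_shape_walkthrough(batch=2, seq=8):
    config = BertConfig(vocab_size_or_config_json_file=30522,
                        hidden_size=768, num_attention_heads=12)
    attention = BertSelfAttention(config)
    hidden_states = torch.zeros(batch, seq, config.hidden_size)
    attention_mask = torch.zeros(batch, 1, 1, seq)
    context = attention(hidden_states, attention_mask)  # (batch, seq, hidden_size)
    assert context.shape == (batch, seq, config.hidden_size)
    return context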
class BertSelfOutput(nn.Module):
def __init__(self, config):
super(BertSelfOutput, self).__init__()
self.dense = nn.Linear(config.hidden_size, config.hidden_size)
self.fp32_layernorm = config.fp32_layernorm
self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layernorm_epsilon)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
def forward(self, hidden_states, input_tensor):
hidden_states = self.dense(hidden_states)
hidden_states = self.dropout(hidden_states)
ln_input = hidden_states + input_tensor
previous_type = ln_input.type()
if self.fp32_layernorm:
ln_input = ln_input.float()
hidden_states = self.LayerNorm(ln_input)
if self.fp32_layernorm:
hidden_states = hidden_states.type(previous_type)
return hidden_states
class BertAttention(nn.Module):
def __init__(self, config):
super(BertAttention, self).__init__()
self.self = BertSelfAttention(config)
self.output = BertSelfOutput(config)
def forward(self, input_tensor, attention_mask):
self_output = self.self(input_tensor, attention_mask)
attention_output = self.output(self_output, input_tensor)
return attention_output
class BertIntermediate(nn.Module):
def __init__(self, config):
super(BertIntermediate, self).__init__()
self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
self.intermediate_act_fn = ACT2FN[config.hidden_act] \
if isinstance(config.hidden_act, str) else config.hidden_act
def forward(self, hidden_states):
hidden_states = self.dense(hidden_states)
hidden_states = self.intermediate_act_fn(hidden_states)
return hidden_states
class BertOutput(nn.Module):
def __init__(self, config):
super(BertOutput, self).__init__()
self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
self.fp32_layernorm = config.fp32_layernorm
self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layernorm_epsilon)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
def forward(self, hidden_states, input_tensor):
hidden_states = self.dense(hidden_states)
hidden_states = self.dropout(hidden_states)
ln_input = hidden_states + input_tensor
previous_type = ln_input.type()
if self.fp32_layernorm:
ln_input = ln_input.float()
hidden_states = self.LayerNorm(ln_input)
if self.fp32_layernorm:
hidden_states = hidden_states.type(previous_type)
return hidden_states
class BertLayer(nn.Module):
def __init__(self, config):
super(BertLayer, self).__init__()
self.attention = BertAttention(config)
self.intermediate = BertIntermediate(config)
self.output = BertOutput(config)
def forward(self, hidden_states, attention_mask):
attention_output = self.attention(hidden_states, attention_mask)
intermediate_output = self.intermediate(attention_output)
layer_output = self.output(intermediate_output, attention_output)
return layer_output
class BertEncoder(nn.Module):
def __init__(self, config):
super(BertEncoder, self).__init__()
layer = BertLayer(config)
self.layer = nn.ModuleList([copy.deepcopy(layer) for _ in range(config.num_hidden_layers)])
# def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True):
# all_encoder_layers = []
# for layer_module in self.layer:
# hidden_states = layer_module(hidden_states, attention_mask)
# if output_all_encoded_layers:
# all_encoder_layers.append(hidden_states)
# if not output_all_encoded_layers:
# all_encoder_layers.append(hidden_states)
# return all_encoder_layers
def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True, checkpoint_activations=False):
all_encoder_layers = []
def custom(start, end):
def custom_forward(*inputs):
layers = self.layer[start:end]
x_ = inputs[0]
for layer in layers:
x_ = layer(x_, inputs[1])
return x_
return custom_forward
if checkpoint_activations:
l = 0
num_layers = len(self.layer)
chunk_length = math.ceil(math.sqrt(num_layers))
while l < num_layers:
hidden_states = checkpoint(custom(l, l+chunk_length), hidden_states, attention_mask*1)
l += chunk_length
# decoder layers
else:
for i,layer_module in enumerate(self.layer):
hidden_states = layer_module(hidden_states, attention_mask)
if output_all_encoded_layers:
all_encoder_layers.append(hidden_states)
if not output_all_encoded_layers or checkpoint_activations:
all_encoder_layers.append(hidden_states)
return all_encoder_layers
class BertPooler(nn.Module):
def __init__(self, config):
super(BertPooler, self).__init__()
self.dense = nn.Linear(config.hidden_size, config.hidden_size)
self.activation = nn.Tanh()
def forward(self, hidden_states):
# We "pool" the model by simply taking the hidden state corresponding
# to the first token.
first_token_tensor = hidden_states[:, 0]
pooled_output = self.dense(first_token_tensor)
pooled_output = self.activation(pooled_output)
return pooled_output
class BertPredictionHeadTransform(nn.Module):
def __init__(self, config):
super(BertPredictionHeadTransform, self).__init__()
self.dense = nn.Linear(config.hidden_size, config.hidden_size)
self.transform_act_fn = ACT2FN[config.hidden_act] \
if isinstance(config.hidden_act, str) else config.hidden_act
self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layernorm_epsilon)
self.fp32_layernorm = config.fp32_layernorm
def forward(self, hidden_states):
hidden_states = self.dense(hidden_states)
hidden_states = self.transform_act_fn(hidden_states)
previous_type = hidden_states.type()
if self.fp32_layernorm:
hidden_states = hidden_states.float()
hidden_states = self.LayerNorm(hidden_states)
if self.fp32_layernorm:
hidden_states = hidden_states.type(previous_type)
return hidden_states
class BertLMPredictionHead(nn.Module):
def __init__(self, config, bert_model_embedding_weights):
super(BertLMPredictionHead, self).__init__()
self.transform = BertPredictionHeadTransform(config)
# The output weights are the same as the input embeddings, but there is
# an output-only bias for each token.
self.decoder = nn.Linear(bert_model_embedding_weights.size(1),
bert_model_embedding_weights.size(0),
bias=False)
self.decoder.weight = bert_model_embedding_weights
self.bias = nn.Parameter(torch.zeros(bert_model_embedding_weights.size(0)))
def forward(self, hidden_states):
hidden_states = self.transform(hidden_states)
hidden_states = self.decoder(hidden_states) + self.bias
return hidden_states
class BertOnlyMLMHead(nn.Module):
def __init__(self, config, bert_model_embedding_weights):
super(BertOnlyMLMHead, self).__init__()
self.predictions = BertLMPredictionHead(config, bert_model_embedding_weights)
def forward(self, sequence_output):
prediction_scores = self.predictions(sequence_output)
return prediction_scores
class BertOnlyNSPHead(nn.Module):
def __init__(self, config):
super(BertOnlyNSPHead, self).__init__()
self.seq_relationship = nn.Linear(config.hidden_size, 2)
def forward(self, pooled_output):
seq_relationship_score = self.seq_relationship(pooled_output)
return seq_relationship_score
class BertPreTrainingHeads(nn.Module):
def __init__(self, config, bert_model_embedding_weights):
super(BertPreTrainingHeads, self).__init__()
self.predictions = BertLMPredictionHead(config, bert_model_embedding_weights)
self.seq_relationship = nn.Linear(config.hidden_size, 2)
def forward(self, sequence_output, pooled_output):
prediction_scores = self.predictions(sequence_output)
for p in self.seq_relationship.parameters():
if p is None:
continue
pooled_output = pooled_output.type_as(p)
seq_relationship_score = self.seq_relationship(pooled_output)
return prediction_scores, seq_relationship_score
class PreTrainedBertModel(nn.Module):
""" An abstract class to handle weights initialization and
        a simple interface for downloading and loading pretrained models.
"""
def __init__(self, config, *inputs, **kwargs):
super(PreTrainedBertModel, self).__init__()
if not isinstance(config, BertConfig):
raise ValueError(
"Parameter config in `{}(config)` should be an instance of class `BertConfig`. "
"To create a model from a Google pretrained model use "
"`model = {}.from_pretrained(PRETRAINED_MODEL_NAME)`".format(
self.__class__.__name__, self.__class__.__name__
))
self.config = config
def init_bert_weights(self, module):
""" Initialize the weights.
"""
if isinstance(module, (nn.Linear, nn.Embedding)):
# Slightly different from the TF version which uses truncated_normal for initialization
# cf https://github.com/pytorch/pytorch/pull/5617
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
elif isinstance(module, BertLayerNorm):
module.bias.data.zero_()
module.weight.data.fill_(1.0)
if isinstance(module, nn.Linear) and module.bias is not None:
module.bias.data.zero_()
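    # init_bert_weights is meant to be applied recursively with
    # nn.Module.apply, as the subclasses below do in __init__:
    #   self.apply(self.init_bert_weights)
    # Calling model.apply(model.init_bert_weights) again would re-initialize
    # all Linear/Embedding/LayerNorm weights in place.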
@classmethod
def from_pretrained(cls, pretrained_model_name, state_dict=None, cache_dir=None,
fp32_layernorm=False, fp32_embedding=False, layernorm_epsilon=1e-12,
fp32_tokentypes=False, *inputs, **kwargs):
"""
Instantiate a PreTrainedBertModel from a pre-trained model file or a pytorch state dict.
Download and cache the pre-trained model file if needed.
Params:
pretrained_model_name: either:
- a str with the name of a pre-trained model to load selected in the list of:
. `bert-base-uncased`
. `bert-large-uncased`
. `bert-base-cased`
. `bert-large-cased`
. `bert-base-multilingual-uncased`
. `bert-base-multilingual-cased`
. `bert-base-chinese`
- a path or url to a pretrained model archive containing:
. `bert_config.json` a configuration file for the model
. `pytorch_model.bin` a PyTorch dump of a BertForPreTraining instance
cache_dir: an optional path to a folder in which the pre-trained models will be cached.
            state_dict: an optional state dictionary (collections.OrderedDict object) to use instead of Google pre-trained models
*inputs, **kwargs: additional input for the specific Bert class
(ex: num_labels for BertForSequenceClassification)
"""
if pretrained_model_name in PRETRAINED_MODEL_ARCHIVE_MAP:
archive_file = PRETRAINED_MODEL_ARCHIVE_MAP[pretrained_model_name]
else:
archive_file = pretrained_model_name
# redirect to the cache, if necessary
try:
resolved_archive_file = cached_path(archive_file, cache_dir=cache_dir)
except FileNotFoundError:
logger.error(
"Model name '{}' was not found in model name list ({}). "
"We assumed '{}' was a path or url but couldn't find any file "
"associated to this path or url.".format(
pretrained_model_name,
', '.join(PRETRAINED_MODEL_ARCHIVE_MAP.keys()),
archive_file))
return None
if resolved_archive_file == archive_file:
logger.info("loading archive file {}".format(archive_file))
else:
logger.info("loading archive file {} from cache at {}".format(
archive_file, resolved_archive_file))
tempdir = None
if os.path.isdir(resolved_archive_file):
serialization_dir = resolved_archive_file
else:
# Extract archive to temp dir
tempdir = tempfile.mkdtemp()
logger.info("extracting archive file {} to temp dir {}".format(
resolved_archive_file, tempdir))
with tarfile.open(resolved_archive_file, 'r:gz') as archive:
archive.extractall(tempdir)
serialization_dir = tempdir
# Load config
config_file = os.path.join(serialization_dir, CONFIG_NAME)
config = BertConfig.from_json_file(config_file)
config.fp32_layernorm = fp32_layernorm
config.fp32_embedding = fp32_embedding
config.layernorm_epsilon = layernorm_epsilon
config.fp32_tokentypes = fp32_tokentypes
logger.info("Model config {}".format(config))
# Instantiate model.
model = cls(config, *inputs, **kwargs)
if state_dict is None:
weights_path = os.path.join(serialization_dir, WEIGHTS_NAME)
state_dict = torch.load(weights_path)
old_keys = []
new_keys = []
for key in state_dict.keys():
new_key = None
if 'gamma' in key:
new_key = key.replace('gamma', 'weight')
if 'beta' in key:
new_key = key.replace('beta', 'bias')
if new_key:
old_keys.append(key)
new_keys.append(new_key)
for old_key, new_key in zip(old_keys, new_keys):
state_dict[new_key] = state_dict.pop(old_key)
missing_keys = []
unexpected_keys = []
error_msgs = []
# copy state_dict so _load_from_state_dict can modify it
metadata = getattr(state_dict, '_metadata', None)
state_dict = state_dict.copy()
if metadata is not None:
state_dict._metadata = metadata
def load(module, prefix=''):
local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
module._load_from_state_dict(
state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs)
for name, child in module._modules.items():
if child is not None:
load(child, prefix + name + '.')
load(model, prefix='' if hasattr(model, 'bert') else 'bert.')
if len(missing_keys) > 0:
logger.info("Weights of {} not initialized from pretrained model: {}".format(
model.__class__.__name__, missing_keys))
if len(unexpected_keys) > 0:
logger.info("Weights from pretrained model not used in {}: {}".format(
model.__class__.__name__, unexpected_keys))
if tempdir:
# Clean up temp dir
shutil.rmtree(tempdir)
return model
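# Loading sketch (hedged; assumes the 'bert-base-uncased' archive is reachable
# via PRETRAINED_MODEL_ARCHIVE_MAP or already cached):
#   model = BertModel.from_pretrained('bert-base-uncased')
#   clf = BertForSequenceClassification.from_pretrained(
#       'bert-base-uncased', num_labels=3)   # extra kwargs go to the subclass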
class BertModel(PreTrainedBertModel):
"""BERT model ("Bidirectional Embedding Representations from a Transformer").
Params:
config: a BertConfig class instance with the configuration to build a new model
Inputs:
`input_ids`: a torch.LongTensor of shape [batch_size, sequence_length]
            with the word token indices in the vocabulary (see the tokens preprocessing logic in the scripts
`extract_features.py`, `run_classifier.py` and `run_squad.py`)
`token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token
types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to
a `sentence B` token (see BERT paper for more details).
`attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices
selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max
input sequence length in the current batch. It's the mask that we typically use for attention when
a batch has varying length sentences.
`output_all_encoded_layers`: boolean which controls the content of the `encoded_layers` output as described below. Default: `True`.
Outputs: Tuple of (encoded_layers, pooled_output)
        `encoded_layers`: controlled by the `output_all_encoded_layers` argument:
- `output_all_encoded_layers=True`: outputs a list of the full sequences of encoded-hidden-states at the end
of each attention block (i.e. 12 full sequences for BERT-base, 24 for BERT-large), each
encoded-hidden-state is a torch.FloatTensor of size [batch_size, sequence_length, hidden_size],
- `output_all_encoded_layers=False`: outputs only the full sequence of hidden-states corresponding
to the last attention block of shape [batch_size, sequence_length, hidden_size],
        `pooled_output`: a torch.FloatTensor of size [batch_size, hidden_size] which is the output of a
            classifier pretrained on top of the hidden state associated to the first token of the
            input (`CLS`) to train on the Next-Sentence task (see BERT's paper).
Example usage:
```python
# Already been converted into WordPiece token ids
input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]])
input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]])
token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]])
config = modeling.BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768,
num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072)
model = modeling.BertModel(config=config)
all_encoder_layers, pooled_output = model(input_ids, token_type_ids, input_mask)
```
"""
def __init__(self, config):
super(BertModel, self).__init__(config)
self.embeddings = BertEmbeddings(config)
self.encoder = BertEncoder(config)
self.pooler = BertPooler(config)
self.apply(self.init_bert_weights)
def forward(self, input_ids, token_type_ids=None, task_ids=None, attention_mask=None, output_all_encoded_layers=True, checkpoint_activations=False):
if attention_mask is None:
attention_mask = torch.ones_like(input_ids)
if token_type_ids is None:
token_type_ids = torch.zeros_like(input_ids)
if task_ids is None:
task_ids = torch.zeros_like(input_ids)
# We create a 3D attention mask from a 2D tensor mask.
# Sizes are [batch_size, 1, 1, to_seq_length]
# So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
        # this attention mask is simpler than the triangular masking of causal attention
        # used in OpenAI GPT; we just need to prepare the broadcast dimension here.
extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)
# Since attention_mask is 1.0 for positions we want to attend and 0.0 for
# masked positions, this operation will create a tensor which is 0.0 for
# positions we want to attend and -10000.0 for masked positions.
# Since we are adding it to the raw scores before the softmax, this is
# effectively the same as removing these entirely.
extended_attention_mask = extended_attention_mask.to(dtype=next(self.encoder.parameters()).dtype)
extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
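        # Worked example of the arithmetic above: a mask row [1, 1, 0] becomes
        # additive scores [0.0, 0.0, -10000.0], so after the softmax the padded
        # position receives an effectively zero attention weight.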
embedding_output = self.embeddings(input_ids, token_type_ids, task_ids)
encoded_layers = self.encoder(embedding_output,
extended_attention_mask,
output_all_encoded_layers=output_all_encoded_layers,
checkpoint_activations=checkpoint_activations)
sequence_output = encoded_layers[-1]
for p in self.pooler.parameters():
if p is None:
continue
sequence_output = sequence_output.type_as(p)
break
pooled_output = self.pooler(sequence_output)
if not output_all_encoded_layers or checkpoint_activations:
encoded_layers = encoded_layers[-1]
return encoded_layers, pooled_output
class BertForPreTraining(PreTrainedBertModel):
"""BERT model with pre-training heads.
This module comprises the BERT model followed by the two pre-training heads:
- the masked language modeling head, and
- the next sentence classification head.
Params:
config: a BertConfig class instance with the configuration to build a new model.
Inputs:
`input_ids`: a torch.LongTensor of shape [batch_size, sequence_length]
            with the word token indices in the vocabulary (see the tokens preprocessing logic in the scripts
`extract_features.py`, `run_classifier.py` and `run_squad.py`)
`token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token
types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to
a `sentence B` token (see BERT paper for more details).
`attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices
selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max
input sequence length in the current batch. It's the mask that we typically use for attention when
a batch has varying length sentences.
`masked_lm_labels`: masked language modeling labels: torch.LongTensor of shape [batch_size, sequence_length]
with indices selected in [-1, 0, ..., vocab_size]. All labels set to -1 are ignored (masked), the loss
is only computed for the labels set in [0, ..., vocab_size]
`next_sentence_label`: next sentence classification loss: torch.LongTensor of shape [batch_size]
with indices selected in [0, 1].
0 => next sentence is the continuation, 1 => next sentence is a random sentence.
Outputs:
if `masked_lm_labels` and `next_sentence_label` are not `None`:
Outputs the total_loss which is the sum of the masked language modeling loss and the next
sentence classification loss.
if `masked_lm_labels` or `next_sentence_label` is `None`:
Outputs a tuple comprising
- the masked language modeling logits of shape [batch_size, sequence_length, vocab_size], and
- the next sentence classification logits of shape [batch_size, 2].
Example usage:
```python
# Already been converted into WordPiece token ids
input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]])
input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]])
token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]])
config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768,
num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072)
model = BertForPreTraining(config)
masked_lm_logits_scores, seq_relationship_logits = model(input_ids, token_type_ids, input_mask)
```
"""
def __init__(self, config):
super(BertForPreTraining, self).__init__(config)
self.bert = BertModel(config)
self.cls = BertPreTrainingHeads(config, self.bert.embeddings.word_embeddings.weight)
self.apply(self.init_bert_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None, masked_lm_labels=None, next_sentence_label=None, checkpoint_activations=False):
sequence_output, pooled_output = self.bert(input_ids, token_type_ids, attention_mask,
output_all_encoded_layers=False, checkpoint_activations=checkpoint_activations)
prediction_scores, seq_relationship_score = self.cls(sequence_output, pooled_output)
if masked_lm_labels is not None and next_sentence_label is not None:
loss_fct = CrossEntropyLoss(ignore_index=-1)
masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), masked_lm_labels.view(-1))
next_sentence_loss = loss_fct(seq_relationship_score.view(-1, 2), next_sentence_label.view(-1))
total_loss = masked_lm_loss + next_sentence_loss
return total_loss
else:
return prediction_scores, seq_relationship_score
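# Pre-training step sketch (illustrative; tensors shaped as in the docstring,
# with -1 marking MLM positions that should not be scored):
#   model = BertForPreTraining(config)
#   loss = model(input_ids, token_type_ids, input_mask,
#                masked_lm_labels=mlm_labels,
#                next_sentence_label=nsp_labels)
#   loss.backward()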
class BertForMaskedLM(PreTrainedBertModel):
"""BERT model with the masked language modeling head.
This module comprises the BERT model followed by the masked language modeling head.
Params:
config: a BertConfig class instance with the configuration to build a new model.
Inputs:
`input_ids`: a torch.LongTensor of shape [batch_size, sequence_length]
            with the word token indices in the vocabulary (see the tokens preprocessing logic in the scripts
`extract_features.py`, `run_classifier.py` and `run_squad.py`)
`token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token
types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to
a `sentence B` token (see BERT paper for more details).
`attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices
selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max
input sequence length in the current batch. It's the mask that we typically use for attention when
a batch has varying length sentences.
`masked_lm_labels`: masked language modeling labels: torch.LongTensor of shape [batch_size, sequence_length]
with indices selected in [-1, 0, ..., vocab_size]. All labels set to -1 are ignored (masked), the loss
is only computed for the labels set in [0, ..., vocab_size]
Outputs:
if `masked_lm_labels` is not `None`:
Outputs the masked language modeling loss.
if `masked_lm_labels` is `None`:
Outputs the masked language modeling logits of shape [batch_size, sequence_length, vocab_size].
Example usage:
```python
# Already been converted into WordPiece token ids
input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]])
input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]])
token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]])
config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768,
num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072)
model = BertForMaskedLM(config)
masked_lm_logits_scores = model(input_ids, token_type_ids, input_mask)
```
"""
def __init__(self, config):
super(BertForMaskedLM, self).__init__(config)
self.bert = BertModel(config)
self.cls = BertOnlyMLMHead(config, self.bert.embeddings.word_embeddings.weight)
self.apply(self.init_bert_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None, masked_lm_labels=None, checkpoint_activations=False):
sequence_output, _ = self.bert(input_ids, token_type_ids, attention_mask,
output_all_encoded_layers=False, checkpoint_activations=checkpoint_activations)
prediction_scores = self.cls(sequence_output)
if masked_lm_labels is not None:
loss_fct = CrossEntropyLoss(ignore_index=-1)
masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), masked_lm_labels.view(-1))
return masked_lm_loss
else:
return prediction_scores
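# MLM label sketch: clone the inputs and set every position that should not
# be scored to -1, the ignore_index used by the loss above. `masked_positions`
# is a hypothetical boolean mask of the positions that were masked out:
#   mlm_labels = input_ids.clone()
#   mlm_labels[~masked_positions] = -1
#   loss = model(input_ids, token_type_ids, input_mask, masked_lm_labels=mlm_labels)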
class BertForNextSentencePrediction(PreTrainedBertModel):
"""BERT model with next sentence prediction head.
This module comprises the BERT model followed by the next sentence classification head.
Params:
config: a BertConfig class instance with the configuration to build a new model.
Inputs:
`input_ids`: a torch.LongTensor of shape [batch_size, sequence_length]
            with the word token indices in the vocabulary (see the tokens preprocessing logic in the scripts
`extract_features.py`, `run_classifier.py` and `run_squad.py`)
`token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token
types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to
a `sentence B` token (see BERT paper for more details).
`attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices
selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max
input sequence length in the current batch. It's the mask that we typically use for attention when
a batch has varying length sentences.
`next_sentence_label`: next sentence classification loss: torch.LongTensor of shape [batch_size]
with indices selected in [0, 1].
0 => next sentence is the continuation, 1 => next sentence is a random sentence.
Outputs:
if `next_sentence_label` is not `None`:
            Outputs the next sentence classification loss.
if `next_sentence_label` is `None`:
Outputs the next sentence classification logits of shape [batch_size, 2].
Example usage:
```python
# Already been converted into WordPiece token ids
input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]])
input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]])
token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]])
config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768,
num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072)
model = BertForNextSentencePrediction(config)
seq_relationship_logits = model(input_ids, token_type_ids, input_mask)
```
"""
def __init__(self, config):
super(BertForNextSentencePrediction, self).__init__(config)
self.bert = BertModel(config)
self.cls = BertOnlyNSPHead(config)
self.apply(self.init_bert_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None, next_sentence_label=None, checkpoint_activations=False):
_, pooled_output = self.bert(input_ids, token_type_ids, attention_mask,
output_all_encoded_layers=False, checkpoint_activations=checkpoint_activations)
        seq_relationship_score = self.cls(pooled_output)
if next_sentence_label is not None:
loss_fct = CrossEntropyLoss(ignore_index=-1)
next_sentence_loss = loss_fct(seq_relationship_score.view(-1, 2), next_sentence_label.view(-1))
return next_sentence_loss
else:
return seq_relationship_score
class BertForSequenceClassification(PreTrainedBertModel):
"""BERT model for classification.
This module is composed of the BERT model with a linear layer on top of
the pooled output.
Params:
`config`: a BertConfig class instance with the configuration to build a new model.
`num_labels`: the number of classes for the classifier. Default = 2.
Inputs:
`input_ids`: a torch.LongTensor of shape [batch_size, sequence_length]
            with the word token indices in the vocabulary (see the tokens preprocessing logic in the scripts
`extract_features.py`, `run_classifier.py` and `run_squad.py`)
`token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token
types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to
a `sentence B` token (see BERT paper for more details).
`attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices
selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max
input sequence length in the current batch. It's the mask that we typically use for attention when
a batch has varying length sentences.
`labels`: labels for the classification output: torch.LongTensor of shape [batch_size]
with indices selected in [0, ..., num_labels].
Outputs:
if `labels` is not `None`:
Outputs the CrossEntropy classification loss of the output with the labels.
if `labels` is `None`:
Outputs the classification logits of shape [batch_size, num_labels].
Example usage:
```python
# Already been converted into WordPiece token ids
input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]])
input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]])
token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]])
config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768,
num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072)
num_labels = 2
model = BertForSequenceClassification(config, num_labels)
logits = model(input_ids, token_type_ids, input_mask)
```
"""
def __init__(self, config, num_labels=2):
super(BertForSequenceClassification, self).__init__(config)
self.num_labels = num_labels
self.bert = BertModel(config)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
self.classifier = nn.Linear(config.hidden_size, num_labels)
self.apply(self.init_bert_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None, labels=None, checkpoint_activations=False):
_, pooled_output = self.bert(input_ids, token_type_ids, attention_mask, output_all_encoded_layers=False, checkpoint_activations=checkpoint_activations)
pooled_output = self.dropout(pooled_output)
logits = self.classifier(pooled_output)
if labels is not None:
loss_fct = CrossEntropyLoss()
loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
return loss
else:
return logits
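# Fine-tuning step sketch (assumes an `optimizer` and integer `labels` in
# [0, num_labels)):
#   model = BertForSequenceClassification(config, num_labels=2)
#   loss = model(input_ids, token_type_ids, input_mask, labels=labels)
#   loss.backward(); optimizer.step(); optimizer.zero_grad()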
class BertForMultipleChoice(PreTrainedBertModel):
"""BERT model for multiple choice tasks.
This module is composed of the BERT model with a linear layer on top of
the pooled output.
Params:
`config`: a BertConfig class instance with the configuration to build a new model.
`num_choices`: the number of classes for the classifier. Default = 2.
Inputs:
`input_ids`: a torch.LongTensor of shape [batch_size, num_choices, sequence_length]
            with the word token indices in the vocabulary (see the tokens preprocessing logic in the scripts
`extract_features.py`, `run_classifier.py` and `run_squad.py`)
`token_type_ids`: an optional torch.LongTensor of shape [batch_size, num_choices, sequence_length]
with the token types indices selected in [0, 1]. Type 0 corresponds to a `sentence A`
and type 1 corresponds to a `sentence B` token (see BERT paper for more details).
`attention_mask`: an optional torch.LongTensor of shape [batch_size, num_choices, sequence_length] with indices
selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max
input sequence length in the current batch. It's the mask that we typically use for attention when
a batch has varying length sentences.
`labels`: labels for the classification output: torch.LongTensor of shape [batch_size]
with indices selected in [0, ..., num_choices].
Outputs:
if `labels` is not `None`:
Outputs the CrossEntropy classification loss of the output with the labels.
if `labels` is `None`:
            Outputs the classification logits of shape [batch_size, num_choices].
Example usage:
```python
# Already been converted into WordPiece token ids
input_ids = torch.LongTensor([[[31, 51, 99], [15, 5, 0]], [[12, 16, 42], [14, 28, 57]]])
input_mask = torch.LongTensor([[[1, 1, 1], [1, 1, 0]],[[1,1,0], [1, 0, 0]]])
token_type_ids = torch.LongTensor([[[0, 0, 1], [0, 1, 0]],[[0, 1, 1], [0, 0, 1]]])
config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768,
num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072)
num_choices = 2
model = BertForMultipleChoice(config, num_choices)
logits = model(input_ids, token_type_ids, input_mask)
```
"""
def __init__(self, config, num_choices=2):
super(BertForMultipleChoice, self).__init__(config)
self.num_choices = num_choices
self.bert = BertModel(config)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
self.classifier = nn.Linear(config.hidden_size, 1)
self.apply(self.init_bert_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None, labels=None, checkpoint_activations=False):
flat_input_ids = input_ids.view(-1, input_ids.size(-1))
flat_token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1))
flat_attention_mask = attention_mask.view(-1, attention_mask.size(-1))
_, pooled_output = self.bert(flat_input_ids, flat_token_type_ids, flat_attention_mask, output_all_encoded_layers=False, checkpoint_activations=checkpoint_activations)
pooled_output = self.dropout(pooled_output)
logits = self.classifier(pooled_output)
reshaped_logits = logits.view(-1, self.num_choices)
if labels is not None:
loss_fct = CrossEntropyLoss()
loss = loss_fct(reshaped_logits, labels)
return loss
else:
return reshaped_logits
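# Shape walk-through for the flattening above, with batch_size=2 and
# num_choices=2: inputs [2, 2, seq_len] are viewed as [4, seq_len], BERT pools
# them to [4, hidden], the classifier maps to [4, 1], and view(-1, num_choices)
# restores [2, 2] logits so CrossEntropyLoss selects one choice per example.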
class BertForTokenClassification(PreTrainedBertModel):
"""BERT model for token-level classification.
This module is composed of the BERT model with a linear layer on top of
the full hidden state of the last layer.
Params:
`config`: a BertConfig class instance with the configuration to build a new model.
`num_labels`: the number of classes for the classifier. Default = 2.
Inputs:
`input_ids`: a torch.LongTensor of shape [batch_size, sequence_length]
            with the word token indices in the vocabulary (see the tokens preprocessing logic in the scripts
`extract_features.py`, `run_classifier.py` and `run_squad.py`)
`token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token
types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to
a `sentence B` token (see BERT paper for more details).
`attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices
selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max
input sequence length in the current batch. It's the mask that we typically use for attention when
a batch has varying length sentences.
        `labels`: labels for the classification output: torch.LongTensor of shape [batch_size, sequence_length]
with indices selected in [0, ..., num_labels].
Outputs:
if `labels` is not `None`:
Outputs the CrossEntropy classification loss of the output with the labels.
if `labels` is `None`:
Outputs the classification logits of shape [batch_size, sequence_length, num_labels].
Example usage:
```python
# Already been converted into WordPiece token ids
input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]])
input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]])
token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]])
config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768,
num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072)
num_labels = 2
model = BertForTokenClassification(config, num_labels)
logits = model(input_ids, token_type_ids, input_mask)
```
"""
def __init__(self, config, num_labels=2):
super(BertForTokenClassification, self).__init__(config)
self.num_labels = num_labels
self.bert = BertModel(config)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
self.classifier = nn.Linear(config.hidden_size, num_labels)
self.apply(self.init_bert_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None, labels=None, checkpoint_activations=False):
sequence_output, _ = self.bert(input_ids, token_type_ids, attention_mask, output_all_encoded_layers=False, checkpoint_activations=checkpoint_activations)
sequence_output = self.dropout(sequence_output)
logits = self.classifier(sequence_output)
if labels is not None:
loss_fct = CrossEntropyLoss()
loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
return loss
else:
return logits
class BertForQuestionAnswering(PreTrainedBertModel):
"""BERT model for Question Answering (span extraction).
This module is composed of the BERT model with a linear layer on top of
the sequence output that computes start_logits and end_logits
Params:
`config`: a BertConfig class instance with the configuration to build a new model.
Inputs:
`input_ids`: a torch.LongTensor of shape [batch_size, sequence_length]
            with the word token indices in the vocabulary (see the tokens preprocessing logic in the scripts
`extract_features.py`, `run_classifier.py` and `run_squad.py`)
`token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token
types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to
a `sentence B` token (see BERT paper for more details).
`attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices
selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max
input sequence length in the current batch. It's the mask that we typically use for attention when
a batch has varying length sentences.
`start_positions`: position of the first token for the labeled span: torch.LongTensor of shape [batch_size].
            Positions are clamped to the length of the sequence and positions outside of the sequence are not taken
into account for computing the loss.
`end_positions`: position of the last token for the labeled span: torch.LongTensor of shape [batch_size].
            Positions are clamped to the length of the sequence and positions outside of the sequence are not taken
into account for computing the loss.
Outputs:
if `start_positions` and `end_positions` are not `None`:
Outputs the total_loss which is the sum of the CrossEntropy loss for the start and end token positions.
if `start_positions` or `end_positions` is `None`:
Outputs a tuple of start_logits, end_logits which are the logits respectively for the start and end
position tokens of shape [batch_size, sequence_length].
Example usage:
```python
# Already been converted into WordPiece token ids
input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]])
input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]])
token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]])
config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768,
num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072)
model = BertForQuestionAnswering(config)
start_logits, end_logits = model(input_ids, token_type_ids, input_mask)
```
"""
def __init__(self, config):
super(BertForQuestionAnswering, self).__init__(config)
self.bert = BertModel(config)
# TODO check with Google if it's normal there is no dropout on the token classifier of SQuAD in the TF version
# self.dropout = nn.Dropout(config.hidden_dropout_prob)
self.qa_outputs = nn.Linear(config.hidden_size, 2)
self.apply(self.init_bert_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None, start_positions=None, end_positions=None, checkpoint_activations=False):
sequence_output, _ = self.bert(input_ids, token_type_ids, attention_mask, output_all_encoded_layers=False, checkpoint_activations=checkpoint_activations)
logits = self.qa_outputs(sequence_output)
start_logits, end_logits = logits.split(1, dim=-1)
start_logits = start_logits.squeeze(-1)
end_logits = end_logits.squeeze(-1)
if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, the gather may have added an extra dimension; squeeze it
if len(start_positions.size()) > 1:
start_positions = start_positions.squeeze(-1)
if len(end_positions.size()) > 1:
end_positions = end_positions.squeeze(-1)
            # sometimes the start/end positions are outside our model inputs; we ignore these terms
ignored_index = start_logits.size(1)
start_positions.clamp_(0, ignored_index)
end_positions.clamp_(0, ignored_index)
loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
return total_loss
else:
return start_logits, end_logits
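# Span-decoding sketch (hedged; a greedy argmax that ignores the start <= end
# constraint real SQuAD decoders enforce):
#   start_logits, end_logits = model(input_ids, token_type_ids, input_mask)
#   start = start_logits.argmax(dim=-1)   # [batch_size]
#   end = end_logits.argmax(dim=-1)       # [batch_size]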
|
the-stack_0_271 | #!/usr/bin/env python
"""
Easy Install
------------
A tool for doing automatic download/extract/build of distutils-based Python
packages. For detailed documentation, see the accompanying EasyInstall.txt
file, or visit the `EasyInstall home page`__.
__ https://pythonhosted.org/setuptools/easy_install.html
"""
from glob import glob
from distutils.util import get_platform
from distutils.util import convert_path, subst_vars
from distutils.errors import DistutilsArgError, DistutilsOptionError, \
DistutilsError, DistutilsPlatformError
from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
from distutils import log, dir_util
from distutils.command.build_scripts import first_line_re
import sys
import os
import zipimport
import shutil
import tempfile
import zipfile
import re
import stat
import random
import platform
import textwrap
import warnings
import site
import struct
import contextlib
from setuptools import Command
from setuptools.sandbox import run_setup
from setuptools.py31compat import get_path, get_config_vars
from setuptools.command import setopt
from setuptools.archive_util import unpack_archive
from setuptools.package_index import PackageIndex
from setuptools.package_index import URL_SCHEME
from setuptools.command import bdist_egg, egg_info
from setuptools.compat import (iteritems, maxsize, basestring, unicode,
reraise, PY2, PY3)
from pkg_resources import (
yield_lines, normalize_path, resource_string, ensure_directory,
get_distribution, find_distributions, Environment, Requirement,
Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
VersionConflict, DEVELOP_DIST,
)
import pkg_resources
# Turn on PEP440Warnings
warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
sys_executable = os.environ.get('__PYVENV_LAUNCHER__',
os.path.normpath(sys.executable))
__all__ = [
'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
'main', 'get_exe_prefixes',
]
def is_64bit():
return struct.calcsize("P") == 8
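# struct.calcsize("P") is the size in bytes of a C pointer, so this returns
# True on a 64-bit interpreter (8) and False on a 32-bit one (4).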
def samefile(p1, p2):
both_exist = os.path.exists(p1) and os.path.exists(p2)
use_samefile = hasattr(os.path, 'samefile') and both_exist
if use_samefile:
return os.path.samefile(p1, p2)
norm_p1 = os.path.normpath(os.path.normcase(p1))
norm_p2 = os.path.normpath(os.path.normcase(p2))
return norm_p1 == norm_p2
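# Fallback sketch: when os.path.samefile is unavailable or either path does
# not exist, equality is decided on normalized, case-folded paths, e.g.
#   samefile('/tmp/x', '/tmp/../tmp/x')   # True via normpath comparison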
if PY2:
def _to_ascii(s):
return s
def isascii(s):
try:
unicode(s, 'ascii')
return True
except UnicodeError:
return False
else:
def _to_ascii(s):
return s.encode('ascii')
def isascii(s):
try:
s.encode('ascii')
return True
except UnicodeError:
return False
class easy_install(Command):
"""Manage a download/build/install process"""
description = "Find/get/install Python packages"
command_consumes_arguments = True
user_options = [
('prefix=', None, "installation prefix"),
("zip-ok", "z", "install package as a zipfile"),
("multi-version", "m", "make apps have to require() a version"),
("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
("install-dir=", "d", "install package to DIR"),
("script-dir=", "s", "install scripts to DIR"),
("exclude-scripts", "x", "Don't install scripts"),
("always-copy", "a", "Copy all needed packages to install dir"),
("index-url=", "i", "base URL of Python Package Index"),
("find-links=", "f", "additional URL(s) to search for packages"),
("build-directory=", "b",
"download/extract/build in DIR; keep the results"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('record=', None,
"filename in which to record list of installed files"),
('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
('site-dirs=', 'S', "list of directories where .pth files work"),
('editable', 'e', "Install specified packages in editable form"),
('no-deps', 'N', "don't install dependencies"),
('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
('local-snapshots-ok', 'l',
"allow building eggs from local checkouts"),
('version', None, "print version information and exit"),
('no-find-links', None,
"Don't load find-links defined in packages being installed")
]
boolean_options = [
'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
'editable',
'no-deps', 'local-snapshots-ok', 'version'
]
if site.ENABLE_USER_SITE:
help_msg = "install in user site-package '%s'" % site.USER_SITE
user_options.append(('user', None, help_msg))
boolean_options.append('user')
negative_opt = {'always-unzip': 'zip-ok'}
create_index = PackageIndex
def initialize_options(self):
if site.ENABLE_USER_SITE:
whereami = os.path.abspath(__file__)
self.user = whereami.startswith(site.USER_SITE)
else:
self.user = 0
self.zip_ok = self.local_snapshots_ok = None
self.install_dir = self.script_dir = self.exclude_scripts = None
self.index_url = None
self.find_links = None
self.build_directory = None
self.args = None
self.optimize = self.record = None
self.upgrade = self.always_copy = self.multi_version = None
self.editable = self.no_deps = self.allow_hosts = None
self.root = self.prefix = self.no_report = None
self.version = None
self.install_purelib = None # for pure module distributions
self.install_platlib = None # non-pure (dists w/ extensions)
self.install_headers = None # for C/C++ headers
self.install_lib = None # set to either purelib or platlib
self.install_scripts = None
self.install_data = None
self.install_base = None
self.install_platbase = None
if site.ENABLE_USER_SITE:
self.install_userbase = site.USER_BASE
self.install_usersite = site.USER_SITE
else:
self.install_userbase = None
self.install_usersite = None
self.no_find_links = None
# Options not specifiable via command line
self.package_index = None
self.pth_file = self.always_copy_from = None
self.site_dirs = None
self.installed_projects = {}
self.sitepy_installed = False
# Always read easy_install options, even if we are subclassed, or have
# an independent instance created. This ensures that defaults will
# always come from the standard configuration file(s)' "easy_install"
# section, even if this is a "develop" or "install" command, or some
# other embedding.
self._dry_run = None
self.verbose = self.distribution.verbose
self.distribution._set_command_options(
self, self.distribution.get_option_dict('easy_install')
)
def delete_blockers(self, blockers):
for filename in blockers:
if os.path.exists(filename) or os.path.islink(filename):
log.info("Deleting %s", filename)
if not self.dry_run:
if (os.path.isdir(filename) and
not os.path.islink(filename)):
rmtree(filename)
else:
os.unlink(filename)
def finalize_options(self):
if self.version:
print('setuptools %s' % get_distribution('setuptools').version)
sys.exit()
py_version = sys.version.split()[0]
prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
self.config_vars = {
'dist_name': self.distribution.get_name(),
'dist_version': self.distribution.get_version(),
'dist_fullname': self.distribution.get_fullname(),
'py_version': py_version,
'py_version_short': py_version[0:3],
'py_version_nodot': py_version[0] + py_version[2],
'sys_prefix': prefix,
'prefix': prefix,
'sys_exec_prefix': exec_prefix,
'exec_prefix': exec_prefix,
# Only python 3.2+ has abiflags
'abiflags': getattr(sys, 'abiflags', ''),
}
if site.ENABLE_USER_SITE:
self.config_vars['userbase'] = self.install_userbase
self.config_vars['usersite'] = self.install_usersite
# fix the install_dir if "--user" was used
# XXX: duplicate of the code in the setup command
if self.user and site.ENABLE_USER_SITE:
self.create_home_path()
if self.install_userbase is None:
raise DistutilsPlatformError(
"User base directory is not specified")
self.install_base = self.install_platbase = self.install_userbase
if os.name == 'posix':
self.select_scheme("unix_user")
else:
self.select_scheme(os.name + "_user")
self.expand_basedirs()
self.expand_dirs()
self._expand('install_dir', 'script_dir', 'build_directory',
'site_dirs')
# If a non-default installation directory was specified, default the
# script directory to match it.
if self.script_dir is None:
self.script_dir = self.install_dir
if self.no_find_links is None:
self.no_find_links = False
# Let install_dir get set by install_lib command, which in turn
# gets its info from the install command, and takes into account
# --prefix and --home and all that other crud.
self.set_undefined_options(
'install_lib', ('install_dir', 'install_dir')
)
# Likewise, set default script_dir from 'install_scripts.install_dir'
self.set_undefined_options(
'install_scripts', ('install_dir', 'script_dir')
)
if self.user and self.install_purelib:
self.install_dir = self.install_purelib
self.script_dir = self.install_scripts
# default --record from the install command
self.set_undefined_options('install', ('record', 'record'))
# Should this be moved to the if statement below? It's not used
# elsewhere
        normpath = list(map(normalize_path, sys.path))  # materialized: membership is tested repeatedly below, and a map object would exhaust on Python 3
self.all_site_dirs = get_site_dirs()
if self.site_dirs is not None:
site_dirs = [
os.path.expanduser(s.strip()) for s in
self.site_dirs.split(',')
]
for d in site_dirs:
if not os.path.isdir(d):
log.warn("%s (in --site-dirs) does not exist", d)
elif normalize_path(d) not in normpath:
raise DistutilsOptionError(
d + " (in --site-dirs) is not on sys.path"
)
else:
self.all_site_dirs.append(normalize_path(d))
if not self.editable:
self.check_site_dir()
self.index_url = self.index_url or "https://pypi.python.org/simple"
self.shadow_path = self.all_site_dirs[:]
for path_item in self.install_dir, normalize_path(self.script_dir):
if path_item not in self.shadow_path:
self.shadow_path.insert(0, path_item)
if self.allow_hosts is not None:
hosts = [s.strip() for s in self.allow_hosts.split(',')]
else:
hosts = ['*']
if self.package_index is None:
self.package_index = self.create_index(
self.index_url, search_path=self.shadow_path, hosts=hosts,
)
self.local_index = Environment(self.shadow_path + sys.path)
if self.find_links is not None:
if isinstance(self.find_links, basestring):
self.find_links = self.find_links.split()
else:
self.find_links = []
if self.local_snapshots_ok:
self.package_index.scan_egg_links(self.shadow_path + sys.path)
if not self.no_find_links:
self.package_index.add_find_links(self.find_links)
self.set_undefined_options('install_lib', ('optimize', 'optimize'))
if not isinstance(self.optimize, int):
try:
self.optimize = int(self.optimize)
if not (0 <= self.optimize <= 2):
raise ValueError
except ValueError:
raise DistutilsOptionError("--optimize must be 0, 1, or 2")
if self.editable and not self.build_directory:
raise DistutilsArgError(
"Must specify a build directory (-b) when using --editable"
)
if not self.args:
raise DistutilsArgError(
"No urls, filenames, or requirements specified (see --help)")
self.outputs = []
def _expand_attrs(self, attrs):
for attr in attrs:
val = getattr(self, attr)
if val is not None:
if os.name == 'posix' or os.name == 'nt':
val = os.path.expanduser(val)
val = subst_vars(val, self.config_vars)
setattr(self, attr, val)
def expand_basedirs(self):
"""Calls `os.path.expanduser` on install_base, install_platbase and
root."""
self._expand_attrs(['install_base', 'install_platbase', 'root'])
def expand_dirs(self):
"""Calls `os.path.expanduser` on install dirs."""
self._expand_attrs(['install_purelib', 'install_platlib',
'install_lib', 'install_headers',
'install_scripts', 'install_data', ])
def run(self):
if self.verbose != self.distribution.verbose:
log.set_verbosity(self.verbose)
try:
for spec in self.args:
self.easy_install(spec, not self.no_deps)
if self.record:
outputs = self.outputs
if self.root: # strip any package prefix
root_len = len(self.root)
for counter in range(len(outputs)):
outputs[counter] = outputs[counter][root_len:]
from distutils import file_util
self.execute(
file_util.write_file, (self.record, outputs),
"writing list of installed files to '%s'" %
self.record
)
self.warn_deprecated_options()
finally:
log.set_verbosity(self.distribution.verbose)
def pseudo_tempname(self):
"""Return a pseudo-tempname base in the install directory.
This code is intentionally naive; if a malicious party can write to
the target directory you're already in deep doodoo.
"""
try:
pid = os.getpid()
except:
pid = random.randint(0, maxsize)
return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
def warn_deprecated_options(self):
pass
def check_site_dir(self):
"""Verify that self.install_dir is .pth-capable dir, if needed"""
instdir = normalize_path(self.install_dir)
pth_file = os.path.join(instdir, 'easy-install.pth')
# Is it a configured, PYTHONPATH, implicit, or explicit site dir?
is_site_dir = instdir in self.all_site_dirs
if not is_site_dir and not self.multi_version:
# No? Then directly test whether it does .pth file processing
is_site_dir = self.check_pth_processing()
else:
# make sure we can write to target dir
testfile = self.pseudo_tempname() + '.write-test'
test_exists = os.path.exists(testfile)
try:
if test_exists:
os.unlink(testfile)
open(testfile, 'w').close()
os.unlink(testfile)
except (OSError, IOError):
self.cant_write_to_target()
if not is_site_dir and not self.multi_version:
# Can't install non-multi to non-site dir
raise DistutilsError(self.no_default_version_msg())
if is_site_dir:
if self.pth_file is None:
self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
else:
self.pth_file = None
PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep)
if instdir not in map(normalize_path, [_f for _f in PYTHONPATH if _f]):
# only PYTHONPATH dirs need a site.py, so pretend it's there
self.sitepy_installed = True
elif self.multi_version and not os.path.exists(pth_file):
self.sitepy_installed = True # don't need site.py in this case
self.pth_file = None # and don't create a .pth file
self.install_dir = instdir
def cant_write_to_target(self):
template = """can't create or remove files in install directory
The following error occurred while trying to add or remove files in the
installation directory:
%s
The installation directory you specified (via --install-dir, --prefix, or
the distutils default setting) was:
%s
"""
msg = template % (sys.exc_info()[1], self.install_dir,)
if not os.path.exists(self.install_dir):
msg += """
This directory does not currently exist. Please create it and try again, or
choose a different installation directory (using the -d or --install-dir
option).
"""
else:
msg += """
Perhaps your account does not have write access to this directory? If the
installation directory is a system-owned directory, you may need to sign in
as the administrator or "root" account. If you do not have administrative
access to this machine, you may wish to choose a different installation
directory, preferably one that is listed in your PYTHONPATH environment
variable.
For information on other options, you may wish to consult the
documentation at:
https://pythonhosted.org/setuptools/easy_install.html
Please make the appropriate changes for your system and try again.
"""
raise DistutilsError(msg)
def check_pth_processing(self):
"""Empirically verify whether .pth files are supported in inst. dir"""
instdir = self.install_dir
log.info("Checking .pth file support in %s", instdir)
pth_file = self.pseudo_tempname() + ".pth"
ok_file = pth_file + '.ok'
ok_exists = os.path.exists(ok_file)
try:
if ok_exists:
os.unlink(ok_file)
dirname = os.path.dirname(ok_file)
if not os.path.exists(dirname):
os.makedirs(dirname)
f = open(pth_file, 'w')
except (OSError, IOError):
self.cant_write_to_target()
else:
try:
f.write("import os; f = open(%r, 'w'); f.write('OK'); "
"f.close()\n" % (ok_file,))
f.close()
f = None
executable = sys.executable
if os.name == 'nt':
dirname, basename = os.path.split(executable)
alt = os.path.join(dirname, 'pythonw.exe')
if (basename.lower() == 'python.exe' and
os.path.exists(alt)):
# use pythonw.exe to avoid opening a console window
executable = alt
from distutils.spawn import spawn
spawn([executable, '-E', '-c', 'pass'], 0)
if os.path.exists(ok_file):
log.info(
"TEST PASSED: %s appears to support .pth files",
instdir
)
return True
finally:
if f:
f.close()
if os.path.exists(ok_file):
os.unlink(ok_file)
if os.path.exists(pth_file):
os.unlink(pth_file)
if not self.multi_version:
log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
return False
def install_egg_scripts(self, dist):
"""Write all the scripts for `dist`, unless scripts are excluded"""
if not self.exclude_scripts and dist.metadata_isdir('scripts'):
for script_name in dist.metadata_listdir('scripts'):
if dist.metadata_isdir('scripts/' + script_name):
# The "script" is a directory, likely a Python 3
# __pycache__ directory, so skip it.
continue
self.install_script(
dist, script_name,
dist.get_metadata('scripts/' + script_name)
)
self.install_wrapper_scripts(dist)
def add_output(self, path):
if os.path.isdir(path):
for base, dirs, files in os.walk(path):
for filename in files:
self.outputs.append(os.path.join(base, filename))
else:
self.outputs.append(path)
def not_editable(self, spec):
if self.editable:
raise DistutilsArgError(
"Invalid argument %r: you can't use filenames or URLs "
"with --editable (except via the --find-links option)."
% (spec,)
)
def check_editable(self, spec):
if not self.editable:
return
if os.path.exists(os.path.join(self.build_directory, spec.key)):
raise DistutilsArgError(
"%r already exists in %s; can't do a checkout there" %
(spec.key, self.build_directory)
)
def easy_install(self, spec, deps=False):
tmpdir = tempfile.mkdtemp(prefix="easy_install-")
download = None
if not self.editable:
self.install_site_py()
try:
if not isinstance(spec, Requirement):
if URL_SCHEME(spec):
# It's a url, download it to tmpdir and process
self.not_editable(spec)
download = self.package_index.download(spec, tmpdir)
return self.install_item(None, download, tmpdir, deps,
True)
elif os.path.exists(spec):
# Existing file or directory, just process it directly
self.not_editable(spec)
return self.install_item(None, spec, tmpdir, deps, True)
else:
spec = parse_requirement_arg(spec)
self.check_editable(spec)
dist = self.package_index.fetch_distribution(
spec, tmpdir, self.upgrade, self.editable,
not self.always_copy, self.local_index
)
if dist is None:
msg = "Could not find suitable distribution for %r" % spec
if self.always_copy:
msg += " (--always-copy skips system and development eggs)"
raise DistutilsError(msg)
elif dist.precedence == DEVELOP_DIST:
# .egg-info dists don't need installing, just process deps
self.process_distribution(spec, dist, deps, "Using")
return dist
else:
return self.install_item(spec, dist.location, tmpdir, deps)
finally:
if os.path.exists(tmpdir):
rmtree(tmpdir)
def install_item(self, spec, download, tmpdir, deps, install_needed=False):
# Installation is also needed if file in tmpdir or is not an egg
install_needed = install_needed or self.always_copy
install_needed = install_needed or os.path.dirname(download) == tmpdir
install_needed = install_needed or not download.endswith('.egg')
install_needed = install_needed or (
self.always_copy_from is not None and
os.path.dirname(normalize_path(download)) ==
normalize_path(self.always_copy_from)
)
if spec and not install_needed:
# at this point, we know it's a local .egg, we just don't know if
# it's already installed.
for dist in self.local_index[spec.project_name]:
if dist.location == download:
break
else:
install_needed = True # it's not in the local index
log.info("Processing %s", os.path.basename(download))
if install_needed:
dists = self.install_eggs(spec, download, tmpdir)
for dist in dists:
self.process_distribution(spec, dist, deps)
else:
dists = [self.egg_distribution(download)]
self.process_distribution(spec, dists[0], deps, "Using")
if spec is not None:
for dist in dists:
if dist in spec:
return dist
def select_scheme(self, name):
"""Sets the install directories by applying the install schemes."""
# it's the caller's problem if they supply a bad name!
scheme = INSTALL_SCHEMES[name]
for key in SCHEME_KEYS:
attrname = 'install_' + key
if getattr(self, attrname) is None:
setattr(self, attrname, scheme[key])
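    # Sketch: with name='unix_user', any install_* attribute still None is
    # filled from distutils' INSTALL_SCHEMES['unix_user'] (templates such as
    # '$userbase/bin' for scripts, expanded later through config_vars).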
def process_distribution(self, requirement, dist, deps=True, *info):
self.update_pth(dist)
self.package_index.add(dist)
if dist in self.local_index[dist.key]:
self.local_index.remove(dist)
self.local_index.add(dist)
self.install_egg_scripts(dist)
self.installed_projects[dist.key] = dist
log.info(self.installation_report(requirement, dist, *info))
if (dist.has_metadata('dependency_links.txt') and
not self.no_find_links):
self.package_index.add_find_links(
dist.get_metadata_lines('dependency_links.txt')
)
if not deps and not self.always_copy:
return
elif requirement is not None and dist.key != requirement.key:
log.warn("Skipping dependencies for %s", dist)
return # XXX this is not the distribution we were looking for
elif requirement is None or dist not in requirement:
# if we wound up with a different version, resolve what we've got
distreq = dist.as_requirement()
requirement = requirement or distreq
requirement = Requirement(
distreq.project_name, distreq.specs, requirement.extras
)
log.info("Processing dependencies for %s", requirement)
try:
distros = WorkingSet([]).resolve(
[requirement], self.local_index, self.easy_install
)
except DistributionNotFound:
e = sys.exc_info()[1]
raise DistutilsError(
"Could not find required distribution %s" % e.args
)
except VersionConflict:
e = sys.exc_info()[1]
raise DistutilsError(
"Installed distribution %s conflicts with requirement %s"
% e.args
)
if self.always_copy or self.always_copy_from:
# Force all the relevant distros to be copied or activated
for dist in distros:
if dist.key not in self.installed_projects:
self.easy_install(dist.as_requirement())
log.info("Finished processing dependencies for %s", requirement)
def should_unzip(self, dist):
if self.zip_ok is not None:
return not self.zip_ok
if dist.has_metadata('not-zip-safe'):
return True
if not dist.has_metadata('zip-safe'):
return True
return False
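    # Decision order above: an explicit --zip-ok/--always-unzip flag wins;
    # otherwise an egg is unzipped unless it is positively marked zip-safe
    # (a 'not-zip-safe' marker or missing metadata both force extraction).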
def maybe_move(self, spec, dist_filename, setup_base):
dst = os.path.join(self.build_directory, spec.key)
if os.path.exists(dst):
msg = ("%r already exists in %s; build directory %s will not be "
"kept")
log.warn(msg, spec.key, self.build_directory, setup_base)
return setup_base
if os.path.isdir(dist_filename):
setup_base = dist_filename
else:
if os.path.dirname(dist_filename) == setup_base:
os.unlink(dist_filename) # get it out of the tmp dir
contents = os.listdir(setup_base)
if len(contents) == 1:
dist_filename = os.path.join(setup_base, contents[0])
if os.path.isdir(dist_filename):
# if the only thing there is a directory, move it instead
setup_base = dist_filename
ensure_directory(dst)
shutil.move(setup_base, dst)
return dst
def install_wrapper_scripts(self, dist):
if not self.exclude_scripts:
for args in get_script_args(dist):
self.write_script(*args)
def install_script(self, dist, script_name, script_text, dev_path=None):
"""Generate a legacy script wrapper and install it"""
spec = str(dist.as_requirement())
is_script = is_python_script(script_text, script_name)
if is_script:
script_text = (get_script_header(script_text) +
self._load_template(dev_path) % locals())
self.write_script(script_name, _to_ascii(script_text), 'b')
@staticmethod
def _load_template(dev_path):
"""
There are a couple of template scripts in the package. This
function loads one of them and prepares it for use.
"""
# See https://bitbucket.org/pypa/setuptools/issue/134 for info
# on script file naming and downstream issues with SVR4
name = 'script.tmpl'
if dev_path:
name = name.replace('.tmpl', ' (dev).tmpl')
raw_bytes = resource_string('setuptools', name)
return raw_bytes.decode('utf-8')
def write_script(self, script_name, contents, mode="t", blockers=()):
"""Write an executable file to the scripts directory"""
self.delete_blockers( # clean up old .py/.pyw w/o a script
[os.path.join(self.script_dir, x) for x in blockers]
)
log.info("Installing %s script to %s", script_name, self.script_dir)
target = os.path.join(self.script_dir, script_name)
self.add_output(target)
mask = current_umask()
if not self.dry_run:
ensure_directory(target)
if os.path.exists(target):
os.unlink(target)
f = open(target, "w" + mode)
f.write(contents)
f.close()
chmod(target, 0o777 - mask)
def install_eggs(self, spec, dist_filename, tmpdir):
# .egg dirs or files are already built, so just return them
if dist_filename.lower().endswith('.egg'):
return [self.install_egg(dist_filename, tmpdir)]
elif dist_filename.lower().endswith('.exe'):
return [self.install_exe(dist_filename, tmpdir)]
# Anything else, try to extract and build
setup_base = tmpdir
if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
unpack_archive(dist_filename, tmpdir, self.unpack_progress)
elif os.path.isdir(dist_filename):
setup_base = os.path.abspath(dist_filename)
if (setup_base.startswith(tmpdir) # something we downloaded
and self.build_directory and spec is not None):
setup_base = self.maybe_move(spec, dist_filename, setup_base)
# Find the setup.py file
setup_script = os.path.join(setup_base, 'setup.py')
if not os.path.exists(setup_script):
setups = glob(os.path.join(setup_base, '*', 'setup.py'))
if not setups:
raise DistutilsError(
"Couldn't find a setup script in %s" %
os.path.abspath(dist_filename)
)
if len(setups) > 1:
raise DistutilsError(
"Multiple setup scripts in %s" %
os.path.abspath(dist_filename)
)
setup_script = setups[0]
# Now run it, and return the result
if self.editable:
log.info(self.report_editable(spec, setup_script))
return []
else:
return self.build_and_install(setup_script, setup_base)
def egg_distribution(self, egg_path):
if os.path.isdir(egg_path):
metadata = PathMetadata(egg_path, os.path.join(egg_path,
'EGG-INFO'))
else:
metadata = EggMetadata(zipimport.zipimporter(egg_path))
return Distribution.from_filename(egg_path, metadata=metadata)
def install_egg(self, egg_path, tmpdir):
destination = os.path.join(self.install_dir,
os.path.basename(egg_path))
destination = os.path.abspath(destination)
if not self.dry_run:
ensure_directory(destination)
dist = self.egg_distribution(egg_path)
if not samefile(egg_path, destination):
if os.path.isdir(destination) and not os.path.islink(destination):
dir_util.remove_tree(destination, dry_run=self.dry_run)
elif os.path.exists(destination):
self.execute(os.unlink, (destination,), "Removing " +
destination)
try:
new_dist_is_zipped = False
if os.path.isdir(egg_path):
if egg_path.startswith(tmpdir):
f, m = shutil.move, "Moving"
else:
f, m = shutil.copytree, "Copying"
elif self.should_unzip(dist):
self.mkpath(destination)
f, m = self.unpack_and_compile, "Extracting"
else:
new_dist_is_zipped = True
if egg_path.startswith(tmpdir):
f, m = shutil.move, "Moving"
else:
f, m = shutil.copy2, "Copying"
self.execute(f, (egg_path, destination),
(m + " %s to %s") %
(os.path.basename(egg_path),
os.path.dirname(destination)))
update_dist_caches(destination,
fix_zipimporter_caches=new_dist_is_zipped)
except:
update_dist_caches(destination, fix_zipimporter_caches=False)
raise
self.add_output(destination)
return self.egg_distribution(destination)
def install_exe(self, dist_filename, tmpdir):
# See if it's valid, get data
cfg = extract_wininst_cfg(dist_filename)
if cfg is None:
raise DistutilsError(
"%s is not a valid distutils Windows .exe" % dist_filename
)
# Create a dummy distribution object until we build the real distro
dist = Distribution(
None,
project_name=cfg.get('metadata', 'name'),
version=cfg.get('metadata', 'version'), platform=get_platform(),
)
# Convert the .exe to an unpacked egg
egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() +
'.egg')
egg_tmp = egg_path + '.tmp'
_egg_info = os.path.join(egg_tmp, 'EGG-INFO')
pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX
self.exe_to_egg(dist_filename, egg_tmp)
# Write EGG-INFO/PKG-INFO
if not os.path.exists(pkg_inf):
f = open(pkg_inf, 'w')
f.write('Metadata-Version: 1.0\n')
for k, v in cfg.items('metadata'):
if k != 'target_version':
f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
f.close()
script_dir = os.path.join(_egg_info, 'scripts')
self.delete_blockers( # delete entry-point scripts to avoid duping
[os.path.join(script_dir, args[0]) for args in
get_script_args(dist)]
)
# Build .egg file from tmpdir
bdist_egg.make_zipfile(
egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
)
# install the .egg
return self.install_egg(egg_path, tmpdir)
def exe_to_egg(self, dist_filename, egg_tmp):
"""Extract a bdist_wininst to the directories an egg would use"""
# Check for .pth file and set up prefix translations
prefixes = get_exe_prefixes(dist_filename)
to_compile = []
native_libs = []
top_level = {}
def process(src, dst):
s = src.lower()
for old, new in prefixes:
if s.startswith(old):
src = new + src[len(old):]
parts = src.split('/')
dst = os.path.join(egg_tmp, *parts)
dl = dst.lower()
if dl.endswith('.pyd') or dl.endswith('.dll'):
parts[-1] = bdist_egg.strip_module(parts[-1])
top_level[os.path.splitext(parts[0])[0]] = 1
native_libs.append(src)
elif dl.endswith('.py') and old != 'SCRIPTS/':
top_level[os.path.splitext(parts[0])[0]] = 1
to_compile.append(dst)
return dst
if not src.endswith('.pth'):
log.warn("WARNING: can't process %s", src)
return None
# extract, tracking .pyd/.dll->native_libs and .py -> to_compile
unpack_archive(dist_filename, egg_tmp, process)
stubs = []
for res in native_libs:
if res.lower().endswith('.pyd'): # create stubs for .pyd's
parts = res.split('/')
resource = parts[-1]
parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
pyfile = os.path.join(egg_tmp, *parts)
to_compile.append(pyfile)
stubs.append(pyfile)
bdist_egg.write_stub(resource, pyfile)
self.byte_compile(to_compile) # compile .py's
bdist_egg.write_safety_flag(
os.path.join(egg_tmp, 'EGG-INFO'),
bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
for name in 'top_level', 'native_libs':
if locals()[name]:
txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
if not os.path.exists(txt):
f = open(txt, 'w')
f.write('\n'.join(locals()[name]) + '\n')
f.close()
def installation_report(self, req, dist, what="Installed"):
"""Helpful installation message for display to package users"""
msg = "\n%(what)s %(eggloc)s%(extras)s"
if self.multi_version and not self.no_report:
msg += """
Because this distribution was installed --multi-version, before you can
import modules from this package in an application, you will need to
'import pkg_resources' and then use a 'require()' call similar to one of
these examples, in order to select the desired version:
pkg_resources.require("%(name)s") # latest installed version
pkg_resources.require("%(name)s==%(version)s") # this exact version
pkg_resources.require("%(name)s>=%(version)s") # this version or higher
"""
if self.install_dir not in map(normalize_path, sys.path):
msg += """
Note also that the installation directory must be on sys.path at runtime for
this to work. (e.g. by being the application's script directory, by being on
PYTHONPATH, or by being added to sys.path by your code.)
"""
eggloc = dist.location
name = dist.project_name
version = dist.version
extras = '' # TODO: self.report_extras(req, dist)
return msg % locals()
def report_editable(self, spec, setup_script):
dirname = os.path.dirname(setup_script)
python = sys.executable
return """\nExtracted editable version of %(spec)s to %(dirname)s
If it uses setuptools in its setup script, you can activate it in
"development" mode by going to that directory and running::
%(python)s setup.py develop
See the setuptools documentation for the "develop" command for more info.
""" % locals()
def run_setup(self, setup_script, setup_base, args):
sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
sys.modules.setdefault('distutils.command.egg_info', egg_info)
args = list(args)
if self.verbose > 2:
v = 'v' * (self.verbose - 1)
args.insert(0, '-' + v)
elif self.verbose < 2:
args.insert(0, '-q')
if self.dry_run:
args.insert(0, '-n')
log.info(
"Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
)
try:
run_setup(setup_script, args)
except SystemExit:
v = sys.exc_info()[1]
raise DistutilsError("Setup script exited with %s" % (v.args[0],))
def build_and_install(self, setup_script, setup_base):
args = ['bdist_egg', '--dist-dir']
dist_dir = tempfile.mkdtemp(
prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
)
try:
self._set_fetcher_options(os.path.dirname(setup_script))
args.append(dist_dir)
self.run_setup(setup_script, setup_base, args)
all_eggs = Environment([dist_dir])
eggs = []
for key in all_eggs:
for dist in all_eggs[key]:
eggs.append(self.install_egg(dist.location, setup_base))
if not eggs and not self.dry_run:
log.warn("No eggs found in %s (setup script problem?)",
dist_dir)
return eggs
finally:
rmtree(dist_dir)
log.set_verbosity(self.verbose) # restore our log verbosity
def _set_fetcher_options(self, base):
"""
When easy_install is about to run bdist_egg on a source dist, that
source dist might have 'setup_requires' directives, requiring
additional fetching. Ensure the fetcher options given to easy_install
are available to that command as well.
"""
# find the fetch options from easy_install and write them out
# to the setup.cfg file.
ei_opts = self.distribution.get_option_dict('easy_install').copy()
        fetch_directives = (
            'find_links', 'site_dirs', 'index_url', 'optimize',
            'allow_hosts',
        )
fetch_options = {}
for key, val in ei_opts.items():
if key not in fetch_directives:
continue
fetch_options[key.replace('_', '-')] = val[1]
# create a settings dictionary suitable for `edit_config`
settings = dict(easy_install=fetch_options)
cfg_filename = os.path.join(base, 'setup.cfg')
setopt.edit_config(cfg_filename, settings)
def update_pth(self, dist):
if self.pth_file is None:
return
for d in self.pth_file[dist.key]: # drop old entries
if self.multi_version or d.location != dist.location:
log.info("Removing %s from easy-install.pth file", d)
self.pth_file.remove(d)
if d.location in self.shadow_path:
self.shadow_path.remove(d.location)
if not self.multi_version:
if dist.location in self.pth_file.paths:
log.info(
"%s is already the active version in easy-install.pth",
dist
)
else:
log.info("Adding %s to easy-install.pth file", dist)
self.pth_file.add(dist) # add new entry
if dist.location not in self.shadow_path:
self.shadow_path.append(dist.location)
if not self.dry_run:
self.pth_file.save()
if dist.key == 'setuptools':
# Ensure that setuptools itself never becomes unavailable!
# XXX should this check for latest version?
filename = os.path.join(self.install_dir, 'setuptools.pth')
if os.path.islink(filename):
os.unlink(filename)
f = open(filename, 'wt')
f.write(self.pth_file.make_relative(dist.location) + '\n')
f.close()
def unpack_progress(self, src, dst):
# Progress filter for unpacking
log.debug("Unpacking %s to %s", src, dst)
return dst # only unpack-and-compile skips files for dry run
def unpack_and_compile(self, egg_path, destination):
to_compile = []
to_chmod = []
def pf(src, dst):
if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
to_compile.append(dst)
elif dst.endswith('.dll') or dst.endswith('.so'):
to_chmod.append(dst)
self.unpack_progress(src, dst)
return not self.dry_run and dst or None
unpack_archive(egg_path, destination, pf)
self.byte_compile(to_compile)
if not self.dry_run:
for f in to_chmod:
mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
chmod(f, mode)
def byte_compile(self, to_compile):
if sys.dont_write_bytecode:
self.warn('byte-compiling is disabled, skipping.')
return
from distutils.util import byte_compile
try:
# try to make the byte compile messages quieter
log.set_verbosity(self.verbose - 1)
byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
if self.optimize:
byte_compile(
to_compile, optimize=self.optimize, force=1,
dry_run=self.dry_run
)
finally:
log.set_verbosity(self.verbose) # restore original verbosity
def no_default_version_msg(self):
template = """bad install directory or PYTHONPATH
You are attempting to install a package to a directory that is not
on PYTHONPATH and which Python does not read ".pth" files from. The
installation directory you specified (via --install-dir, --prefix, or
the distutils default setting) was:
%s
and your PYTHONPATH environment variable currently contains:
%r
Here are some of your options for correcting the problem:
* You can choose a different installation directory, i.e., one that is
on PYTHONPATH or supports .pth files
* You can add the installation directory to the PYTHONPATH environment
variable. (It must then also be on PYTHONPATH whenever you run
Python and want to use the package(s) you are installing.)
* You can set up the installation directory to support ".pth" files by
using one of the approaches described here:
https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations
Please make the appropriate changes for your system and try again."""
return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
def install_site_py(self):
"""Make sure there's a site.py in the target dir, if needed"""
if self.sitepy_installed:
return # already did it, or don't need to
sitepy = os.path.join(self.install_dir, "site.py")
source = resource_string("setuptools", "site-patch.py")
current = ""
if os.path.exists(sitepy):
log.debug("Checking existing site.py in %s", self.install_dir)
f = open(sitepy, 'rb')
current = f.read()
# we want str, not bytes
if PY3:
current = current.decode()
f.close()
if not current.startswith('def __boot():'):
raise DistutilsError(
"%s is not a setuptools-generated site.py; please"
" remove it." % sitepy
)
if current != source:
log.info("Creating %s", sitepy)
if not self.dry_run:
ensure_directory(sitepy)
f = open(sitepy, 'wb')
f.write(source)
f.close()
self.byte_compile([sitepy])
self.sitepy_installed = True
def create_home_path(self):
"""Create directories under ~."""
if not self.user:
return
home = convert_path(os.path.expanduser("~"))
for name, path in iteritems(self.config_vars):
if path.startswith(home) and not os.path.isdir(path):
self.debug_print("os.makedirs('%s', 0o700)" % path)
os.makedirs(path, 0o700)
INSTALL_SCHEMES = dict(
posix=dict(
install_dir='$base/lib/python$py_version_short/site-packages',
script_dir='$base/bin',
),
)
DEFAULT_SCHEME = dict(
install_dir='$base/Lib/site-packages',
script_dir='$base/Scripts',
)
def _expand(self, *attrs):
config_vars = self.get_finalized_command('install').config_vars
if self.prefix:
# Set default install_dir/scripts from --prefix
config_vars = config_vars.copy()
config_vars['base'] = self.prefix
scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
for attr, val in scheme.items():
if getattr(self, attr, None) is None:
setattr(self, attr, val)
from distutils.util import subst_vars
for attr in attrs:
val = getattr(self, attr)
if val is not None:
val = subst_vars(val, config_vars)
if os.name == 'posix':
val = os.path.expanduser(val)
setattr(self, attr, val)
def get_site_dirs():
# return a list of 'site' dirs
sitedirs = [_f for _f in os.environ.get('PYTHONPATH',
'').split(os.pathsep) if _f]
prefixes = [sys.prefix]
if sys.exec_prefix != sys.prefix:
prefixes.append(sys.exec_prefix)
for prefix in prefixes:
if prefix:
if sys.platform in ('os2emx', 'riscos'):
sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
elif os.sep == '/':
sitedirs.extend([os.path.join(prefix,
"lib",
"python" + sys.version[:3],
"site-packages"),
os.path.join(prefix, "lib", "site-python")])
else:
sitedirs.extend(
[prefix, os.path.join(prefix, "lib", "site-packages")]
)
if sys.platform == 'darwin':
# for framework builds *only* we add the standard Apple
# locations. Currently only per-user, but /Library and
# /Network/Library could be added too
if 'Python.framework' in prefix:
home = os.environ.get('HOME')
if home:
sitedirs.append(
os.path.join(home,
'Library',
'Python',
sys.version[:3],
'site-packages'))
lib_paths = get_path('purelib'), get_path('platlib')
for site_lib in lib_paths:
if site_lib not in sitedirs:
sitedirs.append(site_lib)
if site.ENABLE_USER_SITE:
sitedirs.append(site.USER_SITE)
sitedirs = list(map(normalize_path, sitedirs))
return sitedirs
def expand_paths(inputs):
"""Yield sys.path directories that might contain "old-style" packages"""
seen = {}
for dirname in inputs:
dirname = normalize_path(dirname)
if dirname in seen:
continue
seen[dirname] = 1
if not os.path.isdir(dirname):
continue
files = os.listdir(dirname)
yield dirname, files
for name in files:
if not name.endswith('.pth'):
# We only care about the .pth files
continue
if name in ('easy-install.pth', 'setuptools.pth'):
# Ignore .pth files that we control
continue
# Read the .pth file
f = open(os.path.join(dirname, name))
lines = list(yield_lines(f))
f.close()
# Yield existing non-dupe, non-import directory lines from it
for line in lines:
if not line.startswith("import"):
line = normalize_path(line.rstrip())
if line not in seen:
seen[line] = 1
if not os.path.isdir(line):
continue
yield line, os.listdir(line)
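def _expand_paths_example():
    # Illustrative sketch (not part of the original module): walk the site
    # directories plus any extra directories referenced from third-party
    # .pth files found inside them.
    for dirname, files in expand_paths(get_site_dirs()):
        print(dirname, '->', len(files), 'entries')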
def extract_wininst_cfg(dist_filename):
"""Extract configuration data from a bdist_wininst .exe
Returns a ConfigParser.RawConfigParser, or None
"""
f = open(dist_filename, 'rb')
try:
endrec = zipfile._EndRecData(f)
if endrec is None:
return None
prepended = (endrec[9] - endrec[5]) - endrec[6]
if prepended < 12: # no wininst data here
return None
f.seek(prepended - 12)
from setuptools.compat import StringIO, ConfigParser
import struct
tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
if tag not in (0x1234567A, 0x1234567B):
return None # not a valid tag
f.seek(prepended - (12 + cfglen))
cfg = ConfigParser.RawConfigParser(
{'version': '', 'target_version': ''})
try:
part = f.read(cfglen)
# part is in bytes, but we need to read up to the first null
# byte.
if sys.version_info >= (2, 6):
null_byte = bytes([0])
else:
null_byte = chr(0)
config = part.split(null_byte, 1)[0]
# Now the config is in bytes, but for RawConfigParser, it should
# be text, so decode it.
config = config.decode(sys.getfilesystemencoding())
cfg.readfp(StringIO(config))
except ConfigParser.Error:
return None
if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
return None
return cfg
finally:
f.close()
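# Layout sketch implied by the reads above (an inference, not official
# documentation): a bdist_wininst .exe carries its metadata just before the
# appended zip data, ending with a 12-byte trailer of three little-endian
# int32 values:
#
#   [... bitmap (bmlen bytes) ...][config text (cfglen bytes, NUL-terminated)]
#   [tag:int32][cfglen:int32][bmlen:int32]
#
# where tag must be 0x1234567A or 0x1234567B for the file to be accepted.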
def get_exe_prefixes(exe_filename):
"""Get exe->egg path translations for a given .exe file"""
prefixes = [
('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
('PLATLIB/', ''),
('SCRIPTS/', 'EGG-INFO/scripts/'),
('DATA/lib/site-packages', ''),
]
z = zipfile.ZipFile(exe_filename)
try:
for info in z.infolist():
name = info.filename
parts = name.split('/')
if len(parts) == 3 and parts[2] == 'PKG-INFO':
if parts[1].endswith('.egg-info'):
prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
break
if len(parts) != 2 or not name.endswith('.pth'):
continue
if name.endswith('-nspkg.pth'):
continue
if parts[0].upper() in ('PURELIB', 'PLATLIB'):
contents = z.read(name)
if PY3:
contents = contents.decode()
for pth in yield_lines(contents):
pth = pth.strip().replace('\\', '/')
if not pth.startswith('import'):
prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
finally:
z.close()
prefixes = [(x.lower(), y) for x, y in prefixes]
prefixes.sort()
prefixes.reverse()
return prefixes
def parse_requirement_arg(spec):
try:
return Requirement.parse(spec)
except ValueError:
raise DistutilsError(
"Not a URL, existing file, or requirement spec: %r" % (spec,)
)
class PthDistributions(Environment):
"""A .pth file with Distribution paths in it"""
dirty = False
def __init__(self, filename, sitedirs=()):
self.filename = filename
self.sitedirs = list(map(normalize_path, sitedirs))
self.basedir = normalize_path(os.path.dirname(self.filename))
self._load()
Environment.__init__(self, [], None, None)
for path in yield_lines(self.paths):
list(map(self.add, find_distributions(path, True)))
def _load(self):
self.paths = []
saw_import = False
seen = dict.fromkeys(self.sitedirs)
if os.path.isfile(self.filename):
f = open(self.filename, 'rt')
for line in f:
if line.startswith('import'):
saw_import = True
continue
path = line.rstrip()
self.paths.append(path)
if not path.strip() or path.strip().startswith('#'):
continue
# skip non-existent paths, in case somebody deleted a package
# manually, and duplicate paths as well
path = self.paths[-1] = normalize_path(
os.path.join(self.basedir, path)
)
if not os.path.exists(path) or path in seen:
self.paths.pop() # skip it
self.dirty = True # we cleaned up, so we're dirty now :)
continue
seen[path] = 1
f.close()
if self.paths and not saw_import:
self.dirty = True # ensure anything we touch has import wrappers
while self.paths and not self.paths[-1].strip():
self.paths.pop()
def save(self):
"""Write changed .pth file back to disk"""
if not self.dirty:
return
data = '\n'.join(map(self.make_relative, self.paths))
if data:
log.debug("Saving %s", self.filename)
data = (
"import sys; sys.__plen = len(sys.path)\n"
"%s\n"
"import sys; new=sys.path[sys.__plen:];"
" del sys.path[sys.__plen:];"
" p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
" sys.__egginsert = p+len(new)\n"
) % data
if os.path.islink(self.filename):
os.unlink(self.filename)
f = open(self.filename, 'wt')
f.write(data)
f.close()
elif os.path.exists(self.filename):
log.debug("Deleting empty %s", self.filename)
os.unlink(self.filename)
self.dirty = False
def add(self, dist):
"""Add `dist` to the distribution map"""
new_path = (
dist.location not in self.paths and (
dist.location not in self.sitedirs or
# account for '.' being in PYTHONPATH
dist.location == os.getcwd()
)
)
if new_path:
self.paths.append(dist.location)
self.dirty = True
Environment.add(self, dist)
def remove(self, dist):
"""Remove `dist` from the distribution map"""
while dist.location in self.paths:
self.paths.remove(dist.location)
self.dirty = True
Environment.remove(self, dist)
def make_relative(self, path):
npath, last = os.path.split(normalize_path(path))
baselen = len(self.basedir)
parts = [last]
sep = os.altsep == '/' and '/' or os.sep
while len(npath) >= baselen:
if npath == self.basedir:
parts.append(os.curdir)
parts.reverse()
return sep.join(parts)
npath, last = os.path.split(npath)
parts.append(last)
else:
return path
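def _make_relative_example():
    # Illustrative sketch (hypothetical paths, assuming POSIX and no
    # symlinks): locations under the .pth file's directory become
    # './'-anchored relative entries; anything else passes through as-is.
    pth = PthDistributions('/site/easy-install.pth')
    assert pth.make_relative('/site/pkgs/foo.egg') == './pkgs/foo.egg'
    assert pth.make_relative('/elsewhere/foo.egg') == '/elsewhere/foo.egg'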
def _first_line_re():
"""
Return a regular expression based on first_line_re suitable for matching
strings.
"""
if isinstance(first_line_re.pattern, str):
return first_line_re
# first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
return re.compile(first_line_re.pattern.decode())
def get_script_header(script_text, executable=sys_executable, wininst=False):
"""Create a #! line, getting options (if any) from script_text"""
first = (script_text + '\n').splitlines()[0]
match = _first_line_re().match(first)
options = ''
if match:
options = match.group(1) or ''
if options:
options = ' ' + options
if wininst:
executable = "python.exe"
else:
executable = nt_quote_arg(executable)
hdr = "#!%(executable)s%(options)s\n" % locals()
if not isascii(hdr):
# Non-ascii path to sys.executable, use -x to prevent warnings
if options:
if options.strip().startswith('-'):
options = ' -x' + options.strip()[1:]
# else: punt, we can't do it, let the warning happen anyway
else:
options = ' -x'
executable = fix_jython_executable(executable, options)
hdr = "#!%(executable)s%(options)s\n" % locals()
return hdr
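def _get_script_header_example():
    # Illustrative sketch (hypothetical interpreter path): interpreter
    # options found on an existing '#!' line are carried over into the
    # header generated for the wrapper script.
    hdr = get_script_header('#!/usr/bin/python -u\nprint(1)\n',
                            executable='/usr/bin/python')
    assert hdr.startswith('#!/usr/bin/python')
    assert hdr.rstrip().endswith('-u')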
def auto_chmod(func, arg, exc):
if func is os.remove and os.name == 'nt':
chmod(arg, stat.S_IWRITE)
return func(arg)
et, ev, _ = sys.exc_info()
reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))
def update_dist_caches(dist_path, fix_zipimporter_caches):
"""
Fix any globally cached `dist_path` related data
`dist_path` should be a path of a newly installed egg distribution (zipped
or unzipped).
sys.path_importer_cache contains finder objects that have been cached when
importing data from the original distribution. Any such finders need to be
cleared since the replacement distribution might be packaged differently,
e.g. a zipped egg distribution might get replaced with an unzipped egg
folder or vice versa. Having the old finders cached may then cause Python
to attempt loading modules from the replacement distribution using an
incorrect loader.
zipimport.zipimporter objects are Python loaders charged with importing
data packaged inside zip archives. If stale loaders referencing the
original distribution, are left behind, they can fail to load modules from
the replacement distribution. E.g. if an old zipimport.zipimporter instance
is used to load data from a new zipped egg archive, it may cause the
operation to attempt to locate the requested data in the wrong location -
one indicated by the original distribution's zip archive directory
information. Such an operation may then fail outright, e.g. report having
read a 'bad local file header', or even worse, it may fail silently &
return invalid data.
zipimport._zip_directory_cache contains cached zip archive directory
information for all existing zipimport.zipimporter instances and all such
instances connected to the same archive share the same cached directory
information.
If asked, and the underlying Python implementation allows it, we can fix
all existing zipimport.zipimporter instances instead of having to track
them down and remove them one by one, by updating their shared cached zip
archive directory information. This, of course, assumes that the
replacement distribution is packaged as a zipped egg.
If not asked to fix existing zipimport.zipimporter instances, we still do
our best to clear any remaining zipimport.zipimporter related cached data
that might somehow later get used when attempting to load data from the new
distribution and thus cause such load operations to fail. Note that when
tracking down such remaining stale data, we can not catch every conceivable
usage from here, and we clear only those that we know of and have found to
cause problems if left alive. Any remaining caches should be updated by
whomever is in charge of maintaining them, i.e. they should be ready to
handle us replacing their zip archives with new distributions at runtime.
"""
# There are several other known sources of stale zipimport.zipimporter
# instances that we do not clear here, but might if ever given a reason to
# do so:
# * Global setuptools pkg_resources.working_set (a.k.a. 'master working
# set') may contain distributions which may in turn contain their
# zipimport.zipimporter loaders.
# * Several zipimport.zipimporter loaders held by local variables further
# up the function call stack when running the setuptools installation.
# * Already loaded modules may have their __loader__ attribute set to the
# exact loader instance used when importing them. Python 3.4 docs state
# that this information is intended mostly for introspection and so is
# not expected to cause us problems.
normalized_path = normalize_path(dist_path)
_uncache(normalized_path, sys.path_importer_cache)
if fix_zipimporter_caches:
_replace_zip_directory_cache_data(normalized_path)
else:
# Here, even though we do not want to fix existing and now stale
# zipimporter cache information, we still want to remove it. Related to
# Python's zip archive directory information cache, we clear each of
# its stale entries in two phases:
# 1. Clear the entry so attempting to access zip archive information
# via any existing stale zipimport.zipimporter instances fails.
# 2. Remove the entry from the cache so any newly constructed
# zipimport.zipimporter instances do not end up using old stale
# zip archive directory information.
# This whole stale data removal step does not seem strictly necessary,
# but has been left in because it was done before we started replacing
# the zip archive directory information cache content if possible, and
# there are no relevant unit tests that we can depend on to tell us if
# this is really needed.
_remove_and_clear_zip_directory_cache_data(normalized_path)
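def _update_dist_caches_example(egg_path):
    # Illustrative sketch (hypothetical path): after replacing an egg on
    # disk, refresh the global import caches so stale finders and zip
    # directory data are not reused for the new file.
    update_dist_caches(egg_path, fix_zipimporter_caches=True)
    assert normalize_path(egg_path) not in sys.path_importer_cache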
def _collect_zipimporter_cache_entries(normalized_path, cache):
"""
Return zipimporter cache entry keys related to a given normalized path.
Alternative path spellings (e.g. those using different character case or
those using alternative path separators) related to the same path are
included. Any sub-path entries are included as well, i.e. those
corresponding to zip archives embedded in other zip archives.
"""
result = []
prefix_len = len(normalized_path)
for p in cache:
np = normalize_path(p)
if (np.startswith(normalized_path) and
np[prefix_len:prefix_len + 1] in (os.sep, '')):
result.append(p)
return result
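def _collect_entries_example():
    # Illustrative sketch (hypothetical POSIX paths): exact matches and
    # embedded sub-archives are collected; keys that merely share a string
    # prefix are not.
    cache = {'/eggs/foo.egg': 1, '/eggs/foo.egg/inner.zip': 2,
             '/eggs/foo.egg2': 3}
    found = _collect_zipimporter_cache_entries('/eggs/foo.egg', cache)
    assert sorted(found) == ['/eggs/foo.egg', '/eggs/foo.egg/inner.zip']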
def _update_zipimporter_cache(normalized_path, cache, updater=None):
"""
Update zipimporter cache data for a given normalized path.
Any sub-path entries are processed as well, i.e. those corresponding to zip
archives embedded in other zip archives.
Given updater is a callable taking a cache entry key and the original entry
(after already removing the entry from the cache), and expected to update
the entry and possibly return a new one to be inserted in its place.
Returning None indicates that the entry should not be replaced with a new
one. If no updater is given, the cache entries are simply removed without
any additional processing, the same as if the updater simply returned None.
"""
for p in _collect_zipimporter_cache_entries(normalized_path, cache):
# N.B. pypy's custom zipimport._zip_directory_cache implementation does
# not support the complete dict interface:
# * Does not support item assignment, thus not allowing this function
# to be used only for removing existing cache entries.
# * Does not support the dict.pop() method, forcing us to use the
# get/del patterns instead. For more detailed information see the
# following links:
# https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960
# https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99
old_entry = cache[p]
del cache[p]
new_entry = updater and updater(p, old_entry)
if new_entry is not None:
cache[p] = new_entry
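def _logging_uncache_example(normalized_path, cache, removed):
    # Illustrative sketch (hypothetical helper): an updater that records
    # each removed key. Returning None keeps the entry removed, matching
    # the updater protocol described in the docstring above.
    def updater(path, old_entry):
        removed.append(path)
        return None
    _update_zipimporter_cache(normalized_path, cache, updater=updater)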
def _uncache(normalized_path, cache):
_update_zipimporter_cache(normalized_path, cache)
def _remove_and_clear_zip_directory_cache_data(normalized_path):
def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
old_entry.clear()
_update_zipimporter_cache(
normalized_path, zipimport._zip_directory_cache,
updater=clear_and_remove_cached_zip_archive_directory_data)
# PyPy Python implementation does not allow directly writing to the
# zipimport._zip_directory_cache and so prevents us from attempting to correct
# its content. The best we can do there is clear the problematic cache content
# and have PyPy repopulate it as needed. The downside is that if there are any
# stale zipimport.zipimporter instances laying around, attempting to use them
# will fail due to not having its zip archive directory information available
# instead of being automatically corrected to use the new correct zip archive
# directory information.
if '__pypy__' in sys.builtin_module_names:
_replace_zip_directory_cache_data = \
_remove_and_clear_zip_directory_cache_data
else:
def _replace_zip_directory_cache_data(normalized_path):
def replace_cached_zip_archive_directory_data(path, old_entry):
# N.B. In theory, we could load the zip directory information just
# once for all updated path spellings, and then copy it locally and
# update its contained path strings to contain the correct
# spelling, but that seems like a way too invasive move (this cache
# structure is not officially documented anywhere and could in
# theory change with new Python releases) for no significant
# benefit.
old_entry.clear()
zipimport.zipimporter(path)
old_entry.update(zipimport._zip_directory_cache[path])
return old_entry
_update_zipimporter_cache(
normalized_path, zipimport._zip_directory_cache,
updater=replace_cached_zip_archive_directory_data)
def is_python(text, filename='<string>'):
"Is this string a valid Python script?"
try:
compile(text, filename, 'exec')
except (SyntaxError, TypeError):
return False
else:
return True
def is_sh(executable):
"""Determine if the specified executable is a .sh (contains a #! line)"""
try:
fp = open(executable)
magic = fp.read(2)
fp.close()
except (OSError, IOError):
return executable
return magic == '#!'
def nt_quote_arg(arg):
"""Quote a command line argument according to Windows parsing rules"""
result = []
    nb = 0
    needquote = (" " in arg) or ("\t" in arg)
if needquote:
result.append('"')
for c in arg:
if c == '\\':
nb += 1
elif c == '"':
# double preceding backslashes, then add a \"
result.append('\\' * (nb * 2) + '\\"')
nb = 0
else:
if nb:
result.append('\\' * nb)
nb = 0
result.append(c)
if nb:
result.append('\\' * nb)
if needquote:
result.append('\\' * nb) # double the trailing backslashes
result.append('"')
return ''.join(result)
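def _nt_quote_arg_examples():
    # Illustrative sketch of the quoting rules implemented above.
    assert nt_quote_arg('simple') == 'simple'          # no whitespace: unchanged
    assert nt_quote_arg('has space') == '"has space"'  # whitespace: wrapped in quotes
    assert nt_quote_arg('a"b') == 'a\\"b'              # embedded quote: backslash-escaped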
def is_python_script(script_text, filename):
"""Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
"""
if filename.endswith('.py') or filename.endswith('.pyw'):
return True # extension says it's Python
if is_python(script_text, filename):
return True # it's syntactically valid Python
if script_text.startswith('#!'):
# It begins with a '#!' line, so check if 'python' is in it somewhere
return 'python' in script_text.splitlines()[0].lower()
return False # Not any Python I can recognize
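def _is_python_script_examples():
    # Illustrative sketch of the three detection paths above.
    assert is_python_script('anything at all', 'tool.py')         # extension wins
    assert is_python_script('print("hi")', 'tool')                # valid Python syntax
    assert is_python_script('#!/usr/bin/python\n!!sh!!', 'tool')  # python shebang
    assert not is_python_script('#!/bin/sh\necho hi', 'tool')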
try:
from os import chmod as _chmod
except ImportError:
# Jython compatibility
def _chmod(*args):
pass
def chmod(path, mode):
log.debug("changing mode of %s to %o", path, mode)
try:
_chmod(path, mode)
except os.error:
e = sys.exc_info()[1]
log.debug("chmod failed: %s", e)
def fix_jython_executable(executable, options):
if sys.platform.startswith('java') and is_sh(executable):
        # The Jython workaround is not needed on Linux systems.
import java
if java.lang.System.getProperty("os.name") == "Linux":
return executable
# Workaround Jython's sys.executable being a .sh (an invalid
# shebang line interpreter)
if options:
# Can't apply the workaround, leave it broken
log.warn(
"WARNING: Unable to adapt shebang line for Jython,"
" the following script is NOT executable\n"
" see http://bugs.jython.org/issue1112 for"
" more information.")
else:
return '/usr/bin/env %s' % executable
return executable
class ScriptWriter(object):
"""
Encapsulates behavior around writing entry point scripts for console and
gui apps.
"""
template = textwrap.dedent("""
# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
__requires__ = %(spec)r
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.exit(
load_entry_point(%(spec)r, %(group)r, %(name)r)()
)
""").lstrip()
@classmethod
def get_script_args(cls, dist, executable=sys_executable, wininst=False):
"""
Yield write_script() argument tuples for a distribution's entrypoints
"""
gen_class = cls.get_writer(wininst)
spec = str(dist.as_requirement())
header = get_script_header("", executable, wininst)
for type_ in 'console', 'gui':
group = type_ + '_scripts'
for name, ep in dist.get_entry_map(group).items():
script_text = gen_class.template % locals()
for res in gen_class._get_script_args(type_, name, header,
script_text):
yield res
@classmethod
def get_writer(cls, force_windows):
if force_windows or sys.platform == 'win32':
return WindowsScriptWriter.get_writer()
return cls
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
# Simply write the stub with no extension.
yield (name, header + script_text)
class WindowsScriptWriter(ScriptWriter):
@classmethod
def get_writer(cls):
"""
Get a script writer suitable for Windows
"""
writer_lookup = dict(
executable=WindowsExecutableLauncherWriter,
natural=cls,
)
# for compatibility, use the executable launcher by default
launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
return writer_lookup[launcher]
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
"For Windows, add a .py extension"
ext = dict(console='.pya', gui='.pyw')[type_]
if ext not in os.environ['PATHEXT'].lower().split(';'):
warnings.warn("%s not listed in PATHEXT; scripts will not be "
"recognized as executables." % ext, UserWarning)
old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
old.remove(ext)
header = cls._adjust_header(type_, header)
blockers = [name + x for x in old]
yield name + ext, header + script_text, 't', blockers
@staticmethod
def _adjust_header(type_, orig_header):
"""
        Make sure 'pythonw' is used for gui scripts and 'python' is used for
        console scripts (regardless of what sys.executable is).
"""
pattern = 'pythonw.exe'
repl = 'python.exe'
if type_ == 'gui':
pattern, repl = repl, pattern
pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
new_header = pattern_ob.sub(string=orig_header, repl=repl)
clean_header = new_header[2:-1].strip('"')
if sys.platform == 'win32' and not os.path.exists(clean_header):
# the adjusted version doesn't exist, so return the original
return orig_header
return new_header
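def _adjust_header_example():
    # Illustrative sketch (hypothetical paths): gui scripts get pythonw.exe,
    # console scripts get python.exe. On win32 the swap is undone when the
    # adjusted interpreter doesn't exist, so assert only elsewhere.
    if sys.platform != 'win32':
        adjusted = WindowsScriptWriter._adjust_header(
            'gui', '#!/opt/py/python.exe\n')
        assert adjusted == '#!/opt/py/pythonw.exe\n'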
class WindowsExecutableLauncherWriter(WindowsScriptWriter):
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
"""
For Windows, add a .py extension and an .exe launcher
"""
if type_ == 'gui':
launcher_type = 'gui'
ext = '-script.pyw'
old = ['.pyw']
else:
launcher_type = 'cli'
ext = '-script.py'
old = ['.py', '.pyc', '.pyo']
hdr = cls._adjust_header(type_, header)
blockers = [name + x for x in old]
yield (name + ext, hdr + script_text, 't', blockers)
yield (
name + '.exe', get_win_launcher(launcher_type),
'b' # write in binary mode
)
if not is_64bit():
# install a manifest for the launcher to prevent Windows
# from detecting it as an installer (which it will for
# launchers like easy_install.exe). Consider only
# adding a manifest for launchers detected as installers.
# See Distribute #143 for details.
m_name = name + '.exe.manifest'
yield (m_name, load_launcher_manifest(name), 't')
# for backward-compatibility
get_script_args = ScriptWriter.get_script_args
def get_win_launcher(type):
"""
Load the Windows launcher (executable) suitable for launching a script.
`type` should be either 'cli' or 'gui'
Returns the executable as a byte string.
"""
launcher_fn = '%s.exe' % type
if platform.machine().lower() == 'arm':
launcher_fn = launcher_fn.replace(".", "-arm.")
if is_64bit():
launcher_fn = launcher_fn.replace(".", "-64.")
else:
launcher_fn = launcher_fn.replace(".", "-32.")
return resource_string('setuptools', launcher_fn)
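def _get_win_launcher_example():
    # Illustrative sketch: the resource name encodes launcher type and
    # architecture (e.g. 'cli-32.exe', 'cli-64.exe', 'gui-arm.exe'), and
    # the returned bytes are a PE executable.
    data = get_win_launcher('cli')
    assert data[:2] == b'MZ'  # PE executables start with 'MZ'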
def load_launcher_manifest(name):
manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
if PY2:
return manifest % vars()
else:
return manifest.decode('utf-8') % vars()
def rmtree(path, ignore_errors=False, onerror=auto_chmod):
"""Recursively delete a directory tree.
This code is taken from the Python 2.4 version of 'shutil', because
the 2.3 version doesn't really work right.
"""
if ignore_errors:
def onerror(*args):
pass
elif onerror is None:
def onerror(*args):
raise
names = []
try:
names = os.listdir(path)
except os.error:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
except os.error:
mode = 0
if stat.S_ISDIR(mode):
rmtree(fullname, ignore_errors, onerror)
else:
try:
os.remove(fullname)
except os.error:
onerror(os.remove, fullname, sys.exc_info())
try:
os.rmdir(path)
except os.error:
onerror(os.rmdir, path, sys.exc_info())
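def _rmtree_example():
    # Illustrative sketch: delete a whole tree; the default auto_chmod
    # error handler retries Windows read-only files after clearing the
    # read-only bit.
    import tempfile
    d = tempfile.mkdtemp()
    open(os.path.join(d, 'f.txt'), 'w').close()
    rmtree(d)
    assert not os.path.exists(d)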
def current_umask():
tmp = os.umask(0o022)
os.umask(tmp)
return tmp
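def _current_umask_example():
    # Illustrative sketch: the process umask can only be read by setting
    # it, so current_umask() writes a throwaway value and immediately
    # restores the original, leaving the mask unchanged.
    before = current_umask()
    after = current_umask()
    assert before == after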
def bootstrap():
# This function is called when setuptools*.egg is run using /bin/sh
import setuptools
argv0 = os.path.dirname(setuptools.__path__[0])
sys.argv[0] = argv0
sys.argv.append(argv0)
main()
def main(argv=None, **kw):
from setuptools import setup
from setuptools.dist import Distribution
class DistributionWithoutHelpCommands(Distribution):
common_usage = ""
def _show_help(self, *args, **kw):
with _patch_usage():
Distribution._show_help(self, *args, **kw)
if argv is None:
argv = sys.argv[1:]
with _patch_usage():
setup(
script_args=['-q', 'easy_install', '-v'] + argv,
script_name=sys.argv[0] or 'easy_install',
distclass=DistributionWithoutHelpCommands, **kw
)
@contextlib.contextmanager
def _patch_usage():
import distutils.core
USAGE = textwrap.dedent("""
usage: %(script)s [options] requirement_or_url ...
or: %(script)s --help
""").lstrip()
def gen_usage(script_name):
return USAGE % dict(
script=os.path.basename(script_name),
)
saved = distutils.core.gen_usage
distutils.core.gen_usage = gen_usage
try:
yield
finally:
distutils.core.gen_usage = saved
|
the-stack_0_272 | import matplotlib.pyplot as plt
import math, cv2, glob
import numpy as np
class ImageVisualizer:
# region vars
# endregion
def __init__(self, shape):
self.shape = shape
# to get a name of parameter variable to print a name in subplot
def param_to_str(self, obj, namespace):
return [name for name in namespace if namespace[name] is obj]
# tool to draw multiple images on plot
def plot_images(self, images, descriptions = None, invert_colors=False):
w = self.shape[0]
h = self.shape[1]
fig = plt.figure(figsize=(8, 8))
total_images = len(images)
rows = math.ceil(total_images/3)
columns = math.ceil(total_images/rows)
for i in range(1, total_images + 1):
fig.add_subplot(rows, columns, i)
            if invert_colors and len(images[i-1].shape) > 2:
                plt.imshow(cv2.cvtColor(images[i-1], cv2.COLOR_BGR2RGB))  # convert OpenCV's BGR to RGB for matplotlib
            else:
                plt.imshow(images[i-1])  # grayscale/binary images need no channel conversion
#plt.gca().set_title(self.param_to_str(images[i-1], globals()))
            if descriptions is not None:
plt.gca().set_title(descriptions[i-1])
        mng = plt.get_current_fig_manager()  # to maximize the window
        mng.window.state('zoomed')  # TkAgg-specific; may fail on other matplotlib backends
plt.show()
    def get_image_info(self, image, alias):
print(f'Image {alias} shape is: {image.shape}')
# tests solid green area on blank image
def draw_solid_area_on_blank_image(self, array_of_points):
blank_image = np.zeros(self.shape, dtype=np.uint8)
cv2.fillPoly(blank_image, pts = [array_of_points],color = (255,255,0))
return blank_image
def overlay_image_with_solid_area(self, main_image, image2):
return cv2.addWeighted(main_image, 0.8, image2, 0.2, 0)
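# Illustrative usage sketch ('sample.jpg' is a hypothetical path; plot_images
# assumes the TkAgg backend for its window-maximizing call).
if __name__ == '__main__':
    img = cv2.imread('sample.jpg')
    viz = ImageVisualizer(img.shape)
    pts = np.array([[50, 50], [200, 50], [200, 200], [50, 200]], np.int32)
    solid = viz.draw_solid_area_on_blank_image(pts)
    combined = viz.overlay_image_with_solid_area(img, solid)
    viz.plot_images([img, solid, combined],
                    ['original', 'solid area', 'overlay'],
                    invert_colors=True)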
|
the-stack_0_276 | # Copyright 2014, Solly Ross (see LICENSE.txt)
# Portions Copyright Python Software Foundation
# (see COPYRIGHT.txt and PYTHON-LICENSE.txt)
# main part
import code
import contextlib
import doctest
import getpass
import linecache
import pdb
import re
import sys
import traceback
import warnings
import six
from yalpt import ansi_helper as ansi
from yalpt import formatters
from yalpt import parsers
__all__ = ["LiterateInterpreter"]
@contextlib.contextmanager
def noop_mgr(writer):
yield writer
class LiterateInterpreter(code.InteractiveConsole):
def __init__(self, text_formatter=formatters.NoopFormatter(),
code_parser=parsers.DocTestParser(), use_ansi=True,
use_readline=True, env_driver=None, *args, **kwargs):
code.InteractiveConsole.__init__(self, *args, **kwargs)
self._output_checker = doctest.OutputChecker()
self._fakeout = doctest._SpoofOut()
self.chunks = None
self.exc_msg = None
self.name = 'literate program'
self.text_formatter = text_formatter
self.code_parser = code_parser
self.use_ansi = use_ansi
self.pause = True
self.interactive = True
if use_readline:
self._readline = __import__('readline')
self._add_readline()
else:
self._readline = None
self._env_driver = env_driver
self._correct_path()
def runfunction(self, func):
self.runcode(six.get_function_code(func))
def _correct_path(self):
def correct_path():
import sys
if '' not in sys.path:
sys.path.insert(0, '')
del sys
self.runfunction(correct_path)
def _add_readline(self):
self.locals['__console_locals__'] = self.locals
self.locals['readline'] = self._readline
def add_readline():
# add support for completion
import atexit
import os
import readline
import rlcompleter
completer = rlcompleter.Completer(__console_locals__) # noqa
readline.set_completer(completer.complete)
# tab completion
readline.parse_and_bind('Control-space: complete')
# history file
histfile = os.path.join(os.environ['HOME'],
'.literate-python-history')
try:
readline.read_history_file(histfile)
except IOError:
pass
atexit.register(readline.write_history_file, histfile)
del os, histfile, readline, rlcompleter
self.runfunction(add_readline)
del self.locals['__console_locals__']
del self.locals['readline']
def _interact_once(self, more):
try:
if more:
prompt = sys.ps2
else:
prompt = sys.ps1
try:
line = self.raw_input(prompt)
# Can be None if sys.stdin was redefined
encoding = getattr(sys.stdin, "encoding", None)
if encoding and not isinstance(line, six.text_type):
line = line.decode(encoding)
except EOFError:
self.write("\n")
return (False, None)
else:
if len(line) == 0:
blank = True
else:
blank = False
more = self.push(line)
except KeyboardInterrupt:
self.write("\nKeyboardInterrupt\n")
self.resetbuffer()
more = False
blank = False
return (blank, more)
# BEGIN FROM PYTHON STD LIB
# (extracted from the doctest module, made slight changes to the
# filename RE, and refactored _capture_output into its own method)
__LINECACHE_FILENAME_RE = re.compile(r'<literate '
r'(?P<name>.+)'
r'\[(?P<chunknum>\d+)\]>$')
def _patched_linecache_getlines(self, filename, module_globals=None):
m = self.__LINECACHE_FILENAME_RE.match(filename)
if m and m.group('name') == self.name:
chunk = self.chunks[int(m.group('chunknum'))]
source = chunk.source
if isinstance(source, six.text_type):
source = source.encode('ascii', 'backslashreplace')
return source.splitlines(True)
else:
return self.save_linecache_getlines(filename, module_globals)
@contextlib.contextmanager
def _capture_output(self):
save_stdout = sys.stdout
sys.stdout = self._fakeout
# set up pdb to work properly
save_set_trace = pdb.set_trace
self.debugger = doctest._OutputRedirectingPdb(save_stdout)
self.debugger.reset()
pdb.set_trace = self.debugger.set_trace
self.save_linecache_getlines = linecache.getlines
linecache.getlines = self._patched_linecache_getlines
save_displayhook = sys.displayhook
sys.displayhook = sys.__displayhook__
try:
yield sys.stdout
finally:
sys.stdout = save_stdout
pdb.set_trace = save_set_trace
linecache.getlines = self.save_linecache_getlines
sys.displayhook = save_displayhook
# END FROM PYTHON STD LIB
def _process_code_line(self, line, res, more):
if self._readline is not None:
self._readline.add_history(line)
if more:
self.write(sys.ps2)
else:
self.write(sys.ps1)
if self.use_ansi:
self.write(ansi.with_codes(line, 38, 5, 0xdf))
else:
self.write(line)
self.write("\n")
with self._capture_output() as output:
more = self.push(line)
res += output.getvalue()
output.truncate(0)
exc = self.exc_msg
self.exc_msg = None
return (res, exc, more)
def _format_tb(self):
try:
extype, value, tb = sys.exc_info()
sys.last_type = extype
sys.last_value = value
sys.last_traceback = tb
res = traceback.format_exception_only(extype, value)
finally:
tb = None
if res:
return res[-1]
def showtraceback(self):
try:
extype, value, tb = sys.exc_info()
sys.last_type = extype
sys.last_value = value
sys.last_traceback = tb
tblist = traceback.extract_tb(tb)
del tblist[:1]
lst = traceback.format_list(tblist)
if lst:
lst.insert(0, "Traceback (most recent call last):\n")
exc_msg = traceback.format_exception_only(extype, value)
lst[len(lst):] = exc_msg
finally:
tblist = tb = None
if self.filename.startswith("<literate "):
self._fakeout.write(''.join(lst))
else:
if sys.excepthook is sys.__excepthook__:
self.write(''.join(lst))
else:
sys.excepthook(extype, value, tb)
self.exc_msg = ''.join(exc_msg)
def _run_code(self, chunk, chunk_ind, pause=True):
self.filename = "<literate {name}[{num}]>".format(name=self.name,
num=chunk_ind)
more = False
res = ""
lines = chunk.source.split("\n")
for line in lines[:-1]:
res, exc, more = self._process_code_line(line, res, more)
if more:
res, exc, more = self._process_code_line(lines[-1], res, more)
if self.use_ansi:
mgr = ansi.BoxMaker(self)
else:
mgr = noop_mgr(self)
if chunk.want is not None or chunk.exc_msg is not None:
if len(res) > 0 or exc is not None:
# compare to the expected output
# TODO(sross): convert options to optionsflags
optionsflags = 0
checker = self._output_checker
if exc is None:
same = self._output_checker.check_output(chunk.want,
res, 0)
if not same:
self.write('\n')
with mgr as maker:
maker.write('Warning, output different from '
'expected:\n')
maker.write('================================'
'========\n\n')
diff = checker.output_difference(chunk, res,
optionsflags)
maker.write(diff)
self.write('\n')
else:
self.write(res)
elif chunk.exc_msg is None:
self.write('\n')
with mgr as maker:
maker.write('Warning, unexpected exception:\n')
maker.write('==============================\n\n')
maker.write(exc)
self.write('\n')
else:
same_ex = checker.check_output(chunk.exc_msg,
exc, optionsflags)
if not same_ex:
self.write('\n')
with mgr as maker:
maker.write('Warning, exception different from '
'expected:\n')
maker.write('=================================='
'=========\n\n')
diff = checker.output_difference(chunk, res,
optionsflags)
maker.write(diff)
self.write('\n')
else:
self.write(res)
else:
if exc is not None:
self.write('\n')
with mgr as maker:
maker.write('Warning, unexpected exception:\n')
maker.write('==============================\n\n')
maker.write(exc)
self.write('\n')
else:
self.write(res)
if not self.pause:
self.write(sys.ps1 + '\n')
def no_echo_input(self, prompt):
with warnings.catch_warnings():
res = getpass.getpass(prompt)
return res
def interact(self, lit_string, name, pause=True, interactive=True):
self.name = name
self.pause = pause
self.interactive = interactive
try:
sys.ps1
except AttributeError:
sys.ps1 = ">>> "
try:
sys.ps2
except AttributeError:
sys.ps2 = "... "
try:
sys.ps3
except AttributeError:
sys.ps3 = '>>> '
if self._env_driver is not None:
extra_locals = self._env_driver.setup()
self.locals.update(extra_locals)
extra_banner = self._env_driver.banner
driver_text = " ({0})".format(self._env_driver.DRIVER_NAME)
else:
extra_banner = ""
driver_text = ""
cprt = ('Type "help", "copyright", "credits" or "license" for '
'more information about Python.')
self.write("Literate Python Shell{driver_text}\nPython {ver} "
"on {platform}\n{cprt}\n"
"{extra_banner}\n\n".format(driver_text=driver_text,
ver=sys.version,
platform=sys.platform,
cprt=cprt,
extra_banner=extra_banner))
if not interactive and pause:
self.write('Press enter to continue after a code block\n\n')
try:
parser = self.code_parser
start = True
self.chunks = list(parser.parse(lit_string, name))
for chunk_ind, chunk in enumerate(self.chunks):
if isinstance(chunk, parsers.CodeChunk):
self._run_code(chunk, chunk_ind)
elif not chunk:
continue
else:
if not start and pause and interactive:
self.filename = "<stdin>"
more = False
blanks = 0
while blanks < 2:
blank, more = self._interact_once(more)
if blank:
blanks += 1
else:
blanks = 0
if more is None:
return
# reset exc_msg so it doesn't get
# raised after the next code block
self.exc_msg = None
elif not start and pause:
self.no_echo_input(sys.ps3)
self.write(self.text_formatter.format(chunk))
start = False
complete_msg = ("\n{file} complete! Continuing to interactive "
"console...\n\n".format(file=self.name))
if self.use_ansi:
self.write(ansi.with_codes(complete_msg, 1))
else:
self.write(complete_msg)
self.filename = "<stdin>"
self.locals['con'] = self
more = False
while more is not None:
blank, more = self._interact_once(more)
finally:
if self._env_driver is not None:
self._env_driver.teardown()
|
the-stack_0_279 | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
sys.path.append("..")
import unittest
import numpy as np
from op_test import OpTest
import paddle
import paddle.fluid as fluid
from paddle.fluid import compiler, Program, program_guard
# Situation 1: repeat_times is a list (without tensor)
class TestTileOpRank1(OpTest):
def setUp(self):
self.op_type = "tile"
self.place = paddle.device.MLUPlace(0)
self.__class__.use_mlu = True
self.init_data()
self.inputs = {'X': np.random.random(self.ori_shape).astype("float32")}
self.attrs = {'repeat_times': self.repeat_times}
output = np.tile(self.inputs['X'], self.repeat_times)
self.outputs = {'Out': output}
def init_data(self):
self.ori_shape = [100]
self.repeat_times = [2]
def test_check_output(self):
self.check_output_with_place(self.place)
def test_check_grad(self):
self.check_grad(['X'], 'Out')
# with dimension expanding
class TestTileOpRank2Expanding(TestTileOpRank1):
def init_data(self):
self.ori_shape = [120]
self.repeat_times = [2, 2]
class TestTileOpRank2(TestTileOpRank1):
def init_data(self):
self.ori_shape = [12, 14]
self.repeat_times = [2, 3]
class TestTileOpRank3_Corner(TestTileOpRank1):
def init_data(self):
self.ori_shape = (2, 10, 5)
self.repeat_times = (1, 1, 1)
class TestTileOpRank3_Corner2(TestTileOpRank1):
def init_data(self):
self.ori_shape = (2, 10, 5)
self.repeat_times = (2, 2)
class TestTileOpRank3(TestTileOpRank1):
def init_data(self):
self.ori_shape = (2, 4, 15)
self.repeat_times = (2, 1, 4)
class TestTileOpRank4(TestTileOpRank1):
def init_data(self):
self.ori_shape = (2, 4, 5, 7)
self.repeat_times = (3, 2, 1, 2)
# Situation 2: repeat_times is a list (with tensor)
class TestTileOpRank1_tensor_attr(OpTest):
def setUp(self):
self.op_type = "tile"
self.place = paddle.device.MLUPlace(0)
self.__class__.use_mlu = True
self.init_data()
repeat_times_tensor = []
for index, ele in enumerate(self.repeat_times):
repeat_times_tensor.append(("x" + str(index), np.ones(
(1)).astype('int32') * ele))
self.inputs = {
'X': np.random.random(self.ori_shape).astype("float32"),
'repeat_times_tensor': repeat_times_tensor,
}
self.attrs = {"repeat_times": self.infer_repeat_times}
output = np.tile(self.inputs['X'], self.repeat_times)
self.outputs = {'Out': output}
def init_data(self):
self.ori_shape = [100]
self.repeat_times = [2]
self.infer_repeat_times = [-1]
def test_check_output(self):
self.check_output_with_place(self.place)
def test_check_grad(self):
self.check_grad(['X'], 'Out')
class TestTileOpRank2_Corner_tensor_attr(TestTileOpRank1_tensor_attr):
def init_data(self):
self.ori_shape = [12, 14]
self.repeat_times = [1, 1]
self.infer_repeat_times = [1, -1]
class TestTileOpRank2_attr_tensor(TestTileOpRank1_tensor_attr):
def init_data(self):
self.ori_shape = [12, 14]
self.repeat_times = [2, 3]
self.infer_repeat_times = [-1, 3]
# Situation 3: repeat_times is a tensor
class TestTileOpRank1_tensor(OpTest):
def setUp(self):
self.op_type = "tile"
self.place = paddle.device.MLUPlace(0)
self.__class__.use_mlu = True
self.init_data()
self.inputs = {
'X': np.random.random(self.ori_shape).astype("float32"),
'RepeatTimes': np.array(self.repeat_times).astype("int32"),
}
self.attrs = {}
output = np.tile(self.inputs['X'], self.repeat_times)
self.outputs = {'Out': output}
def init_data(self):
self.ori_shape = [100]
self.repeat_times = [2]
def test_check_output(self):
self.check_output_with_place(self.place)
def test_check_grad(self):
self.check_grad(['X'], 'Out')
class TestTileOpRank2_tensor(TestTileOpRank1_tensor):
def init_data(self):
self.ori_shape = [12, 14]
self.repeat_times = [2, 3]
# Situation 4: input x is Integer
class TestTileOpInteger(OpTest):
def setUp(self):
self.op_type = "tile"
self.place = paddle.device.MLUPlace(0)
self.__class__.use_mlu = True
self.inputs = {
'X': np.random.randint(10, size=(4, 4, 5)).astype("int32")
}
self.attrs = {'repeat_times': [2, 1, 4]}
output = np.tile(self.inputs['X'], (2, 1, 4))
self.outputs = {'Out': output}
def test_check_output(self):
self.check_output_with_place(self.place)
# Situation 5: input x is Bool
class TestTileOpBoolean(OpTest):
def setUp(self):
self.op_type = "tile"
self.place = paddle.device.MLUPlace(0)
self.__class__.use_mlu = True
self.inputs = {'X': np.random.randint(2, size=(2, 4, 5)).astype("bool")}
self.attrs = {'repeat_times': [2, 1, 4]}
output = np.tile(self.inputs['X'], (2, 1, 4))
self.outputs = {'Out': output}
def test_check_output(self):
self.check_output_with_place(self.place)
# Situation 6: input x is Int64
class TestTileOpInt64_t(OpTest):
def setUp(self):
self.op_type = "tile"
self.place = paddle.device.MLUPlace(0)
self.__class__.use_mlu = True
self.inputs = {
'X': np.random.randint(10, size=(2, 4, 5)).astype("int64")
}
self.attrs = {'repeat_times': [2, 1, 4]}
output = np.tile(self.inputs['X'], (2, 1, 4))
self.outputs = {'Out': output}
def test_check_output(self):
self.check_output_with_place(self.place)
class TestTileError(unittest.TestCase):
def test_errors(self):
with program_guard(Program(), Program()):
x1 = fluid.create_lod_tensor(np.array([[-1]]), [[1]],
fluid.CPUPlace())
repeat_times = [2, 2]
self.assertRaises(TypeError, paddle.tile, x1, repeat_times)
x2 = fluid.layers.data(name='x2', shape=[4], dtype="uint8")
self.assertRaises(TypeError, paddle.tile, x2, repeat_times)
x3 = fluid.layers.data(name='x3', shape=[4], dtype="bool")
x3.stop_gradient = False
self.assertRaises(ValueError, paddle.tile, x3, repeat_times)
class TestTileAPIStatic(unittest.TestCase):
def test_api(self):
with program_guard(Program(), Program()):
repeat_times = [2, 2]
x1 = fluid.layers.data(name='x1', shape=[4], dtype="int32")
out = paddle.tile(x1, repeat_times)
positive_2 = fluid.layers.fill_constant([1], dtype="int32", value=2)
out2 = paddle.tile(x1, repeat_times=[positive_2, 2])
# Test python API
class TestTileAPI(unittest.TestCase):
def test_api(self):
with fluid.dygraph.guard():
np_x = np.random.random([12, 14]).astype("float32")
x = paddle.to_tensor(np_x)
positive_2 = np.array([2]).astype("int32")
positive_2 = paddle.to_tensor(positive_2)
repeat_times = np.array([2, 3]).astype("int32")
repeat_times = paddle.to_tensor(repeat_times)
out_1 = paddle.tile(x, repeat_times=[2, 3])
out_2 = paddle.tile(x, repeat_times=[positive_2, 3])
out_3 = paddle.tile(x, repeat_times=repeat_times)
assert np.array_equal(out_1.numpy(), np.tile(np_x, (2, 3)))
assert np.array_equal(out_2.numpy(), np.tile(np_x, (2, 3)))
assert np.array_equal(out_3.numpy(), np.tile(np_x, (2, 3)))
if __name__ == "__main__":
paddle.enable_static()
unittest.main()
|
the-stack_0_280 | # Copyright 2016-2019 Dirk Thomas
# Licensed under the Apache License, Version 2.0
from pathlib import Path
import pytest
from scspell import Report
from scspell import SCSPELL_BUILTIN_DICT
from scspell import spell_check
spell_check_words_path = Path(__file__).parent / 'spell_check.words'
@pytest.fixture(scope='module')
def known_words():
global spell_check_words_path
return spell_check_words_path.read_text().splitlines()
def test_spell_check(known_words):
source_filenames = [Path(__file__).parents[1] / 'setup.py'] + \
list(
(Path(__file__).parents[1] / 'colcon_metadata')
.glob('**/*.py')) + \
list((Path(__file__).parents[1] / 'test').glob('**/*.py'))
for source_filename in sorted(source_filenames):
print('Spell checking:', source_filename)
# check all files
report = Report(known_words)
spell_check(
[str(p) for p in source_filenames], base_dicts=[SCSPELL_BUILTIN_DICT],
report_only=report, additional_extensions=[('', 'Python')])
unknown_word_count = len(report.unknown_words)
assert unknown_word_count == 0, \
'Found {unknown_word_count} unknown words: '.format_map(locals()) + \
', '.join(sorted(report.unknown_words))
unused_known_words = set(known_words) - report.found_known_words
unused_known_word_count = len(unused_known_words)
assert unused_known_word_count == 0, \
'{unused_known_word_count} words in the word list are not used: ' \
.format_map(locals()) + ', '.join(sorted(unused_known_words))
def test_spell_check_word_list_order(known_words):
assert known_words == sorted(known_words), \
'The word list should be ordered alphabetically'
def test_spell_check_word_list_duplicates(known_words):
assert len(known_words) == len(set(known_words)), \
'The word list should not contain duplicates'
|
the-stack_0_282 | # -*- coding: utf-8 -*-
"""
sphinx.builders.gettext
~~~~~~~~~~~~~~~~~~~~~~~
The MessageCatalogBuilder class.
:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import unicode_literals
from os import path, walk
from codecs import open
from time import time
from datetime import datetime, tzinfo, timedelta
from collections import defaultdict
from uuid import uuid4
from six import iteritems
from sphinx.builders import Builder
from sphinx.util import split_index_msg
from sphinx.util.nodes import extract_messages, traverse_translatable_index
from sphinx.util.osutil import safe_relpath, ensuredir, find_catalog, SEP
from sphinx.util.console import darkgreen, purple, bold
from sphinx.locale import pairindextypes
POHEADER = r"""
# SOME DESCRIPTIVE TITLE.
# Copyright (C) %(copyright)s
# This file is distributed under the same license as the %(project)s package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: %(project)s %(version)s\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: %(ctime)s\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <[email protected]>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"""[1:]
class Catalog(object):
"""Catalog of translatable messages."""
def __init__(self):
self.messages = [] # retain insertion order, a la OrderedDict
self.metadata = {} # msgid -> file, line, uid
def add(self, msg, origin):
if not hasattr(origin, 'uid'):
            # Nodes that are replicated, like todo, don't have a uid;
            # i18n is unnecessary for them anyway.
return
if msg not in self.metadata: # faster lookup in hash
self.messages.append(msg)
self.metadata[msg] = []
self.metadata[msg].append((origin.source, origin.line, origin.uid))
class MsgOrigin(object):
"""
Origin holder for Catalog message origin.
"""
def __init__(self, source, line):
self.source = source
self.line = line
self.uid = uuid4().hex
class I18nBuilder(Builder):
"""
General i18n builder.
"""
name = 'i18n'
versioning_method = 'text'
    versioning_compare = None  # set later from the `gettext_uuid` config value
def __init__(self, app):
self.versioning_compare = app.env.config.gettext_uuid
super(I18nBuilder, self).__init__(app)
def init(self):
Builder.init(self)
self.catalogs = defaultdict(Catalog)
def get_target_uri(self, docname, typ=None):
return ''
def get_outdated_docs(self):
return self.env.found_docs
def prepare_writing(self, docnames):
return
def compile_catalogs(self, catalogs, message):
return
def write_doc(self, docname, doctree):
catalog = self.catalogs[find_catalog(docname,
self.config.gettext_compact)]
for node, msg in extract_messages(doctree):
catalog.add(msg, node)
if 'index' in self.env.config.gettext_enables:
# Extract translatable messages from index entries.
for node, entries in traverse_translatable_index(doctree):
for typ, msg, tid, main in entries:
for m in split_index_msg(typ, msg):
if typ == 'pair' and m in pairindextypes.values():
                            # skip built-in translated messages that are already
                            # incorporated in 'sphinx.util.nodes.process_index_entry'
continue
catalog.add(m, node)
# determine tzoffset once to remain unaffected by DST change during build
timestamp = time()
tzdelta = datetime.fromtimestamp(timestamp) - \
datetime.utcfromtimestamp(timestamp)
class LocalTimeZone(tzinfo):
def __init__(self, *args, **kw):
super(LocalTimeZone, self).__init__(*args, **kw)
self.tzdelta = tzdelta
def utcoffset(self, dt):
return self.tzdelta
def dst(self, dt):
return timedelta(0)
ltz = LocalTimeZone()
class MessageCatalogBuilder(I18nBuilder):
"""
Builds gettext-style message catalogs (.pot files).
"""
name = 'gettext'
def init(self):
I18nBuilder.init(self)
self.create_template_bridge()
self.templates.init(self)
def _collect_templates(self):
template_files = set()
for template_path in self.config.templates_path:
tmpl_abs_path = path.join(self.app.srcdir, template_path)
for dirpath, dirs, files in walk(tmpl_abs_path):
for fn in files:
if fn.endswith('.html'):
filename = path.join(dirpath, fn)
filename = filename.replace(path.sep, SEP)
template_files.add(filename)
return template_files
def _extract_from_template(self):
files = self._collect_templates()
self.info(bold('building [%s]: ' % self.name), nonl=1)
self.info('targets for %d template files' % len(files))
extract_translations = self.templates.environment.extract_translations
for template in self.app.status_iterator(
files, 'reading templates... ', purple, len(files)):
with open(template, 'r', encoding='utf-8') as f:
context = f.read()
for line, meth, msg in extract_translations(context):
origin = MsgOrigin(template, line)
self.catalogs['sphinx'].add(msg, origin)
def build(self, docnames, summary=None, method='update'):
self._extract_from_template()
I18nBuilder.build(self, docnames, summary, method)
def finish(self):
I18nBuilder.finish(self)
data = dict(
version = self.config.version,
copyright = self.config.copyright,
project = self.config.project,
ctime = datetime.fromtimestamp(
timestamp, ltz).strftime('%Y-%m-%d %H:%M%z'),
)
for textdomain, catalog in self.app.status_iterator(
iteritems(self.catalogs), "writing message catalogs... ",
darkgreen, len(self.catalogs),
lambda textdomain__: textdomain__[0]):
# noop if config.gettext_compact is set
ensuredir(path.join(self.outdir, path.dirname(textdomain)))
pofn = path.join(self.outdir, textdomain + '.pot')
pofile = open(pofn, 'w', encoding='utf-8')
try:
pofile.write(POHEADER % data)
for message in catalog.messages:
positions = catalog.metadata[message]
if self.config.gettext_location:
# generate "#: file1:line1\n#: file2:line2 ..."
pofile.write("#: %s\n" % "\n#: ".join("%s:%s" %
(safe_relpath(source, self.outdir), line)
for source, line, _ in positions))
if self.config.gettext_uuid:
# generate "# uuid1\n# uuid2\n ..."
pofile.write("# %s\n" % "\n# ".join(
uid for _, _, uid in positions))
# message contains *one* line of text ready for translation
message = message.replace('\\', r'\\'). \
replace('"', r'\"'). \
replace('\n', '\\n"\n"')
pofile.write('msgid "%s"\nmsgstr ""\n\n' % message)
finally:
pofile.close()
|
the-stack_0_283 | import json
from pathlib import Path
from typing import List, Optional
class ConfJSON:
def __init__(self, dir_path: Path, name: str, prefix: str):
self._dir_path = dir_path
self.name = f'{prefix.rstrip("-")}-{name}'
self._file_path = dir_path / name
self.path = str(self._file_path)
self.write_json()
def write_json(
self,
tests: Optional[List[str]] = None,
skipped_tests: Optional[List[str]] = None,
skipped_integrations: Optional[List[str]] = None,
nightly_integrations: Optional[List[str]] = None,
unmockable_integrations: Optional[List[str]] = None,
docker_thresholds: Optional[dict] = None
):
if tests is None:
tests = []
if skipped_tests is None:
            skipped_tests = []
if skipped_integrations is None:
skipped_integrations = []
if nightly_integrations is None:
nightly_integrations = []
if unmockable_integrations is None:
unmockable_integrations = []
if docker_thresholds is None:
docker_thresholds = {}
self._file_path.write_text(json.dumps({
'tests': tests,
'skipped_tests': skipped_tests,
'skipped_integrations': skipped_integrations,
'nightly_integrations': nightly_integrations,
'unmockable_integrations': unmockable_integrations,
'docker_thresholds': docker_thresholds
}))
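# A minimal usage sketch (hypothetical paths and names, not part of this module):
#
#   conf = ConfJSON(Path("/tmp"), "conf.json", "nightly-")
#   conf.write_json(tests=["TestPlaybook"], skipped_integrations=["Slack"])
#   print(conf.path)  # /tmp/conf.json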
|
the-stack_0_284 | #!/usr/bin/env python3
# Copyright (c) 2018-2020 The Wflscoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Verify that starting wflscoin with -h works as expected."""
from test_framework.test_framework import WflscoinTestFramework
from test_framework.util import assert_equal
class HelpTest(WflscoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def setup_network(self):
self.add_nodes(self.num_nodes)
# Don't start the node
def get_node_output(self, *, ret_code_expected):
ret_code = self.nodes[0].process.wait(timeout=60)
assert_equal(ret_code, ret_code_expected)
self.nodes[0].stdout.seek(0)
self.nodes[0].stderr.seek(0)
out = self.nodes[0].stdout.read()
err = self.nodes[0].stderr.read()
self.nodes[0].stdout.close()
self.nodes[0].stderr.close()
# Clean up TestNode state
self.nodes[0].running = False
self.nodes[0].process = None
self.nodes[0].rpc_connected = False
self.nodes[0].rpc = None
return out, err
def run_test(self):
self.log.info("Start wflscoin with -h for help text")
self.nodes[0].start(extra_args=['-h'])
# Node should exit immediately and output help to stdout.
output, _ = self.get_node_output(ret_code_expected=0)
assert b'Options' in output
self.log.info("Help text received: {} (...)".format(output[0:60]))
self.log.info("Start wflscoin with -version for version information")
self.nodes[0].start(extra_args=['-version'])
# Node should exit immediately and output version to stdout.
output, _ = self.get_node_output(ret_code_expected=0)
assert b'version' in output
self.log.info("Version text received: {} (...)".format(output[0:60]))
# Test that arguments not in the help results in an error
self.log.info("Start wflscoind with -fakearg to make sure it does not start")
self.nodes[0].start(extra_args=['-fakearg'])
# Node should exit immediately and output an error to stderr
_, output = self.get_node_output(ret_code_expected=1)
assert b'Error parsing command line arguments' in output
self.log.info("Error message received: {} (...)".format(output[0:60]))
if __name__ == '__main__':
HelpTest().main()
|
the-stack_0_285 | import sys
import click
import json
import random
from .bakefile import Bakefile, TaskFilter
from .exceptions import NoBakefileFound
from .clint import eng_join
import pygments
import pygments.lexers
import pygments.formatters
from .constants import SKIP_NEXT, SAFE_ENVIRONS
def indent(line):
return f'{" " * 4}{line}'
def do_help(exit=0):
with click.Context(entrypoint) as ctx:
help = entrypoint.get_help(ctx)
help = help.replace(
" bake",
str(click.style(" $ ", fg="green", bold=True))
+ str(click.style("bake", fg="yellow", bold=True)),
)
help = help.replace(
"the strangely familiar task–runner",
str(
click.style("the strangely familiar task–runner", fg="white", bold=True)
),
)
help = help.replace(
"Options", str(click.style("Options", fg="white", bold=True))
)
help = help.replace(
"--insecure", str(click.style("--insecure", fg="red", bold=True))
)
help = help.replace("--yes", str(click.style("--yes", fg="red", bold=True)))
help = help.replace(
"--allow", str(click.style("--allow", fg="green", bold=True))
)
help = help.replace(
"--no-deps", str(click.style("--no-deps", fg="yellow", bold=True))
)
help = help.replace(
"--continue", str(click.style("--continue", fg="red", bold=True))
)
help = help.replace(
"--environ-json", str(click.style("--environ-json", fg="green", bold=True))
)
help = help.replace("-e,", str(click.style("-e", fg="green", bold=True) + ","))
click.echo(help, err=True)
sys.exit(exit)
def echo_json(obj):
_json = json.dumps(obj, indent=2)
if sys.stdin.isatty():
_json = pygments.highlight(
_json, pygments.lexers.JsonLexer(), pygments.formatters.TerminalFormatter()
)
click.echo(_json, err=False)
@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
@click.argument(
"task",
type=click.STRING,
default="__LIST_ALL__",
envvar="BAKE_TASK",
# required=False,
)
@click.option(
"--bakefile",
"-b",
default="__BAKEFILE__",
envvar="BAKEFILE_PATH",
nargs=1,
type=click.Path(),
help="The Bakefile to use.",
)
@click.option(
"--list",
"_list",
default=False,
is_flag=True,
help="Lists available tasks (and their dependencies).",
)
@click.option(
"--levels",
"-l",
default=None,
nargs=1,
type=click.INT,
help="List only a given number of '/' levels of tasks.",
)
@click.option(
"--help", "-h", default=False, is_flag=True, help="Show this message and exit."
)
@click.option("--debug", default=False, is_flag=True, hidden=True)
@click.option("--source", default=False, nargs=1, hidden=True)
@click.option(
"--allow",
default=False,
nargs=1,
multiple=True,
hidden=False,
help="Whitelist an environment variable for use.",
)
@click.option(
"--no-deps",
default=False,
is_flag=True,
hidden=False,
help="Do not run dependent tasks.",
)
@click.option("--yes", is_flag=True, help="Set medium–security prompts to yes.")
@click.option(
"--continue",
"_continue",
is_flag=True,
# type=click.BOOL,
help="Continue, if a task fails.",
)
@click.option(
"--interactive",
"-i",
is_flag=True,
# type=click.BOOL,
help="Run in interactive mode.",
)
@click.option(
"--insecure",
is_flag=True,
# type=click.BOOL,
help="Inherit parent shell's environment variables.",
)
@click.argument("arguments", nargs=-1, type=click.STRING)
@click.option(
"--silent",
"-s",
is_flag=True,
# type=click.BOOL,
help="Reduce output.",
envvar="BAKE_SILENT",
)
@click.option(
"--sort", is_flag=True, type=click.BOOL, help="Sort tasks, alphabetially."
)
@click.option(
"--environ-json",
"-e",
nargs=1,
type=click.STRING,
help="Provide environment variables via JSON.",
)
@click.option(
"--json",
"-j",
"_json",
is_flag=True,
# type=click.BOOL,
help="Output in JSON format (stdout).",
)
def entrypoint(
*,
task,
bakefile,
arguments,
_list,
levels,
_continue,
environ_json,
debug,
silent,
sort,
insecure,
allow,
_json,
no_deps,
interactive,
yes,
help,
source,
):
"""bake — the strangely familiar task–runner."""
if help:
do_help(0)
# Default to list behavior, when no task is provided.
if _json or source:
silent = True
# Allow explicitly–passed environment variables.
SAFE_ENVIRONS.extend(allow)
# Enable list functionality, by default.
if task == "__LIST_ALL__":
_list = True
task = None
# Establish the Bakefile.
try:
if bakefile == "__BAKEFILE__":
bakefile = Bakefile.find(root=".", filename="Bakefile")
else:
bakefile = Bakefile(path=bakefile)
except NoBakefileFound:
click.echo(click.style("No Bakefile found!", fg="red"), err=True)
do_help(1)
sys.exit(0)
if debug:
click.echo(f" + Bakefile: {bakefile.path}", err=True)
# --source (internal API)
if source:
        def echo_generator(gen):
            for line in gen:
                click.echo(line)
if source == "__init__":
source = random.choice(list(bakefile.tasks.keys()))
task = bakefile.tasks[source]
source = task.gen_source(
sources=[task.bashfile.funcs_source, task.bashfile.root_source]
)
else:
task = bakefile.tasks[source]
source = task.gen_source(
sources=[
task.bashfile.funcs_source,
task.bashfile.root_source,
task.source,
]
)
for source_line in source:
click.echo(source_line)
sys.exit(0)
if not insecure:
for key in bakefile.environ:
if key not in SAFE_ENVIRONS:
del bakefile.environ[key]
if environ_json:
bakefile.add_environ_json(environ_json)
argv = []
environ = []
for i, argument in enumerate(arguments[:]):
if "=" in argument:
key, value = argument.split("=", 1)
environ.append((key, value))
else:
argv.append(argument)
if debug:
click.echo(f" + argv: {argv!r}", err=True)
click.echo(f" + environ: {environ!r}", err=True)
click.echo(err=True)
for env in environ:
key, value = env
if debug:
click.echo(
f" + Setting environ: {click.style(key, fg='red')} {click.style('=', fg='white')} {value}.",
err=True,
)
bakefile.add_environ(key, value)
bakefile.add_args(*argv)
if _list:
__list_json = {"tasks": {}}
# Enable level filtering.
if levels is not None:
task_list = []
for _task in bakefile.tasks:
if len(_task.split("/")) <= levels:
task_list.append(_task)
else:
task_list = bakefile.tasks
if sort:
task_list = sorted(task_list)
for _task in task_list:
depends_on = bakefile[_task].depends_on(
include_filters=False, recursive=True
)
if no_deps:
depends_on = ()
if depends_on:
deps = [str(a) for a in depends_on]
deps = f"\n {click.style('+', fg='yellow', bold=True)} {eng_join(deps, conj='&')}."
else:
deps = ""
colon = "" if not deps else "…"
__list_json["tasks"].update(
{_task: {"depends_on": [str(d) for d in depends_on]}}
)
if not silent:
click.echo(
f" {click.style('-', fg='green', bold=True)} {click.style(_task, bold=True)}{colon}{deps}",
err=False,
)
if not silent:
tasks_unechoed = len(bakefile.tasks) - len(task_list)
if tasks_unechoed:
bake_command = str(click.style(f"bake --levels {levels + 1}", fg="red"))
click.echo(
f"Note: {tasks_unechoed} more tasks are available. "
f"Please use $ {bake_command} to see more.",
err=True,
)
if _json:
echo_json(__list_json)
sys.exit(0)
if task:
try:
task = bakefile[task]
except KeyError:
click.echo(click.style(f"Task {task} does not exist!", fg="red"))
sys.exit(1)
def execute_task(task, *, silent=False):
try:
edges = list(bakefile.graph.out_edges(task))[0]
except IndexError:
edges = list()
skips = []
for edge in edges:
if edge.do_skip is not None:
skips.append(edge.do_skip)
if not all(skips or [False]):
# TODO: fully implement this?
if "@" in f"{task}":
silent = True
if not silent:
click.echo(
click.style(" + ", fg="white")
+ click.style(f"Executing {task}", fg="yellow")
+ click.style(":", fg="white"),
err=True,
)
usually_bash_task = task.execute(
yes=yes, debug=debug, silent=silent, interactive=interactive
)
if not _continue:
if hasattr(usually_bash_task, "ok"):
if usually_bash_task.return_code > 0:
if not silent:
click.echo(
click.style(f"Task {task} failed!", fg="red"),
err=True,
)
sys.exit(usually_bash_task.return_code)
# This happens when it's a task filter.
elif isinstance(usually_bash_task, tuple):
key, value = (
usually_bash_task
) # But, in this instance, clearly isn't.
else:
click.echo(
click.style(" + ", fg="green")
+ click.style(f"Skipping {task}", fg="white")
+ click.style(".", fg="white"),
err=True,
)
if not no_deps:
tasks = task.depends_on(recursive=True) + [task]
else:
tasks = [task]
for task in tasks:
execute_task(task, silent=silent)
if not silent:
click.echo(
click.style(" + ", fg="white")
+ click.style("Done", fg="green")
+ click.style(".", fg="white"),
err=True,
)
sys.exit(0)
if __name__ == "__main__":
entrypoint()
|
the-stack_0_286 | #!/usr/bin/python3
ARD_DEVICE_ID = "1"
ADC_READ_INTERVAL = 0.1
ADC_KEEP_VALS = 20
LIGHT_KEEP_VALS = 20
LOGFILE = "./logs/smarthome.log"
# --- Unicodes --- #
HOME = ' ⌂'
HOMEON = ' ☗'
KEY = ' ⚿'
CLOUD = ' ☁'
STAR = ' ★'
SUN = ' ☀'
REFRESH = ' 🗘'
CHECKED = ' ✔'
POINT = ' ☛'
MISSING = ' ✘'
MODESSYMBOL = ' ❖'
MENUSYMBOL = ' ☰'
TERMOMETER = ' 🌡'
MUSIC = ' ♫'
EMAIL = ' ✉'
NOTIFICATION = ' 🔔'
DEGREES = ' °C'
SMILE = ' ☺'
MOON = ' ☾'
QUATER_MOON = '☽'
PEACE = ' ☮'
# --- Modes --- #
LIGHT_TRESHOLD = 400
|
the-stack_0_288 | import pytest
import os
import utils
import io
import numpy
import json
import pickle
import gzip
from utils import kfp_client_utils
from utils import minio_utils
from utils import sagemaker_utils
def run_predict_mnist(boto3_session, endpoint_name, download_dir):
""" https://github.com/awslabs/amazon-sagemaker-examples/blob/a8c20eeb72dc7d3e94aaaf28be5bf7d7cd5695cb
/sagemaker-python-sdk/1P_kmeans_lowlevel/kmeans_mnist_lowlevel.ipynb """
# Download and load dataset
region = boto3_session.region_name
download_path = os.path.join(download_dir, "mnist.pkl.gz")
boto3_session.resource("s3", region_name=region).Bucket(
"sagemaker-sample-data-{}".format(region)
).download_file("algorithms/kmeans/mnist/mnist.pkl.gz", download_path)
with gzip.open(download_path, "rb") as f:
train_set, valid_set, test_set = pickle.load(f, encoding="latin1")
# Function to create a csv from numpy array
def np2csv(arr):
csv = io.BytesIO()
numpy.savetxt(csv, arr, delimiter=",", fmt="%g")
return csv.getvalue().decode().rstrip()
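    # e.g. np2csv(numpy.array([[1.0, 2.5]])) -> "1,2.5" (one CSV row, no trailing newline)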
# Run prediction on an image
runtime = boto3_session.client("sagemaker-runtime")
payload = np2csv(train_set[0][30:31])
response = runtime.invoke_endpoint(
EndpointName=endpoint_name, ContentType="text/csv", Body=payload,
)
return json.loads(response["Body"].read().decode())
@pytest.mark.parametrize(
"test_file_dir",
[
pytest.param(
"resources/config/kmeans-mnist-endpoint", marks=pytest.mark.canary_test
)
],
)
def test_create_endpoint(
kfp_client, experiment_id, boto3_session, sagemaker_client, test_file_dir
):
download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated"))
test_params = utils.load_params(
utils.replace_placeholders(
os.path.join(test_file_dir, "config.yaml"),
os.path.join(download_dir, "config.yaml"),
)
)
# Generate random prefix for model, endpoint config and endpoint name
# to avoid errors if resources with same name exists
test_params["Arguments"]["model_name"] = test_params["Arguments"][
"endpoint_config_name"
] = test_params["Arguments"]["endpoint_name"] = input_endpoint_name = (
utils.generate_random_string(5) + "-" + test_params["Arguments"]["model_name"]
)
print(f"running test with model/endpoint name: {input_endpoint_name}")
_, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline(
kfp_client,
experiment_id,
test_params["PipelineDefinition"],
test_params["Arguments"],
download_dir,
test_params["TestName"],
test_params["Timeout"],
)
try:
outputs = {"sagemaker-deploy-model": ["endpoint_name"]}
output_files = minio_utils.artifact_download_iterator(
workflow_json, outputs, download_dir
)
output_endpoint_name = utils.read_from_file_in_tar(
output_files["sagemaker-deploy-model"]["endpoint_name"]
)
print(f"endpoint name: {output_endpoint_name}")
# Verify output from pipeline is endpoint name
assert output_endpoint_name == input_endpoint_name
# Verify endpoint is running
assert (
sagemaker_utils.describe_endpoint(sagemaker_client, input_endpoint_name)[
"EndpointStatus"
]
== "InService"
)
# Validate the model for use by running a prediction
result = run_predict_mnist(boto3_session, input_endpoint_name, download_dir)
print(f"prediction result: {result}")
assert json.dumps(result, sort_keys=True) == json.dumps(
test_params["ExpectedPrediction"], sort_keys=True
)
utils.remove_dir(download_dir)
finally:
# delete endpoint
sagemaker_utils.delete_endpoint(sagemaker_client, input_endpoint_name)
|
the-stack_0_291 | import random
class PriQ(object):
'''Binary-Heap based Priority Queue with uniquely named elements, name may \
be any hashable type. Defaults to min-heap, set maxpq=True for max-heap. \
Public methods: put, get, remove, update, contains, front, get_priority.'''
def __init__(self, maxpq = False):
self.q =[] # The priority queue, contains (priority, name) tuples
self.elements = {} # Dict of all elements currently in queue, with current index
self.maxmod = -1 if maxpq else 1 # modify pq to max instead of min
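        # Invariant: for every (priority, name) pair at self.q[i],
        # self.elements[name] == i. This lets update()/remove() locate an
        # element in O(1) and re-heapify in O(log n) instead of scanning.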
def __len__(self):
return len(self.q)
def __str__(self):
return str(self.q)
def _propUp(self, index):
'''Propagate up element to proper place in binary heap'''
current = index
while current > 0:
parent = (current - 1) // 2 # parent node
# swap with parent until parent <= child (>= for maxPQ)
if self.q[parent][0] * self.maxmod <= self.q[current][0] * self.maxmod:
break
self.elements[self.q[current][1]], self.elements[self.q[parent][1]] = parent, current
self.q[parent], self.q[current] = self.q[current], self.q[parent]
current = parent
def _propDown(self, index):
'''Propagate down element to proper place in binary heap'''
if len(self.q) == 1:
self.elements[self.q[0][1]] = 0 # update index of last element
current = index
while current * 2 + 1 < len(self.q): # node has a child
left, right = current * 2 + 1, current * 2 + 2
if right == len(self.q): # left child only
if self.q[current][0] * self.maxmod >= self.q[left][0] * self.maxmod:
# swap with left child and update elements dict
self.elements[self.q[current][1]], self.elements[self.q[left][1]] = left, current
self.q[current], self.q[left] = self.q[left], self.q[current]
break
if self.maxmod == 1: # min PQ
minChild = left if self.q[left] <= self.q[right] else right
if self.q[current] <= self.q[minChild]: # swap with lowest priority child as needed
break
self.elements[self.q[current][1]], self.elements[self.q[minChild][1]] = minChild, current
self.q[current], self.q[minChild] = self.q[minChild], self.q[current]
current = minChild
else: # max PQ
maxChild = left if self.q[left] >= self.q[right] else right
if self.q[current] >= self.q[maxChild]: # swap with highest priority child as needed
break
self.elements[self.q[current][1]], self.elements[self.q[maxChild][1]] = maxChild, current
self.q[current], self.q[maxChild] = self.q[maxChild], self.q[current]
current = maxChild
def put(self, name, priority):
'''Add named element to priorty queue and place in binary heap'''
if self.contains(name): return ValueError(name)
self.q.append((priority, name))
self.elements[name] = len(self.q) - 1
self._propUp(self.elements[name])
def front(self):
'''Element at front of queue'''
return self.q[0]
def get_priority(self, name):
'''Current priority of named element'''
return self.q[self.elements[name]][0]
def get(self):
'''Return element at front of queue and re-heapify queue'''
if not self.q:
return False # empty queue
result = self.q[0]
del(self.elements[result[1]])
if len(self.q) > 1:
self.q[0] = self.q.pop()
self._propDown(0)
else:
self.q = []
self.elements = {}
return result
def update(self, name, priority):
'''Change priority of named element and re-heapify'''
if not self.contains(name): return ValueError(name)
index = self.elements[name]
old_priority = self.q[index][0]
self.q[index] = (priority, name)
if priority * self.maxmod < old_priority * self.maxmod:
self._propUp(index)
if priority * self.maxmod > old_priority * self.maxmod:
self._propDown(index)
def contains(self, name):
'''True if name currently exists in the queue'''
return (name in self.elements)
def remove(self, name):
'''Remove named element and re-heapify'''
if not self.contains(name): return ValueError(name)
index = self.elements[name]
old_priority = self.q[index][0]
del(self.elements[name])
if len(self.q) > 1:
self.q[index] = self.q.pop() # replace with last item in queue
self.elements[self.q[index][1]] = index
# re-heapify
if self.q[index][0] * self.maxmod < old_priority * self.maxmod:
self._propUp(index)
elif self.q[index][0] * self.maxmod > old_priority * self.maxmod:
self._propDown(index)
else:
self.q = []
###############################################################################
# Following are examples, including Dijkstra's shortest path
###############################################################################
if __name__ == "__main__":
# Sequential letter names with random integer weights (demonstration)
pq = PriQ()
name = 'a'
for _ in range(26):
pq.put(name, random.randint(1,99))
name = chr(ord(name) + 1)
print("Initial: ['a'-'z' = randint(1,99)]\n", pq, "\nLength: ", len(pq))
print("Priority of 'a':", pq.get_priority('a'))
print("Front:", pq.front())
    pq.put('a', 17)  # duplicate put; returns ValueError instead of raising, so it's a no-op
pq.update('a', 1)
pq.update('b', 99)
print("After updating 'a' to 1 and 'b' to 99:\n", pq)
pq.remove('c')
print("After removing 'c':\n", pq)
print("Contains 'c'?", pq.contains('c'))
print("Get elements until empty:")
while pq:
print(pq.get(), "contains 'd'?", pq.contains('d'))
'''
# Max-heap priority queue with random float weights and integer names
pq = PriQ(True)
for i in range(20):
pq.put(i, random.random())
print("['1'-'20' = random()]:\n", pq)
while pq: print(pq.get())
'''
# Dijkstra's shortest path algorithm example
def dsp(G, s):
'''Dijkstra's shortest path algorithm. Input: weighted graph G, an\
adjacency list mapping each named node to it's neighbors with a {name: weight}\
dict, and s, the name of the starting node. Outputs a mapping for each node\
to a tuple of (weight of shortest path, name of predecessor node).
'''
pq = PriQ()
result = {}
predecessor = {s: None}
for key in G.keys():
pq.put(key, 0 if key == s else float('inf'))
while pq:
cur_weight, cur_node = pq.get()
result[cur_node] = (cur_weight, predecessor[cur_node])
for adj in G[cur_node]: # for neighboring nodes
# update weight from starting node if less than previous paths
if adj not in result:
if pq.get_priority(adj) > cur_weight + G[cur_node][adj]:
pq.update(adj, cur_weight + G[cur_node][adj])
predecessor[adj] = cur_node
return result
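    # With the binary-heap PriQ above, dsp runs in O((V + E) log V):
    # each node is extracted once and each edge triggers at most one update().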
G = {
'a': {'b': 2, 'f': 6, 'c': 1},
'b': {'a': 2, 'd': 6, 'f': 3, 'g': 4},
'c': {'a': 1, 'f': 2, 'g': 3, 'e': 5},
'd': {'b': 6, 'f': 2, 'g': 1, 'h': 2},
'e': {'c': 5, 'f': 4, 'g': 2, 'h': 3},
'f': {'a': 6, 'b': 3, 'd': 2, 'g': 7, 'e': 4, 'c': 2},
'g': {'f': 7, 'b': 4, 'd': 1, 'h': 5, 'e': 2, 'c': 3},
'h': {'d': 2, 'g': 5, 'e': 3}
}
print(
'''
B 6 D
2 3 2 4 1 2
A 6 F 7 G 5 H
1 2 3 4 2 3
C 5 E
''')
print("\nCalculating shortest paths with Dijkstra's algorithm...")
print("Shortest paths from 'a':", dsp(G, 'a'))
print("Shortest paths from 'd':", dsp(G, 'd')) |
the-stack_0_292 | # Lint as: python3
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An abstract layer for processing sequences step-by-step.
E.g.::
def ProcessSeq(step, external_inputs, input_batch):
prepared_inputs = step.PrepareExternalInputs(
step.theta, external_inputs)
batch_size, T = tf.shape(input_batch.paddings)[:2]
state = step.ZeroState(
step.theta, prepared_inputs, batch_size)
for t in range(T):
step_inputs = input_batch.Transform(lambda x: x[:, i, ...])
step_outputs, state = step.FProp(
step.theta, prepared_inputs, step_inputs, state)
(processing step_outputs...)
"""
import collections
from lingvo import compat as tf
from lingvo.core import base_layer
from lingvo.core import builder_layers
from lingvo.core import py_utils
from lingvo.core import recurrent
class Step(base_layer.BaseLayer):
"""A layer that processes input sequences step-by-step.
This can be seen as an RNNCell extended with optional external inputs.
"""
def PrepareExternalInputs(self, theta, external_inputs):
"""Returns the prepared external inputs, e.g., packed_src for attention."""
if not external_inputs:
external_inputs = py_utils.NestedMap()
packed = external_inputs.DeepCopy()
for name, child in self.children.items():
child_external_inputs = external_inputs.get(name, py_utils.NestedMap())
if isinstance(child, (tuple, list)):
output = []
for i, sub in enumerate(child):
if isinstance(sub, Step):
output.append(
sub.PrepareExternalInputs(theta[name][i],
child_external_inputs))
if output:
if len(output) != len(child):
raise ValueError('Expecting child list to be instances of Step.')
packed[name] = type(child)(output)
elif isinstance(child, Step):
packed[name] = child.PrepareExternalInputs(theta[name],
child_external_inputs)
return packed
def ZeroState(self, theta, prepared_inputs, batch_size):
"""Returns the initial state given external inputs and batch size.
Args:
theta: A `.NestedMap` object containing weights' values of this layer and
its children layers.
prepared_inputs: External inputs returned by PrepareExternalInputs().
batch_size: An int scalar representing the batch size of per-step inputs.
Returns:
A `.NestedMap` representing the initial state, which can be passed to
FProp() for processing the first time step.
"""
state0 = py_utils.NestedMap()
for name, child in self.children.items():
if isinstance(child, (tuple, list)):
output = []
for i, sub in enumerate(child):
if isinstance(sub, Step):
output.append(
sub.ZeroState(theta[name][i], prepared_inputs[name][i],
batch_size))
if output:
if len(output) != len(child):
raise ValueError('Expecting child list to be instances of Step.')
state0[name] = type(child)(output)
elif isinstance(child, Step):
state0[name] = child.ZeroState(theta[name], prepared_inputs[name],
batch_size)
return state0
def FProp(self, theta, prepared_inputs, step_inputs, padding, state0):
"""Forward function.
step_inputs, state0, step_outputs, and state1 should each be a `.NestedMap`
of tensor values. Each tensor must be of shape [batch_size ...]. The
structure of NestedMaps are determined by the implementation. state0 and
state1 must have exactly the same structure and tensor shapes.
Args:
theta: A `.NestedMap` object containing weights' values of this layer and
its children layers.
prepared_inputs: External inputs returned by PrepareExternalInputs().
step_inputs: The inputs for this time step.
padding: A 0/1 float tensor of shape [batch_size]; 1.0 means that this
batch element is empty in this step.
state0: The previous recurrent state.
Returns:
A tuple (step_outputs, state1).
- outputs: The outputs of this step.
- state1: The next recurrent state.
"""
raise NotImplementedError(type(self))
class StatelessLayerStep(Step):
"""Allows BaseLayer subclasses to be used as Steps.
Layers used with this class should be stateless: they should not return
anything that must be passed back in the next invocation.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('layer', None, 'Params for the layer that this step wraps.')
return p
def __init__(self, params):
super().__init__(params)
p = params
self.CreateChild('layer', p.layer)
def FProp(self, theta, prepared_inputs, step_inputs, padding, state0):
"""Perform inference on a stateless layer.
Args:
theta: A `.NestedMap` object containing weights' values of this layer and
its children layers.
prepared_inputs: unused.
step_inputs: A NestedMap containing 'inputs', which are passed directly to
the layer.
padding: A 0/1 float tensor of shape [batch_size]; 1.0 means that this
batch element is empty in this step.
state0: unused.
Returns:
(output, state1), where output is the output of the layer, and
state1 is an empty NestedMap.
"""
del state0
del prepared_inputs
args = {}
if padding is not None:
args['padding'] = padding
output = self.layer.FProp(theta.layer, step_inputs.inputs, **args)
return output, py_utils.NestedMap()
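# A minimal usage sketch (hypothetical layer params, not defined in this file):
#
#   p = StatelessLayerStep.Params()
#   p.name = 'wrapped'
#   p.layer = some_stateless_layer_params  # any BaseLayer whose FProp keeps no state
#   step = p.Instantiate()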
class StackStep(Step):
"""A stack of steps.
Each sub-step is assumed to accept step_inputs of type NestedMap(inputs=[])
and return a primary output of type NestedMap(output=tensor). The
output of layer n-1 is sent to input of layer n.
Per-step context vectors and per-sequence context vectors can also be
supplied; see FProp for more details.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'sub', [], 'A list of sub-stack params. Each layer is '
'expected to accept its input as NestedMap(inputs=[]), and '
'produce output as NestedMap(output=tensor). '
'The external_inputs parameter is passed directly to the '
'PrepareExternalInputs method of each sub-step. ')
p.Define(
'residual_start', -1, 'An index of the layer where residual '
'connections start. Setting this parameter to a negative value turns '
'off residual connections.'
'More precisely, when i >= residual_start, the output of each step '
'is defined as: '
'output[i] = output[i - residual_stride] + sub[i](output[i - 1]) '
'where output[-1] is the step input.')
p.Define(
'residual_stride', 1, 'If residual connections are active, this '
'is the number of layers that each connection skips. For '
'instance, setting residual_stride = 2 means the output of layer '
'n is added to layer n + 2')
return p
def __init__(self, params):
super().__init__(params)
p = params
self.sub_steps = []
self.CreateChildren('sub', p.sub)
def PrepareExternalInputs(self, theta, external_inputs):
"""Delegates external inputs preparation to sub-layers.
Args:
theta: A `.NestedMap` object containing weights' values of this layer and
its children layers.
external_inputs: A `.NestedMap` object. The structure of the internal
fields is defined by the sub-steps.
Returns:
A `.NestedMap` containing a pre-processed version of the external_inputs,
one per sub-step.
"""
packed = py_utils.NestedMap(sub=[])
for i in range(len(self.sub)):
packed.sub.append(self.sub[i].PrepareExternalInputs(
theta.sub[i], external_inputs))
return packed
def ZeroState(self, theta, prepared_inputs, batch_size):
"""Computes a zero state for each sub-step.
Args:
theta: A `.NestedMap` object containing weights' values of this layer and
its children layers.
prepared_inputs: An output from PrepareExternalInputs.
batch_size: The number of items in the batch that FProp will process.
Returns:
A `.NestedMap` containing a state0 object for each sub-step.
"""
state = py_utils.NestedMap(sub=[])
for i in range(len(self.sub)):
state.sub.append(self.sub[i].ZeroState(theta.sub[i], prepared_inputs,
batch_size))
return state
def FProp(self, theta, prepared_inputs, step_inputs, padding, state0):
"""Performs inference on the stack of sub-steps.
There are three possible ways to feed input to the stack:
* step_inputs.inputs: These tensors are fed only to the lowest layer.
* step_inputs.context: [Optional] This tensor is fed to every layer.
* prepared_inputs: [Optional] This tensor is fed to every layer and
is assumed to stay constant over all steps.
Args:
theta: A `.NestedMap` object containing weights' values of this layer and
its children layers.
prepared_inputs: An output from PrepareExternalInputs.
step_inputs: A `.NestedMap` containing a list called 'inputs', an
optionally a tensor called 'context'.
padding: A 0/1 float tensor of shape [batch_size]; 1.0 means that this
batch element is empty in this step.
state0: The previous recurrent state.
Returns:
A tuple (output, state1):
- output: A `.NestedMap` containing the output of the top-most step.
- state1: The recurrent state to feed to next invocation of this graph.
"""
state1 = py_utils.NestedMap(sub=[])
inputs = list(step_inputs.inputs)
# We pretend that the input is the output of layer -1 for the purposes
# of residual connections.
residual_inputs = [tf.concat(inputs, axis=1)]
additional = []
if 'context' in step_inputs:
additional.append(step_inputs.context)
for i in range(len(self.sub)):
sub_inputs = py_utils.NestedMap(inputs=inputs + additional)
sub_output, state1_i = self.sub[i].FProp(theta.sub[i],
prepared_inputs.sub[i],
sub_inputs, padding,
state0.sub[i])
state1.sub.append(state1_i)
output = sub_output.output
if i >= self.params.residual_start >= 0:
# residual_inputs contains the step input at residual_inputs[0].
assert i + 1 - self.params.residual_stride < len(residual_inputs)
output += residual_inputs[i + 1 - self.params.residual_stride]
residual_inputs.append(output)
inputs = [output]
return py_utils.NestedMap(output=output), state1
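# Note: with residual_start=0 and residual_stride=1 the wiring above reduces to
# a standard residual stack: output[i] = output[i - 1] + sub[i](output[i - 1]),
# where output[-1] is the concatenated step input.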
class ParallelStep(Step):
"""Runs many steps on the same input and concatenates their outputs."""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'sub', [], 'A list of step params. Each step is '
'expected to accept its input as NestedMap(inputs=[]), and '
'produce output as NestedMap(output=tensor). '
'The external_inputs parameter is passed directly to the '
'PrepareExternalInputs method of each sub-step. ')
return p
def __init__(self, params):
super().__init__(params)
p = params
self.CreateChildren('sub', p.sub)
def FProp(self, theta, prepared_inputs, step_inputs, padding, state0):
"""Performs inference on N steps at once and concatenates the result.
Args:
theta: A `.NestedMap` object containing weights' values of this layer and
its children layers.
prepared_inputs: An output from PrepareExternalInputs.
step_inputs: A `.NestedMap` containing a list called 'inputs'.
padding: A 0/1 float tensor of shape [batch_size]; 1.0 means that this
batch element is empty in this step.
state0: The previous recurrent state.
Returns:
A tuple (output, state1):
- output: A `.NestedMap` containing the output of the top-most step.
- state1: The recurrent state to feed to next invocation of this graph.
"""
state1 = py_utils.NestedMap(sub=[None] * len(self.sub))
outputs = [None] * len(self.sub)
for i in range(len(self.sub)):
outputs[i], state1.sub[i] = self.sub[i].FProp(theta.sub[i],
prepared_inputs.sub[i],
step_inputs, padding,
state0.sub[i])
output = py_utils.NestedMap(output=tf.concat(outputs, axis=1))
return output, state1
# signature: A GraphSignature string defining the input and output parameters
# of this step. For example, (inputs=[a,b])->c means that step_inputs
# should be NestedMap(inputs=[a,b]), and the output of FProp should be
# stored in c.
# external_signature: A GraphSignature string defining the input to
# PrepareExternalInputs. For example, 'external_inputs.foo' means that
# the tensor external_inputs.foo should be the 'external_inputs' parameter
# when calling PrepareExternalInputs on this sub-step.
# params: The parameters to use when constructing the sub-step.
SubStep = collections.namedtuple('SubStep',
['signature', 'external_signature', 'params'])
class GraphStep(Step):
r"""A step that connects sub-steps in a simple data flow graph.
This is an adaptation of builder_layers.GraphLayer to support steps.
Params.sub specifies a list of Specs that define each sub-step.
A spec contains:
* step_inputs: The signature describing how to assemble the input and output
for this step. The input part describes the 'step_inputs' parameter,
while the output part describes the name of the output. The state0
input and state1 output are handled automatically and should not be
specified.
* external_inputs: if this Step requires external_inputs, this
is the signature describing how to find those inputs.
This value can also be set to None.
* params: the params used to construct the sub-step.
The format of signature strings is defined in detail in the GraphSignature
class documentation.
All inputs to a layer must have been produced by some previous layer. No
cycles are allowed. All outputs must be uniquely named; no overwriting
of previous names is allowed.
Example
('(act=[layer_0.output,step_inputs.context])->layer_1',
'external_inputs.extra',
step_params)
This constructs the step defined by step_params. Its FProp method will be
called with {act=[layer_0.output,step_inputs.context]} as the step_inputs
parameter. Its PrepareExternalInputs method will be called with
'external_inputs.extra' as the external_inputs parameter. The output of that
method will be passed to ZeroState and FProp.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('output_signature', '', 'Signature of the step output.')
p.Define('sub', [], 'A list of SubSteps (defined above).')
p.Define('dict_type', py_utils.NestedMap, 'Type of nested dicts.')
return p
_seq = collections.namedtuple(
'_Seq', ['name', 'signature', 'external_signature', 'step'])
def __init__(self, params):
super().__init__(params)
p = self.params
assert p.name
self._seq = []
for i, (signature, external_signature, sub_params) in enumerate(p.sub):
assert signature
sig = builder_layers.GraphSignature(signature)
assert len(sig.inputs) == 1
assert sig.outputs
external_sig = None
if external_signature:
external_sig = builder_layers.GraphSignature(external_signature)
assert len(external_sig.inputs) == 1
assert not external_sig.outputs
name = sub_params.name
if not name:
name = '%s_%02d' % (sig.outputs[0], i)
sub_params.name = name
self.CreateChild(name, sub_params)
self._seq.append(
GraphStep._seq(name, sig, external_sig, self.children[name]))
self.output_signature = builder_layers.GraphSignature(p.output_signature)
def PrepareExternalInputs(self, theta, external_inputs):
"""Prepares external inputs for each sub-step.
The external_inputs parameter of this method is processed by the
external_inputs of each sub-step, then processed by the sub-step's
PrepareExternalInputs method.
Args:
theta: variables used by sub-steps.
external_inputs: A NestedMap of [n_batch, ...] tensors.
Returns:
A NestedMap of prepared inputs, where the keys are the names of
each sub-step.
"""
graph_tensors = builder_layers.GraphTensors()
graph_tensors.StoreTensor('external_inputs', external_inputs)
prepared_inputs = py_utils.NestedMap()
with tf.name_scope(self.params.name):
for seq in self._seq:
if seq.external_signature:
template = py_utils.NestedMap(inputs=seq.external_signature.inputs)
packed = template.Transform(graph_tensors.GetTensor)
seq_external_inputs = packed.inputs[0]
prepared_inputs[seq.name] = seq.step.PrepareExternalInputs(
theta[seq.name], seq_external_inputs)
else:
prepared_inputs[seq.name] = py_utils.NestedMap()
return prepared_inputs
def ZeroState(self, theta, prepared_inputs, batch_size):
"""Creates a zero state NestedMap for this step.
Args:
theta: variables used by sub-steps.
prepared_inputs: Output from a call to PrepareExternalInputs.
batch_size: The number of items in the batch that FProp will process.
Returns:
A NestedMap of ZeroState results for each sub-step.
"""
state0 = py_utils.NestedMap()
with tf.name_scope(self.params.name):
for seq in self._seq:
state0[seq.name] = seq.step.ZeroState(theta[seq.name],
prepared_inputs[seq.name],
batch_size)
return state0
def FProp(self, theta, prepared_inputs, step_inputs, padding, state0):
"""A single inference step for this step graph.
Args:
theta: variables used by sub-steps.
prepared_inputs: A NestedMap containing external_inputs that were
pre-processed by the PrepareExternalInputs method of each sub-step. The
keys are the names of the sub-steps.
step_inputs: A NestedMap of [batch, ...] tensors. The structure of this
depends on the graph implementation.
padding: A 0/1 float tensor of shape [batch_size]; 1.0 means that this
batch element is empty in this step.
state0: A NestedMap of state variables produced by either ZeroState or a
previous invocation of this FProp step. The keys are the names of the
sub-steps.
Returns:
(output, state1), both of which are NestedMaps.
output is implementation-dependent and is defined by the output_signature
parameter.
state1 is a NestedMap where the keys are names of sub-steps and the values
are state outputs from their FProp methods.
"""
p = self.params
graph_tensors = builder_layers.GraphTensors()
graph_tensors.StoreTensor('prepared_inputs', prepared_inputs)
graph_tensors.StoreTensor('step_inputs', step_inputs)
state1 = py_utils.NestedMap()
with tf.name_scope(p.name):
for seq in self._seq:
tf.logging.vlog(1, 'GraphStep: call %s', seq.name)
external = None
if seq.external_signature:
external = prepared_inputs[seq.name]
template = py_utils.NestedMap(inputs=seq.signature.inputs)
packed = template.Transform(graph_tensors.GetTensor)
input_args = packed.inputs[0]
out, seq_state1 = seq.step.FProp(theta[seq.name], external, input_args,
padding, state0[seq.name])
graph_tensors.StoreTensor(seq.signature.outputs[0], out)
state1[seq.name] = seq_state1
template = py_utils.NestedMap(inputs=self.output_signature.inputs)
output_tensors = template.Transform(graph_tensors.GetTensor).inputs[0]
return output_tensors, state1
class IteratorStep(Step):
"""An iterator over the time dimension of some tensors.
It's common to have a tensor of shape [batch, time, ...] or
[time, batch, ...]. This object will step through the time dimension,
producing tensors of shape [batch, ...] in succession.
The input tensors are passed to PrepareExternalInputs. The step_inputs
argument of FProp is unused.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('axis', 1, 'The time dimension of the tensors.')
return p
def PrepareExternalInputs(self, theta, external_inputs):
"""Prepares the input for iteration.
Args:
theta: unused.
external_inputs: A NestedMap containing tensors. The time axis of each
tensor should be params.axis.
Returns:
A prepared NestedMap (current the same as the input).
"""
return external_inputs
def ZeroState(self, theta, prepared_inputs, batch_size):
"""Returns the initial iterator state.
Args:
theta: unused.
prepared_inputs: Output from a call to PrepareExternalInputs.
batch_size: The number of items in the batch that FProp will process.
Returns:
An initial state NestedMap.
"""
return py_utils.NestedMap(t=tf.constant(0, dtype=tf.int32))
def FProp(self, theta, prepared_inputs, step_inputs, padding, state0):
"""Returns a A single inference step for this step graph.
Args:
theta: unused.
prepared_inputs: Output from a call to PrepareExternalInputs.
step_inputs: unused.
padding: unused.
state0: A NestedMap of state variables produced by either ZeroState or a
previous invocation of this FProp step.
Returns:
(output, state1), both of which are NestedMaps.
output is implementation-dependent and is defined by the output_signature
parameter.
state1 is a NestedMap where the keys are names of sub-steps and the values
are state outputs from their FProp methods.
"""
del theta
del step_inputs
del padding
def _Slice(tensor):
"""Return a slice of this tensor at time=state0.t."""
shape = py_utils.GetShape(tensor)
# All zeros except for t in the time dimension.
# e.g. if params.axis=1, begin is [0, t, 0, 0, 0, ...]
begin = tf.one_hot(self.params.axis, tf.rank(tensor), on_value=state0.t)
# Same as shape, but with a 1 in the time dimension.
# e.g. if params.axis=1, shape is [shape[0], 1, shape[2], shape[3], ...]
size = tf.concat([
shape[0:self.params.axis],
tf.constant([1], dtype=tf.int32), shape[self.params.axis + 1:]
],
axis=0)
# Make a slice where the time dimension is fixed at state0.t.
time_slice = tf.slice(tensor, begin, size)
# Remove the time dimension.
return tf.squeeze(time_slice, axis=self.params.axis)
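    # e.g. with params.axis=1 and a [batch, time, dim] tensor, _Slice yields
    # the [batch, dim] slice at time step state0.t.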
output = prepared_inputs.Transform(_Slice)
state1 = py_utils.NestedMap(t=state0.t + 1)
return output, state1
class RecurrentStepWrapper(base_layer.BaseLayer):
"""A layer that wraps a step in a recurrent.Recurrent call."""
@classmethod
def Params(cls):
p = super().Params()
p.Define('step', None, 'The step params that this class wraps.')
return p
def __init__(self, params):
super().__init__(params)
self.CreateChild('step', self.params.step)
def _CreateChildrenVariables(self):
# Backwards compatibility: manually call child.InstantiateVariables()
# outside of tf.variable_scope(p.name).
self.step.InstantiateVariables()
super()._CreateChildrenVariables()
def PrepareExternalInputs(self, theta, external_inputs):
"""See Step.PrepareExternalInputs."""
return self.step.PrepareExternalInputs(theta.step, external_inputs)
def ZeroState(self, theta, prepared_inputs, batch_size):
"""See Step.ZeroState."""
return self.step.ZeroState(theta.step, prepared_inputs, batch_size)
def FProp(self, theta, prepared_inputs, inputs, padding, state0, **kwargs):
"""Runs a Step layer over multiple timesteps using Recurrent.
Args:
theta: A NestedMap containing weights' values of this layer and its
children layers.
prepared_inputs: External inputs returned by Step.PrepareExternalInputs().
inputs: A NestedMap of inputs of shape [time, batch_size, dim].
padding: A 0/1 float tensor of shape [time, batch_size]; 1.0 means that
this batch element is empty in this step.
state0: A NestedMap containing the initial recurrent state.
**kwargs: Additional kwargs to pass to Recurrent.
Returns:
A tuple (outputs, state1).
- outputs: A NestedMap containing the accumulated outputs of all steps,
containing Tensors shaped [time, batch_size, dim].
- state1: A NestedMap containing the accumulated recurrent states,
containing Tensors shaped [time, batch_size, dim].
"""
def RnnStep(recurrent_theta, recurrent_state0, recurrent_inputs):
"""Compute a single timestep."""
output, state1 = self.step.FProp(
theta=recurrent_theta.theta,
prepared_inputs=recurrent_theta.prepared_inputs,
step_inputs=recurrent_inputs.inputs,
padding=recurrent_inputs.padding,
state0=recurrent_state0.state)
recurrent_state1 = py_utils.NestedMap(output=output, state=state1)
return recurrent_state1, py_utils.NestedMap()
# In order to pass Step outputs through Recurrent, they need to be
# included as part of state.
output0, _ = self.step.FProp(theta.step, prepared_inputs,
inputs.Transform(lambda x: x[0]), padding[0],
state0)
accumulated_states, _ = recurrent.Recurrent(
theta=py_utils.NestedMap(
theta=theta.step, prepared_inputs=prepared_inputs),
state0=py_utils.NestedMap(output=output0, state=state0),
inputs=py_utils.NestedMap(inputs=inputs, padding=padding),
cell_fn=RnnStep,
**kwargs)
return accumulated_states.output, accumulated_states.state
|
the-stack_0_293 | # USAGE
# python facial_landmarks.py --shape-predictor shape_predictor_68_face_landmarks.dat --image images/example_01.jpg
# import the necessary packages
from imutils import face_utils
import numpy as np
import argparse
import imutils
import dlib
import cv2
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--shape-predictor", required=True,
help="path to facial landmark predictor")
ap.add_argument("-i", "--image", required=True,
help="path to input image")
args = vars(ap.parse_args())
# initialize dlib's face detector (HOG-based) and then create
# the facial landmark predictor
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor(args["shape_predictor"])
# load the input image, resize it, and convert it to grayscale
image = cv2.imread(args["image"])
image = imutils.resize(image, width=500)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# detect faces in the grayscale image
rects = detector(gray, 1)
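# (the second argument is the number of times to upsample the image before
# detection; higher values can find smaller faces at the cost of speed)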
# loop over the face detections
for (i, rect) in enumerate(rects):
# determine the facial landmarks for the face region, then
# convert the facial landmark (x, y)-coordinates to a NumPy
# array
shape = predictor(gray, rect)
shape = face_utils.shape_to_np(shape)
# convert dlib's rectangle to a OpenCV-style bounding box
# [i.e., (x, y, w, h)], then draw the face bounding box
(x, y, w, h) = face_utils.rect_to_bb(rect)
cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)
# show the face number
cv2.putText(image, "Face #{}".format(i + 1), (x - 10, y - 10),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
# loop over the (x, y)-coordinates for the facial landmarks
# and draw them on the image
for (x, y) in shape:
cv2.circle(image, (x, y), 1, (0, 0, 255), -1)
# show the output image with the face detections + facial landmarks
cv2.imshow("Output", image)
cv2.waitKey(0) |
the-stack_0_295 | """Support for Homematic thermostats."""
import logging
from homeassistant.components.climate import ClimateDevice
from homeassistant.components.climate.const import (
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_BOOST,
PRESET_COMFORT,
PRESET_ECO,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from . import ATTR_DISCOVER_DEVICES, HM_ATTRIBUTE_SUPPORT, HMDevice
_LOGGER = logging.getLogger(__name__)
HM_TEMP_MAP = ["ACTUAL_TEMPERATURE", "TEMPERATURE"]
HM_HUMI_MAP = ["ACTUAL_HUMIDITY", "HUMIDITY"]
HM_PRESET_MAP = {
"BOOST_MODE": PRESET_BOOST,
"COMFORT_MODE": PRESET_COMFORT,
"LOWERING_MODE": PRESET_ECO,
}
HM_CONTROL_MODE = "CONTROL_MODE"
HMIP_CONTROL_MODE = "SET_POINT_MODE"
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Homematic thermostat platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMThermostat(conf)
devices.append(new_device)
add_entities(devices)
class HMThermostat(HMDevice, ClimateDevice):
"""Representation of a Homematic thermostat."""
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def temperature_unit(self):
"""Return the unit of measurement that is used."""
return TEMP_CELSIUS
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if self.target_temperature <= self._hmdevice.OFF_VALUE + 0.5:
return HVAC_MODE_OFF
if "MANU_MODE" in self._hmdevice.ACTIONNODE:
if self._hm_control_mode == self._hmdevice.MANU_MODE:
return HVAC_MODE_HEAT
return HVAC_MODE_AUTO
# Simple devices
if self._data.get("BOOST_MODE"):
return HVAC_MODE_AUTO
return HVAC_MODE_HEAT
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
return [HVAC_MODE_AUTO, HVAC_MODE_HEAT, HVAC_MODE_OFF]
return [HVAC_MODE_HEAT, HVAC_MODE_OFF]
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp."""
if self._data.get("BOOST_MODE", False):
return "boost"
# Get the name of the mode
mode = HM_ATTRIBUTE_SUPPORT[HM_CONTROL_MODE][1][self._hm_control_mode]
mode = mode.lower()
# Filter HVAC states
if mode not in (HVAC_MODE_AUTO, HVAC_MODE_HEAT):
return None
return mode
@property
def preset_modes(self):
"""Return a list of available preset modes."""
preset_modes = []
for mode in self._hmdevice.ACTIONNODE:
if mode in HM_PRESET_MAP:
preset_modes.append(HM_PRESET_MAP[mode])
return preset_modes
@property
def current_humidity(self):
"""Return the current humidity."""
for node in HM_HUMI_MAP:
if node in self._data:
return self._data[node]
@property
def current_temperature(self):
"""Return the current temperature."""
for node in HM_TEMP_MAP:
if node in self._data:
return self._data[node]
@property
def target_temperature(self):
"""Return the target temperature."""
return self._data.get(self._state)
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return None
self._hmdevice.writeNodeData(self._state, float(temperature))
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
if hvac_mode == HVAC_MODE_AUTO:
self._hmdevice.MODE = self._hmdevice.AUTO_MODE
elif hvac_mode == HVAC_MODE_HEAT:
self._hmdevice.MODE = self._hmdevice.MANU_MODE
elif hvac_mode == HVAC_MODE_OFF:
self._hmdevice.turnoff()
def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if preset_mode == PRESET_BOOST:
self._hmdevice.MODE = self._hmdevice.BOOST_MODE
elif preset_mode == PRESET_COMFORT:
self._hmdevice.MODE = self._hmdevice.COMFORT_MODE
elif preset_mode == PRESET_ECO:
self._hmdevice.MODE = self._hmdevice.LOWERING_MODE
@property
def min_temp(self):
"""Return the minimum temperature."""
return 4.5
@property
def max_temp(self):
"""Return the maximum temperature."""
return 30.5
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return 0.5
@property
def _hm_control_mode(self):
"""Return Control mode."""
if HMIP_CONTROL_MODE in self._data:
return self._data[HMIP_CONTROL_MODE]
# Homematic
return self._data["CONTROL_MODE"]
def _init_data_struct(self):
"""Generate a data dict (self._data) from the Homematic metadata."""
self._state = next(iter(self._hmdevice.WRITENODE.keys()))
self._data[self._state] = None
if (
HM_CONTROL_MODE in self._hmdevice.ATTRIBUTENODE
or HMIP_CONTROL_MODE in self._hmdevice.ATTRIBUTENODE
):
self._data[HM_CONTROL_MODE] = None
for node in self._hmdevice.SENSORNODE.keys():
self._data[node] = None
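# Note (added): _init_data_struct only declares which Homematic datapoints this
# entity cares about; the HMDevice base class fills in the actual values as CCU
# events arrive, which is why every key starts out as None.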
|
the-stack_0_296 | import os,re
import requests
import time
import json
class Connect(object):
def __init__(self, name='Athus', icon="Zaika"):
self.name = name
self.icon = icon
self.session = requests.session()
    def save_cookie(self, file_name):
        # persist the session cookies so a later run can reuse the login
        with open(file_name, 'w+') as f:
            f.write(str(self.session.cookies.get_dict()))
def login(self):
home = self.session.get('https://drrr.com',headers={'User-Agent': 'Bot'})
token = re.search('<input type="hidden" name="token" data-value=".*?">', home.text).group(0)[-34:-2]
home.close()
login_body = {
'name': self.name,
'login': 'ENTER',
'token': token,
'direct-join': '',
'language': 'en-US',
'icon': self.icon
}
li = self.session.post('https://drrr.com', login_body, headers={'User-Agent': 'Bot'})
li.close() |
the-stack_0_297 | from GlobalConstants import N_ROWS, N_COLS, FEATURE_COLOUR, FEATURE_SHAPE, FEATURE_SIZE, FEATURE_TEXT
import numpy as np
from scipy.spatial import distance
from DisplayGenerator import DisplayGenerator
class ObservationModel(object):
def sample(self, action, current_display):
"""
Samples a random observation from a given display.
:param seed:
:return: 2D array with colour and shape noisy observation
"""
        # integer division: ``action`` is a flat index into the N_ROWS x N_COLS grid
        x = action // N_COLS
        y = action % N_COLS
obs_space_col = self.observe_feature(current_display, x, y, FEATURE_COLOUR)
obs_space_shp = self.observe_feature(current_display, x, y, FEATURE_SHAPE)
obs_space_size = self.observe_feature(current_display, x, y, FEATURE_SIZE)
obs_space_text = self.observe_feature(current_display, x, y, FEATURE_TEXT)
return obs_space_col, obs_space_shp, obs_space_size, obs_space_text
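    # Note (added): the cubic ``mu`` polynomials in add_feature_noise model
    # eccentricity-dependent acuity: items farther from the fixation point
    # (x, y) need a larger size to be detected, with coefficients tuned per
    # feature channel.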
def observe_feature(self, display, x, y, feature):
observation = self.add_feature_noise(display, x, y, feature)
#observation = self.add_spatial_noise(temp, x, y, global_variables)
return observation
def add_feature_noise(self, features, x, y, feature):
obs_space = np.ones((N_ROWS, N_COLS)) * -1
#for COLOUR.
if feature == FEATURE_COLOUR:
for ext_x in range(0, N_ROWS, 1):
for ext_y in range(0, N_COLS, 1):
e = self.get_eccentricity(features.x[x][y], features.y[x][y],
features.x[ext_x][ext_y], features.y[ext_x][ext_y])
mu = 0.05 + (0.2*e) + (0*e*e) + (0.0004*e*e*e)
if features.objects[ext_x][ext_y].color != "BLANK":
if features.objects[ext_x][ext_y].size > np.random.normal(mu,0.5,1)[0]:
if features.objects[ext_x][ext_y].color == features.target.color:
obs_space[ext_x][ext_y] = 1
else:
obs_space[ext_x][ext_y] = 0
#for SHAPE.
if feature == FEATURE_SHAPE:
for ext_x in range(0, N_ROWS, 1):
for ext_y in range(0, N_COLS, 1):
e = self.get_eccentricity(features.x[x][y], features.y[x][y],
features.x[ext_x][ext_y], features.y[ext_x][ext_y])
mu = 0.05 + (0.2*e) + (0*e*e) + (0.025*e*e*e)
if features.objects[ext_x][ext_y].color != "BLANK":
if features.objects[ext_x][ext_y].size > np.random.normal(mu,0.5,1)[0]:
if features.objects[ext_x][ext_y].shape == features.target.shape:
obs_space[ext_x][ext_y] = 1
else:
obs_space[ext_x][ext_y] = 0
#for SIZE.
if feature == FEATURE_SIZE:
for ext_x in range(0, N_ROWS, 1):
for ext_y in range(0, N_COLS, 1):
e = self.get_eccentricity(features.x[x][y], features.y[x][y],
features.x[ext_x][ext_y], features.y[ext_x][ext_y])
mu = 0.05 + (0.2*e) + (0*e*e) + (0.0004*e*e*e)
if features.objects[ext_x][ext_y].color != "BLANK":
if features.objects[ext_x][ext_y].size > np.random.normal(mu,0.5,1)[0]:
if features.objects[ext_x][ext_y].size == features.target.size:
obs_space[ext_x][ext_y] = 1
else:
obs_space[ext_x][ext_y] = 0
#for TEXT.
if feature == FEATURE_TEXT:
for ext_x in range(0, N_ROWS, 1):
for ext_y in range(0, N_COLS, 1):
e = self.get_eccentricity(features.x[x][y], features.y[x][y],
features.x[ext_x][ext_y], features.y[ext_x][ext_y])
mu = 0.05 + (0.1*e) + (0*e*e) + (0.05*e*e*e)
if features.objects[ext_x][ext_y].color != "BLANK":
if .26 > np.random.normal(mu,1.0,1)[0]:
if features.objects[ext_x][ext_y].text == features.target.text:
obs_space[ext_x][ext_y] = 1
else:
obs_space[ext_x][ext_y] = 0
return obs_space
def get_eccentricity(self, fix_x, fix_y, ext_x, ext_y):
return distance.euclidean([fix_x, fix_y], [ext_x, ext_y])
# gen = DisplayGenerator()
# env = gen.sample()
# space = np.ones((N_ROWS, N_COLS)) * -1
# print(env.target)
# for ext_x in range(0, N_ROWS, 1):
#     for ext_y in range(0, N_COLS, 1):
#         if env.objects[ext_x][ext_y].text == env.target.text:
#             space[ext_x][ext_y] = 1
# print(space)
# print("------------")
# model = ObservationModel()
# col, shp, size, txt = model.sample(0, env)
# print(txt)
# print("------------")
# col, shp, size, txt = model.sample(10, env)
# print(txt)
# print("------------")
# col, shp, size, txt = model.sample(38, env)
# print(txt)
# print("------------")
# col, shp, size, txt = model.sample(70, env)
# print(txt)
# print("------------")
# col, shp, size, txt = model.sample(76, env)
# print(txt) |
the-stack_0_298 |
# http://people.duke.edu/~ccc14/sta-663-2016/16A_MCMC.html
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
thetas = np.linspace(0, 1, 200)
n = 100
h = 61
a = 10
b = 10
def target(lik, prior, n, h, theta):
if theta < 0 or theta > 1:
return 0
else:
return lik(n, theta).pmf(h)*prior.pdf(theta)
def mh_coin(niters, n, h, theta, lik, prior, sigma):
samples = [theta]
while len(samples) < niters:
theta_p = theta + stats.norm(0, sigma).rvs()
rho = min(1, target(lik, prior, n, h, theta_p)/target(lik, prior, n, h, theta ))
u = np.random.uniform()
if u < rho:
theta = theta_p
samples.append(theta)
return samples
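# Sanity check (added; not part of the original notebook): with a Beta(a, b)
# prior and a binomial likelihood the posterior is conjugate, Beta(a+h, b+n-h),
# so the chains plotted below should settle near its mode.
posterior_mode = (a + h - 1) / (a + b + n - 2)
print("analytic posterior mode: {:.3f}".format(posterior_mode))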
lik = stats.binom
prior = stats.beta(a, b)
sigma = 0.05
niters = 100
sampless = [mh_coin(niters, n, h, theta, lik, prior, sigma) for theta in np.arange(0.1, 1, 0.2)]
for samples in sampless:
plt.plot(samples, '-o')
plt.xlim([0, niters])
plt.ylim([0, 1]); |
the-stack_0_299 | import datetime as dt
from unittest.mock import patch, call
from model_bakery import baker
from django.utils import timezone as djangotime
from tacticalrmm.test import TacticalTestCase
from .models import AutomatedTask
from logs.models import PendingAction
from .serializers import AutoTaskSerializer
from .tasks import remove_orphaned_win_tasks, run_win_task, create_win_task_schedule
class TestAutotaskViews(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
@patch("automation.tasks.generate_agent_tasks_from_policies_task.delay")
@patch("autotasks.tasks.create_win_task_schedule.delay")
def test_add_autotask(
self, create_win_task_schedule, generate_agent_tasks_from_policies_task
):
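        # Note (added): stacked @patch decorators are applied bottom-up, so the
        # decorator closest to the function (create_win_task_schedule) binds to
        # the first mock parameter.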
url = "/tasks/automatedtasks/"
# setup data
script = baker.make_recipe("scripts.script")
agent = baker.make_recipe("agents.agent")
policy = baker.make("automation.Policy")
check = baker.make_recipe("checks.diskspace_check", agent=agent)
# test script set to invalid pk
data = {"autotask": {"script": 500}}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 404)
# test invalid policy
data = {"autotask": {"script": script.id}, "policy": 500}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 404)
# test invalid agent
data = {
"autotask": {"script": script.id},
"agent": 500,
}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 404)
# test add task to agent
data = {
"autotask": {
"name": "Test Task Scheduled with Assigned Check",
"run_time_days": ["Sunday", "Monday", "Friday"],
"run_time_minute": "10:00",
"timeout": 120,
"enabled": True,
"script": script.id,
"script_args": None,
"task_type": "scheduled",
"assigned_check": check.id,
},
"agent": agent.id,
}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
create_win_task_schedule.assert_called()
# test add task to policy
data = {
"autotask": {
"name": "Test Task Manual",
"run_time_days": [],
"timeout": 120,
"enabled": True,
"script": script.id,
"script_args": None,
"task_type": "manual",
"assigned_check": None,
},
"policy": policy.id,
}
resp = self.client.post(url, data, format="json")
self.assertEqual(resp.status_code, 200)
generate_agent_tasks_from_policies_task.assert_called_with(policy.id)
self.check_not_authenticated("post", url)
def test_get_autotask(self):
# setup data
agent = baker.make_recipe("agents.agent")
baker.make("autotasks.AutomatedTask", agent=agent, _quantity=3)
url = f"/tasks/{agent.id}/automatedtasks/"
resp = self.client.get(url, format="json")
serializer = AutoTaskSerializer(agent)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.data, serializer.data)
self.check_not_authenticated("get", url)
@patch("autotasks.tasks.enable_or_disable_win_task.delay")
@patch("automation.tasks.update_policy_task_fields_task.delay")
def test_update_autotask(
self, update_policy_task_fields_task, enable_or_disable_win_task
):
# setup data
agent = baker.make_recipe("agents.agent")
agent_task = baker.make("autotasks.AutomatedTask", agent=agent)
policy = baker.make("automation.Policy")
policy_task = baker.make("autotasks.AutomatedTask", policy=policy)
# test invalid url
resp = self.client.patch("/tasks/500/automatedtasks/", format="json")
self.assertEqual(resp.status_code, 404)
url = f"/tasks/{agent_task.id}/automatedtasks/"
# test editing agent task
data = {"enableordisable": False}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
enable_or_disable_win_task.assert_called_with(pk=agent_task.id, action=False)
url = f"/tasks/{policy_task.id}/automatedtasks/"
# test editing policy task
data = {"enableordisable": True}
resp = self.client.patch(url, data, format="json")
self.assertEqual(resp.status_code, 200)
update_policy_task_fields_task.assert_called_with(policy_task.id, True)
self.check_not_authenticated("patch", url)
@patch("autotasks.tasks.delete_win_task_schedule.delay")
@patch("automation.tasks.delete_policy_autotask_task.delay")
def test_delete_autotask(
self, delete_policy_autotask_task, delete_win_task_schedule
):
# setup data
agent = baker.make_recipe("agents.agent")
agent_task = baker.make("autotasks.AutomatedTask", agent=agent)
policy = baker.make("automation.Policy")
policy_task = baker.make("autotasks.AutomatedTask", policy=policy)
# test invalid url
resp = self.client.delete("/tasks/500/automatedtasks/", format="json")
self.assertEqual(resp.status_code, 404)
# test delete agent task
url = f"/tasks/{agent_task.id}/automatedtasks/"
resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200)
delete_win_task_schedule.assert_called_with(pk=agent_task.id)
# test delete policy task
url = f"/tasks/{policy_task.id}/automatedtasks/"
resp = self.client.delete(url, format="json")
self.assertEqual(resp.status_code, 200)
delete_policy_autotask_task.assert_called_with(policy_task.id)
self.check_not_authenticated("delete", url)
@patch("agents.models.Agent.nats_cmd")
def test_run_autotask(self, nats_cmd):
# setup data
agent = baker.make_recipe("agents.agent", version="1.1.0")
task = baker.make("autotasks.AutomatedTask", agent=agent)
# test invalid url
resp = self.client.get("/tasks/runwintask/500/", format="json")
self.assertEqual(resp.status_code, 404)
# test run agent task
url = f"/tasks/runwintask/{task.id}/"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 200)
nats_cmd.assert_called_with({"func": "runtask", "taskpk": task.id}, wait=False)
nats_cmd.reset_mock()
old_agent = baker.make_recipe("agents.agent", version="1.0.2")
task2 = baker.make("autotasks.AutomatedTask", agent=old_agent)
url = f"/tasks/runwintask/{task2.id}/"
resp = self.client.get(url, format="json")
self.assertEqual(resp.status_code, 400)
nats_cmd.assert_not_called()
self.check_not_authenticated("get", url)
class TestAutoTaskCeleryTasks(TacticalTestCase):
def setUp(self):
self.authenticate()
self.setup_coresettings()
@patch("agents.models.Agent.nats_cmd")
def test_remove_orphaned_win_task(self, nats_cmd):
self.agent = baker.make_recipe("agents.agent")
self.task1 = AutomatedTask.objects.create(
agent=self.agent,
name="test task 1",
win_task_name=AutomatedTask.generate_task_name(),
)
# test removing an orphaned task
win_tasks = [
"Adobe Acrobat Update Task",
"AdobeGCInvoker-1.0",
"GoogleUpdateTaskMachineCore",
"GoogleUpdateTaskMachineUA",
"OneDrive Standalone Update Task-S-1-5-21-717461175-241712648-1206041384-1001",
self.task1.win_task_name,
"TacticalRMM_fixmesh",
"TacticalRMM_SchedReboot_jk324kajd",
"TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb", # orphaned task
]
self.calls = [
call({"func": "listschedtasks"}, timeout=10),
call(
{
"func": "delschedtask",
"schedtaskpayload": {
"name": "TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb"
},
},
timeout=10,
),
]
nats_cmd.side_effect = [win_tasks, "ok"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(nats_cmd.call_count, 2)
nats_cmd.assert_has_calls(self.calls)
self.assertEqual(ret.status, "SUCCESS")
# test nats delete task fail
nats_cmd.reset_mock()
nats_cmd.side_effect = [win_tasks, "error deleting task"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
nats_cmd.assert_has_calls(self.calls)
self.assertEqual(nats_cmd.call_count, 2)
self.assertEqual(ret.status, "SUCCESS")
# no orphaned tasks
nats_cmd.reset_mock()
win_tasks.remove("TacticalRMM_iggrLcOaldIZnUzLuJWPLNwikiOoJJHHznb")
nats_cmd.side_effect = [win_tasks, "ok"]
ret = remove_orphaned_win_tasks.s(self.agent.pk).apply()
self.assertEqual(nats_cmd.call_count, 1)
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.nats_cmd")
def test_run_win_task(self, nats_cmd):
self.agent = baker.make_recipe("agents.agent")
self.task1 = AutomatedTask.objects.create(
agent=self.agent,
name="test task 1",
win_task_name=AutomatedTask.generate_task_name(),
)
nats_cmd.return_value = "ok"
ret = run_win_task.s(self.task1.pk).apply()
self.assertEqual(ret.status, "SUCCESS")
@patch("agents.models.Agent.nats_cmd")
def test_create_win_task_schedule(self, nats_cmd):
self.agent = baker.make_recipe("agents.agent")
task_name = AutomatedTask.generate_task_name()
# test scheduled task
self.task1 = AutomatedTask.objects.create(
agent=self.agent,
name="test task 1",
win_task_name=task_name,
task_type="scheduled",
run_time_bit_weekdays=127,
run_time_minute="21:55",
)
self.assertEqual(self.task1.sync_status, "notsynced")
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
self.assertEqual(nats_cmd.call_count, 1)
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "weekly",
"weekdays": 127,
"pk": self.task1.pk,
"name": task_name,
"hour": 21,
"min": 55,
},
},
timeout=10,
)
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
self.assertEqual(self.task1.sync_status, "synced")
nats_cmd.return_value = "timeout"
ret = create_win_task_schedule.s(pk=self.task1.pk, pending_action=False).apply()
self.assertEqual(ret.status, "SUCCESS")
self.task1 = AutomatedTask.objects.get(pk=self.task1.pk)
self.assertEqual(self.task1.sync_status, "notsynced")
# test pending action
self.pending_action = PendingAction.objects.create(
agent=self.agent, action_type="taskaction"
)
self.assertEqual(self.pending_action.status, "pending")
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(
pk=self.task1.pk, pending_action=self.pending_action.pk
).apply()
self.assertEqual(ret.status, "SUCCESS")
self.pending_action = PendingAction.objects.get(pk=self.pending_action.pk)
self.assertEqual(self.pending_action.status, "completed")
# test runonce with future date
nats_cmd.reset_mock()
task_name = AutomatedTask.generate_task_name()
run_time_date = djangotime.now() + djangotime.timedelta(hours=22)
self.task2 = AutomatedTask.objects.create(
agent=self.agent,
name="test task 2",
win_task_name=task_name,
task_type="runonce",
run_time_date=run_time_date,
)
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task2.pk, pending_action=False).apply()
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "once",
"pk": self.task2.pk,
"name": task_name,
"year": int(dt.datetime.strftime(self.task2.run_time_date, "%Y")),
"month": dt.datetime.strftime(self.task2.run_time_date, "%B"),
"day": int(dt.datetime.strftime(self.task2.run_time_date, "%d")),
"hour": int(dt.datetime.strftime(self.task2.run_time_date, "%H")),
"min": int(dt.datetime.strftime(self.task2.run_time_date, "%M")),
},
},
timeout=10,
)
self.assertEqual(ret.status, "SUCCESS")
# test runonce with date in the past
nats_cmd.reset_mock()
task_name = AutomatedTask.generate_task_name()
run_time_date = djangotime.now() - djangotime.timedelta(days=13)
self.task3 = AutomatedTask.objects.create(
agent=self.agent,
name="test task 3",
win_task_name=task_name,
task_type="runonce",
run_time_date=run_time_date,
)
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task3.pk, pending_action=False).apply()
self.task3 = AutomatedTask.objects.get(pk=self.task3.pk)
self.assertEqual(ret.status, "SUCCESS")
# test checkfailure
nats_cmd.reset_mock()
self.check = baker.make_recipe("checks.diskspace_check", agent=self.agent)
task_name = AutomatedTask.generate_task_name()
self.task4 = AutomatedTask.objects.create(
agent=self.agent,
name="test task 4",
win_task_name=task_name,
task_type="checkfailure",
assigned_check=self.check,
)
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task4.pk, pending_action=False).apply()
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "manual",
"pk": self.task4.pk,
"name": task_name,
},
},
timeout=10,
)
self.assertEqual(ret.status, "SUCCESS")
# test manual
nats_cmd.reset_mock()
task_name = AutomatedTask.generate_task_name()
self.task5 = AutomatedTask.objects.create(
agent=self.agent,
name="test task 5",
win_task_name=task_name,
task_type="manual",
)
nats_cmd.return_value = "ok"
ret = create_win_task_schedule.s(pk=self.task5.pk, pending_action=False).apply()
nats_cmd.assert_called_with(
{
"func": "schedtask",
"schedtaskpayload": {
"type": "rmm",
"trigger": "manual",
"pk": self.task5.pk,
"name": task_name,
},
},
timeout=10,
)
self.assertEqual(ret.status, "SUCCESS")
|
the-stack_0_300 | """Ensure credentials are preserved through the authorization.
The Authorization Code Grant will need to preserve state as well as redirect
uri and the Implicit Grant will need to preserve state.
"""
from __future__ import absolute_import, unicode_literals
import json
import mock
from oauthlib.oauth2 import (MobileApplicationServer, RequestValidator,
WebApplicationServer)
from oauthlib.oauth2.rfc6749 import errors
from ....unittest import TestCase
from .test_utils import get_fragment_credentials, get_query_credentials
class PreservationTest(TestCase):
DEFAULT_REDIRECT_URI = 'http://i.b./path'
def setUp(self):
self.validator = mock.MagicMock(spec=RequestValidator)
self.validator.get_default_redirect_uri.return_value = self.DEFAULT_REDIRECT_URI
self.validator.get_code_challenge.return_value = None
self.validator.authenticate_client.side_effect = self.set_client
self.web = WebApplicationServer(self.validator)
self.mobile = MobileApplicationServer(self.validator)
def set_state(self, state):
def set_request_state(client_id, code, client, request):
request.state = state
return True
return set_request_state
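    # Note (added): set_state builds a validate_code side effect whose closure
    # stashes the expected state on the request, letting the token response in
    # test_state_preservation echo it back.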
def set_client(self, request):
request.client = mock.MagicMock()
request.client.client_id = 'mocked'
return True
def test_state_preservation(self):
auth_uri = 'http://example.com/path?state=xyz&client_id=abc&response_type='
token_uri = 'http://example.com/path'
# authorization grant
h, _, s = self.web.create_authorization_response(
auth_uri + 'code', scopes=['random'])
self.assertEqual(s, 302)
self.assertIn('Location', h)
code = get_query_credentials(h['Location'])['code'][0]
self.validator.validate_code.side_effect = self.set_state('xyz')
_, body, _ = self.web.create_token_response(token_uri,
body='grant_type=authorization_code&code=%s' % code)
self.assertEqual(json.loads(body)['state'], 'xyz')
# implicit grant
h, _, s = self.mobile.create_authorization_response(
auth_uri + 'token', scopes=['random'])
self.assertEqual(s, 302)
self.assertIn('Location', h)
self.assertEqual(get_fragment_credentials(h['Location'])['state'][0], 'xyz')
def test_redirect_uri_preservation(self):
auth_uri = 'http://example.com/path?redirect_uri=http%3A%2F%2Fi.b%2Fpath&client_id=abc'
redirect_uri = 'http://i.b/path'
token_uri = 'http://example.com/path'
# authorization grant
h, _, s = self.web.create_authorization_response(
auth_uri + '&response_type=code', scopes=['random'])
self.assertEqual(s, 302)
self.assertIn('Location', h)
self.assertTrue(h['Location'].startswith(redirect_uri))
# confirm_redirect_uri should return false if the redirect uri
# was given in the authorization but not in the token request.
self.validator.confirm_redirect_uri.return_value = False
code = get_query_credentials(h['Location'])['code'][0]
_, body, _ = self.web.create_token_response(token_uri,
body='grant_type=authorization_code&code=%s' % code)
self.assertEqual(json.loads(body)['error'], 'invalid_request')
# implicit grant
h, _, s = self.mobile.create_authorization_response(
auth_uri + '&response_type=token', scopes=['random'])
self.assertEqual(s, 302)
self.assertIn('Location', h)
self.assertTrue(h['Location'].startswith(redirect_uri))
def test_invalid_redirect_uri(self):
auth_uri = 'http://example.com/path?redirect_uri=http%3A%2F%2Fi.b%2Fpath&client_id=abc'
self.validator.validate_redirect_uri.return_value = False
# authorization grant
self.assertRaises(errors.MismatchingRedirectURIError,
self.web.create_authorization_response,
auth_uri + '&response_type=code', scopes=['random'])
# implicit grant
self.assertRaises(errors.MismatchingRedirectURIError,
self.mobile.create_authorization_response,
auth_uri + '&response_type=token', scopes=['random'])
def test_default_uri(self):
auth_uri = 'http://example.com/path?state=xyz&client_id=abc'
self.validator.get_default_redirect_uri.return_value = None
# authorization grant
self.assertRaises(errors.MissingRedirectURIError,
self.web.create_authorization_response,
auth_uri + '&response_type=code', scopes=['random'])
# implicit grant
self.assertRaises(errors.MissingRedirectURIError,
self.mobile.create_authorization_response,
auth_uri + '&response_type=token', scopes=['random'])
def test_default_uri_in_token(self):
auth_uri = 'http://example.com/path?state=xyz&client_id=abc'
token_uri = 'http://example.com/path'
# authorization grant
h, _, s = self.web.create_authorization_response(
auth_uri + '&response_type=code', scopes=['random'])
self.assertEqual(s, 302)
self.assertIn('Location', h)
self.assertTrue(h['Location'].startswith(self.DEFAULT_REDIRECT_URI))
# confirm_redirect_uri should return true if the redirect uri
# was not given in the authorization AND not in the token request.
self.validator.confirm_redirect_uri.return_value = True
code = get_query_credentials(h['Location'])['code'][0]
self.validator.validate_code.side_effect = self.set_state('xyz')
_, body, s = self.web.create_token_response(token_uri,
body='grant_type=authorization_code&code=%s' % code)
self.assertEqual(s, 200)
self.assertEqual(self.validator.confirm_redirect_uri.call_args[0][2], self.DEFAULT_REDIRECT_URI)
|
the-stack_0_301 | import os
import src.data.atomic as atomic_data
import src.data.conceptnet as conceptnet_data
import src.data.config as cfg
import utils.utils as utils
import pickle
import torch
import json
start_token = "<START>"
end_token = "<END>"
blank_token = "<blank>"
def save_checkpoint(state, filename):
print("Saving model to {}".format(filename))
torch.save(state, filename)
def save_step(model, vocab, optimizer, opt, length, lrs):
if cfg.test_save:
name = "{}.pickle".format(utils.make_name(
opt, prefix="garbage/models/", is_dir=False, eval_=True))
else:
name = "{}.pickle".format(utils.make_name(
opt, prefix="models/", is_dir=False, eval_=True))
save_checkpoint({
"epoch": length, "state_dict": model.state_dict(),
"optimizer": optimizer.state_dict(), "opt": opt,
"vocab": vocab, "epoch_learning_rates": lrs},
name)
def save_eval_file(opt, stats, eval_type="losses", split="dev", ext="pickle"):
if cfg.test_save:
name = "{}/{}.{}".format(utils.make_name(
opt, prefix="garbage/{}/".format(eval_type),
is_dir=True, eval_=True), split, ext)
else:
name = "{}/{}.{}".format(utils.make_name(
opt, prefix="results/{}/".format(eval_type),
is_dir=True, eval_=True), split, ext)
print("Saving {} {} to {}".format(split, eval_type, name))
if ext == "pickle":
with open(name, "wb") as f:
pickle.dump(stats, f)
elif ext == "txt":
with open(name, "w") as f:
f.write(stats)
elif ext == "json":
with open(name, "w") as f:
json.dump(stats, f)
    else:
        raise ValueError("Unsupported extension: {}".format(ext))
def load_checkpoint(filename, gpu=True):
    if os.path.exists(filename):
        checkpoint = torch.load(
            filename, map_location=lambda storage, loc: storage)
    else:
        print("No model found at {}".format(filename))
        checkpoint = None
    return checkpoint
def make_data_loader(opt, *args):
if opt.dataset == "atomic":
return atomic_data.GenerationDataLoader(opt, *args)
elif opt.dataset == "conceptnet":
return conceptnet_data.GenerationDataLoader(opt, *args)
def set_max_sizes(data_loader, force_split=None):
data_loader.total_size = {}
if force_split is not None:
data_loader.total_size[force_split] = \
data_loader.sequences[force_split]["total"].size(0)
return
for split in data_loader.sequences:
data_loader.total_size[split] = \
data_loader.sequences[split]["total"].size(0)
|
the-stack_0_302 | from .sir import SIR
from common.config import data_type
from common.linalg import as_array, as_matrix, init_weights
from common.stats import RSS, MSPE, RMSE
from numpy.random import normal, uniform
from numpy import *
from filtering.particlefilter import ParticleFilter
class ParticleSIR(SIR):
def __init__(self, num_enbs, params):
self.num_enbs = num_enbs
super(ParticleSIR, self).__init__(params)
del self.alpha
del self.beta
self.current_Is = uniform(0, self.i * 2, num_enbs)
self.current_Ss = ones(num_enbs) - self.current_Is
self.alphas = uniform(0., 1, num_enbs)
self.betas = uniform(0., 1, num_enbs)
self.weights = [init_weights(num_enbs)] # matrix-like
for i in range(num_enbs):
if self.alphas[i] < self.betas[i]:
self.alphas[i], self.betas[i] = self.betas[i], self.alphas[i]
self.Is = [self.current_Is.tolist()]
self.Ss = [self.current_Ss.tolist()]
def update_states(self):
for j in range(self.num_enbs):
s = self.current_Ss[j]
i = self.current_Is[j]
s += self._delta_s(self.current_Ss[j], self.current_Is[j],
self.alphas[j])
i += self._delta_i(self.current_Ss[j], self.current_Is[j],
self.alphas[j], self.betas[j])
s = self.check_bounds(s)
i = self.check_bounds(i)
self.current_Is[j] = i
self.current_Ss[j] = s
self.Is.append(self.current_Is.tolist())
self.Ss.append(self.current_Ss.tolist())
    def _init_filter(self):
        # state per particle: [S, I, alpha, beta]; one observation stream
        self.filter = ParticleFilter(self.num_enbs)
def predict_with_filter(self):
F = self.filter
while self.epoch < self.epochs - 1:
X = as_matrix([self.current_Ss, self.current_Is,
self.alphas, self.betas])
F.fit(X)
y = self.CDC_obs[self.epoch]
F.step(y, predict_P=False)
self.weights.append(F.weights)
x_post = F.x_post
for j in range(self.num_enbs):
self.current_Ss[j] = self.check_bounds(x_post[0, j])
self.current_Is[j] = self.check_bounds(x_post[1, j])
self.alphas[j] = self.check_bounds(x_post[2, j], inf)
self.betas[j] = self.check_bounds(x_post[3, j], inf)
self.update_states()
self.epoch += 1
self.get_score()
def _delta_s(self, s, i, alpha):
return - alpha * s * i
def _delta_i(self, s, i, alpha, beta):
return alpha * s * i - beta * i
def check_par_bounds(self, par):
if par < 0: par = 0
return par
def get_score(self):
I_mat = as_array(self.Is)
for i, w in enumerate(self.weights):
I_mat[i] *= w
self.IS = sum(I_mat, axis=1)
time_gap = self.epochs / 52
idx = [x for x in range(self.epochs) if not x % time_gap]
self.score = RSS(self.CDC_obs, self.IS[idx])
self.scores = {}
self.scores['SSE'] = self.score
self.scores['RMSE'] = RMSE(self.CDC_obs, self.IS[idx])
self.scores['MSPE'] = MSPE(self.CDC_obs, self.IS[idx])
self.scores['CORR'] = corrcoef(self.CDC_obs, self.IS[idx])[0, 1]
return self.score
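# Note (added): get_score collapses the particle ensemble into one trajectory
# by weighting each particle's infection curve with the filter weights from
# the matching epoch, then scores the weekly slices of that mean curve against
# the CDC observations.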
|
the-stack_0_303 | from typing import Any, Dict, Optional, Set
import great_expectations.exceptions as ge_exceptions
from great_expectations.core.expectation_configuration import ExpectationConfiguration
from great_expectations.rule_based_profiler.expectation_configuration_builder import (
ExpectationConfigurationBuilder,
)
from great_expectations.rule_based_profiler.types import Domain, ParameterContainer
from great_expectations.rule_based_profiler.util import (
get_parameter_value_and_validate_return_type,
)
class DefaultExpectationConfigurationBuilder(ExpectationConfigurationBuilder):
"""
Class which creates ExpectationConfiguration out of a given Expectation type and
parameter_name-to-parameter_fully_qualified_parameter_name map (name-value pairs supplied in the kwargs dictionary).
"""
exclude_field_names: Set[str] = {
"kwargs",
}
def __init__(
self,
expectation_type: str,
meta: Optional[Dict[str, Any]] = None,
**kwargs,
):
super().__init__(expectation_type=expectation_type, **kwargs)
self._kwargs = kwargs
if meta is None:
meta = {}
if not isinstance(meta, dict):
raise ge_exceptions.ProfilerExecutionError(
message=f"""Argument "{meta}" in "{self.__class__.__name__}" must be of type "dictionary" \
(value of type "{str(type(meta))}" was encountered).
"""
)
self._meta = meta
@property
def expectation_type(self) -> str:
return self._expectation_type
@property
def kwargs(self) -> dict:
return self._kwargs
@property
def meta(self) -> dict:
return self._meta
def _build_expectation_configuration(
self,
domain: Domain,
variables: Optional[ParameterContainer] = None,
parameters: Optional[Dict[str, ParameterContainer]] = None,
) -> ExpectationConfiguration:
parameter_name: str
fully_qualified_parameter_name: str
expectation_kwargs: Dict[str, Any] = {
parameter_name: get_parameter_value_and_validate_return_type(
domain=domain,
parameter_reference=fully_qualified_parameter_name,
expected_return_type=None,
variables=variables,
parameters=parameters,
)
for parameter_name, fully_qualified_parameter_name in self.kwargs.items()
}
meta: Dict[str, Any] = get_parameter_value_and_validate_return_type(
domain=domain,
parameter_reference=self.meta,
expected_return_type=dict,
variables=variables,
parameters=parameters,
)
return ExpectationConfiguration(
expectation_type=self.expectation_type,
kwargs=expectation_kwargs,
meta=meta,
)
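# Usage sketch (added; the expectation type and parameter references below are
# illustrative, not taken from this file): the kwargs map expectation kwarg
# names to fully qualified parameter names that are resolved per domain at
# build time.
#
#   builder = DefaultExpectationConfigurationBuilder(
#       expectation_type="expect_column_values_to_be_between",
#       column="$domain.domain_kwargs.column",
#       min_value="$parameter.my_min.value",
#       max_value="$parameter.my_max.value",
#       meta={"notes": "built by a rule-based profiler"},
#   )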
|
the-stack_0_304 | from tkinter import *
from tkinter import messagebox
root=Tk()
root.title("TIC TAC TOE!")
press=True
flag=0
s1="0"
s2="0"
#main game logic
def check(button):
global press
global flag
global s1
global s2
#alternate player turn's logic
if button["text"]=="" and press==True:
button["text"]="X"
press=False
flag+=1
elif button["text"]=="" and press==False:
button["text"]="O"
press=True
flag+=1
# X winning logic
if (button1["text"]=="X" and button2["text"]=="X" and button3["text"]=="X" or
button1["text"]=="X" and button4["text"]=="X" and button7["text"]=="X" or
button2["text"]=="X" and button5["text"]=="X" and button8["text"]=="X" or
button3["text"]=="X" and button6["text"]=="X" and button9["text"]=="X" or
button7["text"]=="X" and button8["text"]=="X" and button9["text"]=="X" or
button4["text"]=="X" and button5["text"]=="X" and button6["text"]=="X" or
button1["text"]=="X" and button5["text"]=="X" and button9["text"]=="X" or
button7["text"]=="X" and button5["text"]=="X" and button3["text"]=="X"):
messagebox.showinfo("GAME OVER","X Is Winner!")
flag=0
press=True
        s1 = str(int(s1) + 1)
        var0.set(s1)
reset()
# O winning logic
elif (button1["text"]=="O" and button2["text"]=="O" and button3["text"]=="O" or
button1["text"]=="O" and button4["text"]=="O" and button7["text"]=="O" or
button2["text"]=="O" and button5["text"]=="O" and button8["text"]=="O" or
button3["text"]=="O" and button6["text"]=="O" and button9["text"]=="O" or
button7["text"]=="O" and button8["text"]=="O" and button9["text"]=="O" or
button4["text"]=="O" and button5["text"]=="O" and button6["text"]=="O" or
button1["text"]=="O" and button5["text"]=="O" and button9["text"]=="O" or
button7["text"]=="O" and button5["text"]=="O" and button3["text"]=="O"):
messagebox.showinfo("GAME OVER","O Is Winner!")
flag=0
press=True
        s2 = str(int(s2) + 1)
        var1.set(s2)
reset()
elif flag>=9:
messagebox.showinfo("GAME OVER","Match Is Draw!")
flag=0
press=True
reset()
def resetscore():
global s1
global s2
s1="0"
s2="0"
var0.set(s1)
var1.set(s2)
def reset():
button1["text"]=""
button2["text"]=""
button3["text"]=""
button4["text"]=""
button5["text"]=""
button6["text"]=""
button7["text"]=""
button8["text"]=""
button9["text"]=""
#score logic
score=Label(root,text="---<:{ $core }:>---",font=("Verdana","15","normal"))
score.pack(anchor=N)
var0=StringVar()
scoren=Frame(root)
scoren.pack(anchor=W)
scorep1=Label(scoren,text="player X:",font=("Verdana","11","bold"))
scorep1.grid(row=0,column=0)
scorep1c=Label(scoren,textvariable=var0,font=("Segoe UI","14","bold"))
scorep1c.grid(row=0,column=1)
var0.set(s1)
var1=StringVar()
scorep2=Label(scoren,text="\tplayer O:",font=("Verdana","11","bold"))
scorep2.grid(row=0,column=2)
scorep2c=Label(scoren,textvariable=var1,font=("Segoe UI","14","bold"))
scorep2c.grid(row=0,column=3)
var1.set(s2)
#button logic
buttonframe=Frame(root)
buttonframe.pack(padx=5,pady=5)
button1=Button(buttonframe,text="",bd=2,relief = GROOVE ,font=("Segoe UI","10","bold"),height=5,width=10,command=lambda:check(button1))
button1.grid(row=0,column=0)
button2=Button(buttonframe,text="",bd=2,relief = GROOVE ,font=("Segoe UI","10","bold"),height=5,width=10,command=lambda:check(button2))
button2.grid(row=0,column=1)
button3=Button(buttonframe,text="",bd=2,relief = GROOVE ,font=("Segoe UI","10","bold"),height=5,width=10,command=lambda:check(button3))
button3.grid(row=0,column=2)
button4=Button(buttonframe,text="",bd=2,relief = GROOVE ,font=("Segoe UI","10","bold"),height=5,width=10,command=lambda:check(button4))
button4.grid(row=1,column=0)
button5=Button(buttonframe,text="",bd=2,relief = GROOVE ,font=("Segoe UI","10","bold"),height=5,width=10,command=lambda:check(button5))
button5.grid(row=1,column=1)
button6=Button(buttonframe,text="",bd=2,relief = GROOVE ,font=("Segoe UI","10","bold"),height=5,width=10,command=lambda:check(button6))
button6.grid(row=1,column=2)
button7=Button(buttonframe,text="",bd=2,relief = GROOVE ,font=("Segoe UI","10","bold"),height=5,width=10,command=lambda:check(button7))
button7.grid(row=2,column=0)
button8=Button(buttonframe,text="",bd=2,relief = GROOVE ,font=("Segoe UI","10","bold"),height=5,width=10,command=lambda:check(button8))
button8.grid(row=2,column=1)
button9=Button(buttonframe,text="",bd=2,relief = GROOVE ,font=("Segoe UI","10","bold"),height=5,width=10,command=lambda:check(button9))
button9.grid(row=2,column=2)
buttonresetscore=Button(root,text="---| Reset $core |---",font=("Verdana","13","normal"),command=lambda:resetscore())
buttonresetscore.pack(fill=X,side=BOTTOM)
buttonresetboard=Button(root,text="---| Reset Board # |---",font=("Verdana","13","normal"),command=lambda:reset())
buttonresetboard.pack(fill=X,side=BOTTOM)
root.mainloop()
|
the-stack_0_305 | from __future__ import unicode_literals
import onedrivesdk
from onedrivesdk.helpers import GetAuthCodeServer
from PIL import Image
import os
input = getattr(__builtins__, 'raw_input', input)
def main():
redirect_uri = "http://localhost:8080/"
client_secret = "BqaTYqI0XI7wDKcnJ5i3MvLwGcVsaMVM"
client = onedrivesdk.get_default_client(client_id='00000000481695BB',
scopes=['wl.signin',
'wl.offline_access',
'onedrive.readwrite'])
auth_url = client.auth_provider.get_auth_url(redirect_uri)
# Block thread until we have the code
code = GetAuthCodeServer.get_auth_code(auth_url, redirect_uri)
# Finally, authenticate!
client.auth_provider.authenticate(code, redirect_uri, client_secret)
item_id = "root"
copy_item_ids = None
action = 0
while True:
items = navigate(client, item_id)
print("0: UP")
count = 0
for count, item in enumerate(items):
print("{} {}".format(count+1, item.name if item.folder is None else "/"+item.name))
selected = input("Select item, enter 'C' to copy all, enter 'L' to list changes in current folder: ")
if selected == "C":
copy_item_ids = []
for item in items:
copy_item_ids.append(item.id)
elif selected == "L":
token = input("Enter your token, or nothing if you do not have one: ")
list_changes(client, item_id, token)
else:
selected = int(selected)
if selected == 0:
item_id = get_parent_id(client, item_id)
else:
action = int(input("Select action: 1:Navigate 2:Rename 3:View Thumbnail 4: Get Sharing Link 5: List Changes 6:Download 7:Upload 8:Delete 9:Copy{}... ".format(" 10: Paste" if copy_item_ids else "")))
if items[selected-1].folder is None or (action != 6 and action != 1):
if action == 1:
print("Can't navigate a file")
elif action == 2:
rename(client, items[selected-1].id)
elif action == 3:
view_thumbnail(client, items[selected-1].id)
elif action == 4:
get_sharing_link(client, items[selected-1].id)
elif action == 5:
token = input("Enter your token, or nothing if you do not have one: ")
list_changes(client, items[selected-1].id, token)
elif action == 6:
download(client, items[selected-1].id)
elif action == 7:
                        if items[selected-1].folder is None:
print("You cannot upload to a file")
else:
upload(client, items[selected-1].id)
elif action == 8:
delete(client, items[selected-1].id)
elif action == 9:
copy_item_ids = [items[selected-1].id]
elif action == 10 and copy_item_ids:
if items[selected-1].folder:
paste(client, items[selected-1].id, copy_item_ids)
else:
print("Can't copy to a file")
else:
item_id = items[selected-1].id
def navigate(client, item_id):
items = client.item(id=item_id).children.get()
return items
def rename(client, item_id):
new_name = input("Enter new name: ")
renamed_item = onedrivesdk.Item()
renamed_item.name = new_name
renamed_item.id = item_id
client.item(id=item_id).update(renamed_item)
def view_thumbnail(client, item_id):
if len(client.item(id=item_id).thumbnails.get()) == 0:
print("File does not have any thumbnails!\n")
else:
action = int(input("Size? 1:Small 2:Medium 3:Large... "))
try:
os.remove("./tmp_thumb.jpg")
except:
pass
if action == 1:
client.item(id=item_id).thumbnails[0].small.download("./tmp_thumb.jpg")
elif action == 2:
client.item(id=item_id).thumbnails[0].medium.download("./tmp_thumb.jpg")
elif action == 3:
client.item(id=item_id).thumbnails[0].large.download("./tmp_thumb.jpg")
image = Image.open("./tmp_thumb.jpg")
image.show()
def get_sharing_link(client, item_id):
action = int(input("Type? 1:View 2:Edit... "))
permission = client.item(id=item_id).create_link("view" if action == 1 else "edit").post()
print("\n{}\n".format(permission.link.web_url))
def download(client, item_id):
directory = input("Enter download directory (can be relative): ")
client.item(id=item_id).download(directory)
def upload(client, item_id):
directory = input("Enter upload file directory (can be relative): ")
name = input("Enter file name with extension: ")
client.item(id=item_id).children[name].upload(directory)
def delete(client, item_id):
confirm = input("Confirm delete? Y/N: ")
if confirm == "Y":
client.item(id=item_id).delete()
def paste(client, item_id, copy_item_ids):
ref = onedrivesdk.ItemReference()
ref.id = item_id
for id in copy_item_ids:
client.item(id=id).copy(parent_reference=ref).post()
def list_changes(client, item_id, token):
collection_page = client.item(id=item_id).delta(token).get()
for item in collection_page:
print(item.name)
print("TOKEN: {}".format(collection_page.token))
def get_parent_id(client, item_id):
id = client.item(id=item_id).get().parent_reference.id
return id
if __name__ == "__main__":
main() |
the-stack_0_309 | def report_generator(file_path1, file_path2):
import numpy as np
import pandas as pd
from IPython.display import display
# read excel files
df1 = pd.read_excel(file_path1, sheet_name = 1, index_col= 0, header = 1, usecols = range(41), skipfooter = 17)
df2 = pd.read_excel(file_path2, sheet_name = 6, index_col= 0, header = 0, usecols = range(46), skipfooter = 0)
cols = [0, 3, 5, 8, 10, 13, 15, 18]
df3 = pd.read_excel(file_path2, sheet_name = 1, header = 0, usecols = cols, skipfooter = 3)
df4 = pd.read_excel(file_path2, sheet_name = 2, header = 0, usecols = cols, skipfooter = 6)
df5 = pd.concat([df3.tail(2), df4.tail(2)], axis = 1)
# check the data
display(df1.tail(2))
display(df2.tail(2))
display(df5.tail(2))
report = pd.read_excel('一手住宅简报格式.xlsx', sheet_name = 1, header = 0, index_col = 0,
skipfooter = 33, usecols = range(0, 45))
display(report.tail(2))
# generate supply and sales data of new houses in 40 cities
cities = list(report.columns[4:])
    supply_sales = {}
    for i in cities:
        supply_sales[i] = [df1[i][-1], df2[i][-1]]
    result = pd.DataFrame(supply_sales, index = ['40城住宅成交', '40城住宅供应'])
# generate new house prices in 8 major cities
dict2 = {}
k = 0
j = 1
while j <= 15:
dict2[df5.columns[k]] = df5.iloc[-1, j]
k = k + 2
j = j + 2
result2 = pd.DataFrame(dict2, index = [df5.iloc[-1, 0]])
# write the results into one excel file
writer = pd.ExcelWriter('result_newhouse.xlsx')
result.to_excel(writer, sheet_name = 'supply_and_sales')
result2.to_excel(writer, sheet_name = 'prices')
writer.save()
return
print('Run report_generator(<40-city workbook>, <20-city workbook>)')
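# Example invocation (hypothetical file names):
# report_generator('40city_newhouse.xlsx', '20city_newhouse.xlsx')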
|
the-stack_0_310 | import copy
import unittest
from datetime import datetime
from mltrace.db import Component, ComponentRun, IOPointer, Store
class TestDags(unittest.TestCase):
def setUp(self):
self.store = Store("test")
def testLinkedList(self):
# Create chain of component runs
expected_result = []
num_runs = 10
for i in range(1, num_runs + 1):
self.store.create_component(f"mock_component_{i}", "", "")
inp = self.store.get_io_pointer(f"iop_{i}")
out = self.store.get_io_pointer(f"iop_{i + 1}")
cr = self.store.initialize_empty_component_run(
f"mock_component_{i}"
)
cr.set_start_timestamp()
cr.set_end_timestamp()
cr.add_input(inp)
cr.add_output(out)
self.store.set_dependencies_from_inputs(cr)
self.store.commit_component_run(cr)
expected_result.append((num_runs - i, i))
# Reverse the expected result
expected_result.reverse()
# Trace the final output
trace = self.store.trace("iop_11")
level_id = [(level, cr.id) for level, cr in trace]
self.assertEqual(expected_result, level_id)
def testVersionedComputation(self):
# Run the same computation many times
self.store.create_component("mock_component", "", "")
num_runs = 10
for i in range(1, num_runs + 1):
inp = self.store.get_io_pointer("inp")
out = self.store.get_io_pointer("out")
cr = self.store.initialize_empty_component_run("mock_component")
cr.set_start_timestamp()
cr.set_end_timestamp()
cr.add_input(inp)
cr.add_output(out)
self.store.set_dependencies_from_inputs(cr)
self.store.commit_component_run(cr)
# Trace the out pointer. Only most recent run ID should show.
trace = self.store.trace("out")
self.assertEqual(len(trace), 1)
self.assertEqual(trace[0][0], 0)
self.assertEqual(trace[0][1].id, num_runs)
def testTree(self):
        # Create a binary tree of component runs, num_levels deep
num_levels = 2
global cr_counter
global iop_counter
cr_counter = 1
iop_counter = 1
def create_tree(level, inp):
if level == num_levels:
return
global cr_counter
global iop_counter
self.store.create_component(f"mock_component_{cr_counter}", "", "")
cr = self.store.initialize_empty_component_run(
f"mock_component_{cr_counter}"
)
cr_counter += 1
cr.set_start_timestamp()
cr.set_end_timestamp()
# Create output pointers
out1 = self.store.get_io_pointer(f"iop_{iop_counter}")
iop_counter += 1
out2 = self.store.get_io_pointer(f"iop_{iop_counter}")
iop_counter += 1
# Add and commit component run
cr.add_input(inp)
cr.add_outputs([out1, out2])
self.store.set_dependencies_from_inputs(cr)
self.store.commit_component_run(cr)
# Create left and right trees
create_tree(level + 1, out1)
create_tree(level + 1, out2)
# Create first input pointer and tree of computation
inp = self.store.get_io_pointer(f"iop_{iop_counter}")
iop_counter += 1
create_tree(0, inp)
# Grab last iop id and trace it
last_iop_id = f"iop_{iop_counter - 1}"
trace = self.store.trace(last_iop_id)
level_id = [(level, cr.id) for level, cr in trace]
self.assertEqual(level_id, [(0, 3), (1, 1)])
def testCycle(self):
# Create cycle. Since dependencies are versioned, we shouldn't run
# into problems.
# Create io pointers and components
iop1 = self.store.get_io_pointer("iop1")
iop2 = self.store.get_io_pointer("iop2")
self.store.create_component("component_1", "", "")
self.store.create_component("component_2", "", "")
# Create component runs
cr = self.store.initialize_empty_component_run("component_1")
cr.set_start_timestamp()
cr.set_end_timestamp()
cr.add_input(iop1)
cr.add_output(iop2)
self.store.set_dependencies_from_inputs(cr)
self.store.commit_component_run(cr)
cr = self.store.initialize_empty_component_run("component_2")
cr.set_start_timestamp()
cr.set_end_timestamp()
cr.add_input(iop2)
cr.add_output(iop1)
self.store.set_dependencies_from_inputs(cr)
self.store.commit_component_run(cr)
# Trace iop1
trace_1 = [(level, cr.id) for level, cr in self.store.trace("iop1")]
trace_2 = [(level, cr.id) for level, cr in self.store.trace("iop2")]
self.assertEqual(trace_1, [(0, 2), (1, 1)])
self.assertEqual(trace_2, [(0, 1)])
def testStaleUpdate(self):
# Create computation with stale update.
iop1 = self.store.get_io_pointer("iop1")
iop2 = self.store.get_io_pointer("iop2")
iop3 = self.store.get_io_pointer("iop3")
iop4 = self.store.get_io_pointer("iop4")
self.store.create_component("component_1", "", "")
self.store.create_component("component_2", "", "")
# Create first component
cr = self.store.initialize_empty_component_run("component_1")
cr.set_start_timestamp()
cr.set_end_timestamp()
cr.add_input(iop1)
cr.add_output(iop2)
self.store.set_dependencies_from_inputs(cr)
self.store.commit_component_run(cr)
# Create second component run
cr = self.store.initialize_empty_component_run("component_1")
cr.set_start_timestamp()
cr.set_end_timestamp()
cr.add_input(iop1)
cr.add_output(iop3)
self.store.set_dependencies_from_inputs(cr)
self.store.commit_component_run(cr)
# Create third component run that depends on the first (stale update)
cr = self.store.initialize_empty_component_run("component_2")
cr.set_start_timestamp()
cr.set_end_timestamp()
cr.add_input(iop2)
cr.add_output(iop4)
self.store.set_dependencies_from_inputs(cr)
self.store.commit_component_run(cr)
# Trace iop4
trace = [
(level, cr.id, cr.stale) for level, cr in self.store.trace("iop4")
]
res = [
(
0,
3,
[
"component_1 (ID 1) has 1 fresher run(s) that began "
+ "before this component run started."
],
),
(1, 1, []),
]
self.assertEqual(trace, res)
def testStaleTime(self):
# Create computation with stale update.
iop1 = self.store.get_io_pointer("iop1")
iop2 = self.store.get_io_pointer("iop2")
iop3 = self.store.get_io_pointer("iop3")
self.store.create_component("component_1", "", "")
self.store.create_component("component_2", "", "")
now = datetime.utcnow()
# Create first component
cr = self.store.initialize_empty_component_run("component_1")
cr.set_start_timestamp(now.replace(month=now.month - 2))
cr.set_end_timestamp()
cr.add_input(iop1)
cr.add_output(iop2)
self.store.set_dependencies_from_inputs(cr)
self.store.commit_component_run(cr)
# Create second component run
cr = self.store.initialize_empty_component_run("component_2")
cr.set_start_timestamp()
cr.set_end_timestamp()
cr.add_input(iop2)
cr.add_output(iop3)
self.store.set_dependencies_from_inputs(cr)
self.store.commit_component_run(cr)
# Trace
trace = [
(level, cr.id, cr.stale) for level, cr in self.store.trace("iop3")
]
res = [(0, 2, ["component_1 (ID 1) was run 61 days ago."]), (1, 1, [])]
self.assertEqual(trace, res)
if __name__ == "__main__":
unittest.main()
|
the-stack_0_311 | import io
import uuid
from mitmproxy.test import tutils
from mitmproxy import tcp
from mitmproxy import websocket
from mitmproxy import controller
from mitmproxy import http
from mitmproxy import flow
from mitmproxy.net import http as net_http
from mitmproxy.proxy import context
from wsproto.frame_protocol import Opcode
def ttcpflow(client_conn=True, server_conn=True, messages=True, err=None):
if client_conn is True:
client_conn = tclient_conn()
if server_conn is True:
server_conn = tserver_conn()
if messages is True:
messages = [
tcp.TCPMessage(True, b"hello"),
tcp.TCPMessage(False, b"it's me"),
]
if err is True:
err = terr()
f = tcp.TCPFlow(client_conn, server_conn)
f.messages = messages
f.error = err
f.reply = controller.DummyReply()
return f
def twebsocketflow(client_conn=True, server_conn=True, messages=True, err=None, handshake_flow=True):
if client_conn is True:
client_conn = tclient_conn()
if server_conn is True:
server_conn = tserver_conn()
if handshake_flow is True:
req = http.HTTPRequest(
"example.com",
80,
b"GET",
b"http",
b"example.com",
b"/ws",
b"HTTP/1.1",
headers=net_http.Headers(
connection="upgrade",
upgrade="websocket",
sec_websocket_version="13",
sec_websocket_key="1234",
),
content=b'',
trailers=None,
timestamp_start=946681200,
timestamp_end=946681201,
)
resp = http.HTTPResponse(
b"HTTP/1.1",
101,
reason=net_http.status_codes.RESPONSES.get(101),
headers=net_http.Headers(
connection='upgrade',
upgrade='websocket',
sec_websocket_accept=b'',
),
content=b'',
trailers=None,
timestamp_start=946681202,
timestamp_end=946681203,
)
handshake_flow = http.HTTPFlow(client_conn, server_conn)
handshake_flow.request = req
handshake_flow.response = resp
f = websocket.WebSocketFlow(client_conn, server_conn, handshake_flow)
f.metadata['websocket_handshake'] = handshake_flow.id
handshake_flow.metadata['websocket_flow'] = f.id
handshake_flow.metadata['websocket'] = True
if messages is True:
messages = [
websocket.WebSocketMessage(Opcode.BINARY, True, b"hello binary"),
websocket.WebSocketMessage(Opcode.TEXT, True, b"hello text"),
websocket.WebSocketMessage(Opcode.TEXT, False, b"it's me"),
]
if err is True:
err = terr()
f.messages = messages
f.error = err
f.reply = controller.DummyReply()
return f
def tflow(client_conn=True, server_conn=True, req=True, resp=None, err=None):
"""
@type client_conn: bool | None | mitmproxy.proxy.connection.ClientConnection
@type server_conn: bool | None | mitmproxy.proxy.connection.ServerConnection
@type req: bool | None | mitmproxy.proxy.protocol.http.HTTPRequest
@type resp: bool | None | mitmproxy.proxy.protocol.http.HTTPResponse
@type err: bool | None | mitmproxy.proxy.protocol.primitives.Error
@return: mitmproxy.proxy.protocol.http.HTTPFlow
"""
if client_conn is True:
client_conn = tclient_conn()
if server_conn is True:
server_conn = tserver_conn()
if req is True:
req = tutils.treq()
if resp is True:
resp = tutils.tresp()
if err is True:
err = terr()
f = http.HTTPFlow(client_conn, server_conn)
f.request = req
f.response = resp
f.error = err
f.reply = controller.DummyReply()
return f
class DummyFlow(flow.Flow):
"""A flow that is neither HTTP nor TCP."""
def __init__(self, client_conn, server_conn, live=None):
super().__init__("dummy", client_conn, server_conn, live)
def tdummyflow(client_conn=True, server_conn=True, err=None):
if client_conn is True:
client_conn = tclient_conn()
if server_conn is True:
server_conn = tserver_conn()
if err is True:
err = terr()
f = DummyFlow(client_conn, server_conn)
f.error = err
f.reply = controller.DummyReply()
return f
def tclient_conn() -> context.Client:
c = context.Client.from_state(dict(
id=str(uuid.uuid4()),
address=("127.0.0.1", 22),
mitmcert=None,
tls_established=True,
timestamp_start=946681200,
timestamp_tls_setup=946681201,
timestamp_end=946681206,
sni="address",
cipher_name="cipher",
alpn_proto_negotiated=b"http/1.1",
tls_version="TLSv1.2",
tls_extensions=[(0x00, bytes.fromhex("000e00000b6578616d"))],
state=0,
sockname=("", 0),
error=None,
tls=False,
certificate_list=[],
alpn_offers=[],
cipher_list=[],
))
c.reply = controller.DummyReply()
return c
def tserver_conn() -> context.Server:
c = context.Server.from_state(dict(
id=str(uuid.uuid4()),
address=("address", 22),
source_address=("address", 22),
ip_address=("192.168.0.1", 22),
timestamp_start=946681202,
timestamp_tcp_setup=946681203,
timestamp_tls_setup=946681204,
timestamp_end=946681205,
tls_established=True,
sni="address",
alpn_proto_negotiated=None,
tls_version="TLSv1.2",
via=None,
state=0,
error=None,
tls=False,
certificate_list=[],
alpn_offers=[],
cipher_name=None,
cipher_list=[],
via2=None,
))
c.reply = controller.DummyReply()
c.rfile = io.BytesIO()
c.wfile = io.BytesIO()
return c
def terr(content="error"):
"""
@return: mitmproxy.proxy.protocol.primitives.Error
"""
err = flow.Error(content)
return err
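# Usage sketch (added; mirrors how mitmproxy's own tests use these helpers):
#
#   f = tflow(resp=True)     # HTTP flow with a canned request and response
#   wf = twebsocketflow()    # WebSocket flow plus its handshake HTTP flow
#   tf = ttcpflow(err=True)  # TCP flow carrying an error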
|
the-stack_0_312 | # Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
#
# Based on:
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
"""Construct minibatches for Detectron networks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import cv2
import logging
import numpy as np
from core.config import cfg, switch_to_teacher, switch_to_student, teacher_cfg
import roi_data.fast_rcnn
import roi_data.retinanet
import roi_data.rpn
import utils.blob as blob_utils
logger = logging.getLogger(__name__)
def get_minibatch_blob_names(is_training=True):
"""Return blob names in the order in which they are read by the data loader.
"""
# data blob: holds a batch of N images, each with 3 channels
blob_names = ['data']
if cfg.DISTILLATION.DISTILLATION_ON:
blob_names.append('teacher/data')
if cfg.RPN.RPN_ON:
# RPN-only or end-to-end Faster R-CNN
blob_names += roi_data.rpn.get_rpn_blob_names(is_training=is_training)
elif cfg.RETINANET.RETINANET_ON:
blob_names += roi_data.retinanet.get_retinanet_blob_names(
is_training=is_training
)
else:
# Fast R-CNN like models trained on precomputed proposals
blob_names += roi_data.fast_rcnn.get_fast_rcnn_blob_names(
is_training=is_training
)
return blob_names
def get_minibatch(roidb):
"""Given a roidb, construct a minibatch sampled from it."""
# We collect blobs from each image onto a list and then concat them into a
# single tensor, hence we initialize each blob to an empty list
blobs = {k: [] for k in get_minibatch_blob_names()}
# Get the input image blob, formatted for caffe2
im_blob, im_scales = _get_image_blob(roidb)
if cfg.DISTILLATION.DISTILLATION_ON:
        teacher_cfg.TRAIN.SCALES = cfg.TRAIN.SCALES
        teacher_cfg.TRAIN.MAX_SIZE = cfg.TRAIN.MAX_SIZE
        teacher_blob, _ = _get_image_blob(roidb, cfg=teacher_cfg)
blobs['data'] = im_blob
if cfg.DISTILLATION.DISTILLATION_ON:
blobs['teacher/data']=teacher_blob
if cfg.RPN.RPN_ON:
# RPN-only or end-to-end Faster/Mask R-CNN
valid = roi_data.rpn.add_rpn_blobs(blobs, im_scales, roidb)
elif cfg.RETINANET.RETINANET_ON:
im_width, im_height = im_blob.shape[3], im_blob.shape[2]
# im_width, im_height corresponds to the network input: padded image
# (if needed) width and height. We pass it as input and slice the data
# accordingly so that we don't need to use SampleAsOp
valid = roi_data.retinanet.add_retinanet_blobs(
blobs, im_scales, roidb, im_width, im_height
)
else:
# Fast R-CNN like models trained on precomputed proposals
valid = roi_data.fast_rcnn.add_fast_rcnn_blobs(blobs, im_scales, roidb)
return blobs, valid
def _get_image_blob(roidb,cfg=cfg):
"""Builds an input blob from the images in the roidb at the specified
scales.
"""
num_images = len(roidb)
# Sample random scales to use for each image in this batch
scale_inds = np.random.randint(
0, high=len(cfg.TRAIN.SCALES), size=num_images
)
processed_ims = []
im_scales = []
for i in range(num_images):
im = cv2.imread(roidb[i]['image'])
assert im is not None, \
'Failed to read image \'{}\''.format(roidb[i]['image'])
if roidb[i]['flipped']:
im = im[:, ::-1, :]
target_size = cfg.TRAIN.SCALES[scale_inds[i]]
im, im_scale = blob_utils.prep_im_for_blob(
            im, cfg.PIXEL_MEANS, cfg.PIXEL_DIV, cfg.PIXEL_STD, [target_size], cfg.TRAIN.MAX_SIZE
)
im_scales.append(im_scale[0])
processed_ims.append(im[0])
# Create a blob to hold the input images
blob = blob_utils.im_list_to_blob(processed_ims)
return blob, im_scales
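

# Minimal usage sketch (assumption-heavy, not in the original file): cfg must
# already be loaded from a YAML config, and each roidb entry must point at a
# readable image. The path and keys below are hypothetical stand-ins for the
# Detectron roidb format.
#
#   roidb = [{'image': '/data/coco/img_0001.jpg', 'flipped': False, ...}]
#   blobs, valid = get_minibatch(roidb)
#   blobs['data'].shape    # (N, 3, H, W) image blob, NCHW layout for Caffe2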
|
the-stack_0_314 | """
Script for testing on CUB.
Sample usage:
python -m cmr.benchmark.evaluate --split val --name <model_name> --num_train_epoch <model_epoch>
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import os
import os.path as osp
import numpy as np
import torch
import scipy.io as sio
from ..nnutils import test_utils
from ..data import cub as cub_data
from ..nnutils import predictor as pred_utils
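
# `convert2np` is referenced by ShapeTester.visualize below but its import is
# not part of this excerpt; a minimal stand-in (an assumption, not the
# original helper) that moves a torch tensor to host memory as a numpy array:
def convert2np(tensor):
    return tensor.detach().cpu().numpy()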
flags.DEFINE_boolean('visualize', False, 'if true visualizes things')
opts = flags.FLAGS
class ShapeTester(test_utils.Tester):
def define_model(self):
opts = self.opts
self.predictor = pred_utils.MeshPredictor(opts)
# for visualization
self.renderer = self.predictor.vis_rend
self.renderer.set_bgcolor([1., 1., 1.])
self.renderer.renderer.renderer.renderer.image_size = 512
self.renderer.set_light_dir([0, 1, -1], 0.38)
def init_dataset(self):
opts = self.opts
self.data_module = cub_data
torch.manual_seed(0)
self.dataloader = self.data_module.data_loader(opts)
def evaluate(self, outputs, batch):
"""
Compute IOU and keypoint error
"""
opts = self.opts
bs = opts.batch_size
## compute iou
mask_gt = batch['mask'].view(bs, -1).numpy()
mask_pred = outputs['mask_pred'].cpu().view(bs, -1).type_as(
batch['mask']).numpy()
intersection = mask_gt * mask_pred
union = mask_gt + mask_pred - intersection
iou = intersection.sum(1) / union.sum(1)
# Compute pck
padding_frac = opts.padding_frac
# The [-1,1] coordinate frame in which keypoints corresponds to:
# (1+2*padding_frac)*max_bbox_dim in image coords
# pt_norm = 2* (pt_img - trans)/((1+2*pf)*max_bbox_dim)
# err_pt = 2*err_img/((1+2*pf)*max_bbox_dim)
# err_pck_norm = err_img/max_bbox_dim = err_pt*(1+2*pf)/2
        # so the keypoint error in the canonical frame should be multiplied by:
err_scaling = (1 + 2 * padding_frac) / 2.0
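        # e.g. with padding_frac = 0.25, err_scaling = (1 + 0.5) / 2 = 0.75,
        # so a canonical-frame error of 0.2 maps to err_img/max_bbox_dim = 0.15.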
kps_gt = batch['kp'].cpu().numpy()
kps_vis = kps_gt[:, :, 2]
kps_gt = kps_gt[:, :, 0:2]
kps_pred = outputs['kp_pred'].cpu().type_as(batch['kp']).numpy()
kps_err = kps_pred - kps_gt
kps_err = np.sqrt(np.sum(kps_err * kps_err, axis=2)) * err_scaling
return iou, kps_err, kps_vis
def visualize(self, outputs, batch):
vert = outputs['verts'][0]
cam = outputs['cam_pred'][0]
texture = outputs['texture'][0]
img_pred = self.renderer(vert, cam, texture=texture)
aroundz = []
aroundy = []
# for deg in np.arange(0, 180, 30):
for deg in np.arange(0, 150, 30):
rendz = self.renderer.diff_vp(
vert, cam, angle=-deg, axis=[1, 0, 0], texture=texture)
rendy = self.renderer.diff_vp(
vert, cam, angle=deg, axis=[0, 1, 0], texture=texture)
aroundz.append(rendz)
aroundy.append(rendy)
aroundz = np.hstack(aroundz)
aroundy = np.hstack(aroundy)
vps = np.vstack((aroundz, aroundy))
img = np.transpose(convert2np(batch['img'][0]), (1, 2, 0))
import matplotlib.pyplot as plt
plt.ion()
fig = plt.figure(1)
ax = fig.add_subplot(121)
ax.imshow(img)
ax.set_title('input')
ax.axis('off')
ax = fig.add_subplot(122)
ax.imshow(img_pred)
ax.set_title('pred_texture')
ax.axis('off')
plt.draw()
fig = plt.figure(2)
plt.imshow(vps)
plt.axis('off')
plt.draw()
plt.pause(0.01)
import ipdb
ipdb.set_trace()
def test(self):
opts = self.opts
bench_stats = {'ious': [], 'kp_errs': [], 'kp_vis': []}
if opts.ignore_pred_delta_v:
result_path = osp.join(opts.results_dir, 'results_meanshape.mat')
elif opts.use_sfm_ms:
result_path = osp.join(opts.results_dir,
'results_sfm_meanshape.mat')
else:
result_path = osp.join(opts.results_dir, 'results.mat')
if opts.use_sfm_camera:
result_path = result_path.replace('.mat', '_sfm_camera.mat')
print('Writing to %s' % result_path)
if not osp.exists(result_path):
n_iter = len(self.dataloader)
for i, batch in enumerate(self.dataloader):
if i % 100 == 0:
print('{}/{} evaluation iterations.'.format(i, n_iter))
if opts.max_eval_iter > 0 and (i >= opts.max_eval_iter):
break
outputs = self.predictor.predict(batch)
if opts.visualize:
self.visualize(outputs, batch)
iou, kp_err, kp_vis = self.evaluate(outputs, batch)
bench_stats['ious'].append(iou)
bench_stats['kp_errs'].append(kp_err)
bench_stats['kp_vis'].append(kp_vis)
if opts.save_visuals and (i % opts.visuals_freq == 0):
self.save_current_visuals(batch, outputs)
bench_stats['kp_errs'] = np.concatenate(bench_stats['kp_errs'])
bench_stats['kp_vis'] = np.concatenate(bench_stats['kp_vis'])
bench_stats['ious'] = np.concatenate(bench_stats['ious'])
sio.savemat(result_path, bench_stats)
else:
bench_stats = sio.loadmat(result_path)
# Report numbers.
mean_iou = bench_stats['ious'].mean()
n_vis_p = np.sum(bench_stats['kp_vis'], axis=0)
n_correct_p_pt1 = np.sum(
(bench_stats['kp_errs'] < 0.1) * bench_stats['kp_vis'], axis=0)
n_correct_p_pt15 = np.sum(
(bench_stats['kp_errs'] < 0.15) * bench_stats['kp_vis'], axis=0)
pck1 = (n_correct_p_pt1 / n_vis_p).mean()
pck15 = (n_correct_p_pt15 / n_vis_p).mean()
print('%s mean iou %.3g, pck.1 %.3g, pck.15 %.3g' %
(osp.basename(result_path), mean_iou, pck1, pck15))
def main(_):
opts.n_data_workers = 0
opts.batch_size = 1
opts.results_dir = osp.join(opts.results_dir_base, '%s' % (opts.split),
opts.name, 'epoch_%d' % opts.num_train_epoch)
if not osp.exists(opts.results_dir):
print('writing to %s' % opts.results_dir)
os.makedirs(opts.results_dir)
torch.manual_seed(0)
tester = ShapeTester(opts)
tester.init_testing()
tester.test()
if __name__ == '__main__':
app.run(main)
|
the-stack_0_315 | # -*- coding: utf-8 -*-
"""
Mesa Time Module
================
Objects for handling the time component of a model. In particular, this module
contains Schedulers, which handle agent activation. A Scheduler is an object
which controls when agents are called upon to act, and when.
The activation order can have a serious impact on model behavior, so it's
important to specify it explicitly. Example simple activation regimes include
activating all agents in the same order every step, shuffling the activation
order every time, activating each agent *on average* once per step, and more.
Key concepts:
Step: Many models advance in 'steps'. A step may involve the activation of
all agents, or a random (or selected) subset of them. Each agent in turn
may have their own step() method.
Time: Some models may simulate a continuous 'clock' instead of discrete
steps. However, by default, the Time is equal to the number of steps the
model has taken.
TODO: Have the schedulers use the model's randomizer, to keep random number
seeds consistent and allow for replication.
"""
import random
from collections import OrderedDict
class BaseScheduler:
""" Simplest scheduler; activates agents one at a time, in the order
they were added.
Assumes that each agent added has a *step* method which takes no arguments.
(This is explicitly meant to replicate the scheduler in MASON).
"""
def __init__(self, model):
""" Create a new, empty BaseScheduler. """
self.model = model
self.steps = 0
self.time = 0
self._agents = OrderedDict()
def add(self, agent):
""" Add an Agent object to the schedule.
Args:
agent: An Agent to be added to the schedule. NOTE: The agent must
have a step() method.
"""
self._agents[agent.unique_id] = agent
def remove(self, agent):
""" Remove all instances of a given agent from the schedule.
Args:
agent: An agent object.
"""
del self._agents[agent.unique_id]
def step(self):
""" Execute the step of all the agents, one at a time. """
agent_keys = list(self._agents.keys())
for agent_key in agent_keys:
self._agents[agent_key].step()
self.steps += 1
self.time += 1
def get_agent_count(self):
""" Returns the current number of agents in the queue. """
return len(self._agents.keys())
@property
def agents(self):
return list(self._agents.values())
class RandomActivation(BaseScheduler):
""" A scheduler which activates each agent once per step, in random order,
with the order reshuffled every step.
This is equivalent to the NetLogo 'ask agents...' and is generally the
default behavior for an ABM.
    Assumes that all agents have a step() method which takes no arguments.
"""
def step(self):
""" Executes the step of all agents, one at a time, in
random order.
"""
agent_keys = list(self._agents.keys())
random.shuffle(agent_keys)
for agent_key in agent_keys:
self._agents[agent_key].step()
self.steps += 1
self.time += 1
class SimultaneousActivation(BaseScheduler):
""" A scheduler to simulate the simultaneous activation of all the agents.
This scheduler requires that each agent have two methods: step and advance.
step() activates the agent and stages any necessary changes, but does not
apply them yet. advance() then applies the changes.
"""
def step(self):
""" Step all agents, then advance them. """
agent_keys = list(self._agents.keys())
for agent_key in agent_keys:
self._agents[agent_key].step()
for agent_key in agent_keys:
self._agents[agent_key].advance()
self.steps += 1
self.time += 1
class StagedActivation(BaseScheduler):
""" A scheduler which allows agent activation to be divided into several
stages instead of a single `step` method. All agents execute one stage
before moving on to the next.
Agents must have all the stage methods implemented. Stage methods take a
model object as their only argument.
This schedule tracks steps and time separately. Time advances in fractional
increments of 1 / (# of stages), meaning that 1 step = 1 unit of time.
"""
def __init__(self, model, stage_list=None, shuffle=False,
shuffle_between_stages=False):
""" Create an empty Staged Activation schedule.
Args:
model: Model object associated with the schedule.
stage_list: List of strings of names of stages to run, in the
order to run them in.
shuffle: If True, shuffle the order of agents each step.
shuffle_between_stages: If True, shuffle the agents after each
stage; otherwise, only shuffle at the start
of each step.
"""
super().__init__(model)
self.stage_list = ["step"] if not stage_list else stage_list
self.shuffle = shuffle
self.shuffle_between_stages = shuffle_between_stages
self.stage_time = 1 / len(self.stage_list)
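
    # Example: with stage_list=["move", "eat"], one call to step() runs every
    # agent's move() and then every agent's eat(); self.time advances by
    # self.stage_time = 0.5 after each stage, for 1.0 per full step.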
def step(self):
""" Executes all the stages for all agents. """
agent_keys = list(self._agents.keys())
if self.shuffle:
random.shuffle(agent_keys)
for stage in self.stage_list:
for agent_key in agent_keys:
getattr(self._agents[agent_key], stage)() # Run stage
if self.shuffle_between_stages:
random.shuffle(agent_keys)
self.time += self.stage_time
self.steps += 1
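

# ---------------------------------------------------------------------------
# A minimal, self-contained usage sketch (not part of Mesa itself). The toy
# agent below only provides the two things the schedulers assume: a unique_id
# and a step() method; real code would subclass mesa.Agent / mesa.Model.
if __name__ == "__main__":
    class _CountingAgent:
        def __init__(self, unique_id):
            self.unique_id = unique_id
            self.wealth = 0

        def step(self):
            self.wealth += 1

    schedule = RandomActivation(model=None)  # model is unused in this demo
    for i in range(3):
        schedule.add(_CountingAgent(i))
    for _ in range(5):
        schedule.step()  # activates all 3 agents, in a fresh random order
    assert all(a.wealth == 5 for a in schedule.agents)
    print("steps:", schedule.steps, "time:", schedule.time)  # -> 5, 5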
|
the-stack_0_316 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for doing coverage analysis on the RPC interface.
Provides a way to track which RPC commands are exercised during
testing.
"""
import os
REFERENCE_FILENAME = 'rpc_interface.txt'
class AuthServiceProxyWrapper(object):
"""
An object that wraps AuthServiceProxy to record specific RPC calls.
"""
def __init__(self, auth_service_proxy_instance, coverage_logfile=None):
"""
Kwargs:
auth_service_proxy_instance (AuthServiceProxy): the instance
being wrapped.
coverage_logfile (str): if specified, write each service_name
out to a file when called.
"""
self.auth_service_proxy_instance = auth_service_proxy_instance
self.coverage_logfile = coverage_logfile
def __getattr__(self, *args, **kwargs):
return_val = self.auth_service_proxy_instance.__getattr__(
*args, **kwargs)
return AuthServiceProxyWrapper(return_val, self.coverage_logfile)
def __call__(self, *args, **kwargs):
"""
Delegates to AuthServiceProxy, then writes the particular RPC method
called to a file.
"""
return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs)
rpc_method = self.auth_service_proxy_instance._service_name
if self.coverage_logfile:
with open(self.coverage_logfile, 'a+', encoding='utf8') as f:
f.write("%s\n" % rpc_method)
return return_val
@property
def url(self):
return self.auth_service_proxy_instance.url
def __truediv__(self, relative_uri):
return AuthServiceProxyWrapper(self.auth_service_proxy_instance / relative_uri)
def get_filename(dirname, n_node):
"""
Get a filename unique to the test process ID and node.
This file will contain a list of RPC commands covered.
"""
pid = str(os.getpid())
return os.path.join(
dirname, "coverage.pid%s.node%s.txt" % (pid, str(n_node)))
def write_all_rpc_commands(dirname, node):
"""
Write out a list of all RPC functions available in `luk-cli` for
coverage comparison. This will only happen once per coverage
directory.
Args:
dirname (str): temporary test dir
node (AuthServiceProxy): client
Returns:
bool. if the RPC interface file was written.
"""
filename = os.path.join(dirname, REFERENCE_FILENAME)
if os.path.isfile(filename):
return False
help_output = node.help().split('\n')
commands = set()
for line in help_output:
line = line.strip()
# Ignore blanks and headers
if line and not line.startswith('='):
commands.add("%s\n" % line.split()[0])
with open(filename, 'w', encoding='utf8') as f:
f.writelines(list(commands))
return True
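

# Usage sketch (hedged): in the functional-test framework the wrapper is
# placed around a node's AuthServiceProxy; the module path and URL below are
# hypothetical, not taken from this file.
#
#   from .authproxy import AuthServiceProxy
#   proxy = AuthServiceProxy("http://user:pass@127.0.0.1:18443")
#   logfile = get_filename("/tmp/rpc_coverage", n_node=0)
#   node = AuthServiceProxyWrapper(proxy, coverage_logfile=logfile)
#   node.getblockcount()   # result passes through; "getblockcount" is logged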
|
the-stack_0_317 | import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../libbeat/tests/system'))
from beat.beat import TestCase
class BaseTest(TestCase):
@classmethod
def setUpClass(self):
self.beat_name = "heartbeat"
self.beat_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../"))
|
the-stack_0_318 | # This module contains a synchronous implementation of a Channel Access client
# as three top-level functions: read, write, subscribe. They are comparatively
# simple and naive, with no caching or concurrency, and therefore less
# performant but more robust.
import getpass
import inspect
import logging
import selectors
import socket
import threading # just to make callback processing thread-safe
import time
import weakref
import caproto as ca
from .._dbr import ChannelType, SubscriptionType, field_types, native_type
from .._utils import (CaprotoError, CaprotoTimeoutError, ErrorResponseReceived,
adapt_old_callback_signature, get_environment_variables,
safe_getsockname)
from .repeater import spawn_repeater
__all__ = ('read', 'write', 'subscribe', 'block', 'interrupt',
'read_write_read')
logger = logging.getLogger('caproto.ctx')
# Make a dict to hold our tcp sockets.
sockets = {}
global_circuits = {}
_permission_to_block = [] # mutable state shared by block and interrupt
# Convenience functions that do both transport and caproto validation/ingest.
def send(circuit, command, pv_name=None):
if pv_name is not None:
tags = {'pv': pv_name}
else:
tags = None
buffers_to_send = circuit.send(command, extra=tags)
sockets[circuit].sendmsg(buffers_to_send)
def recv(circuit):
bytes_received = sockets[circuit].recv(4096)
commands, _ = circuit.recv(bytes_received)
for c in commands:
circuit.process_command(c)
return commands
def search(pv_name, udp_sock, timeout, *, max_retries=2):
# Set Broadcaster log level to match our logger.
b = ca.Broadcaster(our_role=ca.CLIENT)
b.client_address = safe_getsockname(udp_sock)
# Send registration request to the repeater
logger.debug('Registering with the Channel Access repeater.')
bytes_to_send = b.send(ca.RepeaterRegisterRequest())
env = get_environment_variables()
repeater_port = env['EPICS_CA_REPEATER_PORT']
client_address_list = ca.get_client_address_list()
local_address = ca.get_local_address()
try:
udp_sock.sendto(bytes_to_send, (local_address, repeater_port))
except OSError as exc:
raise ca.CaprotoNetworkError(
f"Failed to send to {local_address}:{repeater_port}") from exc
logger.debug("Searching for %r....", pv_name)
commands = (
ca.VersionRequest(0, ca.DEFAULT_PROTOCOL_VERSION),
ca.SearchRequest(pv_name, 0, ca.DEFAULT_PROTOCOL_VERSION))
bytes_to_send = b.send(*commands)
tags = {'role': 'CLIENT',
'our_address': b.client_address,
'direction': '--->>>'}
def send_search():
for dest in client_address_list:
tags['their_address'] = dest
b.log.debug(
'%d commands %dB',
len(commands), len(bytes_to_send), extra=tags)
try:
udp_sock.sendto(bytes_to_send, dest)
except OSError as exc:
host, port = dest
raise ca.CaprotoNetworkError(f"Failed to send to {host}:{port}") from exc
def check_timeout():
nonlocal retry_at
if time.monotonic() >= retry_at:
send_search()
retry_at = time.monotonic() + retry_timeout
if time.monotonic() - t > timeout:
raise CaprotoTimeoutError(f"Timed out while awaiting a response "
f"from the search for {pv_name!r}. Search "
f"requests were sent to this address list: "
f"{ca.get_address_list()}.")
# Initial search attempt
send_search()
# Await a search response, and keep track of registration status
retry_timeout = timeout / max((max_retries, 1))
t = time.monotonic()
retry_at = t + retry_timeout
try:
orig_timeout = udp_sock.gettimeout()
udp_sock.settimeout(retry_timeout)
while True:
try:
bytes_received, address = udp_sock.recvfrom(ca.MAX_UDP_RECV)
except socket.timeout:
check_timeout()
continue
check_timeout()
commands = b.recv(bytes_received, address)
b.process_commands(commands)
for command in commands:
if isinstance(command, ca.SearchResponse) and command.cid == 0:
address = ca.extract_address(command)
logger.debug('Found %r at %s:%d', pv_name, *address)
return address
else:
# None of the commands we have seen are a reply to our request.
# Receive more data.
continue
finally:
udp_sock.settimeout(orig_timeout)
def make_channel(pv_name, udp_sock, priority, timeout):
log = logging.LoggerAdapter(logging.getLogger('caproto.ch'), {'pv': pv_name})
address = search(pv_name, udp_sock, timeout)
try:
circuit = global_circuits[(address, priority)]
except KeyError:
circuit = global_circuits[(address, priority)] = ca.VirtualCircuit(
our_role=ca.CLIENT,
address=address,
priority=priority)
chan = ca.ClientChannel(pv_name, circuit)
new = False
if chan.circuit not in sockets:
new = True
sockets[chan.circuit] = socket.create_connection(chan.circuit.address,
timeout)
circuit.our_address = sockets[chan.circuit].getsockname()
try:
if new:
# Initialize our new TCP-based CA connection with a VersionRequest.
send(chan.circuit, ca.VersionRequest(
priority=priority,
version=ca.DEFAULT_PROTOCOL_VERSION),
pv_name)
send(chan.circuit, chan.host_name(socket.gethostname()))
send(chan.circuit, chan.client_name(getpass.getuser()))
send(chan.circuit, chan.create(), pv_name)
t = time.monotonic()
while True:
try:
commands = recv(chan.circuit)
if time.monotonic() - t > timeout:
raise socket.timeout
except socket.timeout:
raise CaprotoTimeoutError("Timeout while awaiting channel "
"creation.")
tags = {'direction': '<<<---',
'our_address': chan.circuit.our_address,
'their_address': chan.circuit.address}
for command in commands:
if isinstance(command, ca.Message):
tags['bytesize'] = len(command)
logger.debug("%r", command, extra=tags)
elif command is ca.DISCONNECTED:
raise CaprotoError('Disconnected during initialization')
if chan.states[ca.CLIENT] is ca.CONNECTED:
log.info("Channel connected.")
break
except BaseException:
sockets[chan.circuit].close()
del sockets[chan.circuit]
del global_circuits[(chan.circuit.address, chan.circuit.priority)]
raise
return chan
def _read(chan, timeout, data_type, data_count, notify, force_int_enums):
logger = chan.log
logger.debug("Detected native data_type %r.", chan.native_data_type)
ntype = native_type(chan.native_data_type) # abundance of caution
if ((ntype is ChannelType.ENUM) and
(data_type is None) and (not force_int_enums)):
logger.debug("Changing requested data_type to STRING.")
data_type = ChannelType.STRING
req = chan.read(data_type=data_type, data_count=data_count, notify=notify)
send(chan.circuit, req, chan.name)
t = time.monotonic()
while True:
try:
commands = recv(chan.circuit)
except socket.timeout:
commands = []
if time.monotonic() - t > timeout:
raise CaprotoTimeoutError("Timeout while awaiting reading.")
tags = {'direction': '<<<---',
'our_address': chan.circuit.our_address,
'their_address': chan.circuit.address}
for command in commands:
if isinstance(command, ca.Message):
tags['bytesize'] = len(command)
logger.debug("%r", command, extra=tags)
if (isinstance(command, (ca.ReadResponse, ca.ReadNotifyResponse)) and
command.ioid == req.ioid):
return command
elif isinstance(command, ca.ErrorResponse):
raise ErrorResponseReceived(command)
elif command is ca.DISCONNECTED:
raise CaprotoError('Disconnected while waiting for '
'read response')
def read(pv_name, *, data_type=None, data_count=None, timeout=1, priority=0,
notify=True, force_int_enums=False, repeater=True):
"""
Read a Channel.
Parameters
----------
pv_name : str
The PV name to read from
data_type : {'native', 'status', 'time', 'graphic', 'control'} or ChannelType or int ID, optional
Request specific data type or a class of data types, matched to the
channel's native data type. Default is Channel's native data type.
data_count : integer, optional
Requested number of values. Default is the channel's native data
count.
timeout : float, optional
Default is 1 second.
priority : 0, optional
Virtual Circuit priority. Default is 0, lowest. Highest is 99.
notify : boolean, optional
Send a ReadNotifyRequest instead of a ReadRequest. True by default.
force_int_enums : boolean, optional
Retrieve enums as integers. (Default is strings.)
repeater : boolean, optional
Spawn a Channel Access Repeater process if the port is available.
True default, as the Channel Access spec stipulates that well-behaved
clients should do this.
Returns
-------
response : ReadResponse or ReadNotifyResponse
Examples
--------
Get the value of a Channel named 'simple:A'.
>>> read('simple:A').data
array([1], dtype=int32)
Request a richer Channel Access data type that includes the timestamp, and
access the timestamp.
    >>> read('cat', data_type='time').metadata.timestamp
1570622339.042392
A convenience method is provided for access the timestamp as a Python
datetime object.
    >>> read('cat', data_type='time').metadata.stamp.as_datetime()
datetime.datetime(2019, 10, 9, 11, 58, 59, 42392)
The requested data type may also been given as a specific Channel Access
type
>>> from caproto import ChannelType
>>> read('cat', data_type=ChannelType.CTRL_FLOAT).metadata
DBR_CTRL_FLOAT(
status=<AlarmStatus.NO_ALARM: 0>,
severity=<AlarmSeverity.NO_ALARM: 0>,
upper_disp_limit=0.0,
lower_disp_limit=0.0,
upper_alarm_limit=0.0,
upper_warning_limit=0.0,
lower_warning_limit=0.0,
lower_alarm_limit=0.0,
upper_ctrl_limit=0.0,
lower_ctrl_limit=0.0,
precision=0,
units=b'')
    or the corresponding integer identifier
>>> read('cat', data_type=30).metadata
DBR_CTRL_FLOAT(
status=<AlarmStatus.NO_ALARM: 0>,
severity=<AlarmSeverity.NO_ALARM: 0>,
upper_disp_limit=0.0,
lower_disp_limit=0.0,
upper_alarm_limit=0.0,
upper_warning_limit=0.0,
lower_warning_limit=0.0,
lower_alarm_limit=0.0,
upper_ctrl_limit=0.0,
lower_ctrl_limit=0.0,
precision=0,
units=b'')
"""
if repeater:
# As per the EPICS spec, a well-behaved client should start a
# caproto-repeater that will continue running after it exits.
spawn_repeater()
udp_sock = ca.bcast_socket()
# Must bind or getsocketname() will raise on Windows.
# See https://github.com/caproto/caproto/issues/514.
udp_sock.bind(('', 0))
try:
udp_sock.settimeout(timeout)
chan = make_channel(pv_name, udp_sock, priority, timeout)
finally:
udp_sock.close()
try:
return _read(chan, timeout, data_type=data_type, data_count=data_count,
notify=notify, force_int_enums=force_int_enums)
finally:
try:
if chan.states[ca.CLIENT] is ca.CONNECTED:
send(chan.circuit, chan.clear(), chan.name)
finally:
sockets[chan.circuit].close()
del sockets[chan.circuit]
del global_circuits[(chan.circuit.address, chan.circuit.priority)]
def subscribe(pv_name, priority=0, data_type=None, data_count=None,
low=0.0, high=0.0, to=0.0, mask=None):
"""
Define a subscription.
Parameters
----------
pv_name : string
The PV name to subscribe to
priority : integer, optional
Used by the server to triage subscription responses when under high
load. 0 is lowest; 99 is highest.
data_type : {'native', 'status', 'time', 'graphic', 'control'} or ChannelType or int ID, optional
Request specific data type or a class of data types, matched to the
channel's native data type. Default is Channel's native data type.
data_count : integer, optional
Requested number of values. Default is the channel's native data
count, which can be checked in the Channel's attribute
:attr:`native_data_count`.
low, high, to : float, optional
deprecated by Channel Access, not yet implemented by caproto
mask : SubscriptionType, optional
Subscribe to selective updates.
Examples
--------
Define a subscription on the ``random_walk:x`` PV.
>>> sub = subscribe('random_walk:x')
Add one or more user-defined callbacks to process responses.
>>> def f(sub, response):
    ...     print(response.data)
...
>>> sub.add_callback(f)
Activate the subscription and process incoming responses.
>>> sub.block()
This is a blocking operation in the sync client. (To do this on a
background thread, use the threading client.) Interrupt using Ctrl+C or
by calling :meth:`sub.interrupt()` from another thread.
The subscription may be reactivated by calling ``sub.block()`` again.
To process multiple subscriptions at once, use the *function*
:func:`block`, which takes one or more Subscriptions as arguments.
>>> block(sub1, sub2)
There is also an :func:`interrupt` function, which is merely an alias to
the method.
"""
return Subscription(pv_name, priority, data_type, data_count, low, high,
to, mask)
def interrupt():
"""
Signal to :func:`block` to stop blocking. Idempotent.
This obviously cannot be called interactively while blocked;
it is intended to be called from another thread.
"""
_permission_to_block.clear()
def block(*subscriptions, duration=None, timeout=1, force_int_enums=False,
repeater=True):
"""
Activate one or more subscriptions and process incoming responses.
Use Ctrl+C (SIGINT) to escape, or from another thread, call
:func:`interrupt()`.
Parameters
----------
*subscriptions : Subscriptions
The list of subscriptions.
duration : float, optional
How many seconds to run for. Run forever (None) by default.
timeout : float, optional
        Default is 1 second. This is not the same as ``duration``; this is the
        timeout for failure in the event of no connection.
force_int_enums : boolean, optional
Retrieve enums as integers. (Default is strings.)
repeater : boolean, optional
Spawn a Channel Access Repeater process if the port is available.
True default, as the Channel Access spec stipulates that well-behaved
clients should do this.
Examples
--------
Activate subscription(s) and block while they process updates.
>>> sub1 = subscribe('cat')
    >>> sub2 = subscribe('dog')
>>> block(sub1, sub2)
"""
_permission_to_block.append(object())
if duration is not None:
deadline = time.time() + duration
else:
deadline = None
if repeater:
# As per the EPICS spec, a well-behaved client should start a
# caproto-repeater that will continue running after it exits.
spawn_repeater()
loggers = {}
for sub in subscriptions:
loggers[sub.pv_name] = logging.LoggerAdapter(logging.getLogger('caproto.ch'),
{'pv': sub.pv_name})
udp_sock = ca.bcast_socket()
# Must bind or getsocketname() will raise on Windows.
# See https://github.com/caproto/caproto/issues/514.
udp_sock.bind(('', 0))
try:
udp_sock.settimeout(timeout)
channels = {}
for sub in subscriptions:
pv_name = sub.pv_name
chan = make_channel(pv_name, udp_sock, sub.priority, timeout)
channels[sub] = chan
finally:
udp_sock.close()
try:
# Subscribe to all the channels.
sub_ids = {}
for sub, chan in channels.items():
loggers[chan.name].debug("Detected native data_type %r.",
chan.native_data_type)
# abundance of caution
ntype = field_types['native'][chan.native_data_type]
if ((ntype is ChannelType.ENUM) and (not force_int_enums)):
ntype = ChannelType.STRING
time_type = field_types['time'][ntype]
# Adjust the timeout during monitoring.
sockets[chan.circuit].settimeout(None)
loggers[chan.name].debug("Subscribing with data_type %r.",
time_type)
req = chan.subscribe(
data_type=time_type, data_count=sub.data_count, mask=sub.mask)
send(chan.circuit, req, chan.name)
sub_ids[(chan.circuit, req.subscriptionid)] = sub
logger.debug('Subscribed. Building socket selector.')
try:
circuits = set(chan.circuit for chan in channels.values())
selector = selectors.DefaultSelector()
sock_to_circuit = {}
for circuit in circuits:
sock = sockets[circuit]
sock_to_circuit[sock] = circuit
selector.register(sock, selectors.EVENT_READ)
if duration is None:
logger.debug('Continuing until SIGINT is received....')
while True:
events = selector.select(timeout=0.1)
if deadline is not None and time.time() > deadline:
logger.debug('Deadline reached.')
return
if not _permission_to_block:
logger.debug("Interrupted via "
"caproto.sync.client.interrupt().")
break
for selector_key, _ in events:
circuit = sock_to_circuit[selector_key.fileobj]
commands = recv(circuit)
for response in commands:
if isinstance(response, ca.ErrorResponse):
raise ErrorResponseReceived(response)
if response is ca.DISCONNECTED:
# TODO Re-connect.
raise CaprotoError("Disconnected")
sub = sub_ids.get((circuit, response.subscriptionid))
if sub:
sub.process(response)
except KeyboardInterrupt:
logger.debug('Received SIGINT. Closing.')
pass
finally:
_permission_to_block.clear()
try:
for chan in channels.values():
if chan.states[ca.CLIENT] is ca.CONNECTED:
send(chan.circuit, chan.clear(), chan.name)
finally:
# Reinstate the timeout for channel cleanup.
for chan in channels.values():
sockets[chan.circuit].settimeout(timeout)
sockets[chan.circuit].close()
del sockets[chan.circuit]
del global_circuits[(chan.circuit.address, chan.circuit.priority)]
def _write(chan, data, metadata, timeout, data_type, notify):
logger.debug("Detected native data_type %r.", chan.native_data_type)
# abundance of caution
ntype = field_types['native'][chan.native_data_type]
if (data_type is None) and (ntype is ChannelType.ENUM):
# Change data_type to STRING if data contains string-like data, or
# iterable of string-like data
stringy_data = False
if isinstance(data, (str, bytes)):
stringy_data = True
if hasattr(data, '__getitem__') \
and len(data) > 0 \
and isinstance(data[0], (str, bytes)):
stringy_data = True
if stringy_data:
logger.debug("Will write to ENUM as data_type STRING.")
data_type = ChannelType.STRING
logger.debug("Writing.")
req = chan.write(data=data, notify=notify,
data_type=data_type, metadata=metadata)
send(chan.circuit, req, chan.name)
t = time.monotonic()
if notify:
while True:
try:
commands = recv(chan.circuit)
except socket.timeout:
commands = []
if time.monotonic() - t > timeout:
raise CaprotoTimeoutError("Timeout while awaiting write reply.")
tags = {'direction': '<<<---',
'our_address': chan.circuit.our_address,
'their_address': chan.circuit.address}
for command in commands:
if isinstance(command, ca.Message):
tags['bytesize'] = len(command)
logger.debug("%r", command, extra=tags)
if (isinstance(command, ca.WriteNotifyResponse) and
command.ioid == req.ioid):
response = command
break
elif isinstance(command, ca.ErrorResponse):
raise ErrorResponseReceived(command)
elif command is ca.DISCONNECTED:
raise CaprotoError('Disconnected while waiting for '
'write response')
else:
continue
break
return response
else:
return None
def write(pv_name, data, *, notify=False, data_type=None, metadata=None,
timeout=1, priority=0,
repeater=True):
"""
Write to a Channel.
Parameters
----------
pv_name : str
The PV name to write to
data : str, bytes, int, or float or any Iterable of these
Value(s) to write.
notify : boolean, optional
Request notification of completion and wait for it. False by default.
data_type : {'native', 'status', 'time', 'graphic', 'control'} or ChannelType or int ID, optional
Write as specific data type. Default is inferred from input.
metadata : ``ctypes.BigEndianStructure`` or tuple
Status and control metadata for the values
timeout : float, optional
Default is 1 second.
priority : 0, optional
Virtual Circuit priority. Default is 0, lowest. Highest is 99.
repeater : boolean, optional
Spawn a Channel Access Repeater process if the port is available.
True default, as the Channel Access spec stipulates that well-behaved
clients should do this.
Returns
-------
    response : WriteNotifyResponse or None
        ``None`` unless ``notify=True``.
Examples
--------
Write the value 5 to a Channel named 'simple:A'.
>>> write('simple:A', 5) # returns None
Request notification of completion ("put completion") and wait for it.
>>> write('cat', 5, notify=True) # blocks until complete, then returns:
WriteNotifyResponse(
data_type=<ChannelType.LONG: 5>,
data_count=1,
status=CAStatusCode(
name='ECA_NORMAL', code=0, code_with_severity=1,
severity=<CASeverity.SUCCESS: 1>,
success=1, defunct=False,
description='Normal successful completion'),
ioid=0)
"""
if repeater:
# As per the EPICS spec, a well-behaved client should start a
# caproto-repeater that will continue running after it exits.
spawn_repeater()
udp_sock = ca.bcast_socket()
# Must bind or getsocketname() will raise on Windows.
# See https://github.com/caproto/caproto/issues/514.
udp_sock.bind(('', 0))
try:
udp_sock.settimeout(timeout)
chan = make_channel(pv_name, udp_sock, priority, timeout)
finally:
udp_sock.close()
try:
return _write(chan, data, metadata, timeout, data_type, notify)
finally:
try:
if chan.states[ca.CLIENT] is ca.CONNECTED:
send(chan.circuit, chan.clear(), chan.name)
finally:
sockets[chan.circuit].close()
del sockets[chan.circuit]
del global_circuits[(chan.circuit.address, chan.circuit.priority)]
def read_write_read(pv_name, data, *, notify=False,
read_data_type=None, write_data_type=None,
metadata=None, timeout=1, priority=0,
force_int_enums=False, repeater=True):
"""
Write to a Channel, but sandwich the write between to reads.
This is what the command-line utilities ``caproto-put`` and ``caput`` do.
Notice that if you want the second reading to reflect the written value,
you should pass the parameter ``notify=True``. (This is also true of
``caproto-put``/``caput``, which needs the ``-c`` argument to behave the
way you might expect it to behave.)
This is provided as a separate function in order to support ``caproto-put``
efficiently. Making separate calls to :func:`read` and :func:`write` would
re-create a connection redundantly.
Parameters
----------
pv_name : str
The PV name to write/read/write
data : str, bytes, int, or float or any Iterable of these
Value to write.
notify : boolean, optional
Request notification of completion and wait for it. False by default.
read_data_type : {'native', 'status', 'time', 'graphic', 'control'} or ChannelType or int ID, optional
Request specific data type.
write_data_type : {'native', 'status', 'time', 'graphic', 'control'} or ChannelType or int ID, optional
Write as specific data type. Default is inferred from input.
metadata : ``ctypes.BigEndianStructure`` or tuple
Status and control metadata for the values
timeout : float, optional
Default is 1 second.
priority : 0, optional
Virtual Circuit priority. Default is 0, lowest. Highest is 99.
force_int_enums : boolean, optional
Retrieve enums as integers. (Default is strings.)
repeater : boolean, optional
Spawn a Channel Access Repeater process if the port is available.
True default, as the Channel Access spec stipulates that well-behaved
clients should do this.
Returns
-------
initial, write_response, final : tuple of response
The middle response comes from the write, and it will be ``None`` unless
``notify=True``.
Examples
--------
Write the value 5 to a Channel named 'simple:A'.
>>> read_write_read('cat', 5) # returns initial, None, final
Request notification of completion ("put completion") and wait for it.
>>> read_write_read('cat', 5, notify=True) # initial, WriteNotifyResponse, final
"""
if repeater:
# As per the EPICS spec, a well-behaved client should start a
# caproto-repeater that will continue running after it exits.
spawn_repeater()
udp_sock = ca.bcast_socket()
# Must bind or getsocketname() will raise on Windows.
# See https://github.com/caproto/caproto/issues/514.
udp_sock.bind(('', 0))
try:
udp_sock.settimeout(timeout)
chan = make_channel(pv_name, udp_sock, priority, timeout)
finally:
udp_sock.close()
try:
initial = _read(chan, timeout, read_data_type, None, notify=True,
force_int_enums=force_int_enums)
res = _write(chan, data, metadata, timeout, write_data_type, notify)
final = _read(chan, timeout, read_data_type, None, notify=True,
force_int_enums=force_int_enums)
finally:
try:
if chan.states[ca.CLIENT] is ca.CONNECTED:
send(chan.circuit, chan.clear(), chan.name)
finally:
sockets[chan.circuit].close()
del sockets[chan.circuit]
del global_circuits[(chan.circuit.address, chan.circuit.priority)]
return initial, res, final
class Subscription:
"""
This object encapsulates state related to a Subscription.
See the :func:`subscribe` function.
"""
def __init__(self, pv_name, priority=0, data_type=None, data_count=None,
low=0.0, high=0.0, to=0.0, mask=None):
if mask is None:
mask = SubscriptionType.DBE_VALUE | SubscriptionType.DBE_ALARM
self.pv_name = pv_name
self.priority = priority
self.data_type = data_type
self.data_count = data_count
self.low = low
self.high = high
self.to = to
self.mask = mask
self.callbacks = {}
self._callback_id = 0
self._callback_lock = threading.RLock()
# This is related to back-compat for user callbacks that have the old
# signature, f(response).
self.__wrapper_weakrefs = set()
def block(self, duration=None, timeout=1,
force_int_enums=False,
repeater=True):
"""
Activate one or more subscriptions and process incoming responses.
Use Ctrl+C (SIGINT) to escape, or from another thread, call
:meth:`interrupt()`.
Convenience alias for the top-level function :func:`block`, which may
be used to process multiple Subscriptions concurrently.
Parameters
----------
duration : float, optional
How many seconds to run for. Run forever (None) by default.
timeout : float, optional
            Default is 1 second. This is not the same as ``duration``; this is
            the timeout for failure in the event of no connection.
force_int_enums : boolean, optional
Retrieve enums as integers. (Default is strings.)
repeater : boolean, optional
Spawn a Channel Access Repeater process if the port is available.
True default, as the Channel Access spec stipulates that
well-behaved clients should do this.
"""
block(self, duration=duration, timeout=timeout,
force_int_enums=force_int_enums,
repeater=repeater)
def interrupt(self):
"""
Signal to block() to stop blocking. Idempotent.
This obviously cannot be called interactively while blocked;
it is intended to be called from another thread.
This method is a convenience alias for the top-level function
:func:`interrupt`.
"""
interrupt()
def add_callback(self, func):
"""
Add a callback to receive responses.
Parameters
----------
func : callable
Expected signature: ``func(sub, response)``.
The signature ``func(response)`` is also supported for
backward-compatibility but will issue warnings. Support will be
removed in a future release of caproto.
Returns
-------
token : int
Integer token that can be passed to :meth:`remove_callback`.
.. versionchanged:: 0.5.0
Changed the expected signature of ``func`` from ``func(response)``
to ``func(sub, response)``.
"""
func = adapt_old_callback_signature(func, self.__wrapper_weakrefs)
def removed(_):
self.remove_callback(cb_id)
if inspect.ismethod(func):
ref = weakref.WeakMethod(func, removed)
else:
# TODO: strong reference to non-instance methods?
ref = weakref.ref(func, removed)
with self._callback_lock:
cb_id = self._callback_id
self._callback_id += 1
self.callbacks[cb_id] = ref
return cb_id
def remove_callback(self, cb_id):
"""
Remove callback using token that was returned by :meth:`add_callback`.
"""
with self._callback_lock:
self.callbacks.pop(cb_id, None)
def process(self, response):
"""
Run the callbacks on a response.
This is used internally by :func:`block()`, generally not called by the
user.
"""
to_remove = []
with self._callback_lock:
callbacks = list(self.callbacks.items())
for cb_id, ref in callbacks:
callback = ref()
if callback is None:
to_remove.append(cb_id)
continue
callback(self, response)
with self._callback_lock:
for remove_id in to_remove:
self.callbacks.pop(remove_id, None)
def clear(self):
"""
Remove all callbacks. If currently blocking, interrupt.
"""
interrupt()
with self._callback_lock:
for cb_id in list(self.callbacks):
self.remove_callback(cb_id)
|
the-stack_0_319 | import logging
import os
from django.core.files.base import ContentFile
from django.utils.timezone import now
from django.utils.translation import ugettext as _
from django_scopes import scopes_disabled
from pretix.base.i18n import language
from pretix.base.models import (
CachedCombinedTicket, CachedTicket, Event, InvoiceAddress, Order,
OrderPosition,
)
from pretix.base.services.tasks import EventTask, ProfiledTask
from pretix.base.settings import PERSON_NAME_SCHEMES
from pretix.base.signals import allow_ticket_download, register_ticket_outputs
from pretix.celery_app import app
from pretix.helpers.database import rolledback_transaction
logger = logging.getLogger(__name__)
def generate_orderposition(order_position: int, provider: str):
order_position = OrderPosition.objects.select_related('order', 'order__event').get(id=order_position)
with language(order_position.order.locale):
responses = register_ticket_outputs.send(order_position.order.event)
for receiver, response in responses:
prov = response(order_position.order.event)
if prov.identifier == provider:
filename, ttype, data = prov.generate(order_position)
path, ext = os.path.splitext(filename)
for ct in CachedTicket.objects.filter(order_position=order_position, provider=provider):
ct.delete()
ct = CachedTicket.objects.create(order_position=order_position, provider=provider,
extension=ext, type=ttype, file=None)
ct.file.save(filename, ContentFile(data))
return ct.pk
def generate_order(order: int, provider: str):
order = Order.objects.select_related('event').get(id=order)
with language(order.locale):
responses = register_ticket_outputs.send(order.event)
for receiver, response in responses:
prov = response(order.event)
if prov.identifier == provider:
filename, ttype, data = prov.generate_order(order)
if ttype == 'text/uri-list':
continue
path, ext = os.path.splitext(filename)
for ct in CachedCombinedTicket.objects.filter(order=order, provider=provider):
ct.delete()
ct = CachedCombinedTicket.objects.create(order=order, provider=provider, extension=ext,
type=ttype, file=None)
ct.file.save(filename, ContentFile(data))
return ct.pk
@app.task(base=ProfiledTask)
def generate(model: str, pk: int, provider: str):
with scopes_disabled():
if model == 'order':
return generate_order(pk, provider)
elif model == 'orderposition':
return generate_orderposition(pk, provider)
class DummyRollbackException(Exception):
pass
def preview(event: int, provider: str):
event = Event.objects.get(id=event)
with rolledback_transaction(), language(event.settings.locale):
item = event.items.create(name=_("Sample product"), default_price=42.23,
description=_("Sample product description"))
item2 = event.items.create(name=_("Sample workshop"), default_price=23.40)
from pretix.base.models import Order
order = event.orders.create(status=Order.STATUS_PENDING, datetime=now(),
email='[email protected]',
locale=event.settings.locale,
expires=now(), code="PREVIEW1234", total=119)
scheme = PERSON_NAME_SCHEMES[event.settings.name_scheme]
sample = {k: str(v) for k, v in scheme['sample'].items()}
p = order.positions.create(item=item, attendee_name_parts=sample, price=item.default_price)
s = event.subevents.first()
order.positions.create(item=item2, attendee_name_parts=sample, price=item.default_price, addon_to=p, subevent=s)
order.positions.create(item=item2, attendee_name_parts=sample, price=item.default_price, addon_to=p, subevent=s)
InvoiceAddress.objects.create(order=order, name_parts=sample, company=_("Sample company"))
responses = register_ticket_outputs.send(event)
for receiver, response in responses:
prov = response(event)
if prov.identifier == provider:
return prov.generate(p)
def get_tickets_for_order(order, base_position=None):
can_download = all([r for rr, r in allow_ticket_download.send(order.event, order=order)])
if not can_download:
return []
if not order.ticket_download_available:
return []
providers = [
response(order.event)
for receiver, response
in register_ticket_outputs.send(order.event)
]
tickets = []
positions = list(order.positions_with_tickets)
if base_position:
# Only the given position and its children
positions = [
p for p in positions if p.pk == base_position.pk or p.addon_to_id == base_position.pk
]
for p in providers:
if not p.is_enabled:
continue
if p.multi_download_enabled and not base_position:
try:
if len(positions) == 0:
continue
ct = CachedCombinedTicket.objects.filter(
order=order, provider=p.identifier, file__isnull=False
).last()
if not ct or not ct.file:
retval = generate_order(order.pk, p.identifier)
if not retval:
continue
ct = CachedCombinedTicket.objects.get(pk=retval)
tickets.append((
"{}-{}-{}{}".format(
order.event.slug.upper(), order.code, ct.provider, ct.extension,
),
ct
))
except:
logger.exception('Failed to generate ticket.')
else:
for pos in positions:
try:
ct = CachedTicket.objects.filter(
order_position=pos, provider=p.identifier, file__isnull=False
).last()
if not ct or not ct.file:
retval = generate_orderposition(pos.pk, p.identifier)
if not retval:
continue
ct = CachedTicket.objects.get(pk=retval)
if ct.type == 'text/uri-list':
continue
tickets.append((
"{}-{}-{}-{}{}".format(
order.event.slug.upper(), order.code, pos.positionid, ct.provider, ct.extension,
),
ct
))
except:
logger.exception('Failed to generate ticket.')
return tickets
@app.task(base=EventTask)
def invalidate_cache(event: Event, item: int = None, provider: str = None, order: int = None, **kwargs):
qs = CachedTicket.objects.filter(order_position__order__event=event)
qsc = CachedCombinedTicket.objects.filter(order__event=event)
if item:
qs = qs.filter(order_position__item_id=item)
if provider:
qs = qs.filter(provider=provider)
qsc = qsc.filter(provider=provider)
if order:
qs = qs.filter(order_position__order_id=order)
qsc = qsc.filter(order_id=order)
for ct in qs:
ct.delete()
for ct in qsc:
ct.delete()
|
the-stack_0_322 | # --------------------------------------------------------
# Tensorflow Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Jiasen Lu, Jianwei Yang, based on code from Ross Girshick
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import _init_paths
import os
import sys
import numpy as np
import argparse
import pprint
import pdb
import time
import cv2
import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.optim as optim
import pickle
from roi_data_layer.roidb import combined_roidb
from roi_data_layer.roibatchLoader import roibatchLoader
from model.utils.config import cfg, cfg_from_file, cfg_from_list, get_output_dir
from model.rpn.bbox_transform import clip_boxes
from model.nms.nms_wrapper import nms
from model.rpn.bbox_transform import bbox_transform_inv
from model.utils.net_utils import save_net, load_net, vis_detections
from model.faster_rcnn.vgg16 import vgg16
from model.faster_rcnn.resnet import resnet
import pdb
try:
xrange # Python 2
except NameError:
xrange = range # Python 3
def parse_args():
"""
Parse input arguments
"""
parser = argparse.ArgumentParser(description='Train a Fast R-CNN network')
parser.add_argument('--dataset', dest='dataset',
help='training dataset',
default='pascal_voc', type=str)
parser.add_argument('--cfg', dest='cfg_file',
help='optional config file',
default='cfgs/vgg16.yml', type=str)
parser.add_argument('--net', dest='net',
help='vgg16, res50, res101, res152',
default='res101', type=str)
parser.add_argument('--set', dest='set_cfgs',
help='set config keys', default=None,
nargs=argparse.REMAINDER)
parser.add_argument('--load_dir', dest='load_dir',
help='directory to load models', default="/srv/share/jyang375/models",
type=str)
parser.add_argument('--cuda', dest='cuda',
help='whether use CUDA',
action='store_true')
parser.add_argument('--ls', dest='large_scale',
help='whether use large imag scale',
action='store_true')
parser.add_argument('--mGPUs', dest='mGPUs',
help='whether use multiple GPUs',
action='store_true')
parser.add_argument('--cag', dest='class_agnostic',
help='whether perform class_agnostic bbox regression',
action='store_true')
parser.add_argument('--parallel_type', dest='parallel_type',
help='which part of model to parallel, 0: all, 1: model before roi pooling',
default=0, type=int)
parser.add_argument('--checksession', dest='checksession',
help='checksession to load model',
default=1, type=int)
parser.add_argument('--checkepoch', dest='checkepoch',
help='checkepoch to load network',
default=1, type=int)
parser.add_argument('--checkpoint', dest='checkpoint',
help='checkpoint to load network',
default=10021, type=int)
parser.add_argument('--vis', dest='vis',
help='visualization mode',
action='store_true')
args = parser.parse_args()
return args
lr = cfg.TRAIN.LEARNING_RATE
momentum = cfg.TRAIN.MOMENTUM
weight_decay = cfg.TRAIN.WEIGHT_DECAY
if __name__ == '__main__':
args = parse_args()
print('Called with args:')
print(args)
if torch.cuda.is_available() and not args.cuda:
print("WARNING: You have a CUDA device, so you should probably run with --cuda")
np.random.seed(cfg.RNG_SEED)
if args.dataset == "pascal_voc":
args.imdb_name = "voc_2007_trainval"
args.imdbval_name = "voc_2007_test"
args.set_cfgs = ['ANCHOR_SCALES', '[8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']
elif args.dataset == "pascal_voc_0712":
args.imdb_name = "voc_2007_trainval+voc_2012_trainval"
args.imdbval_name = "voc_2007_test"
args.set_cfgs = ['ANCHOR_SCALES', '[8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']
elif args.dataset == "coco":
args.imdb_name = "coco_2014_train+coco_2014_valminusminival"
args.imdbval_name = "coco_2014_minival"
args.set_cfgs = ['ANCHOR_SCALES', '[4, 8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']
elif args.dataset == "imagenet":
args.imdb_name = "imagenet_train"
args.imdbval_name = "imagenet_val"
args.set_cfgs = ['ANCHOR_SCALES', '[8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']
elif args.dataset == "imagenet_vid":
args.imdb_name = "imagenet_vid_train+imagenet_det_train"
args.imdbval_name = "imagenet_vid_val"
args.set_cfgs = ['ANCHOR_SCALES', '[4, 8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']
elif args.dataset == "vg":
args.imdb_name = "vg_150-50-50_minitrain"
args.imdbval_name = "vg_150-50-50_minival"
args.set_cfgs = ['ANCHOR_SCALES', '[4, 8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']
args.cfg_file = "cfgs/{}_ls.yml".format(args.net) if args.large_scale else "cfgs/{}.yml".format(args.net)
if args.cfg_file is not None:
cfg_from_file(args.cfg_file)
if args.set_cfgs is not None:
cfg_from_list(args.set_cfgs)
print('Using config:')
pprint.pprint(cfg)
cfg.TRAIN.USE_FLIPPED = False
imdb, roidb, ratio_list, ratio_index = combined_roidb(args.imdbval_name, False)
imdb.competition_mode(on=True)
print('{:d} roidb entries'.format(len(roidb)))
input_dir = args.load_dir + "/" + args.net + "/" + args.dataset
if not os.path.exists(input_dir):
raise Exception('There is no input directory for loading network from ' + input_dir)
load_name = os.path.join(input_dir,
'faster_rcnn_{}_{}_{}.pth'.format(args.checksession, args.checkepoch, args.checkpoint))
  # initialize the network here.
if args.net == 'vgg16':
fasterRCNN = vgg16(imdb.classes, pretrained=False, class_agnostic=args.class_agnostic)
elif args.net == 'res101':
fasterRCNN = resnet(imdb.classes, 101, pretrained=False, class_agnostic=args.class_agnostic)
elif args.net == 'res50':
fasterRCNN = resnet(imdb.classes, 50, pretrained=False, class_agnostic=args.class_agnostic)
elif args.net == 'res152':
fasterRCNN = resnet(imdb.classes, 152, pretrained=False, class_agnostic=args.class_agnostic)
else:
print("network is not defined")
pdb.set_trace()
fasterRCNN.create_architecture()
print("load checkpoint %s" % (load_name))
checkpoint = torch.load(load_name)
fasterRCNN.load_state_dict(checkpoint['model'])
if 'pooling_mode' in checkpoint.keys():
cfg.POOLING_MODE = checkpoint['pooling_mode']
print('load model successfully!')
  # initialize the tensor holder here.
im_data = torch.FloatTensor(1)
im_info = torch.FloatTensor(1)
num_boxes = torch.LongTensor(1)
gt_boxes = torch.FloatTensor(1)
# ship to cuda
if args.cuda:
im_data = im_data.cuda()
im_info = im_info.cuda()
num_boxes = num_boxes.cuda()
gt_boxes = gt_boxes.cuda()
# make variable
im_data = Variable(im_data, volatile=True)
im_info = Variable(im_info, volatile=True)
num_boxes = Variable(num_boxes, volatile=True)
gt_boxes = Variable(gt_boxes, volatile=True)
if args.cuda:
cfg.CUDA = True
if args.cuda:
fasterRCNN.cuda()
start = time.time()
max_per_image = 100
vis = args.vis
if vis:
thresh = 0.05
else:
thresh = 0.0
save_name = 'faster_rcnn_10'
num_images = len(imdb.image_index)
all_boxes = [[[] for _ in xrange(num_images)]
for _ in xrange(imdb.num_classes)]
output_dir = get_output_dir(imdb, save_name)
dataset = roibatchLoader(roidb, ratio_list, ratio_index, 1, \
imdb.num_classes, training=False, normalize = False)
dataloader = torch.utils.data.DataLoader(dataset, batch_size=1,
shuffle=False, num_workers=0,
pin_memory=True)
data_iter = iter(dataloader)
_t = {'im_detect': time.time(), 'misc': time.time()}
det_file = os.path.join(output_dir, 'detections.pkl')
fasterRCNN.eval()
empty_array = np.transpose(np.array([[],[],[],[],[]]), (1,0))
for i in range(num_images):
data = next(data_iter)
im_data.data.resize_(data[0].size()).copy_(data[0])
im_info.data.resize_(data[1].size()).copy_(data[1])
gt_boxes.data.resize_(data[2].size()).copy_(data[2])
num_boxes.data.resize_(data[3].size()).copy_(data[3])
det_tic = time.time()
rois, cls_prob, bbox_pred, \
rpn_loss_cls, rpn_loss_box, \
RCNN_loss_cls, RCNN_loss_bbox, \
rois_label = fasterRCNN(im_data, im_info, gt_boxes, num_boxes)
scores = cls_prob.data
boxes = rois.data[:, :, 1:5]
if cfg.TEST.BBOX_REG:
# Apply bounding-box regression deltas
box_deltas = bbox_pred.data
if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED:
# Optionally normalize targets by a precomputed mean and stdev
if args.class_agnostic:
box_deltas = box_deltas.view(-1, 4) * torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_STDS).cuda() \
+ torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_MEANS).cuda()
box_deltas = box_deltas.view(1, -1, 4)
else:
box_deltas = box_deltas.view(-1, 4) * torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_STDS).cuda() \
+ torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_MEANS).cuda()
box_deltas = box_deltas.view(1, -1, 4 * len(imdb.classes))
pred_boxes = bbox_transform_inv(boxes, box_deltas, 1)
pred_boxes = clip_boxes(pred_boxes, im_info.data, 1)
else:
# Simply repeat the boxes, once for each class
pred_boxes = np.tile(boxes, (1, scores.shape[1]))
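  # im_info holds [height, width, scale]; dividing by the scale maps the boxes
  # back to the original (unresized) image coordinates.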
pred_boxes /= data[1][0][2]
scores = scores.squeeze()
pred_boxes = pred_boxes.squeeze()
det_toc = time.time()
detect_time = det_toc - det_tic
misc_tic = time.time()
if vis:
im = cv2.imread(imdb.image_path_at(i))
im2show = np.copy(im)
for j in xrange(1, imdb.num_classes):
inds = torch.nonzero(scores[:,j]>thresh).view(-1)
    # if there are detections for this class
if inds.numel() > 0:
cls_scores = scores[:,j][inds]
_, order = torch.sort(cls_scores, 0, True)
if args.class_agnostic:
cls_boxes = pred_boxes[inds, :]
else:
cls_boxes = pred_boxes[inds][:, j * 4:(j + 1) * 4]
cls_dets = torch.cat((cls_boxes, cls_scores.unsqueeze(1)), 1)
# cls_dets = torch.cat((cls_boxes, cls_scores), 1)
cls_dets = cls_dets[order]
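      # class-wise NMS: keep the indices of detections surviving the IoU
      # threshold cfg.TEST.NMS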
keep = nms(cls_dets, cfg.TEST.NMS)
cls_dets = cls_dets[keep.view(-1).long()]
if vis:
im2show = vis_detections(im2show, imdb.classes[j], cls_dets.cpu().numpy(), 0.3)
all_boxes[j][i] = cls_dets.cpu().numpy()
else:
all_boxes[j][i] = empty_array
# Limit to max_per_image detections *over all classes*
if max_per_image > 0:
image_scores = np.hstack([all_boxes[j][i][:, -1]
for j in xrange(1, imdb.num_classes)])
if len(image_scores) > max_per_image:
image_thresh = np.sort(image_scores)[-max_per_image]
for j in xrange(1, imdb.num_classes):
keep = np.where(all_boxes[j][i][:, -1] >= image_thresh)[0]
all_boxes[j][i] = all_boxes[j][i][keep, :]
misc_toc = time.time()
nms_time = misc_toc - misc_tic
sys.stdout.write('im_detect: {:d}/{:d} {:.3f}s {:.3f}s \r' \
.format(i + 1, num_images, detect_time, nms_time))
sys.stdout.flush()
if vis:
cv2.imwrite('result.png', im2show)
pdb.set_trace()
#cv2.imshow('test', im2show)
#cv2.waitKey(0)
with open(det_file, 'wb') as f:
pickle.dump(all_boxes, f, pickle.HIGHEST_PROTOCOL)
# print('Evaluating detections')
# imdb.evaluate_detections(all_boxes, output_dir)
end = time.time()
print("test time: %0.4fs" % (end - start))
|
the-stack_0_324 | import json
from .common import InfoExtractor
from .youtube import YoutubeIE
from ..compat import compat_b64decode
from ..utils import (
clean_html,
ExtractorError
)
class ChilloutzoneIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?chilloutzone\.net/video/(?P<id>[\w|-]+)\.html'
_TESTS = [{
'url': 'http://www.chilloutzone.net/video/enemene-meck-alle-katzen-weg.html',
'md5': 'a76f3457e813ea0037e5244f509e66d1',
'info_dict': {
'id': 'enemene-meck-alle-katzen-weg',
'ext': 'mp4',
'title': 'Enemene Meck - Alle Katzen weg',
'description': 'Ist das der Umkehrschluss des Niesenden Panda-Babys?',
},
}, {
'note': 'Video hosted at YouTube',
'url': 'http://www.chilloutzone.net/video/eine-sekunde-bevor.html',
'info_dict': {
'id': '1YVQaAgHyRU',
'ext': 'mp4',
'title': '16 Photos Taken 1 Second Before Disaster',
'description': 'md5:58a8fcf6a459fe0a08f54140f0ad1814',
'uploader': 'BuzzFeedVideo',
'uploader_id': 'BuzzFeedVideo',
'upload_date': '20131105',
},
}, {
'note': 'Video hosted at Vimeo',
'url': 'http://www.chilloutzone.net/video/icon-blending.html',
'md5': '2645c678b8dc4fefcc0e1b60db18dac1',
'info_dict': {
'id': '85523671',
'ext': 'mp4',
'title': 'The Sunday Times - Icons',
'description': 're:(?s)^Watch the making of - makingoficons.com.{300,}',
'uploader': 'Us',
'uploader_id': 'usfilms',
'upload_date': '20140131'
},
}]
def _real_extract(self, url):
mobj = self._match_valid_url(url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
base64_video_info = self._html_search_regex(
r'var cozVidData = "(.+?)";', webpage, 'video data')
decoded_video_info = compat_b64decode(base64_video_info).decode('utf-8')
video_info_dict = json.loads(decoded_video_info)
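        # The decoded payload is a JSON object; the keys read below (mediaUrl,
        # title, nativePlatform, nativeVideoId, sourcePriority, ...) are what
        # this extractor relies on.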
# get video information from dict
video_url = video_info_dict['mediaUrl']
description = clean_html(video_info_dict.get('description'))
title = video_info_dict['title']
native_platform = video_info_dict['nativePlatform']
native_video_id = video_info_dict['nativeVideoId']
source_priority = video_info_dict['sourcePriority']
# If nativePlatform is None a fallback mechanism is used (i.e. youtube embed)
if native_platform is None:
youtube_url = YoutubeIE._extract_url(webpage)
if youtube_url:
return self.url_result(youtube_url, ie=YoutubeIE.ie_key())
        # No fallback: decide whether to use the native source (e.g. YouTube or
        # Vimeo) or the site's own CDN
if source_priority == 'native':
if native_platform == 'youtube':
return self.url_result(native_video_id, ie='Youtube')
if native_platform == 'vimeo':
return self.url_result(
'http://vimeo.com/' + native_video_id, ie='Vimeo')
if not video_url:
raise ExtractorError('No video found')
return {
'id': video_id,
'url': video_url,
'ext': 'mp4',
'title': title,
'description': description,
}
|
the-stack_0_326 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Wen Guan, [email protected], 2018
import json
import logging
import os
import socket
import sys
import time
from pilot.eventservice.communicationmanager.communicationmanager import CommunicationRequest, CommunicationResponse, CommunicationManager
from pilot.util.https import https_setup
from pilot.util.timing import time_stamp
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
logger = logging.getLogger(__name__)
https_setup(None, None)
def check_env():
"""
    Check whether cvmfs is available.
    Used to decide whether to skip some test functions.
    :return: True if cvmfs is available, False otherwise.
"""
return os.path.exists('/cvmfs/atlas.cern.ch/repo/')
class TestESCommunicationrRequestResponse(unittest.TestCase):
"""
Unit tests for event service communicator Request and Response.
"""
def test_communicator_request(self):
"""
        Make sure that ES communication requests and responses work as expected.
"""
req_attrs = {'request_type': CommunicationRequest.RequestType.RequestJobs,
'num_jobs': 1, 'post_hook': None, 'response': None}
req_job = CommunicationRequest(req_attrs)
self.assertEqual(req_job.request_type, CommunicationRequest.RequestType.RequestJobs)
req_attrs = {'request_type': CommunicationRequest.RequestType.RequestEvents,
'num_event_ranges': 1, 'post_hook': None, 'response': None}
req_events = CommunicationRequest(req_attrs)
self.assertEqual(req_events.request_type, CommunicationRequest.RequestType.RequestEvents)
req_attrs = {'request_type': CommunicationRequest.RequestType.UpdateEvents,
'output_files': None, 'post_hook': None, 'response': None}
req_output = CommunicationRequest(req_attrs)
self.assertEqual(req_output.request_type, CommunicationRequest.RequestType.UpdateEvents)
resp_attrs = {'status': 0, 'content': None, 'exception': None}
resp = CommunicationResponse(resp_attrs)
self.assertEqual(resp.status, 0)
class TestESCommunicationManagerPanda(unittest.TestCase):
"""
Unit tests for event service communicator manager.
"""
@unittest.skipIf(not check_env(), "No CVMFS")
def test_communicator_manager(self):
"""
Make sure that es communicator manager thread works as expected.
"""
communicator_manager = None
try:
args = {'workflow': 'eventservice_hpc',
'queue': 'BNL_CLOUD_MCORE',
'site': 'BNL_CLOUD_MCORE',
'port': 25443,
'url': 'https://aipanda007.cern.ch',
'job_label': 'ptest',
'pilot_user': 'ATLAS',
'node': socket.getfqdn(),
'mem': 16000,
'disk_space': 160000,
'working_group': '',
'cpu': 2601.0,
'info': None}
communicator_manager = CommunicationManager()
communicator_manager.start()
self.assertTrue(communicator_manager.is_alive())
jobs = communicator_manager.get_jobs(njobs=2, args=args)
self.assertEqual(len(jobs), 2)
jobs = communicator_manager.get_jobs(njobs=1, args=args)
self.assertEqual(len(jobs), 1)
job_list = []
for job in jobs:
job_data = {'node': socket.getfqdn(),
'pilotErrorCode': 0,
'startTime': time.time(),
'jobMetrics': 'coreCount=8',
'schedulerID': 'unknown',
'timestamp': time_stamp(),
'exeErrorCode': 0,
'pilotID': 'unknown|PR|2.0.0 (80)',
'transExitCode': 0,
'pilotErrorDiag': '',
'exeErrorDiag': ''}
job_data['jobId'] = job['PandaID']
job_data['siteName'] = 'BNL_CLOUD_MCORE'
job_data['state'] = 'running'
job_data['attemptNr'] = job['attemptNr'] + 1
job_list.append(job_data)
status = communicator_manager.update_jobs(jobs=job_list)
self.assertEqual(status[0], True)
events = communicator_manager.get_event_ranges(num_event_ranges=1, job=jobs[0])
self.assertEqual(len(events), 1)
for event in events:
event_range_status = {"errorCode": 1220, "eventRangeID": event['eventRangeID'], "eventStatus": 'failed'}
event_range_message = {'version': 0, 'eventRanges': json.dumps(event_range_status)}
res = communicator_manager.update_events(update_events=event_range_message)
self.assertEqual(res['StatusCode'], 0)
events = communicator_manager.get_event_ranges(num_event_ranges=2, job=jobs[0])
self.assertEqual(len(events), 2)
update_events = []
for event in events:
event_range = {"eventRangeID": event['eventRangeID'], "eventStatus": 'finished'}
update_events.append(event_range)
event_range_status = [{"zipFile": {"numEvents": len(update_events),
"objstoreID": 1318,
"adler32": '000000',
"lfn": 'test_file',
"fsize": 100,
"pathConvention": 1000},
"eventRanges": update_events}]
event_range_message = {'version': 1, 'eventRanges': json.dumps(event_range_status)}
res = communicator_manager.update_events(update_events=event_range_message)
self.assertEqual(res['StatusCode'], 0)
communicator_manager.stop()
time.sleep(2)
self.assertFalse(communicator_manager.is_alive())
except Exception as ex:
if communicator_manager:
communicator_manager.stop()
raise ex
|
the-stack_0_327 | """
swat-s1 topology
"""
from mininet.topo import Topo as TopoBase
from srve import Srve
from clie import Clie
class Topoe(TopoBase):
NETMASK = '/24'
NODES = [Srve, Clie]
def build(self):
switch = self.addSwitch('s1')
for node in Topoe.NODES:
host = self.addHost(
node.NAME,
ip=node.IP + Topoe.NETMASK,
mac=node.MAC)
self.addLink(host, switch)
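
# A minimal launch sketch (standard Mininet API; the Srve/Clie node classes are
# assumed from their modules):
#
#   from mininet.net import Mininet
#
#   net = Mininet(topo=Topoe())
#   net.start()
#   # ... run the experiment ...
#   net.stop()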
|
the-stack_0_330 | import typing
from sqlalchemy import create_engine
from sqlalchemy.exc import OperationalError
from db_workers import DatabaseWorker
from db_workers import MySQLWorker
from db_workers import PostgreWorker
from fields import DATABASE_NAME_FIELD_NAME
from fields import FOLDER_NAME_FIELD_NAME
from fields import LOCAL_TABLE_NAME_FILED_NAME
from fields import TABLE_NAME_FIELD_NAME
def validate_get_data_request_body(
required_fields: list, request_body: dict, local_db_worker
) -> dict:
    # Check that all required fields are defined
for f in required_fields:
if f not in request_body:
raise KeyError(f"Field {f} not defined in request")
    # If columns are defined, check that they all exist in the table
if "columns" in request_body:
table_data = local_db_worker.get_table(local_table_name=request_body["table"])
for col in request_body["columns"]:
if col not in table_data.columns:
KeyError(f"Column {col} not found in table {request_body['table']}")
    # If a limit is defined, check that it can be converted to float
if "limit" in request_body:
request_body["limit"] = float(request_body["limit"])
return request_body
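
# A minimal sketch of a request body that passes the checks above (names and
# values are hypothetical):
#
#   body = validate_get_data_request_body(
#       required_fields=["table"],
#       request_body={"table": "users", "columns": ["id"], "limit": "10"},
#       local_db_worker=worker,
#   )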
def get_existing_data(
sql_session, table_class_object, target_attr: str = None
) -> typing.List:
if not getattr(table_class_object, "__tablename__"):
raise ValueError("Получен неверный table_class_object.")
try:
data = sql_session.query(table_class_object).all()
except: # noqa: E722
sql_session.rollback()
return [getattr(i, target_attr) for i in data] if target_attr else data
def get_local_table_name_from_request(request_body: dict, local_worker):
return (
request_body[LOCAL_TABLE_NAME_FILED_NAME]
if LOCAL_TABLE_NAME_FILED_NAME in request_body
else local_worker.get_local_table_name(
database_name=request_body[DATABASE_NAME_FIELD_NAME],
folder_name=request_body[FOLDER_NAME_FIELD_NAME],
table_name=request_body[TABLE_NAME_FIELD_NAME],
)
)
def get_worker(db_type: str) -> typing.Type[DatabaseWorker]: # noqa: TYP006
if db_type == "postgres":
return PostgreWorker
elif db_type == "mysql":
return MySQLWorker
def database_health_check(engine) -> bool:
try:
engine.connect()
return True
except OperationalError:
return False
def get_db_engine(db_type: str, **con_params):
if db_type == "postgres":
return create_engine(
f"postgresql://{con_params.get('username')}:{con_params.get('password')}@"
f"{con_params.get('ip')}:{con_params.get('port')}/{con_params.get('database')}"
)
elif db_type == "mysql":
return create_engine(
f"mysql://{con_params.get('username')}:{con_params.get('password')}@"
f"{con_params.get('ip')}:{con_params.get('port')}/{con_params.get('database')}"
)
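
# A minimal sketch composing the helpers above (connection parameters are
# hypothetical):
#
#   engine = get_db_engine("postgres", username="user", password="secret",
#                          ip="127.0.0.1", port=5432, database="mydb")
#   if not database_health_check(engine):
#       raise RuntimeError("database is unreachable")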
def get_bad_request_answer() -> list:
return [{"status": "error", "message": "Incorrect request"}, 400]
def update_data_about_db_structure(local_worker):
"""
    Iterate through all databases and refresh their structure data (tables/schemas).
    :param local_worker: LocalBaseWorker instance
"""
for local_base_name in local_worker.get_db_name_list():
db_type = local_worker.get_database_object(local_base_name).type
worker = get_worker(db_type)(local_base_name, local_worker)
tables = worker.download_table_list()
# After rescan add grant access for admin tokens
for token_obj in local_worker.get_admin_tokens_objects_list():
token = token_obj.token
for local_table_name in tables:
if local_table_name not in list(token_obj.granted_tables):
local_worker.add_table_for_token(
token, local_table_name=local_table_name
)
|
the-stack_0_332 | import os
import unittest
from datetime import datetime
import requests_mock
from dateutil.tz import tzutc
from august.api import API_GET_DOORBELLS_URL, Api, API_GET_LOCKS_URL, \
API_GET_LOCK_STATUS_URL, API_LOCK_URL, API_UNLOCK_URL, API_GET_LOCK_URL, \
API_GET_DOORBELL_URL, API_GET_PINS_URL
from august.lock import LockStatus, LockDoorStatus
ACCESS_TOKEN = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9"
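# A dummy JWT-style token; every request below is stubbed with requests_mock,
# so the token is never validated against a real server.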
def load_fixture(filename):
"""Load a fixture."""
path = os.path.join(os.path.dirname(__file__), 'fixtures', filename)
with open(path) as fptr:
return fptr.read()
def utc_of(year, month, day, hour, minute, second, microsecond):
return datetime(year, month, day, hour, minute, second, microsecond,
tzinfo=tzutc())
class TestApi(unittest.TestCase):
@requests_mock.Mocker()
def test_get_doorbells(self, mock):
mock.register_uri(
"get",
API_GET_DOORBELLS_URL,
text=load_fixture("get_doorbells.json"))
api = Api()
doorbells = sorted(api.get_doorbells(ACCESS_TOKEN),
key=lambda d: d.device_id)
self.assertEqual(2, len(doorbells))
first = doorbells[0]
self.assertEqual("1KDAbJH89XYZ", first.device_id)
self.assertEqual("aaaaR08888", first.serial_number)
self.assertEqual("Back Door", first.device_name)
self.assertEqual("doorbell_call_status_offline", first.status)
self.assertEqual(False, first.has_subscription)
self.assertEqual(None, first.image_url)
self.assertEqual("3dd2accadddd", first.house_id)
second = doorbells[1]
self.assertEqual("K98GiDT45GUL", second.device_id)
self.assertEqual("tBXZR0Z35E", second.serial_number)
self.assertEqual("Front Door", second.device_name)
self.assertEqual("doorbell_call_status_online", second.status)
self.assertEqual(True, second.has_subscription)
self.assertEqual("https://image.com/vmk16naaaa7ibuey7sar.jpg",
second.image_url)
self.assertEqual("3dd2accaea08", second.house_id)
@requests_mock.Mocker()
def test_get_doorbell_detail(self, mock):
mock.register_uri(
"get",
API_GET_DOORBELL_URL.format(doorbell_id="K98GiDT45GUL"),
text=load_fixture("get_doorbell.json"))
api = Api()
doorbell = api.get_doorbell_detail(ACCESS_TOKEN, "K98GiDT45GUL")
self.assertEqual("K98GiDT45GUL", doorbell.device_id)
self.assertEqual("Front Door", doorbell.device_name)
self.assertEqual("3dd2accaea08", doorbell.house_id)
self.assertEqual("tBXZR0Z35E", doorbell.serial_number)
self.assertEqual("2.3.0-RC153+201711151527", doorbell.firmware_version)
self.assertEqual("doorbell_call_status_online", doorbell.status)
self.assertEqual(True, doorbell.is_online)
self.assertEqual(True, doorbell.has_subscription)
self.assertEqual("https://image.com/vmk16naaaa7ibuey7sar.jpg",
doorbell.image_url)
@requests_mock.Mocker()
def test_get_locks(self, mock):
mock.register_uri(
"get",
API_GET_LOCKS_URL,
text=load_fixture("get_locks.json"))
api = Api()
locks = sorted(api.get_locks(ACCESS_TOKEN), key=lambda d: d.device_id)
self.assertEqual(2, len(locks))
first = locks[0]
self.assertEqual("A6697750D607098BAE8D6BAA11EF8063", first.device_id)
self.assertEqual("Front Door Lock", first.device_name)
self.assertEqual("000000000000", first.house_id)
self.assertEqual(True, first.is_operable)
second = locks[1]
self.assertEqual("A6697750D607098BAE8D6BAA11EF9999", second.device_id)
self.assertEqual("Back Door Lock", second.device_name)
self.assertEqual("000000000011", second.house_id)
self.assertEqual(False, second.is_operable)
@requests_mock.Mocker()
def test_get_operable_locks(self, mock):
mock.register_uri(
"get",
API_GET_LOCKS_URL,
text=load_fixture("get_locks.json"))
api = Api()
locks = api.get_operable_locks(ACCESS_TOKEN)
self.assertEqual(1, len(locks))
first = locks[0]
self.assertEqual("A6697750D607098BAE8D6BAA11EF8063", first.device_id)
self.assertEqual("Front Door Lock", first.device_name)
self.assertEqual("000000000000", first.house_id)
self.assertEqual(True, first.is_operable)
@requests_mock.Mocker()
def test_get_lock_detail(self, mock):
mock.register_uri(
"get",
API_GET_LOCK_URL.format(
lock_id="A6697750D607098BAE8D6BAA11EF8063"),
text=load_fixture("get_lock.json"))
api = Api()
lock = api.get_lock_detail(ACCESS_TOKEN,
"A6697750D607098BAE8D6BAA11EF8063")
self.assertEqual("A6697750D607098BAE8D6BAA11EF8063", lock.device_id)
self.assertEqual("Front Door Lock", lock.device_name)
self.assertEqual("000000000000", lock.house_id)
self.assertEqual("X2FSW05DGA", lock.serial_number)
self.assertEqual("109717e9-3.0.44-3.0.30", lock.firmware_version)
self.assertEqual(88, lock.battery_level)
self.assertEqual("Medium", lock.keypad.battery_level)
self.assertEqual("5bc65c24e6ef2a263e1450a8", lock.keypad.device_id)
@requests_mock.Mocker()
def test_get_lock_status_with_locked_response(self, mock):
lock_id = 1234
mock.register_uri(
"get",
API_GET_LOCK_STATUS_URL.format(lock_id=lock_id),
text="{\"status\": \"kAugLockState_Locked\"}")
api = Api()
status = api.get_lock_status(ACCESS_TOKEN, lock_id)
self.assertEqual(LockStatus.LOCKED, status)
@requests_mock.Mocker()
def test_get_lock_and_door_status_with_locked_response(self, mock):
lock_id = 1234
mock.register_uri(
"get",
API_GET_LOCK_STATUS_URL.format(lock_id=lock_id),
text="{\"status\": \"kAugLockState_Locked\""
",\"doorState\": \"kAugLockDoorState_Closed\"}")
api = Api()
status, door_status = api.get_lock_status(ACCESS_TOKEN, lock_id, True)
self.assertEqual(LockStatus.LOCKED, status)
self.assertEqual(LockDoorStatus.CLOSED, door_status)
@requests_mock.Mocker()
def test_get_lock_status_with_unlocked_response(self, mock):
lock_id = 1234
mock.register_uri(
"get",
API_GET_LOCK_STATUS_URL.format(lock_id=lock_id),
text="{\"status\": \"kAugLockState_Unlocked\"}")
api = Api()
status = api.get_lock_status(ACCESS_TOKEN, lock_id)
self.assertEqual(LockStatus.UNLOCKED, status)
@requests_mock.Mocker()
def test_get_lock_status_with_unknown_status_response(self, mock):
lock_id = 1234
mock.register_uri(
"get",
API_GET_LOCK_STATUS_URL.format(lock_id=lock_id),
text="{\"status\": \"not_advertising\"}")
api = Api()
status = api.get_lock_status(ACCESS_TOKEN, lock_id)
self.assertEqual(LockStatus.UNKNOWN, status)
@requests_mock.Mocker()
def test_get_lock_door_status_with_closed_response(self, mock):
lock_id = 1234
mock.register_uri(
"get",
API_GET_LOCK_STATUS_URL.format(lock_id=lock_id),
text="{\"doorState\": \"kAugLockDoorState_Closed\"}")
api = Api()
door_status = api.get_lock_door_status(ACCESS_TOKEN, lock_id)
self.assertEqual(LockDoorStatus.CLOSED, door_status)
@requests_mock.Mocker()
def test_get_lock_door_status_with_open_response(self, mock):
lock_id = 1234
mock.register_uri(
"get",
API_GET_LOCK_STATUS_URL.format(lock_id=lock_id),
text="{\"doorState\": \"kAugLockDoorState_Open\"}")
api = Api()
door_status = api.get_lock_door_status(ACCESS_TOKEN, lock_id)
self.assertEqual(LockDoorStatus.OPEN, door_status)
@requests_mock.Mocker()
def test_get_lock_and_door_status_with_open_response(self, mock):
lock_id = 1234
mock.register_uri(
"get",
API_GET_LOCK_STATUS_URL.format(lock_id=lock_id),
text="{\"status\": \"kAugLockState_Unlocked\""
",\"doorState\": \"kAugLockDoorState_Open\"}")
api = Api()
door_status, status = api.get_lock_door_status(ACCESS_TOKEN, lock_id,
True)
self.assertEqual(LockDoorStatus.OPEN, door_status)
self.assertEqual(LockStatus.UNLOCKED, status)
@requests_mock.Mocker()
def test_get_lock_door_status_with_unknown_response(self, mock):
lock_id = 1234
mock.register_uri(
"get",
API_GET_LOCK_STATUS_URL.format(lock_id=lock_id),
text="{\"doorState\": \"not_advertising\"}")
api = Api()
door_status = api.get_lock_door_status(ACCESS_TOKEN, lock_id)
self.assertEqual(LockDoorStatus.UNKNOWN, door_status)
@requests_mock.Mocker()
def test_lock(self, mock):
lock_id = 1234
mock.register_uri(
"put",
API_LOCK_URL.format(lock_id=lock_id),
text="{\"status\":\"locked\","
"\"dateTime\":\"2017-12-10T07:43:39.056Z\","
"\"isLockStatusChanged\":false,"
"\"valid\":true}")
api = Api()
status = api.lock(ACCESS_TOKEN, lock_id)
self.assertEqual(LockStatus.LOCKED, status)
@requests_mock.Mocker()
def test_unlock(self, mock):
lock_id = 1234
mock.register_uri(
"put",
API_UNLOCK_URL.format(lock_id=lock_id),
text="{\"status\": \"unlocked\"}")
api = Api()
status = api.unlock(ACCESS_TOKEN, lock_id)
self.assertEqual(LockStatus.UNLOCKED, status)
@requests_mock.Mocker()
def test_get_pins(self, mock):
lock_id = 1234
mock.register_uri(
"get",
API_GET_PINS_URL.format(lock_id=lock_id),
text=load_fixture("get_pins.json"))
api = Api()
pins = api.get_pins(ACCESS_TOKEN, lock_id)
self.assertEqual(1, len(pins))
first = pins[0]
self.assertEqual("epoZ87XSPqxlFdsaYyJiRRVR", first.pin_id)
self.assertEqual("A6697750D607098BAE8D6BAA11EF8063", first.lock_id)
self.assertEqual("c3b3a94f-473z-61a3-a8d1-a6e99482787a", first.user_id)
self.assertEqual("in-use", first.state)
self.assertEqual("123456", first.pin)
self.assertEqual(646545456465161, first.slot)
self.assertEqual("one-time", first.access_type)
self.assertEqual("John", first.first_name)
self.assertEqual("Doe", first.last_name)
self.assertEqual(True, first.unverified)
self.assertEqual(utc_of(2016, 11, 26, 22, 27, 11, 176000),
first.created_at)
self.assertEqual(utc_of(2017, 11, 23, 00, 42, 19, 470000),
first.updated_at)
self.assertEqual(utc_of(2017, 12, 10, 3, 12, 55, 563000),
first.loaded_date)
self.assertEqual(utc_of(2018, 1, 1, 1, 1, 1, 563000),
first.access_start_time)
self.assertEqual(utc_of(2018, 12, 1, 1, 1, 1, 563000),
first.access_end_time)
self.assertEqual(utc_of(2018, 11, 5, 10, 2, 41, 684000),
first.access_times)
|