code | repo_name | path | language | license | size
---|---|---|---|---|---|
"""
Views for login feature
"""
import logging
from edxmako.shortcuts import render_to_response
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from biz.djangoapps.ga_manager.models import Manager
log = logging.getLogger(__name__)
LOGIN_ADMIN = 1
LOGIN_ERROR = -1
LOGIN_DEFAULT = 0
LOGIN_ERROR_AUTH = -2
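# account_check result codes used by index() below:
#   LOGIN_DEFAULT    - initial state, no check performed yet
#   LOGIN_ADMIN      - password verified and the user holds a biz manager role
#   LOGIN_ERROR      - invalid input, unknown email, or password mismatch
#   LOGIN_ERROR_AUTH - password verified but the user holds no manager role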
def index(request):
"""
lists content of Login
"""
next_url = request.GET.get('next', '')
if request.user.is_active:
if request.user.is_authenticated():
if next_url == '':
return redirect(reverse('biz:index'))
else:
return redirect(next_url)
account_check = LOGIN_DEFAULT
post_email = request.POST.get('email', '')
    post_password = request.POST.get("password", '')
post_remember = False
if request.method == 'POST':
next_url = request.POST.get("next", '')
if "remember" in request.POST:
post_remember = True
if not 0 < len(post_email) <= 255:
log.info('Login failed - email length over')
account_check = LOGIN_ERROR
if not 0 < len(post_password) <= 255:
log.info('Login failed - password length over')
account_check = LOGIN_ERROR
if User.objects.filter(email=post_email, is_active=True).exists():
user = User.objects.get(email=post_email, is_active=True)
else:
log.info("Login failed - password for {0} is invalid".format(post_email))
account_check = LOGIN_ERROR
if account_check == LOGIN_ERROR:
return render_to_response('gx_login/login.html', {'account_check': account_check, 'next_url': next_url, 'email': post_email})
if user.check_password(post_password):
mgs = Manager.get_managers(user)
if any([mg.is_aggregator() for mg in mgs]):
account_check = LOGIN_ADMIN
if any([mg.is_director() for mg in mgs]):
account_check = LOGIN_ADMIN
if any([mg.is_manager() for mg in mgs]):
account_check = LOGIN_ADMIN
if any([mg.is_platformer() for mg in mgs]):
account_check = LOGIN_ADMIN
if account_check == LOGIN_ADMIN:
# Auto Updating Last Login Datetime
user = authenticate(username=user.username, password=post_password)
login(request, user)
if post_remember:
# Session Retention 7 days
request.session.set_expiry(604800)
else:
request.session.set_expiry(0)
if next_url == '':
return redirect(reverse('biz:index'))
else:
return redirect(next_url)
else:
account_check = LOGIN_ERROR_AUTH
else:
log.info('Login failed - password mismatch')
account_check = LOGIN_ERROR
return render_to_response('gx_login/login.html', {'account_check': account_check, 'next_url': next_url, 'email': post_email})
| nttks/edx-platform | biz/djangoapps/gx_login/views.py | Python | agpl-3.0 | 3,215 |
# This file is part of VoltDB.
# Copyright (C) 2008-2018 VoltDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# All the commands supported by the Voter application.
import os
@VOLT.Command(description = 'Build the Voter application and catalog.',
options = VOLT.BooleanOption('-C', '--conditional', 'conditional',
'only build when the catalog file is missing'))
def build(runner):
if not runner.opts.conditional or not os.path.exists('voter.jar'):
runner.java.compile('obj', 'src/voter/*.java', 'src/voter/procedures/*.java')
runner.call('volt.compile', '-c', 'obj', '-o', 'voter.jar', 'ddl.sql')
@VOLT.Command(description = 'Clean the Voter build output.')
def clean(runner):
runner.shell('rm', '-rfv', 'obj', 'debugoutput', 'voter.jar', 'voltdbroot')
@VOLT.Server('create',
description = 'Start the Voter VoltDB server.',
command_arguments = 'voter.jar',
classpath = 'obj')
def server(runner):
runner.call('build', '-C')
runner.go()
@VOLT.Java('voter.AsyncBenchmark', classpath = 'obj',
description = 'Run the Voter asynchronous benchmark.')
def async(runner):
runner.call('build', '-C')
runner.go()
@VOLT.Java('voter.SyncBenchmark', classpath = 'obj',
description = 'Run the Voter synchronous benchmark.')
def sync(runner):
runner.call('build', '-C')
runner.go()
@VOLT.Java('voter.JDBCBenchmark', classpath = 'obj',
description = 'Run the Voter JDBC benchmark.')
def jdbc(runner):
runner.call('build', '-C')
runner.go()
@VOLT.Java('voter.SimpleBenchmark', classpath = 'obj',
description = 'Run the Voter simple benchmark.')
def simple(runner):
runner.call('build', '-C')
runner.go()
| simonzhangsm/voltdb | tools/voter.d/voter.py | Python | agpl-3.0 | 2,792 |
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api, _
from openerp.exceptions import Warning
import logging
_logger = logging.getLogger(__name__)
class afip_incoterm(models.Model):
_name = 'afip.incoterm'
_description = 'Afip Incoterm'
afip_code = fields.Char(
'Code', required=True)
name = fields.Char(
'Name', required=True)
class afip_point_of_sale(models.Model):
_name = 'afip.point_of_sale'
_description = 'Afip Point Of Sale'
prefix = fields.Char(
'Prefix'
)
sufix = fields.Char(
        'Suffix'
)
type = fields.Selection([
('manual', 'Manual'),
('preprinted', 'Preprinted'),
('online', 'Online'),
        # Added by another module
# ('electronic', 'Electronic'),
# ('fiscal_printer', 'Fiscal Printer'),
],
'Type',
default='manual',
required=True,
)
name = fields.Char(
compute='get_name',
)
number = fields.Integer(
'Number', required=True
)
company_id = fields.Many2one(
'res.company', 'Company', required=True,
default=lambda self: self.env['res.company']._company_default_get(
'afip.point_of_sale')
)
journal_ids = fields.One2many(
'account.journal',
'point_of_sale_id',
'Journals',
)
document_sequence_type = fields.Selection(
[('own_sequence', 'Own Sequence'),
('same_sequence', 'Same Invoice Sequence')],
string='Document Sequence Type',
default='own_sequence',
required=True,
help="Use own sequence or invoice sequence on Debit and Credit Notes?"
)
journal_document_class_ids = fields.One2many(
'account.journal.afip_document_class',
compute='get_journal_document_class_ids',
string='Documents Classes',
)
@api.one
@api.depends('type', 'sufix', 'prefix', 'number')
def get_name(self):
        # TODO: improve this so it uses the translated label from the selection field
if self.type == 'manual':
name = 'Manual'
elif self.type == 'preprinted':
name = 'Preimpresa'
elif self.type == 'online':
name = 'Online'
elif self.type == 'electronic':
name = 'Electronica'
if self.prefix:
name = '%s %s' % (self.prefix, name)
if self.sufix:
name = '%s %s' % (name, self.sufix)
name = '%04d - %s' % (self.number, name)
self.name = name
@api.one
@api.depends('journal_ids.journal_document_class_ids')
def get_journal_document_class_ids(self):
journal_document_class_ids = self.env[
'account.journal.afip_document_class'].search([
('journal_id.point_of_sale_id', '=', self.id)])
self.journal_document_class_ids = journal_document_class_ids
_sql_constraints = [('number_unique', 'unique(number, company_id)',
'Number Must be Unique per Company!'), ]
class afip_document_class(models.Model):
_name = 'afip.document_class'
_description = 'Afip Document Class'
name = fields.Char(
'Name', size=120)
doc_code_prefix = fields.Char(
'Document Code Prefix', help="Prefix for Documents Codes on Invoices \
and Account Moves. For eg. 'FA ' will build 'FA 0001-0000001' Document Number")
afip_code = fields.Integer(
'AFIP Code', required=True)
document_letter_id = fields.Many2one(
'afip.document_letter', 'Document Letter')
report_name = fields.Char(
'Name on Reports',
help='Name that will be printed in reports, for example "CREDIT NOTE"')
document_type = fields.Selection([
('invoice', 'Invoices'),
('credit_note', 'Credit Notes'),
('debit_note', 'Debit Notes'),
('receipt', 'Receipt'),
('ticket', 'Ticket'),
('in_document', 'In Document'),
('other_document', 'Other Documents')
],
string='Document Type',
help='It defines some behaviours on automatic journal selection and\
in menus where it is shown.')
active = fields.Boolean(
'Active', default=True)
class afip_document_letter(models.Model):
_name = 'afip.document_letter'
_description = 'Afip Document letter'
name = fields.Char(
'Name', size=64, required=True)
afip_document_class_ids = fields.One2many(
'afip.document_class', 'document_letter_id', 'Afip Document Classes')
issuer_ids = fields.Many2many(
'afip.responsability', 'afip_doc_letter_issuer_rel',
'letter_id', 'responsability_id', 'Issuers',)
receptor_ids = fields.Many2many(
'afip.responsability', 'afip_doc_letter_receptor_rel',
'letter_id', 'responsability_id', 'Receptors',)
active = fields.Boolean(
'Active', default=True)
vat_discriminated = fields.Boolean(
'Vat Discriminated on Invoices?',
help="If True, the vat will be discriminated on invoice report.")
_sql_constraints = [('name', 'unique(name)', 'Name must be unique!'), ]
class afip_responsability(models.Model):
_name = 'afip.responsability'
_description = 'AFIP VAT Responsability'
name = fields.Char(
'Name', size=64, required=True)
code = fields.Char(
'Code', size=8, required=True)
active = fields.Boolean(
'Active', default=True)
issued_letter_ids = fields.Many2many(
'afip.document_letter', 'afip_doc_letter_issuer_rel',
'responsability_id', 'letter_id', 'Issued Document Letters')
received_letter_ids = fields.Many2many(
'afip.document_letter', 'afip_doc_letter_receptor_rel',
'responsability_id', 'letter_id', 'Received Document Letters')
vat_tax_required_on_sales_invoices = fields.Boolean(
'VAT Tax Required on Sales Invoices?',
help='If True, then a vay tax is mandatory on each sale invoice for companies of this responsability',
)
_sql_constraints = [('name', 'unique(name)', 'Name must be unique!'),
('code', 'unique(code)', 'Code must be unique!')]
class afip_document_type(models.Model):
_name = 'afip.document_type'
_description = 'AFIP document types'
name = fields.Char(
'Name', size=120, required=True)
code = fields.Char(
'Code', size=16, required=True)
afip_code = fields.Integer(
'AFIP Code', required=True)
active = fields.Boolean(
'Active', default=True)
| adrianpaesani/odoo-argentina | l10n_ar_invoice/models/afip.py | Python | agpl-3.0 | 6,822 |
"""Add is_loud and pronouns columns to PanelApplicant
Revision ID: bba880ef5bbd
Revises: 8f8419ebcf27
Create Date: 2019-07-20 02:57:17.794469
"""
# revision identifiers, used by Alembic.
revision = 'bba880ef5bbd'
down_revision = '8f8419ebcf27'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
try:
is_sqlite = op.get_context().dialect.name == 'sqlite'
except Exception:
is_sqlite = False
if is_sqlite:
op.get_context().connection.execute('PRAGMA foreign_keys=ON;')
utcnow_server_default = "(datetime('now', 'utc'))"
else:
utcnow_server_default = "timezone('utc', current_timestamp)"
def sqlite_column_reflect_listener(inspector, table, column_info):
"""Adds parenthesis around SQLite datetime defaults for utcnow."""
if column_info['default'] == "datetime('now', 'utc')":
column_info['default'] = utcnow_server_default
sqlite_reflect_kwargs = {
'listeners': [('column_reflect', sqlite_column_reflect_listener)]
}
# ===========================================================================
# HOWTO: Handle alter statements in SQLite
#
# def upgrade():
# if is_sqlite:
# with op.batch_alter_table('table_name', reflect_kwargs=sqlite_reflect_kwargs) as batch_op:
# batch_op.alter_column('column_name', type_=sa.Unicode(), server_default='', nullable=False)
# else:
# op.alter_column('table_name', 'column_name', type_=sa.Unicode(), server_default='', nullable=False)
#
# ===========================================================================
def upgrade():
op.add_column('panel_applicant', sa.Column('other_pronouns', sa.Unicode(), server_default='', nullable=False))
op.add_column('panel_applicant', sa.Column('pronouns', sa.Unicode(), server_default='', nullable=False))
op.add_column('panel_application', sa.Column('is_loud', sa.Boolean(), server_default='False', nullable=False))
def downgrade():
op.drop_column('panel_application', 'is_loud')
op.drop_column('panel_applicant', 'pronouns')
op.drop_column('panel_applicant', 'other_pronouns')
| magfest/ubersystem | alembic/versions/bba880ef5bbd_add_is_loud_and_pronouns_columns_to_.py | Python | agpl-3.0 | 2,103 |
def keysetter(key):
if not isinstance(key, str):
raise TypeError('key name must be a string')
resolve = key.split('.')
head, last = tuple(resolve[:-1]), resolve[-1]
def g(obj,value):
for key in head :
obj = obj[key]
obj[last] = value
return g
def keygetter(key):
if not isinstance(key, str):
raise TypeError('key name must be a string')
return lambda obj : resolve_key(obj, key)
def resolve_key(obj, key):
for name in key.split('.'):
obj = obj[name]
return obj
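

# Illustrative usage sketch (the nested dict below is hypothetical, not part of
# this module).  keygetter/keysetter build accessors for dotted key paths:
#
#     obj = {'a': {'b': 1}}
#     keygetter('a.b')(obj)      # -> 1
#     keysetter('a.b')(obj, 2)   # obj is now {'a': {'b': 2}}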
| aureooms/sak | lib/nice/operator.py | Python | agpl-3.0 | 554 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class EventType(models.Model):
_inherit = "event.type"
community_menu = fields.Boolean(
"Community Menu", compute="_compute_community_menu",
readonly=False, store=True,
help="Display community tab on website")
@api.depends('website_menu')
def _compute_community_menu(self):
for event_type in self:
event_type.community_menu = event_type.website_menu
| ygol/odoo | addons/website_event_track_online/models/event_type.py | Python | agpl-3.0 | 551 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014-present Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pytest
import json
from django.urls import reverse
from .. import factories as f
pytestmark = pytest.mark.django_db
def test_watch_task(client):
user = f.UserFactory.create()
task = f.create_task(owner=user, milestone=None)
f.MembershipFactory.create(project=task.project, user=user, is_admin=True)
url = reverse("tasks-watch", args=(task.id,))
client.login(user)
response = client.post(url)
assert response.status_code == 200
def test_unwatch_task(client):
user = f.UserFactory.create()
task = f.create_task(owner=user, milestone=None)
f.MembershipFactory.create(project=task.project, user=user, is_admin=True)
url = reverse("tasks-watch", args=(task.id,))
client.login(user)
response = client.post(url)
assert response.status_code == 200
def test_list_task_watchers(client):
user = f.UserFactory.create()
task = f.TaskFactory(owner=user)
f.MembershipFactory.create(project=task.project, user=user, is_admin=True)
f.WatchedFactory.create(content_object=task, user=user)
url = reverse("task-watchers-list", args=(task.id,))
client.login(user)
response = client.get(url)
assert response.status_code == 200
assert response.data[0]['id'] == user.id
def test_get_task_watcher(client):
user = f.UserFactory.create()
task = f.TaskFactory(owner=user)
f.MembershipFactory.create(project=task.project, user=user, is_admin=True)
watch = f.WatchedFactory.create(content_object=task, user=user)
url = reverse("task-watchers-detail", args=(task.id, watch.user.id))
client.login(user)
response = client.get(url)
assert response.status_code == 200
assert response.data['id'] == watch.user.id
def test_get_task_watchers(client):
user = f.UserFactory.create()
task = f.TaskFactory(owner=user)
f.MembershipFactory.create(project=task.project, user=user, is_admin=True)
url = reverse("tasks-detail", args=(task.id,))
f.WatchedFactory.create(content_object=task, user=user)
client.login(user)
response = client.get(url)
assert response.status_code == 200
assert response.data['watchers'] == [user.id]
assert response.data['total_watchers'] == 1
def test_get_task_is_watcher(client):
user = f.UserFactory.create()
task = f.create_task(owner=user, milestone=None)
f.MembershipFactory.create(project=task.project, user=user, is_admin=True)
url_detail = reverse("tasks-detail", args=(task.id,))
url_watch = reverse("tasks-watch", args=(task.id,))
url_unwatch = reverse("tasks-unwatch", args=(task.id,))
client.login(user)
response = client.get(url_detail)
assert response.status_code == 200
assert response.data['watchers'] == []
assert response.data['is_watcher'] == False
response = client.post(url_watch)
assert response.status_code == 200
response = client.get(url_detail)
assert response.status_code == 200
assert response.data['watchers'] == [user.id]
assert response.data['is_watcher'] == True
response = client.post(url_unwatch)
assert response.status_code == 200
response = client.get(url_detail)
assert response.status_code == 200
assert response.data['watchers'] == []
assert response.data['is_watcher'] == False
def test_remove_task_watcher(client):
user = f.UserFactory.create()
project = f.ProjectFactory.create()
task = f.TaskFactory(project=project,
user_story=None,
status__project=project,
milestone__project=project)
task.add_watcher(user)
role = f.RoleFactory.create(project=project, permissions=['modify_task', 'view_tasks'])
f.MembershipFactory.create(project=project, user=user, role=role)
url = reverse("tasks-detail", args=(task.id,))
client.login(user)
data = {"version": task.version, "watchers": []}
response = client.json.patch(url, json.dumps(data))
assert response.status_code == 200
assert response.data['watchers'] == []
assert response.data['is_watcher'] == False
| taigaio/taiga-back | tests/integration/test_watch_tasks.py | Python | agpl-3.0 | 4,832 |
import subprocess
def release():
subprocess.call(["python3", "setup.py", "sdist", "upload"])
| aureooms/sak | sak/pip3.py | Python | agpl-3.0 | 100 |
from . import BaseWordChoice
class WordPreference(BaseWordChoice):
def pick_w(self,m,voc,mem,context=[]):
if m in voc.get_known_meanings():
if m in list(mem['prefered words'].keys()):
w = mem['prefered words'][m]
if w not in voc.get_known_words(m=m):
w = voc.get_random_known_w(m=m)
else:
w = voc.get_random_known_w(m=m)
elif voc.get_unknown_words():
w = voc.get_new_unknown_w()
else:
w = voc.get_random_known_w(option='min')
return w
class PlaySmart(WordPreference):
def __init__(self, *args, **kwargs):
WordPreference.__init__(self,memory_policies=[{'mem_type':'wordpreference_smart'}],*args,**kwargs)
class PlayLast(WordPreference):
def __init__(self, *args, **kwargs):
WordPreference.__init__(self,memory_policies=[{'mem_type':'wordpreference_last'}],*args,**kwargs)
class PlayFirst(WordPreference):
def __init__(self, *args, **kwargs):
WordPreference.__init__(self,memory_policies=[{'mem_type':'wordpreference_first'}],*args,**kwargs)
| flowersteam/naminggamesal | naminggamesal/ngstrat/word_choice/word_preference.py | Python | agpl-3.0 | 997 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('maposmatic', '0004_maprenderingjob_track'),
]
operations = [
migrations.AlterField(
model_name='maprenderingjob',
name='track',
field=models.FileField(null=True, upload_to=b'upload/tracks/', blank=True),
),
]
| hholzgra/maposmatic | www/maposmatic/migrations/0005_auto_20170521_0103.py | Python | agpl-3.0 | 453 |
"""
Base test case for the course API views.
"""
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from lms.djangoapps.courseware.tests.factories import StaffFactory
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import TEST_DATA_SPLIT_MODULESTORE, SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
# pylint: disable=unused-variable
class BaseCourseViewTest(SharedModuleStoreTestCase, APITestCase):
"""
Base test class for course data views.
"""
MODULESTORE = TEST_DATA_SPLIT_MODULESTORE
view_name = None # The name of the view to use in reverse() call in self.get_url()
@classmethod
def setUpClass(cls):
super(BaseCourseViewTest, cls).setUpClass()
cls.course = CourseFactory.create(display_name='test course', run="Testing_course")
cls.course_key = cls.course.id
cls.password = 'test'
cls.student = UserFactory(username='dummy', password=cls.password)
cls.staff = StaffFactory(course_key=cls.course.id, password=cls.password)
cls.initialize_course(cls.course)
@classmethod
def initialize_course(cls, course):
"""
Sets up the structure of the test course.
"""
course.self_paced = True
cls.store.update_item(course, cls.staff.id)
cls.section = ItemFactory.create(
parent_location=course.location,
category="chapter",
)
cls.subsection1 = ItemFactory.create(
parent_location=cls.section.location,
category="sequential",
)
unit1 = ItemFactory.create(
parent_location=cls.subsection1.location,
category="vertical",
)
ItemFactory.create(
parent_location=unit1.location,
category="video",
)
ItemFactory.create(
parent_location=unit1.location,
category="problem",
)
cls.subsection2 = ItemFactory.create(
parent_location=cls.section.location,
category="sequential",
)
unit2 = ItemFactory.create(
parent_location=cls.subsection2.location,
category="vertical",
)
unit3 = ItemFactory.create(
parent_location=cls.subsection2.location,
category="vertical",
)
ItemFactory.create(
parent_location=unit3.location,
category="video",
)
ItemFactory.create(
parent_location=unit3.location,
category="video",
)
def get_url(self, course_id):
"""
Helper function to create the url
"""
return reverse(
self.view_name,
kwargs={
'course_id': course_id
}
)
| cpennington/edx-platform | cms/djangoapps/contentstore/api/tests/base.py | Python | agpl-3.0 | 2,906 |
# -*- coding: utf-8 -*-
# Copyright (C) 2010 Eduardo Robles Elvira <edulix AT gmail DOT com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.contrib.auth.models import User, UserManager
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.db.models import signals, Avg, Q
from datetime import date
import os
from django.conf import settings
def create_profile_for_user(sender, **kwargs):
'''
This way everytime a User is created, a Profile is created too.
'''
if kwargs['created']:
profile = Profile()
if not kwargs['instance'].__dict__.has_key("birth_date"):
profile.birth_date = date.today()
if not kwargs['instance'].__dict__.has_key("address"):
profile.address = _("address")
profile.__dict__.update(kwargs['instance'].__dict__)
profile.save()
#signals.post_save.connect(create_profile_for_user, sender=User)
class Profile(User):
'''
    User with time bank settings.
'''
photo = models.ImageField(_("Avatar"), blank=True, null=True,
upload_to=os.path.join(settings.STATIC_DOC_ROOT, "photos"))
    birth_date = models.DateField(_("Birth date"), default=date.today())
    address = models.CharField(_("Address"), max_length=100, default=_("address"))
    org_name = models.CharField(_("Organization name"), max_length=30, default=_("org_name"))
    first_name1 = models.CharField(_("Representative first name"), max_length=30, default=_("first_name"))
    last_name1 = models.CharField(_("Representative last name"), max_length=30, default=_("last_name"))
    email1 = models.CharField(_("Representative e-mail"), max_length=30, default=_("email"))
    # credits in minutes
    balance = models.IntegerField(default=600)
def balance_hours(self):
if self.balance % 60 == 0:
return self.balance/60
return self.balance/60.0
    description = models.TextField(_("Description"), max_length=300,
        blank=True)
    land_line = models.CharField(_("Land line"), max_length=20)
    mobile_tlf = models.CharField(_("Mobile phone"), max_length=20)
    email_updates = models.BooleanField(_("I want to receive Time Bank news"),
default=True)
# Saving the user language allows sending emails to him in his desired
# language (among other things)
    lang_code = models.CharField(_("Language Code"), max_length=10, default='')
    class Meta:
        verbose_name = _("User")
        verbose_name_plural = _("Users")
def __unicode__(self):
return self.username
# Use UserManager to get the create_user method, etc.
objects = UserManager()
def __eq__(self, value):
return value and self.id == value.id or False
def transfers_pending(self):
'''
Transfers from this user which are not in a final state
'''
from serv.models import Transfer
return Transfer.objects.filter(Q(credits_payee=self) \
| Q(credits_payee=self)).filter(status__in=['r', 'd'])
def karma(self):
'''
Average of the user's transfer scores
'''
karma = self.transfers_received.aggregate(Avg('rating_score'))
if karma['rating_score__avg']:
return int(karma['rating_score__avg'])
else:
return 0
| miaerbus/timebank | user/models.py | Python | agpl-3.0 | 4,968 |
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Products Management Group',
'version': '13.0.1.0.0',
'category': 'base.module_category_knowledge_management',
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'depends': [
'sale',
],
'data': [
'security/product_management_security.xml',
],
'installable': False,
}
| ingadhoc/product | product_management_group/__manifest__.py | Python | agpl-3.0 | 1,296 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from math import isinf, isnan
from warnings import warn
NOT_MASS_BALANCED_TERMS = {"SBO:0000627", # EXCHANGE
"SBO:0000628", # DEMAND
"SBO:0000629", # BIOMASS
"SBO:0000631", # PSEUDOREACTION
"SBO:0000632", # SINK
}
def check_mass_balance(model):
unbalanced = {}
for reaction in model.reactions:
if reaction.annotation.get("SBO") not in NOT_MASS_BALANCED_TERMS:
balance = reaction.check_mass_balance()
if balance:
unbalanced[reaction] = balance
return unbalanced
# no longer strictly necessary, done by optlang solver interfaces
def check_reaction_bounds(model):
warn("no longer necessary, done by optlang solver interfaces",
DeprecationWarning)
errors = []
for reaction in model.reactions:
if reaction.lower_bound > reaction.upper_bound:
errors.append("Reaction '%s' has lower bound > upper bound" %
reaction.id)
if isinf(reaction.lower_bound):
errors.append("Reaction '%s' has infinite lower_bound" %
reaction.id)
elif isnan(reaction.lower_bound):
errors.append("Reaction '%s' has NaN for lower_bound" %
reaction.id)
if isinf(reaction.upper_bound):
errors.append("Reaction '%s' has infinite upper_bound" %
reaction.id)
elif isnan(reaction.upper_bound):
errors.append("Reaction '%s' has NaN for upper_bound" %
reaction.id)
return errors
def check_metabolite_compartment_formula(model):
errors = []
for met in model.metabolites:
if met.formula is not None and len(met.formula) > 0:
if not met.formula.isalnum():
errors.append("Metabolite '%s' formula '%s' not alphanumeric" %
(met.id, met.formula))
return errors
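

# Minimal usage sketch (assumes an existing cobra Model instance named `model`,
# which is not defined in this module):
#
#     unbalanced = check_mass_balance(model)
#     for reaction, balance in unbalanced.items():
#         print(reaction.id, balance)   # leftover elements/charge per reaction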
| zakandrewking/cobrapy | cobra/manipulation/validate.py | Python | lgpl-2.1 | 2,116 |
import math, os
from bup import _helpers, helpers
from bup.helpers import sc_page_size
_fmincore = getattr(helpers, 'fmincore', None)
BLOB_MAX = 8192*4 # 8192 is the "typical" blob size for bupsplit
BLOB_READ_SIZE = 1024*1024
MAX_PER_TREE = 256
progress_callback = None
fanout = 16
GIT_MODE_FILE = 0100644
GIT_MODE_TREE = 040000
GIT_MODE_SYMLINK = 0120000
assert(GIT_MODE_TREE != 40000) # 0xxx should be treated as octal
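
# How splitting works, in brief: readfile_iter() streams file data into a Buf,
# _helpers.splitbuf() finds split points with a rolling checksum, and the number
# of checksum bits beyond the base blob size becomes a "level".  Higher levels
# make _squish() close off subtrees, producing the fanned-out tree structure
# that split_to_shalist() writes out.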
# The purpose of this type of buffer is to avoid copying on peek(), get(),
# and eat(). We do copy the buffer contents on put(), but that should
# be ok if we always only put() large amounts of data at a time.
class Buf:
def __init__(self):
self.data = ''
self.start = 0
def put(self, s):
if s:
self.data = buffer(self.data, self.start) + s
self.start = 0
def peek(self, count):
return buffer(self.data, self.start, count)
def eat(self, count):
self.start += count
def get(self, count):
v = buffer(self.data, self.start, count)
self.start += count
return v
def used(self):
return len(self.data) - self.start
def _fadvise_pages_done(fd, first_page, count):
assert(first_page >= 0)
assert(count >= 0)
if count > 0:
_helpers.fadvise_done(fd,
first_page * sc_page_size,
count * sc_page_size)
def _nonresident_page_regions(status_bytes, max_region_len=None):
"""Return (start_page, count) pairs in ascending start_page order for
each contiguous region of nonresident pages indicated by the
mincore() status_bytes. Limit the number of pages in each region
to max_region_len."""
assert(max_region_len is None or max_region_len > 0)
start = None
for i, x in enumerate(status_bytes):
in_core = x & helpers.MINCORE_INCORE
if start is None:
if not in_core:
start = i
else:
count = i - start
if in_core:
yield (start, count)
start = None
elif max_region_len and count >= max_region_len:
yield (start, count)
start = i
if start is not None:
yield (start, len(status_bytes) - start)
def _uncache_ours_upto(fd, offset, first_region, remaining_regions):
"""Uncache the pages of fd indicated by first_region and
remaining_regions that are before offset, where each region is a
(start_page, count) pair. The final region must have a start_page
of None."""
rstart, rlen = first_region
while rstart is not None and (rstart + rlen) * sc_page_size <= offset:
_fadvise_pages_done(fd, rstart, rlen)
rstart, rlen = next(remaining_regions, (None, None))
return (rstart, rlen)
def readfile_iter(files, progress=None):
for filenum,f in enumerate(files):
ofs = 0
b = ''
fd = rpr = rstart = rlen = None
if _fmincore and hasattr(f, 'fileno'):
fd = f.fileno()
max_chunk = max(1, (8 * 1024 * 1024) / sc_page_size)
rpr = _nonresident_page_regions(_fmincore(fd), max_chunk)
rstart, rlen = next(rpr, (None, None))
while 1:
if progress:
progress(filenum, len(b))
b = f.read(BLOB_READ_SIZE)
ofs += len(b)
if rpr:
rstart, rlen = _uncache_ours_upto(fd, ofs, (rstart, rlen), rpr)
if not b:
break
yield b
if rpr:
rstart, rlen = _uncache_ours_upto(fd, ofs, (rstart, rlen), rpr)
def _splitbuf(buf, basebits, fanbits):
while 1:
b = buf.peek(buf.used())
(ofs, bits) = _helpers.splitbuf(b)
if ofs:
if ofs > BLOB_MAX:
ofs = BLOB_MAX
level = 0
else:
level = (bits-basebits)//fanbits # integer division
buf.eat(ofs)
yield buffer(b, 0, ofs), level
else:
break
while buf.used() >= BLOB_MAX:
# limit max blob size
yield buf.get(BLOB_MAX), 0
def _hashsplit_iter(files, progress):
assert(BLOB_READ_SIZE > BLOB_MAX)
basebits = _helpers.blobbits()
fanbits = int(math.log(fanout or 128, 2))
buf = Buf()
for inblock in readfile_iter(files, progress):
buf.put(inblock)
for buf_and_level in _splitbuf(buf, basebits, fanbits):
yield buf_and_level
if buf.used():
yield buf.get(buf.used()), 0
def _hashsplit_iter_keep_boundaries(files, progress):
for real_filenum,f in enumerate(files):
if progress:
def prog(filenum, nbytes):
# the inner _hashsplit_iter doesn't know the real file count,
# so we'll replace it here.
return progress(real_filenum, nbytes)
else:
prog = None
for buf_and_level in _hashsplit_iter([f], progress=prog):
yield buf_and_level
def hashsplit_iter(files, keep_boundaries, progress):
if keep_boundaries:
return _hashsplit_iter_keep_boundaries(files, progress)
else:
return _hashsplit_iter(files, progress)
total_split = 0
def split_to_blobs(makeblob, files, keep_boundaries, progress):
global total_split
for (blob, level) in hashsplit_iter(files, keep_boundaries, progress):
sha = makeblob(blob)
total_split += len(blob)
if progress_callback:
progress_callback(len(blob))
yield (sha, len(blob), level)
def _make_shalist(l):
ofs = 0
l = list(l)
total = sum(size for mode,sha,size, in l)
vlen = len('%x' % total)
shalist = []
for (mode, sha, size) in l:
shalist.append((mode, '%0*x' % (vlen,ofs), sha))
ofs += size
assert(ofs == total)
return (shalist, total)
def _squish(maketree, stacks, n):
i = 0
while i < n or len(stacks[i]) >= MAX_PER_TREE:
while len(stacks) <= i+1:
stacks.append([])
if len(stacks[i]) == 1:
stacks[i+1] += stacks[i]
elif stacks[i]:
(shalist, size) = _make_shalist(stacks[i])
tree = maketree(shalist)
stacks[i+1].append((GIT_MODE_TREE, tree, size))
stacks[i] = []
i += 1
def split_to_shalist(makeblob, maketree, files,
keep_boundaries, progress=None):
sl = split_to_blobs(makeblob, files, keep_boundaries, progress)
assert(fanout != 0)
if not fanout:
shal = []
for (sha,size,level) in sl:
shal.append((GIT_MODE_FILE, sha, size))
return _make_shalist(shal)[0]
else:
stacks = [[]]
for (sha,size,level) in sl:
stacks[0].append((GIT_MODE_FILE, sha, size))
_squish(maketree, stacks, level)
#log('stacks: %r\n' % [len(i) for i in stacks])
_squish(maketree, stacks, len(stacks)-1)
#log('stacks: %r\n' % [len(i) for i in stacks])
return _make_shalist(stacks[-1])[0]
def split_to_blob_or_tree(makeblob, maketree, files,
keep_boundaries, progress=None):
shalist = list(split_to_shalist(makeblob, maketree,
files, keep_boundaries, progress))
if len(shalist) == 1:
return (shalist[0][0], shalist[0][2])
elif len(shalist) == 0:
return (GIT_MODE_FILE, makeblob(''))
else:
return (GIT_MODE_TREE, maketree(shalist))
def open_noatime(name):
fd = _helpers.open_noatime(name)
try:
return os.fdopen(fd, 'rb', 1024*1024)
except:
try:
os.close(fd)
except:
pass
raise
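

# Rough usage sketch (the `writer` object below is hypothetical; in bup the
# makeblob/maketree callables typically come from a git.PackWriter's
# new_blob/new_tree methods):
#
#     f = open_noatime(path)
#     mode, oid = split_to_blob_or_tree(writer.new_blob, writer.new_tree,
#                                       [f], keep_boundaries=False)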
| jbaber/bup | lib/bup/hashsplit.py | Python | lgpl-2.1 | 7,757 |
#!/usr/bin/python
"""Test of ARIA horizontal sliders using Firefox."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
#sequence.append(WaitForDocLoad())
sequence.append(PauseAction(10000))
sequence.append(KeyComboAction("Tab"))
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.AssertPresentationAction(
"1. Tab to Volume Horizontal Slider",
["BRAILLE LINE: 'Volume 0 % horizontal slider'",
" VISIBLE: 'Volume 0 % horizontal slider', cursor=1",
"SPEECH OUTPUT: 'Volume horizontal slider 0 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Right"))
sequence.append(utils.AssertPresentationAction(
"2. Volume Right Arrow",
["BRAILLE LINE: 'Volume 1 % horizontal slider'",
" VISIBLE: 'Volume 1 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '1 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Right"))
sequence.append(utils.AssertPresentationAction(
"3. Volume Right Arrow",
["BRAILLE LINE: 'Volume 2 % horizontal slider'",
" VISIBLE: 'Volume 2 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '2 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Left"))
sequence.append(utils.AssertPresentationAction(
"4. Volume Left Arrow",
["BRAILLE LINE: 'Volume 1 % horizontal slider'",
" VISIBLE: 'Volume 1 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '1 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Left"))
sequence.append(utils.AssertPresentationAction(
"5. Volume Left Arrow",
["BRAILLE LINE: 'Volume 0 % horizontal slider'",
" VISIBLE: 'Volume 0 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '0 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"6. Volume Up Arrow",
["BRAILLE LINE: 'Volume 1 % horizontal slider'",
" VISIBLE: 'Volume 1 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '1 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"7. Volume Up Arrow",
["BRAILLE LINE: 'Volume 2 % horizontal slider'",
" VISIBLE: 'Volume 2 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '2 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"8. Volume Down Arrow",
["BRAILLE LINE: 'Volume 1 % horizontal slider'",
" VISIBLE: 'Volume 1 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '1 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"9. Volume Down Arrow",
["BRAILLE LINE: 'Volume 0 % horizontal slider'",
" VISIBLE: 'Volume 0 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '0 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Page_Up"))
sequence.append(utils.AssertPresentationAction(
"10. Volume Page Up",
["BRAILLE LINE: 'Volume 25 % horizontal slider'",
" VISIBLE: 'Volume 25 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '25 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Page_Down"))
sequence.append(utils.AssertPresentationAction(
"11. Volume Page Down",
["BRAILLE LINE: 'Volume 0 % horizontal slider'",
" VISIBLE: 'Volume 0 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '0 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("End"))
sequence.append(utils.AssertPresentationAction(
"12. Volume End",
["BRAILLE LINE: 'Volume 100 % horizontal slider'",
" VISIBLE: 'Volume 100 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '100 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Home"))
sequence.append(utils.AssertPresentationAction(
"13. Volume Home",
["BRAILLE LINE: 'Volume 0 % horizontal slider'",
" VISIBLE: 'Volume 0 % horizontal slider', cursor=1",
"SPEECH OUTPUT: '0 %'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.AssertPresentationAction(
"14. Tab to Food Quality Horizontal Slider",
["KNOWN ISSUE: The double-presentation is because of the authoring, putting the name and value into the description",
"BRAILLE LINE: 'Food Quality terrible horizontal slider'",
" VISIBLE: 'Food Quality terrible horizontal', cursor=1",
"SPEECH OUTPUT: 'Food Quality horizontal slider terrible.'",
"SPEECH OUTPUT: 'Food Quality: terrible (1 of 5)'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Right"))
sequence.append(utils.AssertPresentationAction(
"15. Food Quality Right Arrow",
["BRAILLE LINE: 'Food Quality bad horizontal slider'",
" VISIBLE: 'Food Quality bad horizontal slid', cursor=1",
"SPEECH OUTPUT: 'bad'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Right"))
sequence.append(utils.AssertPresentationAction(
"16. Food Quality Right Arrow",
["BRAILLE LINE: 'Food Quality decent horizontal slider'",
" VISIBLE: 'Food Quality decent horizontal s', cursor=1",
"SPEECH OUTPUT: 'decent'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Left"))
sequence.append(utils.AssertPresentationAction(
"17. Food Quality Left Arrow",
["BRAILLE LINE: 'Food Quality bad horizontal slider'",
" VISIBLE: 'Food Quality bad horizontal slid', cursor=1",
"SPEECH OUTPUT: 'bad'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"18. Food Quality Up Arrow",
["BRAILLE LINE: 'Food Quality decent horizontal slider'",
" VISIBLE: 'Food Quality decent horizontal s', cursor=1",
"SPEECH OUTPUT: 'decent'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"19. Food Quality Down Arrow",
["BRAILLE LINE: 'Food Quality bad horizontal slider'",
" VISIBLE: 'Food Quality bad horizontal slid', cursor=1",
"SPEECH OUTPUT: 'bad'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"20. Food Quality Down Arrow",
["BRAILLE LINE: 'Food Quality terrible horizontal slider'",
" VISIBLE: 'Food Quality terrible horizontal', cursor=1",
"SPEECH OUTPUT: 'terrible'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Page_Up"))
sequence.append(utils.AssertPresentationAction(
"21. Food Quality Page Up",
["BRAILLE LINE: 'Food Quality bad horizontal slider'",
" VISIBLE: 'Food Quality bad horizontal slid', cursor=1",
"SPEECH OUTPUT: 'bad'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Page_Down"))
sequence.append(utils.AssertPresentationAction(
"22. Food Quality Page Down",
["BRAILLE LINE: 'Food Quality terrible horizontal slider'",
" VISIBLE: 'Food Quality terrible horizontal', cursor=1",
"SPEECH OUTPUT: 'terrible'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("End"))
sequence.append(utils.AssertPresentationAction(
"23. Food Quality End",
["BRAILLE LINE: 'Food Quality excellent horizontal slider'",
" VISIBLE: 'Food Quality excellent horizonta', cursor=1",
"SPEECH OUTPUT: 'excellent'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Home"))
sequence.append(utils.AssertPresentationAction(
"24. Food Quality Home",
["BRAILLE LINE: 'Food Quality terrible horizontal slider'",
" VISIBLE: 'Food Quality terrible horizontal', cursor=1",
"SPEECH OUTPUT: 'terrible'"]))
sequence.append(utils.AssertionSummaryAction())
sequence.start()
| GNOME/orca | test/keystrokes/firefox/aria_slider_tpg.py | Python | lgpl-2.1 | 8,352 |
# JoeTraffic - Web-Log Analysis Application utilizing the JoeAgent Framework.
# Copyright (C) 2004 Rhett Garber
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from JoeAgent import job, event
import db_interface
import os, os.path
import logging
import log_parser
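# Number of log lines to parse per scheduling slice; processing in small chunks
# keeps the agent's event loop responsive (see ReadLogJob.notify below).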
LINEINCR = 30
log = logging.getLogger("agent.LogReader")
class ReadLogCompleteEvent(event.Event):
"""Event to indicate the file is completely read. This event will
be caught by the FindLogJob that is watching it. The file will
continue to be checked for modifications"""
pass
class ReadLogContinueEvent(event.Event):
"""Event to indicate we should continue reading the file. Log file
processing will be done in chunks so as not to block the agent for
too long."""
pass
class ReadLogJob(job.Job):
def __init__(self, agent_obj, logfile):
job.Job.__init__(self, agent_obj)
assert os.path.isfile(logfile), "Not a file: %s" % str(logfile)
self._log_size = os.stat(logfile).st_size
log.debug("Log size is %d" % self._log_size)
self._logfile_path = logfile
self._logfile_hndl = open(logfile, 'r')
self._progress = 0 # Data read from file
self._db = db_interface.getDB()
def getFilePath(self):
return self._logfile_path
def getBytesRead(self):
return self._progress
def getBytesTotal(self):
return self._log_size
def run(self):
evt = ReadLogContinueEvent(self)
self.getAgent().addEvent(evt)
def notify(self, evt):
job.Job.notify(self, evt)
if isinstance(evt, ReadLogContinueEvent) and evt.getSource() == self:
log.debug("Continuing read of file")
# Continue to read the log
try:
self._progress += log_parser.read_log(
self._logfile_hndl, self._db, LINEINCR)
log.debug("Read %d %% of file (%d / %d)" % (self.getProgress(),
self._progress,
self._log_size))
except log_parser.EndOfLogException, e:
self._progress = self._log_size
# Log file is complete, updated the db entry
self._mark_complete()
# Add an event to notify that the file is complete
self._logfile_hndl.close()
new_evt = ReadLogCompleteEvent(self)
self.getAgent().addEvent(new_evt)
except log_parser.InvalidLogException, e:
log.warning("Invalid log file: %s" % str(e))
self._logfile_hndl.close()
new_evt = ReadLogCompleteEvent(self)
self.getAgent().addEvent(new_evt)
else:
# Add an event to continue reading
new_evt = ReadLogContinueEvent(self)
self.getAgent().addEvent(new_evt)
def _update_db(self):
"""Update the entry in the database for this logfile"""
log.debug("Updating file %s" % self._logfile_path)
pass
def _mark_invalid(self):
"""Update the database to indicate that this is not a valid log file"""
log.debug("Marking file %s invalid" % self._logfile_path)
pass
def _mark_complete(self):
log.debug("Marking file %s complete" % self._logfile_path)
pass
def getProgress(self):
"""Return a percentage complete value"""
if self._log_size == 0:
return 0
return int((float(self._progress) / self._log_size) * 100)
| rhettg/JoeTraffic | LogReader/read_job.py | Python | lgpl-2.1 | 4,327 |
@app.route('/job/<name>')
def results(name):
job = saliweb.frontend.get_completed_job(name,
flask.request.args.get('passwd'))
# Determine whether the job completed successfully
if os.path.exists(job.get_path('output.pdb')):
template = 'results_ok.html'
else:
template = 'results_failed.html'
return saliweb.frontend.render_results_template(template, job=job)
| salilab/saliweb | examples/frontend-results.py | Python | lgpl-2.1 | 440 |
#! /usr/bin/python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Name: Generic.py
# Purpose:
# Author: Fabien Marteau <[email protected]>
# Created: 21/05/2008
#-----------------------------------------------------------------------------
# Copyright (2008) Armadeus Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#-----------------------------------------------------------------------------
# Revision list :
#
# Date By Changes
#
#-----------------------------------------------------------------------------
__doc__ = ""
__version__ = "1.0.0"
__author__ = "Fabien Marteau <[email protected]>"
import re
from periphondemand.bin.utils.wrapperxml import WrapperXml
from periphondemand.bin.utils.error import Error
DESTINATION = ["fpga","driver","both"]
PUBLIC = ["true","false"]
class Generic(WrapperXml):
""" Manage generic instance value
"""
def __init__(self,parent,**keys):
""" init Generic,
__init__(self,parent,node)
__init__(self,parent,nodestring)
__init__(self,parent,name)
"""
self.parent=parent
if "node" in keys:
self.__initnode(keys["node"])
elif "nodestring" in keys:
self.__initnodestring(keys["nodestring"])
elif "name" in keys:
self.__initname(keys["name"])
else:
raise Error("Keys unknown in Generic init()",0)
def __initnode(self,node):
WrapperXml.__init__(self,node=node)
def __initnodestring(self,nodestring):
WrapperXml.__init__(self,nodestring=nodestring)
def __initname(self,name):
WrapperXml.__init__(self,nodename="generic")
self.setName(name)
def getOp(self):
return self.getAttributeValue("op")
def setOp(self,op):
self.setAttribute("op",op)
def getTarget(self):
return self.getAttributeValue("target")
def setTarget(self,target):
self.setAttribute("target",target)
def isPublic(self):
if self.getAttributeValue("public")=="true":
return "true"
else:
return "false"
def setPublic(self,public):
public = public.lower()
if not public in PUBLIC:
raise Error("Public value "+str(public)+" wrong")
self.setAttribute("public",public)
def getType(self):
the_type = self.getAttributeValue("type")
if the_type == None:
raise Error("Generic "+self.getName()+\
" description malformed, type must be defined",0)
else:
return the_type
def setType(self,type):
self.setAttribute("type",type)
def getMatch(self):
try:
return self.getAttributeValue("match").encode("utf-8")
except AttributeError:
return None
def setMatch(self,match):
self.setAttribute("match",match)
def getValue(self):
""" return the generic value
"""
component = self.getParent()
if self.getOp() == None:
return self.getAttributeValue("value")
else:
target = self.getTarget().split(".")
if self.getOp() == "realsizeof":
# return the number of connected pin
return str(int(component.getInterface(target[0]).getPort(target[1]).getMaxPinNum())+1)
else:
raise Error("Operator unknown "+self.getOp(),1)
def setValue(self,value):
if self.getMatch() == None:
self.setAttribute("value",value)
elif re.compile(self.getMatch()).match(value):
self.setAttribute("value",value)
else:
raise Error("Value doesn't match for attribute "+str(value),0)
def getDestination(self):
""" return the generic destination (fpga,driver or both)
"""
return self.getAttributeValue("destination")
def setDestination(self,destination):
destination = destination.lower()
if not destination in DESTINATION:
raise Error("Destination value "+str(destination)+\
" unknown")
self.setAttribute("destination",destination)
| magyarm/periphondemand-code | src/bin/core/generic.py | Python | lgpl-2.1 | 4,911 |
import logging
from ..DataUploader import Plugin as DataUploaderPlugin
from .reader import AndroidReader, AndroidStatsReader
from ...common.interfaces import AbstractPlugin
try:
from volta.core.core import Core as VoltaCore
except Exception:
raise RuntimeError("Please install volta. https://github.com/yandex-load/volta")
logger = logging.getLogger(__name__)
class Plugin(AbstractPlugin):
SECTION = "android"
SECTION_META = "meta"
def __init__(self, core, cfg, name):
self.stats_reader = None
self.reader = None
super(Plugin, self).__init__(core, cfg, name)
self.device = None
try:
self.cfg = cfg['volta_options']
for key, value in self.cfg.items():
if not isinstance(value, dict):
logger.debug('Malformed VoltaConfig key: %s value %s', key, value)
raise RuntimeError('Malformed VoltaConfig passed, key: %s. Should by dict' % key)
except AttributeError:
logger.error('Failed to read Volta config', exc_info=True)
self.volta_core = VoltaCore(self.cfg)
@staticmethod
def get_key():
return __file__
def get_available_options(self):
opts = ["volta_options"]
return opts
def configure(self):
self.volta_core.configure()
def get_reader(self):
if self.reader is None:
self.reader = AndroidReader()
return self.reader
def get_stats_reader(self):
if self.stats_reader is None:
self.stats_reader = AndroidStatsReader()
return self.stats_reader
def prepare_test(self):
self.core.add_artifact_file(self.volta_core.currents_fname)
[self.core.add_artifact_file(fname) for fname in self.volta_core.event_fnames.values()]
def start_test(self):
try:
self.volta_core.start_test()
# FIXME raise/catch appropriate exception here
except: # noqa: E722
logger.info('Failed to start test of Android plugin', exc_info=True)
return 1
def is_test_finished(self):
try:
if hasattr(self.volta_core, 'phone'):
if hasattr(self.volta_core.phone, 'test_performer'):
if not self.volta_core.phone.test_performer:
logger.warning('There is no test performer process on the phone, interrupting test')
return 1
if not self.volta_core.phone.test_performer.is_finished():
logger.debug('Waiting for phone test to finish...')
return -1
else:
return self.volta_core.phone.test_performer.retcode
# FIXME raise/catch appropriate exception here
except: # noqa: E722
logger.error('Unknown exception of Android plugin. Interrupting test', exc_info=True)
return 1
def end_test(self, retcode):
try:
self.volta_core.end_test()
uploaders = self.core.get_plugins_of_type(DataUploaderPlugin)
for uploader in uploaders:
response = uploader.lp_job.api_client.link_mobile_job(
lp_key=uploader.lp_job.number,
mobile_key=self.volta_core.uploader.jobno
)
logger.info(
'Linked mobile job %s to %s for plugin: %s. Response: %s',
self.volta_core.uploader.jobno, uploader.lp_job.number, uploader.backend_type, response
)
# FIXME raise/catch appropriate exception here
except: # noqa: E722
logger.error('Failed to complete end_test of Android plugin', exc_info=True)
retcode = 1
return retcode
def get_info(self):
return AndroidInfo()
def post_process(self, retcode):
try:
self.volta_core.post_process()
# FIXME raise/catch appropriate exception here
except: # noqa: E722
logger.error('Failed to complete post_process of Android plugin', exc_info=True)
retcode = 1
return retcode
class AndroidInfo(object):
def __init__(self):
self.address = ''
self.port = 80
self.ammo_file = ''
self.duration = 0
self.loop_count = 1
self.instances = 1
self.rps_schedule = ''
| yandex/yandex-tank | yandextank/plugins/Android/plugin.py | Python | lgpl-2.1 | 4,425 |
# Copyright (C) 2013-2017 Chris Lalancette <[email protected]>
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation;
# version 2.1 of the License.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""
Linux installation
"""
import os
import re
import time
import libvirt
import oz.Guest
import oz.OzException
class LinuxCDGuest(oz.Guest.CDGuest):
"""
Class for Linux installation.
"""
def __init__(self, tdl, config, auto, output_disk, nicmodel, diskbus,
iso_allowed, url_allowed, macaddress, useuefi):
oz.Guest.CDGuest.__init__(self, tdl, config, auto, output_disk,
nicmodel, None, None, diskbus, iso_allowed,
url_allowed, macaddress, useuefi)
def _test_ssh_connection(self, guestaddr):
"""
Internal method to test out the ssh connection before we try to use it.
Under systemd, the IP address of a guest can come up and reportip can
run before the ssh key is generated and sshd starts up. This check
makes sure that we allow an additional 30 seconds (1 second per ssh
attempt) for sshd to finish initializing.
"""
count = 30
success = False
while count > 0:
try:
self.log.debug("Testing ssh connection, try %d", count)
start = time.time()
self.guest_execute_command(guestaddr, 'ls', timeout=1)
self.log.debug("Succeeded")
success = True
break
except oz.ozutil.SubprocessException:
# ensure that we spent at least one second before trying again
end = time.time()
if (end - start) < 1:
time.sleep(1 - (end - start))
count -= 1
if not success:
self.log.debug("Failed to connect to ssh on running guest")
raise oz.OzException.OzException("Failed to connect to ssh on running guest")
def get_default_runlevel(self, g_handle):
"""
Function to determine the default runlevel based on the /etc/inittab.
"""
runlevel = "3"
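        # For illustration (assuming a typical SysV-style inittab): a line such as
        # "id:5:initdefault:" would yield runlevel "5" from the split below.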
if g_handle.exists('/etc/inittab'):
lines = g_handle.cat('/etc/inittab').split("\n")
for line in lines:
if re.match('id:', line):
runlevel = line.split(':')[1]
break
return runlevel
def guest_execute_command(self, guestaddr, command, timeout=10):
"""
Method to execute a command on the guest and return the output.
"""
# ServerAliveInterval protects against NAT firewall timeouts
# on long-running commands with no output
#
# PasswordAuthentication=no prevents us from falling back to
# keyboard-interactive password prompting
#
# -F /dev/null makes sure that we don't use the global or per-user
# configuration files
return oz.ozutil.subprocess_check_output(["ssh", "-i", self.sshprivkey,
"-F", "/dev/null",
"-o", "ServerAliveInterval=30",
"-o", "StrictHostKeyChecking=no",
"-o", "ConnectTimeout=" + str(timeout),
"-o", "UserKnownHostsFile=/dev/null",
"-o", "PasswordAuthentication=no",
"-o", "IdentitiesOnly yes",
"root@" + guestaddr, command],
printfn=self.log.debug)
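    # Illustrative call (hypothetical guest address and command, not part of the original code):
    #   release = guest.guest_execute_command('192.168.122.50', 'uname -r', timeout=30)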
def guest_live_upload(self, guestaddr, file_to_upload, destination,
timeout=10):
"""
Method to copy a file to the live guest.
"""
self.guest_execute_command(guestaddr,
"mkdir -p " + os.path.dirname(destination),
timeout)
# ServerAliveInterval protects against NAT firewall timeouts
# on long-running commands with no output
#
# PasswordAuthentication=no prevents us from falling back to
# keyboard-interactive password prompting
#
# -F /dev/null makes sure that we don't use the global or per-user
# configuration files
return oz.ozutil.subprocess_check_output(["scp", "-i", self.sshprivkey,
"-F", "/dev/null",
"-o", "ServerAliveInterval=30",
"-o", "StrictHostKeyChecking=no",
"-o", "ConnectTimeout=" + str(timeout),
"-o", "UserKnownHostsFile=/dev/null",
"-o", "PasswordAuthentication=no",
"-o", "IdentitiesOnly yes",
file_to_upload,
"root@" + guestaddr + ":" + destination],
printfn=self.log.debug)
def _customize_files(self, guestaddr):
"""
Method to upload the custom files specified in the TDL to the guest.
"""
self.log.info("Uploading custom files")
for name, fp in list(self.tdl.files.items()):
# all of the self.tdl.files are named temporary files; we just need
# to fetch the name out and have scp upload it
self.guest_live_upload(guestaddr, fp.name, name)
def _shutdown_guest(self, guestaddr, libvirt_dom):
"""
        Method to shut down the guest (gracefully at first, then with prejudice).
"""
if guestaddr is not None:
# sometimes the ssh process gets disconnected before it can return
# cleanly (particularly when the guest is running systemd). If that
# happens, ssh returns 255, guest_execute_command throws an
# exception, and the guest is forcibly destroyed. While this
# isn't the end of the world, it isn't desirable. To avoid
# this, we catch any exception thrown by ssh during the shutdown
# command and throw them away. In the (rare) worst case, the
# shutdown will not have made it to the guest and we'll have to wait
# 90 seconds for wait_for_guest_shutdown to timeout and forcibly
# kill the guest.
try:
self.guest_execute_command(guestaddr, 'shutdown -h now')
except Exception:
pass
try:
if not self._wait_for_guest_shutdown(libvirt_dom):
                    self.log.warning("Guest did not shut down in time, going to kill")
else:
libvirt_dom = None
except Exception:
self.log.warning("Failed shutting down guest, forcibly killing")
if libvirt_dom is not None:
try:
libvirt_dom.destroy()
except libvirt.libvirtError:
# the destroy failed for some reason. This can happen if
# _wait_for_guest_shutdown times out, but the domain shuts
# down before we get to destroy. Check to make sure that the
# domain is gone from the list of running domains; if so, just
# continue on; if not, re-raise the error.
for domid in self.libvirt_conn.listDomainsID():
if domid == libvirt_dom.ID():
raise
def _collect_setup(self, libvirt_xml): # pylint: disable=unused-argument
"""
Default method to set the guest up for remote access.
"""
raise oz.OzException.OzException("ICICLE generation and customization is not implemented for guest %s" % (self.tdl.distro))
def _collect_teardown(self, libvirt_xml): # pylint: disable=unused-argument
"""
Method to reverse the changes done in _collect_setup.
"""
raise oz.OzException.OzException("ICICLE generation and customization is not implemented for guest %s" % (self.tdl.distro))
def _install_packages(self, guestaddr, packstr): # pylint: disable=unused-argument
"""
        Internal method to install packages; expected to be overridden by
child classes.
"""
raise oz.OzException.OzException("Customization is not implemented for guest %s" % (self.tdl.distro))
def _customize_repos(self, guestaddr): # pylint: disable=unused-argument
"""
        Internal method to customize repositories; expected to be overridden by
child classes.
"""
raise oz.OzException.OzException("Customization is not implemented for guest %s" % (self.tdl.distro))
def _remove_repos(self, guestaddr): # pylint: disable=unused-argument
"""
        Internal method to remove repositories; expected to be overridden by
child classes.
"""
raise oz.OzException.OzException("Repository removal not implemented for guest %s" % (self.tdl.distro))
def do_customize(self, guestaddr):
"""
Method to customize by installing additional packages and files.
"""
if not self.tdl.packages and not self.tdl.files and not self.tdl.commands:
# no work to do, just return
return
self._customize_repos(guestaddr)
for cmd in self.tdl.precommands:
self.guest_execute_command(guestaddr, cmd.read())
self.log.debug("Installing custom packages")
packstr = ''
for package in self.tdl.packages:
packstr += '"' + package.name + '" '
if packstr != '':
self._install_packages(guestaddr, packstr)
self._customize_files(guestaddr)
self.log.debug("Running custom commands")
for cmd in self.tdl.commands:
self.guest_execute_command(guestaddr, cmd.read())
self.log.debug("Removing non-persisted repos")
self._remove_repos(guestaddr)
self.log.debug("Syncing")
self.guest_execute_command(guestaddr, 'sync')
def do_icicle(self, guestaddr):
"""
Default method to collect the package information and generate the
ICICLE XML.
"""
raise oz.OzException.OzException("ICICLE generation is not implemented for this guest type")
def _internal_customize(self, libvirt_xml, action):
"""
Internal method to customize and optionally generate an ICICLE for the
operating system after initial installation.
"""
# the "action" input is actually a tri-state:
# action = "gen_and_mod" means to generate the icicle and to
# potentially make modifications
# action = "gen_only" means to generate the icicle only, and not
# look at any modifications
# action = "mod_only" means to not generate the icicle, but still
# potentially make modifications
self.log.info("Customizing image")
if not self.tdl.packages and not self.tdl.files and not self.tdl.commands:
if action == "mod_only":
self.log.info("No additional packages, files, or commands to install, and icicle generation not requested, skipping customization")
return
elif action == "gen_and_mod":
# It is actually possible to get here with a "gen_and_mod"
# action but a TDL that contains no real customizations.
# In the "safe ICICLE" code below it is important to know
# when we are truly in a "gen_only" state so we modify
# the action here if we detect that ICICLE generation is the
# only task to be done.
self.log.debug("Asked to gen_and_mod but no mods are present - changing action to gen_only")
action = "gen_only"
# when doing an oz-install with -g, this isn't necessary as it will
# just replace the port with the same port. However, it is very
# necessary when doing an oz-customize since the serial port might
# not match what is specified in the libvirt XML
modified_xml = self._modify_libvirt_xml_for_serial(libvirt_xml)
if action == "gen_only" and self.safe_icicle_gen:
# We are only generating ICICLE and the user has asked us to do
# this without modifying the completed image by booting it.
# Create a copy on write snapshot to use for ICICLE
# generation - discard when finished
cow_diskimage = self.diskimage + "-icicle-snap.qcow2"
self._internal_generate_diskimage(force=True,
backing_filename=self.diskimage,
image_filename=cow_diskimage)
modified_xml = self._modify_libvirt_xml_diskimage(modified_xml, cow_diskimage, 'qcow2')
self._collect_setup(modified_xml)
icicle = None
try:
libvirt_dom = self.libvirt_conn.createXML(modified_xml, 0)
try:
guestaddr = None
guestaddr = self._wait_for_guest_boot(libvirt_dom)
self._test_ssh_connection(guestaddr)
if action == "gen_and_mod":
self.do_customize(guestaddr)
icicle = self.do_icicle(guestaddr)
elif action == "gen_only":
icicle = self.do_icicle(guestaddr)
elif action == "mod_only":
self.do_customize(guestaddr)
else:
raise oz.OzException.OzException("Invalid customize action %s; this is a programming error" % (action))
finally:
if action == "gen_only" and self.safe_icicle_gen:
# if this is a gen_only and safe_icicle_gen, there is no
# reason to wait around for the guest to shutdown; we'll
# be removing the overlay file anyway. Just destroy it
libvirt_dom.destroy()
else:
self._shutdown_guest(guestaddr, libvirt_dom)
finally:
if action == "gen_only" and self.safe_icicle_gen:
# no need to teardown because we simply discard the file
# containing those changes
os.unlink(cow_diskimage)
else:
self._collect_teardown(modified_xml)
return icicle
def customize(self, libvirt_xml):
"""
Method to customize the operating system after installation.
"""
return self._internal_customize(libvirt_xml, "mod_only")
def customize_and_generate_icicle(self, libvirt_xml):
"""
Method to customize and generate the ICICLE for an operating system
after installation. This is equivalent to calling customize() and
generate_icicle() back-to-back, but is faster.
"""
return self._internal_customize(libvirt_xml, "gen_and_mod")
def generate_icicle(self, libvirt_xml):
"""
Method to generate the ICICLE from an operating system after
installation. The ICICLE contains information about packages and
other configuration on the diskimage.
"""
return self._internal_customize(libvirt_xml, "gen_only")
| nullr0ute/oz | oz/Linux.py | Python | lgpl-2.1 | 16,373 |
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2019 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
""" Loaders plugin manager """
from __future__ import print_function
import os.path
from fife import fife
from fife.extensions.serializers.xmlmap import XMLMapLoader
mapFileMapping = { 'xml' : XMLMapLoader}
fileExtensions = set(['xml'])
def loadMapFile(path, engine, callback=None, debug=True, extensions={}):
""" load map file and get (an optional) callback if major stuff is done:
- map creation
- parsed imports
- parsed layers
- parsed cameras
the callback will send both a string and a float (which shows
the overall process), callback(string, float)
@type engine: object
@param engine: FIFE engine instance
@type callback: function
@param callback: callback for maploading progress
@type debug: bool
@param debug: flag to activate / deactivate print statements
@rtype object
@return FIFE map object
"""
(filename, extension) = os.path.splitext(path)
map_loader = mapFileMapping[extension[1:]](engine, callback, debug, extensions)
map = map_loader.loadResource(path)
if debug: print("--- Loading map took: ", map_loader.time_to_load, " seconds.")
return map
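# Illustrative use of loadMapFile (sketch only; assumes an initialized FIFE engine
# instance named `engine` and a map file at "maps/example.xml"):
#
#   def on_progress(message, progress):
#       print(message, progress)
#
#   example_map = loadMapFile("maps/example.xml", engine, callback=on_progress)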
def addMapLoader(fileExtension, loaderClass):
"""Add a new loader for fileextension
@type fileExtension: string
@param fileExtension: The file extension the loader is registered for
@type loaderClass: object
@param loaderClass: A fife.ResourceLoader implementation that loads maps
from files with the given fileExtension
"""
mapFileMapping[fileExtension] = loaderClass
_updateMapFileExtensions()
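# Illustrative registration of an additional loader (ZippedMapLoader is a hypothetical class):
#   addMapLoader('zip', ZippedMapLoader)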
def _updateMapFileExtensions():
global fileExtensions
fileExtensions = set(mapFileMapping.keys())
| fifengine/fifengine | engine/python/fife/extensions/loaders.py | Python | lgpl-2.1 | 2,646 |
from __future__ import print_function
from __future__ import absolute_import
from enigma import *
from Screens.MessageBox import MessageBox
from Screens.Standby import TryQuitMainloop
from . crossepglib import *
from . crossepg_downloader import CrossEPG_Downloader
from . crossepg_importer import CrossEPG_Importer
from . crossepg_converter import CrossEPG_Converter
from . crossepg_loader import CrossEPG_Loader
from . crossepg_setup import CrossEPG_Setup
from . crossepg_menu import CrossEPG_Menu
from . crossepg_auto import CrossEPG_Auto
class CrossEPG_Main:
def __init__(self):
self.config = CrossEPG_Config()
self.patchtype = getEPGPatchType()
def downloader(self, session):
self.session = session
CrossEPG_Auto.instance.lock = True
CrossEPG_Auto.instance.stop()
self.config.load()
if self.config.configured == 0:
self.session.openWithCallback(self.configureCallback, MessageBox, _("You need to configure crossepg before starting downloader.\nWould You like to do it now ?"), type=MessageBox.TYPE_YESNO)
else:
self.config.deleteLog()
self.session.openWithCallback(self.downloadCallback, CrossEPG_Downloader, self.config.providers)
def configureCallback(self, result):
if result is True:
self.session.open(CrossEPG_Setup)
def loaderAsPlugin(self, session):
self.session = session
CrossEPG_Auto.instance.lock = True
CrossEPG_Auto.instance.stop()
self.loader()
def downloadCallback(self, ret):
if ret:
if self.config.csv_import_enabled == 1:
self.importer()
else:
if self.patchtype != 3:
self.converter()
else:
self.loader()
else:
CrossEPG_Auto.instance.lock = False
def importer(self):
self.session.openWithCallback(self.importerCallback, CrossEPG_Importer)
def importerCallback(self, ret):
if ret:
if self.patchtype != 3:
self.converter()
else:
self.loader()
else:
CrossEPG_Auto.instance.lock = False
def converter(self):
self.session.openWithCallback(self.converterCallback, CrossEPG_Converter)
def converterCallback(self, ret):
if ret:
if self.patchtype != -1:
self.loader()
else:
if self.config.download_manual_reboot:
self.session.open(TryQuitMainloop, 3)
else:
CrossEPG_Auto.instance.lock = False
else:
CrossEPG_Auto.instance.lock = False
def loader(self):
self.session.openWithCallback(self.loaderCallback, CrossEPG_Loader)
def loaderCallback(self, ret):
CrossEPG_Auto.instance.lock = False
def setup(self, session, **kwargs):
CrossEPG_Auto.instance.lock = True
session.openWithCallback(self.setupCallback, CrossEPG_Menu)
def setupCallback(self):
CrossEPG_Auto.instance.lock = False
CrossEPG_Auto.instance.doneConfiguring()
crossepg_main = CrossEPG_Main()
| oe-alliance/e2openplugin-CrossEPG | src/enigma2/python/crossepg_main.py | Python | lgpl-2.1 | 2,743 |
# -*- coding: utf-8 -*-
#--------------------------------------------------------------------#
# This file is part of Py-cnotify. #
# #
# Copyright (C) 2007, 2008 Paul Pogonyshev. #
# #
# This library is free software; you can redistribute it and/or #
# modify it under the terms of the GNU Lesser General Public License #
# as published by the Free Software Foundation; either version 2.1 #
# of the License, or (at your option) any later version. #
# #
# This library is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU #
# Lesser General Public License for more details. #
# #
# You should have received a copy of the GNU Lesser General Public #
# License along with this library; if not, write to the Free #
# Software Foundation, Inc., 51 Franklin Street, Fifth Floor, #
# Boston, MA 02110-1301 USA #
#--------------------------------------------------------------------#
"""
cNotify package provides three main concepts: I{L{signals <signal>}}, I{L{conditions
<condition>}} and I{L{variables <variable>}}. Signals are basically lists of callables
that can be I{emitted} and then will call all contained callables (I{handler} of a signal)
in turn. Conditions are boolean values complemented with a signal that is emitted when
condition’s I{state} changes. Variables are akin to conditions but can hold arbitrary
I{values}, not just booleans. Conditions, unlike variables, can also be combined using
standard logic operators, like negation, conjunction and so on.
All three concepts provide separation between providers (writers, setters) and listeners
(readers, getters) of some entity. Conditions and variables make the entity explicit—it
is a boolean state for the former and arbitrary Python object for the latter (though
derived variable classes can restrict the set of allowed values.)
Here is a quick example:
>>> from cnotify.variable import *
... name = Variable ()
...
... import sys
... name.changed.connect (
... lambda string: sys.stdout.write ('Hello there, %s!\\n' % string))
...
... name.value = 'Chuk'
Note that when setting the C{name} variable, you don’t need to know who, if anyone,
listens to changes to it. Interested parties take care to express their interest
themselves and are informed upon a change automatically.
Here is a little more elaborate example with the same functionality (it requires U{PyGTK
<http://pygtk.org/>}):
>>> from cnotify.variable import *
... import gtk
...
... name = Variable ()
...
... def welcome_user (name_string):
... dialog = gtk.MessageDialog (None, 0, gtk.MESSAGE_INFO, gtk.BUTTONS_OK,
... 'Hello there, %s!' % name_string)
... dialog.run ()
... dialog.destroy ()
...
... name.changed.connect (welcome_user)
...
... def set_name_from_entry (entry):
... name.value = entry.get_text ()
...
... window = gtk.Window ()
... window.set_title ('Enter name')
...
... entry = gtk.Entry ()
... entry.show ()
... window.add (entry)
...
... entry.connect ('activate', set_name_from_entry)
... window.connect ('destroy', lambda window: gtk.main_quit ())
...
... window.present ()
...
... gtk.main ()
Note that C{window} knows absolutely nothing about how changes to C{name} variable are
handled. If you play with this example, you will notice one thing: pressing C{Enter} in
the main window twice doesn’t pop the welcoming dialog twice. That is because both
conditions and variables emit their ‘changed’ signal I{only} when their state/value
actually changes, not on every assignment.
Now a final, quite complicated, example introducing conditions and some other features:
>>> from cnotify.all import *
...
... pilots = Variable ()
... fuel = Variable ()
...
... import sys
...
... pilots.changed.connect (
... lambda pilots: sys.stdout.write ('Pilots are %s\\n' % pilots))
... fuel.changed.connect (
... lambda amount: sys.stdout.write ('Got %d litres of fuel\\n' % amount))
...
... def ready_state_changed (ready):
... if ready:
... sys.stdout.write ('Ready to get off!\\n')
... else:
... sys.stdout.write ('Missing pilots or fuel\\n')
...
... ready = pilots.is_true () & fuel.predicate (lambda amount: amount > 0)
... ready.store (ready_state_changed)
...
... pilots.value = 'Jen and Jim'
... fuel.value = 500
...
... fuel.value = 0
First line of example shows a way to save typing by importing all package contents at
once. Whether to use this technique is up to you. Following lines up to C{ready = ...}
should be familiar.
Now let’s consider that assignment closer. First, C{L{pilots.is_true ()
<variable.AbstractVariable.is_true>}} code creates a condition that is true depending on
C{pilots} value (true for non-empty sequences in our case.) It is just a convenience
wrapper over C{L{AbstractVariable.predicate <variable.AbstractVariable.predicate>}}
method. Now, the latter is also used directly in this line of code. It creates a
condition that is true as long as variable’s value conforms to the passed in predicate.
In particular, C{fuel.predicate (lambda amount: amount > 0)} creates a condition that is
true if C{fuel}’s value is greater than zero. Predicate conditions will recompute their
state each time variable’s value changes and that’s the point in using them.
Finally, two just constructed conditions are combined into a third condition using ‘and’
operator (C{&}). This third condition will be true if and only if I{both} its term
conditions are true. Conditions support four logic operations: negation, conjunction,
disjunction and xoring (with these operators: C{~}, C{&}, C{|} and C{^}.) In addition,
each condition has C{L{if_else <condition.AbstractCondition.if_else>}} method, which is
much like Python’s C{if} operator.
The next line introduces one more new method: C{L{store
<base.AbstractValueObject.store>}}. It is really just like connecting its only argument
to the ‘changed’ signal, except that it is also called once with the current state of the
condition (or value of a variable.)
The example should produce this output::
Missing pilots or fuel
Pilots are Jen and Jim
Got 500 litres of fuel
Ready to get off!
Got 0 litres of fuel
Missing pilots or fuel
Notable here is the output from C{ready_state_changed} function. It is called once at the
beginning from the C{store} method with the state of C{ready} condition (then C{False}.)
Both later calls correspond to changes in C{ready}’s state. When both C{pilots} and
C{fuel} variables are set, corresponding predicate conditions become true and so does the
C{ready} condition. However, when one of the predicate conditions becomes false (as the
result of C{fuel} being set to zero), C{ready} turns false again. Note that
C{ready_state_changed} is not called in between of setting C{pilots} and C{fuel} variable.
C{ready} state is recomputed, but since it remains the same, ‘changed’ signal is not
emitted.
G{packagetree}
"""
__docformat__ = 'epytext en'
# CONFIGURATION
__version__ = '0.3.2.1'
"""
Version of Py-cnotify, as a string.
"""
version_tuple = (0, 3, 2, 1)
"""
Version of Py-cnotify, as a tuple of integers. It is guaranteed that version tuples of
later versions will compare greater that those of earlier versions.
"""
# /CONFIGURATION
# Local variables:
# mode: python
# python-indent: 4
# indent-tabs-mode: nil
# fill-column: 90
# End:
| kived/py-cnotify | cnotify/__init__.py | Python | lgpl-2.1 | 8,208 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Backend.AI Library documentation build configuration file, created by
# sphinx-quickstart on Tue Mar 1 21:26:20 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
on_rtd = os.environ.get('READTHEDOCS') == 'True'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Backend.AI API Documentation'
copyright = '2015-2020, Lablup Inc.'
author = 'Lablup Inc.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'v5.20191215'
# The full version, including alpha/beta/rc tags.
release = '20.03'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'tango'
highlight_language = 'python3'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
numfig = True
intersphinx_mapping = {
'client-py':
('https://client-py.docs.backend.ai/en/latest/', None),
}
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'BackendAIAPIDoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'BackendAIDoc.tex', 'Backend.AI API Documentation',
author, 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'backend.ai', 'Backend.AI API Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Backend.AI', 'Backend.AI API Documentation',
author, 'Backend.AI', 'Backend.AI is a hassle-free backend for AI programming and service.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| lablup/sorna | docs/conf.py | Python | lgpl-3.0 | 6,409 |
import math
import socket
import tempfile
import unittest
from contextlib import closing
import numpy as np
from shyft.api import (
Calendar, UtcPeriod,
DtsServer, DtsClient,
TimeAxis, TimeSeries, POINT_AVERAGE_VALUE, POINT_INSTANT_VALUE
)
from shyft.pyapi import fixed_tsv, windowed_percentiles_tsv, period_percentiles_tsv, selector_ts
def find_free_port() -> int:
"""
from SO https://stackoverflow.com/questions/1365265/on-localhost-how-to-pick-a-free-port-number
:return: available port number for use
"""
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
s.bind(('', 0))
return s.getsockname()[1]
class SelectorTsTestCase(unittest.TestCase):
def setUp(self) -> None:
self.port = find_free_port()
self.server = DtsServer()
self.server.set_listening_port(self.port)
self.server.start_async()
self.client = DtsClient(rf'localhost:{self.port}')
def tearDown(self) -> None:
self.server.clear()
del self.server
del self.port
def test_fixed_tsv_empty(self) -> None:
"""Test that an empty TsVector is generated by fixed_tsv when given an empty sequence of values."""
cal = Calendar()
period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
tsv = fixed_tsv(period, [])
self.assertEqual(len(tsv), 0)
def test_fixed_tsv_values(self) -> None:
"""Test that a TsVector with fixed constant values is generated by fixed_tsv when given
a sequence of values."""
cal = Calendar()
period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
values = [12, 15.5]
tsv = fixed_tsv(period, values)
self.assertEqual(len(tsv), 2)
for v, ts in zip(values, tsv):
for ts_v in ts.values:
self.assertEqual(ts_v, v)
def test_windowed_percentiles_tsv_empty(self) -> None:
"""Test that an empty TsVector is generated by windowed_percentiles_tsv
when given an empty sequence of percentiles."""
cal = Calendar()
period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
data = np.linspace(-2, 2, 24*7)
data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data, POINT_INSTANT_VALUE)
# compute
tsv = windowed_percentiles_tsv(
data_ts, period,
Calendar.HOUR, Calendar.HOUR,
[],
self.client, cal
)
self.assertEqual(len(tsv), 0)
def test_windowed_percentiles_tsv_values(self) -> None:
"""Test that a TsVector is generated by windowed_percentiles_tsv with time-series
fulfilling some properties of being percentiles of the data ts."""
cal = Calendar()
period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
data = np.linspace(-2, 2, 24*7)
data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data, POINT_INSTANT_VALUE)
# compute
percentiles = [0, 10, 50, 90, 100]
tsv = windowed_percentiles_tsv(
data_ts, period,
3*Calendar.HOUR, 12*Calendar.HOUR,
percentiles,
self.client, cal
)
self.assertEqual(len(tsv), 5)
# assert that the time-series have the correct properties for being percentile series
for i in range(len(tsv[0])):
prev_v = tsv[0].values[i]
for j in range(len(percentiles)-1):
v = tsv[j+1].values[i]
# both values will be NaN at the end - that is ok
if math.isnan(prev_v) and math.isnan(v):
continue
                # check that larger percentiles never have values smaller than lower percentiles
self.assertLessEqual(prev_v, v)
prev_v = v
def test_period_percentiles_tsv_empty(self) -> None:
"""Test that an empty TsVector is generated by period_percentiles_tsv
when given an empty sequence of percentiles."""
cal = Calendar()
period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
data = np.linspace(-2, 2, 24*7)
data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data, POINT_INSTANT_VALUE)
# compute
tsv = period_percentiles_tsv(
data_ts, period,
3*Calendar.HOUR, period,
[],
self.client, cal
)
self.assertEqual(len(tsv), 0)
def test_period_percentiles_tsv_values(self) -> None:
"""Test that a TsVector is generated by period_percentiles_tsv with time-series
fulfilling some properties of being percentiles of the data ts."""
cal = Calendar()
period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))
data = np.linspace(-2, 2, 24*7)
data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data, POINT_INSTANT_VALUE)
# compute
percentiles = [0, 10, 50, 90, 100]
tsv = period_percentiles_tsv(
data_ts, period,
3*Calendar.HOUR, period,
percentiles,
self.client, cal
)
self.assertEqual(len(tsv), 5)
# assert that the time-series have the correct properties for being percentile series
for i in range(len(tsv[0])):
prev_v = tsv[0].values[i]
for j in range(len(percentiles)-1):
v = tsv[j+1].values[i]
# both values will be NaN at the end - that is ok
if math.isnan(prev_v) and math.isnan(v):
continue
                # check that larger percentiles never have values smaller than lower percentiles
self.assertLessEqual(prev_v, v)
prev_v = v
def test_selector_ts(self) -> None:
"""Test that selector_ts constructs a time-series selects data from different time-series correctly."""
n = 24
cal = Calendar()
period = UtcPeriod(0, n*Calendar.HOUR)
data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, n), np.linspace(-10, 10, n), POINT_INSTANT_VALUE)
source_tss = [
TimeSeries(TimeAxis(0, Calendar.HOUR, n), 1.00*np.ones(n), POINT_INSTANT_VALUE),
TimeSeries(TimeAxis(0, Calendar.HOUR, n), 10.0*np.ones(n), POINT_INSTANT_VALUE),
TimeSeries(TimeAxis(0, Calendar.HOUR, n), 100.*np.ones(n), POINT_INSTANT_VALUE),
]
threshold_1 = -5
threshold_2 = 5
threshold_tss = [
TimeSeries(TimeAxis(0, Calendar.HOUR, n), threshold_1*np.ones(n), POINT_INSTANT_VALUE),
TimeSeries(TimeAxis(0, Calendar.HOUR, n), threshold_2*np.ones(n), POINT_INSTANT_VALUE),
]
ts = selector_ts(
data_ts, period, 2*Calendar.HOUR,
threshold_tss, source_tss,
POINT_AVERAGE_VALUE,
self.client, cal
)
self.assertEqual(len(data_ts), len(ts))
for dv, rv in zip(data_ts.values, ts.values):
if dv < threshold_1:
self.assertEqual(rv, source_tss[0].values[0])
elif threshold_1 <= dv < threshold_2:
self.assertEqual(rv, source_tss[1].values[0])
else:
self.assertEqual(rv, source_tss[2].values[0])
| jfburkhart/shyft | shyft/tests/pyapi/test_selector_ts.py | Python | lgpl-3.0 | 7,317 |
#!/usr/bin/env python
"""
RSS Reader for C-Power 1200
Copyright 2010-2012 Michael Farrell <http://micolous.id.au/>
This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from cpower1200 import *
import feedparser
from sys import argv
FEED = 'http://news.google.com.au/news?pz=1&cf=all&ned=au&hl=en&output=rss'
d = feedparser.parse(FEED)
s = CPower1200(argv[1])
# Define one window at the top of the screen, and one in the lower part of the screen
s.send_window(dict(x=0, y=0, h=8, w=64), dict(x=0, y=8, h=8, w=64))
header = s.format_text(d.feed.title, RED, 0)
articles = ''
for i, article in enumerate(d.entries[:4]):
print "entry %d: %s" % (i, article.title)
colour = YELLOW if i % 2 == 0 else GREEN
articles += s.format_text(article.title + ' ', colour)
# send to sign
#s.send_text(0, header, effect=EFFECT_NONE)
s.send_clock(0, display_year=False, display_month=False, display_day=False, display_hour=True, display_minute=True, display_second=True, multiline=False, red=255,green=0,blue=0)
s.send_text(1, articles, speed=10)
| micolous/ledsign | cpower1200_rss.py | Python | lgpl-3.0 | 1,615 |
# -*- coding: utf-8 -*-
from distutils.core import setup
import os.path
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
]
def read(fname):
fname = os.path.join(os.path.dirname(__file__), fname)
return open(fname).read().strip()
def read_files(*fnames):
return '\r\n\r\n\r\n'.join(map(read, fnames))
setup(
name = 'icall',
version = '0.3.4',
py_modules = ['icall'],
description = 'Parameters call function, :-)',
long_description = read_files('README.rst', 'CHANGES.rst'),
author = 'huyx',
author_email = '[email protected]',
url = 'https://github.com/huyx/icall',
keywords = ['functools', 'function', 'call'],
classifiers = classifiers,
)
| huyx/icall | setup.py | Python | lgpl-3.0 | 1,026 |
{
'name' : 'Signature templates for user emails',
'version' : '1.0.0',
'author' : 'IT-Projects LLC, Ivan Yelizariev',
'license': 'LGPL-3',
'category' : 'Social Network',
'website' : 'https://yelizariev.github.io',
'depends' : ['base'],
'data':[
'res_users_signature_views.xml',
'security/res_users_signature_security.xml',
'security/ir.model.access.csv',
],
'installable': False
}
| iledarn/addons-yelizariev | res_users_signature/__openerp__.py | Python | lgpl-3.0 | 445 |
# -*- coding: utf-8 -*-
# Some utils
import hashlib
import uuid
def get_hash(data):
"""Returns hashed string"""
return hashlib.sha256(data).hexdigest()
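# Illustrative use (note that hashlib.sha256 expects bytes on Python 3):
#   get_hash(b'some data') returns a 64-character hexadecimal SHA-256 digest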
def get_token():
return str(uuid.uuid4())
| aluminiumgeek/organic | utils.py | Python | lgpl-3.0 | 212 |
#!/usr/bin/python
# -*- coding: latin-1 -*-
# Copyright 2015 Oeyvind Brandtsegg
#
# This file is part of the Signal Interaction Toolkit
#
# The Signal Interaction Toolkit is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3
# as published by the Free Software Foundation.
#
# The Signal Interaction Toolkit is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with The Signal Interaction Toolkit.
# If not, see <http://www.gnu.org/licenses/>.
import sys
if sys.argv[1] == 'template':
effectname = 'template'
parameters = [('Vol', (0.0, 1.0, 0.5, 0.25, 0.00001))]
# pName, (min, max, default, skew, increment)
    # where skew is a dynamic adjustment of exp/lin/log translation of the GUI widget
# and increment is the smallest change allowed by the GUI widget
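    # For illustration: the 'Vol' tuple above ends up in the generated rslider widget as
    # range(0.0, 1.0, 0.5, 0.25, 0.00001), i.e. range(min, max, default, skew, increment)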
if sys.argv[1] == 'stereopan':
effectname = 'stereopan'
parameters = [('Pan', (0.0, 1.0, 0.5, 1, 0.001)),
('Mix', (0.0, 1.0, 0.5, 1, 0.001))]
if sys.argv[1] == 'tremolam':
effectname = 'tremolam'
parameters = [('Depth', (0.0, 1.0, 0.5, 0.25, 0.001)),
('RateLow', (0.0, 10.0, 0.5, 0.25, 0.001)),
('RateHigh', (0.0, 500.0, 0.5, 0.25, 0.001))]
if sys.argv[1] == 'vst_mediator':
effectname = 'vst_mediator'
parameters = [('parm1', (0.0, 1.0, 0.5, 1, 0.001)),
('parm2', (0.0, 1.0, 0.5, 1, 0.001)),
('parm3', (0.0, 1.0, 0.5, 1, 0.001)),
('parm4', (0.0, 1.0, 0.5, 1, 0.001)),
('parm5', (0.0, 1.0, 0.5, 1, 0.001)),
('parm6', (0.0, 1.0, 0.5, 1, 0.001)),
('parm7', (0.0, 1.0, 0.5, 1, 0.001)),
('parm8', (0.0, 1.0, 0.5, 1, 0.001))
]
if sys.argv[1] == 'vst_MIDIator':
effectname = 'vst_MIDIator'
parameters = [('parm1', (0.0, 1.0, 0.5, 1, 0.001)),
('parm2', (0.0, 1.0, 0.5, 1, 0.001)),
('parm3', (0.0, 1.0, 0.5, 1, 0.001)),
('parm4', (0.0, 1.0, 0.5, 1, 0.001)),
('parm5', (0.0, 1.0, 0.5, 1, 0.001)),
('parm6', (0.0, 1.0, 0.5, 1, 0.001)),
('parm7', (0.0, 1.0, 0.5, 1, 0.001)),
('parm8', (0.0, 1.0, 0.5, 1, 0.001))
]
if sys.argv[1] == 'stereodelay':
effectname = 'stereodelay'
parameters = [('delaytime', (0.0008, 2.0, 0.5, 0.25, 0.00001)),
('filt_fq', (100, 10000, 1000, 0.35, 1)),
('feedback', (0.0, 0.9999, 0.3, 1.9, 0.0001))
]
if sys.argv[1] == 'pluck':
effectname = 'pluck'
parameters = [('inlevel', (0, 1.0, 1, 0.3, 0.01)),
('freq', (1, 1450, 400, 0.3, 0.01)),
('filt_fq', (1000, 16000, 7000, 0.35, 1)),
('feedback', (0.8, 0.9999, 0.95, 1.9, 0.0001)),
('mix', (0, 1.0, 1, 0.3, 0.01))
]
if sys.argv[1] == 'lpf18dist':
effectname = 'lpf18dist'
parameters = [('Drive', (1, 12, 2, 1, 0.1)),
('Freq', (20, 10000, 3000, 0.35, 1)),
('Resonance', (0.001, 0.95, 0.3, 1, 0.001)),
('Dist', (0.001, 10, 0.2, 0.5, 0.001)),
('Mix', (0.0, 1.0, 1.0, 1, 0.01)),
]
if sys.argv[1] == 'screverb':
effectname = 'screverb'
parameters = [('InLevel', (0, 1.0, 0.2, 0.3, 0.01)),
('Feed', (0.0, 1.0, 0.85, 1.2, 0.01)),
('FiltFq', (100, 14000, 7000, 0.6, 1)),
('PitchMod', (0.0, 4.0, 0.9, 1, 0.01)),
('PreDly', (0.0, 500, 120, 1, 1)),
('LfRoll', (20, 500, 90, 1, 1)),
('Mix', (0.0, 1.0, 1.0, 1, 0.01))
]
if sys.argv[1] == 'freeverb':
effectname = 'freeverb'
parameters = [('inlevel', (0, 1.0, 1.0, 0.3, 0.01)),
('reverbtime', (0.0, 8.0, 1.5, 0.4, 0.01)),
('reverbdamp', (0.0, 1.0, 0.25, 0.6, 0.01)),
('reverbmix', (0.0, 1.0, 0.7, 1, 0.01))
]
if sys.argv[1] == 'mincertime':
effectname = 'mincertime'
parameters = [('inlevel', (0, 1.0, 1, 0.3, 0.01)),
('timpoint', (0, 0.99, 0.1, 0.4, 0.001)),
('pitch', (0.0, 2.0, 1.0, 1, 0.01)),
('feedback', (0.0, 1.0, 0.0, 1, 0.01)),
('mix', (0, 1.0, 1, 0.3, 0.01))
]
if sys.argv[1] == 'plucktremlpfverb':
effectname = 'plucktremlpfverb'
parameters = [('inlevel', (0, 1.0, 1, 0.3, 0.01)),
('pluckfreq', (1, 1450, 400, 0.3, 0.01)),
('pluckfilt', (1000, 16000, 7000, 0.35, 1)),
('pluckfeed', (0.8, 0.9999, 0.95, 1.9, 0.0001)),
('pluckmix', (0, 1.0, 1, 0.3, 0.01)),
('tremDepth', (0.0, 1.0, 0.5, 0.25, 0.001)),
('tRateLow', (0.0, 10.0, 0.5, 0.25, 0.001)),
('tRateHigh', (0.0, 500.0, 0.5, 0.25, 0.001)),
('lpfDrive', (1, 12, 2, 1, 0.1)),
('lpfFreq', (20, 10000, 3000, 0.35, 1)),
('lpfResonance', (0.001, 0.95, 0.3, 1, 0.001)),
('lpfDist', (0.001, 10, 0.2, 0.5, 0.001)),
('lpfMix', (0.0, 1.0, 1.0, 1, 0.01)),
('reverbtime', (0.0, 8.0, 1.5, 0.4, 0.01)),
('reverbdamp', (0.0, 1.0, 0.25, 0.6, 0.01)),
('reverbmix', (0.0, 1.0, 0.7, 1, 0.01))
]
if sys.argv[1] == 'mincerpanverb':
effectname = 'mincerpanverb'
parameters = [('inlevel', (0, 1.0, 1, 0.3, 0.01)),
('mincertime', (0, 0.99, 0.1, 0.4, 0.001)),
('mincerpitch', (0.0, 2.0, 1.0, 1, 0.01)),
('mincerfeed', (0.0, 1.0, 0.0, 1, 0.01)),
('mincermix', (0, 1.0, 1, 0.3, 0.01)),
('Pan', (0.0, 1.0, 0.5, 1, 0.001)),
('panMix', (0.0, 1.0, 0.5, 1, 0.001)),
('reverbtime', (0.0, 8.0, 1.5, 0.4, 0.01)),
('reverbdamp', (0.0, 1.0, 0.25, 0.6, 0.01)),
('reverbmix', (0.0, 1.0, 0.7, 1, 0.01))
]
#
scorefile = open(effectname+'_score_events.inc', 'w')
fractionalinstr = 0
for p in parameters:
fractionalinstr += 1
scorefile.write('i4.{fracinstr:02d} 3.1 $SCORELEN "{pname}"\n'.format(fracinstr=fractionalinstr, pname=p[0]))
#
chn_init_file = open(effectname+'_parameter_ranges.inc', 'w')
instr_template = '''
instr 1
; list of min and max for the mappable parameters
{}
endin
'''
parameter_ranges = ''
for i in range(len(parameters)):
parm = parameters[i]
parameter_ranges += ' chnset {}, "{}_min" \n'.format(parm[1][0], parm[0])
parameter_ranges += ' chnset {}, "{}_max" \n'.format(parm[1][1], parm[0])
chn_init_file.write(instr_template.format(parameter_ranges))
#
start_x_pos = 30
start_y_pos = 5
plant_height = 85
analysis_parms = '"rms", "rms_preEq", "cps", "pitch", "centroid", "spread", "skewness", "kurtosis", "flatness", "crest", "flux", "amp_trans", "amp_t_dens", "centr_trans", "centr_t_dens", "kurt_trans", "pitchup_trans", "pitchdown_trans", "cps_raw"'
plant = '''groupbox bounds({start_y}, {start_x}, 564, 81), plant("plant_{pname}"), linethickness("0"){{
combobox channel("source1_{pname}"), bounds(10, 12, 90, 20), items({analysis_p}), value(1), channeltype("string")
combobox channel("chan1_{pname}"), bounds(103, 12, 50, 20), items("1", "2", "3", "4"), value(1)
numberbox bounds(158, 14, 35, 15), channel("rise1_{pname}"), range(0.01, 10.0, 0.01)
numberbox bounds(196, 14, 35, 15), channel("fall1_{pname}"), range(0.01, 10.0, 0.5)
hslider bounds(233, 12, 86, 20), channel("scale1_{pname}"), range(-1.0, 1.0, 0, 1, 0.01)
button bounds(320, 12, 29, 19), channel("scale1_x_{pname}"), text("x 1","x 10"),
hslider bounds(349, 12, 86, 20), channel("curve1_{pname}"), range(-5.0, 5.0, 0)
combobox channel("source2_{pname}"), bounds(10, 34, 90, 20), items({analysis_p}), value(1), channeltype("string")
combobox channel("chan2_{pname}"), bounds(103, 34, 50, 20), items("1", "2", "3", "4"), value(1)
numberbox bounds(158, 36, 35, 15), channel("rise2_{pname}"), range(0.01, 10.0, 0.01)
numberbox bounds(196, 36, 35, 15), channel("fall2_{pname}"), range(0.01, 10.0, 0.5)
hslider bounds(233, 34, 86, 20), channel("scale2_{pname}"), range(-1.0, 1.0, 0, 1, 0.01)
button bounds(320, 34, 29, 19), channel("scale2_x_{pname}"), text("x 1","x 10"),
hslider bounds(349, 34, 86, 20), channel("curve2_{pname}"), range(-5.0, 5.0, 0)
label bounds(10, 58, 90, 12), text("source"), colour(20,20,20,255)
label bounds(103, 58, 50, 12), text("chan"), colour(20,20,20,255)
label bounds(156, 58, 76, 12), text("rise/fall"), colour(20,20,20,255)
label bounds(236, 58, 110, 12), text("scale"), colour(20,20,20,255)
label bounds(352, 58, 81, 12), text("curve"), colour(20,20,20,255)
rslider bounds(433, 12, 62, 62), text("offset"), channel("offset_{pname}"), range({p_min}, {p_max}, {p_default}, {p_skew}, {p_incr})
combobox bounds(433, 1, 55, 12), channel("offsetx_{pname}"), items("-1", "Norm", "+1"), value(2), channeltype("string")
rslider bounds(494, 8, 66, 66), text("{pname}"), channel("{pname}"), range({p_min}, {p_max}, {p_default}, {p_skew}, {p_incr})
}}
'''
plantMIDI = '''groupbox bounds({start_y}, {start_x}, 710, 81), plant("plant_{pname}"), linethickness("0"){{
combobox channel("source1_{pname}"), bounds(10, 12, 90, 20), items({analysis_p}), value(1), channeltype("string")
combobox channel("chan1_{pname}"), bounds(103, 12, 50, 20), items("1", "2", "3", "4"), value(1)
numberbox bounds(158, 14, 35, 15), channel("rise1_{pname}"), range(0.01, 10.0, 0.01)
numberbox bounds(196, 14, 35, 15), channel("fall1_{pname}"), range(0.01, 10.0, 0.5)
hslider bounds(233, 12, 86, 20), channel("scale1_{pname}"), range(-1.0, 1.0, 0, 1, 0.01)
button bounds(320, 12, 29, 19), channel("scale1_x_{pname}"), text("x 1","x 10"),
hslider bounds(349, 12, 86, 20), channel("curve1_{pname}"), range(-5.0, 5.0, 0)
combobox channel("source2_{pname}"), bounds(10, 34, 90, 20), items({analysis_p}), value(1), channeltype("string")
combobox channel("chan2_{pname}"), bounds(103, 34, 50, 20), items("1", "2", "3", "4"), value(1)
numberbox bounds(158, 36, 35, 15), channel("rise2_{pname}"), range(0.01, 10.0, 0.01)
numberbox bounds(196, 36, 35, 15), channel("fall2_{pname}"), range(0.01, 10.0, 0.5)
hslider bounds(233, 34, 86, 20), channel("scale2_{pname}"), range(-1.0, 1.0, 0, 1, 0.01)
button bounds(320, 34, 29, 19), channel("scale2_x_{pname}"), text("x 1","x 10"),
hslider bounds(349, 34, 86, 20), channel("curve2_{pname}"), range(-5.0, 5.0, 0)
label bounds(10, 58, 90, 12), text("source"), colour(20,20,20,255)
label bounds(103, 58, 50, 12), text("chan"), colour(20,20,20,255)
label bounds(156, 58, 76, 12), text("rise/fall"), colour(20,20,20,255)
label bounds(236, 58, 110, 12), text("scale"), colour(20,20,20,255)
label bounds(352, 58, 81, 12), text("curve"), colour(20,20,20,255)
rslider bounds(433, 12, 62, 62), text("offset"), channel("offset_{pname}"), range({p_min}, {p_max}, {p_default}, {p_skew}, {p_incr})
combobox bounds(433, 1, 55, 12), channel("offsetx_{pname}"), items("-1", "Norm", "+1"), value(2), channeltype("string")
rslider bounds(494, 8, 66, 66), text("{pname}"), channel("{pname}"), range({p_min}, {p_max}, {p_default}, {p_skew}, {p_incr})
label bounds(570, 8, 55, 12), text("midi"), colour(20,20,20,255)
checkbox bounds(632, 8, 12, 12), text("enable"), channel("enable_{pname}"), value(1)
numberbox bounds(570, 25, 55, 15), channel("midich_{pname}"), range(1, 16, 1)
numberbox bounds(570, 42, 55, 15), channel("ctrlnum_{pname}"), range(1, 127, 1)
label bounds(632, 25, 70, 12), text("channel"), colour(20,20,20,255)
label bounds(632, 42, 70, 12), text("ctrl"), colour(20,20,20,255)
}}
'''
if effectname == 'vst_MIDIator': plant = plantMIDI
guifile = open(effectname+'_gui_scratchpad.inc', 'w')
x_pos = start_x_pos
x_pos1 = start_x_pos
y_pos = start_y_pos
for i in range(len(parameters)):
parm = parameters[i]
if (effectname == 'plucktremlpfverb') and (parm[0] == 'lpfDrive'):
x_pos1 = x_pos
x_pos = start_x_pos
y_pos = 575
guifile.write(plant.format(start_x=x_pos, start_y=y_pos, pname=parm[0], analysis_p=analysis_parms,p_min=parm[1][0], p_max=parm[1][1], p_default=parm[1][2], p_skew=parm[1][3], p_incr=parm[1][4]))
x_pos+=plant_height
guifile.write(';next x position available below plants is {}'.format(max([x_pos,x_pos1]))) | Oeyvind/interprocessing | codeUtility.py | Python | lgpl-3.0 | 12,966 |
import unittest
from matching.cpe_sorter import *
unsorted_cpes = [{'wfn': {'version': '4.0', 'target_sw': 'android_marshmallow'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.0:beta:~~~android_marshmallow~~'},
{'wfn': {'version': '1.0.1.2', 'target_sw': 'android_marshmallow'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:1.0.1.2:beta'},
{'wfn': {'version': '4.1.2', 'target_sw': 'ANY'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'},
{'wfn': {'version': '4.6.3', 'target_sw': 'windows'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.6.3:beta:~~~windows~~'},
{'wfn': {'version': '4.7.1', 'target_sw': 'android'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'},
{'wfn': {'version': '4.7.2', 'target_sw': 'ANY'},
'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.2:beta'},
{'wfn': {'version': '4.3.2', 'target_sw': 'linux'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'},
{'wfn': {'version': '2.3.1', 'target_sw': 'linux'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2.3.1:beta'},
{'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'}
]
unsorted_cpes_year = [{'wfn': {'version': '2000', 'target_sw': 'android_marshmallow'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2000:beta:~~~android_marshmallow~~'},
{'wfn': {'version': '2007', 'target_sw': 'android_marshmallow'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2007:beta'},
{'wfn': {'version': '4.1.2', 'target_sw': 'ANY'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.1.2:beta'},
{'wfn': {'version': '2010', 'target_sw': 'windows'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~windows~~'},
{'wfn': {'version': '4.7.1', 'target_sw': 'android'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.1:beta:~~~android~~'},
{'wfn': {'version': '2001', 'target_sw': 'ANY'},
'uri_binding':'cpe:/a:string_value_with\:double_points:internet_explorer:2001:beta'},
{'wfn': {'version': '4.3.2', 'target_sw': 'linux'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.3.2:beta:~~~linux~~'},
{'wfn': {'version': '2010', 'target_sw': 'linux'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta'},
{'wfn': {'version': '4.7.3', 'target_sw': 'mac_os_x'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:4.7.3:beta:~~~mac_os_x~~'},
{'wfn': {'version': '2010', 'target_sw': 'mac_os_x'},
'uri_binding': 'cpe:/a:string_value_with\:double_points:internet_explorer:2010:beta:~~~mac_os_x~~'}]
version = '4.7.2'
version_without_points = '4_7-2'
version_year = '2010'
os_windows = 'windows_7'
os_linux = 'linux_ubuntu'
os_android = 'android'
os_mac = 'mac_os_x_10.11'
class TestCPESorter(unittest.TestCase):
def test_sort_cpes_by_software_version(self):
sorted_cpes = sort_cpes_by_version(unsorted_cpes, version)
self.assertEqual(len(unsorted_cpes), len(sorted_cpes))
self.assertEqual(unsorted_cpes[5], sorted_cpes[0]) # 4.7.2
self.assertEqual(unsorted_cpes[4], sorted_cpes[1]) # 4.7.1
self.assertEqual(unsorted_cpes[8], sorted_cpes[2]) # 4.7.3
self.assertEqual(unsorted_cpes[0], sorted_cpes[3]) # 4.0
self.assertEqual(unsorted_cpes[2], sorted_cpes[4]) # 4.1.2
self.assertEqual(unsorted_cpes[3], sorted_cpes[5]) # 4.6.3
self.assertEqual(unsorted_cpes[6], sorted_cpes[6]) # 4.3.2
def test_cpes_and_sorted_cpes_are_equal_when_software_version_not_splitted_by_points(self):
sorted_cpes = sort_cpes_by_version(unsorted_cpes, version_without_points)
self.assertListEqual(unsorted_cpes, sorted_cpes)
def test_sort_cpes_by_version_with_year(self):
sorted_cpes = sort_cpes_by_version(unsorted_cpes_year, version_year)
self.assertEqual(len(unsorted_cpes_year), len(sorted_cpes))
self.assertEqual(unsorted_cpes_year[3], sorted_cpes[0]) # 2010
self.assertEqual(unsorted_cpes_year[7], sorted_cpes[1]) # 2010
self.assertEqual(unsorted_cpes_year[9], sorted_cpes[2]) # 2010
self.assertEqual(unsorted_cpes_year[0], sorted_cpes[3]) # 2000
self.assertEqual(unsorted_cpes_year[1], sorted_cpes[4]) # 2007
self.assertEqual(unsorted_cpes_year[5], sorted_cpes[5]) # 2001
def test_sort_cpes_by_operating_system_windows(self):
sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_windows)
self.assertEqual(len(unsorted_cpes), len(sorted_cpes))
self.assertEqual(unsorted_cpes[3], sorted_cpes[0])
def test_sort_cpes_by_operating_system_linux(self):
sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_linux)
self.assertEqual(len(unsorted_cpes), len(sorted_cpes))
self.assertEqual(unsorted_cpes[6], sorted_cpes[0])
def test_sort_cpes_by_operating_system_android(self):
sorted_cpes = sort_cpes_by_operating_system(unsorted_cpes, os_android)
self.assertEqual(len(unsorted_cpes), len(sorted_cpes))
self.assertEqual(unsorted_cpes[4], sorted_cpes[0])
self.assertEqual(unsorted_cpes[0], sorted_cpes[1])
if __name__ == '__main__':
unittest.main()
| fkie-cad/iva | tests/test_cpe_matching/test_cpe_sorter.py | Python | lgpl-3.0 | 6,349 |
"""
# Copyright (c) 05 2015 | surya
# 18/05/15 [email protected]
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# __init__.py.py
"""
import urlparse
from niimanga.libs.exceptions import HtmlError
from requests import request
class Site:
def __init__(self):
pass
def get_html(self, url, method='GET', **kwargs):
resp = request(method, url, **kwargs)
if resp.status_code != 200:
raise HtmlError({'msg': 'external_request_fail', 'url': url})
return resp.content
def fetch_manga_seed_page(self, url, **kwargs):
return self.get_html(url, **kwargs)
def fetch_chapter_seed_page(self, url, **kwargs):
return self.get_html(url, **kwargs)
def fetch_page_image(self, url, **kwargs):
return self.get_html(url, **kwargs)
def search_by_author(self, author):
"""
Return list of chapter dicts whose keys are:
name
url
site
        This should be implemented specifically in each Site subclass. If it
        is not, this default implementation is used and returns an empty list.
"""
return []
from mangaeden import MangaEden
from batoto import Batoto
available_sites = [
# Kissmanga(),
# Vitaku(),
Batoto(),
# Mangafox(),
# Mangahere(),
# MangaHereMob(),
MangaEden()
]
# Factory function, return instance of suitable "site" class from url
def get_site(url):
netloc = urlparse.urlparse(url).netloc
for site in available_sites:
if netloc in site.netlocs:
return site
return None | suryakencana/niimanga | niimanga/sites/__init__.py | Python | lgpl-3.0 | 2,259 |
import os
import socket
import sys
input_host = '127.0.0.1'
input_port = 65000
batch_enabled = int(os.environ.get('_BACKEND_BATCH_MODE', '0'))
if batch_enabled:
    # Since latest Python 2 has `builtins` and `input`,
    # we cannot detect Python 2 by checking for their existence.
if sys.version_info.major > 2:
import builtins
def _input(prompt=''):
sys.stdout.write(prompt)
sys.stdout.flush()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
try:
sock.connect((input_host, input_port))
userdata = sock.recv(1024)
except ConnectionRefusedError:
userdata = b'<user-input-unavailable>'
return userdata.decode()
builtins._input = input # type: ignore
builtins.input = _input
else:
# __builtins__ is an alias dict for __builtin__ in modules other than __main__.
        # Thus, we have to explicitly import the __builtin__ module in Python 2.
import __builtin__
builtins = __builtin__
def _raw_input(prompt=''):
sys.stdout.write(prompt)
sys.stdout.flush()
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((input_host, input_port))
userdata = sock.recv(1024)
except socket.error:
userdata = b'<user-input-unavailable>'
finally:
sock.close()
return userdata.decode()
builtins._raw_input = builtins.raw_input # type: ignore
builtins.raw_input = _raw_input # type: ignore
| lablup/sorna-agent | src/ai/backend/kernel/python/sitecustomize.py | Python | lgpl-3.0 | 1,693 |
# Copyright 2013, Michael H. Goldwasser
#
# Developed for use with the book:
#
# Data Structures and Algorithms in Python
# Michael T. Goodrich, Roberto Tamassia, and Michael H. Goldwasser
# John Wiley & Sons, 2013
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Range:
"""A class that mimic's the built-in range class."""
def __init__(self, start, stop=None, step=1):
"""Initialize a Range instance.
Semantics is similar to built-in range class.
"""
if step == 0:
raise ValueError('step cannot be 0')
if stop is None: # special case of range(n)
start, stop = 0, start # should be treated as if range(0,n)
# calculate the effective length once
self._length = max(0, (stop - start + step - 1) // step)
# need knowledge of start and step (but not stop) to support __getitem__
self._start = start
self._step = step
def __len__(self):
"""Return number of entries in the range."""
return self._length
def __getitem__(self, k):
"""Return entry at index k (using standard interpretation if negative)."""
if k < 0:
k += len(self) # attempt to convert negative index
if not 0 <= k < self._length:
raise IndexError('index out of range')
return self._start + k * self._step
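# A short sanity check of the Range semantics above (illustrative addition,
# not part of the book's original listing):
if __name__ == '__main__':
  r = Range(8, 140, 5)                  # mirrors range(8, 140, 5)
  assert len(r) == 27                   # (140 - 8 + 5 - 1) // 5 == 27
  assert r[0] == 8 and r[-1] == 138     # last entry is 8 + 26 * 5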
| consultit/Ely | ely/direct/data_structures_and_algorithms/ch02/range.py | Python | lgpl-3.0 | 2,023 |
#!/usr/bin/env python3
"""
This script edits your backends conf file by replacing stuff like:
[bnporc21]
_module = bnporc
website = pp
login = 123456
password = 78910
with:
[bnporc21]
_module = bnporc
website = pp
login = 123456
password = `pass show weboob/bnporc21`
"""
from __future__ import print_function
import os
import re
import shutil
import subprocess
import sys
import tempfile
FILE = os.getenv('WEBOOB_BACKENDS') or os.path.expanduser('~/.config/weboob/backends')
if not os.path.exists(FILE):
print('the backends file does not exist')
sys.exit(os.EX_NOINPUT)
if not shutil.which('pass'):
print('the "pass" tool could not be found')
sys.exit(os.EX_UNAVAILABLE)
errors = 0
seen = set()
backend = None
with open(FILE) as inp:
with tempfile.NamedTemporaryFile('w', delete=False, dir=os.path.dirname(FILE)) as outp:
for line in inp:
line = line.strip()
mtc = re.match(r'password\s*=\s*(\S.*)$', line)
if mtc and not mtc.group(1).startswith('`'):
cmd = ['pass', 'insert', 'weboob/%s' % backend]
stdin = 2 * ('%s\n' % mtc.group(1))
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
proc.communicate(stdin.encode('utf-8'))
if proc.returncode == 0:
print('password = `pass show weboob/%s`' % backend, file=outp)
continue
else:
errors += 1
print('warning: could not store password for backend %r' % backend)
mtc = re.match(r'\[(.+)\]', line)
if mtc:
backend = mtc.group(1)
if backend in seen:
print('error: backend %r is present multiple times' % backend)
sys.exit(os.EX_DATAERR)
seen.add(backend)
print(line, file=outp)
os.rename(outp.name, FILE)
if errors:
print('%d errors were encountered when storing passwords securely' % errors)
sys.exit(2)
| vicnet/weboob | contrib/replace-backends-pass.py | Python | lgpl-3.0 | 2,065 |
# -*- coding: utf-8 -*-
import oauth2 # XXX pumazi: factor this out
from webob.multidict import MultiDict, NestedMultiDict
from webob.request import Request as WebObRequest
__all__ = ['Request']
class Request(WebObRequest):
"""The OAuth version of the WebOb Request.
Provides an easier way to obtain OAuth request parameters
(e.g. oauth_token) from the WSGI environment."""
def _checks_positive_for_oauth(self, params_var):
"""Simple check for the presence of OAuth parameters."""
checks = [ p.find('oauth_') >= 0 for p in params_var ]
return True in checks
@property
def str_oauth_header(self):
extracted = {}
# Check for OAuth in the Header
if 'authorization' in self.headers:
auth_header = self.headers['authorization']
# Check that the authorization header is OAuth.
if auth_header[:6] == 'OAuth ':
                # Drop the leading 'OAuth ' prefix; slicing is used because
                # str.lstrip() strips a set of characters, not a prefix.
                auth_header = auth_header[6:]
try:
# Extract the parameters from the header.
extracted = oauth2.Request._split_header(auth_header)
except:
raise Error('Unable to parse OAuth parameters from '
'the Authorization header.')
return extracted
@property
def str_oauth_POST(self):
extracted = {}
if self._checks_positive_for_oauth(self.str_POST):
extracted = dict([ (k, v,) for k, v in self.str_POST.iteritems()
if (k.find('oauth_') >= 0) ])
return extracted
@property
def str_oauth_GET(self):
extracted = {}
if self._checks_positive_for_oauth(self.str_GET):
extracted = dict([ (k, v,) for k, v in self.str_GET.iteritems()
if (k.find('oauth_') >= 0) ])
return extracted
def params(self):
params = WebObRequest.params.fget(self)
return NestedMultiDict(params, self.str_oauth_header)
params = property(params, doc=WebObRequest.params.__doc__)
@property
def oauth_params(self):
"""Simple way to get the OAuth parameters without sifting through
the entire stack of parameters.
        We check the header first, because it is low-hanging fruit.
        However, it would be more efficient to check the POSTed parameters
        first, because the specification recommends sending OAuth parameters
        via POST before falling back to GET or the Authorization header."""
extracted = {}
# OAuth in the Header
extracted.update(self.str_oauth_header)
# OAuth in a GET or POST method
extracted.update(self.str_oauth_GET)
extracted.update(self.str_oauth_POST)
# Return the extracted oauth variables
return MultiDict(extracted)
@property
def nonoauth_params(self):
"""Simple way to get the non-OAuth parameters from the request."""
oauth_param_keys = self.oauth_params.keys()
return dict([(k, v) for k, v in self.params.iteritems() if k not in oauth_param_keys])
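# Rough usage sketch (hypothetical WSGI environ; key names are illustrative):
#
#     req = Request(environ)
#     token = req.oauth_params.get('oauth_token')
#     extra = req.nonoauth_params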
| karacos/karacos-wsgi | lib/wsgioauth/request.py | Python | lgpl-3.0 | 3,175 |
# -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
import re
import json
from datetime import datetime
from weboob.browser.pages import LoggedPage, HTMLPage, JsonPage
from weboob.browser.elements import DictElement, ItemElement, method
from weboob.browser.filters.standard import Date, CleanDecimal, CleanText, Format, Field, Env, Regexp, Currency
from weboob.browser.filters.json import Dict
from weboob.capabilities import NotAvailable
from weboob.capabilities.bank import Account, Loan
from weboob.capabilities.contact import Advisor
from weboob.capabilities.profile import Profile
from weboob.capabilities.bill import DocumentTypes, Subscription, Document
from weboob.tools.capabilities.bank.transactions import FrenchTransaction
from weboob.exceptions import BrowserUnavailable
class Transaction(FrenchTransaction):
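    # Regex patterns used to infer a transaction type from the raw label;
    # they are tried in order and the first matching pattern wins (see
    # CenetAccountHistoryPage.obj_type below).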
PATTERNS = [
(re.compile(r'^CB (?P<text>.*?) FACT (?P<dd>\d{2})(?P<mm>\d{2})(?P<yy>\d{2})', re.IGNORECASE), FrenchTransaction.TYPE_CARD),
(re.compile(r'^RET(RAIT)? DAB (?P<dd>\d+)-(?P<mm>\d+)-.*', re.IGNORECASE), FrenchTransaction.TYPE_WITHDRAWAL),
(re.compile(r'^RET(RAIT)? DAB (?P<text>.*?) (?P<dd>\d{2})(?P<mm>\d{2})(?P<yy>\d{2}) (?P<HH>\d{2})H(?P<MM>\d{2})', re.IGNORECASE), FrenchTransaction.TYPE_WITHDRAWAL),
(re.compile(r'^VIR(EMENT)?(\.PERIODIQUE)? (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_TRANSFER),
(re.compile(r'^PRLV (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_ORDER),
(re.compile(r'^CHEQUE.*', re.IGNORECASE), FrenchTransaction.TYPE_CHECK),
(re.compile(r'^(CONVENTION \d+ )?COTIS(ATION)? (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_BANK),
(re.compile(r'^\* (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_BANK),
(re.compile(r'^REMISE (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_DEPOSIT),
(re.compile(r'^(?P<text>.*)( \d+)? QUITTANCE .*', re.IGNORECASE), FrenchTransaction.TYPE_ORDER),
(re.compile(r'^CB [\d\*]+ TOT DIF .*', re.IGNORECASE), FrenchTransaction.TYPE_CARD_SUMMARY),
(re.compile(r'^CB [\d\*]+ (?P<text>.*)', re.IGNORECASE), FrenchTransaction.TYPE_CARD),
(re.compile(r'^CB (?P<text>.*?) (?P<dd>\d{2})(?P<mm>\d{2})(?P<yy>\d{2})', re.IGNORECASE), FrenchTransaction.TYPE_CARD),
(re.compile(r'\*CB (?P<text>.*?) (?P<dd>\d{2})(?P<mm>\d{2})(?P<yy>\d{2})', re.IGNORECASE), FrenchTransaction.TYPE_CARD),
(re.compile(r'^FAC CB (?P<text>.*?) (?P<dd>\d{2})/(?P<mm>\d{2})', re.IGNORECASE), FrenchTransaction.TYPE_CARD),
]
class LoginPage(JsonPage):
def get_response(self):
return self.doc
class CenetLoginPage(HTMLPage):
def login(self, username, password, nuser, codeCaisse, _id, vkpass):
form = self.get_form(id='aspnetForm')
form['__EVENTTARGET'] = "btn_authentifier_securise"
form['__EVENTARGUMENT'] = '{"CodeCaisse":"%s","NumeroBad":"%s","NumeroUsager":"%s",\
"MotDePasse":"%s","IdentifiantClavier":"%s","ChaineConnexion":"%s"}' \
% (codeCaisse, username, nuser, password, _id, vkpass)
form.submit()
class CenetHomePage(LoggedPage, HTMLPage):
@method
class get_advisor(ItemElement):
klass = Advisor
obj_name = CleanText('//section[contains(@id, "ChargeAffaires")]//strong')
obj_email = CleanText('//li[contains(@id, "MailContact")]')
obj_phone = CleanText('//li[contains(@id, "TelAgence")]', replace=[('.', '')])
obj_mobile = NotAvailable
obj_agency = CleanText('//section[contains(@id, "Agence")]//strong')
obj_address = CleanText('//li[contains(@id, "AdresseAgence")]')
def obj_fax(self):
return CleanText('//li[contains(@id, "FaxAgence")]', replace=[('.', '')])(self) or NotAvailable
@method
class get_profile(ItemElement):
klass = Profile
obj_name = CleanText('//li[@class="identite"]/a/span')
class CenetJsonPage(JsonPage):
def __init__(self, browser, response, *args, **kwargs):
super(CenetJsonPage, self).__init__(browser, response, *args, **kwargs)
# Why you are so ugly....
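        # The Cenet API wraps its payload twice: the HTTP response is a JSON
        # object whose 'd' key holds a JSON-encoded string, and the
        # 'DonneesSortie' field inside that is itself JSON-encoded again.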
self.doc = json.loads(self.doc['d'])
if self.doc['Erreur'] and (self.doc['Erreur']['Titre'] or self.doc['Erreur']['Code']):
self.logger.warning('error on %r: %s', self.url, self.doc['Erreur']['Titre'] or self.doc['Erreur']['Code'])
raise BrowserUnavailable(self.doc['Erreur']['Titre'] or self.doc['Erreur']['Description'])
self.doc['DonneesSortie'] = json.loads(self.doc['DonneesSortie'])
class CenetAccountsPage(LoggedPage, CenetJsonPage):
ACCOUNT_TYPES = {'CCP': Account.TYPE_CHECKING}
@method
class get_accounts(DictElement):
item_xpath = "DonneesSortie"
class item(ItemElement):
klass = Account
obj_id = CleanText(Dict('Numero'))
obj_label = CleanText(Dict('Intitule'))
obj_iban = CleanText(Dict('IBAN'))
def obj_balance(self):
absolut_amount = CleanDecimal(Dict('Solde/Valeur'))(self)
if CleanText(Dict('Solde/CodeSens'))(self) == 'D':
return -absolut_amount
return absolut_amount
def obj_currency(self):
return CleanText(Dict('Devise'))(self).upper()
def obj_type(self):
return self.page.ACCOUNT_TYPES.get(Dict('TypeCompte')(self), Account.TYPE_UNKNOWN)
def obj__formated(self):
return self.el
class CenetLoanPage(LoggedPage, CenetJsonPage):
@method
class get_accounts(DictElement):
item_xpath = "DonneesSortie"
class item(ItemElement):
klass = Loan
obj_id = CleanText(Dict('IdentifiantUniqueContrat'), replace=[(' ', '-')])
obj_label = CleanText(Dict('Libelle'))
obj_total_amount = CleanDecimal(Dict('MontantInitial/Valeur'))
obj_currency = Currency(Dict('MontantInitial/Devise'))
obj_type = Account.TYPE_LOAN
obj_duration = CleanDecimal(Dict('Duree'))
obj_rate = CleanDecimal.French(Dict('Taux'))
obj_next_payment_amount = CleanDecimal(Dict('MontantProchaineEcheance/Valeur'))
def obj_balance(self):
balance = CleanDecimal(Dict('CapitalRestantDu/Valeur'))(self)
if balance > 0:
balance *= -1
return balance
def obj_subscription_date(self):
sub_date = Dict('DateDebutEffet')(self)
if sub_date:
date = CleanDecimal().filter(sub_date) / 1000
return datetime.fromtimestamp(date).date()
return NotAvailable
def obj_maturity_date(self):
mat_date = Dict('DateDerniereEcheance')(self)
if mat_date:
date = CleanDecimal().filter(mat_date) / 1000
return datetime.fromtimestamp(date).date()
return NotAvailable
def obj_next_payment_date(self):
next_date = Dict('DateProchaineEcheance')(self)
if next_date:
date = CleanDecimal().filter(next_date) / 1000
return datetime.fromtimestamp(date).date()
return NotAvailable
class CenetCardsPage(LoggedPage, CenetJsonPage):
def get_cards(self):
cards = Dict('DonneesSortie')(self.doc)
# Remove dates to prevent bad parsing
def reword_dates(card):
tmp_card = card
for k, v in tmp_card.items():
if isinstance(v, dict):
v = reword_dates(v)
if k == "Date" and v is not None and "Date" in v:
card[k] = None
for card in cards:
reword_dates(card)
return cards
class CenetAccountHistoryPage(LoggedPage, CenetJsonPage):
TR_TYPES_LABEL = {
'VIR': Transaction.TYPE_TRANSFER,
'CHEQUE': Transaction.TYPE_CHECK,
'REMISE CHEQUE': Transaction.TYPE_CASH_DEPOSIT,
'PRLV': Transaction.TYPE_ORDER,
}
TR_TYPES_API = {
'VIR': Transaction.TYPE_TRANSFER,
'PE': Transaction.TYPE_ORDER, # PRLV
'CE': Transaction.TYPE_CHECK, # CHEQUE
'DE': Transaction.TYPE_CASH_DEPOSIT, # APPRO
'PI': Transaction.TYPE_CASH_DEPOSIT, # REMISE CHEQUE
}
@method
class get_history(DictElement):
item_xpath = "DonneesSortie"
class item(ItemElement):
klass = Transaction
obj_raw = Format('%s %s', Dict('Libelle'), Dict('Libelle2'))
obj_label = CleanText(Dict('Libelle'))
obj_date = Date(Dict('DateGroupImputation'), dayfirst=True)
obj_rdate = Date(Dict('DateGroupReglement'), dayfirst=True)
def obj_type(self):
ret = Transaction.TYPE_UNKNOWN
# The API may send the same key for 'PRLV' and 'VIR' transactions
# So the label is checked first, then the API key
for k, v in self.page.TR_TYPES_LABEL.items():
if Field('label')(self).startswith(k):
ret = v
break
if ret == Transaction.TYPE_UNKNOWN:
ret = self.page.TR_TYPES_API.get(Dict('TypeOperationDisplay')(self), Transaction.TYPE_UNKNOWN)
if ret != Transaction.TYPE_UNKNOWN:
return ret
for pattern, type in Transaction.PATTERNS:
if pattern.match(Field('raw')(self)):
return type
return Transaction.TYPE_UNKNOWN
def obj_amount(self):
amount = CleanDecimal(Dict('Montant/Valeur'))(self)
return -amount if Dict('Montant/CodeSens')(self) == "D" else amount
def next_offset(self):
offset = Dict('OffsetSortie')(self.doc)
if offset:
assert Dict('EstComplete')(self.doc) == 'false'
return offset
class CenetCardSummaryPage(LoggedPage, CenetJsonPage):
@method
class get_history(DictElement):
item_xpath = "DonneesSortie/OperationsCB"
class item(ItemElement):
klass = Transaction
obj_label = CleanText(Dict('Libelle'))
obj_date = Date(Dict('DateGroupImputation'), dayfirst=True)
obj_type = Transaction.TYPE_DEFERRED_CARD
def obj_raw(self):
label = Dict('Libelle')(self)
label2 = Dict('Libelle2')(self)
if label2 and label2 != 'None':
return '%s %s' % (label, label2)
else:
return label
def obj_rdate(self):
rdate = re.search('(FACT\s)(\d{6})', Field('label')(self))
if rdate.group(2):
return Date(dayfirst=True).filter(rdate.group(2))
return NotAvailable
def obj_amount(self):
amount = CleanDecimal(Dict('Montant/Valeur'))(self)
return -amount if Dict('Montant/CodeSens')(self) == "D" else amount
class _LogoutPage(HTMLPage):
def on_load(self):
raise BrowserUnavailable(CleanText('//*[@class="messErreur"]')(self.doc))
class ErrorPage(_LogoutPage):
pass
class UnavailablePage(HTMLPage):
def on_load(self):
raise BrowserUnavailable(CleanText('//div[@id="message_error_hs"]')(self.doc))
class SubscriptionPage(LoggedPage, CenetJsonPage):
@method
class iter_subscription(DictElement):
item_xpath = 'DonneesSortie'
class item(ItemElement):
klass = Subscription
obj_id = CleanText(Dict('Numero'))
obj_label = CleanText(Dict('Intitule'))
obj_subscriber = Env('subscriber')
@method
class iter_documents(DictElement):
item_xpath = 'DonneesSortie'
class item(ItemElement):
klass = Document
obj_id = Format('%s_%s_%s', Env('sub_id'), Dict('Numero'), CleanText(Env('french_date'), symbols='/'))
obj_format = 'pdf'
obj_type = DocumentTypes.OTHER
obj__numero = CleanText(Dict('Numero'))
obj__sub_id = Env('sub_id')
obj__sub_label = Env('sub_label')
obj__download_id = CleanText(Dict('IdDocument'))
def obj_date(self):
date = Regexp(Dict('DateArrete'), r'Date\((\d+)\)')(self)
date = int(date) // 1000
return datetime.fromtimestamp(date).date()
def obj_label(self):
return '%s %s' % (CleanText(Dict('Libelle'))(self), Env('french_date')(self))
def parse(self, el):
self.env['french_date'] = Field('date')(self).strftime('%d/%m/%Y')
class DownloadDocumentPage(LoggedPage, HTMLPage):
def download_form(self, document):
data = {
'Numero': document._numero,
'Libelle': document._sub_label.replace(' ', '+'),
'DateArrete': '',
'IdDocument': document._download_id
}
form = self.get_form(id='aspnetForm')
form['__EVENTTARGET'] = 'btn_telecharger'
form['__EVENTARGUMENT'] = json.dumps(data)
return form.submit()
| laurentb/weboob | modules/caissedepargne/cenet/pages.py | Python | lgpl-3.0 | 14,121 |
"""
.15925 Editor
Copyright 2014 TechInvestLab.ru [email protected]
.15925 Editor is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 3.0 of the License, or (at your option) any later version.
.15925 Editor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with .15925 Editor.
"""
from iso15926.tools.environment import EnvironmentContext
from PySide.QtCore import *
from PySide.QtGui import *
import os
from framework.dialogs import Choice
class TestWindow(QDialog):
vis_label = tm.main.tests_title
tests_dir = 'tests'
def __init__(self):
QDialog.__init__(self, appdata.topwindow, Qt.WindowSystemMenuHint | Qt.WindowTitleHint)
self.setWindowTitle(self.vis_label)
layout = QVBoxLayout(self)
box = QGroupBox(tm.main.tests_field, self)
self.tests_list = QListWidget(box)
boxlayout = QHBoxLayout(box)
boxlayout.addWidget(self.tests_list)
layout.addWidget(box)
for n in os.listdir(self.tests_dir):
if n.startswith(".") or not n.endswith('.py'):
continue
sp = os.path.splitext(n)
item = QListWidgetItem(sp[0], self.tests_list)
item.setCheckState(Qt.Unchecked)
self.btn_prepare = QPushButton(tm.main.prepare, self)
self.btn_prepare.setToolTip(tm.main.prepare_selected_tests)
self.btn_prepare.clicked.connect(self.OnPrepare)
self.btn_run = QPushButton(tm.main.run, self)
self.btn_run.setToolTip(tm.main.run_selected_tests)
self.btn_run.clicked.connect(self.OnRun)
self.btn_sel_all = QPushButton(tm.main.select_all, self)
self.btn_sel_all.clicked.connect(self.SelectAll)
self.btn_unsel_all = QPushButton(tm.main.unselect_all, self)
self.btn_unsel_all.clicked.connect(self.UnselectAll)
self.btn_cancel = QPushButton(tm.main.cancel, self)
self.btn_cancel.clicked.connect(self.reject)
btnlayout = QHBoxLayout()
btnlayout.addWidget(self.btn_sel_all)
btnlayout.addWidget(self.btn_unsel_all)
btnlayout.addStretch()
btnlayout.addWidget(self.btn_prepare)
btnlayout.addWidget(self.btn_run)
btnlayout.addWidget(self.btn_cancel)
layout.addLayout(btnlayout)
box = QGroupBox(tm.main.tests_result_field, self)
self.report = QPlainTextEdit(self)
boxlayout = QHBoxLayout(box)
boxlayout.addWidget(self.report)
layout.addWidget(box)
self.exec_()
    def SelectAll(self):
        for i in xrange(self.tests_list.count()):
            self.tests_list.item(i).setCheckState(Qt.Checked)
    def UnselectAll(self):
        for i in xrange(self.tests_list.count()):
            self.tests_list.item(i).setCheckState(Qt.Unchecked)
    def OnPrepare(self):
        if Choice(tm.main.tests_prepare_warning):
            for i in xrange(self.tests_list.count()):
                item = self.tests_list.item(i)
                if item.checkState() != Qt.Checked:
                    continue
                name = item.text()
                self.report.appendPlainText(tm.main.tests_preparing.format(name))
                locals = {'mode': 'prepare'}
                ec = EnvironmentContext(None, locals)
                ec.ExecutePythonFile(os.path.join(self.tests_dir, name + '.py'))
            self.report.appendPlainText(tm.main.tests_preparing_done)
def OnRun(self):
all_passed = True
self.report.appendPlainText(tm.main.tests_running)
count = 0
passed = 0
for i in xrange(self.tests_list.count()):
item = self.tests_list.item(i)
name = item.text()
if not item.checkState() == Qt.Checked:
continue
count += 1
locals = {'mode': 'run', 'passed': False}
ec = EnvironmentContext(None, locals)
ec.ExecutePythonFile(os.path.join(self.tests_dir, name + '.py'))
if locals['passed']:
passed += 1
self.report.appendPlainText(tm.main.test_passed.format(name))
else:
self.report.appendPlainText(tm.main.test_failed.format(name))
self.report.appendPlainText(tm.main.tests_result)
self.report.appendPlainText(tm.main.tests_result_info.format(passed, count))
if os.path.exists(TestWindow.tests_dir):
@public('workbench.menu.help')
class xTestMenu:
vis_label = tm.main.menu_tests
@classmethod
def Do(cls):
TestWindow()
| TechInvestLab/dot15926 | editor_qt/iso15926/common/testing.py | Python | lgpl-3.0 | 4,639 |
import os
import struct
from binascii import unhexlify
from shutil import copy as copyfile
from twisted.internet.defer import inlineCallbacks
from Tribler.Core.CacheDB.SqliteCacheDBHandler import TorrentDBHandler, MyPreferenceDBHandler, ChannelCastDBHandler
from Tribler.Core.CacheDB.sqlitecachedb import str2bin
from Tribler.Core.Category.Category import Category
from Tribler.Core.TorrentDef import TorrentDef
from Tribler.Core.leveldbstore import LevelDbStore
from Tribler.Test.Core.test_sqlitecachedbhandler import AbstractDB
from Tribler.Test.common import TESTS_DATA_DIR
S_TORRENT_PATH_BACKUP = os.path.join(TESTS_DATA_DIR, 'bak_single.torrent')
M_TORRENT_PATH_BACKUP = os.path.join(TESTS_DATA_DIR, 'bak_multiple.torrent')
class TestTorrentFullSessionDBHandler(AbstractDB):
def setUpPreSession(self):
super(TestTorrentFullSessionDBHandler, self).setUpPreSession()
self.config.set_megacache_enabled(True)
@inlineCallbacks
def setUp(self):
yield super(TestTorrentFullSessionDBHandler, self).setUp()
self.tdb = TorrentDBHandler(self.session)
def test_initialize(self):
self.tdb.initialize()
self.assertIsNone(self.tdb.mypref_db)
self.assertIsNone(self.tdb.votecast_db)
self.assertIsNone(self.tdb.channelcast_db)
class TestTorrentDBHandler(AbstractDB):
def addTorrent(self):
old_size = self.tdb.size()
old_tracker_size = self.tdb._db.size('TrackerInfo')
s_infohash = unhexlify('44865489ac16e2f34ea0cd3043cfd970cc24ec09')
m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')
single_torrent_file_path = os.path.join(self.getStateDir(), 'single.torrent')
multiple_torrent_file_path = os.path.join(self.getStateDir(), 'multiple.torrent')
copyfile(S_TORRENT_PATH_BACKUP, single_torrent_file_path)
copyfile(M_TORRENT_PATH_BACKUP, multiple_torrent_file_path)
single_tdef = TorrentDef.load(single_torrent_file_path)
self.assertEqual(s_infohash, single_tdef.get_infohash())
multiple_tdef = TorrentDef.load(multiple_torrent_file_path)
self.assertEqual(m_infohash, multiple_tdef.get_infohash())
self.tdb.addExternalTorrent(single_tdef)
self.tdb.addExternalTorrent(multiple_tdef)
single_torrent_id = self.tdb.getTorrentID(s_infohash)
multiple_torrent_id = self.tdb.getTorrentID(m_infohash)
self.assertEqual(self.tdb.getInfohash(single_torrent_id), s_infohash)
single_name = 'Tribler_4.1.7_src.zip'
multiple_name = 'Tribler_4.1.7_src'
self.assertEqual(self.tdb.size(), old_size + 2)
new_tracker_table_size = self.tdb._db.size('TrackerInfo')
self.assertLess(old_tracker_size, new_tracker_table_size)
sname = self.tdb.getOne('name', torrent_id=single_torrent_id)
self.assertEqual(sname, single_name)
mname = self.tdb.getOne('name', torrent_id=multiple_torrent_id)
self.assertEqual(mname, multiple_name)
s_size = self.tdb.getOne('length', torrent_id=single_torrent_id)
self.assertEqual(s_size, 1583233)
m_size = self.tdb.getOne('length', torrent_id=multiple_torrent_id)
self.assertEqual(m_size, 5358560)
cat = self.tdb.getOne('category', torrent_id=multiple_torrent_id)
self.assertEqual(cat, u'xxx')
s_status = self.tdb.getOne('status', torrent_id=single_torrent_id)
self.assertEqual(s_status, u'unknown')
m_comment = self.tdb.getOne('comment', torrent_id=multiple_torrent_id)
comments = 'www.tribler.org'
self.assertGreater(m_comment.find(comments), -1)
comments = 'something not inside'
self.assertEqual(m_comment.find(comments), -1)
m_trackers = self.tdb.getTrackerListByInfohash(m_infohash)
self.assertEqual(len(m_trackers), 8)
self.assertIn('http://tpb.tracker.thepiratebay.org/announce', m_trackers)
s_torrent = self.tdb.getTorrent(s_infohash)
m_torrent = self.tdb.getTorrent(m_infohash)
self.assertEqual(s_torrent['name'], 'Tribler_4.1.7_src.zip')
self.assertEqual(m_torrent['name'], 'Tribler_4.1.7_src')
self.assertEqual(m_torrent['last_tracker_check'], 0)
def updateTorrent(self):
m_infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')
self.tdb.updateTorrent(m_infohash, relevance=3.1415926, category=u'Videoclips',
status=u'good', seeder=123, leecher=321,
last_tracker_check=1234567,
other_key1='abcd', other_key2=123)
multiple_torrent_id = self.tdb.getTorrentID(m_infohash)
category = self.tdb.getOne('category', torrent_id=multiple_torrent_id)
self.assertEqual(category, u'Videoclips')
status = self.tdb.getOne('status', torrent_id=multiple_torrent_id)
self.assertEqual(status, u'good')
seeder = self.tdb.getOne('num_seeders', torrent_id=multiple_torrent_id)
self.assertEqual(seeder, 123)
leecher = self.tdb.getOne('num_leechers', torrent_id=multiple_torrent_id)
self.assertEqual(leecher, 321)
last_tracker_check = self.tdb.getOne('last_tracker_check', torrent_id=multiple_torrent_id)
self.assertEqual(last_tracker_check, 1234567)
def setUpPreSession(self):
super(TestTorrentDBHandler, self).setUpPreSession()
self.config.set_megacache_enabled(True)
self.config.set_torrent_store_enabled(True)
@inlineCallbacks
def setUp(self):
yield super(TestTorrentDBHandler, self).setUp()
from Tribler.Core.APIImplementation.LaunchManyCore import TriblerLaunchMany
from Tribler.Core.Modules.tracker_manager import TrackerManager
self.session.lm = TriblerLaunchMany()
self.session.lm.tracker_manager = TrackerManager(self.session)
self.tdb = TorrentDBHandler(self.session)
self.tdb.torrent_dir = TESTS_DATA_DIR
self.tdb.category = Category()
self.tdb.mypref_db = MyPreferenceDBHandler(self.session)
@inlineCallbacks
def tearDown(self):
self.tdb.mypref_db.close()
self.tdb.mypref_db = None
self.tdb.close()
self.tdb = None
yield super(TestTorrentDBHandler, self).tearDown()
def test_hasTorrent(self):
infohash_str = 'AA8cTG7ZuPsyblbRE7CyxsrKUCg='
infohash = str2bin(infohash_str)
self.assertTrue(self.tdb.hasTorrent(infohash))
self.assertTrue(self.tdb.hasTorrent(infohash)) # cache will trigger
fake_infohash = 'fake_infohash_100000'
self.assertFalse(self.tdb.hasTorrent(fake_infohash))
def test_get_infohash(self):
self.assertTrue(self.tdb.getInfohash(1))
self.assertFalse(self.tdb.getInfohash(1234567))
def test_add_update_torrent(self):
self.addTorrent()
self.updateTorrent()
def test_update_torrent_from_metainfo(self):
# Add torrent first
infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')
# Only infohash is added to the database
self.tdb.addOrGetTorrentID(infohash)
# Then update the torrent with metainfo
metainfo = {'info': {'files': [{'path': ['Something.something.pdf'], 'length': 123456789},
{'path': ['Another-thing.jpg'], 'length': 100000000}],
'piece length': 2097152,
'name': '\xc3Something awesome (2015)',
'pieces': ''},
'seeders': 0, 'initial peers': [],
'leechers': 36, 'download_exists': False, 'nodes': []}
self.tdb.update_torrent_with_metainfo(infohash, metainfo)
# Check updates are correct
torrent_id = self.tdb.getTorrentID(infohash)
name = self.tdb.getOne('name', torrent_id=torrent_id)
self.assertEqual(name, u'\xc3Something awesome (2015)')
num_files = self.tdb.getOne('num_files', torrent_id=torrent_id)
self.assertEqual(num_files, 2)
length = self.tdb.getOne('length', torrent_id=torrent_id)
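        # 223456789 is the sum of the two file lengths in the metainfo above
        # (123456789 + 100000000)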
self.assertEqual(length, 223456789)
def test_add_external_torrent_no_def_existing(self):
infohash = str2bin('AA8cTG7ZuPsyblbRE7CyxsrKUCg=')
self.tdb.addExternalTorrentNoDef(infohash, "test torrent", [], [], 1234)
self.assertTrue(self.tdb.hasTorrent(infohash))
def test_add_external_torrent_no_def_no_files(self):
infohash = unhexlify('48865489ac16e2f34ea0cd3043cfd970cc24ec09')
self.tdb.addExternalTorrentNoDef(infohash, "test torrent", [], [], 1234)
self.assertFalse(self.tdb.hasTorrent(infohash))
def test_add_external_torrent_no_def_one_file(self):
infohash = unhexlify('49865489ac16e2f34ea0cd3043cfd970cc24ec09')
self.tdb.addExternalTorrentNoDef(infohash, "test torrent", [("file1", 42)],
['http://localhost/announce'], 1234)
self.assertTrue(self.tdb.getTorrentID(infohash))
def test_add_external_torrent_no_def_more_files(self):
infohash = unhexlify('50865489ac16e2f34ea0cd3043cfd970cc24ec09')
self.tdb.addExternalTorrentNoDef(infohash, "test torrent", [("file1", 42), ("file2", 43)],
[], 1234, extra_info={"seeder": 2, "leecher": 3})
self.assertTrue(self.tdb.getTorrentID(infohash))
def test_add_external_torrent_no_def_invalid(self):
infohash = unhexlify('50865489ac16e2f34ea0cd3043cfd970cc24ec09')
self.tdb.addExternalTorrentNoDef(infohash, "test torrent", [("file1", {}), ("file2", 43)],
[], 1234)
self.assertFalse(self.tdb.getTorrentID(infohash))
def test_add_get_torrent_id(self):
infohash = str2bin('AA8cTG7ZuPsyblbRE7CyxsrKUCg=')
self.assertEqual(self.tdb.addOrGetTorrentID(infohash), 1)
new_infohash = unhexlify('50865489ac16e2f34ea0cd3043cfd970cc24ec09')
self.assertEqual(self.tdb.addOrGetTorrentID(new_infohash), 4859)
def test_add_get_torrent_ids_return(self):
infohash = str2bin('AA8cTG7ZuPsyblbRE7CyxsrKUCg=')
new_infohash = unhexlify('50865489ac16e2f34ea0cd3043cfd970cc24ec09')
tids, inserted = self.tdb.addOrGetTorrentIDSReturn([infohash, new_infohash])
self.assertEqual(tids, [1, 4859])
self.assertEqual(len(inserted), 1)
def test_index_torrent_existing(self):
self.tdb._indexTorrent(1, "test", [])
def test_getCollectedTorrentHashes(self):
res = self.tdb.getNumberCollectedTorrents()
self.assertEqual(res, 4847)
def test_freeSpace(self):
# Manually set the torrent store because register is not called.
self.session.lm.torrent_store = LevelDbStore(self.session.config.get_torrent_store_dir())
old_res = self.tdb.getNumberCollectedTorrents()
self.tdb.freeSpace(20)
res = self.tdb.getNumberCollectedTorrents()
self.session.lm.torrent_store.close()
self.assertEqual(res, old_res-20)
def test_get_search_suggestions(self):
self.assertEqual(self.tdb.getSearchSuggestion(["content", "cont"]), ["content 1"])
def test_get_autocomplete_terms(self):
self.assertEqual(len(self.tdb.getAutoCompleteTerms("content", 100)), 0)
def test_get_recently_randomly_collected_torrents(self):
self.assertEqual(len(self.tdb.getRecentlyCollectedTorrents(limit=10)), 10)
self.assertEqual(len(self.tdb.getRandomlyCollectedTorrents(100000000, limit=10)), 3)
def test_get_recently_checked_torrents(self):
self.assertEqual(len(self.tdb.getRecentlyCheckedTorrents(limit=5)), 5)
def test_select_torrents_to_collect(self):
infohash = str2bin('AA8cTG7ZuPsyblbRE7CyxsrKUCg=')
self.assertEqual(len(self.tdb.select_torrents_to_collect(infohash)), 0)
def test_get_torrents_stats(self):
self.assertEqual(self.tdb.getTorrentsStats(), (4847, 6519179841442, 187195))
def test_get_library_torrents(self):
self.assertEqual(len(self.tdb.getLibraryTorrents(['infohash'])), 12)
def test_search_names_no_sort(self):
"""
Test whether the right amount of torrents are returned when searching for torrents in db
"""
columns = ['T.torrent_id', 'infohash', 'status', 'num_seeders']
self.tdb.channelcast_db = ChannelCastDBHandler(self.session)
self.assertEqual(len(self.tdb.searchNames(['content'], keys=columns, doSort=False)), 4849)
self.assertEqual(len(self.tdb.searchNames(['content', '1'], keys=columns, doSort=False)), 1)
def test_search_names_sort(self):
"""
Test whether the right amount of sorted torrents are returned when searching for torrents in db
"""
columns = ['T.torrent_id', 'infohash', 'status', 'num_seeders']
self.tdb.channelcast_db = ChannelCastDBHandler(self.session)
results = self.tdb.searchNames(['content'], keys=columns)
self.assertEqual(len(results), 4849)
self.assertEqual(results[0][3], 493785)
def test_search_local_torrents(self):
"""
Test the search procedure in the local database when searching for torrents
"""
results = self.tdb.search_in_local_torrents_db('content', ['infohash', 'num_seeders'])
self.assertEqual(len(results), 4849)
self.assertNotEqual(results[0][-1], 0.0) # Relevance score of result should not be zero
results = self.tdb.search_in_local_torrents_db('fdsafasfds', ['infohash'])
self.assertEqual(len(results), 0)
def test_rel_score_remote_torrent(self):
self.tdb.latest_matchinfo_torrent = struct.pack("I" * 12, *([1] * 12)), "torrent"
self.assertNotEqual(self.tdb.relevance_score_remote_torrent("my-torrent.iso"), 0.0)
| Captain-Coder/tribler | Tribler/Test/Core/test_sqlitecachedbhandler_torrents.py | Python | lgpl-3.0 | 13,903 |
"""
Property reference docs:
- https://docs.microsoft.com/en-us/office/client-developer/outlook/mapi/mapping-canonical-property-names-to-mapi-names#tagged-properties
- https://interoperability.blob.core.windows.net/files/MS-OXPROPS/[MS-OXPROPS].pdf
- https://fossies.org/linux/libpst/xml/MAPI_definitions.pdf
- https://docs.microsoft.com/en-us/office/client-developer/outlook/mapi/mapi-constants
+----------------+----------------+-------------------------------------------------------------------------------+
| Range minimum | Range maximum | Description |
+----------------+----------------+-------------------------------------------------------------------------------+
| 0x0001 | 0x0BFF | Message object envelope property; reserved |
| 0x0C00 | 0x0DFF | Recipient property; reserved |
| 0x0E00 | 0x0FFF | Non-transmittable Message property; reserved |
| 0x1000 | 0x2FFF | Message content property; reserved |
| 0x3000 | 0x33FF | Multi-purpose property that can appear on all or most objects; reserved |
| 0x3400 | 0x35FF | Message store property; reserved |
| 0x3600 | 0x36FF | Folder and address book container property; reserved |
| 0x3700 | 0x38FF | Attachment property; reserved |
| 0x3900 | 0x39FF | Address Book object property; reserved |
| 0x3A00 | 0x3BFF | Mail user object property; reserved |
| 0x3C00 | 0x3CFF | Distribution list property; reserved |
| 0x3D00 | 0x3DFF | Profile section property; reserved |
| 0x3E00 | 0x3EFF | Status object property; reserved |
| 0x4000 | 0x57FF | Transport-defined envelope property |
| 0x5800 | 0x5FFF | Transport-defined recipient property |
| 0x6000 | 0x65FF | User-defined non-transmittable property |
| 0x6600 | 0x67FF | Provider-defined internal non-transmittable property |
| 0x6800 | 0x7BFF | Message class-defined content property |
| 0x7C00 | 0x7FFF | Message class-defined non-transmittable property |
| 0x8000 | 0xFFFF | Reserved for mapping to named properties. The exceptions to this rule are |
| | | some of the address book tagged properties (those with names beginning with |
| | | PIDTagAddressBook). Many are static property IDs but are in this range. |
+----------------+----------------+-------------------------------------------------------------------------------+
""" # noqa: E501
MAPI_ACKNOWLEDGEMENT_MODE = 0x0001
MAPI_ALTERNATE_RECIPIENT_ALLOWED = 0x0002
MAPI_AUTHORIZING_USERS = 0x0003
MAPI_AUTO_FORWARD_COMMENT = 0x0004
MAPI_AUTO_FORWARDED = 0x0005
MAPI_CONTENT_CONFIDENTIALITY_ALGORITHM_ID = 0x0006
MAPI_CONTENT_CORRELATOR = 0x0007
MAPI_CONTENT_IDENTIFIER = 0x0008
MAPI_CONTENT_LENGTH = 0x0009
MAPI_CONTENT_RETURN_REQUESTED = 0x000A
MAPI_CONVERSATION_KEY = 0x000B
MAPI_CONVERSION_EITS = 0x000C
MAPI_CONVERSION_WITH_LOSS_PROHIBITED = 0x000D
MAPI_CONVERTED_EITS = 0x000E
MAPI_DEFERRED_DELIVERY_TIME = 0x000F
MAPI_DELIVER_TIME = 0x0010
MAPI_DISCARD_REASON = 0x0011
MAPI_DISCLOSURE_OF_RECIPIENTS = 0x0012
MAPI_DL_EXPANSION_HISTORY = 0x0013
MAPI_DL_EXPANSION_PROHIBITED = 0x0014
MAPI_EXPIRY_TIME = 0x0015
MAPI_IMPLICIT_CONVERSION_PROHIBITED = 0x0016
MAPI_IMPORTANCE = 0x0017
MAPI_IPM_ID = 0x0018
MAPI_LATEST_DELIVERY_TIME = 0x0019
MAPI_MESSAGE_CLASS = 0x001A
MAPI_MESSAGE_DELIVERY_ID = 0x001B
MAPI_MESSAGE_SECURITY_LABEL = 0x001E
MAPI_OBSOLETED_IPMS = 0x001F
MAPI_ORIGINALLY_INTENDED_RECIPIENT_NAME = 0x0020
MAPI_ORIGINAL_EITS = 0x0021
MAPI_ORIGINATOR_CERTIFICATE = 0x0022
MAPI_ORIGINATOR_DELIVERY_REPORT_REQUESTED = 0x0023
MAPI_ORIGINATOR_RETURN_ADDRESS = 0x0024
MAPI_PARENT_KEY = 0x0025
MAPI_PRIORITY = 0x0026
MAPI_ORIGIN_CHECK = 0x0027
MAPI_PROOF_OF_SUBMISSION_REQUESTED = 0x0028
MAPI_READ_RECEIPT_REQUESTED = 0x0029
MAPI_RECEIPT_TIME = 0x002A
MAPI_RECIPIENT_REASSIGNMENT_PROHIBITED = 0x002B
MAPI_REDIRECTION_HISTORY = 0x002C
MAPI_RELATED_IPMS = 0x002D
MAPI_ORIGINAL_SENSITIVITY = 0x002E
MAPI_LANGUAGES = 0x002F
MAPI_REPLY_TIME = 0x0030
MAPI_REPORT_TAG = 0x0031
MAPI_REPORT_TIME = 0x0032
MAPI_RETURNED_IPM = 0x0033
MAPI_SECURITY = 0x0034
MAPI_INCOMPLETE_COPY = 0x0035
MAPI_SENSITIVITY = 0x0036
MAPI_SUBJECT = 0x0037
MAPI_SUBJECT_IPM = 0x0038
MAPI_CLIENT_SUBMIT_TIME = 0x0039
MAPI_REPORT_NAME = 0x003A
MAPI_SENT_REPRESENTING_SEARCH_KEY = 0x003B
MAPI_X400_CONTENT_TYPE = 0x003C
MAPI_SUBJECT_PREFIX = 0x003D
MAPI_NON_RECEIPT_REASON = 0x003E
MAPI_RECEIVED_BY_ENTRYID = 0x003F
MAPI_RECEIVED_BY_NAME = 0x0040
MAPI_SENT_REPRESENTING_ENTRYID = 0x0041
MAPI_SENT_REPRESENTING_NAME = 0x0042
MAPI_RCVD_REPRESENTING_ENTRYID = 0x0043
MAPI_RCVD_REPRESENTING_NAME = 0x0044
MAPI_REPORT_ENTRYID = 0x0045
MAPI_READ_RECEIPT_ENTRYID = 0x0046
MAPI_MESSAGE_SUBMISSION_ID = 0x0047
MAPI_PROVIDER_SUBMIT_TIME = 0x0048
MAPI_ORIGINAL_SUBJECT = 0x0049
MAPI_DISC_VAL = 0x004A
MAPI_ORIG_MESSAGE_CLASS = 0x004B
MAPI_ORIGINAL_AUTHOR_ENTRYID = 0x004C
MAPI_ORIGINAL_AUTHOR_NAME = 0x004D
MAPI_ORIGINAL_SUBMIT_TIME = 0x004E
MAPI_REPLY_RECIPIENT_ENTRIES = 0x004F
MAPI_REPLY_RECIPIENT_NAMES = 0x0050
MAPI_RECEIVED_BY_SEARCH_KEY = 0x0051
MAPI_RCVD_REPRESENTING_SEARCH_KEY = 0x0052
MAPI_READ_RECEIPT_SEARCH_KEY = 0x0053
MAPI_REPORT_SEARCH_KEY = 0x0054
MAPI_ORIGINAL_DELIVERY_TIME = 0x0055
MAPI_ORIGINAL_AUTHOR_SEARCH_KEY = 0x0056
MAPI_MESSAGE_TO_ME = 0x0057
MAPI_MESSAGE_CC_ME = 0x0058
MAPI_MESSAGE_RECIP_ME = 0x0059
MAPI_ORIGINAL_SENDER_NAME = 0x005A
MAPI_ORIGINAL_SENDER_ENTRYID = 0x005B
MAPI_ORIGINAL_SENDER_SEARCH_KEY = 0x005C
MAPI_ORIGINAL_SENT_REPRESENTING_NAME = 0x005D
MAPI_ORIGINAL_SENT_REPRESENTING_ENTRYID = 0x005E
MAPI_ORIGINAL_SENT_REPRESENTING_SEARCH_KEY = 0x005F
MAPI_START_DATE = 0x0060
MAPI_END_DATE = 0x0061
MAPI_OWNER_APPT_ID = 0x0062
MAPI_RESPONSE_REQUESTED = 0x0063
MAPI_SENT_REPRESENTING_ADDRTYPE = 0x0064
MAPI_SENT_REPRESENTING_EMAIL_ADDRESS = 0x0065
MAPI_ORIGINAL_SENDER_ADDRTYPE = 0x0066
MAPI_ORIGINAL_SENDER_EMAIL_ADDRESS = 0x0067
MAPI_ORIGINAL_SENT_REPRESENTING_ADDRTYPE = 0x0068
MAPI_ORIGINAL_SENT_REPRESENTING_EMAIL_ADDRESS = 0x0069
MAPI_CONVERSATION_TOPIC = 0x0070
MAPI_CONVERSATION_INDEX = 0x0071
MAPI_ORIGINAL_DISPLAY_BCC = 0x0072
MAPI_ORIGINAL_DISPLAY_CC = 0x0073
MAPI_ORIGINAL_DISPLAY_TO = 0x0074
MAPI_RECEIVED_BY_ADDRTYPE = 0x0075
MAPI_RECEIVED_BY_EMAIL_ADDRESS = 0x0076
MAPI_RCVD_REPRESENTING_ADDRTYPE = 0x0077
MAPI_RCVD_REPRESENTING_EMAIL_ADDRESS = 0x0078
MAPI_ORIGINAL_AUTHOR_ADDRTYPE = 0x0079
MAPI_ORIGINAL_AUTHOR_EMAIL_ADDRESS = 0x007A
MAPI_ORIGINALLY_INTENDED_RECIP_ADDRTYPE = 0x007B
MAPI_ORIGINALLY_INTENDED_RECIP_EMAIL_ADDRESS = 0x007C
MAPI_TRANSPORT_MESSAGE_HEADERS = 0x007D
MAPI_DELEGATION = 0x007E
MAPI_TNEF_CORRELATION_KEY = 0x007F
MAPI_CONTENT_INTEGRITY_CHECK = 0x0C00
MAPI_EXPLICIT_CONVERSION = 0x0C01
MAPI_IPM_RETURN_REQUESTED = 0x0C02
MAPI_MESSAGE_TOKEN = 0x0C03
MAPI_NDR_REASON_CODE = 0x0C04
MAPI_NDR_DIAG_CODE = 0x0C05
MAPI_NON_RECEIPT_NOTIFICATION_REQUESTED = 0x0C06
MAPI_DELIVERY_POINT = 0x0C07
MAPI_ORIGINATOR_NON_DELIVERY_REPORT_REQUESTED = 0x0C08
MAPI_ORIGINATOR_REQUESTED_ALTERNATE_RECIPIENT = 0x0C09
MAPI_PHYSICAL_DELIVERY_BUREAU_FAX_DELIVERY = 0x0C0A
MAPI_PHYSICAL_DELIVERY_MODE = 0x0C0B
MAPI_PHYSICAL_DELIVERY_REPORT_REQUEST = 0x0C0C
MAPI_PHYSICAL_FORWARDING_ADDRESS = 0x0C0D
MAPI_PHYSICAL_FORWARDING_ADDRESS_REQUESTED = 0x0C0E
MAPI_PHYSICAL_FORWARDING_PROHIBITED = 0x0C0F
MAPI_PHYSICAL_RENDITION_ATTRIBUTES = 0x0C10
MAPI_PROOF_OF_DELIVERY = 0x0C11
MAPI_PROOF_OF_DELIVERY_REQUESTED = 0x0C12
MAPI_RECIPIENT_CERTIFICATE = 0x0C13
MAPI_RECIPIENT_NUMBER_FOR_ADVICE = 0x0C14
MAPI_RECIPIENT_TYPE = 0x0C15
MAPI_REGISTERED_MAIL_TYPE = 0x0C16
MAPI_REPLY_REQUESTED = 0x0C17
MAPI_REQUESTED_DELIVERY_METHOD = 0x0C18
MAPI_SENDER_ENTRYID = 0x0C19
MAPI_SENDER_NAME = 0x0C1A
MAPI_SUPPLEMENTARY_INFO = 0x0C1B
MAPI_TYPE_OF_MTS_USER = 0x0C1C
MAPI_SENDER_SEARCH_KEY = 0x0C1D
MAPI_SENDER_ADDRTYPE = 0x0C1E
MAPI_SENDER_EMAIL_ADDRESS = 0x0C1F
MAPI_CURRENT_VERSION = 0x0E00
MAPI_DELETE_AFTER_SUBMIT = 0x0E01
MAPI_DISPLAY_BCC = 0x0E02
MAPI_DISPLAY_CC = 0x0E03
MAPI_DISPLAY_TO = 0x0E04
MAPI_PARENT_DISPLAY = 0x0E05
MAPI_MESSAGE_DELIVERY_TIME = 0x0E06
MAPI_MESSAGE_FLAGS = 0x0E07
MAPI_MESSAGE_SIZE = 0x0E08
MAPI_PARENT_ENTRYID = 0x0E09
MAPI_SENTMAIL_ENTRYID = 0x0E0A
MAPI_CORRELATE = 0x0E0C
MAPI_CORRELATE_MTSID = 0x0E0D
MAPI_DISCRETE_VALUES = 0x0E0E
MAPI_RESPONSIBILITY = 0x0E0F
MAPI_SPOOLER_STATUS = 0x0E10
MAPI_TRANSPORT_STATUS = 0x0E11
MAPI_MESSAGE_RECIPIENTS = 0x0E12
MAPI_MESSAGE_ATTACHMENTS = 0x0E13
MAPI_SUBMIT_FLAGS = 0x0E14
MAPI_RECIPIENT_STATUS = 0x0E15
MAPI_TRANSPORT_KEY = 0x0E16
MAPI_MSG_STATUS = 0x0E17
MAPI_MESSAGE_DOWNLOAD_TIME = 0x0E18
MAPI_CREATION_VERSION = 0x0E19
MAPI_MODIFY_VERSION = 0x0E1A
MAPI_HASATTACH = 0x0E1B
MAPI_BODY_CRC = 0x0E1C
MAPI_NORMALIZED_SUBJECT = 0x0E1D
MAPI_RTF_IN_SYNC = 0x0E1F
MAPI_ATTACH_SIZE = 0x0E20
MAPI_ATTACH_NUM = 0x0E21
MAPI_PREPROCESS = 0x0E22
MAPI_ORIGINATING_MTA_CERTIFICATE = 0x0E25
MAPI_PROOF_OF_SUBMISSION = 0x0E26
MAPI_PRIMARY_SEND_ACCOUNT = 0x0E28
MAPI_NEXT_SEND_ACCT = 0x0E29
MAPI_ACCESS = 0x0FF4
MAPI_ROW_TYPE = 0x0FF5
MAPI_INSTANCE_KEY = 0x0FF6
MAPI_ACCESS_LEVEL = 0x0FF7
MAPI_MAPPING_SIGNATURE = 0x0FF8
MAPI_RECORD_KEY = 0x0FF9
MAPI_STORE_RECORD_KEY = 0x0FFA
MAPI_STORE_ENTRYID = 0x0FFB
MAPI_MINI_ICON = 0x0FFC
MAPI_ICON = 0x0FFD
MAPI_OBJECT_TYPE = 0x0FFE
MAPI_ENTRYID = 0x0FFF
MAPI_BODY = 0x1000
MAPI_REPORT_TEXT = 0x1001
MAPI_ORIGINATOR_AND_DL_EXPANSION_HISTORY = 0x1002
MAPI_REPORTING_DL_NAME = 0x1003
MAPI_REPORTING_MTA_CERTIFICATE = 0x1004
MAPI_RTF_SYNC_BODY_CRC = 0x1006
MAPI_RTF_SYNC_BODY_COUNT = 0x1007
MAPI_RTF_SYNC_BODY_TAG = 0x1008
MAPI_RTF_COMPRESSED = 0x1009
MAPI_RTF_SYNC_PREFIX_COUNT = 0x1010
MAPI_RTF_SYNC_TRAILING_COUNT = 0x1011
MAPI_ORIGINALLY_INTENDED_RECIP_ENTRYID = 0x1012
MAPI_BODY_HTML = 0x1013
MAPI_NATIVE_BODY = 0x1016
MAPI_SMTP_MESSAGE_ID = 0x1035
MAPI_INTERNET_REFERENCES = 0x1039
MAPI_IN_REPLY_TO_ID = 0x1042
MAPI_INTERNET_RETURN_PATH = 0x1046
MAPI_ICON_INDEX = 0x1080
MAPI_LAST_VERB_EXECUTED = 0x1081
MAPI_LAST_VERB_EXECUTION_TIME = 0x1082
MAPI_URL_COMP_NAME = 0x10F3
MAPI_ATTRIBUTE_HIDDEN = 0x10F4
MAPI_ATTRIBUTE_SYSTEM = 0x10F5
MAPI_ATTRIBUTE_READ_ONLY = 0x10F6
MAPI_ROWID = 0x3000
MAPI_DISPLAY_NAME = 0x3001
MAPI_ADDRTYPE = 0x3002
MAPI_EMAIL_ADDRESS = 0x3003
MAPI_COMMENT = 0x3004
MAPI_DEPTH = 0x3005
MAPI_PROVIDER_DISPLAY = 0x3006
MAPI_CREATION_TIME = 0x3007
MAPI_LAST_MODIFICATION_TIME = 0x3008
MAPI_RESOURCE_FLAGS = 0x3009
MAPI_PROVIDER_DLL_NAME = 0x300A
MAPI_SEARCH_KEY = 0x300B
MAPI_PROVIDER_UID = 0x300C
MAPI_PROVIDER_ORDINAL = 0x300D
MAPI_TARGET_ENTRY_ID = 0x3010
MAPI_CONVERSATION_ID = 0x3013
MAPI_CONVERSATION_INDEX_TRACKING = 0x3016
MAPI_FORM_VERSION = 0x3301
MAPI_FORM_CLSID = 0x3302
MAPI_FORM_CONTACT_NAME = 0x3303
MAPI_FORM_CATEGORY = 0x3304
MAPI_FORM_CATEGORY_SUB = 0x3305
MAPI_FORM_HOST_MAP = 0x3306
MAPI_FORM_HIDDEN = 0x3307
MAPI_FORM_DESIGNER_NAME = 0x3308
MAPI_FORM_DESIGNER_GUID = 0x3309
MAPI_FORM_MESSAGE_BEHAVIOR = 0x330A
MAPI_DEFAULT_STORE = 0x3400
MAPI_STORE_SUPPORT_MASK = 0x340D
MAPI_STORE_STATE = 0x340E
MAPI_STORE_UNICODE_MASK = 0x340F
MAPI_IPM_SUBTREE_SEARCH_KEY = 0x3410
MAPI_IPM_OUTBOX_SEARCH_KEY = 0x3411
MAPI_IPM_WASTEBASKET_SEARCH_KEY = 0x3412
MAPI_IPM_SENTMAIL_SEARCH_KEY = 0x3413
MAPI_MDB_PROVIDER = 0x3414
MAPI_RECEIVE_FOLDER_SETTINGS = 0x3415
MAPI_VALID_FOLDER_MASK = 0x35DF
MAPI_IPM_SUBTREE_ENTRYID = 0x35E0
MAPI_IPM_OUTBOX_ENTRYID = 0x35E2
MAPI_IPM_WASTEBASKET_ENTRYID = 0x35E3
MAPI_IPM_SENTMAIL_ENTRYID = 0x35E4
MAPI_VIEWS_ENTRYID = 0x35E5
MAPI_COMMON_VIEWS_ENTRYID = 0x35E6
MAPI_FINDER_ENTRYID = 0x35E7
MAPI_CONTAINER_FLAGS = 0x3600
MAPI_FOLDER_TYPE = 0x3601
MAPI_CONTENT_COUNT = 0x3602
MAPI_CONTENT_UNREAD = 0x3603
MAPI_CREATE_TEMPLATES = 0x3604
MAPI_DETAILS_TABLE = 0x3605
MAPI_SEARCH = 0x3607
MAPI_SELECTABLE = 0x3609
MAPI_SUBFOLDERS = 0x360A
MAPI_STATUS = 0x360B
MAPI_ANR = 0x360C
MAPI_CONTENTS_SORT_ORDER = 0x360D
MAPI_CONTAINER_HIERARCHY = 0x360E
MAPI_CONTAINER_CONTENTS = 0x360F
MAPI_FOLDER_ASSOCIATED_CONTENTS = 0x3610
MAPI_DEF_CREATE_DL = 0x3611
MAPI_DEF_CREATE_MAILUSER = 0x3612
MAPI_CONTAINER_CLASS = 0x3613
MAPI_CONTAINER_MODIFY_VERSION = 0x3614
MAPI_AB_PROVIDER_ID = 0x3615
MAPI_DEFAULT_VIEW_ENTRYID = 0x3616
MAPI_ASSOC_CONTENT_COUNT = 0x3617
MAPI_ATTACHMENT_X400_PARAMETERS = 0x3700
MAPI_ATTACH_DATA_OBJ = 0x3701
MAPI_ATTACH_ENCODING = 0x3702
MAPI_ATTACH_EXTENSION = 0x3703
MAPI_ATTACH_FILENAME = 0x3704
MAPI_ATTACH_METHOD = 0x3705
MAPI_ATTACH_LONG_FILENAME = 0x3707
MAPI_ATTACH_PATHNAME = 0x3708
MAPI_ATTACH_RENDERING = 0x3709
MAPI_ATTACH_TAG = 0x370A
MAPI_RENDERING_POSITION = 0x370B
MAPI_ATTACH_TRANSPORT_NAME = 0x370C
MAPI_ATTACH_LONG_PATHNAME = 0x370D
MAPI_ATTACH_MIME_TAG = 0x370E
MAPI_ATTACH_ADDITIONAL_INFO = 0x370F
MAPI_ATTACH_MIME_SEQUENCE = 0x3710
MAPI_ATTACH_CONTENT_ID = 0x3712
MAPI_ATTACH_CONTENT_LOCATION = 0x3713
MAPI_ATTACH_FLAGS = 0x3714
MAPI_DISPLAY_TYPE = 0x3900
MAPI_TEMPLATEID = 0x3902
MAPI_PRIMARY_CAPABILITY = 0x3904
MAPI_SMTP_ADDRESS = 0x39FE
MAPI_7BIT_DISPLAY_NAME = 0x39FF
MAPI_ACCOUNT = 0x3A00
MAPI_ALTERNATE_RECIPIENT = 0x3A01
MAPI_CALLBACK_TELEPHONE_NUMBER = 0x3A02
MAPI_CONVERSION_PROHIBITED = 0x3A03
MAPI_DISCLOSE_RECIPIENTS = 0x3A04
MAPI_GENERATION = 0x3A05
MAPI_GIVEN_NAME = 0x3A06
MAPI_GOVERNMENT_ID_NUMBER = 0x3A07
MAPI_BUSINESS_TELEPHONE_NUMBER = 0x3A08
MAPI_HOME_TELEPHONE_NUMBER = 0x3A09
MAPI_INITIALS = 0x3A0A
MAPI_KEYWORD = 0x3A0B
MAPI_LANGUAGE = 0x3A0C
MAPI_LOCATION = 0x3A0D
MAPI_MAIL_PERMISSION = 0x3A0E
MAPI_MHS_COMMON_NAME = 0x3A0F
MAPI_ORGANIZATIONAL_ID_NUMBER = 0x3A10
MAPI_SURNAME = 0x3A11
MAPI_ORIGINAL_ENTRYID = 0x3A12
MAPI_ORIGINAL_DISPLAY_NAME = 0x3A13
MAPI_ORIGINAL_SEARCH_KEY = 0x3A14
MAPI_POSTAL_ADDRESS = 0x3A15
MAPI_COMPANY_NAME = 0x3A16
MAPI_TITLE = 0x3A17
MAPI_DEPARTMENT_NAME = 0x3A18
MAPI_OFFICE_LOCATION = 0x3A19
MAPI_PRIMARY_TELEPHONE_NUMBER = 0x3A1A
MAPI_BUSINESS2_TELEPHONE_NUMBER = 0x3A1B
MAPI_MOBILE_TELEPHONE_NUMBER = 0x3A1C
MAPI_RADIO_TELEPHONE_NUMBER = 0x3A1D
MAPI_CAR_TELEPHONE_NUMBER = 0x3A1E
MAPI_OTHER_TELEPHONE_NUMBER = 0x3A1F
MAPI_TRANSMITABLE_DISPLAY_NAME = 0x3A20
MAPI_PAGER_TELEPHONE_NUMBER = 0x3A21
MAPI_USER_CERTIFICATE = 0x3A22
MAPI_PRIMARY_FAX_NUMBER = 0x3A23
MAPI_BUSINESS_FAX_NUMBER = 0x3A24
MAPI_HOME_FAX_NUMBER = 0x3A25
MAPI_COUNTRY = 0x3A26
MAPI_LOCALITY = 0x3A27
MAPI_STATE_OR_PROVINCE = 0x3A28
MAPI_STREET_ADDRESS = 0x3A29
MAPI_POSTAL_CODE = 0x3A2A
MAPI_POST_OFFICE_BOX = 0x3A2B
MAPI_TELEX_NUMBER = 0x3A2C
MAPI_ISDN_NUMBER = 0x3A2D
MAPI_ASSISTANT_TELEPHONE_NUMBER = 0x3A2E
MAPI_HOME2_TELEPHONE_NUMBER = 0x3A2F
MAPI_ASSISTANT = 0x3A30
MAPI_SEND_RICH_INFO = 0x3A40
MAPI_WEDDING_ANNIVERSARY = 0x3A41
MAPI_BIRTHDAY = 0x3A42
MAPI_HOBBIES = 0x3A43
MAPI_MIDDLE_NAME = 0x3A44
MAPI_DISPLAY_NAME_PREFIX = 0x3A45
MAPI_PROFESSION = 0x3A46
MAPI_PREFERRED_BY_NAME = 0x3A47
MAPI_SPOUSE_NAME = 0x3A48
MAPI_COMPUTER_NETWORK_NAME = 0x3A49
MAPI_CUSTOMER_ID = 0x3A4A
MAPI_TTYTDD_PHONE_NUMBER = 0x3A4B
MAPI_FTP_SITE = 0x3A4C
MAPI_GENDER = 0x3A4D
MAPI_MANAGER_NAME = 0x3A4E
MAPI_NICKNAME = 0x3A4F
MAPI_PERSONAL_HOME_PAGE = 0x3A50
MAPI_BUSINESS_HOME_PAGE = 0x3A51
MAPI_CONTACT_VERSION = 0x3A52
MAPI_CONTACT_ENTRYIDS = 0x3A53
MAPI_CONTACT_ADDRTYPES = 0x3A54
MAPI_CONTACT_DEFAULT_ADDRESS_INDEX = 0x3A55
MAPI_CONTACT_EMAIL_ADDRESSES = 0x3A56
MAPI_COMPANY_MAIN_PHONE_NUMBER = 0x3A57
MAPI_CHILDRENS_NAMES = 0x3A58
MAPI_HOME_ADDRESS_CITY = 0x3A59
MAPI_HOME_ADDRESS_COUNTRY = 0x3A5A
MAPI_HOME_ADDRESS_POSTAL_CODE = 0x3A5B
MAPI_HOME_ADDRESS_STATE_OR_PROVINCE = 0x3A5C
MAPI_HOME_ADDRESS_STREET = 0x3A5D
MAPI_HOME_ADDRESS_POST_OFFICE_BOX = 0x3A5E
MAPI_OTHER_ADDRESS_CITY = 0x3A5F
MAPI_OTHER_ADDRESS_COUNTRY = 0x3A60
MAPI_OTHER_ADDRESS_POSTAL_CODE = 0x3A61
MAPI_OTHER_ADDRESS_STATE_OR_PROVINCE = 0x3A62
MAPI_OTHER_ADDRESS_STREET = 0x3A63
MAPI_OTHER_ADDRESS_POST_OFFICE_BOX = 0x3A64
MAPI_SEND_INTERNET_ENCODING = 0x3A71
MAPI_STORE_PROVIDERS = 0x3D00
MAPI_AB_PROVIDERS = 0x3D01
MAPI_TRANSPORT_PROVIDERS = 0x3D02
MAPI_DEFAULT_PROFILE = 0x3D04
MAPI_AB_SEARCH_PATH = 0x3D05
MAPI_AB_DEFAULT_DIR = 0x3D06
MAPI_AB_DEFAULT_PAB = 0x3D07
MAPI_FILTERING_HOOKS = 0x3D08
MAPI_SERVICE_NAME = 0x3D09
MAPI_SERVICE_DLL_NAME = 0x3D0A
MAPI_SERVICE_ENTRY_NAME = 0x3D0B
MAPI_SERVICE_UID = 0x3D0C
MAPI_SERVICE_EXTRA_UIDS = 0x3D0D
MAPI_SERVICES = 0x3D0E
MAPI_SERVICE_SUPPORT_FILES = 0x3D0F
MAPI_SERVICE_DELETE_FILES = 0x3D10
MAPI_AB_SEARCH_PATH_UPDATE = 0x3D11
MAPI_PROFILE_NAME = 0x3D12
MAPI_IDENTITY_DISPLAY = 0x3E00
MAPI_IDENTITY_ENTRYID = 0x3E01
MAPI_RESOURCE_METHODS = 0x3E02
MAPI_RESOURCE_TYPE = 0x3E03
MAPI_STATUS_CODE = 0x3E04
MAPI_IDENTITY_SEARCH_KEY = 0x3E05
MAPI_OWN_STORE_ENTRYID = 0x3E06
MAPI_RESOURCE_PATH = 0x3E07
MAPI_STATUS_STRING = 0x3E08
MAPI_X400_DEFERRED_DELIVERY_CANCEL = 0x3E09
MAPI_HEADER_FOLDER_ENTRYID = 0x3E0A
MAPI_REMOTE_PROGRESS = 0x3E0B
MAPI_REMOTE_PROGRESS_TEXT = 0x3E0C
MAPI_REMOTE_VALIDATE_OK = 0x3E0D
MAPI_CONTROL_FLAGS = 0x3F00
MAPI_CONTROL_STRUCTURE = 0x3F01
MAPI_CONTROL_TYPE = 0x3F02
MAPI_DELTAX = 0x3F03
MAPI_DELTAY = 0x3F04
MAPI_XPOS = 0x3F05
MAPI_YPOS = 0x3F06
MAPI_CONTROL_ID = 0x3F07
MAPI_INITIAL_DETAILS_PANE = 0x3F08
MAPI_UNCOMPRESSED_BODY = 0x3FD9
MAPI_INTERNET_CODEPAGE = 0x3FDE
MAPI_AUTO_RESPONSE_SUPPRESS = 0x3FDF
MAPI_MESSAGE_LOCALE_ID = 0x3FF1
MAPI_RULE_TRIGGER_HISTORY = 0x3FF2
MAPI_MOVE_TO_STORE_ENTRYID = 0x3FF3
MAPI_MOVE_TO_FOLDER_ENTRYID = 0x3FF4
MAPI_STORAGE_QUOTA_LIMIT = 0x3FF5
MAPI_EXCESS_STORAGE_USED = 0x3FF6
MAPI_SVR_GENERATING_QUOTA_MSG = 0x3FF7
MAPI_CREATOR_NAME = 0x3FF8
MAPI_CREATOR_ENTRY_ID = 0x3FF9
MAPI_LAST_MODIFIER_NAME = 0x3FFA
MAPI_LAST_MODIFIER_ENTRY_ID = 0x3FFB
MAPI_REPLY_RECIPIENT_SMTP_PROXIES = 0x3FFC
MAPI_MESSAGE_CODEPAGE = 0x3FFD
MAPI_EXTENDED_ACL_DATA = 0x3FFE
MAPI_SENDER_FLAGS = 0x4019
MAPI_SENT_REPRESENTING_FLAGS = 0x401A
MAPI_RECEIVED_BY_FLAGS = 0x401B
MAPI_RECEIVED_REPRESENTING_FLAGS = 0x401C
MAPI_CREATOR_ADDRESS_TYPE = 0x4022
MAPI_CREATOR_EMAIL_ADDRESS = 0x4023
MAPI_SENDER_SIMPLE_DISPLAY_NAME = 0x4030
MAPI_SENT_REPRESENTING_SIMPLE_DISPLAY_NAME = 0x4031
MAPI_RECEIVED_REPRESENTING_SIMPLE_DISPLAY_NAME = 0x4035
MAPI_CREATOR_SIMPLE_DISP_NAME = 0x4038
MAPI_LAST_MODIFIER_SIMPLE_DISPLAY_NAME = 0x4039
MAPI_CONTENT_FILTER_SPAM_CONFIDENCE_LEVEL = 0x4076
MAPI_INTERNET_MAIL_OVERRIDE_FORMAT = 0x5902
MAPI_MESSAGE_EDITOR_FORMAT = 0x5909
MAPI_SENDER_SMTP_ADDRESS = 0x5D01
MAPI_SENT_REPRESENTING_SMTP_ADDRESS = 0x5D02
MAPI_READ_RECEIPT_SMTP_ADDRESS = 0x5D05
MAPI_RECEIVED_BY_SMTP_ADDRESS = 0x5D07
MAPI_RECEIVED_REPRESENTING_SMTP_ADDRESS = 0x5D08
MAPI_SENDING_SMTP_ADDRESS = 0x5D09
MAPI_SIP_ADDRESS = 0x5FE5
MAPI_RECIPIENT_DISPLAY_NAME = 0x5FF6
MAPI_RECIPIENT_ENTRYID = 0x5FF7
MAPI_RECIPIENT_FLAGS = 0x5FFD
MAPI_RECIPIENT_TRACKSTATUS = 0x5FFF
MAPI_CHANGE_KEY = 0x65E2
MAPI_PREDECESSOR_CHANGE_LIST = 0x65E3
MAPI_ID_SECURE_MIN = 0x67F0
MAPI_ID_SECURE_MAX = 0x67FF
MAPI_VOICE_MESSAGE_DURATION = 0x6801
MAPI_SENDER_TELEPHONE_NUMBER = 0x6802
MAPI_VOICE_MESSAGE_SENDER_NAME = 0x6803
MAPI_FAX_NUMBER_OF_PAGES = 0x6804
MAPI_VOICE_MESSAGE_ATTACHMENT_ORDER = 0x6805
MAPI_CALL_ID = 0x6806
MAPI_ATTACHMENT_LINK_ID = 0x7FFA
MAPI_EXCEPTION_START_TIME = 0x7FFB
MAPI_EXCEPTION_END_TIME = 0x7FFC
MAPI_ATTACHMENT_FLAGS = 0x7FFD
MAPI_ATTACHMENT_HIDDEN = 0x7FFE
MAPI_ATTACHMENT_CONTACT_PHOTO = 0x7FFF
MAPI_FILE_UNDER = 0x8005
MAPI_FILE_UNDER_ID = 0x8006
MAPI_CONTACT_ITEM_DATA = 0x8007
MAPI_REFERRED_BY = 0x800E
MAPI_DEPARTMENT = 0x8010
MAPI_HAS_PICTURE = 0x8015
MAPI_HOME_ADDRESS = 0x801A
MAPI_WORK_ADDRESS = 0x801B
MAPI_OTHER_ADDRESS = 0x801C
MAPI_POSTAL_ADDRESS_ID = 0x8022
MAPI_CONTACT_CHARACTER_SET = 0x8023
MAPI_AUTO_LOG = 0x8025
MAPI_FILE_UNDER_LIST = 0x8026
MAPI_EMAIL_LIST = 0x8027
MAPI_ADDRESS_BOOK_PROVIDER_EMAIL_LIST = 0x8028
MAPI_ADDRESS_BOOK_PROVIDER_ARRAY_TYPE = 0x8029
MAPI_HTML = 0x802B
MAPI_YOMI_FIRST_NAME = 0x802C
MAPI_YOMI_LAST_NAME = 0x802D
MAPI_YOMI_COMPANY_NAME = 0x802E
MAPI_BUSINESS_CARD_DISPLAY_DEFINITION = 0x8040
MAPI_BUSINESS_CARD_CARD_PICTURE = 0x8041
MAPI_WORK_ADDRESS_STREET = 0x8045
MAPI_WORK_ADDRESS_CITY = 0x8046
MAPI_WORK_ADDRESS_STATE = 0x8047
MAPI_WORK_ADDRESS_POSTAL_CODE = 0x8048
MAPI_WORK_ADDRESS_COUNTRY = 0x8049
MAPI_WORK_ADDRESS_POST_OFFICE_BOX = 0x804A
MAPI_DISTRIBUTION_LIST_CHECKSUM = 0x804C
MAPI_BIRTHDAY_EVENT_ENTRY_ID = 0x804D
MAPI_ANNIVERSARY_EVENT_ENTRY_ID = 0x804E
MAPI_CONTACT_USER_FIELD1 = 0x804F
MAPI_CONTACT_USER_FIELD2 = 0x8050
MAPI_CONTACT_USER_FIELD3 = 0x8051
MAPI_CONTACT_USER_FIELD4 = 0x8052
MAPI_DISTRIBUTION_LIST_NAME = 0x8053
MAPI_DISTRIBUTION_LIST_ONE_OFF_MEMBERS = 0x8054
MAPI_DISTRIBUTION_LIST_MEMBERS = 0x8055
MAPI_INSTANT_MESSAGING_ADDRESS = 0x8062
MAPI_DISTRIBUTION_LIST_STREAM = 0x8064
MAPI_EMAIL_DISPLAY_NAME = 0x8080
MAPI_EMAIL_ADDR_TYPE = 0x8082
MAPI_EMAIL_EMAIL_ADDRESS = 0x8083
MAPI_EMAIL_ORIGINAL_DISPLAY_NAME = 0x8084
MAPI_EMAIL1ORIGINAL_ENTRY_ID = 0x8085
MAPI_EMAIL1RICH_TEXT_FORMAT = 0x8086
MAPI_EMAIL1EMAIL_TYPE = 0x8087
MAPI_EMAIL2DISPLAY_NAME = 0x8090
MAPI_EMAIL2ENTRY_ID = 0x8091
MAPI_EMAIL2ADDR_TYPE = 0x8092
MAPI_EMAIL2EMAIL_ADDRESS = 0x8093
MAPI_EMAIL2ORIGINAL_DISPLAY_NAME = 0x8094
MAPI_EMAIL2ORIGINAL_ENTRY_ID = 0x8095
MAPI_EMAIL2RICH_TEXT_FORMAT = 0x8096
MAPI_EMAIL3DISPLAY_NAME = 0x80A0
MAPI_EMAIL3ENTRY_ID = 0x80A1
MAPI_EMAIL3ADDR_TYPE = 0x80A2
MAPI_EMAIL3EMAIL_ADDRESS = 0x80A3
MAPI_EMAIL3ORIGINAL_DISPLAY_NAME = 0x80A4
MAPI_EMAIL3ORIGINAL_ENTRY_ID = 0x80A5
MAPI_EMAIL3RICH_TEXT_FORMAT = 0x80A6
MAPI_FAX1ADDRESS_TYPE = 0x80B2
MAPI_FAX1EMAIL_ADDRESS = 0x80B3
MAPI_FAX1ORIGINAL_DISPLAY_NAME = 0x80B4
MAPI_FAX1ORIGINAL_ENTRY_ID = 0x80B5
MAPI_FAX2ADDRESS_TYPE = 0x80C2
MAPI_FAX2EMAIL_ADDRESS = 0x80C3
MAPI_FAX2ORIGINAL_DISPLAY_NAME = 0x80C4
MAPI_FAX2ORIGINAL_ENTRY_ID = 0x80C5
MAPI_FAX3ADDRESS_TYPE = 0x80D2
MAPI_FAX3EMAIL_ADDRESS = 0x80D3
MAPI_FAX3ORIGINAL_DISPLAY_NAME = 0x80D4
MAPI_FAX3ORIGINAL_ENTRY_ID = 0x80D5
MAPI_FREE_BUSY_LOCATION = 0x80D8
MAPI_HOME_ADDRESS_COUNTRY_CODE = 0x80DA
MAPI_WORK_ADDRESS_COUNTRY_CODE = 0x80DB
MAPI_OTHER_ADDRESS_COUNTRY_CODE = 0x80DC
MAPI_ADDRESS_COUNTRY_CODE = 0x80DD
MAPI_BIRTHDAY_LOCAL = 0x80DE
MAPI_WEDDING_ANNIVERSARY_LOCAL = 0x80DF
MAPI_TASK_STATUS = 0x8101
MAPI_TASK_START_DATE = 0x8104
MAPI_TASK_DUE_DATE = 0x8105
MAPI_TASK_ACTUAL_EFFORT = 0x8110
MAPI_TASK_ESTIMATED_EFFORT = 0x8111
MAPI_TASK_FRECUR = 0x8126
MAPI_SEND_MEETING_AS_ICAL = 0x8200
MAPI_APPOINTMENT_SEQUENCE = 0x8201
MAPI_APPOINTMENT_SEQUENCE_TIME = 0x8202
MAPI_APPOINTMENT_LAST_SEQUENCE = 0x8203
MAPI_CHANGE_HIGHLIGHT = 0x8204
MAPI_BUSY_STATUS = 0x8205
MAPI_FEXCEPTIONAL_BODY = 0x8206
MAPI_APPOINTMENT_AUXILIARY_FLAGS = 0x8207
MAPI_OUTLOOK_LOCATION = 0x8208
MAPI_MEETING_WORKSPACE_URL = 0x8209
MAPI_FORWARD_INSTANCE = 0x820A
MAPI_LINKED_TASK_ITEMS = 0x820C
MAPI_APPT_START_WHOLE = 0x820D
MAPI_APPT_END_WHOLE = 0x820E
MAPI_APPOINTMENT_START_TIME = 0x820F
MAPI_APPOINTMENT_END_TIME = 0x8210
MAPI_APPOINTMENT_END_DATE = 0x8211
MAPI_APPOINTMENT_START_DATE = 0x8212
MAPI_APPT_DURATION = 0x8213
MAPI_APPOINTMENT_COLOR = 0x8214
MAPI_APPOINTMENT_SUB_TYPE = 0x8215
MAPI_APPOINTMENT_RECUR = 0x8216
MAPI_APPOINTMENT_STATE_FLAGS = 0x8217
MAPI_RESPONSE_STATUS = 0x8218
MAPI_APPOINTMENT_REPLY_TIME = 0x8220
MAPI_RECURRING = 0x8223
MAPI_INTENDED_BUSY_STATUS = 0x8224
MAPI_APPOINTMENT_UPDATE_TIME = 0x8226
MAPI_EXCEPTION_REPLACE_TIME = 0x8228
MAPI_OWNER_NAME = 0x822E
MAPI_APPOINTMENT_REPLY_NAME = 0x8230
MAPI_RECURRENCE_TYPE = 0x8231
MAPI_RECURRENCE_PATTERN = 0x8232
MAPI_TIME_ZONE_STRUCT = 0x8233
MAPI_TIME_ZONE_DESCRIPTION = 0x8234
MAPI_CLIP_START = 0x8235
MAPI_CLIP_END = 0x8236
MAPI_ORIGINAL_STORE_ENTRY_ID = 0x8237
MAPI_ALL_ATTENDEES_STRING = 0x8238
MAPI_AUTO_FILL_LOCATION = 0x823A
MAPI_TO_ATTENDEES_STRING = 0x823B
MAPI_CCATTENDEES_STRING = 0x823C
MAPI_CONF_CHECK = 0x8240
MAPI_CONFERENCING_TYPE = 0x8241
MAPI_DIRECTORY = 0x8242
MAPI_ORGANIZER_ALIAS = 0x8243
MAPI_AUTO_START_CHECK = 0x8244
MAPI_AUTO_START_WHEN = 0x8245
MAPI_ALLOW_EXTERNAL_CHECK = 0x8246
MAPI_COLLABORATE_DOC = 0x8247
MAPI_NET_SHOW_URL = 0x8248
MAPI_ONLINE_PASSWORD = 0x8249
MAPI_APPOINTMENT_PROPOSED_DURATION = 0x8256
MAPI_APPT_COUNTER_PROPOSAL = 0x8257
MAPI_APPOINTMENT_PROPOSAL_NUMBER = 0x8259
MAPI_APPOINTMENT_NOT_ALLOW_PROPOSE = 0x825A
MAPI_APPT_TZDEF_START_DISPLAY = 0x825E
MAPI_APPT_TZDEF_END_DISPLAY = 0x825F
MAPI_APPT_TZDEF_RECUR = 0x8260
MAPI_REMINDER_MINUTES_BEFORE_START = 0x8501
MAPI_REMINDER_TIME = 0x8502
MAPI_REMINDER_SET = 0x8503
MAPI_PRIVATE = 0x8506
MAPI_AGING_DONT_AGE_ME = 0x850E
MAPI_FORM_STORAGE = 0x850F
MAPI_SIDE_EFFECTS = 0x8510
MAPI_REMOTE_STATUS = 0x8511
MAPI_PAGE_DIR_STREAM = 0x8513
MAPI_SMART_NO_ATTACH = 0x8514
MAPI_COMMON_START = 0x8516
MAPI_COMMON_END = 0x8517
MAPI_TASK_MODE = 0x8518
MAPI_FORM_PROP_STREAM = 0x851B
MAPI_REQUEST = 0x8530
MAPI_NON_SENDABLE_TO = 0x8536
MAPI_NON_SENDABLE_CC = 0x8537
MAPI_NON_SENDABLE_BCC = 0x8538
MAPI_COMPANIES = 0x8539
MAPI_CONTACTS = 0x853A
MAPI_PROP_DEF_STREAM = 0x8540
MAPI_SCRIPT_STREAM = 0x8541
MAPI_CUSTOM_FLAG = 0x8542
MAPI_OUTLOOK_CURRENT_VERSION = 0x8552
MAPI_CURRENT_VERSION_NAME = 0x8554
MAPI_REMINDER_NEXT_TIME = 0x8560
MAPI_HEADER_ITEM = 0x8578
MAPI_USE_TNEF = 0x8582
MAPI_TO_DO_TITLE = 0x85A4
MAPI_VALID_FLAG_STRING_PROOF = 0x85BF
MAPI_LOG_TYPE = 0x8700
MAPI_LOG_START = 0x8706
MAPI_LOG_DURATION = 0x8707
MAPI_LOG_END = 0x8708
CODE_TO_NAME = {
MAPI_ACKNOWLEDGEMENT_MODE: "MAPI_ACKNOWLEDGEMENT_MODE",
MAPI_ALTERNATE_RECIPIENT_ALLOWED: "MAPI_ALTERNATE_RECIPIENT_ALLOWED",
MAPI_AUTHORIZING_USERS: "MAPI_AUTHORIZING_USERS",
MAPI_AUTO_FORWARD_COMMENT: "MAPI_AUTO_FORWARD_COMMENT",
MAPI_AUTO_FORWARDED: "MAPI_AUTO_FORWARDED",
MAPI_CONTENT_CONFIDENTIALITY_ALGORITHM_ID: "MAPI_CONTENT_CONFIDENTIALITY_ALGORITHM_ID",
MAPI_CONTENT_CORRELATOR: "MAPI_CONTENT_CORRELATOR",
MAPI_CONTENT_IDENTIFIER: "MAPI_CONTENT_IDENTIFIER",
MAPI_CONTENT_LENGTH: "MAPI_CONTENT_LENGTH",
MAPI_CONTENT_RETURN_REQUESTED: "MAPI_CONTENT_RETURN_REQUESTED",
MAPI_CONVERSATION_KEY: "MAPI_CONVERSATION_KEY",
MAPI_CONVERSION_EITS: "MAPI_CONVERSION_EITS",
MAPI_CONVERSION_WITH_LOSS_PROHIBITED: "MAPI_CONVERSION_WITH_LOSS_PROHIBITED",
MAPI_CONVERTED_EITS: "MAPI_CONVERTED_EITS",
MAPI_DEFERRED_DELIVERY_TIME: "MAPI_DEFERRED_DELIVERY_TIME",
MAPI_DELIVER_TIME: "MAPI_DELIVER_TIME",
MAPI_DISCARD_REASON: "MAPI_DISCARD_REASON",
MAPI_DISCLOSURE_OF_RECIPIENTS: "MAPI_DISCLOSURE_OF_RECIPIENTS",
MAPI_DL_EXPANSION_HISTORY: "MAPI_DL_EXPANSION_HISTORY",
MAPI_DL_EXPANSION_PROHIBITED: "MAPI_DL_EXPANSION_PROHIBITED",
MAPI_EXPIRY_TIME: "MAPI_EXPIRY_TIME",
MAPI_IMPLICIT_CONVERSION_PROHIBITED: "MAPI_IMPLICIT_CONVERSION_PROHIBITED",
MAPI_IMPORTANCE: "MAPI_IMPORTANCE",
MAPI_IPM_ID: "MAPI_IPM_ID",
MAPI_LATEST_DELIVERY_TIME: "MAPI_LATEST_DELIVERY_TIME",
MAPI_MESSAGE_CLASS: "MAPI_MESSAGE_CLASS",
MAPI_MESSAGE_DELIVERY_ID: "MAPI_MESSAGE_DELIVERY_ID",
MAPI_MESSAGE_SECURITY_LABEL: "MAPI_MESSAGE_SECURITY_LABEL",
MAPI_OBSOLETED_IPMS: "MAPI_OBSOLETED_IPMS",
MAPI_ORIGINALLY_INTENDED_RECIPIENT_NAME: "MAPI_ORIGINALLY_INTENDED_RECIPIENT_NAME",
MAPI_ORIGINAL_EITS: "MAPI_ORIGINAL_EITS",
MAPI_ORIGINATOR_CERTIFICATE: "MAPI_ORIGINATOR_CERTIFICATE",
MAPI_ORIGINATOR_DELIVERY_REPORT_REQUESTED: "MAPI_ORIGINATOR_DELIVERY_REPORT_REQUESTED",
MAPI_ORIGINATOR_RETURN_ADDRESS: "MAPI_ORIGINATOR_RETURN_ADDRESS",
MAPI_PARENT_KEY: "MAPI_PARENT_KEY",
MAPI_PRIORITY: "MAPI_PRIORITY",
MAPI_ORIGIN_CHECK: "MAPI_ORIGIN_CHECK",
MAPI_PROOF_OF_SUBMISSION_REQUESTED: "MAPI_PROOF_OF_SUBMISSION_REQUESTED",
MAPI_READ_RECEIPT_REQUESTED: "MAPI_READ_RECEIPT_REQUESTED",
MAPI_RECEIPT_TIME: "MAPI_RECEIPT_TIME",
MAPI_RECIPIENT_REASSIGNMENT_PROHIBITED: "MAPI_RECIPIENT_REASSIGNMENT_PROHIBITED",
MAPI_REDIRECTION_HISTORY: "MAPI_REDIRECTION_HISTORY",
MAPI_RELATED_IPMS: "MAPI_RELATED_IPMS",
MAPI_ORIGINAL_SENSITIVITY: "MAPI_ORIGINAL_SENSITIVITY",
MAPI_LANGUAGES: "MAPI_LANGUAGES",
MAPI_REPLY_TIME: "MAPI_REPLY_TIME",
MAPI_REPORT_TAG: "MAPI_REPORT_TAG",
MAPI_REPORT_TIME: "MAPI_REPORT_TIME",
MAPI_RETURNED_IPM: "MAPI_RETURNED_IPM",
MAPI_SECURITY: "MAPI_SECURITY",
MAPI_INCOMPLETE_COPY: "MAPI_INCOMPLETE_COPY",
MAPI_SENSITIVITY: "MAPI_SENSITIVITY",
MAPI_SUBJECT: "MAPI_SUBJECT",
MAPI_SUBJECT_IPM: "MAPI_SUBJECT_IPM",
MAPI_CLIENT_SUBMIT_TIME: "MAPI_CLIENT_SUBMIT_TIME",
MAPI_REPORT_NAME: "MAPI_REPORT_NAME",
MAPI_SENT_REPRESENTING_SEARCH_KEY: "MAPI_SENT_REPRESENTING_SEARCH_KEY",
MAPI_X400_CONTENT_TYPE: "MAPI_X400_CONTENT_TYPE",
MAPI_SUBJECT_PREFIX: "MAPI_SUBJECT_PREFIX",
MAPI_NON_RECEIPT_REASON: "MAPI_NON_RECEIPT_REASON",
MAPI_RECEIVED_BY_ENTRYID: "MAPI_RECEIVED_BY_ENTRYID",
MAPI_RECEIVED_BY_NAME: "MAPI_RECEIVED_BY_NAME",
MAPI_SENT_REPRESENTING_ENTRYID: "MAPI_SENT_REPRESENTING_ENTRYID",
MAPI_SENT_REPRESENTING_NAME: "MAPI_SENT_REPRESENTING_NAME",
MAPI_RCVD_REPRESENTING_ENTRYID: "MAPI_RCVD_REPRESENTING_ENTRYID",
MAPI_RCVD_REPRESENTING_NAME: "MAPI_RCVD_REPRESENTING_NAME",
MAPI_REPORT_ENTRYID: "MAPI_REPORT_ENTRYID",
MAPI_READ_RECEIPT_ENTRYID: "MAPI_READ_RECEIPT_ENTRYID",
MAPI_MESSAGE_SUBMISSION_ID: "MAPI_MESSAGE_SUBMISSION_ID",
MAPI_PROVIDER_SUBMIT_TIME: "MAPI_PROVIDER_SUBMIT_TIME",
MAPI_ORIGINAL_SUBJECT: "MAPI_ORIGINAL_SUBJECT",
MAPI_DISC_VAL: "MAPI_DISC_VAL",
MAPI_ORIG_MESSAGE_CLASS: "MAPI_ORIG_MESSAGE_CLASS",
MAPI_ORIGINAL_AUTHOR_ENTRYID: "MAPI_ORIGINAL_AUTHOR_ENTRYID",
MAPI_ORIGINAL_AUTHOR_NAME: "MAPI_ORIGINAL_AUTHOR_NAME",
MAPI_ORIGINAL_SUBMIT_TIME: "MAPI_ORIGINAL_SUBMIT_TIME",
MAPI_REPLY_RECIPIENT_ENTRIES: "MAPI_REPLY_RECIPIENT_ENTRIES",
MAPI_REPLY_RECIPIENT_NAMES: "MAPI_REPLY_RECIPIENT_NAMES",
MAPI_RECEIVED_BY_SEARCH_KEY: "MAPI_RECEIVED_BY_SEARCH_KEY",
MAPI_RCVD_REPRESENTING_SEARCH_KEY: "MAPI_RCVD_REPRESENTING_SEARCH_KEY",
MAPI_READ_RECEIPT_SEARCH_KEY: "MAPI_READ_RECEIPT_SEARCH_KEY",
MAPI_REPORT_SEARCH_KEY: "MAPI_REPORT_SEARCH_KEY",
MAPI_ORIGINAL_DELIVERY_TIME: "MAPI_ORIGINAL_DELIVERY_TIME",
MAPI_ORIGINAL_AUTHOR_SEARCH_KEY: "MAPI_ORIGINAL_AUTHOR_SEARCH_KEY",
MAPI_MESSAGE_TO_ME: "MAPI_MESSAGE_TO_ME",
MAPI_MESSAGE_CC_ME: "MAPI_MESSAGE_CC_ME",
MAPI_MESSAGE_RECIP_ME: "MAPI_MESSAGE_RECIP_ME",
MAPI_ORIGINAL_SENDER_NAME: "MAPI_ORIGINAL_SENDER_NAME",
MAPI_ORIGINAL_SENDER_ENTRYID: "MAPI_ORIGINAL_SENDER_ENTRYID",
MAPI_ORIGINAL_SENDER_SEARCH_KEY: "MAPI_ORIGINAL_SENDER_SEARCH_KEY",
MAPI_ORIGINAL_SENT_REPRESENTING_NAME: "MAPI_ORIGINAL_SENT_REPRESENTING_NAME",
MAPI_ORIGINAL_SENT_REPRESENTING_ENTRYID: "MAPI_ORIGINAL_SENT_REPRESENTING_ENTRYID",
MAPI_ORIGINAL_SENT_REPRESENTING_SEARCH_KEY: "MAPI_ORIGINAL_SENT_REPRESENTING_SEARCH_KEY",
MAPI_START_DATE: "MAPI_START_DATE",
MAPI_END_DATE: "MAPI_END_DATE",
MAPI_OWNER_APPT_ID: "MAPI_OWNER_APPT_ID",
MAPI_RESPONSE_REQUESTED: "MAPI_RESPONSE_REQUESTED",
MAPI_SENT_REPRESENTING_ADDRTYPE: "MAPI_SENT_REPRESENTING_ADDRTYPE",
MAPI_SENT_REPRESENTING_EMAIL_ADDRESS: "MAPI_SENT_REPRESENTING_EMAIL_ADDRESS",
MAPI_ORIGINAL_SENDER_ADDRTYPE: "MAPI_ORIGINAL_SENDER_ADDRTYPE",
MAPI_ORIGINAL_SENDER_EMAIL_ADDRESS: "MAPI_ORIGINAL_SENDER_EMAIL_ADDRESS",
MAPI_ORIGINAL_SENT_REPRESENTING_ADDRTYPE: "MAPI_ORIGINAL_SENT_REPRESENTING_ADDRTYPE",
MAPI_ORIGINAL_SENT_REPRESENTING_EMAIL_ADDRESS: "MAPI_ORIGINAL_SENT_REPRESENTING_EMAIL_ADDRESS",
MAPI_CONVERSATION_TOPIC: "MAPI_CONVERSATION_TOPIC",
MAPI_CONVERSATION_INDEX: "MAPI_CONVERSATION_INDEX",
MAPI_ORIGINAL_DISPLAY_BCC: "MAPI_ORIGINAL_DISPLAY_BCC",
MAPI_ORIGINAL_DISPLAY_CC: "MAPI_ORIGINAL_DISPLAY_CC",
MAPI_ORIGINAL_DISPLAY_TO: "MAPI_ORIGINAL_DISPLAY_TO",
MAPI_RECEIVED_BY_ADDRTYPE: "MAPI_RECEIVED_BY_ADDRTYPE",
MAPI_RECEIVED_BY_EMAIL_ADDRESS: "MAPI_RECEIVED_BY_EMAIL_ADDRESS",
MAPI_RCVD_REPRESENTING_ADDRTYPE: "MAPI_RCVD_REPRESENTING_ADDRTYPE",
MAPI_RCVD_REPRESENTING_EMAIL_ADDRESS: "MAPI_RCVD_REPRESENTING_EMAIL_ADDRESS",
MAPI_ORIGINAL_AUTHOR_ADDRTYPE: "MAPI_ORIGINAL_AUTHOR_ADDRTYPE",
MAPI_ORIGINAL_AUTHOR_EMAIL_ADDRESS: "MAPI_ORIGINAL_AUTHOR_EMAIL_ADDRESS",
MAPI_ORIGINALLY_INTENDED_RECIP_ADDRTYPE: "MAPI_ORIGINALLY_INTENDED_RECIP_ADDRTYPE",
MAPI_ORIGINALLY_INTENDED_RECIP_EMAIL_ADDRESS: "MAPI_ORIGINALLY_INTENDED_RECIP_EMAIL_ADDRESS",
MAPI_TRANSPORT_MESSAGE_HEADERS: "MAPI_TRANSPORT_MESSAGE_HEADERS",
MAPI_DELEGATION: "MAPI_DELEGATION",
MAPI_TNEF_CORRELATION_KEY: "MAPI_TNEF_CORRELATION_KEY",
MAPI_CONTENT_INTEGRITY_CHECK: "MAPI_CONTENT_INTEGRITY_CHECK",
MAPI_EXPLICIT_CONVERSION: "MAPI_EXPLICIT_CONVERSION",
MAPI_IPM_RETURN_REQUESTED: "MAPI_IPM_RETURN_REQUESTED",
MAPI_MESSAGE_TOKEN: "MAPI_MESSAGE_TOKEN",
MAPI_NDR_REASON_CODE: "MAPI_NDR_REASON_CODE",
MAPI_NDR_DIAG_CODE: "MAPI_NDR_DIAG_CODE",
MAPI_NON_RECEIPT_NOTIFICATION_REQUESTED: "MAPI_NON_RECEIPT_NOTIFICATION_REQUESTED",
MAPI_DELIVERY_POINT: "MAPI_DELIVERY_POINT",
MAPI_ORIGINATOR_NON_DELIVERY_REPORT_REQUESTED: "MAPI_ORIGINATOR_NON_DELIVERY_REPORT_REQUESTED",
MAPI_ORIGINATOR_REQUESTED_ALTERNATE_RECIPIENT: "MAPI_ORIGINATOR_REQUESTED_ALTERNATE_RECIPIENT",
MAPI_PHYSICAL_DELIVERY_BUREAU_FAX_DELIVERY: "MAPI_PHYSICAL_DELIVERY_BUREAU_FAX_DELIVERY",
MAPI_PHYSICAL_DELIVERY_MODE: "MAPI_PHYSICAL_DELIVERY_MODE",
MAPI_PHYSICAL_DELIVERY_REPORT_REQUEST: "MAPI_PHYSICAL_DELIVERY_REPORT_REQUEST",
MAPI_PHYSICAL_FORWARDING_ADDRESS: "MAPI_PHYSICAL_FORWARDING_ADDRESS",
MAPI_PHYSICAL_FORWARDING_ADDRESS_REQUESTED: "MAPI_PHYSICAL_FORWARDING_ADDRESS_REQUESTED",
MAPI_PHYSICAL_FORWARDING_PROHIBITED: "MAPI_PHYSICAL_FORWARDING_PROHIBITED",
MAPI_PHYSICAL_RENDITION_ATTRIBUTES: "MAPI_PHYSICAL_RENDITION_ATTRIBUTES",
MAPI_PROOF_OF_DELIVERY: "MAPI_PROOF_OF_DELIVERY",
MAPI_PROOF_OF_DELIVERY_REQUESTED: "MAPI_PROOF_OF_DELIVERY_REQUESTED",
MAPI_RECIPIENT_CERTIFICATE: "MAPI_RECIPIENT_CERTIFICATE",
MAPI_RECIPIENT_NUMBER_FOR_ADVICE: "MAPI_RECIPIENT_NUMBER_FOR_ADVICE",
MAPI_RECIPIENT_TYPE: "MAPI_RECIPIENT_TYPE",
MAPI_REGISTERED_MAIL_TYPE: "MAPI_REGISTERED_MAIL_TYPE",
MAPI_REPLY_REQUESTED: "MAPI_REPLY_REQUESTED",
MAPI_REQUESTED_DELIVERY_METHOD: "MAPI_REQUESTED_DELIVERY_METHOD",
MAPI_SENDER_ENTRYID: "MAPI_SENDER_ENTRYID",
MAPI_SENDER_NAME: "MAPI_SENDER_NAME",
MAPI_SUPPLEMENTARY_INFO: "MAPI_SUPPLEMENTARY_INFO",
MAPI_TYPE_OF_MTS_USER: "MAPI_TYPE_OF_MTS_USER",
MAPI_SENDER_SEARCH_KEY: "MAPI_SENDER_SEARCH_KEY",
MAPI_SENDER_ADDRTYPE: "MAPI_SENDER_ADDRTYPE",
MAPI_SENDER_EMAIL_ADDRESS: "MAPI_SENDER_EMAIL_ADDRESS",
MAPI_CURRENT_VERSION: "MAPI_CURRENT_VERSION",
MAPI_DELETE_AFTER_SUBMIT: "MAPI_DELETE_AFTER_SUBMIT",
MAPI_DISPLAY_BCC: "MAPI_DISPLAY_BCC",
MAPI_DISPLAY_CC: "MAPI_DISPLAY_CC",
MAPI_DISPLAY_TO: "MAPI_DISPLAY_TO",
MAPI_PARENT_DISPLAY: "MAPI_PARENT_DISPLAY",
MAPI_MESSAGE_DELIVERY_TIME: "MAPI_MESSAGE_DELIVERY_TIME",
MAPI_MESSAGE_FLAGS: "MAPI_MESSAGE_FLAGS",
MAPI_MESSAGE_SIZE: "MAPI_MESSAGE_SIZE",
MAPI_PARENT_ENTRYID: "MAPI_PARENT_ENTRYID",
MAPI_SENTMAIL_ENTRYID: "MAPI_SENTMAIL_ENTRYID",
MAPI_CORRELATE: "MAPI_CORRELATE",
MAPI_CORRELATE_MTSID: "MAPI_CORRELATE_MTSID",
MAPI_DISCRETE_VALUES: "MAPI_DISCRETE_VALUES",
MAPI_RESPONSIBILITY: "MAPI_RESPONSIBILITY",
MAPI_SPOOLER_STATUS: "MAPI_SPOOLER_STATUS",
MAPI_TRANSPORT_STATUS: "MAPI_TRANSPORT_STATUS",
MAPI_MESSAGE_RECIPIENTS: "MAPI_MESSAGE_RECIPIENTS",
MAPI_MESSAGE_ATTACHMENTS: "MAPI_MESSAGE_ATTACHMENTS",
MAPI_SUBMIT_FLAGS: "MAPI_SUBMIT_FLAGS",
MAPI_RECIPIENT_STATUS: "MAPI_RECIPIENT_STATUS",
MAPI_TRANSPORT_KEY: "MAPI_TRANSPORT_KEY",
MAPI_MSG_STATUS: "MAPI_MSG_STATUS",
MAPI_MESSAGE_DOWNLOAD_TIME: "MAPI_MESSAGE_DOWNLOAD_TIME",
MAPI_CREATION_VERSION: "MAPI_CREATION_VERSION",
MAPI_MODIFY_VERSION: "MAPI_MODIFY_VERSION",
MAPI_HASATTACH: "MAPI_HASATTACH",
MAPI_BODY_CRC: "MAPI_BODY_CRC",
MAPI_NORMALIZED_SUBJECT: "MAPI_NORMALIZED_SUBJECT",
MAPI_RTF_IN_SYNC: "MAPI_RTF_IN_SYNC",
MAPI_ATTACH_SIZE: "MAPI_ATTACH_SIZE",
MAPI_ATTACH_NUM: "MAPI_ATTACH_NUM",
MAPI_PREPROCESS: "MAPI_PREPROCESS",
MAPI_ORIGINATING_MTA_CERTIFICATE: "MAPI_ORIGINATING_MTA_CERTIFICATE",
MAPI_PROOF_OF_SUBMISSION: "MAPI_PROOF_OF_SUBMISSION",
MAPI_PRIMARY_SEND_ACCOUNT: "MAPI_PRIMARY_SEND_ACCOUNT",
MAPI_NEXT_SEND_ACCT: "MAPI_NEXT_SEND_ACCT",
MAPI_ACCESS: "MAPI_ACCESS",
MAPI_ROW_TYPE: "MAPI_ROW_TYPE",
MAPI_INSTANCE_KEY: "MAPI_INSTANCE_KEY",
MAPI_ACCESS_LEVEL: "MAPI_ACCESS_LEVEL",
MAPI_MAPPING_SIGNATURE: "MAPI_MAPPING_SIGNATURE",
MAPI_RECORD_KEY: "MAPI_RECORD_KEY",
MAPI_STORE_RECORD_KEY: "MAPI_STORE_RECORD_KEY",
MAPI_STORE_ENTRYID: "MAPI_STORE_ENTRYID",
MAPI_MINI_ICON: "MAPI_MINI_ICON",
MAPI_ICON: "MAPI_ICON",
MAPI_OBJECT_TYPE: "MAPI_OBJECT_TYPE",
MAPI_ENTRYID: "MAPI_ENTRYID",
MAPI_BODY: "MAPI_BODY",
MAPI_REPORT_TEXT: "MAPI_REPORT_TEXT",
MAPI_ORIGINATOR_AND_DL_EXPANSION_HISTORY: "MAPI_ORIGINATOR_AND_DL_EXPANSION_HISTORY",
MAPI_REPORTING_DL_NAME: "MAPI_REPORTING_DL_NAME",
MAPI_REPORTING_MTA_CERTIFICATE: "MAPI_REPORTING_MTA_CERTIFICATE",
MAPI_RTF_SYNC_BODY_CRC: "MAPI_RTF_SYNC_BODY_CRC",
MAPI_RTF_SYNC_BODY_COUNT: "MAPI_RTF_SYNC_BODY_COUNT",
MAPI_RTF_SYNC_BODY_TAG: "MAPI_RTF_SYNC_BODY_TAG",
MAPI_RTF_COMPRESSED: "MAPI_RTF_COMPRESSED",
MAPI_RTF_SYNC_PREFIX_COUNT: "MAPI_RTF_SYNC_PREFIX_COUNT",
MAPI_RTF_SYNC_TRAILING_COUNT: "MAPI_RTF_SYNC_TRAILING_COUNT",
MAPI_ORIGINALLY_INTENDED_RECIP_ENTRYID: "MAPI_ORIGINALLY_INTENDED_RECIP_ENTRYID",
MAPI_BODY_HTML: "MAPI_BODY_HTML",
MAPI_NATIVE_BODY: "MAPI_NATIVE_BODY",
MAPI_SMTP_MESSAGE_ID: "MAPI_SMTP_MESSAGE_ID",
MAPI_INTERNET_REFERENCES: "MAPI_INTERNET_REFERENCES",
MAPI_IN_REPLY_TO_ID: "MAPI_IN_REPLY_TO_ID",
MAPI_INTERNET_RETURN_PATH: "MAPI_INTERNET_RETURN_PATH",
MAPI_ICON_INDEX: "MAPI_ICON_INDEX",
MAPI_LAST_VERB_EXECUTED: "MAPI_LAST_VERB_EXECUTED",
MAPI_LAST_VERB_EXECUTION_TIME: "MAPI_LAST_VERB_EXECUTION_TIME",
MAPI_URL_COMP_NAME: "MAPI_URL_COMP_NAME",
MAPI_ATTRIBUTE_HIDDEN: "MAPI_ATTRIBUTE_HIDDEN",
MAPI_ATTRIBUTE_SYSTEM: "MAPI_ATTRIBUTE_SYSTEM",
MAPI_ATTRIBUTE_READ_ONLY: "MAPI_ATTRIBUTE_READ_ONLY",
MAPI_ROWID: "MAPI_ROWID",
MAPI_DISPLAY_NAME: "MAPI_DISPLAY_NAME",
MAPI_ADDRTYPE: "MAPI_ADDRTYPE",
MAPI_EMAIL_ADDRESS: "MAPI_EMAIL_ADDRESS",
MAPI_COMMENT: "MAPI_COMMENT",
MAPI_DEPTH: "MAPI_DEPTH",
MAPI_PROVIDER_DISPLAY: "MAPI_PROVIDER_DISPLAY",
MAPI_CREATION_TIME: "MAPI_CREATION_TIME",
MAPI_LAST_MODIFICATION_TIME: "MAPI_LAST_MODIFICATION_TIME",
MAPI_RESOURCE_FLAGS: "MAPI_RESOURCE_FLAGS",
MAPI_PROVIDER_DLL_NAME: "MAPI_PROVIDER_DLL_NAME",
MAPI_SEARCH_KEY: "MAPI_SEARCH_KEY",
MAPI_PROVIDER_UID: "MAPI_PROVIDER_UID",
MAPI_PROVIDER_ORDINAL: "MAPI_PROVIDER_ORDINAL",
MAPI_TARGET_ENTRY_ID: "MAPI_TARGET_ENTRY_ID",
MAPI_CONVERSATION_ID: "MAPI_CONVERSATION_ID",
MAPI_CONVERSATION_INDEX_TRACKING: "MAPI_CONVERSATION_INDEX_TRACKING",
MAPI_FORM_VERSION: "MAPI_FORM_VERSION",
MAPI_FORM_CLSID: "MAPI_FORM_CLSID",
MAPI_FORM_CONTACT_NAME: "MAPI_FORM_CONTACT_NAME",
MAPI_FORM_CATEGORY: "MAPI_FORM_CATEGORY",
MAPI_FORM_CATEGORY_SUB: "MAPI_FORM_CATEGORY_SUB",
MAPI_FORM_HOST_MAP: "MAPI_FORM_HOST_MAP",
MAPI_FORM_HIDDEN: "MAPI_FORM_HIDDEN",
MAPI_FORM_DESIGNER_NAME: "MAPI_FORM_DESIGNER_NAME",
MAPI_FORM_DESIGNER_GUID: "MAPI_FORM_DESIGNER_GUID",
MAPI_FORM_MESSAGE_BEHAVIOR: "MAPI_FORM_MESSAGE_BEHAVIOR",
MAPI_DEFAULT_STORE: "MAPI_DEFAULT_STORE",
MAPI_STORE_SUPPORT_MASK: "MAPI_STORE_SUPPORT_MASK",
MAPI_STORE_STATE: "MAPI_STORE_STATE",
MAPI_STORE_UNICODE_MASK: "MAPI_STORE_UNICODE_MASK",
MAPI_IPM_SUBTREE_SEARCH_KEY: "MAPI_IPM_SUBTREE_SEARCH_KEY",
MAPI_IPM_OUTBOX_SEARCH_KEY: "MAPI_IPM_OUTBOX_SEARCH_KEY",
MAPI_IPM_WASTEBASKET_SEARCH_KEY: "MAPI_IPM_WASTEBASKET_SEARCH_KEY",
MAPI_IPM_SENTMAIL_SEARCH_KEY: "MAPI_IPM_SENTMAIL_SEARCH_KEY",
MAPI_MDB_PROVIDER: "MAPI_MDB_PROVIDER",
MAPI_RECEIVE_FOLDER_SETTINGS: "MAPI_RECEIVE_FOLDER_SETTINGS",
MAPI_VALID_FOLDER_MASK: "MAPI_VALID_FOLDER_MASK",
MAPI_IPM_SUBTREE_ENTRYID: "MAPI_IPM_SUBTREE_ENTRYID",
MAPI_IPM_OUTBOX_ENTRYID: "MAPI_IPM_OUTBOX_ENTRYID",
MAPI_IPM_WASTEBASKET_ENTRYID: "MAPI_IPM_WASTEBASKET_ENTRYID",
MAPI_IPM_SENTMAIL_ENTRYID: "MAPI_IPM_SENTMAIL_ENTRYID",
MAPI_VIEWS_ENTRYID: "MAPI_VIEWS_ENTRYID",
MAPI_COMMON_VIEWS_ENTRYID: "MAPI_COMMON_VIEWS_ENTRYID",
MAPI_FINDER_ENTRYID: "MAPI_FINDER_ENTRYID",
MAPI_CONTAINER_FLAGS: "MAPI_CONTAINER_FLAGS",
MAPI_FOLDER_TYPE: "MAPI_FOLDER_TYPE",
MAPI_CONTENT_COUNT: "MAPI_CONTENT_COUNT",
MAPI_CONTENT_UNREAD: "MAPI_CONTENT_UNREAD",
MAPI_CREATE_TEMPLATES: "MAPI_CREATE_TEMPLATES",
MAPI_DETAILS_TABLE: "MAPI_DETAILS_TABLE",
MAPI_SEARCH: "MAPI_SEARCH",
MAPI_SELECTABLE: "MAPI_SELECTABLE",
MAPI_SUBFOLDERS: "MAPI_SUBFOLDERS",
MAPI_STATUS: "MAPI_STATUS",
MAPI_ANR: "MAPI_ANR",
MAPI_CONTENTS_SORT_ORDER: "MAPI_CONTENTS_SORT_ORDER",
MAPI_CONTAINER_HIERARCHY: "MAPI_CONTAINER_HIERARCHY",
MAPI_CONTAINER_CONTENTS: "MAPI_CONTAINER_CONTENTS",
MAPI_FOLDER_ASSOCIATED_CONTENTS: "MAPI_FOLDER_ASSOCIATED_CONTENTS",
MAPI_DEF_CREATE_DL: "MAPI_DEF_CREATE_DL",
MAPI_DEF_CREATE_MAILUSER: "MAPI_DEF_CREATE_MAILUSER",
MAPI_CONTAINER_CLASS: "MAPI_CONTAINER_CLASS",
MAPI_CONTAINER_MODIFY_VERSION: "MAPI_CONTAINER_MODIFY_VERSION",
MAPI_AB_PROVIDER_ID: "MAPI_AB_PROVIDER_ID",
MAPI_DEFAULT_VIEW_ENTRYID: "MAPI_DEFAULT_VIEW_ENTRYID",
MAPI_ASSOC_CONTENT_COUNT: "MAPI_ASSOC_CONTENT_COUNT",
MAPI_ATTACHMENT_X400_PARAMETERS: "MAPI_ATTACHMENT_X400_PARAMETERS",
MAPI_ATTACH_DATA_OBJ: "MAPI_ATTACH_DATA_OBJ",
MAPI_ATTACH_ENCODING: "MAPI_ATTACH_ENCODING",
MAPI_ATTACH_EXTENSION: "MAPI_ATTACH_EXTENSION",
MAPI_ATTACH_FILENAME: "MAPI_ATTACH_FILENAME",
MAPI_ATTACH_METHOD: "MAPI_ATTACH_METHOD",
MAPI_ATTACH_LONG_FILENAME: "MAPI_ATTACH_LONG_FILENAME",
MAPI_ATTACH_PATHNAME: "MAPI_ATTACH_PATHNAME",
MAPI_ATTACH_RENDERING: "MAPI_ATTACH_RENDERING",
MAPI_ATTACH_TAG: "MAPI_ATTACH_TAG",
MAPI_RENDERING_POSITION: "MAPI_RENDERING_POSITION",
MAPI_ATTACH_TRANSPORT_NAME: "MAPI_ATTACH_TRANSPORT_NAME",
MAPI_ATTACH_LONG_PATHNAME: "MAPI_ATTACH_LONG_PATHNAME",
MAPI_ATTACH_MIME_TAG: "MAPI_ATTACH_MIME_TAG",
MAPI_ATTACH_ADDITIONAL_INFO: "MAPI_ATTACH_ADDITIONAL_INFO",
MAPI_ATTACH_MIME_SEQUENCE: "MAPI_ATTACH_MIME_SEQUENCE",
MAPI_ATTACH_CONTENT_ID: "MAPI_ATTACH_CONTENT_ID",
MAPI_ATTACH_CONTENT_LOCATION: "MAPI_ATTACH_CONTENT_LOCATION",
MAPI_ATTACH_FLAGS: "MAPI_ATTACH_FLAGS",
MAPI_DISPLAY_TYPE: "MAPI_DISPLAY_TYPE",
MAPI_TEMPLATEID: "MAPI_TEMPLATEID",
MAPI_PRIMARY_CAPABILITY: "MAPI_PRIMARY_CAPABILITY",
MAPI_SMTP_ADDRESS: "MAPI_SMTP_ADDRESS",
MAPI_7BIT_DISPLAY_NAME: "MAPI_7BIT_DISPLAY_NAME",
MAPI_ACCOUNT: "MAPI_ACCOUNT",
MAPI_ALTERNATE_RECIPIENT: "MAPI_ALTERNATE_RECIPIENT",
MAPI_CALLBACK_TELEPHONE_NUMBER: "MAPI_CALLBACK_TELEPHONE_NUMBER",
MAPI_CONVERSION_PROHIBITED: "MAPI_CONVERSION_PROHIBITED",
MAPI_DISCLOSE_RECIPIENTS: "MAPI_DISCLOSE_RECIPIENTS",
MAPI_GENERATION: "MAPI_GENERATION",
MAPI_GIVEN_NAME: "MAPI_GIVEN_NAME",
MAPI_GOVERNMENT_ID_NUMBER: "MAPI_GOVERNMENT_ID_NUMBER",
MAPI_BUSINESS_TELEPHONE_NUMBER: "MAPI_BUSINESS_TELEPHONE_NUMBER",
MAPI_HOME_TELEPHONE_NUMBER: "MAPI_HOME_TELEPHONE_NUMBER",
MAPI_INITIALS: "MAPI_INITIALS",
MAPI_KEYWORD: "MAPI_KEYWORD",
MAPI_LANGUAGE: "MAPI_LANGUAGE",
MAPI_LOCATION: "MAPI_LOCATION",
MAPI_MAIL_PERMISSION: "MAPI_MAIL_PERMISSION",
MAPI_MHS_COMMON_NAME: "MAPI_MHS_COMMON_NAME",
MAPI_ORGANIZATIONAL_ID_NUMBER: "MAPI_ORGANIZATIONAL_ID_NUMBER",
MAPI_SURNAME: "MAPI_SURNAME",
MAPI_ORIGINAL_ENTRYID: "MAPI_ORIGINAL_ENTRYID",
MAPI_ORIGINAL_DISPLAY_NAME: "MAPI_ORIGINAL_DISPLAY_NAME",
MAPI_ORIGINAL_SEARCH_KEY: "MAPI_ORIGINAL_SEARCH_KEY",
MAPI_POSTAL_ADDRESS: "MAPI_POSTAL_ADDRESS",
MAPI_COMPANY_NAME: "MAPI_COMPANY_NAME",
MAPI_TITLE: "MAPI_TITLE",
MAPI_DEPARTMENT_NAME: "MAPI_DEPARTMENT_NAME",
MAPI_OFFICE_LOCATION: "MAPI_OFFICE_LOCATION",
MAPI_PRIMARY_TELEPHONE_NUMBER: "MAPI_PRIMARY_TELEPHONE_NUMBER",
MAPI_BUSINESS2_TELEPHONE_NUMBER: "MAPI_BUSINESS2_TELEPHONE_NUMBER",
MAPI_MOBILE_TELEPHONE_NUMBER: "MAPI_MOBILE_TELEPHONE_NUMBER",
MAPI_RADIO_TELEPHONE_NUMBER: "MAPI_RADIO_TELEPHONE_NUMBER",
MAPI_CAR_TELEPHONE_NUMBER: "MAPI_CAR_TELEPHONE_NUMBER",
MAPI_OTHER_TELEPHONE_NUMBER: "MAPI_OTHER_TELEPHONE_NUMBER",
MAPI_TRANSMITABLE_DISPLAY_NAME: "MAPI_TRANSMITABLE_DISPLAY_NAME",
MAPI_PAGER_TELEPHONE_NUMBER: "MAPI_PAGER_TELEPHONE_NUMBER",
MAPI_USER_CERTIFICATE: "MAPI_USER_CERTIFICATE",
MAPI_PRIMARY_FAX_NUMBER: "MAPI_PRIMARY_FAX_NUMBER",
MAPI_BUSINESS_FAX_NUMBER: "MAPI_BUSINESS_FAX_NUMBER",
MAPI_HOME_FAX_NUMBER: "MAPI_HOME_FAX_NUMBER",
MAPI_COUNTRY: "MAPI_COUNTRY",
MAPI_LOCALITY: "MAPI_LOCALITY",
MAPI_STATE_OR_PROVINCE: "MAPI_STATE_OR_PROVINCE",
MAPI_STREET_ADDRESS: "MAPI_STREET_ADDRESS",
MAPI_POSTAL_CODE: "MAPI_POSTAL_CODE",
MAPI_POST_OFFICE_BOX: "MAPI_POST_OFFICE_BOX",
MAPI_TELEX_NUMBER: "MAPI_TELEX_NUMBER",
MAPI_ISDN_NUMBER: "MAPI_ISDN_NUMBER",
MAPI_ASSISTANT_TELEPHONE_NUMBER: "MAPI_ASSISTANT_TELEPHONE_NUMBER",
MAPI_HOME2_TELEPHONE_NUMBER: "MAPI_HOME2_TELEPHONE_NUMBER",
MAPI_ASSISTANT: "MAPI_ASSISTANT",
MAPI_SEND_RICH_INFO: "MAPI_SEND_RICH_INFO",
MAPI_WEDDING_ANNIVERSARY: "MAPI_WEDDING_ANNIVERSARY",
MAPI_BIRTHDAY: "MAPI_BIRTHDAY",
MAPI_HOBBIES: "MAPI_HOBBIES",
MAPI_MIDDLE_NAME: "MAPI_MIDDLE_NAME",
MAPI_DISPLAY_NAME_PREFIX: "MAPI_DISPLAY_NAME_PREFIX",
MAPI_PROFESSION: "MAPI_PROFESSION",
MAPI_PREFERRED_BY_NAME: "MAPI_PREFERRED_BY_NAME",
MAPI_SPOUSE_NAME: "MAPI_SPOUSE_NAME",
MAPI_COMPUTER_NETWORK_NAME: "MAPI_COMPUTER_NETWORK_NAME",
MAPI_CUSTOMER_ID: "MAPI_CUSTOMER_ID",
MAPI_TTYTDD_PHONE_NUMBER: "MAPI_TTYTDD_PHONE_NUMBER",
MAPI_FTP_SITE: "MAPI_FTP_SITE",
MAPI_GENDER: "MAPI_GENDER",
MAPI_MANAGER_NAME: "MAPI_MANAGER_NAME",
MAPI_NICKNAME: "MAPI_NICKNAME",
MAPI_PERSONAL_HOME_PAGE: "MAPI_PERSONAL_HOME_PAGE",
MAPI_BUSINESS_HOME_PAGE: "MAPI_BUSINESS_HOME_PAGE",
MAPI_CONTACT_VERSION: "MAPI_CONTACT_VERSION",
MAPI_CONTACT_ENTRYIDS: "MAPI_CONTACT_ENTRYIDS",
MAPI_CONTACT_ADDRTYPES: "MAPI_CONTACT_ADDRTYPES",
MAPI_CONTACT_DEFAULT_ADDRESS_INDEX: "MAPI_CONTACT_DEFAULT_ADDRESS_INDEX",
MAPI_CONTACT_EMAIL_ADDRESSES: "MAPI_CONTACT_EMAIL_ADDRESSES",
MAPI_COMPANY_MAIN_PHONE_NUMBER: "MAPI_COMPANY_MAIN_PHONE_NUMBER",
MAPI_CHILDRENS_NAMES: "MAPI_CHILDRENS_NAMES",
MAPI_HOME_ADDRESS_CITY: "MAPI_HOME_ADDRESS_CITY",
MAPI_HOME_ADDRESS_COUNTRY: "MAPI_HOME_ADDRESS_COUNTRY",
MAPI_HOME_ADDRESS_POSTAL_CODE: "MAPI_HOME_ADDRESS_POSTAL_CODE",
MAPI_HOME_ADDRESS_STATE_OR_PROVINCE: "MAPI_HOME_ADDRESS_STATE_OR_PROVINCE",
MAPI_HOME_ADDRESS_STREET: "MAPI_HOME_ADDRESS_STREET",
MAPI_HOME_ADDRESS_POST_OFFICE_BOX: "MAPI_HOME_ADDRESS_POST_OFFICE_BOX",
MAPI_OTHER_ADDRESS_CITY: "MAPI_OTHER_ADDRESS_CITY",
MAPI_OTHER_ADDRESS_COUNTRY: "MAPI_OTHER_ADDRESS_COUNTRY",
MAPI_OTHER_ADDRESS_POSTAL_CODE: "MAPI_OTHER_ADDRESS_POSTAL_CODE",
MAPI_OTHER_ADDRESS_STATE_OR_PROVINCE: "MAPI_OTHER_ADDRESS_STATE_OR_PROVINCE",
MAPI_OTHER_ADDRESS_STREET: "MAPI_OTHER_ADDRESS_STREET",
MAPI_OTHER_ADDRESS_POST_OFFICE_BOX: "MAPI_OTHER_ADDRESS_POST_OFFICE_BOX",
MAPI_SEND_INTERNET_ENCODING: "MAPI_SEND_INTERNET_ENCODING",
MAPI_STORE_PROVIDERS: "MAPI_STORE_PROVIDERS",
MAPI_AB_PROVIDERS: "MAPI_AB_PROVIDERS",
MAPI_TRANSPORT_PROVIDERS: "MAPI_TRANSPORT_PROVIDERS",
MAPI_DEFAULT_PROFILE: "MAPI_DEFAULT_PROFILE",
MAPI_AB_SEARCH_PATH: "MAPI_AB_SEARCH_PATH",
MAPI_AB_DEFAULT_DIR: "MAPI_AB_DEFAULT_DIR",
MAPI_AB_DEFAULT_PAB: "MAPI_AB_DEFAULT_PAB",
MAPI_FILTERING_HOOKS: "MAPI_FILTERING_HOOKS",
MAPI_SERVICE_NAME: "MAPI_SERVICE_NAME",
MAPI_SERVICE_DLL_NAME: "MAPI_SERVICE_DLL_NAME",
MAPI_SERVICE_ENTRY_NAME: "MAPI_SERVICE_ENTRY_NAME",
MAPI_SERVICE_UID: "MAPI_SERVICE_UID",
MAPI_SERVICE_EXTRA_UIDS: "MAPI_SERVICE_EXTRA_UIDS",
MAPI_SERVICES: "MAPI_SERVICES",
MAPI_SERVICE_SUPPORT_FILES: "MAPI_SERVICE_SUPPORT_FILES",
MAPI_SERVICE_DELETE_FILES: "MAPI_SERVICE_DELETE_FILES",
MAPI_AB_SEARCH_PATH_UPDATE: "MAPI_AB_SEARCH_PATH_UPDATE",
MAPI_PROFILE_NAME: "MAPI_PROFILE_NAME",
MAPI_IDENTITY_DISPLAY: "MAPI_IDENTITY_DISPLAY",
MAPI_IDENTITY_ENTRYID: "MAPI_IDENTITY_ENTRYID",
MAPI_RESOURCE_METHODS: "MAPI_RESOURCE_METHODS",
MAPI_RESOURCE_TYPE: "MAPI_RESOURCE_TYPE",
MAPI_STATUS_CODE: "MAPI_STATUS_CODE",
MAPI_IDENTITY_SEARCH_KEY: "MAPI_IDENTITY_SEARCH_KEY",
MAPI_OWN_STORE_ENTRYID: "MAPI_OWN_STORE_ENTRYID",
MAPI_RESOURCE_PATH: "MAPI_RESOURCE_PATH",
MAPI_STATUS_STRING: "MAPI_STATUS_STRING",
MAPI_X400_DEFERRED_DELIVERY_CANCEL: "MAPI_X400_DEFERRED_DELIVERY_CANCEL",
MAPI_HEADER_FOLDER_ENTRYID: "MAPI_HEADER_FOLDER_ENTRYID",
MAPI_REMOTE_PROGRESS: "MAPI_REMOTE_PROGRESS",
MAPI_REMOTE_PROGRESS_TEXT: "MAPI_REMOTE_PROGRESS_TEXT",
MAPI_REMOTE_VALIDATE_OK: "MAPI_REMOTE_VALIDATE_OK",
MAPI_CONTROL_FLAGS: "MAPI_CONTROL_FLAGS",
MAPI_CONTROL_STRUCTURE: "MAPI_CONTROL_STRUCTURE",
MAPI_CONTROL_TYPE: "MAPI_CONTROL_TYPE",
MAPI_DELTAX: "MAPI_DELTAX",
MAPI_DELTAY: "MAPI_DELTAY",
MAPI_XPOS: "MAPI_XPOS",
MAPI_YPOS: "MAPI_YPOS",
MAPI_CONTROL_ID: "MAPI_CONTROL_ID",
MAPI_INITIAL_DETAILS_PANE: "MAPI_INITIAL_DETAILS_PANE",
MAPI_UNCOMPRESSED_BODY: "MAPI_UNCOMPRESSED_BODY",
MAPI_INTERNET_CODEPAGE: "MAPI_INTERNET_CODEPAGE",
MAPI_AUTO_RESPONSE_SUPPRESS: "MAPI_AUTO_RESPONSE_SUPPRESS",
MAPI_MESSAGE_LOCALE_ID: "MAPI_MESSAGE_LOCALE_ID",
MAPI_RULE_TRIGGER_HISTORY: "MAPI_RULE_TRIGGER_HISTORY",
MAPI_MOVE_TO_STORE_ENTRYID: "MAPI_MOVE_TO_STORE_ENTRYID",
MAPI_MOVE_TO_FOLDER_ENTRYID: "MAPI_MOVE_TO_FOLDER_ENTRYID",
MAPI_STORAGE_QUOTA_LIMIT: "MAPI_STORAGE_QUOTA_LIMIT",
MAPI_EXCESS_STORAGE_USED: "MAPI_EXCESS_STORAGE_USED",
MAPI_SVR_GENERATING_QUOTA_MSG: "MAPI_SVR_GENERATING_QUOTA_MSG",
MAPI_CREATOR_NAME: "MAPI_CREATOR_NAME",
MAPI_CREATOR_ENTRY_ID: "MAPI_CREATOR_ENTRY_ID",
MAPI_LAST_MODIFIER_NAME: "MAPI_LAST_MODIFIER_NAME",
MAPI_LAST_MODIFIER_ENTRY_ID: "MAPI_LAST_MODIFIER_ENTRY_ID",
MAPI_REPLY_RECIPIENT_SMTP_PROXIES: "MAPI_REPLY_RECIPIENT_SMTP_PROXIES",
MAPI_MESSAGE_CODEPAGE: "MAPI_MESSAGE_CODEPAGE",
MAPI_EXTENDED_ACL_DATA: "MAPI_EXTENDED_ACL_DATA",
MAPI_SENDER_FLAGS: "MAPI_SENDER_FLAGS",
MAPI_SENT_REPRESENTING_FLAGS: "MAPI_SENT_REPRESENTING_FLAGS",
MAPI_RECEIVED_BY_FLAGS: "MAPI_RECEIVED_BY_FLAGS",
MAPI_RECEIVED_REPRESENTING_FLAGS: "MAPI_RECEIVED_REPRESENTING_FLAGS",
MAPI_CREATOR_ADDRESS_TYPE: "MAPI_CREATOR_ADDRESS_TYPE",
MAPI_CREATOR_EMAIL_ADDRESS: "MAPI_CREATOR_EMAIL_ADDRESS",
MAPI_SENDER_SIMPLE_DISPLAY_NAME: "MAPI_SENDER_SIMPLE_DISPLAY_NAME",
MAPI_SENT_REPRESENTING_SIMPLE_DISPLAY_NAME: "MAPI_SENT_REPRESENTING_SIMPLE_DISPLAY_NAME",
MAPI_RECEIVED_REPRESENTING_SIMPLE_DISPLAY_NAME: "MAPI_RECEIVED_REPRESENTING_SIMPLE_DISPLAY_NAME",
MAPI_CREATOR_SIMPLE_DISP_NAME: "MAPI_CREATOR_SIMPLE_DISP_NAME",
MAPI_LAST_MODIFIER_SIMPLE_DISPLAY_NAME: "MAPI_LAST_MODIFIER_SIMPLE_DISPLAY_NAME",
MAPI_CONTENT_FILTER_SPAM_CONFIDENCE_LEVEL: "MAPI_CONTENT_FILTER_SPAM_CONFIDENCE_LEVEL",
MAPI_INTERNET_MAIL_OVERRIDE_FORMAT: "MAPI_INTERNET_MAIL_OVERRIDE_FORMAT",
MAPI_MESSAGE_EDITOR_FORMAT: "MAPI_MESSAGE_EDITOR_FORMAT",
MAPI_SENDER_SMTP_ADDRESS: "MAPI_SENDER_SMTP_ADDRESS",
MAPI_SENT_REPRESENTING_SMTP_ADDRESS: "MAPI_SENT_REPRESENTING_SMTP_ADDRESS",
MAPI_READ_RECEIPT_SMTP_ADDRESS: "MAPI_READ_RECEIPT_SMTP_ADDRESS",
MAPI_RECEIVED_BY_SMTP_ADDRESS: "MAPI_RECEIVED_BY_SMTP_ADDRESS",
MAPI_RECEIVED_REPRESENTING_SMTP_ADDRESS: "MAPI_RECEIVED_REPRESENTING_SMTP_ADDRESS",
MAPI_SENDING_SMTP_ADDRESS: "MAPI_SENDING_SMTP_ADDRESS",
MAPI_SIP_ADDRESS: "MAPI_SIP_ADDRESS",
MAPI_RECIPIENT_DISPLAY_NAME: "MAPI_RECIPIENT_DISPLAY_NAME",
MAPI_RECIPIENT_ENTRYID: "MAPI_RECIPIENT_ENTRYID",
MAPI_RECIPIENT_FLAGS: "MAPI_RECIPIENT_FLAGS",
MAPI_RECIPIENT_TRACKSTATUS: "MAPI_RECIPIENT_TRACKSTATUS",
MAPI_CHANGE_KEY: "MAPI_CHANGE_KEY",
MAPI_PREDECESSOR_CHANGE_LIST: "MAPI_PREDECESSOR_CHANGE_LIST",
MAPI_ID_SECURE_MIN: "MAPI_ID_SECURE_MIN",
MAPI_ID_SECURE_MAX: "MAPI_ID_SECURE_MAX",
MAPI_VOICE_MESSAGE_DURATION: "MAPI_VOICE_MESSAGE_DURATION",
MAPI_SENDER_TELEPHONE_NUMBER: "MAPI_SENDER_TELEPHONE_NUMBER",
MAPI_VOICE_MESSAGE_SENDER_NAME: "MAPI_VOICE_MESSAGE_SENDER_NAME",
MAPI_FAX_NUMBER_OF_PAGES: "MAPI_FAX_NUMBER_OF_PAGES",
MAPI_VOICE_MESSAGE_ATTACHMENT_ORDER: "MAPI_VOICE_MESSAGE_ATTACHMENT_ORDER",
MAPI_CALL_ID: "MAPI_CALL_ID",
MAPI_ATTACHMENT_LINK_ID: "MAPI_ATTACHMENT_LINK_ID",
MAPI_EXCEPTION_START_TIME: "MAPI_EXCEPTION_START_TIME",
MAPI_EXCEPTION_END_TIME: "MAPI_EXCEPTION_END_TIME",
MAPI_ATTACHMENT_FLAGS: "MAPI_ATTACHMENT_FLAGS",
MAPI_ATTACHMENT_HIDDEN: "MAPI_ATTACHMENT_HIDDEN",
MAPI_ATTACHMENT_CONTACT_PHOTO: "MAPI_ATTACHMENT_CONTACT_PHOTO",
MAPI_FILE_UNDER: "MAPI_FILE_UNDER",
MAPI_FILE_UNDER_ID: "MAPI_FILE_UNDER_ID",
MAPI_CONTACT_ITEM_DATA: "MAPI_CONTACT_ITEM_DATA",
MAPI_REFERRED_BY: "MAPI_REFERRED_BY",
MAPI_DEPARTMENT: "MAPI_DEPARTMENT",
MAPI_HAS_PICTURE: "MAPI_HAS_PICTURE",
MAPI_HOME_ADDRESS: "MAPI_HOME_ADDRESS",
MAPI_WORK_ADDRESS: "MAPI_WORK_ADDRESS",
MAPI_OTHER_ADDRESS: "MAPI_OTHER_ADDRESS",
MAPI_POSTAL_ADDRESS_ID: "MAPI_POSTAL_ADDRESS_ID",
MAPI_CONTACT_CHARACTER_SET: "MAPI_CONTACT_CHARACTER_SET",
MAPI_AUTO_LOG: "MAPI_AUTO_LOG",
MAPI_FILE_UNDER_LIST: "MAPI_FILE_UNDER_LIST",
MAPI_EMAIL_LIST: "MAPI_EMAIL_LIST",
MAPI_ADDRESS_BOOK_PROVIDER_EMAIL_LIST: "MAPI_ADDRESS_BOOK_PROVIDER_EMAIL_LIST",
MAPI_ADDRESS_BOOK_PROVIDER_ARRAY_TYPE: "MAPI_ADDRESS_BOOK_PROVIDER_ARRAY_TYPE",
MAPI_HTML: "MAPI_HTML",
MAPI_YOMI_FIRST_NAME: "MAPI_YOMI_FIRST_NAME",
MAPI_YOMI_LAST_NAME: "MAPI_YOMI_LAST_NAME",
MAPI_YOMI_COMPANY_NAME: "MAPI_YOMI_COMPANY_NAME",
MAPI_BUSINESS_CARD_DISPLAY_DEFINITION: "MAPI_BUSINESS_CARD_DISPLAY_DEFINITION",
MAPI_BUSINESS_CARD_CARD_PICTURE: "MAPI_BUSINESS_CARD_CARD_PICTURE",
MAPI_WORK_ADDRESS_STREET: "MAPI_WORK_ADDRESS_STREET",
MAPI_WORK_ADDRESS_CITY: "MAPI_WORK_ADDRESS_CITY",
MAPI_WORK_ADDRESS_STATE: "MAPI_WORK_ADDRESS_STATE",
MAPI_WORK_ADDRESS_POSTAL_CODE: "MAPI_WORK_ADDRESS_POSTAL_CODE",
MAPI_WORK_ADDRESS_COUNTRY: "MAPI_WORK_ADDRESS_COUNTRY",
MAPI_WORK_ADDRESS_POST_OFFICE_BOX: "MAPI_WORK_ADDRESS_POST_OFFICE_BOX",
MAPI_DISTRIBUTION_LIST_CHECKSUM: "MAPI_DISTRIBUTION_LIST_CHECKSUM",
MAPI_BIRTHDAY_EVENT_ENTRY_ID: "MAPI_BIRTHDAY_EVENT_ENTRY_ID",
MAPI_ANNIVERSARY_EVENT_ENTRY_ID: "MAPI_ANNIVERSARY_EVENT_ENTRY_ID",
MAPI_CONTACT_USER_FIELD1: "MAPI_CONTACT_USER_FIELD1",
MAPI_CONTACT_USER_FIELD2: "MAPI_CONTACT_USER_FIELD2",
MAPI_CONTACT_USER_FIELD3: "MAPI_CONTACT_USER_FIELD3",
MAPI_CONTACT_USER_FIELD4: "MAPI_CONTACT_USER_FIELD4",
MAPI_DISTRIBUTION_LIST_NAME: "MAPI_DISTRIBUTION_LIST_NAME",
MAPI_DISTRIBUTION_LIST_ONE_OFF_MEMBERS: "MAPI_DISTRIBUTION_LIST_ONE_OFF_MEMBERS",
MAPI_DISTRIBUTION_LIST_MEMBERS: "MAPI_DISTRIBUTION_LIST_MEMBERS",
MAPI_INSTANT_MESSAGING_ADDRESS: "MAPI_INSTANT_MESSAGING_ADDRESS",
MAPI_DISTRIBUTION_LIST_STREAM: "MAPI_DISTRIBUTION_LIST_STREAM",
MAPI_EMAIL_DISPLAY_NAME: "MAPI_EMAIL_DISPLAY_NAME",
MAPI_EMAIL_ADDR_TYPE: "MAPI_EMAIL_ADDR_TYPE",
MAPI_EMAIL_EMAIL_ADDRESS: "MAPI_EMAIL_EMAIL_ADDRESS",
MAPI_EMAIL_ORIGINAL_DISPLAY_NAME: "MAPI_EMAIL_ORIGINAL_DISPLAY_NAME",
MAPI_EMAIL1ORIGINAL_ENTRY_ID: "MAPI_EMAIL1ORIGINAL_ENTRY_ID",
MAPI_EMAIL1RICH_TEXT_FORMAT: "MAPI_EMAIL1RICH_TEXT_FORMAT",
MAPI_EMAIL1EMAIL_TYPE: "MAPI_EMAIL1EMAIL_TYPE",
MAPI_EMAIL2DISPLAY_NAME: "MAPI_EMAIL2DISPLAY_NAME",
MAPI_EMAIL2ENTRY_ID: "MAPI_EMAIL2ENTRY_ID",
MAPI_EMAIL2ADDR_TYPE: "MAPI_EMAIL2ADDR_TYPE",
MAPI_EMAIL2EMAIL_ADDRESS: "MAPI_EMAIL2EMAIL_ADDRESS",
MAPI_EMAIL2ORIGINAL_DISPLAY_NAME: "MAPI_EMAIL2ORIGINAL_DISPLAY_NAME",
MAPI_EMAIL2ORIGINAL_ENTRY_ID: "MAPI_EMAIL2ORIGINAL_ENTRY_ID",
MAPI_EMAIL2RICH_TEXT_FORMAT: "MAPI_EMAIL2RICH_TEXT_FORMAT",
MAPI_EMAIL3DISPLAY_NAME: "MAPI_EMAIL3DISPLAY_NAME",
MAPI_EMAIL3ENTRY_ID: "MAPI_EMAIL3ENTRY_ID",
MAPI_EMAIL3ADDR_TYPE: "MAPI_EMAIL3ADDR_TYPE",
MAPI_EMAIL3EMAIL_ADDRESS: "MAPI_EMAIL3EMAIL_ADDRESS",
MAPI_EMAIL3ORIGINAL_DISPLAY_NAME: "MAPI_EMAIL3ORIGINAL_DISPLAY_NAME",
MAPI_EMAIL3ORIGINAL_ENTRY_ID: "MAPI_EMAIL3ORIGINAL_ENTRY_ID",
MAPI_EMAIL3RICH_TEXT_FORMAT: "MAPI_EMAIL3RICH_TEXT_FORMAT",
MAPI_FAX1ADDRESS_TYPE: "MAPI_FAX1ADDRESS_TYPE",
MAPI_FAX1EMAIL_ADDRESS: "MAPI_FAX1EMAIL_ADDRESS",
MAPI_FAX1ORIGINAL_DISPLAY_NAME: "MAPI_FAX1ORIGINAL_DISPLAY_NAME",
MAPI_FAX1ORIGINAL_ENTRY_ID: "MAPI_FAX1ORIGINAL_ENTRY_ID",
MAPI_FAX2ADDRESS_TYPE: "MAPI_FAX2ADDRESS_TYPE",
MAPI_FAX2EMAIL_ADDRESS: "MAPI_FAX2EMAIL_ADDRESS",
MAPI_FAX2ORIGINAL_DISPLAY_NAME: "MAPI_FAX2ORIGINAL_DISPLAY_NAME",
MAPI_FAX2ORIGINAL_ENTRY_ID: "MAPI_FAX2ORIGINAL_ENTRY_ID",
MAPI_FAX3ADDRESS_TYPE: "MAPI_FAX3ADDRESS_TYPE",
MAPI_FAX3EMAIL_ADDRESS: "MAPI_FAX3EMAIL_ADDRESS",
MAPI_FAX3ORIGINAL_DISPLAY_NAME: "MAPI_FAX3ORIGINAL_DISPLAY_NAME",
MAPI_FAX3ORIGINAL_ENTRY_ID: "MAPI_FAX3ORIGINAL_ENTRY_ID",
MAPI_FREE_BUSY_LOCATION: "MAPI_FREE_BUSY_LOCATION",
MAPI_HOME_ADDRESS_COUNTRY_CODE: "MAPI_HOME_ADDRESS_COUNTRY_CODE",
MAPI_WORK_ADDRESS_COUNTRY_CODE: "MAPI_WORK_ADDRESS_COUNTRY_CODE",
MAPI_OTHER_ADDRESS_COUNTRY_CODE: "MAPI_OTHER_ADDRESS_COUNTRY_CODE",
MAPI_ADDRESS_COUNTRY_CODE: "MAPI_ADDRESS_COUNTRY_CODE",
MAPI_BIRTHDAY_LOCAL: "MAPI_BIRTHDAY_LOCAL",
MAPI_WEDDING_ANNIVERSARY_LOCAL: "MAPI_WEDDING_ANNIVERSARY_LOCAL",
MAPI_TASK_STATUS: "MAPI_TASK_STATUS",
MAPI_TASK_START_DATE: "MAPI_TASK_START_DATE",
MAPI_TASK_DUE_DATE: "MAPI_TASK_DUE_DATE",
MAPI_TASK_ACTUAL_EFFORT: "MAPI_TASK_ACTUAL_EFFORT",
MAPI_TASK_ESTIMATED_EFFORT: "MAPI_TASK_ESTIMATED_EFFORT",
MAPI_TASK_FRECUR: "MAPI_TASK_FRECUR",
MAPI_SEND_MEETING_AS_ICAL: "MAPI_SEND_MEETING_AS_ICAL",
MAPI_APPOINTMENT_SEQUENCE: "MAPI_APPOINTMENT_SEQUENCE",
MAPI_APPOINTMENT_SEQUENCE_TIME: "MAPI_APPOINTMENT_SEQUENCE_TIME",
MAPI_APPOINTMENT_LAST_SEQUENCE: "MAPI_APPOINTMENT_LAST_SEQUENCE",
MAPI_CHANGE_HIGHLIGHT: "MAPI_CHANGE_HIGHLIGHT",
MAPI_BUSY_STATUS: "MAPI_BUSY_STATUS",
MAPI_FEXCEPTIONAL_BODY: "MAPI_FEXCEPTIONAL_BODY",
MAPI_APPOINTMENT_AUXILIARY_FLAGS: "MAPI_APPOINTMENT_AUXILIARY_FLAGS",
MAPI_OUTLOOK_LOCATION: "MAPI_OUTLOOK_LOCATION",
MAPI_MEETING_WORKSPACE_URL: "MAPI_MEETING_WORKSPACE_URL",
MAPI_FORWARD_INSTANCE: "MAPI_FORWARD_INSTANCE",
MAPI_LINKED_TASK_ITEMS: "MAPI_LINKED_TASK_ITEMS",
MAPI_APPT_START_WHOLE: "MAPI_APPT_START_WHOLE",
MAPI_APPT_END_WHOLE: "MAPI_APPT_END_WHOLE",
MAPI_APPOINTMENT_START_TIME: "MAPI_APPOINTMENT_START_TIME",
MAPI_APPOINTMENT_END_TIME: "MAPI_APPOINTMENT_END_TIME",
MAPI_APPOINTMENT_END_DATE: "MAPI_APPOINTMENT_END_DATE",
MAPI_APPOINTMENT_START_DATE: "MAPI_APPOINTMENT_START_DATE",
MAPI_APPT_DURATION: "MAPI_APPT_DURATION",
MAPI_APPOINTMENT_COLOR: "MAPI_APPOINTMENT_COLOR",
MAPI_APPOINTMENT_SUB_TYPE: "MAPI_APPOINTMENT_SUB_TYPE",
MAPI_APPOINTMENT_RECUR: "MAPI_APPOINTMENT_RECUR",
MAPI_APPOINTMENT_STATE_FLAGS: "MAPI_APPOINTMENT_STATE_FLAGS",
MAPI_RESPONSE_STATUS: "MAPI_RESPONSE_STATUS",
MAPI_APPOINTMENT_REPLY_TIME: "MAPI_APPOINTMENT_REPLY_TIME",
MAPI_RECURRING: "MAPI_RECURRING",
MAPI_INTENDED_BUSY_STATUS: "MAPI_INTENDED_BUSY_STATUS",
MAPI_APPOINTMENT_UPDATE_TIME: "MAPI_APPOINTMENT_UPDATE_TIME",
MAPI_EXCEPTION_REPLACE_TIME: "MAPI_EXCEPTION_REPLACE_TIME",
MAPI_OWNER_NAME: "MAPI_OWNER_NAME",
MAPI_APPOINTMENT_REPLY_NAME: "MAPI_APPOINTMENT_REPLY_NAME",
MAPI_RECURRENCE_TYPE: "MAPI_RECURRENCE_TYPE",
MAPI_RECURRENCE_PATTERN: "MAPI_RECURRENCE_PATTERN",
MAPI_TIME_ZONE_STRUCT: "MAPI_TIME_ZONE_STRUCT",
MAPI_TIME_ZONE_DESCRIPTION: "MAPI_TIME_ZONE_DESCRIPTION",
MAPI_CLIP_START: "MAPI_CLIP_START",
MAPI_CLIP_END: "MAPI_CLIP_END",
MAPI_ORIGINAL_STORE_ENTRY_ID: "MAPI_ORIGINAL_STORE_ENTRY_ID",
MAPI_ALL_ATTENDEES_STRING: "MAPI_ALL_ATTENDEES_STRING",
MAPI_AUTO_FILL_LOCATION: "MAPI_AUTO_FILL_LOCATION",
MAPI_TO_ATTENDEES_STRING: "MAPI_TO_ATTENDEES_STRING",
MAPI_CCATTENDEES_STRING: "MAPI_CCATTENDEES_STRING",
MAPI_CONF_CHECK: "MAPI_CONF_CHECK",
MAPI_CONFERENCING_TYPE: "MAPI_CONFERENCING_TYPE",
MAPI_DIRECTORY: "MAPI_DIRECTORY",
MAPI_ORGANIZER_ALIAS: "MAPI_ORGANIZER_ALIAS",
MAPI_AUTO_START_CHECK: "MAPI_AUTO_START_CHECK",
MAPI_AUTO_START_WHEN: "MAPI_AUTO_START_WHEN",
MAPI_ALLOW_EXTERNAL_CHECK: "MAPI_ALLOW_EXTERNAL_CHECK",
MAPI_COLLABORATE_DOC: "MAPI_COLLABORATE_DOC",
MAPI_NET_SHOW_URL: "MAPI_NET_SHOW_URL",
MAPI_ONLINE_PASSWORD: "MAPI_ONLINE_PASSWORD",
MAPI_APPOINTMENT_PROPOSED_DURATION: "MAPI_APPOINTMENT_PROPOSED_DURATION",
MAPI_APPT_COUNTER_PROPOSAL: "MAPI_APPT_COUNTER_PROPOSAL",
MAPI_APPOINTMENT_PROPOSAL_NUMBER: "MAPI_APPOINTMENT_PROPOSAL_NUMBER",
MAPI_APPOINTMENT_NOT_ALLOW_PROPOSE: "MAPI_APPOINTMENT_NOT_ALLOW_PROPOSE",
MAPI_APPT_TZDEF_START_DISPLAY: "MAPI_APPT_TZDEF_START_DISPLAY",
MAPI_APPT_TZDEF_END_DISPLAY: "MAPI_APPT_TZDEF_END_DISPLAY",
MAPI_APPT_TZDEF_RECUR: "MAPI_APPT_TZDEF_RECUR",
MAPI_REMINDER_MINUTES_BEFORE_START: "MAPI_REMINDER_MINUTES_BEFORE_START",
MAPI_REMINDER_TIME: "MAPI_REMINDER_TIME",
MAPI_REMINDER_SET: "MAPI_REMINDER_SET",
MAPI_PRIVATE: "MAPI_PRIVATE",
MAPI_AGING_DONT_AGE_ME: "MAPI_AGING_DONT_AGE_ME",
MAPI_FORM_STORAGE: "MAPI_FORM_STORAGE",
MAPI_SIDE_EFFECTS: "MAPI_SIDE_EFFECTS",
MAPI_REMOTE_STATUS: "MAPI_REMOTE_STATUS",
MAPI_PAGE_DIR_STREAM: "MAPI_PAGE_DIR_STREAM",
MAPI_SMART_NO_ATTACH: "MAPI_SMART_NO_ATTACH",
MAPI_COMMON_START: "MAPI_COMMON_START",
MAPI_COMMON_END: "MAPI_COMMON_END",
MAPI_TASK_MODE: "MAPI_TASK_MODE",
MAPI_FORM_PROP_STREAM: "MAPI_FORM_PROP_STREAM",
MAPI_REQUEST: "MAPI_REQUEST",
MAPI_NON_SENDABLE_TO: "MAPI_NON_SENDABLE_TO",
MAPI_NON_SENDABLE_CC: "MAPI_NON_SENDABLE_CC",
MAPI_NON_SENDABLE_BCC: "MAPI_NON_SENDABLE_BCC",
MAPI_COMPANIES: "MAPI_COMPANIES",
MAPI_CONTACTS: "MAPI_CONTACTS",
MAPI_PROP_DEF_STREAM: "MAPI_PROP_DEF_STREAM",
MAPI_SCRIPT_STREAM: "MAPI_SCRIPT_STREAM",
MAPI_CUSTOM_FLAG: "MAPI_CUSTOM_FLAG",
MAPI_OUTLOOK_CURRENT_VERSION: "MAPI_OUTLOOK_CURRENT_VERSION",
MAPI_CURRENT_VERSION_NAME: "MAPI_CURRENT_VERSION_NAME",
MAPI_REMINDER_NEXT_TIME: "MAPI_REMINDER_NEXT_TIME",
MAPI_HEADER_ITEM: "MAPI_HEADER_ITEM",
MAPI_USE_TNEF: "MAPI_USE_TNEF",
MAPI_TO_DO_TITLE: "MAPI_TO_DO_TITLE",
MAPI_VALID_FLAG_STRING_PROOF: "MAPI_VALID_FLAG_STRING_PROOF",
MAPI_LOG_TYPE: "MAPI_LOG_TYPE",
MAPI_LOG_START: "MAPI_LOG_START",
MAPI_LOG_DURATION: "MAPI_LOG_DURATION",
MAPI_LOG_END: "MAPI_LOG_END",
}
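# Illustrative sketch (not part of the original module): resolve a MAPI property
# code to its symbolic name, falling back to the hex value for unknown codes.
def _mapi_property_name(code):
    return CODE_TO_NAME.get(code, "0x%04X" % code)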
| koodaamo/tnefparse | tnefparse/properties.py | Python | lgpl-3.0 | 63,627 |
# BlenderBIM Add-on - OpenBIM Blender Add-on
# Copyright (C) 2020, 2021 Maxim Vasilyev <[email protected]>
#
# This file is part of BlenderBIM Add-on.
#
# BlenderBIM Add-on is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BlenderBIM Add-on is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BlenderBIM Add-on. If not, see <http://www.gnu.org/licenses/>.
import bpy
import blf
import math
import gpu, bgl
from bpy import types
from mathutils import Vector, Matrix
from mathutils import geometry
from bpy_extras import view3d_utils
from blenderbim.bim.module.drawing.shaders import DotsGizmoShader, ExtrusionGuidesShader, BaseLinesShader
from ifcopenshell.util.unit import si_conversions
"""Gizmos under the hood
## Transforms:
source/blender/windowmanager/gizmo/WM_gizmo_types.h
matrix_basis -- "Transformation of this gizmo." = placement in scene
matrix_offset -- "Custom offset from origin." = local transforms according to state/value
matrix_space -- "The space this gizmo is being modified in." used by some gizmos for undefined purposes
matrix_world -- final matrix, scaled according to viewport zoom and custom scale_basis
source/blender/windowmanager/gizmo/intern/wm_gizmo.c:WM_gizmo_calc_matrix_final_params
final = space @ (autoscale * (basis @ offset))
final = space @ (basis @ offset) -- if gizmo.use_draw_scale == False
final = space @ ((autoscale * basis) @ offset) -- if gizmo.use_draw_offset_scale
source/blender/windowmanager/gizmo/intern/wm_gizmo.c:wm_gizmo_calculate_scale
autoscale = gizmo.scale_basis * magic(preferences, matrix_space, matrix_basis, context.region_data)
magic -- scales 1.0 to match preferences.view.gizmo_size pixels (75 by default)
## Selection
select_id -- apparently, id of a selectable part
test_select -- expected to return id of selection, doesn't seem to work
draw_select -- fake-draw of selection geometry for gpu-side cursor tracking
"""
# some geometries for Gizmo.custom_shape shaders
CUBE = (
(+1, +1, +1),
(-1, +1, +1),
(+1, -1, +1), # top
(+1, -1, +1),
(-1, +1, +1),
(-1, -1, +1),
(+1, +1, +1),
(+1, -1, +1),
(+1, +1, -1), # right
(+1, +1, -1),
(+1, -1, +1),
(+1, -1, -1),
(+1, +1, +1),
(+1, +1, -1),
(-1, +1, +1), # back
(-1, +1, +1),
(+1, +1, -1),
(-1, +1, -1),
(-1, -1, -1),
(-1, +1, -1),
(+1, -1, -1), # bot
(+1, -1, -1),
(-1, +1, -1),
(+1, +1, -1),
(-1, -1, -1),
(-1, -1, +1),
(-1, +1, -1), # left
(-1, +1, -1),
(-1, -1, +1),
(-1, +1, +1),
(-1, -1, -1),
(+1, -1, -1),
(-1, -1, +1), # front
(-1, -1, +1),
(+1, -1, -1),
(+1, -1, +1),
)
DISC = (
(0.0, 0.0, 0.0),
(1.0, 0.0, 0),
(0.8660254037844387, 0.49999999999999994, 0),
(0.0, 0.0, 0.0),
(0.8660254037844387, 0.49999999999999994, 0),
(0.5000000000000001, 0.8660254037844386, 0),
(0.0, 0.0, 0.0),
(0.5000000000000001, 0.8660254037844386, 0),
(6.123233995736766e-17, 1.0, 0),
(0.0, 0.0, 0.0),
(6.123233995736766e-17, 1.0, 0),
(-0.4999999999999998, 0.8660254037844387, 0),
(0.0, 0.0, 0.0),
(-0.4999999999999998, 0.8660254037844387, 0),
(-0.8660254037844385, 0.5000000000000003, 0),
(0.0, 0.0, 0.0),
(-0.8660254037844385, 0.5000000000000003, 0),
(-1.0, 1.2246467991473532e-16, 0),
(0.0, 0.0, 0.0),
(-1.0, 1.2246467991473532e-16, 0),
(-0.8660254037844388, -0.4999999999999997, 0),
(0.0, 0.0, 0.0),
(-0.8660254037844388, -0.4999999999999997, 0),
(-0.5000000000000004, -0.8660254037844384, 0),
(0.0, 0.0, 0.0),
(-0.5000000000000004, -0.8660254037844384, 0),
(-1.8369701987210297e-16, -1.0, 0),
(0.0, 0.0, 0.0),
(-1.8369701987210297e-16, -1.0, 0),
(0.49999999999999933, -0.866025403784439, 0),
(0.0, 0.0, 0.0),
(0.49999999999999933, -0.866025403784439, 0),
(0.8660254037844384, -0.5000000000000004, 0),
(0.0, 0.0, 0.0),
(0.8660254037844384, -0.5000000000000004, 0),
(1.0, 0.0, 0),
)
X3DISC = (
(0.0, 0.0, 0.0),
(1.0, 0.0, 0),
(0.8660254037844387, 0.49999999999999994, 0),
(0.0, 0.0, 0.0),
(0.8660254037844387, 0.49999999999999994, 0),
(0.5000000000000001, 0.8660254037844386, 0),
(0.0, 0.0, 0.0),
(0.5000000000000001, 0.8660254037844386, 0),
(6.123233995736766e-17, 1.0, 0),
(0.0, 0.0, 0.0),
(6.123233995736766e-17, 1.0, 0),
(-0.4999999999999998, 0.8660254037844387, 0),
(0.0, 0.0, 0.0),
(-0.4999999999999998, 0.8660254037844387, 0),
(-0.8660254037844385, 0.5000000000000003, 0),
(0.0, 0.0, 0.0),
(-0.8660254037844385, 0.5000000000000003, 0),
(-1.0, 1.2246467991473532e-16, 0),
(0.0, 0.0, 0.0),
(-1.0, 1.2246467991473532e-16, 0),
(-0.8660254037844388, -0.4999999999999997, 0),
(0.0, 0.0, 0.0),
(-0.8660254037844388, -0.4999999999999997, 0),
(-0.5000000000000004, -0.8660254037844384, 0),
(0.0, 0.0, 0.0),
(-0.5000000000000004, -0.8660254037844384, 0),
(-1.8369701987210297e-16, -1.0, 0),
(0.0, 0.0, 0.0),
(-1.8369701987210297e-16, -1.0, 0),
(0.49999999999999933, -0.866025403784439, 0),
(0.0, 0.0, 0.0),
(0.49999999999999933, -0.866025403784439, 0),
(0.8660254037844384, -0.5000000000000004, 0),
(0.0, 0.0, 0.0),
(0.8660254037844384, -0.5000000000000004, 0),
(1.0, 0.0, 0),
(0.0, 0.0, 0.0),
(0, 1.0, 0.0),
(0, 0.8660254037844387, 0.49999999999999994),
(0.0, 0.0, 0.0),
(0, 0.8660254037844387, 0.49999999999999994),
(0, 0.5000000000000001, 0.8660254037844386),
(0.0, 0.0, 0.0),
(0, 0.5000000000000001, 0.8660254037844386),
(0, 6.123233995736766e-17, 1.0),
(0.0, 0.0, 0.0),
(0, 6.123233995736766e-17, 1.0),
(0, -0.4999999999999998, 0.8660254037844387),
(0.0, 0.0, 0.0),
(0, -0.4999999999999998, 0.8660254037844387),
(0, -0.8660254037844385, 0.5000000000000003),
(0.0, 0.0, 0.0),
(0, -0.8660254037844385, 0.5000000000000003),
(0, -1.0, 1.2246467991473532e-16),
(0.0, 0.0, 0.0),
(0, -1.0, 1.2246467991473532e-16),
(0, -0.8660254037844388, -0.4999999999999997),
(0.0, 0.0, 0.0),
(0, -0.8660254037844388, -0.4999999999999997),
(0, -0.5000000000000004, -0.8660254037844384),
(0.0, 0.0, 0.0),
(0, -0.5000000000000004, -0.8660254037844384),
(0, -1.8369701987210297e-16, -1.0),
(0.0, 0.0, 0.0),
(0, -1.8369701987210297e-16, -1.0),
(0, 0.49999999999999933, -0.866025403784439),
(0.0, 0.0, 0.0),
(0, 0.49999999999999933, -0.866025403784439),
(0, 0.8660254037844384, -0.5000000000000004),
(0.0, 0.0, 0.0),
(0, 0.8660254037844384, -0.5000000000000004),
(0, 1.0, 0.0),
(0.0, 0.0, 0.0),
(0.0, 0, 1.0),
(0.49999999999999994, 0, 0.8660254037844387),
(0.0, 0.0, 0.0),
(0.49999999999999994, 0, 0.8660254037844387),
(0.8660254037844386, 0, 0.5000000000000001),
(0.0, 0.0, 0.0),
(0.8660254037844386, 0, 0.5000000000000001),
(1.0, 0, 6.123233995736766e-17),
(0.0, 0.0, 0.0),
(1.0, 0, 6.123233995736766e-17),
(0.8660254037844387, 0, -0.4999999999999998),
(0.0, 0.0, 0.0),
(0.8660254037844387, 0, -0.4999999999999998),
(0.5000000000000003, 0, -0.8660254037844385),
(0.0, 0.0, 0.0),
(0.5000000000000003, 0, -0.8660254037844385),
(1.2246467991473532e-16, 0, -1.0),
(0.0, 0.0, 0.0),
(1.2246467991473532e-16, 0, -1.0),
(-0.4999999999999997, 0, -0.8660254037844388),
(0.0, 0.0, 0.0),
(-0.4999999999999997, 0, -0.8660254037844388),
(-0.8660254037844384, 0, -0.5000000000000004),
(0.0, 0.0, 0.0),
(-0.8660254037844384, 0, -0.5000000000000004),
(-1.0, 0, -1.8369701987210297e-16),
(0.0, 0.0, 0.0),
(-1.0, 0, -1.8369701987210297e-16),
(-0.866025403784439, 0, 0.49999999999999933),
(0.0, 0.0, 0.0),
(-0.866025403784439, 0, 0.49999999999999933),
(-0.5000000000000004, 0, 0.8660254037844384),
(0.0, 0.0, 0.0),
(-0.5000000000000004, 0, 0.8660254037844384),
(0.0, 0, 1.0),
)
class CustomGizmo:
    # FIXME: highlighting/selection doesn't work
def draw_very_custom_shape(self, ctx, custom_shape, select_id=None):
# similar to draw_custom_shape
shape, batch, shader = custom_shape
shader.bind()
if select_id is not None:
gpu.select.load_id(select_id)
else:
if self.is_highlight:
color = (*self.color_highlight, self.alpha_highlight)
else:
color = (*self.color, self.alpha)
shader.uniform_float("color", color)
shape.glenable()
shape.uniform_region(ctx)
# shader.uniform_float('modelMatrix', self.matrix_world)
with gpu.matrix.push_pop():
gpu.matrix.multiply_matrix(self.matrix_world)
batch.draw()
bgl.glDisable(bgl.GL_BLEND)
class OffsetHandle:
"""Handling mouse to offset gizmo from base along Z axis"""
# FIXME: works a bit weird for rotated objects
def invoke(self, ctx, event):
self.init_value = self.target_get_value("offset") / self.scale_value
coordz = self.project_mouse(ctx, event)
if coordz is None:
return {"CANCELLED"}
self.init_coordz = coordz
return {"RUNNING_MODAL"}
def modal(self, ctx, event, tweak):
coordz = self.project_mouse(ctx, event)
if coordz is None:
return {"CANCELLED"}
delta = coordz - self.init_coordz
if "PRECISE" in tweak:
delta /= 10.0
value = max(0, self.init_value + delta)
value *= self.scale_value
# ctx.area.header_text_set(f"coords: {self.init_coordz} - {coordz}, delta: {delta}, value: {value}")
ctx.area.header_text_set(f"Depth: {value}")
self.target_set_value("offset", value)
return {"RUNNING_MODAL"}
def project_mouse(self, ctx, event):
"""Projecting mouse coords to local axis Z"""
# logic from source/blender/editors/gizmo_library/gizmo_types/arrow3d_gizmo.c:gizmo_arrow_modal
mouse = Vector((event.mouse_region_x, event.mouse_region_y))
region = ctx.region
region3d = ctx.region_data
ray_orig = view3d_utils.region_2d_to_origin_3d(region, region3d, mouse)
ray_norm = view3d_utils.region_2d_to_vector_3d(region, region3d, mouse)
# 'arrow' origin and direction
base = Vector((0, 0, 0))
axis = Vector((0, 0, 1))
        # projection of the arrow onto a plane perpendicular to the view ray
axis_proj = axis - ray_norm * axis.dot(ray_norm)
# intersection of the axis with the plane through view origin perpendicular to the arrow projection
coords = geometry.intersect_line_plane(base, axis, ray_orig, axis_proj)
return coords.z
def exit(self, ctx, cancel):
if cancel:
self.target_set_value("offset", self.init_value)
else:
self.group.update(ctx)
class UglyDotGizmo(OffsetHandle, types.Gizmo):
"""three orthogonal circles"""
bl_idname = "BIM_GT_uglydot_3d"
bl_target_properties = ({"id": "offset", "type": "FLOAT", "array_length": 1},)
__slots__ = (
"scale_value",
"custom_shape",
"init_value",
"init_coordz",
)
def setup(self):
self.custom_shape = self.new_custom_shape(type="TRIS", verts=X3DISC)
def refresh(self):
offset = self.target_get_value("offset") / self.scale_value
self.matrix_offset.col[3][2] = offset # z-shift
def draw(self, ctx):
self.refresh()
self.draw_custom_shape(self.custom_shape)
def draw_select(self, ctx, select_id):
self.refresh()
self.draw_custom_shape(self.custom_shape, select_id=select_id)
class DotGizmo(CustomGizmo, OffsetHandle, types.Gizmo):
"""Single dot viewport-aligned"""
# FIXME: make it selectable
bl_idname = "BIM_GT_dot_2d"
bl_target_properties = ({"id": "offset", "type": "FLOAT", "array_length": 1},)
__slots__ = (
"scale_value",
"custom_shape",
)
def setup(self):
shader = DotsGizmoShader()
self.custom_shape = shader, shader.batch(pos=((0, 0, 0),)), shader.prog
self.use_draw_scale = False
def refresh(self):
offset = self.target_get_value("offset") / self.scale_value
self.matrix_offset.col[3][2] = offset # z-shifted
def draw(self, ctx):
self.refresh()
self.draw_very_custom_shape(ctx, self.custom_shape)
def draw_select(self, ctx, select_id):
self.refresh()
self.draw_very_custom_shape(ctx, self.custom_shape, select_id=select_id)
# doesn't get called
# def test_select(self, ctx, location):
# pass
class ExtrusionGuidesGizmo(CustomGizmo, types.Gizmo):
"""Extrusion guides
Noninteractive gizmo to indicate extrusion depth and planes.
Draws main segment and orthogonal cross at endpoints.
"""
bl_idname = "BIM_GT_extrusion_guides"
bl_target_properties = ({"id": "depth", "type": "FLOAT", "array_length": 1},)
__slots__ = ("scale_value", "custom_shape")
def setup(self):
shader = ExtrusionGuidesShader()
self.custom_shape = shader, shader.batch(pos=((0, 0, 0), (0, 0, 1))), shader.prog
self.use_draw_scale = False
def refresh(self):
depth = self.target_get_value("depth") / self.scale_value
self.matrix_offset.col[2][2] = depth # z-scaled
def draw(self, ctx):
self.refresh()
self.draw_very_custom_shape(ctx, self.custom_shape)
class DimensionLabelGizmo(types.Gizmo):
"""Text label for a dimension"""
# does not work properly, fonts are totally screwed up
bl_idname = "BIM_GT_dimension_label"
bl_target_properties = ({"id": "value", "type": "FLOAT", "array_length": 1},)
__slots__ = "text_label"
def setup(self):
pass
def refresh(self, ctx):
value = self.target_get_value("value")
self.matrix_offset.col[3][2] = value * 0.5
unit_system = ctx.scene.unit_settings.system
self.text_label = bpy.utils.units.to_string(unit_system, "LENGTH", value, 3, split_unit=False)
def draw(self, ctx):
self.refresh(ctx)
self.draw_text(ctx)
def draw_text(self, ctx):
font_id = 0
font_size = 16
dpi = ctx.preferences.system.dpi
# pos = self.matrix_world @ Vector((0, 0, 0, 1))
# pos = Vector((0, 0, 0.5))
# region = ctx.region
# region3d = ctx.region_data
# pos = view3d_utils.location_3d_to_region_2d(region, region3d, pos)
# text = self.text_label
blf.size(font_id, font_size, dpi)
blf.position(font_id, 0, 0, 0)
blf.color(font_id, *self.color, self.alpha)
blf.draw(font_id, "ABC")
class ExtrusionWidget(types.GizmoGroup):
bl_idname = "bim.extrusion_widget"
bl_label = "Extrusion Gizmos"
bl_space_type = "VIEW_3D"
bl_region_type = "WINDOW"
bl_options = {"3D", "PERSISTENT", "SHOW_MODAL_ALL"}
@classmethod
def poll(cls, ctx):
obj = ctx.object
return (
obj
and obj.type == "MESH"
and obj.data.BIMMeshProperties.ifc_parameters.get("IfcExtrudedAreaSolid/Depth") is not None
)
def setup(self, ctx):
target = ctx.object
prop = target.data.BIMMeshProperties.ifc_parameters.get("IfcExtrudedAreaSolid/Depth")
basis = target.matrix_world.normalized()
theme = ctx.preferences.themes[0].user_interface
scale_value = self.get_scale_value(ctx.scene.unit_settings.system, ctx.scene.unit_settings.length_unit)
gz = self.handle = self.gizmos.new("BIM_GT_uglydot_3d")
gz.matrix_basis = basis
gz.scale_basis = 0.1
gz.color = gz.color_highlight = tuple(theme.gizmo_primary)
gz.alpha = 0.5
gz.alpha_highlight = 1.0
gz.use_draw_modal = True
gz.target_set_prop("offset", prop, "value")
gz.scale_value = scale_value
gz = self.guides = self.gizmos.new("BIM_GT_extrusion_guides")
gz.matrix_basis = basis
gz.color = gz.color_highlight = tuple(theme.gizmo_secondary)
gz.alpha = gz.alpha_highlight = 0.5
gz.use_draw_modal = True
gz.target_set_prop("depth", prop, "value")
gz.scale_value = scale_value
# gz = self.label = self.gizmos.new('GIZMO_GT_dimension_label')
# gz.matrix_basis = basis
# gz.color = tuple(theme.gizmo_secondary)
# gz.alpha = 0.5
# gz.use_draw_modal = True
# gz.target_set_prop('value', target.demo, 'depth')
def refresh(self, ctx):
"""updating gizmos"""
target = ctx.object
basis = target.matrix_world.normalized()
self.handle.matrix_basis = basis
self.guides.matrix_basis = basis
def update(self, ctx):
"""updating object"""
bpy.ops.bim.update_parametric_representation()
target = ctx.object
prop = target.data.BIMMeshProperties.ifc_parameters.get("IfcExtrudedAreaSolid/Depth")
self.handle.target_set_prop("offset", prop, "value")
self.guides.target_set_prop("depth", prop, "value")
@staticmethod
def get_scale_value(system, length_unit):
scale_value = 1
if system == "METRIC":
if length_unit == "KILOMETERS":
scale_value /= 1000
elif length_unit == "CENTIMETERS":
scale_value *= 100
elif length_unit == "MILLIMETERS":
scale_value *= 1000
elif length_unit == "MICROMETERS":
scale_value *= 1000000
elif system == "IMPERIAL":
if length_unit == "MILES":
scale_value /= si_conversions["mile"]
elif length_unit == "FEET":
scale_value /= si_conversions["foot"]
elif length_unit == "INCHES":
scale_value /= si_conversions["inch"]
elif length_unit == "THOU":
scale_value /= si_conversions["thou"]
return scale_value
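# Illustration (not part of the original file): typical get_scale_value results,
# derived directly from the branches above --
#   ("METRIC", "MILLIMETERS") -> 1000, ("METRIC", "KILOMETERS") -> 0.001,
#   ("IMPERIAL", "FEET") -> 1 / si_conversions["foot"]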
| IfcOpenShell/IfcOpenShell | src/blenderbim/blenderbim/bim/module/drawing/gizmos.py | Python | lgpl-3.0 | 18,534 |
# https://en.wikipedia.org/wiki/Treap
import random
import time
class Treap:
def __init__(self, key):
self.key = key
self.prio = random.randint(0, 1000000000)
self.size = 1
self.left = None
self.right = None
def update(self):
self.size = 1 + size(self.left) + size(self.right)
def size(treap):
return 0 if treap is None else treap.size
def split(root, minRight):
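    # Split into (left, right): left keeps keys < minRight, right keeps keys >= minRight.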
if root is None:
return None, None
if root.key >= minRight:
left, right = split(root.left, minRight)
root.left = right
root.update()
return left, root
else:
left, right = split(root.right, minRight)
root.right = left
root.update()
return root, right
def merge(left, right):
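    # Merge two treaps; assumes every key in left precedes every key in right.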
if left is None:
return right
if right is None:
return left
if left.prio > right.prio:
left.right = merge(left.right, right)
left.update()
return left
else:
right.left = merge(left, right.left)
right.update()
return right
def insert(root, key):
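    # Insert key by splitting around it and merging left + new node + right.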
left, right = split(root, key)
return merge(merge(left, Treap(key)), right)
def remove(root, key):
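    # Remove every occurrence of key: keep keys < key and keys > key, then merge.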
left, right = split(root, key)
return merge(left, split(right, key + 1)[1])
def kth(root, k):
if k < size(root.left):
return kth(root.left, k)
elif k > size(root.left):
return kth(root.right, k - size(root.left) - 1)
return root.key
def print_treap(root):
def dfs_print(root):
if root is None:
return
dfs_print(root.left)
print(str(root.key) + ' ', end='')
dfs_print(root.right)
dfs_print(root)
print()
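# Small usage sketch (added for illustration): build a treap from a few keys
# and read them back in sorted order via kth().
def demo():
    t = None
    for key in [5, 1, 9, 3]:
        t = insert(t, key)
    assert size(t) == 4
    assert [kth(t, i) for i in range(size(t))] == [1, 3, 5, 9]
demo()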
def test():
start = time.time()
treap = None
s = set()
for i in range(100000):
key = random.randint(0, 10000)
if random.randint(0, 1) == 0:
if key in s:
treap = remove(treap, key)
s.remove(key)
elif key not in s:
treap = insert(treap, key)
s.add(key)
assert len(s) == size(treap)
for i in range(size(treap)):
assert kth(treap, i) in s
print(time.time() - start)
test()
| indy256/codelibrary | python/treap_bst.py | Python | unlicense | 2,234 |
# coding: utf-8
#
# Copyright (C) 2015 ycmd contributors
#
# This file is part of ycmd.
#
# ycmd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ycmd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ycmd. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import * # noqa
from nose.tools import eq_
from hamcrest import ( assert_that, has_item, has_items, has_entry,
has_entries, contains, empty, contains_string )
from ycmd.utils import ReadFile
from ycmd.tests.python import PathToTestFile, SharedYcmd
from ycmd.tests.test_utils import ( BuildRequest, CompletionEntryMatcher,
CompletionLocationMatcher )
import http.client
@SharedYcmd
def GetCompletions_Basic_test( app ):
filepath = PathToTestFile( 'basic.py' )
completion_data = BuildRequest( filepath = filepath,
filetype = 'python',
contents = ReadFile( filepath ),
line_num = 7,
column_num = 3)
results = app.post_json( '/completions',
completion_data ).json[ 'completions' ]
assert_that( results,
has_items(
CompletionEntryMatcher( 'a' ),
CompletionEntryMatcher( 'b' ),
CompletionLocationMatcher( 'line_num', 3 ),
CompletionLocationMatcher( 'line_num', 4 ),
CompletionLocationMatcher( 'column_num', 10 ),
CompletionLocationMatcher( 'filepath', filepath ) ) )
@SharedYcmd
def GetCompletions_UnicodeDescription_test( app ):
filepath = PathToTestFile( 'unicode.py' )
completion_data = BuildRequest( filepath = filepath,
filetype = 'python',
contents = ReadFile( filepath ),
force_semantic = True,
line_num = 5,
column_num = 3)
results = app.post_json( '/completions',
completion_data ).json[ 'completions' ]
assert_that( results, has_item(
has_entry( 'detailed_info', contains_string( u'aafäö' ) ) ) )
def RunTest( app, test ):
"""
Method to run a simple completion test and verify the result
test is a dictionary containing:
'request': kwargs for BuildRequest
'expect': {
       'response': server response code (e.g. http.client.OK)
'data': matcher for the server response json
}
"""
contents = ReadFile( test[ 'request' ][ 'filepath' ] )
def CombineRequest( request, data ):
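    # Note: 'request' is updated in place here, so keys added for one request also
    # persist on the caller's dict for the next one.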
kw = request
request.update( data )
return BuildRequest( **kw )
app.post_json( '/event_notification',
CombineRequest( test[ 'request' ], {
'event_name': 'FileReadyToParse',
'contents': contents,
} ) )
# We ignore errors here and we check the response code ourself.
# This is to allow testing of requests returning errors.
response = app.post_json( '/completions',
CombineRequest( test[ 'request' ], {
'contents': contents
} ),
expect_errors = True )
eq_( response.status_code, test[ 'expect' ][ 'response' ] )
assert_that( response.json, test[ 'expect' ][ 'data' ] )
@SharedYcmd
def GetCompletions_NoSuggestions_Fallback_test( app ):
# Python completer doesn't raise NO_COMPLETIONS_MESSAGE, so this is a
# different code path to the Clang completer cases
# TESTCASE2 (general_fallback/lang_python.py)
RunTest( app, {
'description': 'param jedi does not know about (id). query="a_p"',
'request': {
'filetype' : 'python',
'filepath' : PathToTestFile( 'general_fallback',
'lang_python.py' ),
'line_num' : 28,
'column_num': 20,
'force_semantic': False,
},
'expect': {
'response': http.client.OK,
'data': has_entries( {
'completions': contains(
CompletionEntryMatcher( 'a_parameter', '[ID]' ),
CompletionEntryMatcher( 'another_parameter', '[ID]' ),
),
'errors': empty(),
} )
},
} )
@SharedYcmd
def GetCompletions_Unicode_InLine_test( app ):
RunTest( app, {
'description': 'return completions for strings with multi-byte chars',
'request': {
'filetype' : 'python',
'filepath' : PathToTestFile( 'unicode.py' ),
'line_num' : 7,
'column_num': 14
},
'expect': {
'response': http.client.OK,
'data': has_entries( {
'completions': contains(
CompletionEntryMatcher( 'center', 'function: builtins.str.center' )
),
'errors': empty(),
} )
},
} )
| netsamir/dotfiles | files/vim/bundle/YouCompleteMe/third_party/ycmd/ycmd/tests/python/get_completions_test.py | Python | unlicense | 5,566 |
import fechbase
class Records(fechbase.RecordsBase):
def __init__(self):
fechbase.RecordsBase.__init__(self)
self.fields = [
{'name': 'FORM TYPE', 'number': '1'},
{'name': 'FILER FEC CMTE ID', 'number': '2'},
{'name': 'ENTITY TYPE', 'number': '3'},
{'name': 'NAME (Payee)', 'number': '4'},
{'name': 'STREET 1', 'number': '5'},
{'name': 'STREET 2', 'number': '6'},
{'name': 'CITY', 'number': '7'},
{'name': 'STATE', 'number': '8'},
{'name': 'ZIP', 'number': '9'},
{'name': 'TRANSDESC', 'number': '10'},
{'name': 'Of Expenditure', 'number': '11-'},
{'name': 'AMOUNT', 'number': '12'},
{'name': 'SUPPORT/OPPOSE', 'number': '13'},
{'name': 'S/O FEC CAN ID NUMBER', 'number': '14'},
{'name': 'S/O CAN/NAME', 'number': '15'},
{'name': 'S/O CAN/OFFICE', 'number': '16'},
{'name': 'S/O CAN/STATE', 'number': '17'},
{'name': 'S/O CAN/DIST', 'number': '18'},
{'name': 'FEC COMMITTEE ID NUMBER', 'number': '19'},
{'name': 'Unused field', 'number': '20'},
{'name': 'Unused field', 'number': '21'},
{'name': 'Unused field', 'number': '22'},
{'name': 'Unused field', 'number': '23'},
{'name': 'Unused field', 'number': '24'},
{'name': 'CONDUIT NAME', 'number': '25'},
{'name': 'CONDUIT STREET 1', 'number': '26'},
{'name': 'CONDUIT STREET 2', 'number': '27'},
{'name': 'CONDUIT CITY', 'number': '28'},
{'name': 'CONDUIT STATE', 'number': '29'},
{'name': 'CONDUIT ZIP', 'number': '30'},
{'name': 'AMENDED CD', 'number': '31'},
{'name': 'TRAN ID', 'number': '32'},
]
self.fields_names = self.hash_names(self.fields)
| h4ck3rm1k3/FEC-Field-Documentation | fec/version/v3/F57.py | Python | unlicense | 1,916 |
#!/usr/bin/env python
"""
Largest product in a grid
Problem 11
Published on 22 February 2002 at 06:00 pm [Server Time]
In the 20x20 grid below, four numbers along a diagonal line have been marked in red.
The product of these numbers is 26 * 63 * 78 * 14 = 1788696.
What is the greatest product of four adjacent numbers in the same direction (up, down, left, right, or diagonally) in the 20x20 grid?
"""
THE_GRID = [[int(column) for column in row.split(' ')] for row in
"""
08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48
""".strip().split('\n')]
"""
A few words about the declaration of THE_GRID:
This is not the easiest thing to digest on first look. I think it is "pythonic"
in its implementation and it allows to copy/paste the grid straight out of the problem
statement without a bunch of mucking around to manually turn it into a 2d array
( or nested lists, actually ). It is arranged as a list of rows. Each row is a
list of numbers for each column in that row. Looking at it, the multi-line string
definition actually converts to a list of strings from the split operation. One
string for each row. The top list comprehension converts each row into a list of
short strings ( the columns ) which are also converted to int.
"""
#------------------------------------------------------------------------------
import operator
#------------------------------------------------------------------------------
def product(iterable):
return reduce(operator.mul, iterable, 1)
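# NOTE: the next four solve(run_length) definitions are earlier drafts; each one is
# overridden by the final solve() defined last, which is the version main() calls.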
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for y_dir in (0, 1):
for x_dir in (0,1):
for i in range(run_length):
print THE_GRID[row+(y_dir*i)][column+x_dir*i]
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for i in range(run_length):
for y_dir in (0, 1):
for x_dir in (0,1):
print THE_GRID[row+(y_dir*i)][column+x_dir*i]
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
highest = 0
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for x_dir, y_dir in [(1, 0), (0, 1), (1, 1)]:
for i in range(run_length):
print THE_GRID[row+(y_dir*i)][column+x_dir*i]
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
highest = 0
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for x_dir, y_dir in [(1, 0), (0, 1), (1, 1)]:
run =[THE_GRID[row+(y_dir*i)][column+x_dir*i] for i in range(run_length)]
result = product(run)
print run, result
#if result > highest:
# highest = result
#return(highest)
#------------------------------------------------------------------------------
def solve():
g = THE_GRID
maxp = 0
    rows, cols, path_size = len(g), len(g[0]), 4  # four adjacent numbers, per the problem statement
for i in range(rows):
for j in range(cols - path_size + 1):
phv = max(product([g[i][j+s] for s in range(path_size)]),
product([g[j+s][i] for s in range(path_size)]))
#phv = max(g[i][j] * g[i][j+1] * g[i][j+2] * g[i][j+3],
# g[j][i] * g[j+1][i] * g[j+2][i] * g[j+3][i])
            if i <= rows - path_size:  # include diagonals starting on the last valid row
pdd = max(product([g[i+s][j+s] for s in range(path_size)]),
product([g[i+s][j+path_size-s-1] for s in range(path_size)]))
#pdd = max(g[i][j] * g[i+1][j+1] * g[i+2][j+2] * g[i+3][j+3],
# g[i][j+3] * g[i+1][j+2] * g[i+2][j+1] * g[i+3][j])
maxp = max(maxp, phv, pdd)
return maxp
#------------------------------------------------------------------------------
def main():
print "PROBLEM:\n"
for line in __doc__.strip().split('\n'):
print '\t', line
print "\nSOLUTION:"
print "\n\t", solve()
#------------------------------------------------------------------------------
if __name__ == "__main__":
main() | slowkid/EulerProject | solutions/problem11.py | Python | unlicense | 5,822 |
"""
Your job is to write a function which increments a string, to create a new string. If the string already ends with a number, the number should be incremented by 1. If the string does not end with a number the number 1 should be appended to the new string.
Examples:
foo -> foo1
foobar23 -> foobar24
foo0042 -> foo0043
foo9 -> foo10
foo099 -> foo100
Attention: If the number has leading zeros the amount of digits should be considered.
"""
import re
def increment_string(strng):
match = re.match(r"(.*?)(\d*)$",strng)
string = match.group(1)
number = match.group(2)
if not number:
return string + '1'
else:
return string + str(int(number)+1).zfill(len(number))
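# Sanity checks on the examples listed above (added for illustration):
if __name__ == "__main__":
    assert increment_string("foo") == "foo1"
    assert increment_string("foobar23") == "foobar24"
    assert increment_string("foo0042") == "foo0043"
    assert increment_string("foo9") == "foo10"
    assert increment_string("foo099") == "foo100"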
| aadithpm/code-a-day | py/String Incrementer.py | Python | unlicense | 744 |
from ..parsers.errors import ErrorResponse
from warnings import warn
def raise_for_error(f):
"""
Wrapper method to parse any error response and raise the ErrorResponse instance if an error is encountered.
:param f:
:return:
"""
def inner(*args, **kwargs):
warn('`raise_for_error` is deprecated and will not process any response content.')
return f(*args, **kwargs)
# e = ErrorResponse.load(content)
# e.raise_for_error()
# return content
return inner
def raise_response_for_error(f):
"""
Wrapper method to parse a response object and raise the ErrorResponse
instance if an error is encountered in the response body.
:param f:
:return:
"""
def inner(*args, **kwargs):
warn('`raise_response_for_error` is deprecated and will not process any response content.')
return f(*args, **kwargs)
return inner
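# A minimal usage sketch (hypothetical function name, added for illustration):
# both decorators are now passthroughs that emit a deprecation warning and then
# call the wrapped function unchanged.
if __name__ == "__main__":
    @raise_for_error
    def fetch_report():
        return "<response/>"
    print(fetch_report())  # warns about the deprecation, then prints the raw content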
| ziplokk1/python-amazon-mws-tools | mwstools/requesters/base.py | Python | unlicense | 920 |
#!/usr/bin/env python
# -.- coding: utf-8 -.-y
import random
import socket
import os
import time
import threading
import Queue
import sys
import argparse
from multiprocessing import Process
print """\33[91m
═════════════════════════════════════════════════════════
███████ ██████ ███████
█ █ █ █ ║
█ █════╗ █ ╔═█ ║
█═════════════█ ╚█ ║█═══╝
█ ██████ ║█
█ █ █ ╚╗█ ╔═══════Server
█════════╗ █ █ ╚═█ ║
███████ ║ █ █ ███████
Chat Room Client════════╝
═════════════════════════════════════════════════════════
\33[92m"""
quit = Queue.Queue()
path = os.path.realpath(__file__)
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--screen", help="This is used by the script to make a screen. Not necessarily needed for regular users.")
args = parser.parse_args()
def outputscreen(messages, online):
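    # Draws the chat screen: one terminal row per line, with chat messages
    # left-aligned and the online-user list right-aligned, both trimmed to fit
    # the current terminal height.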
rows, columns = os.popen('stty size', 'r').read().split()
rows = int(rows)
rows = rows - 1
columns = int(columns)
if len(messages) > rows:
messages = messages[len(messages) - rows:]
print messages
else:
pass
if len(online) > rows:
online = online[len(online) - rows:]
print online
else:
pass
output = []
for line in range(rows):
output.append(["", ""])
tick = 0
for message in messages:
output[tick][0] = message
tick = tick + 1
print tick
if len(output) <= len(online):
print "less or equal output then online"
for l in range(len(online) - len(output)):
output.append(["", ""])
print output
#for num in range(len(online)):
tick = 0
print output
for user in online:
output[tick][1] = user
tick = tick + 1
print output
else:
print "more output then online"
print rows
#for num in range(len(output)):
tick = 0
for user in online:
output[tick][1] = user
tick = tick + 1
for line in output:
space = int(columns)
outleng = len(line[0]) + len(line[1])
space = space - outleng
print line[0] + " "*space + line[1]
if args.screen:
sp = args.screen
sp = sp.split(":")
user = sp[2]
port = int(sp[1])
server = sp[0]
global cv
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = (server, port)
sock.connect(server_address)
sock.send("screen:")
#print "\33[96m Type /stop to quit\33[91m"
quit = False
messages = []
import ast
online = sock.recv(1024)
online = ast.literal_eval(online)
tmp = online
while quit == False:
servercom = sock.recv(1024)
#print servercom
if servercom == "quitting:":
            quit = True  # plain bool flag in this screen process; the Queue is only used by the main process
os._exit(0)
elif "online:" in servercom:
online = ast.literal_eval(servercom[7:])
if tmp != online:
for line in tmp:
if line not in online:
messages.append(line + " has left the server...")
else:
pass
for line in online:
if line not in tmp:
messages.append(line + " has joined the server...")
else:
pass
else:
pass
if user not in online:
quit = True
sock.send("quitting:")
os._exit(0)
else:
sock.send("good:")
tmp = online
outputscreen(messages, online)
else:
messages.append(servercom)
outputscreen(messages, online)
time.sleep(.01)
if servercom == "ping":
sock.send("ping:pong")
else:
pass
else:
pass
cv = "1.0"
username = raw_input("Name:")
server = raw_input("Server IP[127.0.0.1]:")
port = raw_input("Server Port[22550]:")
if port == "":
port = "22550"
else:
pass
if server == "":
server = "127.0.0.1"
else:
pass
print port
class connect(object):
def __init__(self, server, port, username, quit):
self.quit = quit
self.server = server
self.port = port
self.username = username
self.con()
def con(self):
#try:
global cv
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = (self.server, int(self.port))
self.sock.connect(server_address)
self.sock.settimeout(60)
self.sock.send("cv:" + cv)
compatible = self.sock.recv(1024)
if compatible == "comp:1":
pass
else:
print """\33[91m
***************************************************
Error Server is on version """ + compatible[7:] + """
***************************************************
"""
sys.exit()
self.sock.send("user:" + self.username)
nc = self.sock.recv(1024)
if "error:" in nc:
print """\33[91m
***************************************************
Error while sending username:
""" + nc[6:] + """
***************************************************
"""
os._exit(0)
#threading.Thread(target = self.ping, args=()).start()
#threading.Thread(target = self.screen, args=()).start()
#self.screen.start()
quit = False
while quit == False:
#inp = raw_input(">>")
#time.sleep(.2)
send = str(random.randint(0, 9))
self.sock.send(send)
print send
'''if inp == "/quit":
quit = True
self.quit.put("1")
self.sock.send("quitting:")
elif "" == inp:
"""\33[91m
***************************************************
Error no message entered
***************************************************
"""
elif "/help" == inp:
"""\33[91m
***************************************************
Error no help menu implemented yet
***************************************************
"""
else:
self.sock.send("mesg:" + inp)'''
else:
os._exit(0)
'''except:
print """\33[91m
***************************************************
Error while initiating connecting with server
***************************************************
"""
sys.exit()'''
def ping(self):
while True:
self.sock.send("ping:")
time.sleep(1)
#def screen(self):
global path
os.system("xterm -hold -e python " + "./ChatRoom1.0Client.py" + " -s " + self.server + ":" + self.port + ":" + self.username)
self.qt = True
self.quit.put("1")
def quitcheck(quit):
while True:
time.sleep(1)
if quit.empty() == True:
pass
else:
os._exit(0)
threading.Thread(target = quitcheck, args=(quit,)).start()
threading.Thread(target=connect, args=(server, port, username, quit)).start()
| YodaBytePrograming/ChatRoom | ChatRoom 1.0/ChatRoom1.0Doser.py | Python | unlicense | 8,273 |
#!/usr/bin/env python
"""
Assorted utilities for manipulating latitude and longitude values
"""
from __future__ import unicode_literals
__version__ = "1.4"
import math, struct
def signbit(value):
"""
Test whether the sign bit of the given floating-point value is
set. If it is set, this generally means the given value is
negative. However, this is not the same as comparing the value
to C{0.0}. For example:
>>> NEGATIVE_ZERO < 0.0
False
since negative zero is numerically equal to positive zero. But
the sign bit of negative zero is indeed set:
>>> signbit(NEGATIVE_ZERO)
True
>>> signbit(0.0)
False
@type value: float
@param value: a Python (double-precision) float value
@rtype: bool
@return: C{True} if the sign bit of C{value} is set;
C{False} if it is not set.
signbit and doubleToRawLongBits
are from Martin Jansche:
http://symptotic.com/mj/code.html (MIT license).
This is required to capture the difference between -0.0 and 0.0, which is
useful if someone wants to convert a latitude or longitude like:
    -0.0 degrees, 34 minutes to 0d34'00"S
"""
return (doubleToRawLongBits(value) >> 63) == 1
def doubleToRawLongBits(value):
"""
@type value: float
@param value: a Python (double-precision) float value
@rtype: long
@return: the IEEE 754 bit representation (64 bits as a long integer)
of the given double-precision floating-point value.
"""
# pack double into 64 bits, then unpack as long int
return struct.unpack(b'Q', struct.pack(b'd', value))[0]
class LatLongConverter:
@classmethod
def ToDecDeg(self, d=0, m=0, s=0, ustring = False, max=180):
"""
DecDegrees = ToDecDeg(d=0, m=0, s=0)
converts degrees, minutes, seconds to decimal degrees (returned as a Float).
"""
if m < 0 or s < 0:
raise ValueError("Minutes and Seconds have to be positive")
if m > 60.0 or s > 60.0:
raise ValueError("Minutes and Seconds have to be between -180 and 180")
if abs(d) > max:
raise ValueError("Degrees have to be between -180 and 180")
if signbit(d):
Sign = -1
d = abs(d)
else:
Sign = 1
deg_has_fract = bool(math.modf(d)[0])
min_has_fract = bool(math.modf(m)[0])
if deg_has_fract and (m != 0.0 or s != 0.0):
raise ValueError("degrees cannot have fraction unless both minutes"
"and seconds are zero")
if min_has_fract and s != 0.0:
raise ValueError("minutes cannot have fraction unless seconds are zero")
DecDegrees = Sign * (d + m/60.0 + s/3600.0)
if ustring:
return u"%.6f\xb0"%(DecDegrees)
else:
return DecDegrees
@classmethod
def ToDegMin(self, DecDegrees, ustring = False):
"""
Converts from decimal (binary float) degrees to:
Degrees, Minutes
If the optional parameter: "ustring" is True,
a Unicode string is returned
"""
if signbit(DecDegrees):
Sign = -1
DecDegrees = abs(DecDegrees)
else:
Sign = 1
Degrees = int(DecDegrees)
        DecMinutes = round((DecDegrees - Degrees + 1e-14) * 60, 10)  # add a tiny bit then round to avoid binary rounding issues
if ustring:
if Sign == 1:
return u"%i\xb0 %.3f'"%(Degrees, DecMinutes)
else:
return u"-%i\xb0 %.3f'"%(Degrees, DecMinutes)
else:
return (Sign*float(Degrees), DecMinutes) # float to preserve -0.0
@classmethod
def ToDegMinSec(self, DecDegrees, ustring = False):
"""
Converts from decimal (binary float) degrees to:
Degrees, Minutes, Seconds
If the optional parameter: "ustring" is True,
a unicode string is returned
"""
if signbit(DecDegrees):
Sign = -1
DecDegrees = abs(DecDegrees)
else:
Sign = 1
Degrees = int(DecDegrees)
DecMinutes = (DecDegrees - Degrees + 1e-14) * 60 # add a tiny bit to avoid rounding issues
Minutes = int(DecMinutes)
Seconds = round(((DecMinutes - Minutes) * 60), 10 )
if ustring:
if Sign == 1:
return u"%i\xb0 %i' %.2f\""%(Degrees, Minutes, Seconds)
else:
return u"-%i\xb0 %i' %.2f\""%(Degrees, Minutes, Seconds)
else:
return (Sign * float(Degrees), Minutes, Seconds)
## These are classes used in our web apps: ResponseLink, etc.
## They provide a different interface to lat-long format conversion
class Latitude:
"""An object that can interpret a latitude in various formats.
Constructor:
Latitude(deg, min=0.0, sec=0.0, direction=None)
- 'deg' may be between -90.0 and 90.0.
- if 'min' is nonzero, 'deg' cannot have a fractional part.
(This means 5 and 5.0 are acceptable but 5.1 is not.)
- if 'sec' is nonzero, 'deg' and 'min' cannot have fractional parts.
- 'direction' may be a string beginning with 'N' or 'S' (case
insensitive), or None.
- if 'direction' is not None, 'deg' cannot be negative.
Attributes:
.value : a float in decimal degrees. Positive is North; negative is
South. (These apply to zero too; positive zero is North.)
Methods:
.degrees() -> (float, str)
.degrees_minutes() -> (int, float, str)
.degrees_minutes_seconds() -> (int, int, float, str)
The 'str' argument is the direction: "North" or "South".
Example:
    >>> lat1 = Latitude(-60.7625)
    >>> lat2 = Latitude(-60, 45.7500)
    >>> lat3 = Latitude(-60, 45, 45)
    >>> lat4 = Latitude(60.7625, direction='South')
    >>> lat5 = Latitude(60, 45.7500, direction='S')
    >>> lat6 = Latitude(60, 45, 45, direction='south')
    >>> (lat1.value == lat2.value == lat3.value == lat4.value ==
    ... lat5.value == lat6.value)
    True
    >>> lat1.value
    -60.7625
    >>> lat1.degrees()
    (60.7625, 'South')
    >>> lat1.degrees_minutes()
    (60.0, 45.75, 'South')
    >>> lat1.degrees_minutes_seconds()
    (60.0, 45, 45.0, 'South')
    >>> print str(lat1)
    Latitude(-60.762500)
"""
negative_direction = "South"
positive_direction = "North"
min = -90.0
max = 90.0
def __init__(self, deg, min=0.0, sec=0.0, direction=None):
ndir = self.negative_direction[0].upper()
pdir = self.positive_direction[0].upper()
if direction:
if deg < 0.0:
msg = "degrees cannot be negative if direction is specified"
raise ValueError(msg)
if direction[0].upper() == pdir:
pass
elif direction[0].upper() == ndir:
deg = -deg
else:
msg = "direction must start with %r or %r" % (pdir, ndir)
raise ValueError(msg)
self.value = LatLongConverter.ToDecDeg(deg, min, sec, max=self.max)
def direction(self):
if self.value < 0.0:
return self.negative_direction
else:
return self.positive_direction
def degrees(self):
deg = abs(self.value)
return deg, self.direction()
def degrees_minutes(self):
deg, min = LatLongConverter.ToDegMin(abs(self.value))
return deg, min, self.direction()
def degrees_minutes_seconds(self):
deg, min, sec = LatLongConverter.ToDegMinSec(abs(self.value))
return deg, min, sec, self.direction()
def __repr__(self):
try:
return "%s(%f)" % (self.__class__.__name__, self.value)
except AttributeError:
return "%s(uninitialized)" % self.__class__.__name__
def format(self, style):
"""
format(style)
returns formatted value as Unicode string with u'\xb0' (degree symbol).
style is one of:
1: decimal degrees
2: degrees, decimal minutes
3: degrees, minutes, seconds
"""
if style == 1:
return u'''%0.2f\xb0 %s''' % self.degrees()
elif style == 2:
return u'''%d\xb0 %0.2f' %s''' % self.degrees_minutes()
elif style == 3:
return u'''%d\xb0 %d' %0.2f" %s''' % self.degrees_minutes_seconds()
else:
raise ValueError("style must be 1, 2, or 3")
def format_html(self, style):
"""
format_html(style)
Backward compatibility for Quixote rlink and Pylons inews.
"""
return self.format(style).replace(u"\xb0", u"°").encode("ascii")
class Longitude(Latitude):
"""See Latitude docstring.
Positive is East; negative is West. Degrees must be between -180.0 and
180.0
"""
negative_direction = "West"
positive_direction = "East"
min = -180.0
max = 180.0
class DummyLatitude:
"""A pseudo-Latitude whose components are None.
Useful in building HTML forms where the value is not required.
Note: this class may be deleted if it doesn't turn out to be useful.
"""
value = None
def direction(self): return None
def degrees(self): return None, None
def degrees_minutes(self): return None, None, None
def degrees_minutes_seconds(self): return None, None, None, None
class DummyLongitude(DummyLatitude):
"""
Note: this class may be deleted if it doesn't turn out to be useful.
"""
pass
## The new simple API -- just methods that do what we need for ResponseLink, etc.
DEGREES = "\xb0" # "DEGREE SIGN"
MINUTES = "\u2032" # "PRIME"
SECONDS = "\u2033" # "DOUBLE PRIME"
LAT_POSITIVE_DIRECTION = "North"
LAT_NEGATIVE_DIRECTION = "South"
LON_POSITIVE_DIRECTION = "East"
LON_NEGATIVE_DIRECTION = "West"
FORMAT1 = "{:.2f}\N{DEGREE SIGN} {}"
FORMAT2 = "{:.0f}\N{DEGREE SIGN} {:.2f}\N{PRIME} {}"
FORMAT3 = "{:.0f}\N{DEGREE SIGN} {:.0f}\N{PRIME} {:.2f}\N{DOUBLE PRIME} {}"
def reduce_base_60(f):
"""extract the base 60 fractional portion of a floating point number.
i.e. minutes from degrees, seconds from minutes.
"""
fract, whole = math.modf(f)
# Add a tiny bit before rounding to avoid binary rounding errors.
fract = abs(fract)
fract = (fract + 1e-14) * 60
fract = round(fract, 10)
return whole, fract
def format_latlon2(f, positive_direction, negative_direction):
direction = positive_direction if f >= 0.0 else negative_direction
degrees, minutes = reduce_base_60(f)
degrees = abs(degrees)
return FORMAT2.format(degrees, minutes, direction)
def format_latlon3(f, positive_direction, negative_direction):
direction = positive_direction if f >= 0.0 else negative_direction
degrees, minutes = reduce_base_60(f)
minutes, seconds = reduce_base_60(minutes)
degrees = abs(degrees)
return FORMAT3.format(degrees, minutes, seconds, direction)
def format_lat(f):
return format_latlon2(f, LAT_POSITIVE_DIRECTION, LAT_NEGATIVE_DIRECTION)
def format_lon(f):
return format_latlon2(f, LON_POSITIVE_DIRECTION, LON_NEGATIVE_DIRECTION)
def format_lat_dms(f):
return format_latlon3(f, LAT_POSITIVE_DIRECTION, LAT_NEGATIVE_DIRECTION)
def format_lon_dms(f):
return format_latlon3(f, LON_POSITIVE_DIRECTION, LON_NEGATIVE_DIRECTION)
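if __name__ == "__main__":
    # Minimal self-checks added for illustration (values chosen so the expected
    # results are exact in binary floating point).
    assert LatLongConverter.ToDegMin(45.75) == (45.0, 45.0)
    assert LatLongConverter.ToDegMinSec(-45.75) == (-45.0, 45, 0.0)
    assert format_lat(45.75).endswith("North")
    assert format_lon(-45.75).endswith("West")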
| NOAA-ORR-ERD/hazpy.unit_conversion | hazpy/unit_conversion/lat_long.py | Python | unlicense | 11,710 |
from math import log
def num_prime_factors(upper_limit):
"""
Create an array whose entries are the number of not necessarily distinct
prime factors of the index. The upper bound is the first number not
included.
"""
factor_count = [0] * upper_limit
prime = 2 #start with the first prime, which is 2
while True:
prime_power = prime
for exponent in range(int(log(upper_limit, prime))):
for hit in range(prime_power, upper_limit, prime_power):
factor_count[hit] += 1
prime_power *= prime
while True:
prime += 1
if prime >= upper_limit:
return factor_count
if factor_count[prime] == 0: break
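# Quick sanity check added for illustration: 8 = 2*2*2 and 12 = 2*2*3 each have three
# (not necessarily distinct) prime factors, while a prime such as 7 has exactly one.
assert num_prime_factors(16)[8] == 3
assert num_prime_factors(16)[12] == 3
assert num_prime_factors(16)[7] == 1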
print(sum(1 if n == 2 else 0 for n in num_prime_factors(100000000)))
| peterstace/project-euler | OLD_PY_CODE/project_euler_old_old/187/187.py | Python | unlicense | 813 |
# coding: utf-8
from __future__ import unicode_literals
import base64
import functools
import json
import re
import itertools
from .common import InfoExtractor
from ..compat import (
compat_kwargs,
compat_HTTPError,
compat_str,
compat_urlparse,
)
from ..utils import (
clean_html,
determine_ext,
dict_get,
ExtractorError,
js_to_json,
int_or_none,
merge_dicts,
OnDemandPagedList,
parse_filesize,
RegexNotFoundError,
sanitized_Request,
smuggle_url,
std_headers,
str_or_none,
try_get,
unified_timestamp,
unsmuggle_url,
urlencode_postdata,
urljoin,
unescapeHTML,
)
class VimeoBaseInfoExtractor(InfoExtractor):
_NETRC_MACHINE = 'vimeo'
_LOGIN_REQUIRED = False
_LOGIN_URL = 'https://vimeo.com/log_in'
def _login(self):
username, password = self._get_login_info()
if username is None:
if self._LOGIN_REQUIRED:
raise ExtractorError('No login info available, needed for using %s.' % self.IE_NAME, expected=True)
return
webpage = self._download_webpage(
self._LOGIN_URL, None, 'Downloading login page')
token, vuid = self._extract_xsrft_and_vuid(webpage)
data = {
'action': 'login',
'email': username,
'password': password,
'service': 'vimeo',
'token': token,
}
self._set_vimeo_cookie('vuid', vuid)
try:
self._download_webpage(
self._LOGIN_URL, None, 'Logging in',
data=urlencode_postdata(data), headers={
'Content-Type': 'application/x-www-form-urlencoded',
'Referer': self._LOGIN_URL,
})
except ExtractorError as e:
if isinstance(e.cause, compat_HTTPError) and e.cause.code == 418:
raise ExtractorError(
'Unable to log in: bad username or password',
expected=True)
raise ExtractorError('Unable to log in')
def _verify_video_password(self, url, video_id, webpage):
password = self._downloader.params.get('videopassword')
if password is None:
raise ExtractorError('This video is protected by a password, use the --video-password option', expected=True)
token, vuid = self._extract_xsrft_and_vuid(webpage)
data = urlencode_postdata({
'password': password,
'token': token,
})
if url.startswith('http://'):
# vimeo only supports https now, but the user can give an http url
url = url.replace('http://', 'https://')
password_request = sanitized_Request(url + '/password', data)
password_request.add_header('Content-Type', 'application/x-www-form-urlencoded')
password_request.add_header('Referer', url)
self._set_vimeo_cookie('vuid', vuid)
return self._download_webpage(
password_request, video_id,
'Verifying the password', 'Wrong password')
def _extract_xsrft_and_vuid(self, webpage):
xsrft = self._search_regex(
r'(?:(?P<q1>["\'])xsrft(?P=q1)\s*:|xsrft\s*[=:])\s*(?P<q>["\'])(?P<xsrft>.+?)(?P=q)',
webpage, 'login token', group='xsrft')
vuid = self._search_regex(
r'["\']vuid["\']\s*:\s*(["\'])(?P<vuid>.+?)\1',
webpage, 'vuid', group='vuid')
return xsrft, vuid
def _extract_vimeo_config(self, webpage, video_id, *args, **kwargs):
vimeo_config = self._search_regex(
r'vimeo\.config\s*=\s*(?:({.+?})|_extend\([^,]+,\s+({.+?})\));',
webpage, 'vimeo config', *args, **compat_kwargs(kwargs))
if vimeo_config:
return self._parse_json(vimeo_config, video_id)
def _set_vimeo_cookie(self, name, value):
self._set_cookie('vimeo.com', name, value)
def _vimeo_sort_formats(self, formats):
# Bitrates are completely broken. Single m3u8 may contain entries in kbps and bps
        # at the same time without actual units specified. This leads to wrong sorting.
self._sort_formats(formats, field_preference=('preference', 'height', 'width', 'fps', 'tbr', 'format_id'))
def _parse_config(self, config, video_id):
video_data = config['video']
video_title = video_data['title']
live_event = video_data.get('live_event') or {}
is_live = live_event.get('status') == 'started'
formats = []
config_files = video_data.get('files') or config['request'].get('files', {})
for f in config_files.get('progressive', []):
video_url = f.get('url')
if not video_url:
continue
formats.append({
'url': video_url,
'format_id': 'http-%s' % f.get('quality'),
'width': int_or_none(f.get('width')),
'height': int_or_none(f.get('height')),
'fps': int_or_none(f.get('fps')),
'tbr': int_or_none(f.get('bitrate')),
})
# TODO: fix handling of 308 status code returned for live archive manifest requests
for files_type in ('hls', 'dash'):
for cdn_name, cdn_data in config_files.get(files_type, {}).get('cdns', {}).items():
manifest_url = cdn_data.get('url')
if not manifest_url:
continue
format_id = '%s-%s' % (files_type, cdn_name)
if files_type == 'hls':
formats.extend(self._extract_m3u8_formats(
manifest_url, video_id, 'mp4',
'm3u8' if is_live else 'm3u8_native', m3u8_id=format_id,
note='Downloading %s m3u8 information' % cdn_name,
fatal=False))
elif files_type == 'dash':
mpd_pattern = r'/%s/(?:sep/)?video/' % video_id
mpd_manifest_urls = []
if re.search(mpd_pattern, manifest_url):
for suffix, repl in (('', 'video'), ('_sep', 'sep/video')):
mpd_manifest_urls.append((format_id + suffix, re.sub(
mpd_pattern, '/%s/%s/' % (video_id, repl), manifest_url)))
else:
mpd_manifest_urls = [(format_id, manifest_url)]
for f_id, m_url in mpd_manifest_urls:
if 'json=1' in m_url:
real_m_url = (self._download_json(m_url, video_id, fatal=False) or {}).get('url')
if real_m_url:
m_url = real_m_url
mpd_formats = self._extract_mpd_formats(
m_url.replace('/master.json', '/master.mpd'), video_id, f_id,
'Downloading %s MPD information' % cdn_name,
fatal=False)
for f in mpd_formats:
if f.get('vcodec') == 'none':
f['preference'] = -50
elif f.get('acodec') == 'none':
f['preference'] = -40
formats.extend(mpd_formats)
live_archive = live_event.get('archive') or {}
live_archive_source_url = live_archive.get('source_url')
if live_archive_source_url and live_archive.get('status') == 'done':
formats.append({
'format_id': 'live-archive-source',
'url': live_archive_source_url,
'preference': 1,
})
subtitles = {}
text_tracks = config['request'].get('text_tracks')
if text_tracks:
for tt in text_tracks:
subtitles[tt['lang']] = [{
'ext': 'vtt',
'url': urljoin('https://vimeo.com', tt['url']),
}]
thumbnails = []
if not is_live:
for key, thumb in video_data.get('thumbs', {}).items():
thumbnails.append({
'id': key,
'width': int_or_none(key),
'url': thumb,
})
thumbnail = video_data.get('thumbnail')
if thumbnail:
thumbnails.append({
'url': thumbnail,
})
owner = video_data.get('owner') or {}
video_uploader_url = owner.get('url')
return {
'id': str_or_none(video_data.get('id')) or video_id,
'title': self._live_title(video_title) if is_live else video_title,
'uploader': owner.get('name'),
'uploader_id': video_uploader_url.split('/')[-1] if video_uploader_url else None,
'uploader_url': video_uploader_url,
'thumbnails': thumbnails,
'duration': int_or_none(video_data.get('duration')),
'formats': formats,
'subtitles': subtitles,
'is_live': is_live,
}
def _extract_original_format(self, url, video_id):
download_data = self._download_json(
url, video_id, fatal=False,
query={'action': 'load_download_config'},
headers={'X-Requested-With': 'XMLHttpRequest'})
if download_data:
source_file = download_data.get('source_file')
if isinstance(source_file, dict):
download_url = source_file.get('download_url')
if download_url and not source_file.get('is_cold') and not source_file.get('is_defrosting'):
source_name = source_file.get('public_name', 'Original')
if self._is_valid_url(download_url, video_id, '%s video' % source_name):
ext = (try_get(
source_file, lambda x: x['extension'],
compat_str) or determine_ext(
download_url, None) or 'mp4').lower()
return {
'url': download_url,
'ext': ext,
'width': int_or_none(source_file.get('width')),
'height': int_or_none(source_file.get('height')),
'filesize': parse_filesize(source_file.get('size')),
'format_id': source_name,
'preference': 1,
}
class VimeoIE(VimeoBaseInfoExtractor):
"""Information extractor for vimeo.com."""
# _VALID_URL matches Vimeo URLs
_VALID_URL = r'''(?x)
https?://
(?:
(?:
www|
player
)
\.
)?
vimeo(?:pro)?\.com/
(?!(?:channels|album|showcase)/[^/?#]+/?(?:$|[?#])|[^/]+/review/|ondemand/)
(?:.*?/)?
(?:
(?:
play_redirect_hls|
moogaloop\.swf)\?clip_id=
)?
(?:videos?/)?
(?P<id>[0-9]+)
(?:/[\da-f]+)?
/?(?:[?&].*)?(?:[#].*)?$
'''
IE_NAME = 'vimeo'
_TESTS = [
{
'url': 'http://vimeo.com/56015672#at=0',
'md5': '8879b6cc097e987f02484baf890129e5',
'info_dict': {
'id': '56015672',
'ext': 'mp4',
'title': "youtube-dl test video - \u2605 \" ' \u5e78 / \\ \u00e4 \u21ad \U0001d550",
'description': 'md5:2d3305bad981a06ff79f027f19865021',
'timestamp': 1355990239,
'upload_date': '20121220',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user7108434',
'uploader_id': 'user7108434',
'uploader': 'Filippo Valsorda',
'duration': 10,
'license': 'by-sa',
},
'params': {
'format': 'best[protocol=https]',
},
},
{
'url': 'http://vimeopro.com/openstreetmapus/state-of-the-map-us-2013/video/68093876',
'md5': '3b5ca6aa22b60dfeeadf50b72e44ed82',
'note': 'Vimeo Pro video (#1197)',
'info_dict': {
'id': '68093876',
'ext': 'mp4',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/openstreetmapus',
'uploader_id': 'openstreetmapus',
'uploader': 'OpenStreetMap US',
'title': 'Andy Allan - Putting the Carto into OpenStreetMap Cartography',
'description': 'md5:2c362968038d4499f4d79f88458590c1',
'duration': 1595,
'upload_date': '20130610',
'timestamp': 1370893156,
},
'params': {
'format': 'best[protocol=https]',
},
},
{
'url': 'http://player.vimeo.com/video/54469442',
'md5': '619b811a4417aa4abe78dc653becf511',
'note': 'Videos that embed the url in the player page',
'info_dict': {
'id': '54469442',
'ext': 'mp4',
'title': 'Kathy Sierra: Building the minimum Badass User, Business of Software 2012',
'uploader': 'The BLN & Business of Software',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/theblnbusinessofsoftware',
'uploader_id': 'theblnbusinessofsoftware',
'duration': 3610,
'description': None,
},
'params': {
'format': 'best[protocol=https]',
},
'expected_warnings': ['Unable to download JSON metadata'],
},
{
'url': 'http://vimeo.com/68375962',
'md5': 'aaf896bdb7ddd6476df50007a0ac0ae7',
'note': 'Video protected with password',
'info_dict': {
'id': '68375962',
'ext': 'mp4',
'title': 'youtube-dl password protected test video',
'timestamp': 1371200155,
'upload_date': '20130614',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user18948128',
'uploader_id': 'user18948128',
'uploader': 'Jaime Marquínez Ferrándiz',
'duration': 10,
'description': 'md5:dca3ea23adb29ee387127bc4ddfce63f',
},
'params': {
'format': 'best[protocol=https]',
'videopassword': 'youtube-dl',
},
},
{
'url': 'http://vimeo.com/channels/keypeele/75629013',
'md5': '2f86a05afe9d7abc0b9126d229bbe15d',
'info_dict': {
'id': '75629013',
'ext': 'mp4',
'title': 'Key & Peele: Terrorist Interrogation',
'description': 'md5:8678b246399b070816b12313e8b4eb5c',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/atencio',
'uploader_id': 'atencio',
'uploader': 'Peter Atencio',
'channel_id': 'keypeele',
'channel_url': r're:https?://(?:www\.)?vimeo\.com/channels/keypeele',
'timestamp': 1380339469,
'upload_date': '20130928',
'duration': 187,
},
'expected_warnings': ['Unable to download JSON metadata'],
},
{
'url': 'http://vimeo.com/76979871',
'note': 'Video with subtitles',
'info_dict': {
'id': '76979871',
'ext': 'mp4',
'title': 'The New Vimeo Player (You Know, For Videos)',
'description': 'md5:2ec900bf97c3f389378a96aee11260ea',
'timestamp': 1381846109,
'upload_date': '20131015',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/staff',
'uploader_id': 'staff',
'uploader': 'Vimeo Staff',
'duration': 62,
}
},
{
# from https://www.ouya.tv/game/Pier-Solar-and-the-Great-Architects/
'url': 'https://player.vimeo.com/video/98044508',
'note': 'The js code contains assignments to the same variable as the config',
'info_dict': {
'id': '98044508',
'ext': 'mp4',
'title': 'Pier Solar OUYA Official Trailer',
'uploader': 'Tulio Gonçalves',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user28849593',
'uploader_id': 'user28849593',
},
},
{
# contains original format
'url': 'https://vimeo.com/33951933',
'md5': '53c688fa95a55bf4b7293d37a89c5c53',
'info_dict': {
'id': '33951933',
'ext': 'mp4',
'title': 'FOX CLASSICS - Forever Classic ID - A Full Minute',
'uploader': 'The DMCI',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/dmci',
'uploader_id': 'dmci',
'timestamp': 1324343742,
'upload_date': '20111220',
'description': 'md5:ae23671e82d05415868f7ad1aec21147',
},
},
{
# only available via https://vimeo.com/channels/tributes/6213729 and
# not via https://vimeo.com/6213729
'url': 'https://vimeo.com/channels/tributes/6213729',
'info_dict': {
'id': '6213729',
'ext': 'mp4',
'title': 'Vimeo Tribute: The Shining',
'uploader': 'Casey Donahue',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/caseydonahue',
'uploader_id': 'caseydonahue',
'channel_url': r're:https?://(?:www\.)?vimeo\.com/channels/tributes',
'channel_id': 'tributes',
'timestamp': 1250886430,
'upload_date': '20090821',
'description': 'md5:bdbf314014e58713e6e5b66eb252f4a6',
},
'params': {
'skip_download': True,
},
'expected_warnings': ['Unable to download JSON metadata'],
},
{
# redirects to ondemand extractor and should be passed through it
# for successful extraction
'url': 'https://vimeo.com/73445910',
'info_dict': {
'id': '73445910',
'ext': 'mp4',
'title': 'The Reluctant Revolutionary',
'uploader': '10Ft Films',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/tenfootfilms',
'uploader_id': 'tenfootfilms',
'description': 'md5:0fa704e05b04f91f40b7f3ca2e801384',
'upload_date': '20130830',
'timestamp': 1377853339,
},
'params': {
'skip_download': True,
},
'expected_warnings': ['Unable to download JSON metadata'],
},
{
'url': 'http://player.vimeo.com/video/68375962',
'md5': 'aaf896bdb7ddd6476df50007a0ac0ae7',
'info_dict': {
'id': '68375962',
'ext': 'mp4',
'title': 'youtube-dl password protected test video',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user18948128',
'uploader_id': 'user18948128',
'uploader': 'Jaime Marquínez Ferrándiz',
'duration': 10,
},
'params': {
'format': 'best[protocol=https]',
'videopassword': 'youtube-dl',
},
},
{
'url': 'http://vimeo.com/moogaloop.swf?clip_id=2539741',
'only_matching': True,
},
{
'url': 'https://vimeo.com/109815029',
'note': 'Video not completely processed, "failed" seed status',
'only_matching': True,
},
{
'url': 'https://vimeo.com/groups/travelhd/videos/22439234',
'only_matching': True,
},
{
'url': 'https://vimeo.com/album/2632481/video/79010983',
'only_matching': True,
},
{
# source file returns 403: Forbidden
'url': 'https://vimeo.com/7809605',
'only_matching': True,
},
{
'url': 'https://vimeo.com/160743502/abd0e13fb4',
'only_matching': True,
}
# https://gettingthingsdone.com/workflowmap/
# vimeo embed with check-password page protected by Referer header
]
@staticmethod
def _smuggle_referrer(url, referrer_url):
return smuggle_url(url, {'http_headers': {'Referer': referrer_url}})
@staticmethod
def _extract_urls(url, webpage):
urls = []
# Look for embedded (iframe) Vimeo player
for mobj in re.finditer(
r'<iframe[^>]+?src=(["\'])(?P<url>(?:https?:)?//player\.vimeo\.com/video/\d+.*?)\1',
webpage):
urls.append(VimeoIE._smuggle_referrer(unescapeHTML(mobj.group('url')), url))
PLAIN_EMBED_RE = (
# Look for embedded (swf embed) Vimeo player
r'<embed[^>]+?src=(["\'])(?P<url>(?:https?:)?//(?:www\.)?vimeo\.com/moogaloop\.swf.+?)\1',
# Look more for non-standard embedded Vimeo player
r'<video[^>]+src=(["\'])(?P<url>(?:https?:)?//(?:www\.)?vimeo\.com/[0-9]+)\1',
)
for embed_re in PLAIN_EMBED_RE:
for mobj in re.finditer(embed_re, webpage):
urls.append(mobj.group('url'))
return urls
@staticmethod
def _extract_url(url, webpage):
urls = VimeoIE._extract_urls(url, webpage)
return urls[0] if urls else None
def _verify_player_video_password(self, url, video_id, headers):
password = self._downloader.params.get('videopassword')
if password is None:
raise ExtractorError('This video is protected by a password, use the --video-password option', expected=True)
data = urlencode_postdata({
'password': base64.b64encode(password.encode()),
})
headers = merge_dicts(headers, {
'Content-Type': 'application/x-www-form-urlencoded',
})
checked = self._download_json(
url + '/check-password', video_id,
'Verifying the password', data=data, headers=headers)
if checked is False:
raise ExtractorError('Wrong video password', expected=True)
return checked
def _real_initialize(self):
self._login()
def _real_extract(self, url):
url, data = unsmuggle_url(url, {})
headers = std_headers.copy()
if 'http_headers' in data:
headers.update(data['http_headers'])
if 'Referer' not in headers:
headers['Referer'] = url
channel_id = self._search_regex(
r'vimeo\.com/channels/([^/]+)', url, 'channel id', default=None)
# Extract ID from URL
video_id = self._match_id(url)
orig_url = url
is_pro = 'vimeopro.com/' in url
is_player = '://player.vimeo.com/video/' in url
if is_pro:
# some videos require portfolio_id to be present in player url
# https://github.com/ytdl-org/youtube-dl/issues/20070
url = self._extract_url(url, self._download_webpage(url, video_id))
if not url:
url = 'https://vimeo.com/' + video_id
elif is_player:
url = 'https://player.vimeo.com/video/' + video_id
elif any(p in url for p in ('play_redirect_hls', 'moogaloop.swf')):
url = 'https://vimeo.com/' + video_id
try:
# Retrieve video webpage to extract further information
webpage, urlh = self._download_webpage_handle(
url, video_id, headers=headers)
redirect_url = urlh.geturl()
except ExtractorError as ee:
if isinstance(ee.cause, compat_HTTPError) and ee.cause.code == 403:
errmsg = ee.cause.read()
if b'Because of its privacy settings, this video cannot be played here' in errmsg:
raise ExtractorError(
'Cannot download embed-only video without embedding '
'URL. Please call youtube-dl with the URL of the page '
'that embeds this video.',
expected=True)
raise
# Now we begin extracting as much information as we can from what we
# retrieved. First we extract the information common to all extractors,
# and latter we extract those that are Vimeo specific.
self.report_extraction(video_id)
vimeo_config = self._extract_vimeo_config(webpage, video_id, default=None)
if vimeo_config:
seed_status = vimeo_config.get('seed_status', {})
if seed_status.get('state') == 'failed':
raise ExtractorError(
'%s said: %s' % (self.IE_NAME, seed_status['title']),
expected=True)
cc_license = None
timestamp = None
video_description = None
# Extract the config JSON
try:
try:
config_url = self._html_search_regex(
r' data-config-url="(.+?)"', webpage,
'config URL', default=None)
if not config_url:
# Sometimes new react-based page is served instead of old one that require
# different config URL extraction approach (see
# https://github.com/ytdl-org/youtube-dl/pull/7209)
page_config = self._parse_json(self._search_regex(
r'vimeo\.(?:clip|vod_title)_page_config\s*=\s*({.+?});',
webpage, 'page config'), video_id)
config_url = page_config['player']['config_url']
cc_license = page_config.get('cc_license')
timestamp = try_get(
page_config, lambda x: x['clip']['uploaded_on'],
compat_str)
video_description = clean_html(dict_get(
page_config, ('description', 'description_html_escaped')))
config = self._download_json(config_url, video_id)
except RegexNotFoundError:
# For pro videos or player.vimeo.com urls
# We try to find out to which variable is assigned the config dic
m_variable_name = re.search(r'(\w)\.video\.id', webpage)
if m_variable_name is not None:
config_re = [r'%s=({[^}].+?});' % re.escape(m_variable_name.group(1))]
else:
config_re = [r' = {config:({.+?}),assets:', r'(?:[abc])=({.+?});']
config_re.append(r'\bvar\s+r\s*=\s*({.+?})\s*;')
config_re.append(r'\bconfig\s*=\s*({.+?})\s*;')
config = self._search_regex(config_re, webpage, 'info section',
flags=re.DOTALL)
config = json.loads(config)
except Exception as e:
if re.search('The creator of this video has not given you permission to embed it on this domain.', webpage):
raise ExtractorError('The author has restricted the access to this video, try with the "--referer" option')
if re.search(r'<form[^>]+?id="pw_form"', webpage) is not None:
if '_video_password_verified' in data:
raise ExtractorError('video password verification failed!')
self._verify_video_password(redirect_url, video_id, webpage)
return self._real_extract(
smuggle_url(redirect_url, {'_video_password_verified': 'verified'}))
else:
raise ExtractorError('Unable to extract info section',
cause=e)
else:
if config.get('view') == 4:
config = self._verify_player_video_password(redirect_url, video_id, headers)
vod = config.get('video', {}).get('vod', {})
def is_rented():
if '>You rented this title.<' in webpage:
return True
if config.get('user', {}).get('purchased'):
return True
for purchase_option in vod.get('purchase_options', []):
if purchase_option.get('purchased'):
return True
label = purchase_option.get('label_string')
if label and (label.startswith('You rented this') or label.endswith(' remaining')):
return True
return False
if is_rented() and vod.get('is_trailer'):
feature_id = vod.get('feature_id')
if feature_id and not data.get('force_feature_id', False):
return self.url_result(smuggle_url(
'https://player.vimeo.com/player/%s' % feature_id,
{'force_feature_id': True}), 'Vimeo')
# Extract video description
if not video_description:
video_description = self._html_search_regex(
r'(?s)<div\s+class="[^"]*description[^"]*"[^>]*>(.*?)</div>',
webpage, 'description', default=None)
if not video_description:
video_description = self._html_search_meta(
'description', webpage, default=None)
if not video_description and is_pro:
orig_webpage = self._download_webpage(
orig_url, video_id,
note='Downloading webpage for description',
fatal=False)
if orig_webpage:
video_description = self._html_search_meta(
'description', orig_webpage, default=None)
if not video_description and not is_player:
self._downloader.report_warning('Cannot find video description')
# Extract upload date
if not timestamp:
timestamp = self._search_regex(
r'<time[^>]+datetime="([^"]+)"', webpage,
'timestamp', default=None)
try:
view_count = int(self._search_regex(r'UserPlays:(\d+)', webpage, 'view count'))
like_count = int(self._search_regex(r'UserLikes:(\d+)', webpage, 'like count'))
comment_count = int(self._search_regex(r'UserComments:(\d+)', webpage, 'comment count'))
except RegexNotFoundError:
# This info is only available in vimeo.com/{id} urls
view_count = None
like_count = None
comment_count = None
formats = []
source_format = self._extract_original_format(
'https://vimeo.com/' + video_id, video_id)
if source_format:
formats.append(source_format)
info_dict_config = self._parse_config(config, video_id)
formats.extend(info_dict_config['formats'])
self._vimeo_sort_formats(formats)
json_ld = self._search_json_ld(webpage, video_id, default={})
if not cc_license:
cc_license = self._search_regex(
r'<link[^>]+rel=["\']license["\'][^>]+href=(["\'])(?P<license>(?:(?!\1).)+)\1',
webpage, 'license', default=None, group='license')
channel_url = 'https://vimeo.com/channels/%s' % channel_id if channel_id else None
info_dict = {
'formats': formats,
'timestamp': unified_timestamp(timestamp),
'description': video_description,
'webpage_url': url,
'view_count': view_count,
'like_count': like_count,
'comment_count': comment_count,
'license': cc_license,
'channel_id': channel_id,
'channel_url': channel_url,
}
info_dict = merge_dicts(info_dict, info_dict_config, json_ld)
return info_dict
class VimeoOndemandIE(VimeoIE):
IE_NAME = 'vimeo:ondemand'
_VALID_URL = r'https?://(?:www\.)?vimeo\.com/ondemand/([^/]+/)?(?P<id>[^/?#&]+)'
_TESTS = [{
# ondemand video not available via https://vimeo.com/id
'url': 'https://vimeo.com/ondemand/20704',
'md5': 'c424deda8c7f73c1dfb3edd7630e2f35',
'info_dict': {
'id': '105442900',
'ext': 'mp4',
'title': 'המעבדה - במאי יותם פלדמן',
'uploader': 'גם סרטים',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/gumfilms',
'uploader_id': 'gumfilms',
'description': 'md5:4c027c965e439de4baab621e48b60791',
'upload_date': '20140906',
'timestamp': 1410032453,
},
'params': {
'format': 'best[protocol=https]',
},
'expected_warnings': ['Unable to download JSON metadata'],
}, {
# requires Referer to be passed along with og:video:url
'url': 'https://vimeo.com/ondemand/36938/126682985',
'info_dict': {
'id': '126584684',
'ext': 'mp4',
'title': 'Rävlock, rätt läte på rätt plats',
'uploader': 'Lindroth & Norin',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/lindrothnorin',
'uploader_id': 'lindrothnorin',
'description': 'md5:c3c46a90529612c8279fb6af803fc0df',
'upload_date': '20150502',
'timestamp': 1430586422,
},
'params': {
'skip_download': True,
},
'expected_warnings': ['Unable to download JSON metadata'],
}, {
'url': 'https://vimeo.com/ondemand/nazmaalik',
'only_matching': True,
}, {
'url': 'https://vimeo.com/ondemand/141692381',
'only_matching': True,
}, {
'url': 'https://vimeo.com/ondemand/thelastcolony/150274832',
'only_matching': True,
}]
class VimeoChannelIE(VimeoBaseInfoExtractor):
IE_NAME = 'vimeo:channel'
_VALID_URL = r'https://vimeo\.com/channels/(?P<id>[^/?#]+)/?(?:$|[?#])'
_MORE_PAGES_INDICATOR = r'<a.+?rel="next"'
_TITLE = None
_TITLE_RE = r'<link rel="alternate"[^>]+?title="(.*?)"'
_TESTS = [{
'url': 'https://vimeo.com/channels/tributes',
'info_dict': {
'id': 'tributes',
'title': 'Vimeo Tributes',
},
'playlist_mincount': 25,
}]
_BASE_URL_TEMPL = 'https://vimeo.com/channels/%s'
def _page_url(self, base_url, pagenum):
return '%s/videos/page:%d/' % (base_url, pagenum)
def _extract_list_title(self, webpage):
return self._TITLE or self._html_search_regex(
self._TITLE_RE, webpage, 'list title', fatal=False)
def _title_and_entries(self, list_id, base_url):
for pagenum in itertools.count(1):
page_url = self._page_url(base_url, pagenum)
webpage = self._download_webpage(
page_url, list_id,
'Downloading page %s' % pagenum)
if pagenum == 1:
yield self._extract_list_title(webpage)
# Try extracting href first since not all videos are available via
# short https://vimeo.com/id URL (e.g. https://vimeo.com/channels/tributes/6213729)
clips = re.findall(
r'id="clip_(\d+)"[^>]*>\s*<a[^>]+href="(/(?:[^/]+/)*\1)(?:[^>]+\btitle="([^"]+)")?', webpage)
if clips:
for video_id, video_url, video_title in clips:
yield self.url_result(
compat_urlparse.urljoin(base_url, video_url),
VimeoIE.ie_key(), video_id=video_id, video_title=video_title)
# More relaxed fallback
else:
for video_id in re.findall(r'id=["\']clip_(\d+)', webpage):
yield self.url_result(
'https://vimeo.com/%s' % video_id,
VimeoIE.ie_key(), video_id=video_id)
if re.search(self._MORE_PAGES_INDICATOR, webpage, re.DOTALL) is None:
break
def _extract_videos(self, list_id, base_url):
title_and_entries = self._title_and_entries(list_id, base_url)
list_title = next(title_and_entries)
return self.playlist_result(title_and_entries, list_id, list_title)
def _real_extract(self, url):
channel_id = self._match_id(url)
return self._extract_videos(channel_id, self._BASE_URL_TEMPL % channel_id)
class VimeoUserIE(VimeoChannelIE):
IE_NAME = 'vimeo:user'
_VALID_URL = r'https://vimeo\.com/(?!(?:[0-9]+|watchlater)(?:$|[?#/]))(?P<id>[^/]+)(?:/videos|[#?]|$)'
_TITLE_RE = r'<a[^>]+?class="user">([^<>]+?)</a>'
_TESTS = [{
'url': 'https://vimeo.com/nkistudio/videos',
'info_dict': {
'title': 'Nki',
'id': 'nkistudio',
},
'playlist_mincount': 66,
}]
_BASE_URL_TEMPL = 'https://vimeo.com/%s'
class VimeoAlbumIE(VimeoBaseInfoExtractor):
IE_NAME = 'vimeo:album'
_VALID_URL = r'https://vimeo\.com/(?:album|showcase)/(?P<id>\d+)(?:$|[?#]|/(?!video))'
_TITLE_RE = r'<header id="page_header">\n\s*<h1>(.*?)</h1>'
_TESTS = [{
'url': 'https://vimeo.com/album/2632481',
'info_dict': {
'id': '2632481',
'title': 'Staff Favorites: November 2013',
},
'playlist_mincount': 13,
}, {
'note': 'Password-protected album',
'url': 'https://vimeo.com/album/3253534',
'info_dict': {
'title': 'test',
'id': '3253534',
},
'playlist_count': 1,
'params': {
'videopassword': 'youtube-dl',
}
}]
_PAGE_SIZE = 100
    def _fetch_page(self, album_id, authorization, hashed_pass, page):
api_page = page + 1
query = {
'fields': 'link,uri',
'page': api_page,
'per_page': self._PAGE_SIZE,
}
if hashed_pass:
query['_hashed_pass'] = hashed_pass
videos = self._download_json(
'https://api.vimeo.com/albums/%s/videos' % album_id,
album_id, 'Downloading page %d' % api_page, query=query, headers={
                'Authorization': 'jwt ' + authorization,
})['data']
for video in videos:
link = video.get('link')
if not link:
continue
uri = video.get('uri')
video_id = self._search_regex(r'/videos/(\d+)', uri, 'video_id', default=None) if uri else None
yield self.url_result(link, VimeoIE.ie_key(), video_id)
def _real_extract(self, url):
album_id = self._match_id(url)
webpage = self._download_webpage(url, album_id)
viewer = self._parse_json(self._search_regex(
r'bootstrap_data\s*=\s*({.+?})</script>',
webpage, 'bootstrap data'), album_id)['viewer']
jwt = viewer['jwt']
album = self._download_json(
'https://api.vimeo.com/albums/' + album_id,
album_id, headers={'Authorization': 'jwt ' + jwt},
query={'fields': 'description,name,privacy'})
hashed_pass = None
if try_get(album, lambda x: x['privacy']['view']) == 'password':
password = self._downloader.params.get('videopassword')
if not password:
raise ExtractorError(
'This album is protected by a password, use the --video-password option',
expected=True)
self._set_vimeo_cookie('vuid', viewer['vuid'])
try:
hashed_pass = self._download_json(
'https://vimeo.com/showcase/%s/auth' % album_id,
album_id, 'Verifying the password', data=urlencode_postdata({
'password': password,
'token': viewer['xsrft'],
}), headers={
'X-Requested-With': 'XMLHttpRequest',
})['hashed_pass']
except ExtractorError as e:
if isinstance(e.cause, compat_HTTPError) and e.cause.code == 401:
raise ExtractorError('Wrong password', expected=True)
raise
entries = OnDemandPagedList(functools.partial(
self._fetch_page, album_id, jwt, hashed_pass), self._PAGE_SIZE)
return self.playlist_result(
entries, album_id, album.get('name'), album.get('description'))
class VimeoGroupsIE(VimeoChannelIE):
IE_NAME = 'vimeo:group'
_VALID_URL = r'https://vimeo\.com/groups/(?P<id>[^/]+)(?:/(?!videos?/\d+)|$)'
_TESTS = [{
'url': 'https://vimeo.com/groups/kattykay',
'info_dict': {
'id': 'kattykay',
'title': 'Katty Kay',
},
'playlist_mincount': 27,
}]
_BASE_URL_TEMPL = 'https://vimeo.com/groups/%s'
class VimeoReviewIE(VimeoBaseInfoExtractor):
IE_NAME = 'vimeo:review'
IE_DESC = 'Review pages on vimeo'
_VALID_URL = r'(?P<url>https://vimeo\.com/[^/]+/review/(?P<id>[^/]+)/[0-9a-f]{10})'
_TESTS = [{
'url': 'https://vimeo.com/user21297594/review/75524534/3c257a1b5d',
'md5': 'c507a72f780cacc12b2248bb4006d253',
'info_dict': {
'id': '75524534',
'ext': 'mp4',
'title': "DICK HARDWICK 'Comedian'",
'uploader': 'Richard Hardwick',
'uploader_id': 'user21297594',
'description': "Comedian Dick Hardwick's five minute demo filmed in front of a live theater audience.\nEdit by Doug Mattocks",
},
'expected_warnings': ['Unable to download JSON metadata'],
}, {
'note': 'video player needs Referer',
'url': 'https://vimeo.com/user22258446/review/91613211/13f927e053',
'md5': '6295fdab8f4bf6a002d058b2c6dce276',
'info_dict': {
'id': '91613211',
'ext': 'mp4',
'title': 're:(?i)^Death by dogma versus assembling agile . Sander Hoogendoorn',
'uploader': 'DevWeek Events',
'duration': 2773,
'thumbnail': r're:^https?://.*\.jpg$',
'uploader_id': 'user22258446',
},
'skip': 'video gone',
}, {
'note': 'Password protected',
'url': 'https://vimeo.com/user37284429/review/138823582/c4d865efde',
'info_dict': {
'id': '138823582',
'ext': 'mp4',
'title': 'EFFICIENT PICKUP MASTERCLASS MODULE 1',
'uploader': 'TMB',
'uploader_id': 'user37284429',
},
'params': {
'videopassword': 'holygrail',
},
'skip': 'video gone',
}]
def _real_initialize(self):
self._login()
def _real_extract(self, url):
page_url, video_id = re.match(self._VALID_URL, url).groups()
clip_data = self._download_json(
page_url.replace('/review/', '/review/data/'),
video_id)['clipData']
config_url = clip_data['configUrl']
config = self._download_json(config_url, video_id)
info_dict = self._parse_config(config, video_id)
source_format = self._extract_original_format(
page_url + '/action', video_id)
if source_format:
info_dict['formats'].append(source_format)
self._vimeo_sort_formats(info_dict['formats'])
info_dict['description'] = clean_html(clip_data.get('description'))
return info_dict
class VimeoWatchLaterIE(VimeoChannelIE):
IE_NAME = 'vimeo:watchlater'
IE_DESC = 'Vimeo watch later list, "vimeowatchlater" keyword (requires authentication)'
_VALID_URL = r'https://vimeo\.com/(?:home/)?watchlater|:vimeowatchlater'
_TITLE = 'Watch Later'
_LOGIN_REQUIRED = True
_TESTS = [{
'url': 'https://vimeo.com/watchlater',
'only_matching': True,
}]
def _real_initialize(self):
self._login()
def _page_url(self, base_url, pagenum):
url = '%s/page:%d/' % (base_url, pagenum)
request = sanitized_Request(url)
# Set the header to get a partial html page with the ids,
# the normal page doesn't contain them.
request.add_header('X-Requested-With', 'XMLHttpRequest')
return request
def _real_extract(self, url):
return self._extract_videos('watchlater', 'https://vimeo.com/watchlater')
class VimeoLikesIE(VimeoChannelIE):
_VALID_URL = r'https://(?:www\.)?vimeo\.com/(?P<id>[^/]+)/likes/?(?:$|[?#]|sort:)'
IE_NAME = 'vimeo:likes'
IE_DESC = 'Vimeo user likes'
_TESTS = [{
'url': 'https://vimeo.com/user755559/likes/',
'playlist_mincount': 293,
'info_dict': {
'id': 'user755559',
'title': 'urza’s Likes',
},
}, {
'url': 'https://vimeo.com/stormlapse/likes',
'only_matching': True,
}]
def _page_url(self, base_url, pagenum):
return '%s/page:%d/' % (base_url, pagenum)
def _real_extract(self, url):
user_id = self._match_id(url)
return self._extract_videos(user_id, 'https://vimeo.com/%s/likes' % user_id)
class VHXEmbedIE(VimeoBaseInfoExtractor):
IE_NAME = 'vhx:embed'
_VALID_URL = r'https?://embed\.vhx\.tv/videos/(?P<id>\d+)'
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
config_url = self._parse_json(self._search_regex(
r'window\.OTTData\s*=\s*({.+})', webpage,
'ott data'), video_id, js_to_json)['config_url']
config = self._download_json(config_url, video_id)
info = self._parse_config(config, video_id)
self._vimeo_sort_formats(info['formats'])
return info
| remitamine/youtube-dl | youtube_dl/extractor/vimeo.py | Python | unlicense | 46,520 |
__author__ = 'Alex'
from Movement import Movement
class BaseCommand:
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'unknown'
self.m = movement
    def execute(self): pass
class Forward(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'forward'
self.m = movement
def execute(self):
self.m.moveCM(10)
class Reverse(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'reverse'
self.m = movement
def execute(self):
        self.m.moveCM(-10)
class Left(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'left'
self.m = movement
def execute(self):
self.m.turnDegrees(-90)
class Right(BaseCommand):
def __init__(self, movement):
assert isinstance(movement, Movement)
self.name = 'right'
self.m = movement
def execute(self):
self.m.turnDegrees(90)
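# --- Illustrative sketch, not part of the original file ---
# The classes above implement a simple command pattern: each command wraps a
# Movement instance and exposes execute(). A dispatcher keyed on the command
# names might look like this. Constructing Movement() with no arguments (see
# the commented example) is an assumption about its API.
def run_commands(movement, names):
    # Build a lookup table from command name to command instance.
    available = [Forward(movement), Reverse(movement), Left(movement), Right(movement)]
    table = dict((c.name, c) for c in available)
    for name in names:
        table[name].execute()
# Hypothetical usage: run_commands(Movement(), ['forward', 'left', 'forward'])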
| RobotTurtles/mid-level-routines | Apps/TurtleCommands.py | Python | apache-2.0 | 1,101 |
# -*- coding: utf-8 -*-
#
# yara documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 8 11:04:03 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'yara'
copyright = u'2014-2019, VirusTotal'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '3.9'
# The full version, including alpha/beta/rc tags.
release = '3.9.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
try:
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except:
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'yaradoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'yara.tex', u'yara Documentation',
u'Victor M. Alvarez', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'yara', u'yara Documentation',
[u'Victor M. Alvarez'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'yara', u'yara Documentation',
u'Victor M. Alvarez', 'yara', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| Masood-M/yalih | req/yara-3.9.0/docs/conf.py | Python | apache-2.0 | 8,265 |
# Copyright 2019, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Libraries of Keras metrics."""
import tensorflow as tf
def _apply_mask(y_true, sample_weight, masked_tokens, dtype):
if sample_weight is None:
sample_weight = tf.ones_like(y_true, dtype)
else:
sample_weight = tf.cast(sample_weight, dtype)
for token in masked_tokens:
mask = tf.cast(tf.not_equal(y_true, token), dtype)
sample_weight = sample_weight * mask
return sample_weight
class NumTokensCounter(tf.keras.metrics.Sum):
"""A `tf.keras.metrics.Metric` that counts tokens seen after masking."""
def __init__(self, masked_tokens=None, name='num_tokens', dtype=tf.int64):
self._masked_tokens = masked_tokens or []
super().__init__(name, dtype)
def update_state(self, y_true, y_pred, sample_weight=None):
sample_weight = _apply_mask(y_true, sample_weight, self._masked_tokens,
self._dtype)
sample_weight = tf.reshape(sample_weight, [-1])
super().update_state(sample_weight)
def get_config(self):
config = super().get_config()
config['masked_tokens'] = tuple(self._masked_tokens)
return config
class MaskedCategoricalAccuracy(tf.keras.metrics.SparseCategoricalAccuracy):
"""An accuracy metric that masks some tokens."""
def __init__(self, masked_tokens=None, name='accuracy', dtype=None):
self._masked_tokens = masked_tokens or []
super().__init__(name, dtype=dtype)
def update_state(self, y_true, y_pred, sample_weight=None):
sample_weight = _apply_mask(y_true, sample_weight, self._masked_tokens,
self._dtype)
num_classes = tf.shape(y_pred)[-1]
y_true = tf.reshape(y_true, [-1])
y_pred = tf.reshape(y_pred, [-1, num_classes])
sample_weight = tf.reshape(sample_weight, [-1])
super().update_state(y_true, y_pred, sample_weight)
def get_config(self):
config = super().get_config()
config['masked_tokens'] = tuple(self._masked_tokens)
return config
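# --- Illustrative sketch, not part of the original module ---
# A minimal, hedged example of how MaskedCategoricalAccuracy might be used
# directly in eager mode. Token id 0 stands in for a padding token that should
# not count towards accuracy; the tensors below are made up for illustration.
if __name__ == '__main__':
  metric = MaskedCategoricalAccuracy(masked_tokens=[0])
  y_true = tf.constant([[1, 2, 0]])          # last position is padding
  y_pred = tf.one_hot([[1, 0, 0]], depth=3)  # correct, wrong, (masked)
  metric.update_state(y_true, y_pred)
  print(metric.result().numpy())             # 0.5: one of two unmasked tokens is correct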
| google-research/federated | utils/keras_metrics.py | Python | apache-2.0 | 2,516 |
# Copyright 2019 DeepMind Technologies Limited and Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Training script."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
from absl import logging
import tensorflow.compat.v1 as tf
from cs_gan import file_utils
from cs_gan import gan
from cs_gan import image_metrics
from cs_gan import utils
flags.DEFINE_integer(
'num_training_iterations', 1200000,
'Number of training iterations.')
flags.DEFINE_string(
'ode_mode', 'rk4', 'Integration method.')
flags.DEFINE_integer(
'batch_size', 64, 'Training batch size.')
flags.DEFINE_float(
'grad_reg_weight', 0.02, 'Step size for latent optimisation.')
flags.DEFINE_string(
'opt_name', 'gd', 'Name of the optimiser (gd|adam).')
flags.DEFINE_bool(
'schedule_lr', True, 'The method to project z.')
flags.DEFINE_bool(
'reg_first_grad_only', True, 'Whether only to regularise the first grad.')
flags.DEFINE_integer(
'num_latents', 128, 'The number of latents')
flags.DEFINE_integer(
'summary_every_step', 1000,
'The interval at which to log debug ops.')
flags.DEFINE_integer(
'image_metrics_every_step', 1000,
'The interval at which to log (expensive) image metrics.')
flags.DEFINE_integer(
'export_every', 10,
'The interval at which to export samples.')
# Use 50k to reproduce scores from the paper. Default to 10k here to avoid the
# runtime error caused by too large graph with 50k samples on some machines.
flags.DEFINE_integer(
'num_eval_samples', 10000,
'The number of samples used to evaluate FID/IS.')
flags.DEFINE_string(
'dataset', 'cifar', 'The dataset used for learning (cifar|mnist).')
flags.DEFINE_string(
'output_dir', '/tmp/ode_gan/gan', 'Location where to save output files.')
flags.DEFINE_float('disc_lr', 4e-2, 'Discriminator Learning rate.')
flags.DEFINE_float('gen_lr', 4e-2, 'Generator Learning rate.')
flags.DEFINE_bool(
'run_real_data_metrics', False,
'Whether or not to run image metrics on real data.')
flags.DEFINE_bool(
'run_sample_metrics', True,
'Whether or not to run image metrics on samples.')
FLAGS = flags.FLAGS
# Log info level (for Hooks).
tf.logging.set_verbosity(tf.logging.INFO)
def _copy_vars(v_list):
"""Copy variables in v_list."""
t_list = []
for v in v_list:
t_list.append(tf.identity(v))
return t_list
def _restore_vars(v_list, t_list):
"""Restore variables in v_list from t_list."""
ops = []
for v, t in zip(v_list, t_list):
ops.append(v.assign(t))
return ops
def _scale_vars(s, v_list):
"""Scale all variables in v_list by s."""
return [s * v for v in v_list]
def _acc_grads(g_sum, g_w, g):
"""Accumulate gradients in g, weighted by g_w."""
return [g_sum_i + g_w * g_i for g_sum_i, g_i in zip(g_sum, g)]
def _compute_reg_grads(gen_grads, disc_vars):
"""Compute gradients norm (this is an upper-bpund of the full-batch norm)."""
gen_norm = tf.accumulate_n([tf.reduce_sum(u * u) for u in gen_grads])
disc_reg_grads = tf.gradients(gen_norm, disc_vars)
return disc_reg_grads
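# --- Illustrative sketch, not part of the original file ---
# How the RK4 weighting is applied via the helpers above: the final update is
# a weighted sum of the four stage gradients with weights [1/6, 1/3, 1/3, 1/6]
# (mirroring ode_grad_weights for 'rk4' in main()). Plain Python floats stand
# in for tensors here purely for illustration.
def _rk4_combine_example(stage_grads):
  """Sketch only: combine four per-stage gradient lists with RK4 weights."""
  weights = [1. / 6., 1. / 3., 1. / 3., 1. / 6.]
  combined = _scale_vars(weights[0], stage_grads[0])
  for w, g in zip(weights[1:], stage_grads[1:]):
    combined = _acc_grads(combined, w, g)
  return combined
# Hypothetical usage: _rk4_combine_example([[1., 2.], [1.5, 2.5], [1.4, 2.4], [2., 3.]])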
def run_model(prior, images, model, disc_reg_weight):
"""Run the model with new data and samples.
Args:
prior: the noise source as the generator input.
images: images sampled from dataset.
model: a GAN model defined in gan.py.
    disc_reg_weight: regularisation weight for discriminator gradients.
Returns:
    debug_ops: statistics from the model, see gan.py for more details.
disc_grads: discriminator gradients.
gen_grads: generator gradients.
"""
generator_inputs = prior.sample(FLAGS.batch_size)
model_output = model.connect(images, generator_inputs)
optimization_components = model_output.optimization_components
disc_grads = tf.gradients(
optimization_components['disc'].loss,
optimization_components['disc'].vars)
gen_grads = tf.gradients(
optimization_components['gen'].loss,
optimization_components['gen'].vars)
if disc_reg_weight > 0.0:
reg_grads = _compute_reg_grads(gen_grads,
optimization_components['disc'].vars)
disc_grads = _acc_grads(disc_grads, disc_reg_weight, reg_grads)
debug_ops = model_output.debug_ops
return debug_ops, disc_grads, gen_grads
def update_model(model, disc_grads, gen_grads, disc_opt, gen_opt,
global_step, update_scale):
"""Update model with gradients."""
disc_vars, gen_vars = model.get_variables()
with tf.control_dependencies(gen_grads + disc_grads):
disc_update_op = disc_opt.apply_gradients(
zip(_scale_vars(update_scale, disc_grads),
disc_vars))
gen_update_op = gen_opt.apply_gradients(
zip(_scale_vars(update_scale, gen_grads),
gen_vars),
global_step=global_step)
update_op = tf.group([disc_update_op, gen_update_op])
return update_op
def main(argv):
del argv
utils.make_output_dir(FLAGS.output_dir)
data_processor = utils.DataProcessor()
# Compute the batch-size multiplier
if FLAGS.ode_mode == 'rk2':
batch_mul = 2
elif FLAGS.ode_mode == 'rk4':
batch_mul = 4
else:
batch_mul = 1
images = utils.get_train_dataset(data_processor, FLAGS.dataset,
int(FLAGS.batch_size * batch_mul))
image_splits = tf.split(images, batch_mul)
  logging.info('Generator learning rate: %g', FLAGS.gen_lr)
  logging.info('Discriminator learning rate: %g', FLAGS.disc_lr)
global_step = tf.train.get_or_create_global_step()
# Construct optimizers.
if FLAGS.opt_name == 'adam':
disc_opt = tf.train.AdamOptimizer(FLAGS.disc_lr, beta1=0.5, beta2=0.999)
gen_opt = tf.train.AdamOptimizer(FLAGS.gen_lr, beta1=0.5, beta2=0.999)
elif FLAGS.opt_name == 'gd':
if FLAGS.schedule_lr:
gd_disc_lr = tf.train.piecewise_constant(
global_step,
values=[FLAGS.disc_lr / 4., FLAGS.disc_lr, FLAGS.disc_lr / 2.],
boundaries=[500, 400000])
gd_gen_lr = tf.train.piecewise_constant(
global_step,
values=[FLAGS.gen_lr / 4., FLAGS.gen_lr, FLAGS.gen_lr / 2.],
boundaries=[500, 400000])
else:
gd_disc_lr = FLAGS.disc_lr
gd_gen_lr = FLAGS.gen_lr
disc_opt = tf.train.GradientDescentOptimizer(gd_disc_lr)
gen_opt = tf.train.GradientDescentOptimizer(gd_gen_lr)
else:
    raise ValueError('Unknown optimiser name!')
# Create the networks and models.
generator = utils.get_generator(FLAGS.dataset)
metric_net = utils.get_metric_net(FLAGS.dataset, use_sn=False)
model = gan.GAN(metric_net, generator)
prior = utils.make_prior(FLAGS.num_latents)
# Setup ODE parameters.
if FLAGS.ode_mode == 'rk2':
ode_grad_weights = [0.5, 0.5]
step_scale = [1.0]
elif FLAGS.ode_mode == 'rk4':
ode_grad_weights = [1. / 6., 1. / 3., 1. / 3., 1. / 6.]
step_scale = [0.5, 0.5, 1.]
elif FLAGS.ode_mode == 'euler':
# Euler update
ode_grad_weights = [1.0]
step_scale = []
else:
raise ValueError('Unknown ODE mode!')
# Extra steps for RK updates.
num_extra_steps = len(step_scale)
if FLAGS.reg_first_grad_only:
first_reg_weight = FLAGS.grad_reg_weight / ode_grad_weights[0]
other_reg_weight = 0.0
else:
first_reg_weight = FLAGS.grad_reg_weight
other_reg_weight = FLAGS.grad_reg_weight
debug_ops, disc_grads, gen_grads = run_model(prior, image_splits[0],
model, first_reg_weight)
disc_vars, gen_vars = model.get_variables()
final_disc_grads = _scale_vars(ode_grad_weights[0], disc_grads)
final_gen_grads = _scale_vars(ode_grad_weights[0], gen_grads)
restore_ops = []
# Preparing for further RK steps.
if num_extra_steps > 0:
# copy the variables before they are changed by update_op
saved_disc_vars = _copy_vars(disc_vars)
saved_gen_vars = _copy_vars(gen_vars)
# Enter RK loop.
with tf.control_dependencies(saved_disc_vars + saved_gen_vars):
step_deps = []
for i_step in range(num_extra_steps):
with tf.control_dependencies(step_deps):
# Compute gradient steps for intermediate updates.
update_op = update_model(
model, disc_grads, gen_grads, disc_opt, gen_opt,
None, step_scale[i_step])
with tf.control_dependencies([update_op]):
_, disc_grads, gen_grads = run_model(
prior, image_splits[i_step + 1], model, other_reg_weight)
          # Accumulate gradients for final update.
final_disc_grads = _acc_grads(final_disc_grads,
ode_grad_weights[i_step + 1],
disc_grads)
final_gen_grads = _acc_grads(final_gen_grads,
ode_grad_weights[i_step + 1],
gen_grads)
# Make new restore_op for each step.
restore_ops = []
restore_ops += _restore_vars(disc_vars, saved_disc_vars)
restore_ops += _restore_vars(gen_vars, saved_gen_vars)
step_deps = restore_ops
with tf.control_dependencies(restore_ops):
update_op = update_model(
model, final_disc_grads, final_gen_grads, disc_opt, gen_opt,
global_step, 1.0)
samples = generator(prior.sample(FLAGS.batch_size), is_training=False)
# Get data needed to compute FID. We also compute metrics on
# real data as a sanity check and as a reference point.
eval_real_data = utils.get_real_data_for_eval(FLAGS.num_eval_samples,
FLAGS.dataset,
split='train')
def sample_fn(x):
return utils.optimise_and_sample(x, module=model,
data=None, is_training=False)[0]
if FLAGS.run_sample_metrics:
sample_metrics = image_metrics.get_image_metrics_for_samples(
eval_real_data, sample_fn,
prior, data_processor,
num_eval_samples=FLAGS.num_eval_samples)
else:
sample_metrics = {}
if FLAGS.run_real_data_metrics:
data_metrics = image_metrics.get_image_metrics(
eval_real_data, eval_real_data)
else:
data_metrics = {}
sample_exporter = file_utils.FileExporter(
os.path.join(FLAGS.output_dir, 'samples'))
# Hooks.
debug_ops['it'] = global_step
# Abort training on Nans.
nan_disc_hook = tf.train.NanTensorHook(debug_ops['disc_loss'])
nan_gen_hook = tf.train.NanTensorHook(debug_ops['gen_loss'])
# Step counter.
  step_counter_hook = tf.train.StepCounterHook()
checkpoint_saver_hook = tf.train.CheckpointSaverHook(
checkpoint_dir=utils.get_ckpt_dir(FLAGS.output_dir), save_secs=10 * 60)
loss_summary_saver_hook = tf.train.SummarySaverHook(
save_steps=FLAGS.summary_every_step,
output_dir=os.path.join(FLAGS.output_dir, 'summaries'),
summary_op=utils.get_summaries(debug_ops))
metrics_summary_saver_hook = tf.train.SummarySaverHook(
save_steps=FLAGS.image_metrics_every_step,
output_dir=os.path.join(FLAGS.output_dir, 'summaries'),
summary_op=utils.get_summaries(sample_metrics))
hooks = [checkpoint_saver_hook, metrics_summary_saver_hook,
           nan_disc_hook, nan_gen_hook, step_counter_hook,
loss_summary_saver_hook]
# Start training.
with tf.train.MonitoredSession(hooks=hooks) as sess:
logging.info('starting training')
for key, value in sess.run(data_metrics).items():
      logging.info('%s: %f', key, value)
for i in range(FLAGS.num_training_iterations):
sess.run(update_op)
if i % FLAGS.export_every == 0:
samples_np, data_np = sess.run([samples, image_splits[0]])
# Create an object which gets data and does the processing.
data_np = data_processor.postprocess(data_np)
samples_np = data_processor.postprocess(samples_np)
sample_exporter.save(samples_np, 'samples')
sample_exporter.save(data_np, 'data')
if __name__ == '__main__':
tf.enable_resource_variables()
app.run(main)
| deepmind/deepmind-research | cs_gan/main_ode.py | Python | apache-2.0 | 12,654 |
# Copyright (c) 2014 Alcatel-Lucent Enterprise
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from facette.utils import *
from facette.v1.plotserie import PlotSerie
import json
PLOT_ID = "id"
PLOT_NAME = "name"
PLOT_DESCRIPTION = "description"
PLOT_TYPE = "type"
PLOT_SERIES = "series"
PLOT_STACK_MODE = "stack_mode"
PLOT_START = "start"
PLOT_END = "end"
PLOT_STEP = "step"
PLOT_MODIFIED = "modified"
PLOT_UNIT_LABEL = "unit_label"
PLOT_UNIT_TYPE = "unit_type"
GRAPH_TYPE_AREA = 1
GRAPH_TYPE_LINE = 2
STACK_MODE_NONE = 1
STACK_MODE_NORMAL = 2
STACK_MODE_PERCENT = 3
class Plot:
def __init__(self, js=""):
self.plot = {}
self.id = facette_to_json(PLOT_ID, js, self.plot)
self.name = facette_to_json(PLOT_NAME, js, self.plot)
self.description = facette_to_json(PLOT_DESCRIPTION, js, self.plot)
self.type = facette_to_json(PLOT_TYPE, js, self.plot)
self.stack_mode = facette_to_json(PLOT_STACK_MODE, js, self.plot)
self.start = facette_to_json(PLOT_START, js, self.plot)
self.end = facette_to_json(PLOT_END, js, self.plot)
self.step = facette_to_json(PLOT_STEP, js, self.plot)
self.modified = facette_to_json(PLOT_MODIFIED, js, self.plot)
self.unit_label = facette_to_json(PLOT_UNIT_LABEL, js, self.plot)
self.unit_type = facette_to_json(PLOT_UNIT_TYPE, js, self.plot)
self.series = []
if js.get(PLOT_SERIES):
for x in js[PLOT_SERIES]:
e = PlotSerie(x)
self.series.append(e)
self.plot[PLOT_SERIES] = self.series
def __str__(self):
js = self.plot
series = []
for s in self.series:
series.append(json.loads(str(s)))
js[PLOT_SERIES] = series
return json.dumps(js)
def __repr__(self):
return str(self)
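# --- Illustrative sketch, not part of the original module ---
# Round-tripping a plot definition through the wrapper above. The field values
# are made up; only keys and constants defined in this module are used, and the
# exact output depends on how facette_to_json fills unspecified fields.
if __name__ == '__main__':
    sample = {
        PLOT_ID: "p1",
        PLOT_NAME: "cpu-usage",
        PLOT_TYPE: GRAPH_TYPE_LINE,
        PLOT_STACK_MODE: STACK_MODE_NONE,
        PLOT_SERIES: [],
    }
    plot = Plot(sample)
    print(str(plot))  # serialises the stored fields back to JSON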
| OpenTouch/python-facette | src/facette/v1/plot.py | Python | apache-2.0 | 2,559 |
#!/usr/bin/env python
"""
Set up the logging
"""
import logging
import tempfile
import os
def initialize_logging():
"""
Set up the screen and file logging.
:return: The log filename
"""
# set up DEBUG logging to file, INFO logging to STDERR
log_file = os.path.join(tempfile.gettempdir(), 'spfy.log')
formatter = logging.Formatter(
'%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
# set up logging to file - see previous section for more details
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m-%d %H:%M',
filename=log_file,
filemode='w')
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setFormatter(formatter)
console.setLevel(logging.INFO)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
return log_file
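# --- Illustrative sketch, not part of the original module ---
# Typical usage: call initialize_logging() once at start-up, then log as usual.
# INFO and above go to stderr, while DEBUG and above go to the temp-file log.
if __name__ == '__main__':
    log_path = initialize_logging()
    logging.info('Logging initialised, full log at %s', log_path)
    logging.debug('This message only appears in the log file')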
| superphy/backend | app/modules/loggingFunctions.py | Python | apache-2.0 | 1,056 |
import pytest
from selenium import webdriver
@pytest.fixture
def driver(request):
wd = webdriver.Firefox(capabilities={"marionette": True})
#(desired_capabilities={"chromeOptions": {"args": ["--start-fullscreen"]}})
request.addfinalizer(wd.quit)
return wd
def test_example(driver):
driver.get("http://localhost/litecart/admin/")
driver.find_element_by_xpath("//input[@name='username']").send_keys("admin")
driver.find_element_by_xpath("//input[@name='password']").send_keys("admin")
driver.find_element_by_xpath("//button[@name='login']").click()
| byakatat/selenium-training | test_login.py | Python | apache-2.0 | 586 |
import numpy as np
def sigmoid(x):
"""
Compute the sigmoid function for the input here.
"""
x = 1. / (1. + np.exp(-x))
return x
def sigmoid_grad(f):
"""
Compute the gradient for the sigmoid function here. Note that
for this implementation, the input f should be the sigmoid
function value of your original input x.
"""
f = f * (1. - f)
return f
def test_sigmoid_basic():
"""
Some simple tests to get you started.
Warning: these are not exhaustive.
"""
print "Running basic tests..."
x = np.array([[1, 2], [-1, -2]])
f = sigmoid(x)
g = sigmoid_grad(f)
print f
assert np.amax(f - np.array([[0.73105858, 0.88079708],
[0.26894142, 0.11920292]])) <= 1e-6
print g
assert np.amax(g - np.array([[0.19661193, 0.10499359],
[0.19661193, 0.10499359]])) <= 1e-6
print "You should verify these results!\n"
def test_sigmoid():
"""
Use this space to test your sigmoid implementation by running:
python q2_sigmoid.py
This function will not be called by the autograder, nor will
your tests be graded.
"""
print "Running your tests..."
### YOUR CODE HERE
raise NotImplementedError
### END YOUR CODE
if __name__ == "__main__":
test_sigmoid_basic();
#test_sigmoid()
| zhaojijet/CS224d | Assignment1/q2_sigmoid.py | Python | apache-2.0 | 1,343 |
#!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
import datetime
import socket
import time
import sys
import os.path
lib_path = os.path.abspath('../utils')
sys.path.append(lib_path)
from myParser import *
from myCrypto import *
#from myDriver import *
#from myCamDriver import *
import re
import hashlib
#from PIL import Image
#host='connect.mysensors.info'
host='localhost'
port=9090
state="INITIAL"
device=""
server="mysensors"
class mySensorDatagramProtocol(DatagramProtocol):
def __init__(self, host,port,reactor):
self.ip= socket.gethostbyname(host)
self.port = port
#self._reactor=reactor
#self.ip=reactor.resolve(host)
def startProtocol(self):
self.transport.connect(self.ip,self.port)
if state=='INITIAL':
#If system is at the initial state, it will send the device creation Senze
self.register()
else:
response=raw_input("Enter your Senze:")
self.sendDatagram(response)
def stopProtocol(self):
#on disconnect
#self._reactor.listenUDP(0, self)
print "STOP **************"
def register(self):
global server
cry=myCrypto(name=device)
senze ='SHARE #pubkey %s @%s' %(pubkey,server)
senze=cry.signSENZE(senze)
self.transport.write(senze)
def sendDatagram(self,senze):
global server
cry=myCrypto(name=device)
senze=cry.signSENZE(senze)
print senze
self.transport.write(senze)
def datagramReceived(self, datagram, host):
print 'Datagram received: ', repr(datagram)
parser=myParser(datagram)
recipients=parser.getUsers()
sender=parser.getSender()
signature=parser.getSignature()
data=parser.getData()
sensors=parser.getSensors()
cmd=parser.getCmd()
if cmd=="DATA":
if 'UserCreated' in data['msg']:
#Creating the .devicename file and store the device name and PIN
f=open(".devicename",'w')
f.write(device+'\n')
f.close()
print device+ " was created at the server."
print "You should execute the program again."
print "The system halted!"
reactor.stop()
elif 'UserCreationFailed' in data['msg']:
print "This user name may be already taken"
print "You can try it again with different username"
print "The system halted!"
reactor.stop()
#self.sendDatagram()
def init():
#cam=myCamDriver()
global device
global pubkey
global state
    #If the .devicename file is not there, we will read the device name from the keyboard
    #else we will get it from the .devicename file
try:
if not os.path.isfile(".devicename"):
device=raw_input("Enter the device name: ")
# Account need to be created at the server
state='INITIAL'
else:
#The device name will be read form the .devicename file
f=open(".devicename","r")
device = f.readline().rstrip("\n")
state='READY'
except:
print "ERRER: Cannot access the device name file."
raise SystemExit
#Here we will generate public and private keys for the device
#These keys will be used to perform authentication and key exchange
try:
cry=myCrypto(name=device)
#If keys are not available yet
if not os.path.isfile(cry.pubKeyLoc):
            # Generate or load an RSA keypair with an exponent of 65537 in PEM format
            # The private and public keys are saved in the .devicenamePriveKey and .devicenamePubKey files
cry.generateRSA(bits=1024)
pubkey=cry.loadRSAPubKey()
except:
print "ERRER: Cannot genereate private/public keys for the device."
raise SystemExit
print pubkey
#Check the network connectivity.
#check_connectivity(ServerName)
def main():
global host
global port
protocol = mySensorDatagramProtocol(host,port,reactor)
reactor.listenUDP(0, protocol)
reactor.run()
if __name__ == '__main__':
init()
main()
| kasundezoysa/senze | testpi/myDevice.py | Python | apache-2.0 | 4,355 |
# -*- coding: utf-8 -*-
"""
Tests.
"""
import unittest
from bruges.rockphysics import fluidsub
# Inputs... GAS case
vp_gas = 2429.0
vs_gas = 1462.4
rho_gas = 2080.
# Expected outputs... BRINE case
vp_brine = 2850.5
vs_brine = 1416.1
rho_brine = 2210.0
phi = 0.275 # Don't know this... reading from fig
rhohc = 250.0 # gas
rhow = 1040.0 # brine
sw = 0.3 # Don't know this... just guessing
swnew = 1.0 # Don't know this... just guessing
khc = 207000000.0 # gas
kw = 2950000000.0 # brine
kclay = 25000000000.0
kqtz = 37000000000.0
vclay = 0.05
kmin = 36266406250.0 # Don't know this... reading from fig
class FluidsubTest(unittest.TestCase):
"""
Tests fluid sub calculations against Smith et al 2003.
https://dl.dropboxusercontent.com/u/14965965/Smith_etal_2003.pdf
"""
def test_avseth(self):
# Base case: gas
# Subbing with: brine
sub = fluidsub.avseth_fluidsub(vp=vp_gas,
vs=vs_gas,
rho=rho_gas,
phi=phi,
rhof1=rhohc,
rhof2=rhow,
kmin=37000000000,
kf1=khc,
kf2=kw)
self.assertAlmostEqual(sub[0], vp_brine, places=-1) # Cannot match :(
self.assertAlmostEqual(sub[1], vs_brine, places=-1) # Cannot match :(
self.assertAlmostEqual(sub[2], rho_brine, places=-1) # Cannot match :(
def test_smith(self):
# Base case: gas
# Subbing with: brine
sub = fluidsub.smith_fluidsub(vp=vp_gas,
vs=vs_gas,
rho=rho_gas,
phi=phi,
rhohc=rhohc,
rhow=rhow,
sw=sw,
swnew=swnew,
khc=khc,
kw=kw,
kclay=kclay,
kqtz=kqtz,
vclay=vclay)
self.assertAlmostEqual(sub[0], vp_brine, places=-1)
self.assertAlmostEqual(sub[1], vs_brine, places=-1)
self.assertAlmostEqual(sub[2], rho_brine, places=-1) # Cannot match :(
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(FluidsubTest)
unittest.TextTestRunner(verbosity=2).run(suite)
| agile-geoscience/agilegeo | bruges/rockphysics/test/fluidsub_test.py | Python | apache-2.0 | 2,698 |
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from rigour.errors import ValidationFailed
from rigour.types import *
from rigour.constraints import length_between
import rigour
import pytest
def test_secrecy_declared_before():
t = String().secret().constrain(length_between(4,6))
with pytest.raises(ValidationFailed) as excinfo:
t.check("xxx")
message = str(excinfo)
assert "xxx" not in message
def test_secrecy_declared_after():
t = String().constrain(length_between(4,6)).secret()
with pytest.raises(ValidationFailed) as excinfo:
t.check("xxx")
message = str(excinfo)
assert "xxx" not in message
| steinarvk/rigour | rigour/tests/test_secrecy.py | Python | apache-2.0 | 1,198 |
# Copyright 2014 Modelling, Simulation and Design Lab (MSDL) at
# McGill University and the University of Antwerp (http://msdl.cs.mcgill.ca/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The No Age scheduler is based on the Heapset scheduler, though it does not take age into account.
.. warning:: This scheduler does not take the age into account, making it **unusable** in simulations where the *timeAdvance* function can return (exactly) 0. If unsure, do **not** use this scheduler, but the more general Heapset scheduler.
The heap will contain only the timestamps of events that should happen. One of the dictionaries will contain the actual models that transition at the specified time. The second dictionary then contains a reverse relation: it maps the models to their time_next. This reverse relation is necessary to know the *old* time_next value of the model, because as soon as the model has its time_next changed, its previously scheduled time will be unknown. This 'previous time' is **not** equal to the *timeLast*, as it is possible that the model's wait time was interrupted.
When scheduling, the model is added to the dictionary at the specified time_next. If it is the first element at this location in the dictionary, the timestamp is also added to the heap. This way, the heap only contains *unique* timestamps and the actual complexity is thus reduced to the number of *different* timestamps. Furthermore, the reverse relation is also updated.
Unscheduling is done similarly by simply removing the element from the dictionary.
Rescheduling is a slight optimisation of unscheduling, followed by scheduling.
This scheduler does still schedule models that are inactive (their time_next is infinity), though this does not influence the complexity. The complexity is not affected due to infinity being a single element in the heap that is always present. Since a heap has O(log(n)) complexity, this one additional element does not have a serious impact.
The main advantage over the Activity Heap is that it never gets dirty and thus doesn't require periodical cleanup. The only part that gets dirty is the actual heap, which only contains small tuples. Duplicates of these will also be reduced to a single element, thus memory consumption should not be a problem in most cases.
This scheduler is ideal in situations where most transitions happen at exactly the same time, as we can then profit from the internal structure and simply return the mapped elements. It results in sufficient efficiency in most other cases, mainly due to the code base being a lot smaller than the Activity Heap.
"""
from heapq import heappush, heappop
from pypdevs.logger import *
class SchedulerNA(object):
"""
Scheduler class itself
"""
def __init__(self, models, epsilon, total_models):
"""
Constructor
:param models: all models in the simulation
"""
self.heap = []
self.reverse = [None] * total_models
self.mapped = {}
self.infinite = float('inf')
# Init the basic 'inactive' entry here, to prevent scheduling in the heap itself
self.mapped[self.infinite] = set()
self.epsilon = epsilon
for m in models:
self.schedule(m)
def schedule(self, model):
"""
Schedule a model
:param model: the model to schedule
"""
try:
self.mapped[model.time_next[0]].add(model)
except KeyError:
self.mapped[model.time_next[0]] = set([model])
heappush(self.heap, model.time_next[0])
try:
self.reverse[model.model_id] = model.time_next[0]
except IndexError:
self.reverse.append(model.time_next[0])
def unschedule(self, model):
"""
Unschedule a model
:param model: model to unschedule
"""
try:
self.mapped[self.reverse[model.model_id]].remove(model)
except KeyError:
pass
self.reverse[model.model_id] = None
def massReschedule(self, reschedule_set):
"""
Reschedule all models provided.
Equivalent to calling unschedule(model); schedule(model) on every element in the iterable.
:param reschedule_set: iterable containing all models to reschedule
"""
#NOTE the usage of exceptions is a lot better for the PyPy JIT and nets a noticable speedup
# as the JIT generates guard statements for an 'if'
for model in reschedule_set:
model_id = model.model_id
try:
self.mapped[self.reverse[model_id]].remove(model)
except KeyError:
# Element simply not present, so don't need to unschedule it
pass
self.reverse[model_id] = tn = model.time_next[0]
try:
self.mapped[tn].add(model)
except KeyError:
# Create a tuple with a single entry and use it to initialize the mapped entry
self.mapped[tn] = set((model, ))
heappush(self.heap, tn)
def readFirst(self):
"""
Returns the time of the first model that has to transition
:returns: timestamp of the first model
"""
first = self.heap[0]
while len(self.mapped[first]) == 0:
del self.mapped[first]
heappop(self.heap)
first = self.heap[0]
        # The age was stripped off
return (first, 1)
def getImminent(self, time):
"""
Returns a list of all models that transition at the provided time, with the specified epsilon deviation allowed.
:param time: timestamp to check for models
.. warning:: For efficiency, this method only checks the **first** elements, so trying to invoke this function with a timestamp higher than the value provided with the *readFirst* method, will **always** return an empty set.
"""
t, age = time
imm_children = set()
try:
first = self.heap[0]
if (abs(first - t) < self.epsilon):
#NOTE this would change the original set, though this doesn't matter as it is no longer used
imm_children = self.mapped.pop(first)
heappop(self.heap)
first = self.heap[0]
while (abs(first - t) < self.epsilon):
imm_children |= self.mapped.pop(first)
heappop(self.heap)
first = self.heap[0]
except IndexError:
pass
return imm_children
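# --- Illustrative sketch, not part of the original module ---
# A minimal driver for the scheduler above, using a stand-in model class.
# Real PythonPDEVS models provide model_id and time_next themselves; the stub
# below only mimics the two attributes this scheduler touches, and the time
# values are made up.
if __name__ == '__main__':
    class _StubModel(object):
        def __init__(self, model_id, t):
            self.model_id = model_id
            self.time_next = (t, 1)
    stubs = [_StubModel(0, 5.0), _StubModel(1, 5.0), _StubModel(2, 9.0)]
    scheduler = SchedulerNA(stubs, epsilon=1e-6, total_models=3)
    t = scheduler.readFirst()            # (5.0, 1)
    imminent = scheduler.getImminent(t)  # both models scheduled at 5.0
    for m in imminent:
        m.time_next = (m.time_next[0] + 10.0, 1)
    scheduler.massReschedule(imminent)
    print(scheduler.readFirst())         # now (9.0, 1)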
| kdheepak89/pypdevs | pypdevs/schedulers/schedulerNA.py | Python | apache-2.0 | 7,125 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 Clione Software
# Copyright (c) 2010-2013 Cidadania S. Coop. Galega
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from django.core.validators import RegexValidator
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from core.spaces.file_validation import ContentTypeRestrictedFileField
from fields import StdImageField
from allowed_types import ALLOWED_CONTENT_TYPES
class Space(models.Model):
"""
Spaces model. This model stores a "space" or "place" also known as a
participative process in reality. Every place has a minimum set of
settings for customization.
There are three main permission roles in every space: administrator
(admins), moderators (mods) and regular users (users).
"""
name = models.CharField(_('Name'), max_length=250, unique=True,
help_text=_('Max: 250 characters'))
url = models.CharField(_('URL'), max_length=100, unique=True,
validators=[RegexValidator(regex='^[a-z0-9_]+$',
message='Invalid characters in the space URL.')],
help_text=_('Valid characters are lowercase, digits and \
        underscore. This will be the accessible URL'))
description = models.TextField(_('Description'),
default=_('Write here your description.'))
pub_date = models.DateTimeField(_('Date of creation'), auto_now_add=True)
author = models.ForeignKey(User, blank=True, null=True,
verbose_name=_('Space creator'), help_text=_('Select a user that \
will be marked as creator of the space'))
logo = StdImageField(upload_to='spaces/logos', size=(100, 75, False),
help_text = _('Valid extensions are jpg, jpeg, png and gif'))
banner = StdImageField(upload_to='spaces/banners', size=(500, 75, False),
help_text = _('Valid extensions are jpg, jpeg, png and gif'))
public = models.BooleanField(_('Public space'), help_text=_("This will \
make the space visible to everyone, but registration will be \
necessary to participate."))
# Modules
mod_debate = models.BooleanField(_('Debate'))
mod_proposals = models.BooleanField(_('Proposals'))
mod_news = models.BooleanField(_('News'))
mod_cal = models.BooleanField(_('Calendar'))
mod_docs = models.BooleanField(_('Documents'))
mod_voting = models.BooleanField(_('Voting'))
class Meta:
ordering = ['name']
verbose_name = _('Space')
verbose_name_plural = _('Spaces')
get_latest_by = 'pub_date'
permissions = (
('view_space', 'Can view this space.'),
('admin_space', 'Can administrate this space.'),
('mod_space', 'Can moderate this space.')
)
def __unicode__(self):
return self.name
@models.permalink
def get_absolute_url(self):
return ('space-index', (), {
'space_url': self.url})
class Entity(models.Model):
"""
This model stores the name of the entities responsible for the creation
of the space or supporting it.
"""
name = models.CharField(_('Name'), max_length=100, unique=True)
website = models.CharField(_('Website'), max_length=100, null=True,
blank=True)
logo = models.ImageField(upload_to='spaces/logos', verbose_name=_('Logo'),
blank=True, null=True)
space = models.ForeignKey(Space, blank=True, null=True)
class Meta:
ordering = ['name']
verbose_name = _('Entity')
verbose_name_plural = _('Entities')
def __unicode__(self):
return self.name
class Document(models.Model):
"""
    This model stores documents for the space, like a document repository.
    There is no restriction on what a user can upload to the space.
:methods: get_file_ext, get_file_size
"""
title = models.CharField(_('Document title'), max_length=100,
help_text=_('Max: 100 characters'))
space = models.ForeignKey(Space, blank=True, null=True,
help_text=_('Change the space to whom belongs this document'))
docfile = ContentTypeRestrictedFileField(_('File'),
upload_to='spaces/documents/%Y/%m/%d',
content_types=ALLOWED_CONTENT_TYPES,
max_upload_size=26214400,
help_text=_('Permitted file types: DOC, DOCX, PPT, ODT, ODF, ODP, \
PDF, RST, TXT.'))
pub_date = models.DateTimeField(auto_now_add=True)
author = models.ForeignKey(User, verbose_name=_('Author'), blank=True,
null=True, help_text=_('Change the user that will figure as the \
author'))
def get_file_ext(self):
filename = self.docfile.name
extension = filename.split('.')
return extension[1].upper()
def get_file_size(self):
if self.docfile.size < 1023:
return str(self.docfile.size) + " Bytes"
elif self.docfile.size >= 1024 and self.docfile.size <= 1048575:
return str(round(self.docfile.size / 1024.0, 2)) + " KB"
elif self.docfile.size >= 1048576:
return str(round(self.docfile.size / 1024000.0, 2)) + " MB"
class Meta:
ordering = ['pub_date']
verbose_name = _('Document')
verbose_name_plural = _('Documents')
get_latest_by = 'pub_date'
# There is no 'view-document' view, so I'll leave the get_absolute_url
    # method without permalink. Remember that the document files are accessed
# through the url() method in templates.
def get_absolute_url(self):
return '/spaces/%s/docs/%s' % (self.space.url, self.id)
class Event(models.Model):
"""
Meeting data model. Every space (process) has N meetings. This will
    keep a record of the attendees, meeting name, etc.
"""
title = models.CharField(_('Event name'), max_length=250,
help_text="Max: 250 characters")
space = models.ForeignKey(Space, blank=True, null=True)
user = models.ManyToManyField(User, verbose_name=_('Users'),
        help_text=_('List of the users that will attend or attended the \
event.'))
pub_date = models.DateTimeField(auto_now_add=True)
event_author = models.ForeignKey(User, verbose_name=_('Created by'),
blank=True, null=True, related_name='meeting_author',
help_text=_('Select the user that will be designated as author.'))
event_date = models.DateTimeField(verbose_name=_('Event date'),
help_text=_('Select the date where the event is celebrated.'))
description = models.TextField(_('Description'), blank=True, null=True)
location = models.TextField(_('Location'), blank=True, null=True)
latitude = models.DecimalField(_('Latitude'), blank=True, null=True,
max_digits=17, decimal_places=15, help_text=_('Specify it in decimal'))
longitude = models.DecimalField(_('Longitude'), blank=True, null=True,
max_digits=17, decimal_places=15, help_text=_('Specify it in decimal'))
def is_due(self):
if self.event_date < datetime.now():
return True
else:
return False
class Meta:
ordering = ['event_date']
verbose_name = _('Event')
verbose_name_plural = _('Events')
get_latest_by = 'event_date'
permissions = (
('view_event', 'Can view this event'),
('admin_event', 'Can administrate this event'),
('mod_event', 'Can moderate this event'),
)
def __unicode__(self):
return self.title
@models.permalink
def get_absolute_url(self):
return ('view-event', (), {
'space_url': self.space.url,
'event_id': str(self.id)})
class Intent(models.Model):
"""
Intent data model. Intent stores the reference of a user-token when a user
    asks to enter a restricted space.
.. versionadded: 0.1.5
"""
user = models.ForeignKey(User)
space = models.ForeignKey(Space)
token = models.CharField(max_length=32)
requested_on = models.DateTimeField(auto_now_add=True)
def get_approve_url(self):
site = Site.objects.all()[0]
return "http://%s%sintent/approve/%s" % (site.domain, self.space.get_absolute_url(), self.token)
| cidadania/e-cidadania | src/core/spaces/models.py | Python | apache-2.0 | 8,796 |
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import policy
class AddProtocol(policy.PolicyTargetMixin, tables.LinkAction):
name = "create"
verbose_name = _("Add Protocol")
url = "horizon:identity:identity_providers:protocols:create"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("identity", "identity:create_protocol"),)
def get_link_url(self, datum=None):
idp_id = self.table.kwargs['identity_provider_id']
return reverse(self.url, args=(idp_id,))
class RemoveProtocol(policy.PolicyTargetMixin, tables.DeleteAction):
@staticmethod
def action_present(count):
return ngettext_lazy(
"Delete Protocol",
"Delete Protocols",
count
)
@staticmethod
def action_past(count):
return ngettext_lazy(
"Deleted Protocol",
"Deleted Protocols",
count
)
policy_rules = (("identity", "identity:delete_protocol"),)
def delete(self, request, obj_id):
identity_provider = self.table.kwargs['identity_provider_id']
protocol = obj_id
api.keystone.protocol_delete(request, identity_provider, protocol)
class ProtocolsTable(tables.DataTable):
protocol = tables.Column("id",
verbose_name=_("Protocol ID"))
mapping = tables.Column("mapping_id",
verbose_name=_("Mapping ID"))
def get_object_display(self, datum):
return datum.id
class Meta(object):
name = "idp_protocols"
verbose_name = _("Protocols")
table_actions = (AddProtocol, RemoveProtocol)
row_actions = (RemoveProtocol, )
| openstack/horizon | openstack_dashboard/dashboards/identity/identity_providers/protocols/tables.py | Python | apache-2.0 | 2,469 |
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""nsx_gw_devices
Revision ID: 19180cf98af6
Revises: 117643811bca
Create Date: 2014-02-26 02:46:26.151741
"""
# revision identifiers, used by Alembic.
revision = '19180cf98af6'
down_revision = '117643811bca'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.nicira.NeutronPlugin.NvpPluginV2',
'neutron.plugins.nicira.NeutronServicePlugin.NvpAdvancedPlugin',
'neutron.plugins.vmware.plugin.NsxPlugin',
'neutron.plugins.vmware.plugin.NsxServicePlugin'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.create_table(
'networkgatewaydevicereferences',
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_gateway_id', sa.String(length=36), nullable=True),
sa.Column('interface_name', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['network_gateway_id'], ['networkgateways.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', 'network_gateway_id', 'interface_name'),
mysql_engine='InnoDB')
    # Copy data from networkgatewaydevices into networkgatewaydevicereferences
op.execute("INSERT INTO networkgatewaydevicereferences SELECT "
"id, network_gateway_id, interface_name FROM "
"networkgatewaydevices")
# drop networkgatewaydevices
op.drop_table('networkgatewaydevices')
op.create_table(
'networkgatewaydevices',
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('nsx_id', sa.String(length=36), nullable=True),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('connector_type', sa.String(length=10), nullable=True),
sa.Column('connector_ip', sa.String(length=64), nullable=True),
sa.Column('status', sa.String(length=16), nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_engine='InnoDB')
# Create a networkgatewaydevice for each existing reference.
# For existing references nsx_id == neutron_id
    # Do not fill connector info as it would be unknown
op.execute("INSERT INTO networkgatewaydevices (id, nsx_id) SELECT "
"id, id as nsx_id FROM networkgatewaydevicereferences")
def downgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.drop_table('networkgatewaydevices')
# Re-create previous version of networkgatewaydevices table
op.create_table(
'networkgatewaydevices',
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_gateway_id', sa.String(length=36), nullable=True),
sa.Column('interface_name', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['network_gateway_id'], ['networkgateways.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
mysql_engine='InnoDB')
# Copy from networkgatewaydevicereferences to networkgatewaydevices
op.execute("INSERT INTO networkgatewaydevices SELECT "
"id, network_gateway_id, interface_name FROM "
"networkgatewaydevicereferences")
    # Drop networkgatewaydevicereferences
op.drop_table('networkgatewaydevicereferences')
| zhhf/charging | charging/db/migration/alembic_migrations/versions/19180cf98af6_nsx_gw_devices.py | Python | apache-2.0 | 4,178 |
from datetime import datetime
import random
import string
from bson import ObjectId
class DuplicateUserException(Exception):
    def __init__(self, message='User name/email already exists'):
        Exception.__init__(self, message)
class UserServiceException(Exception):
def __init__(self, message=None):
Exception.__init__(self, message)
@classmethod
def cannot_delete_super_admin(cls):
return UserServiceException("Cannot delete super admin user!")
class UserService(object):
def __init__(self, db):
self.db = db
self.users = self.db.user_collection
def generate_api_key(self):
s = string.ascii_letters + string.digits
return ''.join(random.sample(s, 20))
def create(self, item):
if self.user_exists(item['email']):
raise DuplicateUserException()
item.pop('_id', None)
item['created_at'] = datetime.now()
item['status'] = True
if 'api_key' not in item:
item['api_key'] = self.generate_api_key()
if 'roles' not in item or item['roles'] is None or len(item['roles']) == 0:
item['roles'] = ['member']
return self.users.insert(item)
def get_by_email(self, email):
return self.users.find_one({"email": email})
def validate_user(self, username, password):
query = {'email': username, 'password': password}
return self.users.find(query).count() > 0
def search(self, email=None):
query = {}
if email is not None:
query['email'] = email
return [x for x in self.users.find(query)]
def delete(self, id):
item = self.get_by_id(id)
if item and 'roles' in item and item['roles'] is not None and 'super_admin' in item['roles']:
raise UserServiceException.cannot_delete_super_admin()
return self.users.remove({"_id": ObjectId(id)})
def get_by_id(self, id):
return self.users.find_one({"_id": ObjectId(id)})
def get_by_api_key(self, api_key):
return self.users.find_one({"api_key": api_key})
def update(self, item):
if item['_id'] is None:
return item
if self.user_exists(item['email'], str(item['_id'])):
raise DuplicateUserException()
item['updated_at'] = datetime.now()
self.users.save(item)
return item
def user_exists(self, email, id=None):
query = {}
if id is not None:
query = {"_id": {"$ne": ObjectId(id)}}
query['email'] = email
return self.users.find(query).count() > 0
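# --- Illustrative usage sketch (editor's addition, not part of the original
# module). It assumes a local MongoDB reachable through pymongo's MongoClient
# and a database object exposing a `user_collection` attribute, as UserService
# expects; the database name and user fields below are placeholders.
def _example_user_service_usage():
    from pymongo import MongoClient
    db = MongoClient()["geosnap"]
    service = UserService(db)
    # create() fills created_at/status/api_key and returns the inserted _id.
    user_id = service.create({"email": "alice@example.com",
                              "password": "secret",
                              "roles": ["member"]})
    assert service.get_by_id(user_id)["email"] == "alice@example.com"
    # validate_user() compares the stored password verbatim (no hashing here).
    assert service.validate_user("alice@example.com", "secret")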
| cackharot/geosnap-server | src/geosnap/service/UserService.py | Python | apache-2.0 | 2,611 |
from abc import ABCMeta, abstractmethod, abstractproperty
from contextlib import contextmanager
from functools import wraps
import gzip
from inspect import getargspec
from itertools import (
combinations,
count,
product,
)
import operator
import os
from os.path import abspath, dirname, join, realpath
import shutil
from sys import _getframe
import tempfile
from logbook import TestHandler
from mock import patch
from nose.tools import nottest
from numpy.testing import assert_allclose, assert_array_equal
import pandas as pd
from six import itervalues, iteritems, with_metaclass
from six.moves import filter, map
from sqlalchemy import create_engine
from testfixtures import TempDirectory
from toolz import concat, curry
from zipline.assets import AssetFinder, AssetDBWriter
from zipline.assets.synthetic import make_simple_equity_info
from zipline.data.data_portal import DataPortal
from zipline.data.loader import get_benchmark_filename, INDEX_MAPPING
from zipline.data.minute_bars import (
BcolzMinuteBarReader,
BcolzMinuteBarWriter,
US_EQUITIES_MINUTES_PER_DAY
)
from zipline.data.us_equity_pricing import (
BcolzDailyBarReader,
BcolzDailyBarWriter,
SQLiteAdjustmentWriter,
)
from zipline.finance.blotter import Blotter
from zipline.finance.trading import TradingEnvironment
from zipline.finance.order import ORDER_STATUS
from zipline.lib.labelarray import LabelArray
from zipline.pipeline.data import USEquityPricing
from zipline.pipeline.engine import SimplePipelineEngine
from zipline.pipeline.factors import CustomFactor
from zipline.pipeline.loaders.testing import make_seeded_random_loader
from zipline.utils import security_list
from zipline.utils.calendars import get_calendar
from zipline.utils.input_validation import expect_dimensions
from zipline.utils.numpy_utils import as_column, isnat
from zipline.utils.pandas_utils import timedelta_to_integral_seconds
from zipline.utils.paths import ensure_directory
from zipline.utils.sentinel import sentinel
import numpy as np
from numpy import float64
EPOCH = pd.Timestamp(0, tz='UTC')
def seconds_to_timestamp(seconds):
return pd.Timestamp(seconds, unit='s', tz='UTC')
def to_utc(time_str):
"""Convert a string in US/Eastern time to UTC"""
return pd.Timestamp(time_str, tz='US/Eastern').tz_convert('UTC')
def str_to_seconds(s):
"""
Convert a pandas-intelligible string to (integer) seconds since UTC.
>>> from pandas import Timestamp
>>> (Timestamp('2014-01-01') - Timestamp(0)).total_seconds()
1388534400.0
>>> str_to_seconds('2014-01-01')
1388534400
"""
return timedelta_to_integral_seconds(pd.Timestamp(s, tz='UTC') - EPOCH)
def drain_zipline(test, zipline):
output = []
transaction_count = 0
msg_counter = 0
# start the simulation
for update in zipline:
msg_counter += 1
output.append(update)
if 'daily_perf' in update:
transaction_count += \
len(update['daily_perf']['transactions'])
return output, transaction_count
def check_algo_results(test,
results,
expected_transactions_count=None,
expected_order_count=None,
expected_positions_count=None,
sid=None):
if expected_transactions_count is not None:
txns = flatten_list(results["transactions"])
test.assertEqual(expected_transactions_count, len(txns))
if expected_positions_count is not None:
raise NotImplementedError
if expected_order_count is not None:
# de-dup orders on id, because orders are put back into perf packets
        # whenever a txn is filled
orders = set([order['id'] for order in
flatten_list(results["orders"])])
test.assertEqual(expected_order_count, len(orders))
def flatten_list(list):
return [item for sublist in list for item in sublist]
def assert_single_position(test, zipline):
output, transaction_count = drain_zipline(test, zipline)
if 'expected_transactions' in test.zipline_test_config:
test.assertEqual(
test.zipline_test_config['expected_transactions'],
transaction_count
)
else:
test.assertEqual(
test.zipline_test_config['order_count'],
transaction_count
)
# the final message is the risk report, the second to
# last is the final day's results. Positions is a list of
# dicts.
closing_positions = output[-2]['daily_perf']['positions']
# confirm that all orders were filled.
# iterate over the output updates, overwriting
# orders when they are updated. Then check the status on all.
orders_by_id = {}
for update in output:
if 'daily_perf' in update:
if 'orders' in update['daily_perf']:
for order in update['daily_perf']['orders']:
orders_by_id[order['id']] = order
for order in itervalues(orders_by_id):
test.assertEqual(
order['status'],
ORDER_STATUS.FILLED,
"")
test.assertEqual(
len(closing_positions),
1,
"Portfolio should have one position."
)
sid = test.zipline_test_config['sid']
test.assertEqual(
closing_positions[0]['sid'],
sid,
"Portfolio should have one position in " + str(sid)
)
return output, transaction_count
@contextmanager
def security_list_copy():
old_dir = security_list.SECURITY_LISTS_DIR
new_dir = tempfile.mkdtemp()
try:
for subdir in os.listdir(old_dir):
shutil.copytree(os.path.join(old_dir, subdir),
os.path.join(new_dir, subdir))
with patch.object(security_list, 'SECURITY_LISTS_DIR', new_dir), \
patch.object(security_list, 'using_copy', True,
create=True):
yield
finally:
shutil.rmtree(new_dir, True)
def add_security_data(adds, deletes):
if not hasattr(security_list, 'using_copy'):
raise Exception('add_security_data must be used within '
'security_list_copy context')
directory = os.path.join(
security_list.SECURITY_LISTS_DIR,
"leveraged_etf_list/20150127/20150125"
)
if not os.path.exists(directory):
os.makedirs(directory)
del_path = os.path.join(directory, "delete")
with open(del_path, 'w') as f:
for sym in deletes:
f.write(sym)
f.write('\n')
add_path = os.path.join(directory, "add")
with open(add_path, 'w') as f:
for sym in adds:
f.write(sym)
f.write('\n')
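# --- Illustrative usage sketch (editor's addition): add_security_data() only
# works inside security_list_copy(), so the two are combined as below. The
# ticker symbols are placeholders.
def _example_security_list_usage():
    with security_list_copy():
        add_security_data(adds=['AAPL', 'MSFT'], deletes=['BZQ'])
        # ...run code that reads the patched leveraged-ETF lists here...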
def all_pairs_matching_predicate(values, pred):
"""
Return an iterator of all pairs, (v0, v1) from values such that
`pred(v0, v1) == True`
Parameters
----------
values : iterable
pred : function
Returns
-------
pairs_iterator : generator
Generator yielding pairs matching `pred`.
Examples
--------
>>> from zipline.testing import all_pairs_matching_predicate
>>> from operator import eq, lt
>>> list(all_pairs_matching_predicate(range(5), eq))
[(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]
>>> list(all_pairs_matching_predicate("abcd", lt))
[('a', 'b'), ('a', 'c'), ('a', 'd'), ('b', 'c'), ('b', 'd'), ('c', 'd')]
"""
return filter(lambda pair: pred(*pair), product(values, repeat=2))
def product_upper_triangle(values, include_diagonal=False):
"""
Return an iterator over pairs, (v0, v1), drawn from values.
If `include_diagonal` is True, returns all pairs such that v0 <= v1.
If `include_diagonal` is False, returns all pairs such that v0 < v1.
"""
return all_pairs_matching_predicate(
values,
operator.le if include_diagonal else operator.lt,
)
def all_subindices(index):
"""
Return all valid sub-indices of a pandas Index.
"""
return (
index[start:stop]
for start, stop in product_upper_triangle(range(len(index) + 1))
)
def chrange(start, stop):
"""
Construct an iterable of length-1 strings beginning with `start` and ending
with `stop`.
Parameters
----------
start : str
The first character.
stop : str
The last character.
Returns
-------
chars: iterable[str]
Iterable of strings beginning with start and ending with stop.
Examples
--------
>>> chrange('A', 'C')
['A', 'B', 'C']
"""
return list(map(chr, range(ord(start), ord(stop) + 1)))
def make_trade_data_for_asset_info(dates,
asset_info,
price_start,
price_step_by_date,
price_step_by_sid,
volume_start,
volume_step_by_date,
volume_step_by_sid,
frequency,
writer=None):
"""
Convert the asset info dataframe into a dataframe of trade data for each
    sid, and write to the writer if provided. Write zeros for locations where
assets did not exist. Return a dict of the dataframes, keyed by sid.
"""
trade_data = {}
sids = asset_info.index
price_sid_deltas = np.arange(len(sids), dtype=float64) * price_step_by_sid
price_date_deltas = (np.arange(len(dates), dtype=float64) *
price_step_by_date)
prices = (price_sid_deltas + as_column(price_date_deltas)) + price_start
volume_sid_deltas = np.arange(len(sids)) * volume_step_by_sid
volume_date_deltas = np.arange(len(dates)) * volume_step_by_date
volumes = volume_sid_deltas + as_column(volume_date_deltas) + volume_start
for j, sid in enumerate(sids):
start_date, end_date = asset_info.loc[sid, ['start_date', 'end_date']]
        # Normalize here so that we still generate non-NaN values on the minutes
# for an asset's last trading day.
for i, date in enumerate(dates.normalize()):
if not (start_date <= date <= end_date):
prices[i, j] = 0
volumes[i, j] = 0
df = pd.DataFrame(
{
"open": prices[:, j],
"high": prices[:, j],
"low": prices[:, j],
"close": prices[:, j],
"volume": volumes[:, j],
},
index=dates,
)
if writer:
writer.write_sid(sid, df)
trade_data[sid] = df
return trade_data
def check_allclose(actual,
desired,
rtol=1e-07,
atol=0,
err_msg='',
verbose=True):
"""
Wrapper around np.testing.assert_allclose that also verifies that inputs
are ndarrays.
See Also
--------
np.assert_allclose
"""
if type(actual) != type(desired):
raise AssertionError("%s != %s" % (type(actual), type(desired)))
return assert_allclose(
actual,
desired,
atol=atol,
rtol=rtol,
err_msg=err_msg,
verbose=verbose,
)
def check_arrays(x, y, err_msg='', verbose=True, check_dtypes=True):
"""
Wrapper around np.testing.assert_array_equal that also verifies that inputs
are ndarrays.
See Also
--------
np.assert_array_equal
"""
assert type(x) == type(y), "{x} != {y}".format(x=type(x), y=type(y))
assert x.dtype == y.dtype, "{x.dtype} != {y.dtype}".format(x=x, y=y)
if isinstance(x, LabelArray):
# Check that both arrays have missing values in the same locations...
assert_array_equal(
x.is_missing(),
y.is_missing(),
err_msg=err_msg,
verbose=verbose,
)
# ...then check the actual values as well.
x = x.as_string_array()
y = y.as_string_array()
elif x.dtype.kind in 'mM':
x_isnat = isnat(x)
y_isnat = isnat(y)
assert_array_equal(
x_isnat,
y_isnat,
err_msg="NaTs not equal",
verbose=verbose,
)
# Fill NaTs with zero for comparison.
x = np.where(x_isnat, np.zeros_like(x), x)
y = np.where(y_isnat, np.zeros_like(y), y)
return assert_array_equal(x, y, err_msg=err_msg, verbose=verbose)
class UnexpectedAttributeAccess(Exception):
pass
class ExplodingObject(object):
"""
Object that will raise an exception on any attribute access.
Useful for verifying that an object is never touched during a
function/method call.
"""
def __getattribute__(self, name):
raise UnexpectedAttributeAccess(name)
def write_minute_data(trading_calendar, tempdir, minutes, sids):
first_session = trading_calendar.minute_to_session_label(
minutes[0], direction="none"
)
last_session = trading_calendar.minute_to_session_label(
minutes[-1], direction="none"
)
sessions = trading_calendar.sessions_in_range(first_session, last_session)
write_bcolz_minute_data(
trading_calendar,
sessions,
tempdir.path,
create_minute_bar_data(minutes, sids),
)
return tempdir.path
def create_minute_bar_data(minutes, sids):
length = len(minutes)
for sid_idx, sid in enumerate(sids):
yield sid, pd.DataFrame(
{
'open': np.arange(length) + 10 + sid_idx,
'high': np.arange(length) + 15 + sid_idx,
'low': np.arange(length) + 8 + sid_idx,
'close': np.arange(length) + 10 + sid_idx,
'volume': 100 + sid_idx,
},
index=minutes,
)
def create_daily_bar_data(sessions, sids):
length = len(sessions)
for sid_idx, sid in enumerate(sids):
yield sid, pd.DataFrame(
{
"open": (np.array(range(10, 10 + length)) + sid_idx),
"high": (np.array(range(15, 15 + length)) + sid_idx),
"low": (np.array(range(8, 8 + length)) + sid_idx),
"close": (np.array(range(10, 10 + length)) + sid_idx),
"volume": np.array(range(100, 100 + length)) + sid_idx,
"day": [session.value for session in sessions]
},
index=sessions,
)
def write_daily_data(tempdir, sim_params, sids, trading_calendar):
path = os.path.join(tempdir.path, "testdaily.bcolz")
BcolzDailyBarWriter(path, trading_calendar,
sim_params.start_session,
sim_params.end_session).write(
create_daily_bar_data(sim_params.sessions, sids),
)
return path
def create_data_portal(asset_finder, tempdir, sim_params, sids,
trading_calendar, adjustment_reader=None):
if sim_params.data_frequency == "daily":
daily_path = write_daily_data(tempdir, sim_params, sids,
trading_calendar)
equity_daily_reader = BcolzDailyBarReader(daily_path)
return DataPortal(
asset_finder, trading_calendar,
first_trading_day=equity_daily_reader.first_trading_day,
equity_daily_reader=equity_daily_reader,
adjustment_reader=adjustment_reader
)
else:
minutes = trading_calendar.minutes_in_range(
sim_params.first_open,
sim_params.last_close
)
minute_path = write_minute_data(trading_calendar, tempdir, minutes,
sids)
equity_minute_reader = BcolzMinuteBarReader(minute_path)
return DataPortal(
asset_finder, trading_calendar,
first_trading_day=equity_minute_reader.first_trading_day,
equity_minute_reader=equity_minute_reader,
adjustment_reader=adjustment_reader
)
def write_bcolz_minute_data(trading_calendar, days, path, data):
BcolzMinuteBarWriter(
path,
trading_calendar,
days[0],
days[-1],
US_EQUITIES_MINUTES_PER_DAY
).write(data)
def create_minute_df_for_asset(trading_calendar,
start_dt,
end_dt,
interval=1,
start_val=1,
minute_blacklist=None):
asset_minutes = trading_calendar.minutes_for_sessions_in_range(
start_dt, end_dt
)
minutes_count = len(asset_minutes)
minutes_arr = np.array(range(start_val, start_val + minutes_count))
df = pd.DataFrame(
{
"open": minutes_arr + 1,
"high": minutes_arr + 2,
"low": minutes_arr - 1,
"close": minutes_arr,
"volume": 100 * minutes_arr,
},
index=asset_minutes,
)
if interval > 1:
counter = 0
while counter < len(minutes_arr):
df[counter:(counter + interval - 1)] = 0
counter += interval
if minute_blacklist is not None:
for minute in minute_blacklist:
df.loc[minute] = 0
return df
def create_daily_df_for_asset(trading_calendar, start_day, end_day,
interval=1):
days = trading_calendar.sessions_in_range(start_day, end_day)
days_count = len(days)
days_arr = np.arange(days_count) + 2
df = pd.DataFrame(
{
"open": days_arr + 1,
"high": days_arr + 2,
"low": days_arr - 1,
"close": days_arr,
"volume": days_arr * 100,
},
index=days,
)
if interval > 1:
# only keep every 'interval' rows
for idx, _ in enumerate(days_arr):
if (idx + 1) % interval != 0:
df["open"].iloc[idx] = 0
df["high"].iloc[idx] = 0
df["low"].iloc[idx] = 0
df["close"].iloc[idx] = 0
df["volume"].iloc[idx] = 0
return df
def trades_by_sid_to_dfs(trades_by_sid, index):
for sidint, trades in iteritems(trades_by_sid):
opens = []
highs = []
lows = []
closes = []
volumes = []
for trade in trades:
opens.append(trade.open_price)
highs.append(trade.high)
lows.append(trade.low)
closes.append(trade.close_price)
volumes.append(trade.volume)
yield sidint, pd.DataFrame(
{
"open": opens,
"high": highs,
"low": lows,
"close": closes,
"volume": volumes,
},
index=index,
)
def create_data_portal_from_trade_history(asset_finder, trading_calendar,
tempdir, sim_params, trades_by_sid):
if sim_params.data_frequency == "daily":
path = os.path.join(tempdir.path, "testdaily.bcolz")
writer = BcolzDailyBarWriter(
path, trading_calendar,
sim_params.start_session,
sim_params.end_session
)
writer.write(
trades_by_sid_to_dfs(trades_by_sid, sim_params.sessions),
)
equity_daily_reader = BcolzDailyBarReader(path)
return DataPortal(
asset_finder, trading_calendar,
first_trading_day=equity_daily_reader.first_trading_day,
equity_daily_reader=equity_daily_reader,
)
else:
minutes = trading_calendar.minutes_in_range(
sim_params.first_open,
sim_params.last_close
)
length = len(minutes)
assets = {}
for sidint, trades in iteritems(trades_by_sid):
opens = np.zeros(length)
highs = np.zeros(length)
lows = np.zeros(length)
closes = np.zeros(length)
volumes = np.zeros(length)
for trade in trades:
# put them in the right place
idx = minutes.searchsorted(trade.dt)
opens[idx] = trade.open_price * 1000
highs[idx] = trade.high * 1000
lows[idx] = trade.low * 1000
closes[idx] = trade.close_price * 1000
volumes[idx] = trade.volume
assets[sidint] = pd.DataFrame({
"open": opens,
"high": highs,
"low": lows,
"close": closes,
"volume": volumes,
"dt": minutes
}).set_index("dt")
write_bcolz_minute_data(
trading_calendar,
sim_params.sessions,
tempdir.path,
assets
)
equity_minute_reader = BcolzMinuteBarReader(tempdir.path)
return DataPortal(
asset_finder, trading_calendar,
first_trading_day=equity_minute_reader.first_trading_day,
equity_minute_reader=equity_minute_reader,
)
class FakeDataPortal(DataPortal):
def __init__(self, env, trading_calendar=None,
first_trading_day=None):
if trading_calendar is None:
trading_calendar = get_calendar("NYSE")
super(FakeDataPortal, self).__init__(env.asset_finder,
trading_calendar,
first_trading_day)
def get_spot_value(self, asset, field, dt, data_frequency):
if field == "volume":
return 100
else:
return 1.0
def get_history_window(self, assets, end_dt, bar_count, frequency, field,
data_frequency, ffill=True):
if frequency == "1d":
end_idx = \
self.trading_calendar.all_sessions.searchsorted(end_dt)
days = self.trading_calendar.all_sessions[
(end_idx - bar_count + 1):(end_idx + 1)
]
df = pd.DataFrame(
np.full((bar_count, len(assets)), 100.0),
index=days,
columns=assets
)
return df
class FetcherDataPortal(DataPortal):
"""
Mock dataportal that returns fake data for history and non-fetcher
spot value.
"""
def __init__(self, asset_finder, trading_calendar, first_trading_day=None):
super(FetcherDataPortal, self).__init__(asset_finder, trading_calendar,
first_trading_day)
def get_spot_value(self, asset, field, dt, data_frequency):
# if this is a fetcher field, exercise the regular code path
if self._is_extra_source(asset, field, self._augmented_sources_map):
return super(FetcherDataPortal, self).get_spot_value(
asset, field, dt, data_frequency)
# otherwise just return a fixed value
return int(asset)
# XXX: These aren't actually the methods that are used by the superclasses,
# so these don't do anything, and this class will likely produce unexpected
# results for history().
def _get_daily_window_for_sid(self, asset, field, days_in_window,
extra_slot=True):
return np.arange(days_in_window, dtype=np.float64)
def _get_minute_window_for_asset(self, asset, field, minutes_for_window):
return np.arange(minutes_for_window, dtype=np.float64)
class tmp_assets_db(object):
"""Create a temporary assets sqlite database.
This is meant to be used as a context manager.
Parameters
----------
url : string
The URL for the database connection.
**frames
The frames to pass to the AssetDBWriter.
By default this maps equities:
('A', 'B', 'C') -> map(ord, 'ABC')
See Also
--------
empty_assets_db
tmp_asset_finder
"""
_default_equities = sentinel('_default_equities')
def __init__(self,
url='sqlite:///:memory:',
equities=_default_equities,
**frames):
self._url = url
self._eng = None
if equities is self._default_equities:
equities = make_simple_equity_info(
list(map(ord, 'ABC')),
pd.Timestamp(0),
pd.Timestamp('2015'),
)
frames['equities'] = equities
self._frames = frames
self._eng = None # set in enter and exit
def __enter__(self):
self._eng = eng = create_engine(self._url)
AssetDBWriter(eng).write(**self._frames)
return eng
def __exit__(self, *excinfo):
assert self._eng is not None, '_eng was not set in __enter__'
self._eng.dispose()
self._eng = None
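# --- Illustrative usage sketch (editor's addition): tmp_assets_db yields a
# SQLAlchemy engine with the default 'A', 'B', 'C' equities written. The raw
# SQL below assumes the 'equities' table name created by AssetDBWriter.
def _example_tmp_assets_db_usage():
    with tmp_assets_db() as engine:
        count = engine.execute("SELECT COUNT(*) FROM equities").scalar()
        assert count == 3  # the three default equities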
def empty_assets_db():
"""Context manager for creating an empty assets db.
See Also
--------
tmp_assets_db
"""
return tmp_assets_db(equities=None)
class tmp_asset_finder(tmp_assets_db):
"""Create a temporary asset finder using an in memory sqlite db.
Parameters
----------
url : string
The URL for the database connection.
finder_cls : type, optional
The type of asset finder to create from the assets db.
**frames
Forwarded to ``tmp_assets_db``.
See Also
--------
tmp_assets_db
"""
def __init__(self,
url='sqlite:///:memory:',
finder_cls=AssetFinder,
**frames):
self._finder_cls = finder_cls
super(tmp_asset_finder, self).__init__(url=url, **frames)
def __enter__(self):
return self._finder_cls(super(tmp_asset_finder, self).__enter__())
def empty_asset_finder():
"""Context manager for creating an empty asset finder.
See Also
--------
empty_assets_db
tmp_assets_db
tmp_asset_finder
"""
return tmp_asset_finder(equities=None)
class tmp_trading_env(tmp_asset_finder):
"""Create a temporary trading environment.
Parameters
----------
load : callable, optional
Function that returns benchmark returns and treasury curves.
finder_cls : type, optional
The type of asset finder to create from the assets db.
**frames
Forwarded to ``tmp_assets_db``.
See Also
--------
empty_trading_env
tmp_asset_finder
"""
def __init__(self, load=None, *args, **kwargs):
super(tmp_trading_env, self).__init__(*args, **kwargs)
self._load = load
def __enter__(self):
return TradingEnvironment(
load=self._load,
asset_db_path=super(tmp_trading_env, self).__enter__().engine,
)
def empty_trading_env():
return tmp_trading_env(equities=None)
class SubTestFailures(AssertionError):
def __init__(self, *failures):
self.failures = failures
def __str__(self):
return 'failures:\n %s' % '\n '.join(
'\n '.join((
', '.join('%s=%r' % item for item in scope.items()),
'%s: %s' % (type(exc).__name__, exc),
            )) for scope, exc in self.failures
)
@nottest
def subtest(iterator, *_names):
"""
Construct a subtest in a unittest.
Consider using ``zipline.testing.parameter_space`` when subtests
are constructed over a single input or over the cross-product of multiple
inputs.
``subtest`` works by decorating a function as a subtest. The decorated
function will be run by iterating over the ``iterator`` and *unpacking the
values into the function. If any of the runs fail, the result will be put
into a set and the rest of the tests will be run. Finally, if any failed,
all of the results will be dumped as one failure.
Parameters
----------
iterator : iterable[iterable]
The iterator of arguments to pass to the function.
*name : iterator[str]
The names to use for each element of ``iterator``. These will be used
to print the scope when a test fails. If not provided, it will use the
integer index of the value as the name.
Examples
--------
::
class MyTest(TestCase):
def test_thing(self):
# Example usage inside another test.
@subtest(([n] for n in range(100000)), 'n')
def subtest(n):
self.assertEqual(n % 2, 0, 'n was not even')
subtest()
@subtest(([n] for n in range(100000)), 'n')
def test_decorated_function(self, n):
# Example usage to parameterize an entire function.
self.assertEqual(n % 2, 1, 'n was not odd')
Notes
-----
We use this when we:
* Will never want to run each parameter individually.
* Have a large parameter space we are testing
(see tests/utils/test_events.py).
``nose_parameterized.expand`` will create a test for each parameter
combination which bloats the test output and makes the travis pages slow.
We cannot use ``unittest2.TestCase.subTest`` because nose, pytest, and
nose2 do not support ``addSubTest``.
See Also
--------
zipline.testing.parameter_space
"""
def dec(f):
@wraps(f)
def wrapped(*args, **kwargs):
names = _names
failures = []
for scope in iterator:
scope = tuple(scope)
try:
f(*args + scope, **kwargs)
except Exception as e:
if not names:
names = count()
failures.append((dict(zip(names, scope)), e))
if failures:
raise SubTestFailures(*failures)
return wrapped
return dec
class MockDailyBarReader(object):
def get_value(self, col, sid, dt):
return 100
def create_mock_adjustment_data(splits=None, dividends=None, mergers=None):
if splits is None:
splits = create_empty_splits_mergers_frame()
elif not isinstance(splits, pd.DataFrame):
splits = pd.DataFrame(splits)
if mergers is None:
mergers = create_empty_splits_mergers_frame()
elif not isinstance(mergers, pd.DataFrame):
mergers = pd.DataFrame(mergers)
if dividends is None:
dividends = create_empty_dividends_frame()
elif not isinstance(dividends, pd.DataFrame):
dividends = pd.DataFrame(dividends)
return splits, mergers, dividends
def create_mock_adjustments(tempdir, days, splits=None, dividends=None,
mergers=None):
path = tempdir.getpath("test_adjustments.db")
SQLiteAdjustmentWriter(path, MockDailyBarReader(), days).write(
*create_mock_adjustment_data(splits, dividends, mergers)
)
return path
def assert_timestamp_equal(left, right, compare_nat_equal=True, msg=""):
"""
Assert that two pandas Timestamp objects are the same.
Parameters
----------
left, right : pd.Timestamp
The values to compare.
compare_nat_equal : bool, optional
Whether to consider `NaT` values equal. Defaults to True.
msg : str, optional
A message to forward to `pd.util.testing.assert_equal`.
"""
if compare_nat_equal and left is pd.NaT and right is pd.NaT:
return
return pd.util.testing.assert_equal(left, right, msg=msg)
def powerset(values):
"""
Return the power set (i.e., the set of all subsets) of entries in `values`.
"""
return concat(combinations(values, i) for i in range(len(values) + 1))
def to_series(knowledge_dates, earning_dates):
"""
Helper for converting a dict of strings to a Series of datetimes.
This is just for making the test cases more readable.
"""
return pd.Series(
index=pd.to_datetime(knowledge_dates),
data=pd.to_datetime(earning_dates),
)
def gen_calendars(start, stop, critical_dates):
"""
Generate calendars to use as inputs.
"""
all_dates = pd.date_range(start, stop, tz='utc')
for to_drop in map(list, powerset(critical_dates)):
# Have to yield tuples.
yield (all_dates.drop(to_drop),)
# Also test with the trading calendar.
trading_days = get_calendar("NYSE").all_days
yield (trading_days[trading_days.slice_indexer(start, stop)],)
@contextmanager
def temp_pipeline_engine(calendar, sids, random_seed, symbols=None):
"""
A contextManager that yields a SimplePipelineEngine holding a reference to
an AssetFinder generated via tmp_asset_finder.
Parameters
----------
calendar : pd.DatetimeIndex
Calendar to pass to the constructed PipelineEngine.
sids : iterable[int]
Sids to use for the temp asset finder.
random_seed : int
Integer used to seed instances of SeededRandomLoader.
symbols : iterable[str], optional
Symbols for constructed assets. Forwarded to make_simple_equity_info.
"""
equity_info = make_simple_equity_info(
sids=sids,
start_date=calendar[0],
end_date=calendar[-1],
symbols=symbols,
)
loader = make_seeded_random_loader(random_seed, calendar, sids)
def get_loader(column):
return loader
with tmp_asset_finder(equities=equity_info) as finder:
yield SimplePipelineEngine(get_loader, calendar, finder)
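# --- Illustrative usage sketch (editor's addition): drives the context manager
# above with an arbitrary calendar and sids, using the AssetID factor defined
# near the end of this module so that no pricing data is required.
def _example_temp_pipeline_engine_usage():
    from zipline.pipeline import Pipeline
    calendar = pd.date_range('2014-01-02', '2014-01-31', tz='UTC')
    with temp_pipeline_engine(calendar, sids=[1, 2, 3], random_seed=42) as eng:
        result = eng.run_pipeline(
            Pipeline(columns={'asset_id': AssetID()}),
            calendar[5],
            calendar[-5],
        )
        assert not result.empty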
def parameter_space(__fail_fast=False, **params):
"""
Wrapper around subtest that allows passing keywords mapping names to
iterables of values.
The decorated test function will be called with the cross-product of all
possible inputs
Examples
--------
>>> from unittest import TestCase
>>> class SomeTestCase(TestCase):
... @parameter_space(x=[1, 2], y=[2, 3])
... def test_some_func(self, x, y):
... # Will be called with every possible combination of x and y.
... self.assertEqual(somefunc(x, y), expected_result(x, y))
See Also
--------
zipline.testing.subtest
"""
def decorator(f):
argspec = getargspec(f)
if argspec.varargs:
raise AssertionError("parameter_space() doesn't support *args")
if argspec.keywords:
raise AssertionError("parameter_space() doesn't support **kwargs")
if argspec.defaults:
raise AssertionError("parameter_space() doesn't support defaults.")
# Skip over implicit self.
argnames = argspec.args
if argnames[0] == 'self':
argnames = argnames[1:]
extra = set(params) - set(argnames)
if extra:
raise AssertionError(
"Keywords %s supplied to parameter_space() are "
"not in function signature." % extra
)
unspecified = set(argnames) - set(params)
if unspecified:
raise AssertionError(
"Function arguments %s were not "
"supplied to parameter_space()." % extra
)
def make_param_sets():
return product(*(params[name] for name in argnames))
if __fail_fast:
@wraps(f)
def wrapped(self):
for args in make_param_sets():
f(self, *args)
return wrapped
else:
@wraps(f)
def wrapped(*args, **kwargs):
subtest(make_param_sets(), *argnames)(f)(*args, **kwargs)
return wrapped
return decorator
def create_empty_dividends_frame():
return pd.DataFrame(
np.array(
[],
dtype=[
('ex_date', 'datetime64[ns]'),
('pay_date', 'datetime64[ns]'),
('record_date', 'datetime64[ns]'),
('declared_date', 'datetime64[ns]'),
('amount', 'float64'),
('sid', 'int32'),
],
),
index=pd.DatetimeIndex([], tz='UTC'),
)
def create_empty_splits_mergers_frame():
return pd.DataFrame(
np.array(
[],
dtype=[
('effective_date', 'int64'),
('ratio', 'float64'),
('sid', 'int64'),
],
),
index=pd.DatetimeIndex([]),
)
def make_alternating_boolean_array(shape, first_value=True):
"""
Create a 2D numpy array with the given shape containing alternating values
of False, True, False, True,... along each row and each column.
Examples
--------
>>> make_alternating_boolean_array((4,4))
array([[ True, False, True, False],
[False, True, False, True],
[ True, False, True, False],
[False, True, False, True]], dtype=bool)
>>> make_alternating_boolean_array((4,3), first_value=False)
array([[False, True, False],
[ True, False, True],
[False, True, False],
[ True, False, True]], dtype=bool)
"""
if len(shape) != 2:
raise ValueError(
'Shape must be 2-dimensional. Given shape was {}'.format(shape)
)
alternating = np.empty(shape, dtype=np.bool)
for row in alternating:
row[::2] = first_value
row[1::2] = not(first_value)
first_value = not(first_value)
return alternating
def make_cascading_boolean_array(shape, first_value=True):
"""
Create a numpy array with the given shape containing cascading boolean
values, with `first_value` being the top-left value.
Examples
--------
>>> make_cascading_boolean_array((4,4))
array([[ True, True, True, False],
[ True, True, False, False],
[ True, False, False, False],
[False, False, False, False]], dtype=bool)
>>> make_cascading_boolean_array((4,2))
array([[ True, False],
[False, False],
[False, False],
[False, False]], dtype=bool)
>>> make_cascading_boolean_array((2,4))
array([[ True, True, True, False],
[ True, True, False, False]], dtype=bool)
"""
if len(shape) != 2:
raise ValueError(
'Shape must be 2-dimensional. Given shape was {}'.format(shape)
)
cascading = np.full(shape, not(first_value), dtype=np.bool)
ending_col = shape[1] - 1
for row in cascading:
if ending_col > 0:
row[:ending_col] = first_value
ending_col -= 1
else:
break
return cascading
@expect_dimensions(array=2)
def permute_rows(seed, array):
"""
Shuffle each row in ``array`` based on permutations generated by ``seed``.
Parameters
----------
seed : int
Seed for numpy.RandomState
array : np.ndarray[ndim=2]
Array over which to apply permutations.
"""
rand = np.random.RandomState(seed)
return np.apply_along_axis(rand.permutation, 1, array)
@nottest
def make_test_handler(testcase, *args, **kwargs):
"""
Returns a TestHandler which will be used by the given testcase. This
handler can be used to test log messages.
Parameters
----------
testcase: unittest.TestCase
The test class in which the log handler will be used.
*args, **kwargs
Forwarded to the new TestHandler object.
Returns
-------
handler: logbook.TestHandler
The handler to use for the test case.
"""
handler = TestHandler(*args, **kwargs)
testcase.addCleanup(handler.close)
return handler
def write_compressed(path, content):
"""
Write a compressed (gzipped) file to `path`.
"""
with gzip.open(path, 'wb') as f:
f.write(content)
def read_compressed(path):
"""
    Read a compressed (gzipped) file from `path`.
"""
with gzip.open(path, 'rb') as f:
return f.read()
zipline_git_root = abspath(
join(realpath(dirname(__file__)), '..', '..'),
)
@nottest
def test_resource_path(*path_parts):
return os.path.join(zipline_git_root, 'tests', 'resources', *path_parts)
@contextmanager
def patch_os_environment(remove=None, **values):
"""
Context manager for patching the operating system environment.
"""
old_values = {}
remove = remove or []
for key in remove:
old_values[key] = os.environ.pop(key)
    for key, value in iteritems(values):
        old_values[key] = os.getenv(key)
        os.environ[key] = value
    try:
        yield
    finally:
        for old_key, old_value in iteritems(old_values):
            if old_value is None:
                # Value was not present when we entered, so delete it if it's
                # still present.
                try:
                    del os.environ[old_key]
                except KeyError:
                    pass
else:
# Restore the old value.
os.environ[old_key] = old_value
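# --- Illustrative usage sketch (editor's addition): the variable name and
# value are placeholders; whatever was set before the block is restored (or
# removed again) on exit.
def _example_patch_os_environment_usage():
    with patch_os_environment(ZIPLINE_ROOT='/tmp/zipline-test'):
        assert os.environ['ZIPLINE_ROOT'] == '/tmp/zipline-test'
    # Outside the block the previous state of ZIPLINE_ROOT is back in place.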
class tmp_dir(TempDirectory, object):
"""New style class that wrapper for TempDirectory in python 2.
"""
pass
class _TmpBarReader(with_metaclass(ABCMeta, tmp_dir)):
"""A helper for tmp_bcolz_equity_minute_bar_reader and
tmp_bcolz_equity_daily_bar_reader.
Parameters
----------
env : TradingEnvironment
The trading env.
days : pd.DatetimeIndex
The days to write for.
data : dict[int -> pd.DataFrame]
The data to write.
path : str, optional
The path to the directory to write the data into. If not given, this
will be a unique name.
"""
@abstractproperty
def _reader_cls(self):
raise NotImplementedError('_reader')
@abstractmethod
def _write(self, env, days, path, data):
raise NotImplementedError('_write')
def __init__(self, env, days, data, path=None):
super(_TmpBarReader, self).__init__(path=path)
self._env = env
self._days = days
self._data = data
def __enter__(self):
tmpdir = super(_TmpBarReader, self).__enter__()
env = self._env
try:
self._write(
env,
self._days,
tmpdir.path,
self._data,
)
return self._reader_cls(tmpdir.path)
except:
self.__exit__(None, None, None)
raise
class tmp_bcolz_equity_minute_bar_reader(_TmpBarReader):
"""A temporary BcolzMinuteBarReader object.
Parameters
----------
env : TradingEnvironment
The trading env.
days : pd.DatetimeIndex
The days to write for.
data : iterable[(int, pd.DataFrame)]
The data to write.
path : str, optional
The path to the directory to write the data into. If not given, this
will be a unique name.
See Also
--------
tmp_bcolz_equity_daily_bar_reader
"""
_reader_cls = BcolzMinuteBarReader
_write = staticmethod(write_bcolz_minute_data)
class tmp_bcolz_equity_daily_bar_reader(_TmpBarReader):
"""A temporary BcolzDailyBarReader object.
Parameters
----------
env : TradingEnvironment
The trading env.
days : pd.DatetimeIndex
The days to write for.
data : dict[int -> pd.DataFrame]
The data to write.
path : str, optional
The path to the directory to write the data into. If not given, this
will be a unique name.
See Also
--------
    tmp_bcolz_equity_minute_bar_reader
"""
_reader_cls = BcolzDailyBarReader
@staticmethod
def _write(env, days, path, data):
BcolzDailyBarWriter(path, days).write(data)
@contextmanager
def patch_read_csv(url_map, module=pd, strict=False):
"""Patch pandas.read_csv to map lookups from url to another.
Parameters
----------
url_map : mapping[str or file-like object -> str or file-like object]
The mapping to use to redirect read_csv calls.
module : module, optional
The module to patch ``read_csv`` on. By default this is ``pandas``.
This should be set to another module if ``read_csv`` is early-bound
like ``from pandas import read_csv`` instead of late-bound like:
``import pandas as pd; pd.read_csv``.
strict : bool, optional
If true, then this will assert that ``read_csv`` is only called with
elements in the ``url_map``.
"""
read_csv = pd.read_csv
def patched_read_csv(filepath_or_buffer, *args, **kwargs):
if filepath_or_buffer in url_map:
return read_csv(url_map[filepath_or_buffer], *args, **kwargs)
elif not strict:
return read_csv(filepath_or_buffer, *args, **kwargs)
else:
raise AssertionError(
                'attempted to call read_csv on %r which is not in the url map' %
filepath_or_buffer,
)
with patch.object(module, 'read_csv', patched_read_csv):
yield
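# --- Illustrative usage sketch (editor's addition): redirects a remote CSV
# URL to a local fixture. Both the URL and the fixture filename are
# placeholders, and the fixture is assumed to exist under tests/resources.
def _example_patch_read_csv_usage():
    url_map = {
        'https://example.com/benchmark.csv': test_resource_path('example.csv'),
    }
    with patch_read_csv(url_map, strict=True):
        df = pd.read_csv('https://example.com/benchmark.csv')
        assert not df.empty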
def copy_market_data(src_market_data_dir, dest_root_dir):
symbol = 'SPY'
filenames = (get_benchmark_filename(symbol), INDEX_MAPPING[symbol][1])
ensure_directory(os.path.join(dest_root_dir, 'data'))
for filename in filenames:
shutil.copyfile(
os.path.join(src_market_data_dir, filename),
os.path.join(dest_root_dir, 'data', filename)
)
@curry
def ensure_doctest(f, name=None):
"""Ensure that an object gets doctested. This is useful for instances
of objects like curry or partial which are not discovered by default.
Parameters
----------
f : any
The thing to doctest.
name : str, optional
The name to use in the doctest function mapping. If this is None,
        then ``f.__name__`` will be used.
Returns
-------
f : any
``f`` unchanged.
"""
_getframe(2).f_globals.setdefault('__test__', {})[
f.__name__ if name is None else name
] = f
return f
class RecordBatchBlotter(Blotter):
"""Blotter that tracks how its batch_order method was called.
"""
def __init__(self, data_frequency):
super(RecordBatchBlotter, self).__init__(data_frequency)
self.order_batch_called = []
def batch_order(self, *args, **kwargs):
self.order_batch_called.append((args, kwargs))
return super(RecordBatchBlotter, self).batch_order(*args, **kwargs)
####################################
# Shared factors for pipeline tests.
####################################
class AssetID(CustomFactor):
"""
CustomFactor that returns the AssetID of each asset.
Useful for providing a Factor that produces a different value for each
asset.
"""
window_length = 1
inputs = ()
def compute(self, today, assets, out):
out[:] = assets
class AssetIDPlusDay(CustomFactor):
window_length = 1
inputs = ()
def compute(self, today, assets, out):
out[:] = assets + today.day
class OpenPrice(CustomFactor):
window_length = 1
inputs = [USEquityPricing.open]
def compute(self, today, assets, out, open):
out[:] = open
| bartosh/zipline | zipline/testing/core.py | Python | apache-2.0 | 47,174 |
import socket
import re
from xii import error, util
# sample validator
# keys = Dict(
# [
# RequiredKey("foo", String(), desc="A string to manipulate something"),
# Key("bar", String(), desc="something usefull")
# ],
# desc="Implement this stuff as you want"
# )
class Validator():
def __init__(self, example=None, description=None):
self._description = description
self._example = example
def structure(self, accessor):
if accessor == "example":
return self._example
return self._description
class TypeCheck(Validator):
want_type = None
want = "none"
def __init__(self, example, desc=None):
if desc is None:
desc = self.want
Validator.__init__(self, example, desc)
def validate(self, pre, structure):
if isinstance(structure, self.want_type):
return True
raise error.ValidatorError("{} needs to be {}".format(pre, self.want))
return False
class Int(TypeCheck):
want = "int"
want_type = int
class Bool(TypeCheck):
want = "bool"
want_type = bool
class String(TypeCheck):
want = "string"
want_type = str
class Ip(TypeCheck):
want = "ip"
want_type = str
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
try:
socket.inet_pton(socket.AF_INET, structure)
return True
except socket.error:
try:
socket.inet_pton(socket.AF_INET6, structure)
return True
except socket.error:
pass
raise error.ValidatorError("{} is not a valid IP address".format(pre))
class ByteSize(TypeCheck):
want = "memory"
want_type = str
validator = re.compile("(?P<value>\d+)(\ *)(?P<unit>[kMGT])")
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
if self.validator.match(structure):
return True
else:
raise error.ValidatorError("{} is not a valid memory size".format(pre))
class List(TypeCheck):
want = "list"
want_type = list
def __init__(self, schema, desc=None):
TypeCheck.__init__(self, desc)
self.schema = schema
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate_each(item):
return self.schema.validate(pre, item)
return sum(map(_validate_each, structure)) > 1
def structure(self, accessor):
return [self.schema.structure(accessor)]
class Or(Validator):
def __init__(self, schemas, desc=None, exclusive=True):
Validator.__init__(self, desc)
self.schemas = schemas
self.exclusive = exclusive
def validate(self, pre, structure):
errors = []
def _validate_each(schema):
try:
return schema.validate(pre, structure)
except error.ValidatorError as err:
errors.append(err)
return False
state = sum(map(_validate_each, self.schemas))
if self.exclusive and (state > 1 or state == 0):
def _error_lines():
it = iter(errors)
yield " ".join(next(it).error())
for err in it:
yield "or"
yield " ".join(err.error())
raise error.ValidatorError(["{} is ambigous:".format(pre)] +
list(_error_lines()))
return True
def structure(self, accessor):
desc = []
descs = [ s.structure(accessor) for s in self.schemas ]
for d in descs[:-1]:
desc.append(d)
desc.append("__or__")
desc.append(descs[-1])
return desc
# Key validators --------------------------------------------------------------
class KeyValidator(Validator):
def structure(self, accessor, overwrite=None):
name = self.name
if overwrite:
name = overwrite
return ("{}".format(name), self.schema.structure(accessor))
class VariableKeys(KeyValidator):
def __init__(self, schema, example, desc=None):
KeyValidator.__init__(self, desc, example)
self.name = "*"
self.example = example
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
def _validate_each(pair):
(name, next_structure) = pair
return self.schema.validate(pre + " > " + name, next_structure)
return sum(map(_validate_each, structure.items())) >= 1
def structure(self, accessor):
if accessor == "example":
return KeyValidator.structure(self, accessor, self.example)
return KeyValidator.structure(self, accessor)
class Key(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
KeyValidator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
if not isinstance(structure, dict):
raise error.ValidatorError("{} needs to be a dict".format(pre))
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
return False
return self.schema.validate(pre + " > " + self.name, value_of_key)
class RequiredKey(KeyValidator):
def __init__(self, name, schema, desc=None, example=None):
Validator.__init__(self, desc, example)
self.name = name
self.schema = schema
def validate(self, pre, structure):
value_of_key = util.safe_get(self.name, structure)
if not value_of_key:
raise error.ValidatorError("{} must have {} "
"defined".format(pre, self.name))
return self.schema.validate(pre + " > " + self.name, value_of_key)
class Dict(TypeCheck):
want = "dictonary"
want_type = dict
def __init__(self, schemas, desc=None):
TypeCheck.__init__(self, desc)
self.schemas = schemas
def validate(self, pre, structure):
TypeCheck.validate(self, pre, structure)
def _validate(schema):
return schema.validate(pre, structure)
return sum(map(_validate, self.schemas)) >= 1
def structure(self, accessor):
desc_dict = {}
for key, value in [s.structure(accessor) for s in self.schemas]:
desc_dict[key] = value
return desc_dict
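# --- Illustrative usage sketch (editor's addition): builds a schema in the
# spirit of the sample at the top of this module and validates a structure.
# Note that the leaf validators take an example value as their first argument;
# the key names and values below are arbitrary.
def _example_schema_usage():
    schema = Dict([
        RequiredKey("name", String("Alice"), desc="user name"),
        Key("age", Int(30), desc="age in years"),
    ], desc="example person schema")
    assert schema.validate("person", {"name": "Bob", "age": 42})
    try:
        schema.validate("person", {"age": 42})
    except error.ValidatorError:
        pass  # "name" is required, so this structure is rejected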
| xii/xii | src/xii/validator.py | Python | apache-2.0 | 6,612 |
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from email.mime import text
import email.utils
import smtplib
import socket
import mailjet_rest
from scoreboard import main
app = main.get_app()
class MailFailure(Exception):
"""Inability to send mail."""
pass
def send(message, subject, to, to_name=None, sender=None, sender_name=None):
"""Send an email."""
sender = sender or app.config.get('MAIL_FROM')
sender_name = sender_name or app.config.get('MAIL_FROM_NAME') or ''
mail_provider = app.config.get('MAIL_PROVIDER')
if mail_provider is None:
app.logger.error('No MAIL_PROVIDER configured!')
raise MailFailure('No MAIL_PROVIDER configured!')
elif mail_provider == 'smtp':
_send_smtp(message, subject, to, to_name, sender, sender_name)
elif mail_provider == 'mailjet':
_send_mailjet(message, subject, to, to_name, sender, sender_name)
else:
app.logger.error('Invalid MAIL_PROVIDER configured!')
raise MailFailure('Invalid MAIL_PROVIDER configured!')
def _send_smtp(message, subject, to, to_name, sender, sender_name):
"""SMTP implementation of sending email."""
host = app.config.get('MAIL_HOST')
if not host:
raise MailFailure('SMTP Server Not Configured')
try:
server = smtplib.SMTP(host)
except (smtplib.SMTPConnectError, socket.error) as ex:
app.logger.error('Unable to send mail: %s', str(ex))
raise MailFailure('Error connecting to SMTP server.')
msg = text.MIMEText(message)
msg['Subject'] = subject
msg['To'] = email.utils.formataddr((to_name, to))
msg['From'] = email.utils.formataddr((sender_name, sender))
try:
if app.debug:
server.set_debuglevel(True)
server.sendmail(sender, [to], msg.as_string())
except (smtplib.SMTPException, socket.error) as ex:
app.logger.error('Unable to send mail: %s', str(ex))
raise MailFailure('Error sending mail to SMTP server.')
finally:
try:
server.quit()
except smtplib.SMTPException:
pass
def _send_mailjet(message, subject, to, to_name, sender, sender_name):
"""Mailjet implementation of sending email."""
api_key = app.config.get('MJ_APIKEY_PUBLIC')
api_secret = app.config.get('MJ_APIKEY_PRIVATE')
if not api_key or not api_secret:
app.logger.error('Missing MJ_APIKEY_PUBLIC/MJ_APIKEY_PRIVATE!')
return
# Note the data structures we use are api v3.1
client = mailjet_rest.Client(
auth=(api_key, api_secret),
api_url='https://api.mailjet.com/',
version='v3.1')
from_obj = {
"Email": sender,
}
if sender_name:
from_obj["Name"] = sender_name
to_obj = [{
"Email": to,
}]
if to_name:
to_obj[0]["Name"] = to_name
message = {
"From": from_obj,
"To": to_obj,
"Subject": subject,
"TextPart": message,
}
result = client.send.create(data={'Messages': [message]})
if result.status_code != 200:
app.logger.error(
'Error sending via mailjet: (%d) %r',
result.status_code, result.text)
raise MailFailure('Error sending via mailjet!')
try:
j = result.json()
except Exception:
app.logger.error('Error sending via mailjet: %r', result.text)
raise MailFailure('Error sending via mailjet!')
if j['Messages'][0]['Status'] != 'success':
app.logger.error('Error sending via mailjet: %r', j)
raise MailFailure('Error sending via mailjet!')
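# --- Illustrative usage sketch (editor's addition): shows how callers are
# expected to invoke send() and handle MailFailure; the addresses and message
# text are placeholders.
def _example_send_usage():
    try:
        send(
            'Your scoreboard account has been created.',
            'Welcome to the CTF',
            'player@example.com',
            to_name='Player One',
        )
    except MailFailure as ex:
        app.logger.warning('Could not deliver welcome mail: %s', ex)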
| google/ctfscoreboard | scoreboard/mail.py | Python | apache-2.0 | 4,180 |
import pytest
from ray.train.callbacks.results_preprocessors import (
ExcludedKeysResultsPreprocessor,
IndexedResultsPreprocessor,
SequentialResultsPreprocessor,
AverageResultsPreprocessor,
MaxResultsPreprocessor,
WeightedAverageResultsPreprocessor,
)
def test_excluded_keys_results_preprocessor():
results = [{"a": 1, "b": 2}, {"a": 3, "b": 4}]
expected = [{"b": 2}, {"b": 4}]
preprocessor = ExcludedKeysResultsPreprocessor("a")
preprocessed_results = preprocessor.preprocess(results)
assert preprocessed_results == expected
def test_indexed_results_preprocessor():
results = [{"a": 1}, {"a": 2}, {"a": 3}, {"a": 4}]
expected = [{"a": 1}, {"a": 3}]
preprocessor = IndexedResultsPreprocessor([0, 2])
preprocessed_results = preprocessor.preprocess(results)
assert preprocessed_results == expected
def test_sequential_results_preprocessor():
results = [{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, {"a": 7, "b": 8}]
expected = [{"b": 2}, {"b": 6}]
preprocessor_1 = ExcludedKeysResultsPreprocessor("a")
# [{"b": 2}, {"b": 4}, {"b": 6}, {"b": 8}]
preprocessor_2 = IndexedResultsPreprocessor([0, 2])
preprocessor = SequentialResultsPreprocessor([preprocessor_1, preprocessor_2])
preprocessed_results = preprocessor.preprocess(results)
assert preprocessed_results == expected
def test_average_results_preprocessor():
from copy import deepcopy
import numpy as np
results = [{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, {"a": 7, "b": 8}]
expected = deepcopy(results)
for res in expected:
res.update(
{
"avg(a)": np.mean([result["a"] for result in results]),
"avg(b)": np.mean([result["b"] for result in results]),
}
)
preprocessor = AverageResultsPreprocessor(["a", "b"])
preprocessed_results = preprocessor.preprocess(results)
assert preprocessed_results == expected
def test_max_results_preprocessor():
from copy import deepcopy
import numpy as np
results = [{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, {"a": 7, "b": 8}]
expected = deepcopy(results)
for res in expected:
res.update(
{
"max(a)": np.max([result["a"] for result in results]),
"max(b)": np.max([result["b"] for result in results]),
}
)
preprocessor = MaxResultsPreprocessor(["a", "b"])
preprocessed_results = preprocessor.preprocess(results)
assert preprocessed_results == expected
def test_weighted_average_results_preprocessor():
from copy import deepcopy
import numpy as np
results = [{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, {"a": 7, "b": 8}]
expected = deepcopy(results)
total_weight = np.sum([result["b"] for result in results])
for res in expected:
res.update(
{
"weight_avg_b(a)": np.sum(
[result["a"] * result["b"] / total_weight for result in results]
)
}
)
preprocessor = WeightedAverageResultsPreprocessor(["a"], "b")
preprocessed_results = preprocessor.preprocess(results)
assert preprocessed_results == expected
@pytest.mark.parametrize(
("results_preprocessor", "expected_value"),
[(AverageResultsPreprocessor, 2.0), (MaxResultsPreprocessor, 3.0)],
)
def test_warning_in_aggregate_results_preprocessors(
caplog, results_preprocessor, expected_value
):
import logging
from copy import deepcopy
from ray.util import debug
caplog.at_level(logging.WARNING)
results1 = [{"a": 1}, {"a": 2}, {"a": 3}, {"a": 4}]
results2 = [{"a": 1}, {"a": "invalid"}, {"a": 3}, {"a": "invalid"}]
results3 = [{"a": "invalid"}, {"a": "invalid"}, {"a": "invalid"}, {"a": "invalid"}]
results4 = [{"a": 1}, {"a": 2}, {"a": 3}, {"c": 4}]
# test case 1: metric key `b` is missing from all workers
results_preprocessor1 = results_preprocessor(["b"])
results_preprocessor1.preprocess(results1)
assert "`b` is not reported from workers, so it is ignored." in caplog.text
# test case 2: some values of key `a` have invalid data type
results_preprocessor2 = results_preprocessor(["a"])
expected2 = deepcopy(results2)
aggregation_key = results_preprocessor2.aggregate_fn.wrap_key("a")
for res in expected2:
res.update({aggregation_key: expected_value})
assert results_preprocessor2.preprocess(results2) == expected2
# test case 3: all key `a` values are invalid
results_preprocessor2.preprocess(results3)
assert "`a` value type is not valid, so it is ignored." in caplog.text
# test case 4: some workers don't report key `a`
expected4 = deepcopy(results4)
aggregation_key = results_preprocessor2.aggregate_fn.wrap_key("a")
for res in expected4:
res.update({aggregation_key: expected_value})
assert results_preprocessor2.preprocess(results4) == expected4
for record in caplog.records:
assert record.levelname == "WARNING"
debug.reset_log_once("b")
debug.reset_log_once("a")
def test_warning_in_weighted_average_results_preprocessors(caplog):
import logging
from copy import deepcopy
    caplog.set_level(logging.WARNING)
results1 = [{"a": 1}, {"a": 2}, {"a": 3}, {"a": 4}]
results2 = [{"b": 1}, {"b": 2}, {"b": 3}, {"b": 4}]
results3 = [
{"a": 1, "c": 3},
{"a": 2, "c": "invalid"},
{"a": "invalid", "c": 1},
{"a": 4, "c": "invalid"},
]
results4 = [
{"a": 1, "c": "invalid"},
{"a": 2, "c": "invalid"},
{"a": 3, "c": "invalid"},
{"a": 4, "c": "invalid"},
]
# test case 1: weight key `b` is not reported from all workers
results_preprocessor1 = WeightedAverageResultsPreprocessor(["a"], "b")
expected1 = deepcopy(results1)
for res in expected1:
res.update({"weight_avg_b(a)": 2.5})
assert results_preprocessor1.preprocess(results1) == expected1
assert (
"Averaging weight `b` is not reported by all workers in `train.report()`."
in caplog.text
)
assert "Use equal weight instead." in caplog.text
# test case 2: metric key `a` (to be averaged) is not reported from all workers
results_preprocessor1.preprocess(results2)
assert "`a` is not reported from workers, so it is ignored." in caplog.text
# test case 3: both metric and weight keys have invalid data type
results_preprocessor2 = WeightedAverageResultsPreprocessor(["a"], "c")
expected3 = deepcopy(results3)
for res in expected3:
res.update({"weight_avg_c(a)": 1.0})
assert results_preprocessor2.preprocess(results3) == expected3
# test case 4: all weight values are invalid
expected4 = deepcopy(results4)
for res in expected4:
res.update({"weight_avg_c(a)": 2.5})
assert results_preprocessor2.preprocess(results4) == expected4
assert "Averaging weight `c` value type is not valid." in caplog.text
for record in caplog.records:
assert record.levelname == "WARNING"
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", "-x", __file__]))
| ray-project/ray | python/ray/train/tests/test_results_preprocessors.py | Python | apache-2.0 | 7,269 |
#
# Copyright 2015-2019, Institute for Systems Biology
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from builtins import map
from builtins import str
from builtins import object
from copy import deepcopy
import logging
from bq_data_access.v2.seqpeek.seqpeek_interpro import InterProDataProvider
logger = logging.getLogger('main_logger')
SAMPLE_ID_FIELD_NAME = 'sample_id'
TRACK_ID_FIELD = "tumor"
COORDINATE_FIELD_NAME = 'uniprot_aapos'
PROTEIN_ID_FIELD = 'ensg_id'
PROTEIN_DOMAIN_DB = 'PFAM'
SEQPEEK_VIEW_DEBUG_MODE = False
def get_number_of_unique_samples(track):
sample_ids = set()
for mutation in track['mutations']:
sample_ids.add(mutation[SAMPLE_ID_FIELD_NAME])
return len(sample_ids)
def get_number_of_mutated_positions(track):
sample_locations = set()
for mutation in track['mutations']:
sample_locations.add(mutation[COORDINATE_FIELD_NAME])
return len(sample_locations)
# TODO remove if not needed
def clean_track_mutations(mutations_array):
retval = []
for mutation in mutations_array:
cleaned = deepcopy(mutation)
cleaned[COORDINATE_FIELD_NAME] = int(mutation[COORDINATE_FIELD_NAME])
retval.append(cleaned)
return retval
def sort_track_mutations(mutations_array):
return sorted(mutations_array, key=lambda k: k[COORDINATE_FIELD_NAME])
def get_track_statistics_by_track_type(track, cohort_info_map):
track_id = track[TRACK_ID_FIELD]
result = {
'samples': {
'numberOf': get_number_of_unique_samples(track),
'mutated_positions': get_number_of_mutated_positions(track)
}
}
if track['type'] == 'tumor':
cohort_info = cohort_info_map[track_id]
result['cohort_size'] = cohort_info['size']
else:
# Do not assign cohort size for the 'COMBINED' track.
result['cohort_size'] = None
return result
def filter_protein_domains(match_array):
return [m for m in match_array if m['dbname'] == PROTEIN_DOMAIN_DB]
def get_table_row_id(tumor_type):
return "seqpeek_row_{0}".format(tumor_type)
def build_seqpeek_regions(protein_data):
return [{
'type': 'exon',
'start': 0,
'end': protein_data['length']
}]
def build_summary_track(tracks):
    all_mutations = []
    for track in tracks:
        all_mutations.extend(track["mutations"])
    return {
        'mutations': all_mutations,
'label': 'COMBINED',
'tumor': 'none-combined',
'type': 'summary'
}
def get_track_label_and_cohort_information(track_id_value, cohort_info_map):
cohort_info = cohort_info_map[track_id_value]
label = cohort_info['name']
cohort_size = cohort_info['size']
return label, cohort_size
def get_track_label(track, cohort_info_array):
# The IDs in cohort_info_array are integers, whereas the track IDs are strings.
cohort_map = {str(item['id']): item['name'] for item in cohort_info_array}
return cohort_map[track[TRACK_ID_FIELD]]
def get_protein_domains(uniprot_id):
protein = InterProDataProvider().get_data(uniprot_id)
return protein
class MAFData(object):
def __init__(self, cohort_info, data):
self.cohort_info = cohort_info
self.data = data
@classmethod
def from_dict(cls, param):
return cls(param['cohort_set'], param['items'])
def build_track_data(track_id_list, all_tumor_mutations):
tracks = []
for track_id in track_id_list:
tracks.append({
TRACK_ID_FIELD: track_id,
'mutations': [m for m in all_tumor_mutations if int(track_id) in set(m['cohort'])]
})
return tracks
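# Illustrative sketch (not part of the original module): build_track_data() expects
# each mutation dict to carry a 'cohort' list of integer cohort ids and groups the
# mutations into one track per requested id. The field values below are assumptions
# based on the filter expression above, not real MAF rows.
#   mutations = [
#       {'sample_id': 's1', 'uniprot_aapos': 12, 'cohort': [1, 2]},
#       {'sample_id': 's2', 'uniprot_aapos': 40, 'cohort': [2]},
#   ]
#   build_track_data(['1', '2'], mutations)
#   # -> [{'tumor': '1', 'mutations': [mutations[0]]},
#   #     {'tumor': '2', 'mutations': [mutations[0], mutations[1]]}]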
def find_uniprot_id(mutations):
uniprot_id = None
for m in mutations:
if PROTEIN_ID_FIELD in m:
uniprot_id = m[PROTEIN_ID_FIELD]
break
return uniprot_id
def get_genes_tumors_lists_debug():
return {
'symbol_list': ['EGFR', 'TP53', 'PTEN'],
'disease_codes': ['ACC', 'BRCA', 'GBM']
}
def get_genes_tumors_lists_remote():
context = {
'symbol_list': [],
'track_id_list': []
}
return context
def get_genes_tumors_lists():
if SEQPEEK_VIEW_DEBUG_MODE:
return get_genes_tumors_lists_debug()
else:
return get_genes_tumors_lists_remote()
def get_track_id_list(param):
return list(map(str, param))
def format_removed_row_statistics_to_list(stats_dict):
result = []
for key, value in list(stats_dict.items()):
result.append({
'name': key,
'num': value
})
return result
class SeqPeekViewDataBuilder(object):
def build_view_data(self, hugo_symbol, filtered_maf_vector, seqpeek_cohort_info, cohort_id_list, removed_row_statistics, tables_used):
context = get_genes_tumors_lists()
cohort_info_map = {str(item['id']): item for item in seqpeek_cohort_info}
track_id_list = get_track_id_list(cohort_id_list)
# Since the gene (hugo_symbol) parameter is part of the GNAB feature ID,
# it will be sanity-checked in the SeqPeekMAFDataAccess instance.
uniprot_id = find_uniprot_id(filtered_maf_vector)
logging.info("UniProt ID: " + str(uniprot_id))
protein_data = get_protein_domains(uniprot_id)
track_data = build_track_data(track_id_list, filtered_maf_vector)
plot_data = {
'gene_label': hugo_symbol,
'tracks': track_data,
'protein': protein_data
}
# Pre-processing
# - Sort mutations by chromosomal coordinate
for track in plot_data['tracks']:
track['mutations'] = sort_track_mutations(track['mutations'])
# Annotations
# - Add label, possibly human readable
# - Add type that indicates whether the track is driven by data from search or
# if the track is aggregate
for track in plot_data['tracks']:
track['type'] = 'tumor'
label, cohort_size = get_track_label_and_cohort_information(track[TRACK_ID_FIELD], cohort_info_map)
track['label'] = label
# Display the "combined" track only if more than one cohort is visualized
if len(cohort_id_list) >= 2:
plot_data['tracks'].append(build_summary_track(plot_data['tracks']))
for track in plot_data['tracks']:
# Calculate statistics
track['statistics'] = get_track_statistics_by_track_type(track, cohort_info_map)
# Unique ID for each row
track['render_info'] = {
'row_id': get_table_row_id(track[TRACK_ID_FIELD])
}
plot_data['regions'] = build_seqpeek_regions(plot_data['protein'])
plot_data['protein']['matches'] = filter_protein_domains(plot_data['protein']['matches'])
tumor_list = ','.join(track_id_list)
context.update({
'plot_data': plot_data,
'hugo_symbol': hugo_symbol,
'tumor_list': tumor_list,
'cohort_id_list': track_id_list,
'removed_row_statistics': format_removed_row_statistics_to_list(removed_row_statistics),
'bq_tables': list(set(tables_used))
})
return context
| isb-cgc/ISB-CGC-Webapp | bq_data_access/v2/seqpeek/seqpeek_view.py | Python | apache-2.0 | 7,709 |
import numpy as np
import xgboost as xgb
import pytest
try:
    import shap
except ImportError:
    shap = None
pytestmark = pytest.mark.skipif(shap is None, reason="Requires shap package")
# Check integration is not broken from xgboost side
# Changes in binary format may cause problems
def test_with_shap():
from sklearn.datasets import fetch_california_housing
X, y = fetch_california_housing(return_X_y=True)
dtrain = xgb.DMatrix(X, label=y)
model = xgb.train({"learning_rate": 0.01}, dtrain, 10)
explainer = shap.TreeExplainer(model)
shap_values = explainer.shap_values(X)
margin = model.predict(dtrain, output_margin=True)
assert np.allclose(np.sum(shap_values, axis=len(shap_values.shape) - 1),
margin - explainer.expected_value, 1e-3, 1e-3)
| dmlc/xgboost | tests/python/test_with_shap.py | Python | apache-2.0 | 817 |
import pathlib
import importlib
import sys
__all__ = ['sample', 'sampleTxt', 'sampleBin']
this = pathlib.Path(__file__)
datadir = this.parent.parent / 'data'
loader = importlib.machinery.SourceFileLoader('sample', str(datadir / 'sample.py'))
sample = loader.load_module()
sampleTxt = datadir / 'sample.txt'
sampleBin = datadir / 'sample.bin'
| viridia/coda | test/python/finddata.py | Python | apache-2.0 | 345 |
# Copyright (C) 2014 Universidad Politecnica de Madrid
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from keystone import exception
from keystone.auth import plugins as auth_plugins
from keystone.common import dependency
from keystone.openstack.common import log
from oauthlib.oauth2 import RequestValidator
try:
    from oslo.utils import timeutils
except ImportError:
    from keystone.openstack.common import timeutils
METHOD_NAME = 'oauth2_validator'
LOG = log.getLogger(__name__)
@dependency.requires('oauth2_api')
class OAuth2Validator(RequestValidator):
"""OAuthlib request validator."""
# Ordered roughly in order of appearance in the authorization grant flow
# Pre- and post-authorization.
def validate_client_id(self, client_id, request, *args, **kwargs):
# Simple validity check, does client exist? Not banned?
client_dict = self.oauth2_api.get_consumer(client_id)
if client_dict:
return True
        # NOTE(garcianavalon) Currently the sql driver raises an exception
        # if the consumer doesn't exist, so we throw the Keystone NotFound
        # 404 Not Found exception instead of the OAuthlib InvalidClientId
        # 400 Bad Request exception.
return False
def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwargs):
# Is the client allowed to use the supplied redirect_uri? i.e. has
# the client previously registered this EXACT redirect uri.
client_dict = self.oauth2_api.get_consumer(client_id)
registered_uris = client_dict['redirect_uris']
return redirect_uri in registered_uris
def get_default_redirect_uri(self, client_id, request, *args, **kwargs):
# The redirect used if none has been supplied.
# Prefer your clients to pre register a redirect uri rather than
# supplying one on each authorization request.
# TODO(garcianavalon) implement
pass
def validate_scopes(self, client_id, scopes, client, request, *args, **kwargs):
# Is the client allowed to access the requested scopes?
if not scopes:
return True # the client is not requesting any scope
client_dict = self.oauth2_api.get_consumer(client_id)
if not client_dict['scopes']:
return False # the client isnt allowed any scopes
for scope in scopes:
            if scope not in client_dict['scopes']:
return False
return True
def get_default_scopes(self, client_id, request, *args, **kwargs):
# Scopes a client will authorize for if none are supplied in the
# authorization request.
# TODO(garcianavalon) implement
pass
def validate_response_type(self, client_id, response_type, client, request, *args, **kwargs):
# Clients should only be allowed to use one type of response type, the
# one associated with their one allowed grant type.
# FIXME(garcianavalon) we need to support multiple grant types
# for the same consumers right now. In the future we should
# separate them and only allow one grant type (registering
# each client one time for each grant or allowing components)
# or update the tools to allow to create clients with
# multiple grants
# client_dict = self.oauth2_api.get_consumer(client_id)
# allowed_response_type = client_dict['response_type']
# return allowed_response_type == response_type
return True
# Post-authorization
def save_authorization_code(self, client_id, code, request, *args, **kwargs):
# Remember to associate it with request.scopes, request.redirect_uri
# request.client, request.state and request.user (the last is passed in
# post_authorization credentials, i.e. { 'user': request.user}.
authorization_code = {
'code': code['code'], # code is a dict with state and the code
'consumer_id': client_id,
'scopes': request.scopes,
'authorizing_user_id': request.user_id, # populated through the credentials
'state': request.state,
'redirect_uri': request.redirect_uri
}
token_duration = 28800 # TODO(garcianavalon) extract as configuration option
# TODO(garcianavalon) find a better place to do this
now = timeutils.utcnow()
future = now + datetime.timedelta(seconds=token_duration)
expiry_date = timeutils.isotime(future, subsecond=True)
authorization_code['expires_at'] = expiry_date
self.oauth2_api.store_authorization_code(authorization_code)
# Token request
def authenticate_client(self, request, *args, **kwargs):
# Whichever authentication method suits you, HTTP Basic might work
# TODO(garcianavalon) write it cleaner
LOG.debug('OAUTH2: authenticating client')
authmethod, auth = request.headers['Authorization'].split(' ', 1)
auth = auth.decode('unicode_escape')
if authmethod.lower() == 'basic':
auth = auth.decode('base64')
client_id, secret = auth.split(':', 1)
client_dict = self.oauth2_api.get_consumer_with_secret(client_id)
if client_dict['secret'] == secret:
# TODO(garcianavalon) this can be done in a cleaner way
                # if we change the consumer model attribute to client_id
request.client = type('obj', (object,),
{'client_id' : client_id})
                LOG.info('OAUTH2: successfully authenticated client %s',
client_dict['name'])
return True
return False
def authenticate_client_id(self, client_id, request, *args, **kwargs):
# Don't allow public (non-authenticated) clients
# TODO(garcianavalon) check this method
return False
def validate_code(self, client_id, code, client, request, *args, **kwargs):
# Validate the code belongs to the client. Add associated scopes,
# state and user to request.scopes, request.state and request.user.
authorization_code = self.oauth2_api.get_authorization_code(code)
if not authorization_code['valid']:
return False
if not authorization_code['consumer_id'] == request.client.client_id:
return False
request.scopes = authorization_code['scopes']
request.state = authorization_code['state']
request.user = authorization_code['authorizing_user_id']
return True
def confirm_redirect_uri(self, client_id, code, redirect_uri, client, *args, **kwargs):
# You did save the redirect uri with the authorization code right?
authorization_code = self.oauth2_api.get_authorization_code(code)
return authorization_code['redirect_uri'] == redirect_uri
def validate_grant_type(self, client_id, grant_type, client, request, *args, **kwargs):
# Clients should only be allowed to use one type of grant.
# FIXME(garcianavalon) we need to support multiple grant types
# for the same consumers right now. In the future we should
# separate them and only allow one grant type (registering
# each client one time for each grant or allowing components)
# or update the tools to allow to create clients with
# multiple grants
# # client_id comes as None, we use the one in request
# client_dict = self.oauth2_api.get_consumer(request.client.client_id)
# return grant_type == client_dict['grant_type']
# TODO(garcianavalon) sync with SQL backend soported grant_types
return grant_type in [
'password', 'authorization_code', 'client_credentials', 'refresh_token',
]
def save_bearer_token(self, token, request, *args, **kwargs):
# Remember to associate it with request.scopes, request.user and
# request.client. The two former will be set when you validate
# the authorization code. Don't forget to save both the
# access_token and the refresh_token and set expiration for the
# access_token to now + expires_in seconds.
# token is a dictionary with the following elements:
# {
# u'access_token': u'iC1DQuu7zOgNIjquPXPmXE5hKnTwgu',
# u'expires_in': 3600,
# u'token_type': u'Bearer',
# u'state': u'yKxWeujbz9VUBncQNrkWvVcx8EXl1w',
# u'scope': u'basic_scope',
# u'refresh_token': u'02DTsL6oWgAibU7xenvXttwG80trJC'
# }
        # TODO(garcianavalon) create a custom TokenCreator instead of
# hacking the dictionary
if getattr(request, 'client', None):
consumer_id = request.client.client_id
else:
consumer_id = request.client_id
if getattr(request, 'user', None):
user_id = request.user
else:
user_id = request.user_id
expires_at = datetime.datetime.today() + datetime.timedelta(seconds=token['expires_in'])
access_token = {
'id':token['access_token'],
'consumer_id':consumer_id,
'authorizing_user_id':user_id,
'scopes': request.scopes,
'expires_at':datetime.datetime.strftime(expires_at, '%Y-%m-%d %H:%M:%S'),
'refresh_token': token.get('refresh_token', None),
}
self.oauth2_api.store_access_token(access_token)
def invalidate_authorization_code(self, client_id, code, request, *args, **kwargs):
# Authorization codes are use once, invalidate it when a Bearer token
# has been acquired.
self.oauth2_api.invalidate_authorization_code(code)
# Protected resource request
def validate_bearer_token(self, token, scopes, request):
# Remember to check expiration and scope membership
try:
access_token = self.oauth2_api.get_access_token(token)
except exception.NotFound:
return False
if (datetime.datetime.strptime(access_token['expires_at'], '%Y-%m-%d %H:%M:%S')
< datetime.datetime.today()):
return False
if access_token['scopes'] != scopes:
return False
# NOTE(garcianavalon) we set some attributes in request for later use. There
# is no documentation about this so I follow the comments found in the example
# at https://oauthlib.readthedocs.org/en/latest/oauth2/endpoints/resource.html
# which are:
# oauthlib_request has a few convenient attributes set such as
# oauthlib_request.client = the client associated with the token
# oauthlib_request.user = the user associated with the token
# oauthlib_request.scopes = the scopes bound to this token
# request.scopes is set by oauthlib already
request.user = access_token['authorizing_user_id']
request.client = access_token['consumer_id']
return True
# Token refresh request
def get_original_scopes(self, refresh_token, request, *args, **kwargs):
# Obtain the token associated with the given refresh_token and
# return its scopes, these will be passed on to the refreshed
# access token if the client did not specify a scope during the
# request.
# TODO(garcianavalon)
return ['all_info']
def is_within_original_scope(self, request_scopes, refresh_token, request, *args, **kwargs):
"""Check if requested scopes are within a scope of the refresh token.
When access tokens are refreshed the scope of the new token
needs to be within the scope of the original token. This is
ensured by checking that all requested scopes strings are on
the list returned by the get_original_scopes. If this check
fails, is_within_original_scope is called. The method can be
used in situations where returning all valid scopes from the
get_original_scopes is not practical.
:param request_scopes: A list of scopes that were requested by client
:param refresh_token: Unicode refresh_token
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Refresh token grant
"""
# TODO(garcianavalon)
return True
def validate_refresh_token(self, refresh_token, client, request, *args, **kwargs):
"""Ensure the Bearer token is valid and authorized access to scopes.
OBS! The request.user attribute should be set to the resource owner
associated with this refresh token.
:param refresh_token: Unicode refresh token
:param client: Client object set by you, see authenticate_client.
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Authorization Code Grant (indirectly by issuing refresh tokens)
- Resource Owner Password Credentials Grant (also indirectly)
- Refresh Token Grant
"""
try:
access_token = self.oauth2_api.get_access_token_by_refresh_token(refresh_token)
# Validate that the refresh token is not expired
token_duration = 28800 # TODO(garcianavalon) extract as configuration option
refresh_token_duration = 14 # TODO(garcianavalon) extract as configuration option
# TODO(garcianavalon) find a better place to do this
access_token_expiration_date = datetime.datetime.strptime(
access_token['expires_at'], '%Y-%m-%d %H:%M:%S')
            refresh_token_expiration_date = (
                access_token_expiration_date
                - datetime.timedelta(seconds=token_duration)
                + datetime.timedelta(days=refresh_token_duration))
            if refresh_token_expiration_date < datetime.datetime.today():
return False
except exception.NotFound:
return False
request.user = access_token['authorizing_user_id']
return True
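    # Illustrative note (not part of the upstream validator): with the hard-coded
    # values above, the refresh token lifetime is derived from the stored access
    # token expiry as
    #   refresh_expiry = access_expiry - token_duration + refresh_token_duration
    # so an access token that expires at 2015-01-01 08:00:00 (28800 s lifetime)
    # leaves its refresh token usable until 2015-01-15 00:00:00.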
# Support for password grant
def validate_user(self, username, password, client, request,
*args, **kwargs):
"""Ensure the username and password is valid.
OBS! The validation should also set the user attribute of the request
to a valid resource owner, i.e. request.user = username or similar. If
not set you will be unable to associate a token with a user in the
        persistence method used (commonly, save_bearer_token).
:param username: Unicode username
:param password: Unicode password
:param client: Client object set by you, see authenticate_client.
:param request: The HTTP Request (oauthlib.common.Request)
:rtype: True or False
Method is used by:
- Resource Owner Password Credentials Grant
"""
# To validate the user, try to authenticate it
password_plugin = auth_plugins.password.Password()
auth_payload = {
'user': {
"domain": {
"id": "default"
},
"name": username,
"password": password
}
}
auth_context = {}
try:
password_plugin.authenticate(
context={},
auth_payload=auth_payload,
auth_context=auth_context)
# set the request user
request.user = auth_context['user_id']
return True
except Exception:
return False
| ging/keystone | keystone/contrib/oauth2/validator.py | Python | apache-2.0 | 16,189 |
from zope.i18nmessageid import MessageFactory
PloneMessageFactory = MessageFactory('plone')
from Products.CMFCore.permissions import setDefaultRoles
setDefaultRoles('signature.portlets.gdsignature: Add GroupDocs Signature portlet',
('Manager', 'Site Administrator', 'Owner',))
| liosha2007/plone-groupdocs-signature-source | src/groupdocs/signature/portlets/__init__.py | Python | apache-2.0 | 294 |
# -*- coding: utf-8 -*-
#
# File: src/webframe/management/commands/pref.py
# Date: 2020-04-22 21:35
# Author: Kenson Man <[email protected]>
# Desc: Import / Create / Update / Delete preference
#
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.db.models import Q
from pathlib import Path
from webframe.functions import TRUE_VALUES, LogMessage as lm, getTime
from webframe.models import Preference, AbstractPreference
from uuid import UUID
import logging, os, glob, sys, re
logger=logging.getLogger('webframe.commands.prefs')
class Command(BaseCommand):
   help = '''Manipulate the preference in the database. Supported actions: import/export/show/set/delete/gensecret/gendoc; importing supports csv|xlsx files.'''
def __getIndent__(self, indent=0, ch=' '):
return ch*indent
def create_parser(self, cmdName, subcommand, **kwargs):
parser=super().create_parser(cmdName, subcommand, **kwargs)
parser.epilog='''Example:\r\n
\tpref import path_to_prefs #Import a folder or a csv/xlsx file\r\n
\tpref set ABC --value="def" #Set the preference "ABC" to value "def"\r\n
\tpref gensecret #Generate the encryption secret; PLEASE backup in secure way.\r\n
\tpref gendoc prefsDoc.html #Generate the documentation and save as as output.html
'''
return parser
def add_arguments(self, parser):
#Default Value
pattern='Pref({pref.id}:{pref.name}): {pref.value}'
action='show'
max=256
wildcard='*'
tmpl='webframe/prefsDoc.html'
#Adding arguments
parser.add_argument('action', type=str, help='The action to be taken. One of import/export/show/set/delete/gensecret/gendoc; Default is {0}'.format(action), default=action)
parser.add_argument('name', type=str, nargs='?', help='[import/export/show/set/delete/gendoc]; The name of the preference or path of importing/exporting file (csv|xlsx);')
parser.add_argument('--file', dest='file', type=str, help='[import/export/gendoc]; The file path for import/export/output.')
parser.add_argument('--value', dest='value', type=str, help='[set/delete]; The value of the preference;', default=None)
parser.add_argument('--owner', dest='owner', type=str, help='[set/delete]; The owner of the preference; Optional;', default=None)
parser.add_argument('--noowner', dest='noowner', action='store_true', help='[show/set/delete]; The target preference has no owner; Optional; Default False')
parser.add_argument('--parent', dest='parent', type=str, help='[show/set/delete]; The parent\'s name of the preference. Optional;', default=None)
parser.add_argument('--noparent', dest='noparent', action='store_true', help='[show/set/delete]; The target preference has no parent; Optional; Default False')
parser.add_argument('--pattern', dest='pattern', type=str, help='[show]; The output pattern. {0}'.format(pattern), default=pattern)
parser.add_argument('--max', dest='max', type=int, help='[show]; The maximum number of preference to show. Default is {0}'.format(max), default=max)
parser.add_argument('--wildcard', dest='wildcard', type=str, help='[show]; Specify the wildcard; Default is {0}'.format(wildcard), default=wildcard)
#Importing
parser.add_argument('--sep', dest='separator', type=str, default=',', help='[import]; The separator when CSV importing; Default \",\"')
parser.add_argument('--encoding', dest='encoding', type=str, default='utf-8', help='[import]; The encoding when CSV importing; Default \"utf-8\"')
parser.add_argument('--quotechar', dest='quotechar', type=str, default='\"', help='[import]; The quote-char when CSV importing; Default double quote: \"')
parser.add_argument('--filepath', dest='filepath', action='store_true', help='[import]; Import the file-path in preferences; Default False')
parser.add_argument('--force', '-f ', dest='force', action='store_true', help='[import]; Force the import', default=False)
#Generate Doc
parser.add_argument('--tmpl', dest='tmpl', type=str, help="[gendoc]; The template name when generating document; Default: {0}".format(tmpl), default=tmpl)
def __get_owner__(self, owner=None):
if not owner: return None
logger.debug('Getting owner by: "%s"', owner)
owner=owner if owner else self.kwargs['owner']
return get_user_model().objects.get(username=owner) if owner else None
def __get_parent__(self, parent=None):
parent=parent if parent else self.kwargs['parent']
if parent:
try:
#Get parent by uuid
return Preference.objects.get(id=parent)
except:
try:
#Get parent by name
return Preference.objects.get(name=parent)
except:
pass
return None
def __get_pref__(self, **kwargs):
owner=kwargs['owner'] if 'owner' in kwargs else self.__get_owner__()
parent=kwargs['parent'] if 'parent' in kwargs else self.__get_parent__()
name=kwargs['name'] if 'name' in kwargs else self.kwargs['name']
lang=kwargs['lang'] if 'lang' in kwargs else None
if self.kwargs['filepath']: name=os.path.basename(name)
if self.kwargs['parent'] and parent==None:
raise Preference.DoesNotExist('Parent Preference not found: {0}'.format(self.kwargs['parent']))
rst=Preference.objects.all()
if name and name!='*':
rst=rst.filter(name=name)
if owner:
rst=rst.filter(owner=owner)
elif self.kwargs['noowner']:
rst=rst.filter(owner__isnull=True)
if parent:
rst=rst.filter(parent=parent)
elif self.kwargs['noparent']:
rst=rst.filter(parent__isnull=True)
if self.kwargs['filepath']:
rst=rst.filter(tipe=AbstractPreference.TYPE_FILEPATH)
rst=rst.order_by('owner', 'parent', 'sequence', 'name')
return rst
def __get_name__( self, name ):
'''
Get the name and sequence according to the name.
@param name The string including the sequence and name. For example, '01.Target' will return a tuple (1, 'Target')
@return A tuple including the sequence and the name
'''
p=re.search(r'^\d+\.', name)
if p:
s=p.group(0)
return name[len(s):].strip(), int(name[0:len(s)-1])
return (name, sys.maxsize if hasattr(sys, 'maxsize') else sys.maxint) #Default append
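   # Usage sketch (assumption: this is called on path basenames during --filepath
   # imports; the sample values are illustrative only):
   #   self.__get_name__('01.Target')  # -> ('Target', 1)
   #   self.__get_name__('Target')     # -> ('Target', sys.maxsize), i.e. appended last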
def output( self, pref, pattern=None ):
pattern=pattern if pattern else self.kwargs['pattern']
print(pattern.format(pref=pref))
pattern=' {0}'.format(pattern)
for ch in pref.childs:
self.output(ch, pattern)
def handle(self, *args, **kwargs):
verbosity=int(kwargs['verbosity'])
if verbosity==3:
logger.setLevel(logging.DEBUG)
elif verbosity==2:
logger.setLevel(logging.INFO)
elif verbosity==1:
logger.setLevel(logging.WARNING)
else:
logger.setLevel(logging.ERROR)
self.kwargs=kwargs
action=kwargs['action']
if action=='import':
self.imp()
elif action=='create': #for backward compatibility
self.set()
elif action=='update': #for backward compatibility
self.set()
elif action=='set':
self.set()
elif action=='delete':
self.delete()
elif action=='show':
self.show()
elif action=='gensecret':
self.gensecret()
elif action=='gendoc':
self.gendoc()
elif action=='export':
self.expCsv()
else:
logger.warning('Unknown action: {0}'.format(action))
      logger.warning('DONE!')
def show(self):
logger.info('Showing the preference ...')
q=Preference.objects.all()
if self.kwargs['name']:
logger.info(' with the name filter: {0}'.format(self.kwargs['name']))
if self.kwargs['wildcard'] in self.kwargs['name']:
q=q.filter(name__icontains=self.kwargs['name'].replace(self.kwargs['wildcard'], ''))
else:
q=q.filter(name=self.kwargs['name'])
if self.kwargs['value']:
logger.info(' with the value filter: {0}'.format(self.kwargs['value']))
q=q.filter(value__icontains=self.kwargs['value'])
if self.kwargs['owner']:
logger.info(' which belongs to user: {0}'.format(self.kwargs['owner']))
q=q.filter(owner__username=self.kwargs['owner'])
if self.kwargs['parent']:
logger.info(' which belongs to preference: {0}'.format(self.kwargs['parent']))
q=q.filter(parent__name__iexact=self.kwargs['parent'])
else:
q=q.filter(parent__isnull=True)
for p in q:
self.output(p)
      logger.warning('{0} preference(s) have been shown'.format(len(q)))
def set(self):
with transaction.atomic():
try:
pref=self.__get_pref__()
if pref.count()<1: raise Preference.DoesNotExist
cnt=pref.update(value=self.kwargs['value'])
            logger.info('{0} preference(s) have been updated'.format(cnt))
         except Preference.DoesNotExist:
            owner=self.__get_owner__()
            parent=self.__get_parent__()
            p=Preference(name=self.kwargs['name'], value=self.kwargs['value'], owner=owner, parent=parent)
p.save()
logger.info('The preference<{0}> has been created with value: {1}'.format(p.name, p.value))
def delete(self):
pref=self.__get_pref__()
cnt=pref.count()
pref.delete()
      logger.warning('{0} preference(s) have been deleted'.format(cnt))
def expRow( self, wr, pref, indent=0 ):
'''
Import the specified preference to csv.
'''
cnt=0
tab=self.__getIndent__(indent)
logger.debug(lm('{0}Exporting preference: {1}::{2}...', tab, pref.id, pref.name))
wr.writerow([
pref.name # [0]
, pref.realValue # [1]
, pref.parent.id if pref.parent else '' # [2]
, pref.owner.username if pref.owner else '' # [3]
, pref.helptext # [4]
, Preference.TYPES[pref.tipe][1] # [5]
, pref.encrypted # [6]
, pref.regex # [7]
])
cnt+=1
for p in pref.childs:
cnt+=self.expRow(wr, p, indent+3)
return cnt
def expCsv( self ):
'''
Import the specified list of preferences to csv.
'''
import csv
f=self.kwargs['file']
with open(f, 'w', encoding=self.kwargs['encoding']) as fp:
wr=csv.writer(fp, delimiter=self.kwargs['separator'], quotechar=self.kwargs['quotechar'], quoting=csv.QUOTE_MINIMAL, skipinitialspace=True)
cnt=0
for p in self.__get_pref__():
cnt+=self.expRow(wr, p, 0)
logger.info(lm('Exported {0} records', cnt))
def improw( self, cols, idx=0 ):
try:
name=cols[0]
val=cols[1]
parent=self.__get_parent__(cols[2])
owner=self.__get_owner__(cols[3])
helptext=cols[4]
tipe=cols[5]
encrypted=cols[6] in TRUE_VALUES
regex=cols[7]
lang=cols[8] if len(cols)>8 else None
logger.debug(' Importing row: {0}: {1} [{2}]'.format(idx, name, 'encrypted' if encrypted else 'clear-text'))
self.kwargs['name']=name
pref=self.__get_pref__(name=name, owner=owner, parent=parent, lang=lang)
if pref.count()<1: raise Preference.DoesNotExist
for p in pref:
p.encrypted=encrypted
p.helptext=helptext
p.tipe=tipe
p.regex=regex
            #The value must be the last field to set due to validation. Otherwise, once a new value is imported/assigned into this field, the previous validation rule may be applied incorrectly
p.value=val
p.save()
except Preference.DoesNotExist:
Preference(name=name, _value=val, owner=owner, parent=parent, encrypted=encrypted, helptext=helptext, regex=regex, lang=lang).save()
except:
logger.debug(cols)
logger.exception('Error when handling the column')
raise
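   # Illustrative column layout consumed by improw() above and written by expRow():
   #   name, value, parent, owner, helptext, type, encrypted, regex[, lang]
   # A hypothetical CSV row (values are made up, not taken from a real database):
   #   SITE_NAME,My Site,,,Site title shown in the header,str,False,,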
def impXlsx( self, f ):
'''
Import xlsx file.
'''
from openpyxl import load_workbook
wb=load_workbook(filename=f)
ws=wb.active
logger.info(' Importing worksheet: {0}!{1}'.format(f, ws.title))
cnt=0
with transaction.atomic():
for r in range(1, ws.max_row+1):
cols=list()
name=ws.cell(row=r, column=1).value
if isinstance(name, str): name=name.strip()
if not name: continue #Skip the row when it has no pref.name
if r==1 and (name.upper()=='ID' or name.upper()=='NAME' or name.upper()=='ID/Name'): continue #Skip the first row if header row
cols.append(name) #Name/ID
cols.append(ws.cell(row=r, column=2).value) #Value
cols.append(ws.cell(row=r, column=3).value) #Parent
cols.append(ws.cell(row=r, column=4).value) #Owner
cols.append(ws.cell(row=r, column=5).value) #Reserved
cols.append(ws.cell(row=r, column=6).value) #Tipe
cols.append(ws.cell(row=r, column=7).value) #encrypted
self.improw( cols, r )
cnt+=1
logger.info(' Imported {0} row(s)'.format(cnt))
def impCsv( self, f ):
'''
Import the csv file.
'''
import csv
with transaction.atomic():
logger.info(' Importing csv: {0}'.format(f))
cnt=0
with open(f, 'r', encoding=self.kwargs['encoding']) as fp:
if self.kwargs['quotechar']:
rows=csv.reader(fp, delimiter=self.kwargs['separator'], quotechar=self.kwargs['quotechar'], quoting=csv.QUOTE_MINIMAL, skipinitialspace=True)
else:
rows=csv.reader(fp, delimiter=self.kwargs['separator'], quoting=csv.QUOTE_NONE, skipinitialspace=True)
for row in rows:
if len(row)<1: continue #Skip the empty row
name=row[0].strip()
if not name: continue #Skip the row when it has no name
if cnt==0 and (name.upper()=='ID' or name.upper()=='NAME' or name.upper()=='ID/NAME'): continue #Skip the first row if header row
self.improw( row, cnt )
cnt+=1
logger.info(' Imported {0} row(s)'.format(cnt))
def impdir( self, d ):
if os.path.isdir(d):
logger.info('Importing directory: {0}'.format(d))
else:
logger.warning('This is not the directory: {0}'.format(d))
return
cnt=0
with transaction.atomic():
p=Preference.objects.pref('IMPORTED_PREFERENCES', returnValue=False)
         p.helptext='<p>System use only! <strong>DO NOT MODIFY</strong> this yourself unless you understand the risk.</p>'
p.save()
for f in os.listdir(d):
if not (f.upper().endswith('.XLSX') or f.upper().endswith('.CSV')): continue #only support *.xlsx and *.csv
f=os.path.join(d, f)
try:
Preference.objects.get(name=f, parent=p)
if self.kwargs['force']: raise Preference.DoesNotExist
except Preference.DoesNotExist:
self.impfile( f )
cnt+=1
Preference(name=f, parent=p).save()
logger.debug('Imported {0} file(s)'.format(cnt))
def impfile( self, f ):
if not (os.path.isfile(f) and os.access(f, os.R_OK)):
logger.warning('The file is not readable: {0}'.format(f))
return
fn=f.lower()
if fn.endswith('.xlsx'):
self.impXlsx(f)
elif fn.endswith('.csv'):
self.impCsv(f)
else:
logger.info('Unsupported file: {0}'.format(f))
def imppath( self, p, parent=None):
name, seq=self.__get_name__(os.path.basename(p))
if os.path.isdir(p):
try:
pref=self.__get_pref__(name=name)
if pref.count()<1: raise Preference.DoesNotExist
pref=pref[0]
except Preference.DoesNotExist:
pref=Preference(name=name, parent=parent)
pref.tipe=AbstractPreference.TYPE_FILEPATH
pref.sequence=seq
pref.save()
for f in os.listdir(p):
path=os.path.join(p, f)
self.imppath(path, pref)
#Handling the ordering after import all the childs
ord=1
for c in pref.childs:
c.sequence=ord
c.save()
ord+=1
else:
try:
pref=self.__get_pref__(name=name)
if pref.count()<1: raise Preference.DoesNotExist
pref=pref[0]
except Preference.DoesNotExist:
pref=Preference(name=name, parent=parent)
pref.pathValue=p if os.path.isabs(p) else os.path.abspath(p)
pref.tipe=AbstractPreference.TYPE_FILEPATH
pref.sequence=seq
pref.save()
def imp(self):
disableOrder=getattr(settings, 'DISABLE_REORDER', False)
setattr(settings, 'DISABLE_REORDER', True) #Disable the re-ordering features during importing
try:
f=self.kwargs['file']
if self.kwargs['filepath']:
self.imppath(f)
elif os.path.isdir(f):
self.impdir(f)
elif os.path.isfile(f):
self.impfile(f)
finally:
setattr(settings, 'DISABLE_REORDER', disableOrder) #Resume the re-ordering features after importing
def gensecret(self):
from webframe.models import AbstractPreference
key=AbstractPreference.__getSecret__()
logger.warning(lm('Your secret is: {0}', key))
def gendoc(self):
from django.shortcuts import render
from django.template import loader, Template, Context
from webframe.providers import template_injection, fmt_injection
      tmpl=self.kwargs.get('tmpl', 'webframe/prefsDoc.html')
      logger.warning(lm('Generating the documents according to the template: {0}', tmpl))
tmpl=loader.get_template(tmpl)
params=dict()
params.update(template_injection(None))
params.update(fmt_injection(None))
#params['target']=Preference.objects.filter(parent__isnull=True)
params['target']=self.__get_pref__()
params['TYPES']=Preference.TYPES
params['now']=getTime('now')
txt=tmpl.render(params)
output=self.kwargs.get('file')
if not output: output='prefsDoc.html'
      logger.warning(lm('Generated! Outputting into: {0}', output))
with open(output, 'w') as f:
f.write(txt)
| kensonman/webframe | management/commands/pref.py | Python | apache-2.0 | 18,742 |
# -*- coding: UTF-8 -*-
import hashlib
import base64
import datetime
import urllib2
import json
class TemplateSMS:
account_sid = ''
account_token = ''
app_id = ''
server_ip = ''
server_port = ''
soft_version = ''
timestamp = ''
def set_account(self, account_sid, token):
self.account_sid = account_sid
self.account_token = token
def __init__(self, ip, port, version):
self.server_ip = ip
self.server_port = port
self.soft_version = version
def set_app_id(self, app_id):
self.app_id = app_id
def send_template_sms(self, to, random, valid_min, temp_id):
now_date = datetime.datetime.now()
self.timestamp = now_date.strftime("%Y%m%d%H%M%S")
signature = self.account_sid + self.account_token + self.timestamp
sig = hashlib.md5()
sig.update(signature)
sig = sig.hexdigest().upper()
url = "https://" + self.server_ip + ":" + self.server_port + "/" + self.soft_version + "/Accounts/" + \
self.account_sid + "/SMS/TemplateSMS?sig=" + sig
src = self.account_sid + ":" + self.timestamp
req = urllib2.Request(url)
b = '["%s","%s"]' % (random, valid_min)
body = '''{"to": "%s", "datas": %s, "templateId": "%s", "appId": "%s"}''' % (to, b, temp_id, self.app_id)
req.add_data(body)
auth = base64.encodestring(src).strip()
req.add_header("Authorization", auth)
req.add_header("Accept", 'application/json;')
req.add_header("Content-Type", "application/json;charset=utf-8;")
req.add_header("Host", "127.0.0.1")
req.add_header("content-length", len(body))
try:
res = urllib2.urlopen(req)
data = res.read()
res.close()
locations = json.loads(data)
return locations
except:
return {'172001': 'network error'}
def query_account_info(self):
now_date = datetime.datetime.now()
self.timestamp = now_date.strftime("%Y%m%d%H%M%S")
signature = self.account_sid + self.account_token + self.timestamp
sig = hashlib.md5()
sig.update(signature)
sig = sig.hexdigest().upper()
url = "https://" + self.server_ip + ":" + self.server_port + "/" + self.soft_version + "/Accounts/" + \
self.account_sid + "/AccountInfo?sig=" + sig
src = self.account_sid + ":" + self.timestamp
auth = base64.encodestring(src).strip()
req = urllib2.Request(url)
req.add_header("Accept", "application/json")
req.add_header("Content-Type", "application/jsoncharset=utf-8")
req.add_header("Authorization", auth)
try:
res = urllib2.urlopen(req)
data = res.read()
res.close()
locations = json.loads(data)
return locations
except:
return {"statusCode": '172001'}
| davidvon/pipa-pay-server | admin/sms/sdk.py | Python | apache-2.0 | 2,956 |
# python 3
# tensorflow 2.0
from __future__ import print_function, division, absolute_import
import os
import argparse
import random
import numpy as np
import datetime
# from numpy import linalg
import os.path as osp
import sys
cur_dir = osp.dirname(osp.abspath(__file__))
sys.path.insert(1, osp.join(cur_dir, '.'))
from sklearn.datasets import load_svmlight_file
from scipy.sparse import csr_matrix
# from scipy.sparse import linalg
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import tensorflow as tf
from tf_utils import pinv_naive, pinv
path_train = osp.join(cur_dir, "../a9a/a9a")
path_test = osp.join(cur_dir, "../a9a/a9a.t")
MAX_ITER = 100
np_dtype = np.float32
tf_dtype = tf.float32
# manual seed
manualSeed = random.randint(1, 10000) # fix seed
print("Random Seed: ", manualSeed)
random.seed(manualSeed)
np.random.seed(manualSeed)
# load all data
X_train, y_train = load_svmlight_file(path_train, n_features=123, dtype=np_dtype)
X_test, y_test = load_svmlight_file(path_test, n_features=123, dtype=np_dtype)
# X: scipy.sparse.csr.csr_matrix
# X_train: (32561, 123), y_train: (32561,)
# X_test: (16281, 123), y_test:(16281,)
# stack a dimension of ones to X to simplify computation
N_train = X_train.shape[0]
N_test = X_test.shape[0]
X_train = np.hstack((np.ones((N_train, 1)), X_train.toarray())).astype(np_dtype)
X_test = np.hstack((np.ones((N_test, 1)), X_test.toarray())).astype(np_dtype)
# print(X_train.shape, X_test.shape)
y_train = y_train.reshape((N_train, 1))
y_test = y_test.reshape((N_test, 1))
# label: -1, +1 ==> 0, 1
y_train = np.where(y_train == -1, 0, 1)
y_test = np.where(y_test == -1, 0, 1)
# NB: here X's shape is (N,d), which differs to the derivation
def neg_log_likelihood(w, X, y, L2_param=None):
"""
w: dx1
X: Nxd
y: Nx1
L2_param: \lambda>0, will introduce -\lambda/2 ||w||_2^2
"""
# print(type(X), X.dtype)
res = tf.matmul(tf.matmul(tf.transpose(w), tf.transpose(X)), y.astype(np_dtype)) - \
tf.reduce_sum(tf.math.log(1 + tf.exp(tf.matmul(X, w))))
if L2_param != None and L2_param > 0:
res += -0.5 * L2_param * tf.matmul(tf.transpose(w), w)
return -res[0][0]
def prob(X, w):
"""
X: Nxd
w: dx1
---
prob: N x num_classes(2)"""
y = tf.constant(np.array([0.0, 1.0]), dtype=tf.float32)
prob = tf.exp(tf.matmul(X, w) * y) / (1 + tf.exp(tf.matmul(X, w)))
return prob
def compute_acc(X, y, w):
p = prob(X, w)
y_pred = tf.cast(tf.argmax(p, axis=1), tf.float32)
y = tf.cast(tf.squeeze(y), tf.float32)
acc = tf.reduce_mean(tf.cast(tf.equal(y, y_pred), tf.float32))
return acc
def update(w_old, X, y, L2_param=0):
"""
w_new = w_old - w_update
w_update = (X'RX+lambda*I)^(-1) (X'(mu-y) + lambda*w_old)
lambda is L2_param
w_old: dx1
X: Nxd
y: Nx1
---
w_update: dx1
"""
d = X.shape[1]
mu = tf.sigmoid(tf.matmul(X, w_old)) # Nx1
R_flat = mu * (1 - mu) # element-wise, Nx1
L2_reg_term = L2_param * tf.eye(d)
XRX = tf.matmul(tf.transpose(X), R_flat * X) + L2_reg_term # dxd
# np.save('XRX_tf.npy', XRX.numpy())
# calculate pseudo inverse via SVD
# method 1
# slightly better than tfp.math.pinv when L2_param=0
XRX_pinv = pinv_naive(XRX)
# method 2
# XRX_pinv = pinv(XRX)
# w = w - (X^T R X)^(-1) X^T (mu-y)
# w_new = tf.assign(w_old, w_old - tf.matmul(tf.matmul(XRX_pinv, tf.transpose(X)), mu - y))
y = tf.cast(y, tf_dtype)
w_update = tf.matmul(XRX_pinv, tf.matmul(tf.transpose(X), mu - y) + L2_param * w_old)
return w_update
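# Minimal sketch of one IRLS step using the helpers above (synthetic shapes only,
# not part of the a9a experiment below):
#   X_demo = np.random.rand(8, 4).astype(np_dtype)
#   y_demo = np.random.randint(0, 2, size=(8, 1))
#   w_demo = tf.Variable(0.01 * tf.ones((4, 1), dtype=tf_dtype))
#   step = update(w_demo, X_demo, y_demo, L2_param=1.0)   # dx1 Newton direction
#   w_demo.assign(w_demo - step)                           # equivalent to optimize()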
def optimize(w_old, w_update):
"""custom update op, instead of using SGD variants"""
return w_old.assign(w_old - w_update)
def train_IRLS(X_train, y_train, X_test=None, y_test=None, L2_param=0, max_iter=MAX_ITER):
"""train Logistic Regression via IRLS algorithm
X: Nxd
y: Nx1
---
"""
N, d = X_train.shape
w = tf.Variable(0.01 * tf.ones((d, 1), dtype=tf.float32), name="w")
current_time = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
summary_writer = tf.summary.create_file_writer(f"./logs/{current_time}")
print("start training...")
print("L2 param(lambda): {}".format(L2_param))
i = 0
# iteration
while i <= max_iter:
print("iter: {}".format(i))
# print('\t neg log likelihood: {}'.format(sess.run(neg_L, feed_dict=train_feed_dict)))
neg_L = neg_log_likelihood(w, X_train, y_train, L2_param)
print("\t neg log likelihood: {}".format(neg_L))
train_acc = compute_acc(X_train, y_train, w)
with summary_writer.as_default():
tf.summary.scalar("train_acc", train_acc, step=i)
tf.summary.scalar("train_neg_L", neg_L, step=i)
test_acc = compute_acc(X_test, y_test, w)
with summary_writer.as_default():
tf.summary.scalar("test_acc", test_acc, step=i)
print("\t train acc: {}, test acc: {}".format(train_acc, test_acc))
L2_norm_w = np.linalg.norm(w.numpy())
print("\t L2 norm of w: {}".format(L2_norm_w))
if i > 0:
diff_w = np.linalg.norm(w_update.numpy())
print("\t diff of w_old and w: {}".format(diff_w))
if diff_w < 1e-2:
break
w_update = update(w, X_train, y_train, L2_param)
w = optimize(w, w_update)
i += 1
print("training done.")
if __name__ == "__main__":
# test_acc should be about 0.85
lambda_ = 20 # 0
train_IRLS(X_train, y_train, X_test, y_test, L2_param=lambda_, max_iter=100)
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression()
classifier.fit(X_train, y_train.reshape(N_train,))
y_pred_train = classifier.predict(X_train)
train_acc = np.sum(y_train.reshape(N_train,) == y_pred_train)/N_train
print('train_acc: {}'.format(train_acc))
y_pred_test = classifier.predict(X_test)
test_acc = np.sum(y_test.reshape(N_test,) == y_pred_test)/N_test
print('test acc: {}'.format(test_acc))
| wangg12/IRLS_tf_pytorch | src/IRLS_tf_v2.py | Python | apache-2.0 | 6,061 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteStudy
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_v1_generated_VizierService_DeleteStudy_sync]
from google.cloud import aiplatform_v1
def sample_delete_study():
# Create a client
client = aiplatform_v1.VizierServiceClient()
# Initialize request argument(s)
request = aiplatform_v1.DeleteStudyRequest(
name="name_value",
)
# Make the request
client.delete_study(request=request)
# [END aiplatform_v1_generated_VizierService_DeleteStudy_sync]
| googleapis/python-aiplatform | samples/generated_samples/aiplatform_v1_generated_vizier_service_delete_study_sync.py | Python | apache-2.0 | 1,389 |
import sys
import logging
import hexdump
import vstruct
import vivisect
import envi
import envi.archs.i386 as x86
import envi.archs.amd64 as x64
import sdb
from sdb import SDB_TAGS
from sdb_dump_common import SdbIndex
from sdb_dump_common import item_get_child
from sdb_dump_common import item_get_children
logging.basicConfig(level=logging.DEBUG)
g_logger = logging.getLogger("sdb_dump_patch")
g_logger.setLevel(logging.DEBUG)
ARCH_32 = "32"
ARCH_64 = "64"
def disassemble(buf, base=0, arch=ARCH_32):
if arch == ARCH_32:
d = x86.i386Disasm()
elif arch == ARCH_64:
d = x64.Amd64Disasm()
else:
raise RuntimeError('unknown arch: ' + str(arch))
offset = 0
while True:
if offset >= len(buf):
break
o = d.disasm(buf, offset, base)
yield "0x%x: %s" % (base + offset, str(o))
offset += o.size
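# Usage sketch (the byte string is an arbitrary example, not taken from a real shim
# database; output formatting follows the generator above):
#   for line in disassemble("\x90\x90\xc3", base=0x1000, arch=ARCH_32):
#       print(line)
#   # 0x1000: nop
#   # 0x1001: nop
#   # 0x1002: ret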
class GreedyVArray(vstruct.VArray):
def __init__(self, C):
vstruct.VArray.__init__(self)
self._C = C
def vsParse(self, bytez, offset=0, fast=False):
soffset = offset
while offset < len(bytez):
c = self._C()
try:
offset = c.vsParse(bytez, offset=offset, fast=False)
except:
break
self.vsAddElement(c)
return offset
def vsParseFd(self, fd):
raise NotImplementedError()
def dump_patch(bits, arch=ARCH_32):
ps = GreedyVArray(sdb.PATCHBITS)
ps.vsParse(bits.value.value)
for i, _ in ps:
p = ps[int(i)]
print(" opcode: %s" % str(p["opcode"]))
print(" module name: %s" % p.module_name)
print(" rva: 0x%08x" % p.rva)
print(" unk: 0x%08x" % p.unknown)
print(" payload:")
print(hexdump.hexdump(str(p.pattern), result="return"))
print(" disassembly:")
for l in disassemble(str(p.pattern), p.rva, arch=arch):
print(" " + l)
print("")
def _main(sdb_path, patch_name):
from sdb import SDB
with open(sdb_path, "rb") as f:
buf = f.read()
g_logger.debug("loading database")
s = SDB()
s.vsParse(bytearray(buf))
g_logger.debug("done loading database")
index = SdbIndex()
g_logger.debug("indexing strings")
index.index_sdb(s)
g_logger.debug("done indexing strings")
try:
library = item_get_child(s.database_root, SDB_TAGS.TAG_LIBRARY)
except KeyError:
pass
else:
for shim_ref in item_get_children(library, SDB_TAGS.TAG_SHIM_REF):
patch = item_get_child(shim_ref, SDB_TAGS.TAG_PATCH)
name_ref = item_get_child(patch, SDB_TAGS.TAG_NAME)
name = index.get_string(name_ref.value.reference)
if name != patch_name:
continue
bits = item_get_child(patch, SDB_TAGS.TAG_PATCH_BITS)
dump_patch(bits, arch=ARCH_32)
try:
patch = item_get_child(s.database_root, SDB_TAGS.TAG_PATCH)
except KeyError:
pass
else:
name_ref = item_get_child(patch, SDB_TAGS.TAG_NAME)
name = index.get_string(name_ref.value.reference)
if name == patch_name:
bits = item_get_child(patch, SDB_TAGS.TAG_PATCH_BITS)
dump_patch(bits, arch=ARCH_32)
def main():
import sys
return sys.exit(_main(*sys.argv[1:]))
if __name__ == "__main__":
main()
| williballenthin/python-sdb | scripts/sdb_dump_patch.py | Python | apache-2.0 | 3,397 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Keystone's pep8 extensions.
In order to make the review process faster and easier for core devs we are
adding some Keystone specific pep8 checks. This will catch common errors
so that core devs don't have to.
There are two types of pep8 extensions. One is a function that takes either
a physical or logical line. The physical or logical line is the first param
in the function definition and can be followed by other parameters supported
by pep8. The second type is a class that parses AST trees. For more info
please see pep8.py.
"""
import ast
import re
import six
class BaseASTChecker(ast.NodeVisitor):
"""Provides a simple framework for writing AST-based checks.
Subclasses should implement visit_* methods like any other AST visitor
implementation. When they detect an error for a particular node the
method should call ``self.add_error(offending_node)``. Details about
where in the code the error occurred will be pulled from the node
object.
Subclasses should also provide a class variable named CHECK_DESC to
be used for the human readable error message.
"""
def __init__(self, tree, filename):
"""This object is created automatically by pep8.
:param tree: an AST tree
:param filename: name of the file being analyzed
(ignored by our checks)
"""
self._tree = tree
self._errors = []
def run(self):
"""Called automatically by pep8."""
self.visit(self._tree)
return self._errors
def add_error(self, node, message=None):
"""Add an error caused by a node to the list of errors for pep8."""
message = message or self.CHECK_DESC
error = (node.lineno, node.col_offset, message, self.__class__)
self._errors.append(error)
class CheckForMutableDefaultArgs(BaseASTChecker):
"""Checks for the use of mutable objects as function/method defaults.
    We check for list, dict and set literals/comprehensions, as well as call
    expressions, used as defaults. A mutable referenced through a plain name
    could still slip through and cause a bug.
The fix for this is probably more work than it's worth because it will
get caught during code review.
"""
CHECK_DESC = 'K001 Using mutable as a function/method default'
MUTABLES = (
ast.List, ast.ListComp,
ast.Dict, ast.DictComp,
ast.Set, ast.SetComp,
ast.Call)
def visit_FunctionDef(self, node):
for arg in node.args.defaults:
if isinstance(arg, self.MUTABLES):
self.add_error(arg)
super(CheckForMutableDefaultArgs, self).generic_visit(node)
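# Illustrative example of what K001 flags (not part of the check itself):
#   def remember(key, cache={}):      # K001: dict literal used as a default
#       cache[key] = True
#   def remember(key, cache=None):    # OK: create the mutable inside the body
#       cache = cache if cache is not None else {}
#       cache[key] = True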
def block_comments_begin_with_a_space(physical_line, line_number):
"""There should be a space after the # of block comments.
There is already a check in pep8 that enforces this rule for
inline comments.
Okay: # this is a comment
Okay: #!/usr/bin/python
Okay: # this is a comment
K002: #this is a comment
"""
MESSAGE = "K002 block comments should start with '# '"
# shebangs are OK
if line_number == 1 and physical_line.startswith('#!'):
return
text = physical_line.strip()
if text.startswith('#'): # look for block comments
if len(text) > 1 and not text[1].isspace():
return physical_line.index('#'), MESSAGE
class CheckForAssertingNoneEquality(BaseASTChecker):
"""Ensures that code does not use a None with assert(Not*)Equal."""
CHECK_DESC_IS = ('K003 Use self.assertIsNone(...) when comparing '
'against None')
    CHECK_DESC_ISNOT = ('K004 Use self.assertIsNotNone(...) when comparing '
                        'against None')
def visit_Call(self, node):
# NOTE(dstanek): I wrote this in a verbose way to make it easier to
# read for those that have little experience with Python's AST.
if isinstance(node.func, ast.Attribute):
if node.func.attr == 'assertEqual':
for arg in node.args:
if isinstance(arg, ast.Name) and arg.id == 'None':
self.add_error(node, message=self.CHECK_DESC_IS)
elif node.func.attr == 'assertNotEqual':
for arg in node.args:
if isinstance(arg, ast.Name) and arg.id == 'None':
self.add_error(node, message=self.CHECK_DESC_ISNOT)
super(CheckForAssertingNoneEquality, self).generic_visit(node)
class CheckForLoggingIssues(BaseASTChecker):
DEBUG_CHECK_DESC = 'K005 Using translated string in debug logging'
NONDEBUG_CHECK_DESC = 'K006 Not using translating helper for logging'
EXCESS_HELPER_CHECK_DESC = 'K007 Using hints when _ is necessary'
LOG_MODULES = ('logging', 'keystone.openstack.common.log')
I18N_MODULES = (
'keystone.i18n._',
'keystone.i18n._LI',
'keystone.i18n._LW',
'keystone.i18n._LE',
'keystone.i18n._LC',
)
TRANS_HELPER_MAP = {
'debug': None,
'info': '_LI',
'warn': '_LW',
'warning': '_LW',
'error': '_LE',
'exception': '_LE',
'critical': '_LC',
}
def __init__(self, tree, filename):
super(CheckForLoggingIssues, self).__init__(tree, filename)
self.logger_names = []
self.logger_module_names = []
self.i18n_names = {}
        # NOTE(dstanek): this kinda accounts for scopes when talking
        # about only the leaf node in the graph
self.assignments = {}
def generic_visit(self, node):
"""Called if no explicit visitor function exists for a node."""
for field, value in ast.iter_fields(node):
if isinstance(value, list):
for item in value:
if isinstance(item, ast.AST):
item._parent = node
self.visit(item)
elif isinstance(value, ast.AST):
value._parent = node
self.visit(value)
def _filter_imports(self, module_name, alias):
"""Keeps lists of logging and i18n imports
"""
if module_name in self.LOG_MODULES:
self.logger_module_names.append(alias.asname or alias.name)
elif module_name in self.I18N_MODULES:
self.i18n_names[alias.asname or alias.name] = alias.name
def visit_Import(self, node):
for alias in node.names:
self._filter_imports(alias.name, alias)
return super(CheckForLoggingIssues, self).generic_visit(node)
def visit_ImportFrom(self, node):
for alias in node.names:
full_name = '%s.%s' % (node.module, alias.name)
self._filter_imports(full_name, alias)
return super(CheckForLoggingIssues, self).generic_visit(node)
def _find_name(self, node):
"""Return the fully qualified name or a Name or Attribute."""
if isinstance(node, ast.Name):
return node.id
elif (isinstance(node, ast.Attribute)
and isinstance(node.value, (ast.Name, ast.Attribute))):
method_name = node.attr
obj_name = self._find_name(node.value)
if obj_name is None:
return None
return obj_name + '.' + method_name
elif isinstance(node, six.string_types):
return node
else: # could be Subscript, Call or many more
return None
def visit_Assign(self, node):
"""Look for 'LOG = logging.getLogger'
This handles the simple case:
name = [logging_module].getLogger(...)
- or -
name = [i18n_name](...)
        And some more complex ones:
name = [i18n_name](...) % X
- or -
self.name = [i18n_name](...) % X
"""
attr_node_types = (ast.Name, ast.Attribute)
if (len(node.targets) != 1
or not isinstance(node.targets[0], attr_node_types)):
# say no to: "x, y = ..."
return super(CheckForLoggingIssues, self).generic_visit(node)
target_name = self._find_name(node.targets[0])
if (isinstance(node.value, ast.BinOp) and
isinstance(node.value.op, ast.Mod)):
if (isinstance(node.value.left, ast.Call) and
isinstance(node.value.left.func, ast.Name) and
node.value.left.func.id in self.i18n_names):
# NOTE(dstanek): this is done to match cases like:
# `msg = _('something %s') % x`
node = ast.Assign(value=node.value.left)
if not isinstance(node.value, ast.Call):
# node.value must be a call to getLogger
self.assignments.pop(target_name, None)
return super(CheckForLoggingIssues, self).generic_visit(node)
# is this a call to an i18n function?
if (isinstance(node.value.func, ast.Name)
and node.value.func.id in self.i18n_names):
self.assignments[target_name] = node.value.func.id
return super(CheckForLoggingIssues, self).generic_visit(node)
if (not isinstance(node.value.func, ast.Attribute)
or not isinstance(node.value.func.value, attr_node_types)):
# function must be an attribute on an object like
# logging.getLogger
return super(CheckForLoggingIssues, self).generic_visit(node)
object_name = self._find_name(node.value.func.value)
func_name = node.value.func.attr
if (object_name in self.logger_module_names
and func_name == 'getLogger'):
self.logger_names.append(target_name)
return super(CheckForLoggingIssues, self).generic_visit(node)
def visit_Call(self, node):
"""Look for the 'LOG.*' calls.
"""
# obj.method
if isinstance(node.func, ast.Attribute):
obj_name = self._find_name(node.func.value)
if isinstance(node.func.value, ast.Name):
method_name = node.func.attr
elif isinstance(node.func.value, ast.Attribute):
obj_name = self._find_name(node.func.value)
method_name = node.func.attr
else: # could be Subscript, Call or many more
return super(CheckForLoggingIssues, self).generic_visit(node)
        # must be a logger instance and one of the supported logging methods
if (obj_name not in self.logger_names
or method_name not in self.TRANS_HELPER_MAP):
return super(CheckForLoggingIssues, self).generic_visit(node)
# the call must have arguments
if not len(node.args):
return super(CheckForLoggingIssues, self).generic_visit(node)
if method_name == 'debug':
self._process_debug(node)
elif method_name in self.TRANS_HELPER_MAP:
self._process_non_debug(node, method_name)
return super(CheckForLoggingIssues, self).generic_visit(node)
def _process_debug(self, node):
msg = node.args[0] # first arg to a logging method is the msg
# if first arg is a call to a i18n name
if (isinstance(msg, ast.Call)
and isinstance(msg.func, ast.Name)
and msg.func.id in self.i18n_names):
self.add_error(msg, message=self.DEBUG_CHECK_DESC)
# if the first arg is a reference to a i18n call
elif (isinstance(msg, ast.Name)
and msg.id in self.assignments
and not self._is_raised_later(node, msg.id)):
self.add_error(msg, message=self.DEBUG_CHECK_DESC)
def _process_non_debug(self, node, method_name):
msg = node.args[0] # first arg to a logging method is the msg
# if first arg is a call to a i18n name
if isinstance(msg, ast.Call):
try:
func_name = msg.func.id
except AttributeError:
# in the case of logging only an exception, the msg function
# will not have an id associated with it, for instance:
# LOG.warning(six.text_type(e))
return
# the function name is the correct translation helper
# for the logging method
if func_name == self.TRANS_HELPER_MAP[method_name]:
return
# the function name is an alias for the correct translation
            # helper for the logging method
if (self.i18n_names[func_name] ==
self.TRANS_HELPER_MAP[method_name]):
return
self.add_error(msg, message=self.NONDEBUG_CHECK_DESC)
# if the first arg is not a reference to the correct i18n hint
elif isinstance(msg, ast.Name):
            # FIXME(dstanek): to make this more robust we should be checking
            # all names passed into a logging method. We can't right now
# because:
# 1. We have code like this that we'll fix when dealing with the %:
# msg = _('....') % {}
# LOG.warn(msg)
# 2. We also do LOG.exception(e) in several places. I'm not sure
# exactly what we should be doing about that.
if msg.id not in self.assignments:
return
helper_method_name = self.TRANS_HELPER_MAP[method_name]
if (self.assignments[msg.id] != helper_method_name
and not self._is_raised_later(node, msg.id)):
self.add_error(msg, message=self.NONDEBUG_CHECK_DESC)
elif (self.assignments[msg.id] == helper_method_name
and self._is_raised_later(node, msg.id)):
self.add_error(msg, message=self.EXCESS_HELPER_CHECK_DESC)
def _is_raised_later(self, node, name):
def find_peers(node):
node_for_line = node._parent
for _field, value in ast.iter_fields(node._parent._parent):
if isinstance(value, list) and node_for_line in value:
return value[value.index(node_for_line) + 1:]
continue
return []
peers = find_peers(node)
for peer in peers:
if isinstance(peer, ast.Raise):
if (isinstance(peer.type, ast.Call) and
len(peer.type.args) > 0 and
isinstance(peer.type.args[0], ast.Name) and
name in (a.id for a in peer.type.args)):
return True
else:
return False
elif isinstance(peer, ast.Assign):
if name in (t.id for t in peer.targets):
return False
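# Illustrative patterns for the K005-K007 logging checks above (a sketch,
# assuming LOG = logging.getLogger(__name__) and imports like
# "from keystone.i18n import _, _LI, _LW"):
#
#     LOG.debug(_('danger'))        # K005: translated string in debug logging
#     LOG.warning(_('danger'))      # K006: warnings should use _LW()
#     msg = _LI('danger')
#     LOG.info(msg)
#     raise ValueError(msg)         # K007: a raised message needs _() instead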
def check_oslo_namespace_imports(logical_line, blank_before, filename):
oslo_namespace_imports = re.compile(
r"(((from)|(import))\s+oslo\.)|(from\s+oslo\s+import\s+)")
if re.match(oslo_namespace_imports, logical_line):
msg = ("K333: '%s' must be used instead of '%s'.") % (
logical_line.replace('oslo.', 'oslo_'),
logical_line)
yield(0, msg)
def factory(register):
register(CheckForMutableDefaultArgs)
register(block_comments_begin_with_a_space)
register(CheckForAssertingNoneEquality)
register(CheckForLoggingIssues)
register(check_oslo_namespace_imports)
| UTSA-ICS/keystone-kerberos | keystone/hacking/checks.py | Python | apache-2.0 | 15,958 |
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class Chassis100ChassisActions(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
Chassis100ChassisActions - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'oem': 'object',
'chassis_reset': 'Chassis100Reset'
}
self.attribute_map = {
'oem': 'Oem',
'chassis_reset': '#Chassis.Reset'
}
self._oem = None
self._chassis_reset = None
@property
def oem(self):
"""
Gets the oem of this Chassis100ChassisActions.
:return: The oem of this Chassis100ChassisActions.
:rtype: object
"""
return self._oem
@oem.setter
def oem(self, oem):
"""
Sets the oem of this Chassis100ChassisActions.
:param oem: The oem of this Chassis100ChassisActions.
:type: object
"""
self._oem = oem
@property
def chassis_reset(self):
"""
Gets the chassis_reset of this Chassis100ChassisActions.
:return: The chassis_reset of this Chassis100ChassisActions.
:rtype: Chassis100Reset
"""
return self._chassis_reset
@chassis_reset.setter
def chassis_reset(self, chassis_reset):
"""
Sets the chassis_reset of this Chassis100ChassisActions.
:param chassis_reset: The chassis_reset of this Chassis100ChassisActions.
:type: Chassis100Reset
"""
self._chassis_reset = chassis_reset
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
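# Example usage (a sketch, not part of the generated code): the model is a
# plain container whose attributes mirror the Redfish "Actions" object.
#
#     actions = Chassis100ChassisActions()
#     actions.oem = {}
#     actions.to_dict()   # -> {'oem': {}, 'chassis_reset': None} (key order may vary)
#     actions.to_str()    # pretty-printed via pprint.pformat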
| jlongever/redfish-client-python | on_http_redfish_1_0/models/chassis_1_0_0_chassis_actions.py | Python | apache-2.0 | 3,731 |
# -*- encoding: utf-8 -*-
"""
lunaport.dao.line
~~~~~~~~~~~~~~~~~
Storage interaction logic for line resource.
"""
import pprint
pp = pprint.PrettyPrinter(indent=4).pprint
from sqlalchemy import text, exc
from ..wsgi import app, db
from .. domain.line import LineBuilder, LineAdaptor
from exceptions import StorageError
class Filter(object):
params_allowed = {
'name': (
"AND name LIKE '%:name%'"),
}
cast_to_int = []
def __init__(self, **kw):
self.rule = []
self.q_params = {}
for p, v in kw.iteritems():
if p not in self.params_allowed.keys():
continue
elif isinstance(v, (unicode, basestring)):
self.rule.append(self.params_allowed[p][0])
self.q_params.update({p: v})
else:
raise StorageError('Wrong *{}* param type.'.format(p))
def cmpl_query(self):
sql_text = '\n' + ' '.join(self.rule)
return sql_text, self.q_params
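# Example (a sketch): Filter collects the allowed keyword arguments into SQL
# fragments plus bind parameters; unknown keywords are silently ignored.
#
#     sql, params = Filter(name=u'smoke').cmpl_query()
#     # sql    -> "\nAND name LIKE '%:name%'"
#     # params -> {'name': u'smoke'}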
class Dao(object):
"""Interface for line storage"""
@classmethod
def insert(cls, ammo):
        raise NotImplementedError()
@classmethod
def get_single(cls, **kw):
        raise NotImplementedError()
@classmethod
def get_many(cls, **kw):
        raise NotImplementedError()
class RDBMS(Dao):
"""PostgreSQL wrapper, implementing line.dao interface"""
per_page_default = app.config.get('LINE_PER_PAGE_DEFAULT') or 10
per_page_max = app.config.get('LINE_PER_PAGE_MAX') or 100
select_join_part = '''
SELECT l.*,
dc.name AS dc_name
FROM line l,
dc dc
WHERE l.dc_id = dc.id'''
@staticmethod
def rdbms_call(q_text, q_params):
return db.engine.connect().execute(text(q_text), **q_params)
@classmethod
def insert(cls, line):
kw = LineAdaptor.to_dict(line)
kw['dc_name'] = kw['dc']['name']
pp(kw)
def query():
return cls.rdbms_call('''
INSERT INTO line
(
id,
name,
dc_id
)
VALUES (
:id,
:name,
(SELECT id FROM dc WHERE name = :dc_name)
)
returning id''', kw)
        err_duplicate = 'line:{} already exists'.format(kw.get('name'))
try:
pk_id = [r for r in query()].pop()[0]
except exc.IntegrityError as e:
if 'unique constraint "line_pkey"' in str(e):
raise StorageError(err_duplicate)
raise StorageError('Some kind of IntegrityError')
return pk_id
@classmethod
def get_single(cls, **kw):
if kw.get('line_id'):
query_params = {
'line_id': kw.get('line_id'),
}
rv = cls.rdbms_call(' '.join([cls.select_join_part, 'AND l.id = :line_id']), query_params)
row = rv.first()
if not row:
return None
t_kw = dict(zip(rv.keys(), row))
return LineBuilder.from_row(**t_kw)
@classmethod
def get_many(cls, **kw):
"""pagination"""
pagination_part = '\nORDER BY id DESC\nLIMIT :limit OFFSET :offset'
param_per_page = kw.get('per_page')
if param_per_page and (param_per_page <= cls.per_page_max):
per_page = param_per_page
else:
per_page = cls.per_page_default
page_num = kw.get('page')
# page number starts from 1, page 0 and 1 mean the same -
# first slice from data set.
if page_num and isinstance(page_num, int) and (page_num >= 2):
offset = (page_num - 1) * per_page
next_page = page_num + 1
prev_page = page_num - 1
else:
offset = 0
next_page = 2
prev_page = None
query_params = {
'limit': per_page,
'offset': offset,
}
"""filtering"""
f = Filter(**kw)
filter_part, q_params_up = f.cmpl_query()
query_params.update(q_params_up)
rv = cls.rdbms_call(
''.join([cls.select_join_part, filter_part, pagination_part]),
query_params)
rows = rv.fetchall()
if len(rows) == 0:
return None, None, None, None
elif len(rows) < per_page: # last chunk of data
next_page = None
def create_dc(row):
t_kw = dict(zip(rv.keys(), row))
return LineBuilder.from_row(**t_kw)
return map(create_dc, rows), per_page, next_page, prev_page
| greggyNapalm/lunaport_server | lunaport_server/dao/path.py | Python | apache-2.0 | 4,535 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import uuid
import pkg_resources
from pifpaf import drivers
class CephDriver(drivers.Driver):
DEFAULT_PORT = 6790
def __init__(self, port=DEFAULT_PORT,
**kwargs):
"""Create a new Ceph cluster."""
super(CephDriver, self).__init__(**kwargs)
self.port = port
@classmethod
def get_options(cls):
return [
{"param_decls": ["--port"],
"type": int,
"default": cls.DEFAULT_PORT,
"help": "port to use for Ceph Monitor"},
]
def _setUp(self):
super(CephDriver, self)._setUp()
self._ensure_xattr_support()
fsid = str(uuid.uuid4())
conffile = os.path.join(self.tempdir, "ceph.conf")
mondir = os.path.join(self.tempdir, "mon", "ceph-a")
osddir = os.path.join(self.tempdir, "osd", "ceph-0")
os.makedirs(mondir)
os.makedirs(osddir)
_, version = self._exec(["ceph", "--version"], stdout=True)
version = version.decode("ascii").split()[2]
version = pkg_resources.parse_version(version)
if version < pkg_resources.parse_version("12.0.0"):
extra = """
mon_osd_nearfull_ratio = 1
mon_osd_full_ratio = 1
osd_failsafe_nearfull_ratio = 1
osd_failsafe_full_ratio = 1
"""
else:
extra = """
mon_allow_pool_delete = true
"""
        # FIXME(sileht): check available space on /dev/shm
# if os.path.exists("/dev/shm") and os.access('/dev/shm', os.W_OK):
# journal_path = "/dev/shm/$cluster-$id-journal"
# else:
journal_path = "%s/osd/$cluster-$id/journal" % self.tempdir
with open(conffile, "w") as f:
f.write("""[global]
fsid = %(fsid)s
# no auth for now
auth cluster required = none
auth service required = none
auth client required = none
## no replica
osd pool default size = 1
osd pool default min size = 1
osd crush chooseleaf type = 0
## some default path change
run dir = %(tempdir)s
pid file = %(tempdir)s/$type.$id.pid
admin socket = %(tempdir)s/$cluster-$name.asok
mon data = %(tempdir)s/mon/$cluster-$id
osd data = %(tempdir)s/osd/$cluster-$id
osd journal = %(journal_path)s
log file = %(tempdir)s/$cluster-$name.log
mon cluster log file = %(tempdir)s/$cluster.log
# Only omap to have same behavior for all filesystems
filestore xattr use omap = True
# workaround for ext4 and last Jewel version
osd max object name len = 256
osd max object namespace len = 64
osd op threads = 10
filestore max sync interval = 10001
filestore min sync interval = 10000
%(extra)s
journal_aio = false
journal_dio = false
journal zero on create = false
journal block align = false
# run as file owner
setuser match path = %(tempdir)s/$type/$cluster-$id
[mon.a]
host = localhost
mon addr = 127.0.0.1:%(port)d
""" % dict(fsid=fsid, tempdir=self.tempdir, port=self.port,
journal_path=journal_path, extra=extra)) # noqa
ceph_opts = ["ceph", "-c", conffile]
mon_opts = ["ceph-mon", "-c", conffile, "--id", "a", "-d"]
osd_opts = ["ceph-osd", "-c", conffile, "--id", "0", "-d",
"-m", "127.0.0.1:%d" % self.port]
# Create and start monitor
self._exec(mon_opts + ["--mkfs"])
self._touch(os.path.join(mondir, "done"))
mon, _ = self._exec(
mon_opts,
wait_for_line=r"mon.a@0\(leader\).mds e1 print_map")
# Create and start OSD
self._exec(ceph_opts + ["osd", "create"])
self._exec(ceph_opts + ["osd", "crush", "add", "osd.0", "1",
"root=default"])
self._exec(osd_opts + ["--mkfs", "--mkjournal"])
if version < pkg_resources.parse_version("0.94.0"):
wait_for_line = "journal close"
else:
wait_for_line = "done with init"
osd, _ = self._exec(osd_opts, wait_for_line=wait_for_line)
if version >= pkg_resources.parse_version("12.0.0"):
self._exec(ceph_opts + ["osd", "set-full-ratio", "0.95"])
self._exec(ceph_opts + ["osd", "set-backfillfull-ratio", "0.95"])
self._exec(ceph_opts + ["osd", "set-nearfull-ratio", "0.95"])
# Wait it's ready
out = b""
while b"HEALTH_OK" not in out:
ceph, out = self._exec(ceph_opts + ["health"], stdout=True)
if b"HEALTH_ERR" in out:
raise RuntimeError("Fail to deploy ceph")
self.putenv("CEPH_CONF", conffile, True)
self.putenv("CEPH_CONF", conffile)
self.putenv("URL", "ceph://localhost:%d" % self.port)
| sileht/pifpaf | pifpaf/drivers/ceph.py | Python | apache-2.0 | 5,134 |
import unittest
import tagging
class TestRealizerArbitraryReordering(unittest.TestCase):
"""
Tests for the realizer with arbitrary reordering
enabled.
"""
def test_realize_output_in_order(self):
"""
Test for when source tokens occur
in the same relative order in the
target string
"""
editing_task = tagging.EditingTask(["word1 word2 <::::> word3 "])
tags_str = ['KEEP|0', 'KEEP|1', 'KEEP|and', 'DELETE', 'KEEP|3']
tags = [tagging.Tag(tag) for tag in tags_str]
result = editing_task.realize_output([tags])
expected = "word1 word2 and word3 "
self.assertEqual(expected, result)
def test_realize_output_out_of_order(self):
"""
Test for when the source tokens
do not occur in the same relative order
in the target string
"""
editing_task = tagging.EditingTask(["word1 word2 <::::> word3 "])
tags_str = ['KEEP|1', 'KEEP|0', 'KEEP|and', 'DELETE', 'KEEP|3']
tags = [tagging.Tag(tag) for tag in tags_str]
result = editing_task.realize_output([tags])
expected = "word2 word1 and word3 "
self.assertEqual(expected, result)
if __name__ == '__main__':
unittest.main()
| googleinterns/contextual-query-rewrites | models/lasertagger/test_realizer_arbitrary_reordering.py | Python | apache-2.0 | 1,307 |
#!/usr/bin/env python3
#
# Copyright (c) 2022 Roberto Riggio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Exposes a RESTful interface ."""
import uuid
import empower_core.apimanager.apimanager as apimanager
# pylint: disable=W0223
class AlertsHandler(apimanager.APIHandler):
"""Alerts handler"""
URLS = [r"/api/v1/alerts/?",
r"/api/v1/alerts/([a-zA-Z0-9-]*)/?"]
@apimanager.validate(min_args=0, max_args=1)
def get(self, *args, **kwargs):
"""Lists all the alerts.
Args:
[0], the alert id (optional)
Example URLs:
GET /api/v1/alerts
GET /api/v1/alerts/52313ecb-9d00-4b7d-b873-b55d3d9ada26
"""
return self.service.alerts \
if not args else self.service.alerts[uuid.UUID(args[0])]
@apimanager.validate(returncode=201, min_args=0, max_args=1)
def post(self, *args, **kwargs):
"""Create a new alert.
Args:
[0], the alert id (optional)
Request:
version: protocol version (1.0)
alert: the alert
"""
alert_id = uuid.UUID(args[0]) if args else uuid.uuid4()
if 'alert' in kwargs:
alert = self.service.create(uuid=alert_id, alert=kwargs['alert'])
else:
alert = self.service.create(uuid=alert_id)
self.set_header("Location", "/api/v1/alerts/%s" % alert.uuid)
@apimanager.validate(returncode=204, min_args=0, max_args=1)
def delete(self, *args, **kwargs):
"""Delete one or all alerts.
Args:
[0], the alert id (optional)
Example URLs:
DELETE /api/v1/alerts
DELETE /api/v1/alerts/52313ecb-9d00-4b7d-b873-b55d3d9ada26
"""
if args:
self.service.remove(uuid.UUID(args[0]))
else:
self.service.remove_all()
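# Example requests (illustrative only; the host/port and the exact payload
# are assumptions based on the docstrings above):
#
#     GET    /api/v1/alerts
#     POST   /api/v1/alerts        with body {"version": "1.0", "alert": "..."}
#     DELETE /api/v1/alerts/52313ecb-9d00-4b7d-b873-b55d3d9ada26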
| 5g-empower/empower-runtime | empower/managers/alertsmanager/alertshandler.py | Python | apache-2.0 | 2,382 |
#### PATTERN | WEB #################################################################################
# Copyright (c) 2010 University of Antwerp, Belgium
# Author: Tom De Smedt <[email protected]>
# License: BSD (see LICENSE.txt for details).
# http://www.clips.ua.ac.be/pages/pattern
####################################################################################################
# Python API interface for various web services (Google, Twitter, Wikipedia, ...)
# sgmllib.py is removed from Python 3, a warning is issued in Python 2.6+. Ignore for now.
import warnings; warnings.filterwarnings(action='ignore', category=DeprecationWarning, module="sgmllib")
import threading
import time
import os
import socket, urlparse, urllib, urllib2
import base64
import htmlentitydefs
import sgmllib
import re
import xml.dom.minidom
import StringIO
import bisect
import new
import api
import feed
import oauth
import json
import locale
from feed import feedparser
from soup import BeautifulSoup
try:
# Import persistent Cache.
# If this module is used separately, a dict is used (i.e. for this Python session only).
from cache import Cache, cache, TMP
except:
cache = {}
try:
from imap import Mail, MailFolder, Message, GMAIL
from imap import MailError, MailServiceError, MailLoginError, MailNotLoggedIn
from imap import FROM, SUBJECT, DATE, BODY, ATTACHMENTS
except:
pass
try:
MODULE = os.path.dirname(os.path.abspath(__file__))
except:
MODULE = ""
#### UNICODE #######################################################################################
def decode_utf8(string):
""" Returns the given string as a unicode string (if possible).
"""
if isinstance(string, str):
for encoding in (("utf-8",), ("windows-1252",), ("utf-8", "ignore")):
try:
return string.decode(*encoding)
except:
pass
return string
return unicode(string)
def encode_utf8(string):
""" Returns the given string as a Python byte string (if possible).
"""
if isinstance(string, unicode):
try:
return string.encode("utf-8")
except:
return string
return str(string)
u = decode_utf8
s = encode_utf8
# For clearer source code:
bytestring = s
#### ASYNCHRONOUS REQUEST ##########################################################################
class AsynchronousRequest:
def __init__(self, function, *args, **kwargs):
""" Executes the function in the background.
AsynchronousRequest.done is False as long as it is busy, but the program will not halt in the meantime.
AsynchronousRequest.value contains the function's return value once done.
            AsynchronousRequest.error contains the Exception raised by an erroneous function.
For example, this is useful for running live web requests while keeping an animation running.
For good reasons, there is no way to interrupt a background process (i.e. Python thread).
You are responsible for ensuring that the given function doesn't hang.
"""
self._response = None # The return value of the given function.
self._error = None # The exception (if any) raised by the function.
self._time = time.time()
self._function = function
self._thread = threading.Thread(target=self._fetch, args=(function,)+args, kwargs=kwargs)
self._thread.start()
def _fetch(self, function, *args, **kwargs):
""" Executes the function and sets AsynchronousRequest.response.
"""
try:
self._response = function(*args, **kwargs)
except Exception, e:
self._error = e
def now(self):
""" Waits for the function to finish and yields its return value.
"""
self._thread.join(); return self._response
@property
def elapsed(self):
return time.time() - self._time
@property
def done(self):
return not self._thread.isAlive()
@property
def value(self):
return self._response
@property
def error(self):
return self._error
def __repr__(self):
return "AsynchronousRequest(function='%s')" % self._function.__name__
def asynchronous(function, *args, **kwargs):
""" Returns an AsynchronousRequest object for the given function.
"""
return AsynchronousRequest(function, *args, **kwargs)
send = asynchronous
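# Example (a sketch): fetch a page in the background while the main loop
# stays responsive, then read the result once the request is done.
#
#     request = asynchronous(URL("http://www.clips.ua.ac.be").download)
#     while not request.done:
#         time.sleep(0.1)
#     html = request.value   # or inspect request.error if the download failed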
#### URL ###########################################################################################
# User agent and referrer.
# Used to identify the application accessing the web.
USER_AGENT = "Pattern/2.3 +http://www.clips.ua.ac.be/pages/pattern"
REFERRER = "http://www.clips.ua.ac.be/pages/pattern"
# Mozilla user agent.
# Websites can include code to block out any application except browsers.
MOZILLA = "Mozilla/5.0"
# HTTP request method.
GET = "get" # Data is encoded in the URL.
POST = "post" # Data is encoded in the message body.
# URL parts.
# protocol://username:password@domain:port/path/page?query_string#anchor
PROTOCOL, USERNAME, PASSWORD, DOMAIN, PORT, PATH, PAGE, QUERY, ANCHOR = \
"protocol", "username", "password", "domain", "port", "path", "page", "query", "anchor"
# MIME type.
MIMETYPE_WEBPAGE = ["text/html"]
MIMETYPE_STYLESHEET = ["text/css"]
MIMETYPE_PLAINTEXT = ["text/plain"]
MIMETYPE_PDF = ["application/pdf"]
MIMETYPE_NEWSFEED = ["application/rss+xml", "application/atom+xml"]
MIMETYPE_IMAGE = ["image/gif", "image/jpeg", "image/png", "image/tiff"]
MIMETYPE_AUDIO = ["audio/mpeg", "audio/mp4", "audio/x-aiff", "audio/x-wav"]
MIMETYPE_VIDEO = ["video/mpeg", "video/mp4", "video/quicktime"]
MIMETYPE_ARCHIVE = ["application/x-stuffit", "application/x-tar", "application/zip"]
MIMETYPE_SCRIPT = ["application/javascript", "application/ecmascript"]
def extension(filename):
""" Returns the extension in the given filename: "cat.jpg" => ".jpg".
"""
return os.path.splitext(filename)[1]
def urldecode(query):
""" Inverse operation of urllib.urlencode.
Returns a dictionary of (name, value)-items from a URL query string.
"""
def _format(s):
if s == "None":
return None
if s.isdigit():
return int(s)
try: return float(s)
except:
return s
query = [(kv.split("=")+[None])[:2] for kv in query.lstrip("?").split("&")]
query = [(urllib.unquote_plus(bytestring(k)), urllib.unquote_plus(bytestring(v))) for k, v in query]
query = [(u(k), u(v)) for k, v in query]
query = [(k, _format(v) or None) for k, v in query]
query = dict([(k,v) for k, v in query if k != ""])
return query
url_decode = urldecode
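# For example (illustrative, not from the original module):
#     urldecode("?page=1&q=cats") => {u'page': 1, u'q': u'cats'}
# Values are unquoted and cast to int/float where possible;
# empty or missing values become None.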
def proxy(host, protocol="https"):
""" Returns the value for the URL.open() proxy parameter.
- host: host address of the proxy server.
"""
return (host, protocol)
class URLError(Exception):
pass # URL contains errors (e.g. a missing t in htp://).
class URLTimeout(URLError):
    pass # URL takes too long to load.
class HTTPError(URLError):
pass # URL causes an error on the contacted server.
class HTTP301Redirect(HTTPError):
pass # Too many redirects.
# The site may be trying to set a cookie and waiting for you to return it,
# or taking other measures to discern a browser from a script.
# For specific purposes you should build your own urllib2.HTTPRedirectHandler
# and pass it to urllib2.build_opener() in URL.open()
class HTTP400BadRequest(HTTPError):
pass # URL contains an invalid request.
class HTTP401Authentication(HTTPError):
pass # URL requires a login and password.
class HTTP403Forbidden(HTTPError):
pass # URL is not accessible (user-agent?)
class HTTP404NotFound(HTTPError):
pass # URL doesn't exist on the internet.
class HTTP420Error(HTTPError):
pass # Used by Twitter for rate limiting.
class HTTP500InternalServerError(HTTPError):
pass # Generic server error.
class URL:
def __init__(self, string=u"", method=GET, query={}):
""" URL object with the individual parts available as attributes:
For protocol://username:password@domain:port/path/page?query_string#anchor:
- URL.protocol: http, https, ftp, ...
- URL.username: username for restricted domains.
- URL.password: password for restricted domains.
- URL.domain : the domain name, e.g. nodebox.net.
- URL.port : the server port to connect to.
- URL.path : the server path of folders, as a list, e.g. ['news', '2010']
- URL.page : the page name, e.g. page.html.
- URL.query : the query string as a dictionary of (name, value)-items.
- URL.anchor : the page anchor.
If method is POST, the query string is sent with HTTP POST.
"""
self.__dict__["method"] = method # Use __dict__ directly since __setattr__ is overridden.
self.__dict__["_string"] = u(string)
self.__dict__["_parts"] = None
self.__dict__["_headers"] = None
self.__dict__["_redirect"] = None
if isinstance(string, URL):
self.__dict__["method"] = string.method
self.query.update(string.query)
if len(query) > 0:
# Requires that we parse the string first (see URL.__setattr__).
self.query.update(query)
def _parse(self):
""" Parses all the parts of the URL string to a dictionary.
            URL format: protocol://username:password@domain:port/path/page?querystring#anchor
For example: http://user:[email protected]:992/animal/bird?species=seagull&q#wings
This is a cached method that is only invoked when necessary, and only once.
"""
p = urlparse.urlsplit(self._string)
P = {PROTOCOL: p[0], # http
USERNAME: u"", # user
PASSWORD: u"", # pass
DOMAIN: p[1], # example.com
PORT: u"", # 992
PATH: p[2], # [animal]
PAGE: u"", # bird
QUERY: urldecode(p[3]), # {"species": "seagull", "q": None}
ANCHOR: p[4] # wings
}
# Split the username and password from the domain.
if "@" in P[DOMAIN]:
P[USERNAME], \
P[PASSWORD] = (p[1].split("@")[0].split(":")+[u""])[:2]
P[DOMAIN] = p[1].split("@")[1]
# Split the port number from the domain.
if ":" in P[DOMAIN]:
P[DOMAIN], \
P[PORT] = P[DOMAIN].split(":")
P[PORT] = int(P[PORT])
# Split the base page from the path.
if "/" in P[PATH]:
P[PAGE] = p[2].split("/")[-1]
P[PATH] = p[2][:len(p[2])-len(P[PAGE])].strip("/").split("/")
P[PATH] = filter(lambda v: v != "", P[PATH])
else:
P[PAGE] = p[2].strip("/")
P[PATH] = []
self.__dict__["_parts"] = P
# URL.string yields unicode(URL) by joining the different parts,
# if the URL parts have been modified.
def _get_string(self): return unicode(self)
def _set_string(self, v):
self.__dict__["_string"] = u(v)
self.__dict__["_parts"] = None
string = property(_get_string, _set_string)
@property
def parts(self):
""" Yields a dictionary with the URL parts.
"""
if not self._parts: self._parse()
return self._parts
@property
def querystring(self):
""" Yields the URL querystring: "www.example.com?page=1" => "page=1"
"""
s = self.parts[QUERY].items()
s = dict((bytestring(k), bytestring(v if v is not None else "")) for k, v in s)
s = urllib.urlencode(s)
return s
def __getattr__(self, k):
if k in self.__dict__ : return self.__dict__[k]
if k in self.parts : return self.__dict__["_parts"][k]
raise AttributeError, "'URL' object has no attribute '%s'" % k
def __setattr__(self, k, v):
if k in self.__dict__ : self.__dict__[k] = u(v); return
if k == "string" : self._set_string(v); return
if k == "query" : self.parts[k] = v; return
if k in self.parts : self.__dict__["_parts"][k] = u(v); return
raise AttributeError, "'URL' object has no attribute '%s'" % k
def open(self, timeout=10, proxy=None, user_agent=USER_AGENT, referrer=REFERRER, authentication=None):
""" Returns a connection to the url from which data can be retrieved with connection.read().
When the timeout amount of seconds is exceeded, raises a URLTimeout.
When an error occurs, raises a URLError (e.g. HTTP404NotFound).
"""
url = self.string
# Use basic urllib.urlopen() instead of urllib2.urlopen() for local files.
if os.path.exists(url):
return urllib.urlopen(url)
# Get the query string as a separate parameter if method=POST.
post = self.method == POST and self.querystring or None
socket.setdefaulttimeout(timeout)
if proxy:
proxy = urllib2.ProxyHandler({proxy[1]: proxy[0]})
proxy = urllib2.build_opener(proxy, urllib2.HTTPHandler)
urllib2.install_opener(proxy)
try:
request = urllib2.Request(bytestring(url), post, {
"User-Agent": user_agent,
"Referer": referrer
})
# Basic authentication is established with authentication=(username, password).
if authentication is not None:
request.add_header("Authorization", "Basic %s" %
base64.encodestring('%s:%s' % authentication))
return urllib2.urlopen(request)
except urllib2.HTTPError, e:
if e.code == 301: raise HTTP301Redirect
if e.code == 400: raise HTTP400BadRequest
if e.code == 401: raise HTTP401Authentication
if e.code == 403: raise HTTP403Forbidden
if e.code == 404: raise HTTP404NotFound
if e.code == 420: raise HTTP420Error
if e.code == 500: raise HTTP500InternalServerError
raise HTTPError
except socket.timeout:
raise URLTimeout
except urllib2.URLError, e:
if e.reason == "timed out" \
or e.reason[0] in (36, "timed out"):
raise URLTimeout
raise URLError, e.reason
except ValueError, e:
raise URLError, e
def download(self, timeout=10, cached=True, throttle=0, proxy=None, user_agent=USER_AGENT, referrer=REFERRER, authentication=None, unicode=False, **kwargs):
""" Downloads the content at the given URL (by default it will be cached locally).
Unless unicode=False, the content is returned as a unicode string.
"""
# Filter OAuth parameters from cache id (they will be unique for each request).
if self._parts is None and self.method == GET and "oauth_" not in self._string:
id = self._string
else:
id = repr(self.parts)
id = re.sub("u{0,1}'oauth_.*?': u{0,1}'.*?', ", "", id)
# Keep a separate cache of unicode and raw download for same URL.
if unicode is True:
id = "u" + id
if cached and id in cache:
if isinstance(cache, dict): # Not a Cache object.
return cache[id]
if unicode is True:
return cache[id]
if unicode is False:
return cache.get(id, unicode=False)
t = time.time()
# Open a connection with the given settings, read it and (by default) cache the data.
data = self.open(timeout, proxy, user_agent, referrer, authentication).read()
if unicode is True:
data = u(data)
if cached:
cache[id] = data
if throttle:
time.sleep(max(throttle-(time.time()-t), 0))
return data
def read(self, *args):
return self.open().read(*args)
@property
def exists(self, timeout=10):
""" Yields False if the URL generates a HTTP404NotFound error.
"""
try: self.open(timeout)
except HTTP404NotFound:
return False
except HTTPError, URLTimeoutError:
return True
except URLError:
return False
except:
return True
return True
@property
def mimetype(self, timeout=10):
""" Yields the MIME-type of the document at the URL, or None.
MIME is more reliable than simply checking the document extension.
You can then do: URL.mimetype in MIMETYPE_IMAGE.
"""
try:
return self.headers["content-type"].split(";")[0]
except KeyError:
return None
@property
def headers(self, timeout=10):
""" Yields a dictionary with the HTTP response headers.
"""
if self.__dict__["_headers"] is None:
try:
h = dict(self.open(timeout).info())
except URLError:
h = {}
self.__dict__["_headers"] = h
return self.__dict__["_headers"]
@property
def redirect(self, timeout=10):
""" Yields the redirected URL, or None.
"""
if self.__dict__["_redirect"] is None:
try:
r = self.open(timeout).geturl()
except URLError:
r = None
self.__dict__["_redirect"] = r != self.string and r or ""
return self.__dict__["_redirect"] or None
def __str__(self):
return bytestring(self.string)
def __unicode__(self):
# The string representation includes the query attributes with HTTP GET.
# This gives us the advantage of not having to parse the URL
# when no separate query attributes were given (e.g. all info is in URL._string):
if self._parts is None and self.method == GET:
return self._string
P = self._parts
u = []
if P[PROTOCOL]:
u.append("%s://" % P[PROTOCOL])
if P[USERNAME]:
u.append("%s:%s@" % (P[USERNAME], P[PASSWORD]))
if P[DOMAIN]:
u.append(P[DOMAIN])
if P[PORT]:
u.append(":%s" % P[PORT])
if P[PATH]:
u.append("/%s/" % "/".join(P[PATH]))
if P[PAGE] and len(u) > 0:
u[-1] = u[-1].rstrip("/")
if P[PAGE]:
u.append("/%s" % P[PAGE])
if P[QUERY] and self.method == GET:
u.append("?%s" % self.querystring)
if P[ANCHOR]:
u.append("#%s" % P[ANCHOR])
u = u"".join(u)
u = u.lstrip("/")
return u
def __repr__(self):
return "URL('%s', method='%s')" % (str(self), str(self.method))
def copy(self):
return URL(self.string, self.method, self.query)
def download(url=u"", method=GET, query={}, timeout=10, cached=True, throttle=0, proxy=None, user_agent=USER_AGENT, referrer=REFERRER, authentication=None, unicode=False):
""" Downloads the content at the given URL (by default it will be cached locally).
Unless unicode=False, the content is returned as a unicode string.
"""
return URL(url, method, query).download(timeout, cached, throttle, proxy, user_agent, referrer, authentication, unicode)
#url = URL("http://user:[email protected]:992/animal/bird?species#wings")
#print url.parts
#print url.query
#print url.string
#--- STREAMING URL BUFFER --------------------------------------------------------------------------
def bind(object, method, function):
""" Attaches the function as a method with the given name to the given object.
"""
setattr(object, method, new.instancemethod(function, object))
class Stream(list):
def __init__(self, url, delimiter="\n", **kwargs):
""" Buffered stream of data from a given URL.
"""
self.socket = URL(url).open(**kwargs)
self.buffer = ""
self.delimiter = delimiter
def update(self, bytes=1024):
""" Reads a number of bytes from the stream.
If a delimiter is encountered, calls Stream.parse() on the packet.
"""
packets = []
self.buffer += self.socket.read(bytes)
self.buffer = self.buffer.split(self.delimiter, 1)
while len(self.buffer) > 1:
data = self.buffer[0]
data = self.parse(data)
packets.append(data)
self.buffer = self.buffer[-1]
self.buffer = self.buffer.split(self.delimiter, 1)
self.buffer = self.buffer[-1]
self.extend(packets)
return packets
def parse(self, data):
""" Must be overridden in a subclass.
"""
return data
def clear(self):
list.__init__(self, [])
def stream(url, delimiter="\n", parse=lambda data: data, **kwargs):
""" Returns a new Stream with the given parse method.
"""
stream = Stream(url, delimiter, **kwargs)
bind(stream, "parse", lambda stream, data: parse(data))
return stream
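# Example (a sketch, with a hypothetical streaming URL): each call to update()
# reads more bytes and returns the newly completed, parsed packets.
#
#     s = stream("http://example.com/live.json", delimiter="\n",
#                parse=lambda data: json.loads(data))
#     packets = s.update(bytes=1024)   # parsed packets are also appended to s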
#--- FIND URLs -------------------------------------------------------------------------------------
RE_URL_PUNCTUATION = ("\"'{(>", "\"'.,;)}")
RE_URL_HEAD = r"[%s|\[|\s]" % "|".join(RE_URL_PUNCTUATION[0]) # Preceded by space, parenthesis or HTML tag.
RE_URL_TAIL = r"[%s|\]]*[\s|\<]" % "|".join(RE_URL_PUNCTUATION[1]) # Followed by space, punctuation or HTML tag.
RE_URL1 = r"(https?://.*?)" + RE_URL_TAIL # Starts with http:// or https://
RE_URL2 = RE_URL_HEAD + r"(www\..*?\..*?)" + RE_URL_TAIL # Starts with www.
RE_URL3 = RE_URL_HEAD + r"([\w|-]*?\.(com|net|org))" + RE_URL_TAIL # Ends with .com, .net, .org
RE_URL1, RE_URL2, RE_URL3 = (
re.compile(RE_URL1, re.I),
re.compile(RE_URL2, re.I),
re.compile(RE_URL3, re.I))
def find_urls(string, unique=True):
""" Returns a list of URLs parsed from the string.
Works on http://, https://, www. links or domain names ending in .com, .org, .net.
Links can be preceded by leading punctuation (open parens)
and followed by trailing punctuation (period, comma, close parens).
"""
string = u(string)
string = string.replace(u"\u2024", ".")
string = string.replace(" ", " ")
matches = []
for p in (RE_URL1, RE_URL2, RE_URL3):
for m in p.finditer(" %s " % string):
s = m.group(1)
s = s.split("\">")[0].split("'>")[0] # google.com">Google => google.com
if not unique or s not in matches:
matches.append(s)
return matches
links = find_urls
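# For example (illustrative, not from the original module):
#     find_urls("Visit http://www.clips.ua.ac.be.") => ['http://www.clips.ua.ac.be']
# Trailing punctuation is stripped; with unique=False duplicates are kept.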
RE_EMAIL = re.compile(r"[\w\-\.\+]+@(\w[\w\-]+\.)+[\w\-]+") # [email protected]
def find_email(string, unique=True):
""" Returns a list of e-mail addresses parsed from the string.
"""
string = u(string).replace(u"\u2024", ".")
matches = []
for m in RE_EMAIL.finditer(string):
s = m.group(0)
if not unique or s not in matches:
matches.append(s)
return matches
def find_between(a, b, string):
""" Returns a list of substrings between a and b in the given string.
"""
p = "%s(.*?)%s" % (a, b)
p = re.compile(p, re.DOTALL | re.I)
return [m for m in p.findall(string)]
#### PLAIN TEXT ####################################################################################
BLOCK = [
"title", "h1", "h2", "h3", "h4", "h5", "h6", "p",
"center", "blockquote", "div", "table", "ul", "ol", "pre", "code", "form"
]
SELF_CLOSING = ["br", "hr", "img"]
# Element tag replacements for a stripped version of HTML source with strip_tags().
# Block-level elements are followed by linebreaks,
# list items are preceded by an asterisk ("*").
LIST_ITEM = "*"
blocks = dict.fromkeys(BLOCK+["br", "tr", "td"], ("", "\n\n"))
blocks.update({
"li": ("%s " % LIST_ITEM, "\n"),
"img": ("", ""),
"br": ("", "\n"),
"th": ("", "\n"),
"tr": ("", "\n"),
"td": ("", "\t"),
})
class HTMLParser(sgmllib.SGMLParser):
def __init__(self):
sgmllib.SGMLParser.__init__(self)
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def unknown_starttag(self, tag, attrs):
self.handle_starttag(tag, attrs)
def unknown_endtag(self, tag):
self.handle_endtag(tag)
def clean(self, html):
html = decode_utf8(html)
html = html.replace("/>", " />")
html = html.replace(" />", " />")
html = html.replace("<!", "<!")
html = html.replace("<!DOCTYPE", "<!DOCTYPE")
html = html.replace("<!doctype", "<!doctype")
html = html.replace("<!--", "<!--")
return html
def parse_declaration(self, i):
# We can live without sgmllib's parse_declaration().
try:
return sgmllib.SGMLParser.parse_declaration(self, i)
except sgmllib.SGMLParseError:
return i + 1
def convert_charref(self, name):
# This fixes a bug in older versions of sgmllib when working with Unicode.
# Fix: ASCII ends at 127, not 255
try:
n = int(name)
except ValueError:
return
if not 0 <= n <= 127:
return
return chr(n)
class HTMLTagstripper(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
def strip(self, html, exclude=[], replace=blocks):
""" Returns the HTML string with all element tags (e.g. <p>) removed.
- exclude : a list of tags to keep. Element attributes are stripped.
To preserve attributes a dict of (tag name, [attribute])-items can be given.
- replace : a dictionary of (tag name, (replace_before, replace_after))-items.
By default, block-level elements are separated with linebreaks.
"""
if html is None:
return None
self._exclude = isinstance(exclude, dict) and exclude or dict.fromkeys(exclude, [])
self._replace = replace
self._data = []
self.feed(self.clean(html))
self.close()
self.reset()
return "".join(self._data)
def clean(self, html):
# Escape all entities (just strip tags).
        return HTMLParser.clean(self, html).replace("&", "&amp;")
def handle_starttag(self, tag, attributes):
if tag in self._exclude:
# Create the tag attribute string,
# including attributes defined in the HTMLTagStripper._exclude dict.
a = len(self._exclude[tag]) > 0 and attributes or []
a = ["%s=\"%s\"" % (k,v) for k, v in a if k in self._exclude[tag]]
a = (" "+" ".join(a)).rstrip()
self._data.append("<%s%s>" % (tag, a))
if tag in self._replace:
self._data.append(self._replace[tag][0])
if tag in self._replace and tag in SELF_CLOSING:
self._data.append(self._replace[tag][1])
def handle_endtag(self, tag):
if tag in self._exclude and self._data and self._data[-1].startswith("<"+tag):
# Never keep empty elements (e.g. <a></a>).
self._data.pop(-1); return
if tag in self._exclude:
self._data.append("</%s>" % tag)
if tag in self._replace:
self._data.append(self._replace[tag][1])
def handle_data(self, data):
self._data.append(data.strip("\n\t"))
def handle_comment(self, comment):
if "comment" in self._exclude or \
"!--" in self._exclude:
self._data.append("<!--%s-->" % comment)
# As a function:
strip_tags = HTMLTagstripper().strip
def strip_element(string, tag, attributes=""):
""" Removes all elements with the given tagname and attributes from the string.
Open and close tags are kept in balance.
No HTML parser is used: strip_element(s, "a", "href='foo' class='bar'")
matches "<a href='foo' class='bar'" but not "<a class='bar' href='foo'".
"""
s = string.lower() # Case-insensitive.
t = tag.strip("</>")
a = (" " + attributes.lower().strip()).rstrip()
i = 0
j = 0
while j >= 0:
i = s.find("<%s%s" % (t, a), i)
j = s.find("</%s>" % t, i+1)
opened, closed = s[i:j].count("<%s" % t), 1
while opened > closed and j >= 0:
k = s.find("</%s>" % t, j+1)
opened += s[j:k].count("<%s" % t)
closed += 1
j = k
if i < 0: return string
if j < 0: return string[:i]
string = string[:i] + string[j+len(t)+3:]; s=string.lower()
return string
def strip_between(a, b, string):
""" Removes anything between (and including) string a and b inside the given string.
"""
p = "%s.*?%s" % (a, b)
p = re.compile(p, re.DOTALL | re.I)
return re.sub(p, "", string)
def strip_javascript(html):
return strip_between("<script.*?>", "</script>", html)
def strip_inline_css(html):
return strip_between("<style.*?>", "</style>", html)
def strip_comments(html):
return strip_between("<!--", "-->", html)
def strip_forms(html):
return strip_between("<form.*?>", "</form>", html)
RE_AMPERSAND = re.compile("\&(?!\#)") # & not followed by #
RE_UNICODE = re.compile(r'&(#?)(x|X?)(\w+);') # &#201;
def encode_entities(string):
""" Encodes HTML entities in the given string ("<" => "<").
For example, to display "<em>hello</em>" in a browser,
we need to pass "<em>hello</em>" (otherwise "hello" in italic is displayed).
"""
if isinstance(string, (str, unicode)):
string = RE_AMPERSAND.sub("&", string)
string = string.replace("<", "<")
string = string.replace(">", ">")
string = string.replace('"', """)
string = string.replace("'", "'")
return string
def decode_entities(string):
""" Decodes HTML entities in the given string ("<" => "<").
"""
# http://snippets.dzone.com/posts/show/4569
def replace_entity(match):
hash, hex, name = match.group(1), match.group(2), match.group(3)
if hash == "#" or name.isdigit():
if hex == '' :
                return unichr(int(name))                 # "&#38;" => "&"
if hex in ("x","X"):
                return unichr(int('0x'+name, 16))        # "&#x26;" => "&"
else:
            cp = htmlentitydefs.name2codepoint.get(name) # "&amp;" => "&"
return cp and unichr(cp) or match.group() # "&foo;" => "&foo;"
if isinstance(string, (str, unicode)):
return RE_UNICODE.subn(replace_entity, string)[0]
return string
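# For example (illustrative, not from the original module):
#     encode_entities("<em>hello</em>") => "&lt;em&gt;hello&lt;/em&gt;"
#     decode_entities("&lt;em&gt;")     => "<em>"
#     decode_entities("&#38;")          => "&"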
def encode_url(string):
return urllib.quote_plus(bytestring(string))
def decode_url(string):
return urllib.unquote_plus(string) # "black/white" => "black%2Fwhite".
RE_SPACES = re.compile("( |\xa0)+", re.M) # Matches one or more spaces.
RE_TABS = re.compile(r"\t+", re.M) # Matches one or more tabs.
def collapse_spaces(string, indentation=False, replace=" "):
""" Returns a string with consecutive spaces collapsed to a single space.
Whitespace on empty lines and at the end of each line is removed.
With indentation=True, retains leading whitespace on each line.
"""
p = []
for x in string.splitlines():
n = indentation and len(x) - len(x.lstrip()) or 0
p.append(x[:n] + RE_SPACES.sub(replace, x[n:]).strip())
return "\n".join(p)
def collapse_tabs(string, indentation=False, replace=" "):
""" Returns a string with (consecutive) tabs replaced by a single space.
Whitespace on empty lines and at the end of each line is removed.
With indentation=True, retains leading whitespace on each line.
"""
p = []
for x in string.splitlines():
n = indentation and len(x) - len(x.lstrip()) or 0
p.append(x[:n] + RE_TABS.sub(replace, x[n:]).strip())
return "\n".join(p)
def collapse_linebreaks(string, threshold=1):
""" Returns a string with consecutive linebreaks collapsed to at most the given threshold.
Whitespace on empty lines and at the end of each line is removed.
"""
n = "\n" * threshold
p = [s.rstrip() for s in string.splitlines()]
string = "\n".join(p)
string = re.sub(n+r"+", n, string)
return string
def plaintext(html, keep=[], replace=blocks, linebreaks=2, indentation=False):
""" Returns a string with all HTML tags removed.
Content inside HTML comments, the <style> tag and the <script> tags is removed.
- keep : a list of tags to keep. Element attributes are stripped.
To preserve attributes a dict of (tag name, [attribute])-items can be given.
- replace : a dictionary of (tag name, (replace_before, replace_after))-items.
By default, block-level elements are followed by linebreaks.
- linebreaks : the maximum amount of consecutive linebreaks,
- indentation : keep left line indentation (tabs and spaces)?
"""
if not keep.__contains__("script"):
html = strip_javascript(html)
if not keep.__contains__("style"):
html = strip_inline_css(html)
if not keep.__contains__("form"):
html = strip_forms(html)
if not keep.__contains__("comment") and \
not keep.__contains__("!--"):
html = strip_comments(html)
html = html.replace("\r", "\n")
html = strip_tags(html, exclude=keep, replace=replace)
html = decode_entities(html)
html = collapse_spaces(html, indentation)
html = collapse_tabs(html, indentation)
html = collapse_linebreaks(html, linebreaks)
html = html.strip()
return html
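# For example (illustrative, not from the original module):
#     plaintext("<p>Hello <b>world</b>!</p>") => u"Hello world!"
# Block-level elements such as <p> are followed by blank lines when more
# content comes after them; inline tags are simply dropped.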
#### SEARCH ENGINE #################################################################################
SEARCH = "search" # Query for pages (i.e. links to websites).
IMAGE = "image" # Query for images.
NEWS = "news" # Query for news items.
TINY = "tiny" # Image size around 100x100.
SMALL = "small" # Image size around 200x200.
MEDIUM = "medium" # Image size around 500x500.
LARGE = "large" # Image size around 1000x1000.
RELEVANCY = "relevancy" # Sort results by most relevant.
LATEST = "latest" # Sort results by most recent.
class Result(dict):
def __init__(self, url):
""" An item in a list of results returned by SearchEngine.search().
All dictionary entries are available as unicode string attributes.
- url : the URL of the referred web content,
- title : the title of the content at the URL,
- text : the content text,
- language: the content language,
- author : for news items and images, the author,
- date : for news items, the publication date.
"""
dict.__init__(self)
self.url = url
@property
def description(self):
return self.text # Backwards compatibility.
def download(self, *args, **kwargs):
""" Download the content at the given URL.
By default it will be cached - see URL.download().
"""
return URL(self.url).download(*args, **kwargs)
def __getattr__(self, k):
return self.get(k, u"")
def __getitem__(self, k):
return self.get(k, u"")
def __setattr__(self, k, v):
dict.__setitem__(self, u(k), v is not None and u(v) or u"") # Store strings as unicode.
def __setitem__(self, k, v):
dict.__setitem__(self, u(k), v is not None and u(v) or u"")
def setdefault(self, k, v):
dict.setdefault(self, u(k), u(v))
def update(self, *args, **kwargs):
map = dict()
map.update(*args, **kwargs)
dict.update(self, [(u(k), u(v)) for k, v in map.items()])
def __repr__(self):
return "Result(url=%s)" % repr(self.url)
class Results(list):
def __init__(self, source=None, query=None, type=SEARCH, total=0):
""" A list of results returned from SearchEngine.search().
- source: the service that yields the results (e.g. GOOGLE, TWITTER).
- query : the query that yields the results.
- type : the query type (SEARCH, IMAGE, NEWS).
- total : the total result count.
This is not the length of the list, but the total number of matches for the given query.
"""
self.source = source
self.query = query
self.type = type
self.total = total
class SearchEngine:
def __init__(self, license=None, throttle=1.0, language=None):
""" A base class for a web service.
- license : license key for the API,
- throttle : delay between requests (avoid hammering the server).
Inherited by: Google, Yahoo, Bing, Twitter, Wikipedia, Flickr.
"""
self.license = license
self.throttle = throttle # Amount of sleep time after executing a query.
self.language = language # Result.language restriction (e.g., "en").
self.format = lambda x: x # Formatter applied to each attribute of each Result.
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
return Results(source=None, query=query, type=type)
class SearchEngineError(HTTPError):
pass
class SearchEngineTypeError(SearchEngineError):
pass # Raised when an unknown type is passed to SearchEngine.search().
class SearchEngineLimitError(SearchEngineError):
pass # Raised when the query limit for a license is reached.
#--- GOOGLE ----------------------------------------------------------------------------------------
# Google Custom Search is a paid service.
# https://code.google.com/apis/console/
# http://code.google.com/apis/customsearch/v1/overview.html
GOOGLE = "https://www.googleapis.com/customsearch/v1?"
GOOGLE_LICENSE = api.license["Google"]
GOOGLE_CUSTOM_SEARCH_ENGINE = "000579440470800426354:_4qo2s0ijsi"
# Search results can start with: "Jul 29, 2007 ...",
# which is the date of the page parsed by Google from the content.
RE_GOOGLE_DATE = re.compile("^([A-Z][a-z]{2} [0-9]{1,2}, [0-9]{4}) {0,1}...")
class Google(SearchEngine):
def __init__(self, license=None, throttle=0.5, language=None):
SearchEngine.__init__(self, license or GOOGLE_LICENSE, throttle, language)
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" Returns a list of results from Google for the given query.
- type : SEARCH,
- start: maximum 100 results => start 1-10 with count=10,
- count: maximum 10,
There is a daily limit of 10,000 queries. Google Custom Search is a paid service.
"""
if type != SEARCH:
raise SearchEngineTypeError
if not query or count < 1 or start < 1 or start > (100 / count):
return Results(GOOGLE, query, type)
# 1) Create request URL.
url = URL(GOOGLE, query={
"key": self.license or GOOGLE_LICENSE,
"cx": GOOGLE_CUSTOM_SEARCH_ENGINE,
"q": query,
"start": 1 + (start-1) * count,
"num": min(count, 10),
"alt": "json"
})
# 2) Restrict language.
if self.language is not None:
url.query["lr"] = "lang_" + self.language
# 3) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
data = url.download(cached=cached, **kwargs)
data = json.loads(data)
if data.get("error", {}).get("code") == 403:
raise SearchEngineLimitError
results = Results(GOOGLE, query, type)
results.total = int(data.get("queries", {}).get("request", [{}])[0].get("totalResults") or 0)
for x in data.get("items", []):
r = Result(url=None)
r.url = self.format(x.get("link"))
r.title = self.format(x.get("title"))
r.text = self.format(x.get("htmlSnippet").replace("<br> ","").replace("<b>...</b>", "..."))
r.language = self.language or ""
r.date = ""
if not r.date:
# Google Search results can start with a date (parsed from the content):
m = RE_GOOGLE_DATE.match(r.text)
if m:
r.date = m.group(1)
r.text = "..." + r.text[len(m.group(0)):]
results.append(r)
return results
def translate(self, string, input="en", output="fr", **kwargs):
""" Returns the translation of the given string in the desired output language.
Google Translate is a paid service, license without billing raises HTTP401Authentication.
"""
url = URL("https://www.googleapis.com/language/translate/v2?", method=GET, query={
"key": GOOGLE_LICENSE,
"q": string,
"source": input,
"target": output
})
kwargs.setdefault("cached", False)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = url.download(**kwargs)
except HTTP403Forbidden:
raise HTTP401Authentication, "Google translate API is a paid service"
data = json.loads(data)
data = data.get("data", {}).get("translations", [{}])[0].get("translatedText", "")
data = decode_entities(data)
return u(data)
def identify(self, string, **kwargs):
""" Returns a (language, confidence)-tuple for the given string.
Google Translate is a paid service, license without billing raises HTTP401Authentication.
"""
url = URL("https://www.googleapis.com/language/translate/v2/detect?", method=GET, query={
"key": GOOGLE_LICENSE,
"q": string[:1000]
})
kwargs.setdefault("cached", False)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = url.download(**kwargs)
except HTTP403Forbidden:
raise HTTP401Authentication, "Google translate API is a paid service"
data = json.loads(data)
data = data.get("data", {}).get("detections", [[{}]])[0][0]
data = u(data.get("language")), float(data.get("confidence"))
return data
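# A minimal usage sketch (kept commented out): both calls below hit paid Google APIs
# and require a valid license key with billing enabled; the query strings are placeholders.
#engine = Google(license=None, language="en")
#for result in engine.search("web mining", count=10, cached=False):
#    print result.url
#    print result.title
#    print plaintext(result.text)
#    print
#print Google().translate("de kat zit op de mat", input="nl", output="en")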
#--- YAHOO -----------------------------------------------------------------------------------------
# Yahoo BOSS is a paid service.
# http://developer.yahoo.com/search/
YAHOO = "http://yboss.yahooapis.com/ysearch/"
YAHOO_LICENSE = api.license["Yahoo"]
class Yahoo(SearchEngine):
def __init__(self, license=None, throttle=0.5, language=None):
SearchEngine.__init__(self, license or YAHOO_LICENSE, throttle, language)
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" Returns a list of results from Yahoo for the given query.
- type : SEARCH, IMAGE or NEWS,
- start: maximum 1000 results => start 1-100 with count=10, 1000/count,
- count: maximum 50, or 35 for images.
There is no daily limit, however Yahoo BOSS is a paid service.
"""
if type not in (SEARCH, IMAGE, NEWS):
raise SearchEngineTypeError
if type == SEARCH:
url = YAHOO + "web"
if type == IMAGE:
url = YAHOO + "images"
if type == NEWS:
url = YAHOO + "news"
if not query or count < 1 or start < 1 or start > 1000 / count:
return Results(YAHOO, query, type)
# 1) Create request URL.
url = URL(url, method=GET, query={
"q": encode_url(query),
"start": 1 + (start-1) * count,
"count": min(count, type==IMAGE and 35 or 50),
"format": "json"
})
# 2) Restrict language.
if self.language is not None:
market = locale.market(self.language)
if market:
url.query["market"] = market.lower()
# 3) BOSS OAuth authentication.
url.query.update({
"oauth_version": "1.0",
"oauth_nonce": oauth.nonce(),
"oauth_timestamp": oauth.timestamp(),
"oauth_consumer_key": self.license[0],
"oauth_signature_method": "HMAC-SHA1"
})
url.query["oauth_signature"] = oauth.sign(url.string.split("?")[0], url.query, method=GET, secret=self.license[1])
# 4) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = url.download(cached=cached, **kwargs)
except HTTP401Authentication:
raise HTTP401Authentication, "Yahoo %s API is a paid service" % type
except HTTP403Forbidden:
raise SearchEngineLimitError
data = json.loads(data)
data = data.get("bossresponse") or {}
data = data.get({SEARCH:"web", IMAGE:"images", NEWS:"news"}[type], {})
results = Results(YAHOO, query, type)
results.total = int(data.get("totalresults") or 0)
for x in data.get("results", []):
r = Result(url=None)
r.url = self.format(x.get("url", x.get("clickurl")))
r.title = self.format(x.get("title"))
r.text = self.format(x.get("abstract"))
r.date = self.format(x.get("date"))
r.author = self.format(x.get("source"))
r.language = self.format(x.get("language") and \
x.get("language").split(" ")[0] or self.language or "")
results.append(r)
return results
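# A minimal usage sketch (kept commented out): Yahoo BOSS expects the license to be a
# (consumer key, consumer secret)-tuple for OAuth; the query is a placeholder.
#for result in Yahoo().search("holy grail", type=SEARCH, count=10):
#    print result.url
#    print result.title
#    print plaintext(result.text)
#    print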
#--- BING ------------------------------------------------------------------------------------------
# https://datamarket.azure.com/dataset/5BA839F1-12CE-4CCE-BF57-A49D98D29A44
# https://datamarket.azure.com/account/info
BING = "https://api.datamarket.azure.com/Bing/Search/"
BING_LICENSE = api.license["Bing"]
class Bing(SearchEngine):
def __init__(self, license=None, throttle=0.5, language=None):
SearchEngine.__init__(self, license or BING_LICENSE, throttle, language)
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" Returns a list of results from Bing for the given query.
- type : SEARCH, IMAGE or NEWS,
- start: maximum 1000 results => start 1-100 with count=10, 1000/count,
- count: maximum 50, or 15 for news,
- size : for images, either SMALL, MEDIUM or LARGE.
There is no daily query limit.
"""
if type not in (SEARCH, IMAGE, NEWS):
raise SearchEngineTypeError
if type == SEARCH:
src = "Web"
if type == IMAGE:
src = "Image"
if type == NEWS:
src = "News"
if not query or count < 1 or start < 1 or start > 1000 / count:
return Results(BING + src + "?", query, type)
# 1) Construct request URL.
url = URL(BING + "Composite", method=GET, query={
"Sources": "'" + src.lower() + "'",
"Query": "'" + query + "'",
"$skip": 1 + (start-1) * count,
"$top": min(count, type==NEWS and 15 or 50),
"$format": "json",
})
# 2) Restrict image size.
if size in (TINY, SMALL, MEDIUM, LARGE):
url.query["ImageFilters"] = {
TINY: "'Size:Small'",
SMALL: "'Size:Small'",
MEDIUM: "'Size:Medium'",
LARGE: "'Size:Large'" }[size]
# 3) Restrict language.
if type in (SEARCH, IMAGE) and self.language is not None:
url.query["Query"] = url.query["Query"][:-1] + " language: %s'" % self.language
#if self.language is not None:
# market = locale.market(self.language)
# if market:
# url.query["market"] = market
# 4) Parse JSON response.
kwargs["authentication"] = ("", self.license)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = url.download(cached=cached, **kwargs)
except HTTP401Authentication:
raise HTTP401Authentication, "Bing %s API is a paid service" % type
data = json.loads(data)
data = data.get("d", {})
data = data.get("results", [{}])[0]
results = Results(BING, query, type)
results.total = int(data.get(src+"Total", 0))
for x in data.get(src, []):
r = Result(url=None)
r.url = self.format(x.get("MediaUrl", x.get("Url")))
r.title = self.format(x.get("Title"))
r.text = self.format(x.get("Description", x.get("Snippet")))
r.language = self.language or ""
r.date = self.format(x.get("DateTime", x.get("Date")))
r.author = self.format(x.get("Source"))
results.append(r)
return results
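# A minimal usage sketch (kept commented out): requires a Bing Azure Marketplace
# license key; the query is a placeholder.
#for result in Bing().search("kittens", type=IMAGE, count=10, size=MEDIUM):
#    print result.url
#    print result.title
#    print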
#--- TWITTER ---------------------------------------------------------------------------------------
# http://apiwiki.twitter.com/
TWITTER = "http://search.twitter.com/"
TWITTER_STREAM = "https://stream.twitter.com/1/statuses/filter.json"
TWITTER_STATUS = "https://twitter.com/%s/status/%s"
TWITTER_LICENSE = api.license["Twitter"]
TWITTER_HASHTAG = re.compile(r"(\s|^)(#[a-z0-9_\-]+)", re.I) # Word starts with "#".
TWITTER_RETWEET = re.compile(r"(\s|^RT )(@[a-z0-9_\-]+)", re.I) # Word starts with "RT @".
class Twitter(SearchEngine):
def __init__(self, license=None, throttle=0.5, language=None):
SearchEngine.__init__(self, license or TWITTER_LICENSE, throttle, language)
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=False, **kwargs):
""" Returns a list of results from Twitter for the given query.
- type : SEARCH or TRENDS,
- start: maximum 1500 results (10 for trends) => start 1-15 with count=100, 1500/count,
- count: maximum 100, or 10 for trends.
There is an hourly limit of 150+ queries (actual amount undisclosed).
"""
if type != SEARCH:
raise SearchEngineTypeError
if not query or count < 1 or start < 1 or start > 1500 / count:
return Results(TWITTER, query, type)
# 1) Construct request URL.
url = URL(TWITTER + "search.json?", method=GET)
url.query = {
"q": query,
"page": start,
"rpp": min(count, 100)
}
if "geo" in kwargs:
# Filter by location with geo=(latitude, longitude, radius).
# It can also be a (latitude, longitude)-tuple with default radius "10km".
url.query["geocode"] = ",".join((map(str, kwargs.pop("geo")) + ["10km"])[:3])
# 2) Restrict language.
url.query["lang"] = self.language or ""
# 3) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = URL(url).download(cached=cached, **kwargs)
except HTTP420Error:
raise SearchEngineLimitError
data = json.loads(data)
results = Results(TWITTER, query, type)
results.total = None
for x in data.get("results", data.get("trends", [])):
r = Result(url=None)
r.url = self.format(TWITTER_STATUS % (x.get("from_user"), x.get("id_str")))
r.text = self.format(x.get("text"))
r.date = self.format(x.get("created_at", data.get("as_of")))
r.author = self.format(x.get("from_user"))
r.profile = self.format(x.get("profile_image_url")) # Profile picture URL.
r.language = self.format(x.get("iso_language_code"))
results.append(r)
return results
def trends(self, **kwargs):
""" Returns a list with 10 trending topics on Twitter.
"""
url = URL("https://api.twitter.com/1/trends/1.json")
kwargs.setdefault("cached", False)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
data = url.download(**kwargs)
data = json.loads(data)
return [u(x.get("name")) for x in data[0].get("trends", [])]
def stream(self, query):
""" Returns a live stream of Result objects for the given query.
"""
url = URL(TWITTER_STREAM)
url.query.update({
"track": query,
"oauth_version": "1.0",
"oauth_nonce": oauth.nonce(),
"oauth_timestamp": oauth.timestamp(),
"oauth_consumer_key": self.license[0],
"oauth_token": self.license[2][0],
"oauth_signature_method": "HMAC-SHA1"
})
url.query["oauth_signature"] = oauth.sign(url.string.split("?")[0], url.query, GET,
self.license[1],
self.license[2][1])
return TwitterStream(url, delimiter="\n", format=self.format)
class TwitterStream(Stream):
def __init__(self, socket, delimiter="\n", format=lambda s: s):
Stream.__init__(self, socket, delimiter)
self.format = format
def parse(self, data):
""" TwitterStream.queue will populate with Result objects as
TwitterStream.update() is called iteratively.
"""
x = json.loads(data)
r = Result(url=None)
r.url = self.format(TWITTER_STATUS % (x.get("user", {}).get("screen_name"), x.get("id_str")))
r.text = self.format(x.get("text"))
r.date = self.format(x.get("created_at"))
r.author = self.format(x.get("user", {}).get("screen_name"))
r.profile = self.format(x.get("profile_image_url"))
r.language = self.format(x.get("iso_language_code"))
return r
def author(name):
""" Returns a Twitter query-by-author-name that can be passed to Twitter.search().
For example: Twitter().search(author("tom_de_smedt"))
"""
return "from:%s" % name
def hashtags(string):
""" Returns a list of hashtags (words starting with a #hash) from a tweet.
"""
return [b for a, b in TWITTER_HASHTAG.findall(string)]
def retweets(string):
""" Returns a list of retweets (words starting with a RT @author) from a tweet.
"""
return [b for a, b in TWITTER_RETWEET.findall(string)]
#stream = Twitter().stream("cat")
#for i in range(10):
# stream.update()
# for tweet in reversed(stream):
# print tweet.text
# print tweet.url
# print
#stream.clear()
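# A minimal search sketch (kept commented out), subject to the hourly rate limit;
# the query is a placeholder.
#for tweet in Twitter().search("#python", count=10):
#    print tweet.author
#    print tweet.text
#    print hashtags(tweet.text)
#    print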
#--- MEDIAWIKI -------------------------------------------------------------------------------------
# http://en.wikipedia.org/w/api.php
WIKIA = "http://wikia.com"
WIKIPEDIA = "http://wikipedia.com"
WIKIPEDIA_LICENSE = api.license["Wikipedia"]
MEDIAWIKI_LICENSE = None
MEDIAWIKI = "http://{SUBDOMAIN}.{DOMAIN}{API}"
# Pattern for meta links (e.g. Special:RecentChanges).
# http://en.wikipedia.org/wiki/Main_namespace
MEDIAWIKI_NAMESPACE = ["Main", "User", "Wikipedia", "File", "MediaWiki", "Template", "Help", "Category", "Portal", "Book"]
MEDIAWIKI_NAMESPACE += [s+" talk" for s in MEDIAWIKI_NAMESPACE] + ["Talk", "Special", "Media"]
MEDIAWIKI_NAMESPACE += ["WP", "WT", "MOS", "C", "CAT", "Cat", "P", "T", "H", "MP", "MoS", "Mos"]
_mediawiki_namespace = re.compile(r"^"+"|".join(MEDIAWIKI_NAMESPACE)+":", re.I)
# Pattern to identify disambiguation pages.
MEDIAWIKI_DISAMBIGUATION = "<a href=\"/wiki/Help:Disambiguation\" title=\"Help:Disambiguation\">disambiguation</a> page"
# Pattern to identify references, e.g. [12]
MEDIAWIKI_REFERENCE = r"\s*\[[0-9]{1,3}\]"
class MediaWiki(SearchEngine):
def __init__(self, license=None, throttle=5.0, language="en"):
SearchEngine.__init__(self, license or MEDIAWIKI_LICENSE, throttle, language)
@property
def _url(self):
# Must be overridden in a subclass; see Wikia and Wikipedia.
return None
@property
def MediaWikiArticle(self):
return MediaWikiArticle
@property
def MediaWikiSection(self):
return MediaWikiSection
@property
def MediaWikiTable(self):
return MediaWikiTable
def __iter__(self):
return self.all()
def all(self, **kwargs):
""" Returns an iterator over all MediaWikiArticle objects.
Optional parameters can include those passed to
MediaWiki.list(), MediaWiki.search() and URL.download().
"""
for title in self.list(**kwargs):
yield self.search(title, **kwargs)
articles = all
def list(self, namespace=0, start=None, count=100, cached=True, **kwargs):
""" Returns an iterator over all article titles (for a given namespace id).
"""
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
# Fetch article titles (default) or a custom id.
id = kwargs.pop("_id", "title")
# Loop endlessly (= until the last request no longer yields an "apcontinue").
# See: http://www.mediawiki.org/wiki/API:Allpages
while start != -1:
url = URL(self._url, method=GET, query={
"action": "query",
"list": "allpages",
"apnamespace": namespace,
"apfrom": start or "",
"aplimit": min(count, 500),
"apfilterredir": "nonredirects",
"format": "json"
})
data = url.download(cached=cached, **kwargs)
data = json.loads(data)
for x in data.get("query", {}).get("allpages", {}):
if x.get(id):
yield x[id]
start = data.get("query-continue", {}).get("allpages", {})
start = start.get("apcontinue", start.get("apfrom", -1))
raise StopIteration
def search(self, query, type=SEARCH, start=1, count=1, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" Returns a MediaWikiArticle for the given query.
The query is case-sensitive, for example on Wikipedia:
- "tiger" = Panthera tigris,
- "TIGER" = Topologically Integrated Geographic Encoding and Referencing.
"""
if type != SEARCH:
raise SearchEngineTypeError
if count < 1:
return None
# 1) Construct request URL (e.g., Wikipedia for a given language).
url = URL(self._url, method=GET, query={
"action": "parse",
"page": query.replace(" ","_"),
"redirects": 1,
"format": "json"
})
# 2) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("timeout", 30) # Parsing the article takes some time.
kwargs.setdefault("throttle", self.throttle)
data = url.download(cached=cached, **kwargs)
data = json.loads(data)
data = data.get("parse", {})
a = self._parse_article(data, query=query)
a = self._parse_article_sections(a, data)
a = self._parse_article_section_structure(a)
if not a.html or "id=\"noarticletext\"" in a.html:
return None
return a
def _parse_article(self, data, **kwargs):
return self.MediaWikiArticle(
title = plaintext(data.get("displaytitle", data.get("title", ""))),
source = data.get("text", {}).get("*", ""),
disambiguation = data.get("text", {}).get("*", "").find(MEDIAWIKI_DISAMBIGUATION) >= 0,
links = [x["*"] for x in data.get("links", []) if not _mediawiki_namespace.match(x["*"])],
categories = [x["*"] for x in data.get("categories", [])],
external = [x for x in data.get("externallinks", [])],
media = [x for x in data.get("images", [])],
languages = dict([(x["lang"], x["*"]) for x in data.get("langlinks", [])]),
language = self.language,
parser = self, **kwargs)
def _parse_article_sections(self, article, data):
# If "References" is a section in the article,
# the HTML will contain a marker <h*><span class="mw-headline" id="References">.
# http://en.wikipedia.org/wiki/Section_editing
t = article.title
d = 0
i = 0
for x in data.get("sections", {}):
a = x.get("anchor")
if a:
p = r"<h.>\s*.*?\s*<span class=\"mw-headline\" id=\"%s\">" % a
p = re.compile(p)
m = p.search(article.source, i)
if m:
j = m.start()
article.sections.append(self.MediaWikiSection(article,
title = t,
start = i,
stop = j,
level = d))
t = x.get("line", "")
d = int(x.get("level", 2)) - 1
i = j
return article
def _parse_article_section_structure(self, article):
# Sections with higher level are children of previous sections with lower level.
for i, s2 in enumerate(article.sections):
for s1 in reversed(article.sections[:i]):
if s1.level < s2.level:
s2.parent = s1
s1.children.append(s2)
break
return article
class MediaWikiArticle:
def __init__(self, title=u"", source=u"", links=[], categories=[], languages={}, disambiguation=False, **kwargs):
""" A MediaWiki article returned from MediaWiki.search().
MediaWikiArticle.string contains the HTML content.
"""
self.title = title # Article title.
self.source = source # Article HTML content.
self.sections = [] # Article sections.
self.links = links # List of titles of linked articles.
self.categories = categories # List of categories. As links, prepend "Category:".
self.external = [] # List of external links.
self.media = [] # List of linked media (images, sounds, ...)
self.disambiguation = disambiguation # True when the article is a disambiguation page.
self.languages = languages # Dictionary of (language, article)-items, e.g. Cat => ("nl", "Kat")
self.language = kwargs.get("language", "en")
self.parser = kwargs.get("parser", MediaWiki())
for k, v in kwargs.items():
setattr(self, k, v)
def _plaintext(self, string, **kwargs):
""" Strips HTML tags, whitespace and wiki markup from the HTML source, including:
metadata, info box, table of contents, annotations, thumbnails, disambiguation link.
This is called internally from MediaWikiArticle.string.
"""
s = string
s = strip_between("<table class=\"metadata", "</table>", s) # Metadata.
s = strip_between("<table id=\"toc", "</table>", s) # Table of contents.
s = strip_between("<table class=\"infobox", "</table>", s) # Infobox.
s = strip_between("<table class=\"wikitable", "</table>", s) # Table.
s = strip_element(s, "table", "class=\"navbox") # Navbox.
s = strip_between("<div id=\"annotation", "</div>", s) # Annotations.
s = strip_between("<div class=\"dablink", "</div>", s) # Disambiguation message.
s = strip_between("<div class=\"magnify", "</div>", s) # Thumbnails.
s = strip_between("<div class=\"thumbcaption", "</div>", s) # Thumbnail captions.
s = re.sub(r"<img class=\"tex\".*?/>", "[math]", s) # LaTex math images.
s = plaintext(s, **kwargs)
s = re.sub(r"\[edit\]\s*", "", s) # [edit] is language dependent (e.g. nl => "[bewerken]")
s = s.replace("[", " [").replace("  [", " [") # Space before inline references.
return s
def plaintext(self, **kwargs):
return self._plaintext(self.source, **kwargs)
@property
def html(self):
return self.source
@property
def string(self):
return self.plaintext()
def __repr__(self):
return "MediaWikiArticle(title=%s)" % repr(self.title)
class MediaWikiSection:
def __init__(self, article, title=u"", start=0, stop=0, level=1):
""" A (nested) section in the content of a MediaWikiArticle.
"""
self.article = article # MediaWikiArticle the section is part of.
self.parent = None # MediaWikiSection the section is part of.
self.children = [] # MediaWikiSections belonging to this section.
self.title = title # Section title.
self._start = start # Section start index in MediaWikiArticle.string.
self._stop = stop # Section stop index in MediaWikiArticle.string.
self._level = level # Section depth (main title + intro = level 0).
self._tables = None
def plaintext(self, **kwargs):
return self.article._plaintext(self.source, **kwargs)
@property
def source(self):
return self.article.source[self._start:self._stop]
@property
def html(self):
return self.source
@property
def string(self):
return self.plaintext()
@property
def content(self):
# ArticleSection.string, minus the title.
s = self.plaintext()
if s == self.title or s.startswith(self.title+"\n"):
return s[len(self.title):].lstrip()
return s
@property
def tables(self):
""" Yields a list of MediaWikiTable objects in the section.
"""
if self._tables is None:
self._tables = []
b = "<table class=\"wikitable\"", "</table>"
p = self.article._plaintext
f = find_between
for s in f(b[0], b[1], self.source):
t = self.article.parser.MediaWikiTable(self,
title = p((f(r"<caption.*?>", "</caption>", s) + [""])[0]),
source = b[0] + s + b[1]
)
for i, row in enumerate(f(r"<tr", "</tr>", s)):
# 1) Parse <td> and <th> content and format it as plain text.
# 2) Parse <td colspan=""> attribute, duplicate spanning cells.
# 3) For <th> in the first row, update MediaWikiTable.headers.
r1 = f(r"<t[d|h]", r"</t[d|h]>", row)
r1 = (((f(r'colspan="', r'"', v)+[1])[0], v[v.find(">")+1:]) for v in r1)
r1 = ((int(n), v) for n, v in r1)
r2 = []; [[r2.append(p(v)) for j in range(n)] for n, v in r1]
if i == 0 and "</th>" in row:
t.headers = r2
else:
t.rows.append(r2)
self._tables.append(t)
return self._tables
@property
def level(self):
return self._level
depth = level
def __repr__(self):
return "MediaWikiSection(title='%s')" % bytestring(self.title)
class MediaWikiTable:
def __init__(self, section, title=u"", headers=[], rows=[], source=u""):
""" A <table class="wikitable"> in a MediaWikiSection.
"""
self.section = section # MediaWikiSection the table is part of.
self.source = source # Table HTML.
self.title = title # Table title.
self.headers = headers # List of table headers.
self.rows = rows # List of table rows, each a list of cells.
@property
def html(self):
return self.source
def __repr__(self):
return "MediaWikiTable(title='%s')" % bytestring(self.title)
#--- MEDIAWIKI: WIKIPEDIA --------------------------------------------------------------------------
class Wikipedia(MediaWiki):
def __init__(self, license=None, throttle=5.0, language="en"):
""" Mediawiki search engine for http://[language].wikipedia.org.
"""
SearchEngine.__init__(self, license or WIKIPEDIA_LICENSE, throttle, language)
self._subdomain = language
@property
def _url(self):
s = MEDIAWIKI
s = s.replace("{SUBDOMAIN}", self._subdomain)
s = s.replace("{DOMAIN}", "wikipedia.org")
s = s.replace("{API}", '/w/api.php')
return s
@property
def MediaWikiArticle(self):
return WikipediaArticle
@property
def MediaWikiSection(self):
return WikipediaSection
@property
def MediaWikiTable(self):
return WikipediaTable
class WikipediaArticle(MediaWikiArticle):
def download(self, media, **kwargs):
""" Downloads an item from MediaWikiArticle.media and returns the content.
Note: images on Wikipedia can be quite large, and this method uses screen-scraping,
so Wikipedia might not like it that you download media in this way.
To save the media in a file:
data = article.download(media)
open(filename+extension(media),"w").write(data)
"""
url = "http://%s.wikipedia.org/wiki/File:%s" % (self.__dict__.get("language", "en"), media)
if url not in cache:
time.sleep(1)
data = URL(url).download(**kwargs)
data = re.search(r"upload.wikimedia.org/.*?/%s" % media, data)
data = data and URL("http://" + data.group(0)).download(**kwargs) or None
return data
def __repr__(self):
return "WikipediaArticle(title=%s)" % repr(self.title)
class WikipediaSection(MediaWikiSection):
def __repr__(self):
return "WikipediaSection(title='%s')" % bytestring(self.title)
class WikipediaTable(MediaWikiTable):
def __repr__(self):
return "WikipediaTable(title='%s')" % bytestring(self.title)
#article = Wikipedia().search("cat")
#for section in article.sections:
# print " "*(section.level-1) + section.title
#if article.media:
# data = article.download(article.media[2])
# f = open(article.media[2], "w")
# f.write(data)
# f.close()
#
#article = Wikipedia(language="nl").search("borrelnootje")
#print article.string
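# A minimal sketch (kept commented out) of MediaWiki.list(), which iterates over all
# article titles; only the first few titles are printed here since the full set is huge.
#i = 0
#for title in Wikipedia().list(start="A", count=100):
#    print title
#    i += 1
#    if i >= 10:
#        break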
#--- MEDIAWIKI: WIKIA ------------------------------------------------------------------------------
class Wikia(MediaWiki):
def __init__(self, domain="www", license=None, throttle=5.0, language="en"):
""" Mediawiki search engine for http://[domain].wikia.com.
"""
SearchEngine.__init__(self, license or MEDIAWIKI_LICENSE, throttle, language)
self._subdomain = domain
@property
def _url(self):
s = MEDIAWIKI
s = s.replace("{SUBDOMAIN}", self._subdomain)
s = s.replace("{DOMAIN}", "wikia.com")
s = s.replace("{API}", '/api.php')
return s
@property
def MediaWikiArticle(self):
return WikiaArticle
@property
def MediaWikiSection(self):
return WikiaSection
@property
def MediaWikiTable(self):
return WikiaTable
def all(self, **kwargs):
if kwargs.pop("batch", True):
# We can take advantage of Wikia's search API to reduce bandwidth.
# Instead of executing a query to retrieve each article,
# we query for a batch of (10) articles.
iterator = self.list(_id="pageid", **kwargs)
while True:
batch, done = [], False
try:
for i in range(10): batch.append(iterator.next())
except StopIteration:
done = True # No more articles, finish batch and raise StopIteration.
url = URL(self._url.replace("api.php", "wikia.php"), method=GET, query={
"controller": "WikiaSearch",
"method": "getPages",
"ids": '|'.join(str(id) for id in batch),
"format": "json"
})
kwargs.setdefault("unicode", True)
kwargs.setdefault("cached", True)
kwargs["timeout"] = 10 * (1 + len(batch))
data = url.download(**kwargs)
data = json.loads(data)
for x in (data or {}).get("pages", {}).values():
yield WikiaArticle(title=x.get("title", ""), source=x.get("html", ""))
if done:
raise StopIteration
for title in self.list(**kwargs):
yield self.search(title, **kwargs)
class WikiaArticle(MediaWikiArticle):
def __repr__(self):
return "WikiaArticle(title=%s)" % repr(self.title)
class WikiaSection(MediaWikiSection):
def __repr__(self):
return "WikiaSection(title='%s')" % bytestring(self.title)
class WikiaTable(MediaWikiTable):
def __repr__(self):
return "WikiaTable(title='%s')" % bytestring(self.title)
#--- FLICKR ----------------------------------------------------------------------------------------
# http://www.flickr.com/services/api/
FLICKR = "http://api.flickr.com/services/rest/"
FLICKR_LICENSE = api.license["Flickr"]
INTERESTING = "interesting"
class Flickr(SearchEngine):
def __init__(self, license=None, throttle=5.0, language=None):
SearchEngine.__init__(self, license or FLICKR_LICENSE, throttle, language)
def search(self, query, type=IMAGE, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" Returns a list of results from Flickr for the given query.
Retrieving the URL of a result (i.e. image) requires an additional query.
- type : SEARCH, IMAGE,
- start: maximum undefined,
- count: maximum 500,
- sort : RELEVANCY, LATEST or INTERESTING.
There is no daily limit.
"""
if type not in (SEARCH, IMAGE):
raise SearchEngineTypeError
if not query or count < 1 or start < 1 or start > 500/count:
return Results(FLICKR, query, IMAGE)
# 1) Construct request URL.
url = FLICKR+"?"
url = URL(url, method=GET, query={
"api_key": self.license or "",
"method": "flickr.photos.search",
"text": query.replace(" ", "_"),
"page": start,
"per_page": min(count, 500),
"sort": { RELEVANCY: "relevance",
LATEST: "date-posted-desc",
INTERESTING: "interestingness-desc" }.get(sort)
})
if kwargs.get("copyright", True) is False:
# With copyright=False, only returns Public Domain and Creative Commons images.
# http://www.flickr.com/services/api/flickr.photos.licenses.getInfo.html
# 5: "Attribution-ShareAlike License"
# 7: "No known copyright restriction"
url.query["license"] = "5,7"
# 2) Parse XML response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
data = url.download(cached=cached, **kwargs)
data = xml.dom.minidom.parseString(bytestring(data))
results = Results(FLICKR, query, IMAGE)
results.total = int(data.getElementsByTagName("photos")[0].getAttribute("total"))
for x in data.getElementsByTagName("photo"):
r = FlickrResult(url=None)
r.__dict__["_id"] = x.getAttribute("id")
r.__dict__["_size"] = size
r.__dict__["_license"] = self.license
r.__dict__["_throttle"] = self.throttle
r.text = self.format(x.getAttribute("title"))
r.author = self.format(x.getAttribute("owner"))
results.append(r)
return results
class FlickrResult(Result):
@property
def url(self):
# Retrieving the url of a FlickrResult (i.e. image location) requires another query.
# Note: the "Original" size no longer appears in the response,
# so Flickr might not like it if we download it.
url = FLICKR + "?method=flickr.photos.getSizes&photo_id=%s&api_key=%s" % (self._id, self._license)
data = URL(url).download(throttle=self._throttle, unicode=True)
data = xml.dom.minidom.parseString(bytestring(data))
size = { TINY: "Thumbnail",
SMALL: "Small",
MEDIUM: "Medium",
LARGE: "Original" }.get(self._size, "Medium")
for x in data.getElementsByTagName("size"):
if size == x.getAttribute("label"):
return x.getAttribute("source")
if size == "Original":
url = x.getAttribute("source")
url = url[:-len(extension(url))-2] + "_o" + extension(url)
return u(url)
#images = Flickr().search("kitten", count=10, size=SMALL)
#for img in images:
# print bytestring(img.description)
# print img.url
#
#data = img.download()
#f = open("kitten"+extension(img.url), "w")
#f.write(data)
#f.close()
#--- FACEBOOK --------------------------------------------------------------------------------------
# Facebook public status updates.
# https://developers.facebook.com/docs/reference/api/
FACEBOOK = "https://graph.facebook.com/"
FACEBOOK_LICENSE = api.license["Facebook"]
FEED = "feed" # Facebook timeline.
COMMENTS = "comments" # Facebook comments (for a given news feed post).
LIKES = "likes" # Facebook likes (for a given post or comment).
FRIENDS = "friends" # Facebook friends (for a given profile id).
class FacebookResult(Result):
def __repr__(self):
return "Result(id=%s)" % repr(self.id)
class Facebook(SearchEngine):
def __init__(self, license=None, throttle=1.0, language=None):
SearchEngine.__init__(self, license, throttle, language)
@property
def _token(self):
# Yields the "application access token" (stored in api.license["Facebook"]).
# With this license, we can view public content.
# To view more information, we need a "user access token" as license key.
# This token can be retrieved manually from:
# http://www.clips.ua.ac.be/media/pattern-fb.html
# Or parsed from this URL:
# https://graph.facebook.com/oauth/authorize?type=user_agent
# &client_id=332061826907464
# &redirect_uri=http%3A%2F%2Fwww.clips.ua.ac.be/media/pattern-facebook-token.html
# &scope=read_stream,user_birthday,user_likes,user_photos,friends_birthday,friends_likes
# The token is valid for a limited duration.
return URL(FACEBOOK + "oauth/access_token?", query={
"grant_type": "client_credentials",
"client_id": "332061826907464",
"client_secret": "81ff4204e73ecafcd87635a3a3683fbe"
}).download().split("=")[1]
def search(self, query, type=SEARCH, start=1, count=10, cached=False, **kwargs):
""" Returns a list of results from Facebook public status updates for the given query.
- query: string, or Result.id for NEWS and COMMENTS,
- type : SEARCH,
- start: 1,
- count: maximum 100 for SEARCH and NEWS, 1000 for COMMENTS and LIKES.
There is an hourly limit of +-600 queries (actual amount undisclosed).
"""
# Facebook.search(type=SEARCH) returns public posts + author.
# Facebook.search(type=NEWS) returns posts for the given author (id | alias | "me").
# Facebook.search(type=COMMENTS) returns comments for the given post id.
# Facebook.search(type=LIKES) returns authors for the given author, post or comments.
# An author is a Facebook user or other entity (e.g., a product page).
if type not in (SEARCH, NEWS, COMMENTS, LIKES, FRIENDS):
raise SearchEngineTypeError
if type in (SEARCH, NEWS):
max = 100
if type in (COMMENTS, LIKES):
max = 1000
if type in (FRIENDS,):
max = 10000
if not query or start < 1 or count < 1:
return Results(FACEBOOK, query, SEARCH)
if isinstance(query, FacebookResult):
query = query.id
# 1) Construct request URL.
if type == SEARCH:
url = FACEBOOK + type
url = URL(url, method=GET, query={
"q": query,
"type": "post",
"fields": ",".join(("id", "link", "message", "created_time", "from")),
"offset": (start-1) * min(count, max),
"limit": (start-0) * min(count, max),
})
if type in (NEWS, FEED, COMMENTS, LIKES, FRIENDS):
url = FACEBOOK + (u(query) or "me").replace(FACEBOOK, "") + "/" + type.replace("news", "feed")
url = URL(url, method=GET, query={
"access_token": self.license,
"offset": (start-1) * min(count, max),
"limit": (start-0) * min(count, max)
})
# 2) Parse JSON response.
kwargs.setdefault("cached", cached)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = URL(url).download(**kwargs)
except HTTP400BadRequest:
raise HTTP401Authentication
data = json.loads(data)
results = Results(FACEBOOK, query, SEARCH)
results.total = None
for x in data.get("data", []):
r = FacebookResult(url=None)
r.id = self.format(x.get("id"))
r.url = self.format(x.get("link"))
r.text = self.format(x.get("story", x.get("message")))
r.date = self.format(x.get("created_time"))
# Store likes & comments count as int, author as (id, name)-tuple
# (by default Result will store everything as Unicode strings).
s = lambda r, k, v: dict.__setitem__(r, k, v)
s(r, "likes", \
self.format(x.get("like_count", x.get("likes", {}).get("count", 0))) + 0)
s(r, "comments", \
self.format(x.get("comments", {}).get("count", 0)) + 0)
s(r, "author", (
u(self.format(x.get("from", {}).get("id", ""))), \
u(self.format(x.get("from", {}).get("name", "")))))
# Replace Result.text with author name for likes.
if type in (LIKES, FRIENDS):
s(r, "author", (
u(self.format(x.get("id", ""))),
u(self.format(x.get("name", "")))))
r.text = \
self.format(x.get("name"))
# Replace Result.url Facebook URL with object id.
if r.url.startswith("http://www.facebook.com/photo"):
r.url = x.get("picture", r.url)
# Replace Result.url Facebook URL with full-size image.
if r.url.startswith("http://www.facebook.com/") and \
r.url.split("/")[-1].split("?")[0].isdigit():
r.url = r.url.split("/")[-1].split("?")[0].replace("_s", "_b")
results.append(r)
return results
def profile(self, id=None, **kwargs):
""" For the given author id or alias,
returns a (id, name, date of birth, gender, locale)-tuple.
"""
url = FACEBOOK + (u(id or "me")).replace(FACEBOOK, "")
url = URL(url, method=GET, query={"access_token": self.license})
kwargs.setdefault("cached", False)
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
try:
data = URL(url).download(**kwargs)
data = json.loads(data)
except HTTP400BadRequest:
raise HTTP401Authentication
return (
u(data.get("id", "")),
u(data.get("name", "")),
u(data.get("birthday", "")),
u(data.get("gender", "")[:1]),
u(data.get("locale", ""))
)
#license = "" # Generate a license key at: http://www.clips.ua.ac.be/media/pattern-fb.html
#fb = Facebook(license)
#me = fb.profile()[0]
#for r in fb.search(me, type=NEWS, count=10):
# print r.id
# print r.text
# print r.url
# if r.comments > 0:
# print "%s comments:" % r.comments
# print [(r.text, r.author) for r in fb.search(r, type=COMMENTS)]
# if r.likes > 0:
# print "%s likes:" % r.likes
# print [r.author for r in fb.search(r, type=LIKES)]
# print
#--- PRODUCT REVIEWS -------------------------------------------------------------------------------
PRODUCTWIKI = "http://api.productwiki.com/connect/api.aspx"
PRODUCTWIKI_LICENSE = api.license["Products"]
class Products(SearchEngine):
def __init__(self, license=None, throttle=5.0, language=None):
SearchEngine.__init__(self, license or PRODUCTWIKI_LICENSE, throttle, language)
def search(self, query, type=SEARCH, start=1, count=10, sort=RELEVANCY, size=None, cached=True, **kwargs):
""" Returns a list of results from Productwiki for the given query.
Each Result.reviews is a list of (review, score)-items.
- type : SEARCH,
- start: maximum undefined,
- count: 20,
- sort : RELEVANCY.
There is no daily limit.
"""
if type != SEARCH:
raise SearchEngineTypeError
if not query or start < 1 or count < 1:
return Results(PRODUCTWIKI, query, type)
# 1) Construct request URL.
url = PRODUCTWIKI+"?"
url = URL(url, method=GET, query={
"key": self.license or "",
"q": query,
"page" : start,
"op": "search",
"fields": "proscons", # "description,proscons" is heavy.
"format": "json"
})
# 2) Parse JSON response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
data = URL(url).download(cached=cached, **kwargs)
data = json.loads(data)
results = Results(PRODUCTWIKI, query, type)
results.total = None
for x in data.get("products", [])[:count]:
r = Result(url=None)
r.__dict__["title"] = u(x.get("title"))
r.__dict__["text"] = u(x.get("text"))
r.__dict__["reviews"] = []
reviews = x.get("community_review") or {}
for p in reviews.get("pros", []):
r.reviews.append((p.get("text", ""), int(p.get("score")) or +1))
for p in reviews.get("cons", []):
r.reviews.append((p.get("text", ""), int(p.get("score")) or -1))
r.__dict__["score"] = int(sum(score for review, score in r.reviews))
results.append(r)
# Highest score first.
results.sort(key=lambda r: r.score, reverse=True)
return results
#for r in Products().search("tablet"):
# print r.title
# print r.score
# print r.reviews
# print
#--- NEWS FEED -------------------------------------------------------------------------------------
# Based on the Universal Feed Parser by Mark Pilgrim:
# http://www.feedparser.org/
class Newsfeed(SearchEngine):
def __init__(self, license=None, throttle=1.0, language=None):
SearchEngine.__init__(self, license, throttle, language)
def search(self, query, type=NEWS, start=1, count=10, sort=LATEST, size=SMALL, cached=True, **kwargs):
""" Returns a list of results from the given RSS or Atom newsfeed URL.
"""
if type != NEWS:
raise SearchEngineTypeError
if not query or start < 1 or count < 1:
return Results(query, query, NEWS)
# 1) Construct request URL.
# 2) Parse RSS/Atom response.
kwargs.setdefault("unicode", True)
kwargs.setdefault("throttle", self.throttle)
tags = kwargs.pop("tags", [])
data = URL(query).download(cached=cached, **kwargs)
data = feedparser.parse(bytestring(data))
results = Results(query, query, NEWS)
results.total = None
for x in data["entries"][:count]:
s = "\n\n".join([v.get("value") for v in x.get("content", [])]) or x.get("summary")
r = Result(url=None)
r.id = self.format(x.get("id"))
r.url = self.format(x.get("link"))
r.title = self.format(x.get("title"))
r.text = self.format(s)
r.date = self.format(x.get("updated"))
r.author = self.format(x.get("author"))
r.language = self.format(x.get("content") and \
x.get("content")[0].get("language") or \
data.get("language"))
for tag in tags:
# Parse custom tags.
# Newsfeed.search(tags=["dc:identifier"]) => Result.dc_identifier.
tag = tag.replace(":", "_")
r[tag] = self.format(x.get(tag))
results.append(r)
return results
feeds = {
"Nature": "http://feeds.nature.com/nature/rss/current",
"Science": "http://www.sciencemag.org/rss/podcast.xml",
"Herald Tribune": "http://www.iht.com/rss/frontpage.xml",
"TIME": "http://feeds.feedburner.com/time/topstories",
"CNN": "http://rss.cnn.com/rss/edition.rss",
}
#for r in Newsfeed().search(feeds["Nature"]):
# print r.title
# print r.author
# print r.url
# print plaintext(r.text)
# print
#--- QUERY -----------------------------------------------------------------------------------------
def query(string, service=GOOGLE, **kwargs):
""" Returns the list of search query results from the given service.
For service=WIKIPEDIA, this is a single WikipediaArticle or None.
"""
service = service.lower()
if service in (GOOGLE, "google", "g"):
engine = Google
if service in (YAHOO, "yahoo", "y!"):
engine = Yahoo
if service in (BING, "bing"):
engine = Bing
if service in (TWITTER, "twitter"):
engine = Twitter
if service in (FACEBOOK, "facebook", "fb"):
engine = Facebook
if service in (WIKIA, "wikia"):
engine = Wikia
if service in (WIKIPEDIA, "wikipedia", "wp"):
engine = Wikipedia
if service in (FLICKR, "flickr"):
engine = Flickr
try:
kw = {}
for a in ("license", "throttle", "language"):
if a in kwargs:
kw[a] = kwargs.pop(a)
return engine(**kw).search(string, **kwargs)
except UnboundLocalError:
raise SearchEngineError, "unknown search engine '%s'" % service
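# A minimal usage sketch (kept commented out): query() picks the engine from the service
# name and passes license/throttle/language through; the queries are placeholders and the
# Bing call still needs a valid license key.
#for result in query("rubber ducks", service="bing", count=5):
#    print result.url
#article = query("Rubber duck", service="wikipedia")
#print article and article.title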
#--- WEB SORT --------------------------------------------------------------------------------------
SERVICES = {
GOOGLE : Google,
YAHOO : Yahoo,
BING : Bing,
TWITTER : Twitter,
WIKIPEDIA : Wikipedia,
WIKIA : Wikia,
FLICKR : Flickr,
FACEBOOK : Facebook
}
def sort(terms=[], context="", service=GOOGLE, license=None, strict=True, reverse=False, **kwargs):
""" Returns a list of (percentage, term)-tuples for the given list of terms.
Sorts the terms in the list according to search result count.
When a context is defined, sorts according to relevancy to the context, e.g.:
sort(terms=["black", "green", "red"], context="Darth Vader") =>
yields "black" as the best candidate, because "black Darth Vader" is more common in search results.
- terms : list of search terms,
- context : term used for sorting,
- service : web service name (GOOGLE, YAHOO, BING),
- license : web service license id,
- strict : when True the query constructed from term + context is wrapped in quotes.
"""
service = SERVICES.get(service, SearchEngine)(license, language=kwargs.pop("language", None))
R = []
for word in terms:
q = reverse and context+" "+word or word+" "+context
q = q.strip()
q = strict and "\"%s\"" % q or q
r = service.search(q, count=1, **kwargs)
R.append(r)
s = float(sum([r.total or 1 for r in R])) or 1.0
R = [((r.total or 1)/s, r.query) for r in R]
R = sorted(R, reverse=True)
return R
#print sort(["black", "happy"], "darth vader", GOOGLE)
#### DOCUMENT OBJECT MODEL #########################################################################
# Tree traversal of HTML source code.
# The Document Object Model (DOM) is a cross-platform and language-independent convention
# for representing and interacting with objects in HTML, XHTML and XML documents.
# BeautifulSoup is wrapped in Document, Element and Text classes that resemble the Javascript DOM.
# BeautifulSoup can of course be used directly since it is imported here.
# http://www.crummy.com/software/BeautifulSoup/
SOUP = (
BeautifulSoup.BeautifulSoup,
BeautifulSoup.Tag,
BeautifulSoup.NavigableString,
BeautifulSoup.Comment
)
NODE, TEXT, COMMENT, ELEMENT, DOCUMENT = \
"node", "text", "comment", "element", "document"
#--- NODE ------------------------------------------------------------------------------------------
class Node:
def __init__(self, html, type=NODE, **kwargs):
""" The base class for Text, Comment and Element.
All DOM nodes can be navigated in the same way (e.g. Node.parent, Node.children, ...)
"""
self.type = type
self._p = not isinstance(html, SOUP) and BeautifulSoup.BeautifulSoup(u(html), **kwargs) or html
@property
def _beautifulSoup(self):
# If you must, access the BeautifulSoup object with Node._beautifulSoup.
return self._p
def __eq__(self, other):
# Two Node objects containing the same BeautifulSoup object are the same.
return isinstance(other, Node) and hash(self._p) == hash(other._p)
def _wrap(self, x):
# Navigating to other nodes yields either Text, Element or None.
if isinstance(x, BeautifulSoup.Comment):
return Comment(x)
if isinstance(x, BeautifulSoup.Declaration):
return Text(x)
if isinstance(x, BeautifulSoup.NavigableString):
return Text(x)
if isinstance(x, BeautifulSoup.Tag):
return Element(x)
@property
def parent(self):
return self._wrap(self._p.parent)
@property
def children(self):
return hasattr(self._p, "contents") and [self._wrap(x) for x in self._p.contents] or []
@property
def html(self):
return self.__unicode__()
@property
def source(self):
return self.__unicode__()
@property
def next_sibling(self):
return self._wrap(self._p.nextSibling)
@property
def previous_sibling(self):
return self._wrap(self._p.previousSibling)
next, previous = next_sibling, previous_sibling
def traverse(self, visit=lambda node: None):
""" Executes the visit function on this node and each of its child nodes.
"""
visit(self); [node.traverse(visit) for node in self.children]
def __len__(self):
return len(self.children)
def __iter__(self):
return iter(self.children)
def __getitem__(self, index):
return self.children[index]
def __repr__(self):
return "Node(type=%s)" % repr(self.type)
def __str__(self):
return bytestring(self.__unicode__())
def __unicode__(self):
return u(self._p)
#--- TEXT ------------------------------------------------------------------------------------------
class Text(Node):
""" Text represents a chunk of text without formatting in a HTML document.
For example: "the <b>cat</b>" is parsed to [Text("the"), Element("cat")].
"""
def __init__(self, string):
Node.__init__(self, string, type=TEXT)
def __repr__(self):
return "Text(%s)" % repr(self._p)
class Comment(Text):
""" Comment represents a comment in the HTML source code.
For example: "<!-- comment -->".
"""
def __init__(self, string):
Node.__init__(self, string, type=COMMENT)
def __repr__(self):
return "Comment(%s)" % repr(self._p)
#--- ELEMENT ---------------------------------------------------------------------------------------
class Element(Node):
def __init__(self, html):
""" Element represents an element or tag in the HTML source code.
For example: "<b>hello</b>" is a "b"-Element containing a child Text("hello").
"""
Node.__init__(self, html, type=ELEMENT)
@property
def tagname(self):
return self._p.name
tag = tagName = tagname
@property
def attributes(self):
return self._p._getAttrMap()
@property
def id(self):
return self.attributes.get("id")
def get_elements_by_tagname(self, v):
""" Returns a list of nested Elements with the given tag name.
The tag name can include a class (e.g. div.header) or an id (e.g. div#content).
"""
if isinstance(v, basestring) and "#" in v:
v1, v2 = v.split("#")
v1 = v1 in ("*","") or v1.lower()
return [Element(x) for x in self._p.findAll(v1, id=v2)]
if isinstance(v, basestring) and "." in v:
v1, v2 = v.split(".")
v1 = v1 in ("*","") or v1.lower()
return [Element(x) for x in self._p.findAll(v1, v2)]
return [Element(x) for x in self._p.findAll(v in ("*","") or v.lower())]
by_tag = getElementsByTagname = get_elements_by_tagname
def get_element_by_id(self, v):
""" Returns the first nested Element with the given id attribute value.
"""
return ([Element(x) for x in self._p.findAll(id=v, limit=1) or []]+[None])[0]
by_id = getElementById = get_element_by_id
def get_elements_by_classname(self, v):
""" Returns a list of nested Elements with the given class attribute value.
"""
return [Element(x) for x in (self._p.findAll(True, v))]
by_class = getElementsByClassname = get_elements_by_classname
def get_elements_by_attribute(self, **kwargs):
""" Returns a list of nested Elements with the given attribute value.
"""
return [Element(x) for x in (self._p.findAll(True, attrs=kwargs))]
by_attribute = getElementsByAttribute = get_elements_by_attribute
@property
def content(self):
""" Yields the element content as a unicode string.
"""
return u"".join([u(x) for x in self._p.contents])
@property
def source(self):
""" Yields the HTML source as a unicode string (tag + content).
"""
return u(self._p)
html = source
def __getattr__(self, k):
if k in self.__dict__:
return self.__dict__[k]
if k in self.attributes:
return self.attributes[k]
raise AttributeError, "'Element' object has no attribute '%s'" % k
def __repr__(self):
return "Element(tag='%s')" % bytestring(self.tagname)
#--- DOCUMENT --------------------------------------------------------------------------------------
class Document(Element):
def __init__(self, html, **kwargs):
""" Document is the top-level element in the Document Object Model.
It contains nested Element, Text and Comment nodes.
"""
# Aliases for BeautifulSoup optional parameters:
kwargs["selfClosingTags"] = kwargs.pop("self_closing", kwargs.get("selfClosingTags"))
Node.__init__(self, u(html).strip(), type=DOCUMENT, **kwargs)
@property
def declaration(self):
""" Yields the <!doctype> declaration, as a TEXT Node or None.
"""
for child in self.children:
if isinstance(child._p, BeautifulSoup.Declaration):
return child
@property
def head(self):
return self._wrap(self._p.head)
@property
def body(self):
return self._wrap(self._p.body)
@property
def tagname(self):
return None
tag = tagname
def __repr__(self):
return "Document()"
DOM = Document
#article = Wikipedia().search("Document Object Model")
#dom = DOM(article.html)
#print dom.get_element_by_id("References").source
#print [element.attributes["href"] for element in dom.get_elements_by_tagname("a")]
#print dom.get_elements_by_tagname("p")[0].next.previous.children[0].parent.__class__
#print
#### WEB CRAWLER ###################################################################################
# Tested with a crawl across 1,000 domains so far.
class Link:
def __init__(self, url, text="", relation="", referrer=""):
""" A hyperlink parsed from an HTML document, in the form:
<a href="url" title="text" rel="relation">xxx</a>.
"""
self.url, self.text, self.relation, self.referrer = \
u(url), u(text), u(relation), u(referrer),
@property
def description(self):
return self.text
def __repr__(self):
return "Link(url=%s)" % repr(self.url)
# Used for sorting in Spider.links:
def __eq__(self, link):
return self.url == link.url
def __ne__(self, link):
return self.url != link.url
def __lt__(self, link):
return self.url < link.url
def __gt__(self, link):
return self.url > link.url
class HTMLLinkParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
def parse(self, html, url=""):
""" Returns a list of Links parsed from the given HTML string.
"""
if html is None:
return None
self._url = url
self._data = []
self.feed(self.clean(html))
self.close()
self.reset()
return self._data
def handle_starttag(self, tag, attributes):
if tag == "a":
attributes = dict(attributes)
if "href" in attributes:
link = Link(url = attributes.get("href"),
text = attributes.get("title"),
relation = attributes.get("rel", ""),
referrer = self._url)
self._data.append(link)
def base(url):
""" Returns the URL domain name:
http://en.wikipedia.org/wiki/Web_crawler => en.wikipedia.org
"""
return urlparse.urlparse(url).netloc
def abs(url, base=None):
""" Returns the absolute URL:
../media + http://en.wikipedia.org/wiki/ => http://en.wikipedia.org/media
"""
if url.startswith("#") and not base is None and not base.endswith("/"):
if not re.search("[^/]/[^/]", base):
base += "/"
return urlparse.urljoin(base, url)
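# A minimal sketch (kept commented out) of parsing links from a downloaded page
# and resolving them to absolute URLs; the URL below is a placeholder.
#html = URL("http://www.nodebox.net/").download()
#for link in HTMLLinkParser().parse(html, url="http://www.nodebox.net/"):
#    print abs(link.url, base=link.referrer)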
DEPTH = "depth"
BREADTH = "breadth"
FIFO = "fifo" # First In, First Out.
FILO = "filo" # First In, Last Out.
LIFO = "lifo" # Last In, First Out (= FILO).
class Spider:
def __init__(self, links=[], domains=[], delay=20.0, parser=HTMLLinkParser().parse, sort=FIFO):
""" A spider can be used to browse the web in an automated manner.
It visits the list of starting URLs, parses links from their content, visits those, etc.
- Links can be prioritized by overriding Spider.priority().
- Links can be ignored by overriding Spider.follow().
- Each visited link is passed to Spider.visit(), which can be overridden.
"""
self.parse = parser
self.delay = delay # Delay between visits to the same (sub)domain.
self.domains = domains # Domains the spider is allowed to visit.
self.history = {} # Domain name => time last visited.
self.visited = {} # URLs visited.
self._queue = [] # URLs scheduled for a visit: (priority, time, Link).
self._queued = {} # URLs scheduled so far, lookup dictionary.
self.QUEUE = 10000 # Increase or decrease according to available memory.
self.sort = sort
# Queue given links in given order:
for link in (isinstance(links, basestring) and [links] or links):
self.push(link, priority=1.0, sort=FIFO)
@property
def done(self):
""" Yields True if no further links are scheduled to visit.
"""
return len(self._queue) == 0
def push(self, link, priority=1.0, sort=FILO):
""" Pushes the given link to the queue.
Position in the queue is determined by priority.
Equal ranks are sorted FIFO or FILO.
With priority=1.0 and FILO, the link is inserted at the front of the queue.
With priority=0.0 and FIFO, the link is appended at the end of the queue.
"""
if not isinstance(link, Link):
link = Link(url=link)
dt = time.time()
dt = sort == FIFO and dt or 1 / dt
bisect.insort(self._queue, (1 - priority, dt, link))
self._queued[link.url] = True
def pop(self, remove=True):
""" Returns the next Link queued to visit and removes it from the queue.
Links on a recently visited (sub)domain are skipped until Spider.delay has elapsed.
"""
now = time.time()
for i, (priority, dt, link) in enumerate(self._queue):
if self.delay <= now - self.history.get(base(link.url), 0):
if remove is True:
self._queue.pop(i)
self._queued.pop(link.url, None)
return link
@property
def next(self):
""" Returns the next Link queued to visit (without removing it).
"""
return self.pop(remove=False)
def crawl(self, method=DEPTH, **kwargs):
""" Visits the next link in Spider._queue.
If the link is on a domain recently visited (< Spider.delay) it is skipped.
Parses the content at the link for new links and adds them to the queue,
according to their Spider.priority().
Visited links (and content) are passed to Spider.visit().
"""
link = self.pop()
if link is None:
return False
if link.url not in self.visited:
t = time.time()
url = URL(link.url)
if url.mimetype == "text/html":
try:
kwargs.setdefault("unicode", True)
html = url.download(**kwargs)
for new in self.parse(html, url=link.url):
new.url = abs(new.url, base=url.redirect or link.url)
new.url = self.normalize(new.url)
# 1) Parse new links from HTML web pages.
# 2) Schedule unknown links for a visit.
# 3) Only links that are not already queued are queued.
# 4) Only links for which Spider.follow() is True are queued.
# 5) Only links on Spider.domains are queued.
if new.url in self.visited:
continue
if new.url in self._queued:
continue
if self.follow(new) is False:
continue
if self.domains and not base(new.url).endswith(tuple(self.domains)):
continue
# 6) Limit the queue (remove tail), unless you are Google.
if self.QUEUE is not None and \
self.QUEUE * 1.25 < len(self._queue):
self._queue = self._queue[:self.QUEUE]
self._queued.clear()
self._queued.update(dict((q[2].url, True) for q in self._queue))
# 7) Position in the queue is determined by Spider.priority().
# 8) Equal ranks are sorted FIFO or FILO.
self.push(new, priority=self.priority(new, method=method), sort=self.sort)
self.visit(link, source=html)
except URLError:
# URL can not be reached (HTTP404NotFound, URLTimeout).
self.fail(link)
else:
# URL MIME-type is not HTML, don't know how to handle.
self.fail(link)
# Log the current time visited for the domain (see Spider.pop()).
# Log the URL as visited.
self.history[base(link.url)] = time.time()
self.visited[link.url] = True
return True
# Nothing happened, we already visited this link.
return False
def normalize(self, url):
""" Called from Spider.crawl() to normalize URLs.
For example: return url.split("?")[0]
"""
# All links pass through here (visited or not).
# This can be a place to count backlinks.
return url
def follow(self, link):
""" Called from Spider.crawl() to determine if it should follow this link.
For example: return "nofollow" not in link.relation
"""
return True
def priority(self, link, method=DEPTH):
""" Called from Spider.crawl() to determine the priority of this link,
as a number between 0.0-1.0. Links with higher priority are visited first.
"""
# Depth-first search dislikes external links to other (sub)domains.
external = base(link.url) != base(link.referrer)
if external is True:
if method == DEPTH:
return 0.75
if method == BREADTH:
return 0.85
return 0.80
def visit(self, link, source=None):
""" Called from Spider.crawl() when the link is crawled.
When source=None, the link is not a web page (and was not parsed),
or possibly a URLTimeout occurred (content size too big).
"""
pass
def fail(self, link):
""" Called from Spider.crawl() for link whose MIME-type could not be determined,
or which raised a URLError on download.
"""
pass
#class Spiderling(Spider):
# def visit(self, link, source=None):
# print "visited:", link.url, "from:", link.referrer
# def fail(self, link):
# print "failed:", link.url
#
#s = Spiderling(links=["http://nodebox.net/"], domains=["nodebox.net"], delay=5)
#while not s.done:
# s.crawl(method=DEPTH, cached=True, throttle=5)
#--- CRAWL FUNCTION --------------------------------------------------------------------------------
# Functional approach to crawling.
Crawler = Spider
def crawl(links=[], domains=[], delay=20.0, parser=HTMLLinkParser().parse, sort=FIFO, method=DEPTH, **kwargs):
""" Returns a generator that yields (Link, source)-tuples of visited pages.
When the crawler is busy, it yields (None, None).
When the crawler is done, it yields None.
"""
    # The scenarios below define "busy":
# - crawl(delay=10, throttle=0)
# The crawler will wait 10 seconds before visiting the same subdomain.
# The crawler will not throttle downloads, so the next link is visited instantly.
# So sometimes (None, None) is returned while it waits for an available subdomain.
# - crawl(delay=0, throttle=10)
# The crawler will halt 10 seconds after each visit.
# The crawler will not delay before visiting the same subdomain.
# So usually a result is returned each crawl.next(), but each call takes 10 seconds.
# - asynchronous(crawl().next)
# AsynchronousRequest.value is set to (Link, source) once AsynchronousRequest.done=True.
# The program will not halt in the meantime (i.e., the next crawl is threaded).
crawler = Crawler(links, domains, delay, parser, sort)
bind(crawler, "visit", \
lambda crawler, link, source=None: \
setattr(crawler, "crawled", (link, source))) # Define Crawler.visit() on-the-fly.
while not crawler.done:
crawler.crawled = (None, None)
crawler.crawl(method, **kwargs)
yield crawler.crawled
#for link, source in crawl("http://www.nodebox.net/", delay=0, throttle=10):
# print link
#g = crawl("http://www.nodebox.net/")
#for i in range(10):
# p = asynchronous(g.next)
# while not p.done:
# print "zzz..."
# time.sleep(0.1)
# link, source = p.value
# print link
#### PDF PARSER ####################################################################################
# Yusuke Shinyama, PDFMiner, http://www.unixuser.org/~euske/python/pdfminer/
class PDFParseError(Exception):
pass
class PDF:
def __init__(self, data, format=None):
""" Plaintext parsed from the given PDF data.
"""
self.content = self._parse(data, format)
@property
def string(self):
return self.content
def __unicode__(self):
return self.content
def _parse(self, data, format=None):
# The output will be ugly: it may be useful for mining but probably not for displaying.
# You can also try PDF(data, format="html") to preserve some layout information.
from pdf.pdfinterp import PDFResourceManager, process_pdf
from pdf.converter import TextConverter, HTMLConverter
from pdf.layout import LAParams
s = ""
m = PDFResourceManager()
try:
# Given data is a PDF file path.
data = os.path.exists(data) and open(data) or StringIO.StringIO(data)
except TypeError:
# Given data is a PDF string.
data = StringIO.StringIO(data)
try:
stream = StringIO.StringIO()
parser = format=="html" and HTMLConverter or TextConverter
parser = parser(m, stream, codec="utf-8", laparams=LAParams())
process_pdf(m, parser, data, set(), maxpages=0, password="")
except Exception, e:
raise PDFParseError, str(e)
s = stream.getvalue()
s = decode_utf8(s)
s = s.strip()
s = re.sub(r"([a-z])\-\n", "\\1", s) # Join hyphenated words.
s = s.replace("\n\n", "<!-- paragraph -->") # Preserve paragraph spacing.
s = s.replace("\n", " ")
s = s.replace("<!-- paragraph -->", "\n\n")
s = collapse_spaces(s)
return s
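#--- PDF USAGE EXAMPLE ------------------------------------------------------------------------------
# A minimal usage sketch in the style of the commented examples above; "paper.pdf" is a
# hypothetical file used purely for illustration. PDF() accepts either a file path or a raw
# PDF byte string; the parsed plaintext is available as PDF.string (or via unicode()).
#
#pdf = PDF(open("paper.pdf", "rb").read())
#print pdf.string                              # plaintext, hyphenated words rejoined
#pdf = PDF("paper.pdf", format="html")         # keep some layout information as HTML
#print unicode(pdf)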
| decebel/dataAtom_alpha | bin/plug/py/external/pattern/web/__init__.py | Python | apache-2.0 | 115,157 |
# The default selection is 0: all goods under the "home appliances" category.
# Pick the items you want to buy; to check out, append "j" after the last item number, e.g. (1,2,3,j). Leave the "j" off if you do not want to check out.
# Separate multiple items with "," (comma), e.g. 1,2,3,j (check out) or 1,2,3 (no check-out).
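# A small illustration of the input format described above (an assumption that simply mirrors
# the split(",") parsing done further down): the entry "1,2,3,j" is split on commas, each
# numeric part is treated as an item number, and a trailing "j" triggers checkout, e.g.
#   for part in "1,2,3,j".split(","):      # -> "1", "2", "3", "j"
#       if part != "j":
#           pass                           # look up item number int(part) in the category list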
import os
shoop1=[
("家电类"),
("衣服类"),
("手机类"),
("车类"),
]
jiadianshoop=[
("电冰箱",20000),
("彩电",2000),
("洗衣机",400),
("脸盆",30),
("牙刷",50)
]
flag=True
long=len(jiadianshoop)
def f():
    # duplicated code: the same shopping-menu loop as the blocks below
    # `cash` (the account balance) is both read and reassigned inside this function,
    # so declare it global; otherwise the first read would raise UnboundLocalError.
    global cash
    flag=True
    long=len(jiadianshoop)
while flag:
for i in enumerate(shoop1):
weizhi=i[0]
shangping=i[1]
print(weizhi,shangping)
choose=input("请选择你要购买的商品类别")
choose=choose.strip()
if choose.isdigit():
choose=int(choose)
if choose<len(shoop1):
print("你选择范围正确")
if choose==0:
print("你选择了家电类")
while flag:
for i in enumerate(jiadianshoop):
weizhi=i[0]
wuping=i[1][0]
jiage=i[1][1]
print(weizhi,wuping,jiage)
choose2=input("请选择你要购买的物品,要结算请在物品编号最后加j,例如1,2,3,j 不结算的就不加j:")
                        choose2=choose2.strip() # strip() returns a new string, so assign it back
#choose2=int(choose2)
if choose2=="q":
print("谢谢光临,欢迎下次再来")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose2=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
l=list(choose2)
end=choose2[-1]
if end=="j":
print("你选择了结算,马上结算")
for i in choose2.split(","):
if i!="j":
i=int(i)
if i<long:
#print("你输入的商品号合法")
jiage=jiadianshoop[i][1]
if jiage<=cash:
print("你的余额足够")
                                            cash=cash-jiage # deduct the item price from the balance
wuping=jiadianshoop[i][0]
                                            n=open(user,"a") # append this purchase to the user's item-history file
n.write(wuping+"\n")
n.close()
                                            cash=str(cash) # convert to str so it can be written to the file
                                            q=open("cash"+user,"w") # persist the account balance for this user
q.write(cash)
q.close()
                                            cash=int(cash) # saved; convert back to int
print("你已经成功购买,现在账户余额还剩%d元"%(cash))
                                            # build the purchase-history record
wupingdeqjiege=jiadianshoop[i][1]
wupingdeqjiege=str(wupingdeqjiege)
wuping=str(wuping)
jilu=open("jilu"+user,"a")
#time=time.strftime('%Y-%m-%d %H:%M:%S')
#print(time)
#time=str(time)
jilu.write(wuping+" "+wupingdeqjiege+"\n")
jilu.close()
else:
print("你的余额不足,请充值")
else:
print("你输入的商品号不合法,重新输入")
elif choose==1:
print("你选择了衣服类")
elif choose==2:
print("你选择了手机类")
elif choose==3:
print("你选择了车类")
elif choose=="q":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
else:
print("你选择的商品编号不在范围之内")
elif choose=="q":
print("bye")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
else:
print("你选择的不是菜单")
    # duplicated code (end)
while True:
user=input("请输入用户名:")
passwd=input("请输入密码:")
user=user.strip()
passwd=passwd.strip()
    # check whether this username has already been registered
if os.path.exists(user+".txt"):
#print("你是我们网站的会员")
now_user=open(user+".txt").read()
now_passwd=open(user+".pass.txt").read()
if user==now_user and passwd == now_passwd:
print("你输入的账户密码正确,成功登陆")
if os.path.exists("cash"+user):
cash=open("cash"+user).read()
cash=int(cash)
print("欢迎回来,你是我们网站的会员,你的余额还有%d元,是否要继续充值?,选Y/N"%(cash))
choose=input("Y/N")
if choose=="y" or choose=="Y":
print("你选择了充值")
jiaqian=input("请输入你要充值的金额;")
jiaqian=jiaqian.strip()
if jiaqian.isdigit():
jiaqian=int(jiaqian)
print("你输入的金额合法")
cash=jiaqian+cash
cash=str(cash)
n=open("cash"+user,"w")
n.write(cash)
n.close()
cash=int(cash)
print("充值成功,你现在账户余额为%d"%(cash))
                        # duplicated code (start)
while flag:
for i in enumerate(shoop1):
weizhi=i[0]
shangping=i[1]
print(weizhi,shangping)
choose=input("请选择你要购买的商品类别")
choose=choose.strip()
if choose.isdigit():
choose=int(choose)
if choose<len(shoop1):
print("你选择范围正确")
if choose==0:
print("你选择了家电类")
while flag:
for i in enumerate(jiadianshoop):
weizhi=i[0]
wuping=i[1][0]
jiage=i[1][1]
print(weizhi,wuping,jiage)
choose2=input("请选择你要购买的物品,要结算请在物品编号最后加j,例如1,2,3,j 不结算的就不加j:")
choose2.strip()
#choose2=int(choose2)
if choose2=="q":
print("谢谢光临,欢迎下次再来")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose2=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
l=list(choose2)
end=choose2[-1]
if end=="j":
print("你选择了结算,马上结算")
for i in choose2.split(","):
if i!="j":
i=int(i)
if i<long:
#print("你输入的商品号合法")
jiage=jiadianshoop[i][1]
if jiage<=cash:
print("你的余额足够")
cash=cash-jiage #扣钱
wuping=jiadianshoop[i][0]
n=open(user,"a") #购买的历史物品按用户名历史保存
n.write(wuping+"\n")
n.close()
cash=str(cash) #转换成str形势保存
q=open("cash"+user,"w") #账户的余额历史保存
q.write(cash)
q.close()
cash=int(cash) #存好了转换会int
print("你已经成功购买,现在账户余额还剩%d元"%(cash))
                                                                # build the purchase-history record
wupingdeqjiege=jiadianshoop[i][1]
wupingdeqjiege=str(wupingdeqjiege)
wuping=str(wuping)
jilu=open("jilu"+user,"a")
#time=time.strftime('%Y-%m-%d %H:%M:%S')
#print(time)
#time=str(time)
jilu.write(wuping+" "+wupingdeqjiege+"\n")
jilu.close()
else:
print("你的余额不足,请充值")
else:
print("你输入的商品号不合法,重新输入")
elif choose==1:
print("你选择了衣服类")
elif choose==2:
print("你选择了手机类")
elif choose==3:
print("你选择了车类")
elif choose=="q":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
else:
print("你选择的商品编号不在范围之内")
elif choose=="q":
print("bye")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
else:
print("你选择的不是菜单")
                        # duplicated code (end)
else:
print("你输入的金额不合法")
elif choose=="n" or choose=="N":
print("你选择了不充值")
f()
else:
print("你输入的选择不合法")
else:
cash=input("请输入你要充值的金额:")
cash=cash.strip()
if cash.isdigit():
cash=int(cash)
print("你输入的金额格式正确")
else:
exit("你输入的金额格式不正确")
while flag:
for i in enumerate(shoop1):
weizhi=i[0]
shangping=i[1]
print(weizhi,shangping)
choose=input("请选择你要购买的商品类别")
choose=choose.strip()
if choose.isdigit():
choose=int(choose)
if choose<len(shoop1):
print("你选择范围正确")
if choose==0:
print("你选择了家电类")
while flag:
for i in enumerate(jiadianshoop):
weizhi=i[0]
wuping=i[1][0]
jiage=i[1][1]
print(weizhi,wuping,jiage)
choose2=input("请选择你要购买的物品,要结算请在物品编号最后加j,例如1,2,3,j 不结算的就不加j:")
choose2.strip()
#choose2=int(choose2)
if choose2=="q":
print("谢谢光临,欢迎下次再来")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose2=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
l=list(choose2)
end=choose2[-1]
if end=="j":
print("你选择了结算,马上结算")
for i in choose2.split(","):
if i!="j":
i=int(i)
if i<long:
#print("你输入的商品号合法")
jiage=jiadianshoop[i][1]
if jiage<=cash:
print("你的余额足够")
cash=cash-jiage #扣钱
wuping=jiadianshoop[i][0]
n=open(user,"a") #购买的历史物品按用户名历史保存
n.write(wuping+"\n")
n.close()
cash=str(cash) #转换成str形势保存
q=open("cash"+user,"w") #账户的余额历史保存
q.write(cash)
q.close()
cash=int(cash) #存好了转换会int
print("你已经成功购买,现在账户余额还剩%d元"%(cash))
                                                        # build the purchase-history record
wupingdeqjiege=jiadianshoop[i][1]
wupingdeqjiege=str(wupingdeqjiege)
wuping=str(wuping)
jilu=open("jilu"+user,"a")
#time=time.strftime('%Y-%m-%d %H:%M:%S')
#print(time)
#time=str(time)
jilu.write(wuping+" "+wupingdeqjiege+"\n")
jilu.close()
else:
print("你的余额不足,请充值")
else:
print("你输入的商品号不合法,重新输入")
elif choose==1:
print("你选择了衣服类")
elif choose==2:
print("你选择了手机类")
elif choose==3:
print("你选择了车类")
elif choose=="q":
print("bye")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
else:
print("你选择的商品编号不在范围之内")
elif choose=="q" :
print("bye")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
else:
print("你选择的不是菜单里面的内容")
else:
print("你输入的密码不正确,请重新输入")
else:
        # write the username to its file
n=open(user+".txt","w")
n.write(user)
n.close()
now_user=open(user+".txt").read()
        # done writing the username
        # write the password to its file
n=open(user+".pass.txt","w")
n.write(passwd)
n.close()
now_passwd=open(user+".pass.txt").read()
        # done writing the password
if os.path.exists("cash"+user):
cash=open("cash"+user).read()
cash=int(cash)
print("欢迎回来,你是我们网站的会员,你的余额还有%d元,是否要继续充值?,选Y/N"%(cash))
choose=input("Y/N")
if choose=="y" or choose=="Y":
print("你选择了充值")
jiaqian=input("请输入你要充值的金额;")
jiaqian=jiaqian.strip()
if jiaqian.isdigit():
jiaqian=int(jiaqian)
print("你输入的金额合法")
cash=jiaqian+cash
cash=str(cash)
n=open("cash"+user,"w")
n.write(cash)
n.close()
cash=int(cash)
print("充值成功,你现在账户余额为%d"%(cash))
                    # duplicated code (start)
while flag:
for i in enumerate(shoop1):
weizhi=i[0]
shangping=i[1]
print(weizhi,shangping)
choose=input("请选择你要购买的商品类别")
choose=choose.strip()
if choose.isdigit():
choose=int(choose)
if choose<len(shoop1):
print("你选择范围正确")
if choose==0:
print("你选择了家电类")
while flag:
for i in enumerate(jiadianshoop):
weizhi=i[0]
wuping=i[1][0]
jiage=i[1][1]
print(weizhi,wuping,jiage)
choose2=input("请选择你要购买的物品,要结算请在物品编号最后加j,例如1,2,3,j 不结算的就不加j:")
choose2.strip()
#choose2=int(choose2)
if choose2=="q":
print("谢谢光临,欢迎下次再来")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose2=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
l=list(choose2)
end=choose2[-1]
if end=="j":
print("你选择了结算,马上结算")
for i in choose2.split(","):
if i!="j":
i=int(i)
if i<long:
#print("你输入的商品号合法")
jiage=jiadianshoop[i][1]
if jiage<=cash:
print("你的余额足够")
cash=cash-jiage #扣钱
wuping=jiadianshoop[i][0]
n=open(user,"a") #购买的历史物品按用户名历史保存
n.write(wuping+"\n")
n.close()
cash=str(cash) #转换成str形势保存
q=open("cash"+user,"w") #账户的余额历史保存
q.write(cash)
q.close()
cash=int(cash) #存好了转换会int
print("你已经成功购买,现在账户余额还剩%d元"%(cash))
                                                            # build the purchase-history record
wupingdeqjiege=jiadianshoop[i][1]
wupingdeqjiege=str(wupingdeqjiege)
wuping=str(wuping)
jilu=open("jilu"+user,"a")
#time=time.strftime('%Y-%m-%d %H:%M:%S')
#print(time)
#time=str(time)
jilu.write(wuping+" "+wupingdeqjiege+"\n")
jilu.close()
else:
print("你的余额不足,请充值")
else:
print("你输入的商品号不合法,重新输入")
elif choose==1:
print("你选择了衣服类")
elif choose==2:
print("你选择了手机类")
elif choose==3:
print("你选择了车类")
elif choose=="q":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
else:
print("你选择的商品编号不在范围之内")
elif choose=="q":
print("bye")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
else:
print("你选择的不是菜单")
                    # duplicated code (end)
else:
print("你输入的金额不合法")
elif choose=="n" or choose=="N":
print("你选择了不充值")
else:
print("你输入的选择不合法")
else:
cash=input("请输入你要充值的金额:")
cash=cash.strip()
if cash.isdigit():
cash=int(cash)
print("你输入的金额格式正确")
else:
exit("你输入的金额格式不正确")
while flag:
for i in enumerate(shoop1):
weizhi=i[0]
shangping=i[1]
print(weizhi,shangping)
choose=input("请选择你要购买的商品类别")
choose=choose.strip()
if choose.isdigit():
choose=int(choose)
if choose<len(shoop1):
print("你选择范围正确")
if choose==0:
print("你选择了家电类")
while flag:
for i in enumerate(jiadianshoop):
weizhi=i[0]
wuping=i[1][0]
jiage=i[1][1]
print(weizhi,wuping,jiage)
choose2=input("请选择你要购买的物品,要结算请在物品编号最后加j,例如1,2,3,j 不结算的就不加j:")
choose2.strip()
#choose2=int(choose2)
if choose2=="q":
print("谢谢光临,欢迎下次再来")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
elif choose2=="c":
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
break
l=list(choose2)
end=choose2[-1]
if end=="j":
print("你选择了结算,马上结算")
for i in choose2.split(","):
if i!="j":
i=int(i)
if i<long:
#print("你输入的商品号合法")
jiage=jiadianshoop[i][1]
if jiage<=cash:
print("你的余额足够")
cash=cash-jiage #扣钱
wuping=jiadianshoop[i][0]
n=open(user,"a") #购买的历史物品按用户名历史保存
n.write(wuping+"\n")
n.close()
cash=str(cash) #转换成str形势保存
q=open("cash"+user,"w") #账户的余额历史保存
q.write(cash)
q.close()
cash=int(cash) #存好了转换会int
print("你已经成功购买,现在账户余额还剩%d元"%(cash))
                                                    # build the purchase-history record
wupingdeqjiege=jiadianshoop[i][1]
wupingdeqjiege=str(wupingdeqjiege)
wuping=str(wuping)
jilu=open("jilu"+user,"a")
#time=time.strftime('%Y-%m-%d %H:%M:%S')
#print(time)
#time=str(time)
jilu.write(wuping+" "+wupingdeqjiege+"\n")
jilu.close()
else:
print("你的余额不足,请充值")
else:
print("你输入的商品号不合法,重新输入")
elif choose==1:
print("你选择了衣服类")
elif choose==2:
print("你选择了手机类")
elif choose==3:
print("你选择了车类")
elif choose=="q":
print("bye")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
else:
print("你选择的商品编号不在范围之内")
elif choose=="q" :
print("bye")
if os.path.exists("jilu"+user):
x=open("jilu"+user).read()
print(x)
else:
print("你还没购物")
flag=False
break
else:
print("你选择的不是菜单里面的内容")
| xiaoyongaa/ALL | python基础2周/购物车作业改进.py | Python | apache-2.0 | 38,553 |
#! /usr/bin/env python
# Hi There!
# You may be wondering what this giant blob of binary data here is, you might
# even be worried that we're up to something nefarious (good for you for being
# paranoid!). This is a base64 encoding of a zip file, this zip file contains
# a fully functional basic pytest script.
#
# Pytest is a thing that tests packages, pytest itself is a package that some-
# one might want to install, especially if they're looking to run tests inside
# some package they want to install. Pytest has a lot of code to collect and
# execute tests, and other such sort of "tribal knowledge" that has been en-
# coded in its code base. Because of this we basically include a basic copy
# of pytest inside this blob. We do this because it let's you as a maintainer
# or application developer who wants people who don't deal with python much to
# easily run tests without installing the complete pytest package.
#
# If you're wondering how this is created: you can create it yourself if you
# have a complete pytest installation by using this command on the command-
# line: ``py.test --genscript=runtests.py``.
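# A rough sketch of how a blob like this is typically consumed (the real logic lives after the
# blob, outside this excerpt, so treat the names below as an assumption rather than this
# script's exact code): the base64 text is decoded back into the zip archive, written to a
# temporary file, and placed on sys.path so zipimport can load pytest from it, e.g.
#
#   import base64, sys, tempfile
#   path = tempfile.mktemp(suffix=".zip")            # hypothetical temp location
#   open(path, "wb").write(base64.decodestring(sources))
#   sys.path.insert(0, path)                         # zipimport makes the archive importable
#   import pytest
#   pytest.main()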
sources = """
eNrsvet6HEl2IDa7vqxVtlZae732/vDnnKKpzGwWkiC7JY2wXd3isMkZarpJfrxoWkZDxURVFpCD
RGUxM4sANDP+/AR+Bb+A//kh/Fo+t7hmZFWB3T0z/j63NARQFXEi4sSJE+ecOJf//V///v1Pkrd/
ub7JZlV9ls1m5arsZrP3/+rtP4zH4wg+OytXZ9Gjl8+iJF439WIzL5o2jvLVIorn9ardXNLf8Ouq
mHfFIvpQ5tFFcXNVN4s2jQDIaPT+X7/9NzhC2y3e/2dv/s9/9ZOflJfruumi9qYdjeZV3rbR626R
1Ke/ARjp0SiC/3D4y/yiaKOuXh9UxYeiitY33Xm9ii5hGhV8kX/Iyyo/rYoohz9WUd51TXm66YoJ
QcD/eCBcQndeXEbQeVk2bRfl83nRtpkaaUS/LIplpDCQtEW1lKngf/gnoGdRzuHLaIpTz2Qeduez
osNZSP9JtMovCwtK19yYP/C/SwAFQ9IsoRM11w2K63mx7qJn9O2Tpqkbt3OTl20RPVKrphbJGDAN
iD6CLdlUi2hVd4KE6G47ju5G7hBN0W0awOhoBH1gLrgN6ej9f/72z3HD5vWiyPCf9//Fmz+70Nu2
vhlZG7hs6suoXLVr2Ds11OMXs3989OrRq1+8nsjvv3ryT79+8eqr16PR6aasYEdmTbFuYET8MRrh
v1V5Cn/DuNIimwG6GGASY4N4EsXSME5Ho3JJu/ABCLCsV7Bvy/r48CT6Yhp9yniimXVNPi9O8/mF
mtuybi7zbsbIxY71qroZFVVbWL306mfrm4d7ghBKfgzd+qR81eTrddFEeVNv4Oy8ZErGISJu2xId
BslwAjt9hU0tSoLF49ae5y3SWyINJtF4Xs+WZVXgNo9Tn16oESOZVgfkKh8qCOkwrdIRULBx57hH
Zo0Yag/HrSpXxar2u5gvDqIH/Z79UZwR5HC41B86H29u1upoIMZyG+lH0d0GDoVGX5q6Bx4+N1Ow
z3nxXu9NDZylsTAtR8r0n3IT/MMGsSp2gcDpYgMNgrv/PfBhIKXuRgNb5925z7CQ6AROTg1kydG6
LlfMEeuorTfNvGCMJDBcAWwyh1PcaTCX5dl5RzOhftgJOe282+RVdQO7ULYEDCkgzTQN439rJjQc
O6vqeV4lCic2yRiM3wF+f3NaRIt6FXdIfjCZso3m58X8AobwSX+d0TeJR+R3om+//VYgIYzzvFnA
uavKC1xcEV0VZbPAi62ce/3KFTVoO7jccmwD/OgYKXSew0jZZr3IO/79BOZYtF86/XG1ofX5m7oe
2sTlpqp4P7ZvpRzd17x1sqnAkWjyCETtKs4gqpf0OdGvBU//7nA7xd8YgGkDQCcR3Xr0BRzq1cKa
Ki65d6VgJ0Pu33th1qGNW7VCYruhVd3Rp9A0FFBA3usc1giIcZCitseZhbU8vRS84puzVo7uh7yZ
Ps3h8hhaVrdZwzZclXAAcR3QFUQmOEhIG21oeSOHrIDYYxgjjuAktEUXvWk2AAQIRY2AvRmWbDW0
LlkoWi0cUCKV6Sm00dV5AStuihb+GsDjOUBhojoHyphveEcAB3TqEREj63qxzoD+GNqAKAIrJkaK
R0N9Yp9omLV7jnW3e7rfssrP2uivLOnidj20DOLtuTSGKRAij48UpBN1pz9t4Ivepf5r907P1a2+
xNbReV0tiDPOiPm1JDMvZ2dVfQp/EQzgOFfn5fwc2CjuAooxwO+AvwLvKj7k1QYYziIblk8nPJQv
purrlr7NYAL9a5YvZzUbq609P6uhrMGCSR+Erktq4X7hiR0kIilALHUMMEVgxl2BxBpiHfpLvigY
7fALHHGbiFFOVJPIDJfFls/rVeHJDEE2MB6nwfvdA4nylDtj2QuLfeC+yuaxxPbJJ0B4rbc0tfuo
ZC2KWN1NjFpnwsgdUCFrgH+QMJpXUb5YlPIrbZPmCe0osNiWQAP9bapOMREZH2CEbw1DDw59AELW
N0naayeXZ0Ir9TFJGGFcuFQ50f1t/F0X89keCIRmH4O8/7QNeT8eKiythxe4HR+RhRDUiJQcaTOo
3k0Ut/kSsAFy3uqgKeYbUJs+wBhwBA6QSlM4Tw0yLFLMkMvHct8G120OSllnCJnmITMwsytb0OI2
xdAEBYp9833EHVuBEEqUi3dtG5Eajd2qDawKVwKyqrn29r1gy9W82iyK3qWqLlL/8tn3UoVpw9SA
Xo5PDHXgJJszJFXDWBQWYHBPyO3pZgZuhnfSapEk0HXikuQxfHRiqTiWGvWr4iagQBFl4v3HsgCL
43A91XOgHl7opkWSednezOvetUrzUVfoG6VEP1nB7PsKch4hJMBwgd8jInJLd9d3IJkNZm13U+GF
gvx7xMtYN2gAUJ9tUaQJfs+yo75gKYV+HbhU1ddZd2pfrObKKjprkjLupau9A0LgmqNPcaXJmKSr
MajvVb06G6f+5Ow1X2pVNKBDkJziXZXelfZUtzGrxrWwKDEEuSkqXOwAbBtDB0J0dL3zBWlUfXtj
aFWzAYjjz12Cie62R3cXX6Cy7oNHBXNiT+Heg4+TJ3ZoIJumQVnDSB32oRaZYtpfvJYOekjbS2bY
X9snJR/0c1JiLcU+wLVDKHS4LzOMbbsdkAP1RaimnGhIEzqW6t+xtASWDYJz0VT5DUnKDRmt7Kut
XHVFA8w0tF+vzLd8v+dlhWDMBiGzVjJODhA7aFEsImQUaMCzpRs8lKc1qDdXqCHi9On7lmQD+At7
iCzuy5Wa9wQFSkMYHV/TmZ5fmuGNu05cjnxtCcozBwMEaWKhf4Ja2aZazHDpU7y5Uv9uI/svcFa0
aoAgez3BeaSBy8O3lRncMgYLtCWvDkRGILNZBOC868RFyDS6DtKOauCQnOYSYVvBHTTe/wMrWCzu
WwZE0ZkOHkRw++V4TC3DgLJx59fJFs40iQ5dJd+axgQYdkeWnylucFgM0eRnjl7mKdNXBdyXLFDg
tUqkqEHj0cXdArYIoiG+aXRdU1hS5x2yIkAPbXcmSo4Ql51tIHMt2ErbYdOOzW2afHVWzGCcj+Ji
pbLqbFWk9A0tFgiADQOuWB90vgR4GhUAEVHRh8oQguzLI3xsOQiG72A1DTUssuIE+jGbsi7hDg1G
MmyAUrfYzGWQSTSbgGCDLyzBDbD5/kTQOunNeJ//ZMCp/Ow9Jr2+WXX5dUDW49nZF/k9y1xgnkdu
iWJC7DG0PDE7H74IjwnNRzCPEz6Hmhjt64Q/dBSM83KxKFZbbIsk0pdL5xYXGw0+HKJwD+KIFjYB
XjGbecTc1tUHMZojOFeHuKyBHtjASGwTdUc46EHpv0ciw5fqcdybVXwy2ktyH1IQeiOJarl9qL0U
BYFOipot5rVdQMrrzW+5sq9IPNA90gWOVuDQWWAC1D3+8ssvjbIq70c+r3Bs8r1pIOkP3NWVf1kb
jJzWebN4hjvfbNZd4BHK6xMccwyz7wlq4yh6imb8uw1Iv3hb3G2/W0X0L4rCy5Un+PKL84RgWodk
NawW+PgRY6hGk6BRn0GG70hz0twX5xAcSnOe/pegtm4pfvoL/RBarOb5ut1UaP9CEa5eLosmOi/P
zvEhBz0BjCJF7/h8KhUYZKxlYT3u488noty5WsWQmtidepwEvy3zqvyXgm/Xs/IDavkijngryHzz
o3ql7U4nUQyq1qq47mJPCCP1LQH2FBDOrs6RBlDn3spu8b+bsqgWvKmsaCPEYEsEN8V/M5mRR5Rt
l/nGZliAJe/174RQJ+hi6HC+6eRjPOFTph+mXfnDksjkEzgyaIbRHYaMQIYAlLTLj+5IiurFhwRH
3dBl4qc3SOQfyGafr26AfC9PyxVpAdiVtUy5GsmUb8uOcLW4/Ii8TPiSwXdWkiA6EvIOTosDLVJb
rgUtKihFcwkQF+7MaNZ5VdVXLWJQubPIIGptQQyATJO5E6sbcV/o2NaXt6jlJE1xWX9g8RWmvFnR
PVbw2+5p2bX8crYo8soBR+9a+EZEoq8yHiv59L5eXhq2ncJkrpXNyyUleTG4tlhT73tSeKfRoAiY
JNRCxNMIBjO9prShae+RDP9LLJKze9sOFwoSuaRUXR2n0KJ/zrCLappRQxt4OjC+UJk19LW2P02F
Bge62lqR0z+s9SA86880HbzXDf++Nnaj4KuI5w9VwgWquQEIgtYQbAZtN3CzJBo+32hpZnfGbjZD
tTRaktc70FuTtirh78PUX4SMwg5cdBkBRPiwN3myVmpeXFZwAhTnW02r/PJ0kUfXR7Sn15mWO9Pb
MCQ8LnO4R3MgelxbG9HB8088iDN45KPlZjUnBkSnD8VfYydVFueJPdQzgOkeAxl6QjyLzQWOXExW
XDy1OB3VAHTpHBbn0peYlqyNIlmPIQBSeuwU0JjjmxLxL14n8TEXzDNCA7+DorGF0erAgl0ojKCf
oqXmQ5Fue5Yw1Cr7qCSl1FXy503enhMpb9EfgGQ6Mn7wBCymzSZj2pyqyA26BFVGUSf+rPtlg9yw
RG5IamFyAMLcQaUUG/rrQerrbCzUYIvj8iRk+2HzrsHd4Pl2Td7WcT4+eHBim+To4aiGC2JRXG9B
GpEUtlG3AjGg+862I+k0hYHpzK1uyjO8f4Fm0DSwRgm0KeFvljt5gaYvP0o0Fs3auGWzwjT67e9d
dE/Mc0OxQl9WfJrzFiXeQQvHWYPeulEaK4oF3uN1dFU3F+IK4HVlryLa1eiy6HJYyRkg4xKvTHmb
XBTzGsauG/I6EgPOuvQA8SE5K1Y0z9Z1HyQqPM8/kFZ7fp9ev6Li/Qak1u7GBYQeUjhx5CYApwuY
WJhuelb2cpH0vkH3GMGj3FLuaGSfAi1B/LgIjWbbcnz2gDFdQRKvdTLJtEUnbIQ5/fFJz8RZ9Wl6
6a6g9z3o1+io0HdjsImDXO6wJexRFZa2YfRlpp44l5m8ZM8I68P2G3z6kOXTImUSswdT+OX23R5O
1UxD17d3ol2S0q+F9qaaR/eQLW+k1keS1+UaNJMkHlwRyheD846Da42/RD9fRGWslccnipE+Wy3r
sHNtS87AwHHJERguCXUutAZpdvm8qNa0xav8Q3mWa4HaY9CKgcxI8+9ARUIjQzyoNG7WWmVh+7av
r5T0RB22l+IXU28NPqX7Lw20NksWAhjHD04m0SN6XgR0kaUkQBSWhV481nXf+LI9i30L6JY5hCnO
GqDVwLfDw7WoPzJSmFoUl5JYHkvjAeJmwc7ZInf9R1HsvacChmVyMDFjXz/yLm2iPbcrCprc7fjw
ZLin2hG3M7Nk7v1gS2+8WjQpeuOfSv+HW/rTJFc9Jyz82DaK4d8gEeNHlrmzD01LO4l6lupLtt6L
telj3rKMSCYrSW/xGOwwgOgu3HanIBpN+UU4Spz1gZYuUpOZR+p4EM3RH1Sd2uaGHt+3eZi4CCGb
Mb+DuSIvCcKxAhiL1bholdGYZXCPUgCY60esnv5cIp6wDyu5kJLztQVDLACezcChqEIcUnI1qLZA
JG0dODDQxDNSk5YiIRUA+7QA4QyEwLOwGE6vI3jFhiIlzHZNrINhvaIoTpv9pi5XpA23vW/xR9b4
NlnksIL/3nMF9bAYi8c4AuzFGupY05Td48SnVPzYorSmsbgz0xugYvcrhUsfhp+gLA3fhTQebyDG
KQ9nnTMM+ABaILXHP2xyLhR7UMdEHxD3fdnWPcw5y1ydynJws81nAfHS85ofkCdlra9gCWgW/xoE
CcRSYkNHI7jM3dX1LEc5WNAVSyF87PFV/KYqpux+44ol+WlLtkdp2J2yRjnlE40aOjppbWMfeAOm
5D3mnUNlkXQfdF2DnZnqEf2uvCzQ8dhTRN1+uKCjCBf0O9q/363q36E584Ml53Arl3HI+o5QES+U
MT1KWFXrvRIhgXZEGJ7ZAeT3luiVMTvloQ3+lD1CFodwyrNVDQpcWDsuBRJKlDEDi4MPaUgcrryI
n+jb5zl1Tfp+bhQ2Z1Gyz7qSYemDWNnw13wUt3RXY4JMYWxr/Q5pKvzSPcaphfvlJTKsp2zgLRZP
WNBJLHo3vyqip3/DNC8/LapXv1iUr37ph4BcAofGe71Q00CGs/NtMMxZyAzh6m89NsA8g2ds8w98
yp64XC3tmy7hlKIOZc9vsyqRO/3JzFHmI/NUwQn+bvf0I2E6bG/Qvk/i8KT8Vp6KrZE1pl+wkaFu
WvOadYctIP5DG0dNVvXV7DJvLgp8VRp/wT0QtvXpk+FAhh0cWVMkc919mTC/4RomM7XG8RqJd6XL
EEVSFY411eN60REyPMo78qvbQCaPHhD8m/e1cnsh+5RzWaMFbaXezMT3wfaJWpZnG3Reh33kphye
Q6+Tnr9OP55TPXQH/BBJ2uHhDh6kP/ybd9A/wZ0QHqYhX+Vtg/cnMDQJ62Ad+iftMziPjIU0OmCF
QnsApJ5ERcfYcRfr+RgrVxY+8a53md7/NOxbFX7wNa5avH2LQmglDbvXWFPW3vraKT/oLez59Xsh
BzPLQd9frvKMVKehF9nVut7vlgc8C3/wCyvL5coRFS2PeBZHAz5H8L3tA88ARXJXy1HwU8990ZYv
Ye6PcCy+2mxBcubstXLLRDY+I4v39IBFUG0CmkQ7VcylYuLEfYllLqLNWm0z6UBZUMWy8LjDJc84
VHnRTeh5kvZ8Ungx0PzQHsD65vPo8Gio171pZPEQcxDWcHnM4D5algh5TFhw5t9X3XjtsqUOgHuK
+KnJsRn+RHnWbvNWWJK1fUVE5sA5sgD1CWzbdNjtZrg1HQrrprxHGBhvWUi635RNj3sPjr7XpMkk
N1cWvjDvkatmClxSbRnh10in6t7ytt9MBfRkjRE8UkrmkbEnMsaUf0zoROSVODn3WBzBdM+oa+Xx
wX5mIPqHMnSu1HrH8H+fyJ/WLXwmHuhN4dhlbFkLbmjrQlRjyLQGdO2sXVdll8TfrWIrjozENZkP
E5QlZN2TyR0/OHKDixTVyNhbTpg1QIig5V1RsBd4erHn52EqvFt0m1gBBxNRcQMXiqUM9xfQv1Mu
ihv6FEVxQoI85ojGucTfMGXIT2Fn/37c75u1mHukfwTJigqAsE0fA8rMK06c2PgkZPlmWyxos7OZ
xP61s1kcNnI7OzS2O8BAn6u/vhj3Tex9vmfo9g350RtvIE4Zgy/0pwV79cAldHrT824yEMhum6Ta
UWEib5QAl2xJkq4lwxsVMDYAZVG2Z5uShH7iOh+KBv2vViTdouEkCyvPoD1KFhnvfvcMis5ouO14
M0nnFK6xvz1U7j2WFW2L1n5nmwc2+TdOOBZxEmHCoKFnOXdT7x48OERqpTQ94mapJzmwlm2bq187
KG5Lgf/uOzKYE/ghqDqNxvDXYi1Z09uu/BCM4aSL/HKqVFlkcFdNCbL6oKz1NR9+MfS6jEGrmzPj
piESpytkhcRtfR/ZIrrnufSjC039Tbpj9IBeLIGJZZlY3jiHGP3yG3I3HR7SMSccUMDL8Dgm3mWA
gSqNmEFYMYh9k1wYcUqj1up1oGMg9vHIN39y7IWy/Ulzj5TWjWgVnIdA6ye81+rSsiQVJ/REK59K
GlECfk+0tyQJvqjp320CTu/2DEi4FENQtG1+Ro7g5OaNTIBR72bJGebpBoIS4PhJlUUM/foHnG7s
ok9sE3xYMOcaRRgaQ5V3AZZVAVebMN4BU7xNimiQl7l5iKJjL4B6G2Lv2VHIg8P0tXZd5AgZ3/Nj
8K3ExFBouyaahiYW6Im9WLFIhSTlo+8h4H7Wl2Z7c7O97UWf5KkYnV5vm3Gv7mcLs41Re9hjVv0M
Tml2WiB/r2isPnGIiebF6+1BIiFvXbyUV2u8juV6ZvBpKCSd6HW1HoXADlwfriLghKKYt2p9WHqv
70oC1c8jHvoMDOsFgV0P7f21necCAXBaH3AeDl9tVl15WYScOaDPGPh8ebm5tHyqFrAH57QX6Jw2
Jm0R8Kmgs3QU2hxvemYpntufWRL5RlotHRO4ejxy7hU4GE0uMS/O+jl8ipls3ynPPOV7PpDiyZHw
TOS+Sm3ehyyvx17oKc624gelDSVnEM30fC9LI75pcD1+L8iceugNOeUYxIx/+tOfAh8wG9pxds2k
RRYuCsxfReu6pVwl6bgH7RSEsIsQZzF+GLKEiRlZPwnpO9sXx+ynnNBpwkb2aQigVtFh6nSityzr
CHpvXKOdr3zOyBMD00QgUXBFXmHXo12PUK1xGTePQK7QGMg9+blKPemAyYoVvdrEm2558LO4b6Dd
68npTvT0n57xezGl4agq5Y+xvkEnzoNrUKMoOVrRUKiSfmPWFomRnexO2yaMclXWzPDLOnsDNPHs
hR3aemV9x4j8NUn16GFdTMvadwGqO2mWdP3scADljNKDbEAU1WHw+/sD3W2NT0TeRXcPr01qCO3k
T+6qyulbiKBPNzZZpEeDiUOGqMt7bPLp3/m731RRvf7de5Uq5EmOOKieW75YyDdWmllMh7TqyFjW
Fuvp+GDcewgTaNo83u9mP21YOyiuUldbVzu05cpi4o6kU92oWXk36hV8sYaB15OoLwDDt6TVCsDU
3l3D3wI7y3pggYLNeBYNP0cqdh5kjyEsmKvO+mvkpUUw/F3/vu0Vcs+dYAdzNQFO2El+4IsS3V45
Z8mNnb8Kr8p+Dp3dTvH2SvuCCMvcRlEjdS7gVCV7Nx73L9SbIRpSd6mSDv3JYAqegAALaJ71Lnnp
c1zeexC0ywXXgTLEdyF7iN+a1VMSP8zwu7Hi0DwSixCqpaObfCNCO0fBM+G2CR8NcYJxPhvmfd0p
W2aOwsQ6Zm+c8bZDcax1GhlawTwZPiPibm7YttU9cLR707nlcpQJRL7e9/QB2sm6BfTM7ooGWJr2
ph+aOEtzezAlS1NVL6v8l/LnCWirA4vkJfY2zPJksP/sN9T+FOaPADSeDrpFmIn9AAxPvwZ4hy6Q
rtqaxjbW4ryraTyFNKSFIM92JB0/wUe8dJDT8dVxWlcL8VcBMFP4n9vjzhBjZKGnt3p7g4ZWLl9v
u5l3LXv/Je+/XHsJoeecOzYn1MdjEo3ZUDwwro83b4ghJDj4ZFLpXW3baGLn8AP0t0sDUbIu5nrg
/5Hlfvzdqs9oduaK8XCxf3uZvMPJHHvffuKz7aBrrIEec1J2SGX6GUjA5/kE05fG9im/7S886Yz2
eGXLIPWmW0uq4yLH/LyuQ+YdSXuYr6yWoExxDB5mg4mKRYnOc9EGnZQopbhJqd6eKXFETVaTGi6g
PaME27TTbtQzPlQeeHIOQ4N/j4+sCFBNlJg2rz0Se7LGspM1ZIK93btKHmT229utt+mt7tJ9+JHN
ZVyqVA47+82a3Hq8ScvVR1feLe+iPhfmx5ZlTp6P4+AbsuQo1cvtAyEpFucEG+m9DQ68cSpLKsjE
9+g3PQ/84GH0BWIQ03ldlQvfCuy5+VCv4YhCeyd4gOFXTsEDrOUWD9T7TcPAvwdompA3TWCY7UP5
E/UAbJ/JDkTYFwT8B/eieq8Xh09J6jg/18/6Sa4CcuQKlUhL2+mNHHs3nfqK2JcbxxPpcbpacpPH
rf7QeJNR5jXpeLS9MoBuN7JDda0l2QG7XlhR7Ebv6thTK/exhuJFMIbyJA+0pTRj8oH3lVqsYOxo
rzVI49tMXrpsn7XaNvOwLJ8wPWxWH0cRHKi1F1HQBkvK+n2JYi/8W6g89mngJFvXKmArtBXbUeVA
VjujQI5UnI9UtKlPf0PhfXPtN2ajiYQrK4DecpxWziwGGc7zmMlZjabGWieL21GRBtpb4bY0ubi8
nOFgMTvhbm2K7Wi0vRrv3VKtwG/LgXRzSpy9Wjipb7ijW8In7puD3OYwDsCCcVINT3xIpHRUVrZ0
lyeuO7L675pnbu1tpkCaTWZXvtCVqCZzvXPm3i5fB7Z9NHr/X779yxmb3bPfbECsuL6s3v+bN3/+
P//kJ0xdxCzxa8mrj+bq6B/eQsuDb7/5WsTFCdEcZgSlvDC/3CxajMoA9CCRLyiX4BnnoUWjPj42
ZKPRz3NMHEruhZSTjImYDvOrGmShr/OrqrjJRki7vYJddat+awq7iJf8is+Mo9EdxRUeZt/SdD6F
n3jaYCqnJaWT2PX+QdPBic1bdXXUwC+tMlv4znFOGcjOG/0BJ30CBUAMI6szciLslJz3D4hr5LmA
7+w5FutAV1bZQUypOsLZ/7qg7BZ46ynPzHZzisndJRVJuQLhqVzoISk1R4vJ5OpmwVkeAQxu1IPs
0EpNw71KSUC7NqxzkUXRLwvK8FPg08yckteNJGX64gYktnJOBZPw1aLIMSMBVSaC4SkqpwMAb3Ce
cBR4OtiCxgMoc2iKrz5H0WP4LTo6mkZ3rv8u+h38+4j+/Qr+Pb5z/fDwAH7/26dPT/jvJ4eH+MnT
p0+/OhkFndao2YNDbvfgEFo+PRnNquIsr2Y86jRKDq8P/24Swb+P6F9Q5KWF4A2a0AZAw4eH2ORv
n4hGCp/8jD7BSZnPcF74KU7MfErTwI95HvCFHgi2e9YgaRyrCCiQhw9AGk5RJWZSSqoa85DIH5gt
MOjfhkcOm04ooWCKu+msZhSWQ+srIG4q+pdfyxxOwrODwa9Tk9rMRuYJiKZOn1FZeSAaLQckaqnx
8T/fbU+Acd7dqrXr5nHK9gFnJMDFoqic2dgfyNqtT2SCdKmeliv6u2jn+brAiAhLrwJmVyWXKKy4
nBt1WThO+qvsrKk3jks+G/SnRAjBUE69pDvXdw8ffososBKY9KX5ULfP7G4muAUZCFwmibsBGfAJ
fEiuJqqNteRURAzm+7N8seCaIQnla1aqJq0SpTr6EF85ed1jpUXK7VDqPP/0fWbAxQcH6k7BTCjy
1wH/mZNkMh23Xd0UbqTyAmY1HUMz1PDHE0o1hIEyY/lbRFqOSLE7YjqU6XjeFJhwUw8mBnW5y6iY
GOYg48SX6BG0Y/ocX2CvQH+yZRF60nAJ7J4zQEQpPJJYBmD4dE9wPSw4fGYxbKcZu1uoXIzpIRF+
ky0UFFKCG/w445Vl8rnEQsKYH/BtDe815OHwbVWf4cXcVvj6hpmO2yihV3kt7yrQvkzFAwGuqG+5
grnagonMA6kUZvV1fQZ3UyKwJt4sLeSnPoB1tTkrV5f5Kj/DgoTFGcytUKMTeBdBIHMOosgSJfXs
Z0ykJnkML9ksBBmMNdr2+W1WeoY8M5oafHtWFTOcH+0zmUOUKYd3HjjxNVovqxwddbP1DZoFxhZT
FgKByaFJLU5SSXbM9b8OMVe1+tXAuQ9Q4kzcN1SJRmylpBPZFyf8NmQvq8/wNE2Eau3QE/4GGWfL
XnfF9RpIBUREkKOdj7B8UCLt/aqVfTArkDfJY099IE6AQxB0gBb/4nl2AOZ9X2E2LFJKe+Xxe1GC
hrxwPKF1uIppRhmdW2pl8IVHCkfBFALmQR4dbY+c9M7Yr0cO3DLD41cuJIRmfHQ0ttZoMQm10Ue2
35oy6vHqvcSkui9qLqDaJocTu3UaQJYyFpD4mumVheFOx5kY+M1QnoGfmnmXMi8DY2bdtijST9VB
ZdwAGS82rFfE5AZuEgxYm0CvmqBirYHQi8WMGejQbuDJgqYqMCaJoTml8WoaX8ECMVjcNrYZRlWT
ciWDsV+2+Iok48cyL7ilFmQV/ahs+TLlUBipmeW9qZqNr+KaRgGjYH5mcUjaeWTvN8DOLg/ie2rk
UfDlRegFYCS+7KWHdMKQrQBDP6DXkCFShwLtBvTyhzM8wUM7bB9xwIhXSXCAWgRKbyDkAXinDIx1
R55INMVmqAric5TncWS5OQSUfUXt46u8vcYhe8EFFq57G8GHVdhYIi81IF8hoAMJ0CUNn5CCLz3A
mclLs7oZh8qiKI7oYC9YWMHy/fXWP4mUWsCFXNJtYQBe31uFEITwTg5qvTcrt59HrkG/f8QgNGUE
K1JQCB7soTbKG8GbaAjxVmBj4Fu5j743Tc9rUCXm3YxutB+Dsu21MO54qB0I2V1HRNO2LAHtTbIt
Y18ykPuasDWEAXVm/iA4UIN9byyE8SDge3hwDrOPiK0k8ONvI3GltsCMkh+zjTu272MZ7D7bpeCk
+xa/0WtWvJdZcmDZgfh/nWtMF96ByXFG/B3cEwVE3dbxi3nN6zuKxgHW7MD3MGC+O/67o5P0Y+8p
58E/SratcSeSMWQMdGw2c2O/8a4eajvMIIEkVqPdt+Kt+K/oqs2GdIYZqDTcZAsBy96yONO7fhUx
YjpH9C5Dy+34CJ3mKUEZH637XZE3i/pqFZbgXB1GzXmLsMfCl9+wqMx8+KIKHLDdY3mL+qle1NYZ
MV8KwQtLD3Zfda1vW5Bs/UetyB5L0f4QZQg730kVUn1tiDJujevQhvlzd6WGj8O1f+8O4UHVprKv
p+KavDL9+8m+H7TtJu/yPt9SALaovaM+uwrrwG4bniaOOjF8XtfXom/HAzzMUqZ57QOci5somOO0
j7NWyrIji+/FCBH/2iCTou9nqGujBzH8yPCfZBDeslyV/QycluFmrbzctbUovorRMZ9fjKa9WCI1
j3odmobbCj+cLYqK9tPveBBelzG8bC6VBcjRCW1peuQvSXyk48+/RMuhPBtOxw+yw7FZ05jWNP7y
C2tZbn9DPDS9pH8+6buAtSRMB0zmU4vkJz29A7iYtOC1uS3wzMnXcvw88wvOZ6pwFjDNjO9mny7x
rva3xrRNM/WiITHDh2kfQfOqbkMEp54dZu3mErRWnfZYPmZeUdgMwP+KcT9Dh9zxAVpSVQGCBZmB
cXQtb9gkC5N8/1+Zd3JCD2zz+z9783/9Fb+Tt5s1Py7UDSHyPgl/2v+iNbmwVBrC3jO2/Xht6jr1
3r15Dt/r/YZXXQ2/eoAAQvN309o7bx1UKICeNfCpRnWw3jk492H/oQOaMilF+OggaYJv6FFaTBEi
f+/zvkF+N/aTgZqI5axSLcgziXaOvuu9RcyrIl8hDpjJc32cIxSOSDfgvkAtaokA0iJbqkpXr5NP
OIrhk08urjyPTP2kr89xvc506B7M7tunzGvM+R8YGTrKuZC5w7YB/yUHSsmOIkRmsYcxEmS5RMwt
OMb2iGiUHAj4FYxVDVWiByOHuP6D6mBlNhtbbiA5p3qm4bMoeoK/sJPHOeGyVXVecSQLxGW92MDl
wBUb2SR53WXRk+v8co1HUCaMNsZsXeUdOpmg/Prd+Kpcffrwu3HszIg4F5c9wHXA9K8KIqiaR6ZO
kQKURViDx+p+3nXro/v3hUTq5uw+ujy03X11zrPz7rLiDumtkU9bZ1A5kUJlkk0P6FWFFmCix1by
v9BR4BWpVendsWaeU0ptrTAKjw/tH5cHtXfLAvOC6JCcOtBro1zCcZRpEkM7LTpMGqpfe7kaNcyc
qyZh5L4BdlNvokW9ijs+31f5qsPXzOK6mG+6wHqy6NmSMyzx2OXchmZyFrcpbala7CTCgTDDDCYp
QrCXXHAIscjp6SUDuVajWxswtKVaIzDBG2hfdkAlnC1KLosFZ8dHNqcQ6yDtNRDR7SjH5mcr5fGj
6hjJKxwmWUqIZKZsPlAvcsIihCXQBW5YiO0OiAiQaGqmjaqcw+rI/w32wWaESUo3Ec2LPxDaHI8t
iHyTESzcQnIqQpTiTceWGX3JkfOX4SpiVNDFR3oZt6dcgVbnOdWrFLV+dO2u02KVgpZv4JwI26mb
wXdKLAgmwbeezEulwqbUwP1CyktQ7YmB6vXncA9IXbWBYqQINrsobq7qZtFSTWYN24maOa3rajBs
Br/kfjxgKshc1at/KZqa0KhAGKBXeUs+VwNA7Rd7xlEMRy22qxGzZxntSzDhPx81K9dQEvNHcdpT
Renzo22xYNa8rDcDGFFxRqdQMlUp2lWI196JmfTp5f/VtHUUqv4+31nIxi9WAC2p+IxVnTr0skF2
42NOwmh3zOrlEt377kWfYZDD+J/Hk5NQb51rza6Crb0BW/pwvFeqJZrJcK54LnPxiZ7i0UPPomcx
pGTMftOFyAHAKe42VAmHVcXvVuNBqxvlT6ViwMNNQDIf/PKuOVo69zg+MpkgMwoNGjb7wToQCcrz
1kmLLKWlfYpDwfK3cd3GRyDG85Mo/NrifRKziIB/akbAH1ll24BvquNDDdAhd5CaF9lmvcDwdeyG
JSV5TrNZj6SH3Lb7UMgTWwD1ElssLByET5xyLmC+5A1k56xT+EuD0YjcfSBkSML3UOTEcgBUkjvk
CIm7raOPtkK0jjel3MIfQ0E7DitCcrKc+meneVtw8Zytxcpk6pQXezEjwcynCa6zBbpT+hHBPgqN
yIHHfHGO0+01rUz0H9bPE/EORB8qbIeSD8bi6Icjx2YycHQMC0CxTPksK2lSxIrXb149e/6LaLy3
g8OYbLwcoYgXHKhpLYplWrBts3G6G/V0gyKG06F95qbDyHKpUAstH01a/bRGO+yj4bHlpCrXDLnI
uW0sRU3dhN8qZBCauy6JAxKMnCRWaYnEnK4ucBQ2c64z3a/Is65cUYFIoleeTdX6hOZHoRx6PZll
3cTDlUA9n5wwflVbTVRHnMZaRslcwlE1RmGCo9Ho74X0UV3LgGVTNMYo8JRDknKCR16lbwE+QEr4
1JVhE5ZVY1aBY+17qNpnSpCxA420VSrRzbztEHsF8RziQdfiuhAcnI0/ousiT5jRJyBy4oJ4GY46
s77hoB5QMhL+3VpqAILVhuEMjaLNPCQYGo45bO+xF+ct18lPpz4MINQmRN2MqXaDJDuQr99RrcbH
z1+8efX2+QkRkwPG2xeXiM7r+oKKvRdNiI4wOaK8/vBeIcplLvWmm9ektdyURbUQtRIpTL4hxzPm
DnIV28hSXERowKDNOaR30Ce96/hhHtk7GnBySnhc0HtiJ+XTZ8aLqN3M54WYv6w3dzVQr6FEVsHc
/ZdLK279yoTGYEHaFjVvDLCpIywtxWhjgzwoKdeO+Y0Ay/s8eu4xL+JjT0mXmW5683LSvGUG3WM2
pzOzoWfBvSgV50RLYXMJ16nHNEcGDq46s3Og2h94uRVt8uuZBnYt24HLFiKQl4fWq1xJzEQNISl/
cOyiXsQpg6E+Alo15W3+rn90A2fSQ0Y/ZZPpayuu/XUFb4vB3dx1dQyjxW9nYX6YG7iPyzT5oRMw
PHEkKSJ4de5HHzcXDNfi3HyYON17Somo9DMcCmMWdPgVfUi+zFiKZ6P9AY5Gt/G82fHCrq7ua1k5
WsrH2ly+t+uBBsMPgAjhW/gnGX/78tHr1/Dbb+OboqrqK1DnkPf/3scNWRk9/KhzeZ8jAUbbHrIG
nrA6KtTkfWdfiB35mcHHFAVlcUc7tb/CjnIAi+RPtVqbqTqgceNY5mOt+jD1mvIc5VGNDeC/fPb8
zREFgsYHTRzVUni1ptuBvOAs8d+L4xsLOiLmdlTSiwjIkvHvbEsKysKolwCc/Lkx1hCfIQbwhWcN
m+Axux730nZfzQSFvY1SKUO8c6tgfRuExXi/DSzcxeXTELC2xBeTIVgYP00Thyt8/PTRs6/xcXNo
gPZ1cADxxLjlyp981GQLSa4wfvLq1YtXZrIqaYVt7MtmklppPMUDywnRWSyxyGgcThEVysbCEO2M
LHhk95o4npSJhELLIdGxIb03aHOs8De9QJ8xcW15Si4Z4lnrug0BF5ljfrUAFlxR+rgZx48go5VQ
ki11fST7z90E4E9SGwe7joBEU+2zbs2vTWTVltVf/7jLt70k9cU4jKDxt+ocoZOAYKlnTSDBanvp
onGk1Ux57XCwvYNJCLZ1oNJWbAujN9j2fcJsbIf8xf6I2MYbGKtkc74gxHfkosu2q/nWNEd77AUo
Uh/qHgpQ1F9yapuh3Bm95sc4+ImdRuNXxY1nib1juXzoz6jySPTFF/je0HYL4IUoghDMg8uyRU/Y
BonOtUfgX5JswMu+IhbLS/hoOsbpjdPhRfKsAci1yISJAuxYJXUqCcb9Eq1D7LXYJnJTqGhBgzYy
z36QEKieHHdR3NCQ8H3fQ5nztlDyJ2iX4tXyKWCFWk+wq+WGYee6gW8oy42iI+rAbStXNKCW9DV5
NCzILNvamocO16Aiitw0neDrjB5fVYOyr4wd9+YuCc8EHTqSWCyfW0l8xIPbQeodI8UB2YMUZjr6
QpwrwPUFrfFdIzmy55c26x40VHMJSAydDvC2tYLXNADKciu04Qly3hvgkh05pmG6sqU1bumnWnT8
u2YkFVgTV3LBOFAFa3MJ13frpvYTVoX3b2A0h9Xv7zfv8UDlJL/jqnj9q2cvo+O7i5MIfeIX4qMW
BJ5sWQu6sY3e/gVmwrIqYr7/r9/833/2k5/0HM2QOY3ksUMVp6LDOZJSWMxy9HMIcEfuicbdNYpx
sTSMtRfDaxgTk9kldjUty9eg3Zxyw5qT9HO1LcpFWJWXZddKcTo0YaGdqS3/pVBtLe2ZvD9W82qz
oOB5q3bdyqSQb5WvxGLTkFPDecEat5qNUy5ALM7XA3ZyMn2ig1vhri2TDLbS2StBoJwh5WvYL2Av
lXNCX3dNOe/YOya/wHleNZhMpsaIbbhVrjhrS+u4o9GEN4ECG/GYDamraDNc0cTe0xo9s5JxfLeN
KUPJxrOKANB4HH8U0Bjfc+MQ0C1W+u2TMxHuMZz+ZvxdbB12leM3uT4+4pSV+TU/3504vEVK5X4R
uY08ZFK89jVGTCduu4NP0/v3H7o85jemtd/4oEx7ZRbULEvMsnJ9jBO6Tg9+04ulwQcVaRVnWRaj
OHnM08fWW4ozEPU5jgtB8huicJsruGRtt2Kw+AEB/8Tziuy9cN+J3jRsTPmQr8qqymma4jl7gZmP
moJ5gWEC+M6ZR4zMHnLoOUKP7D+T26QEvM6vY7K78vMcE1VTCe/tXiL4sbgT6TQT2DWe0ceUsSze
rC5W9ZVdHjOIIwVPSsVQLZxg3aIdq+uvcGiV3oh6oiELWvz58d0WA7fgOApz5ah3YM54d6kEdbCF
h9d3r7+IUYcKjsYWJzUu0I+phZEptFFRjOutgXHB4wx3RuAuDx1oaBk6zkNHmpr7B/pWJ9U9rSRM
WnWrsdIrjTJ9+NlhPwVgTtfhAV2UaO6DnoT7Az4eXPJeO+fxVvBl95QqnZ7pYiWWSx9crpizQ5sz
W9tPEs4ghhDxUxM5/+WqMp1TOpbOcKyWEkfigA63WgcHm++uOXp3dnW0Ah7UiDugdVfD3Y9ojqTS
p3ZVxuOv3svo6m/qDQyG68T7976SVVSPHJO+qexplM04bxbRw+xvIryynav/DizxQ1lcWYuhSqKK
1aiabVqoSc3HhsUzneDOeN+i6DLwHXumTqMHf3No6xetjizmpAfv/5u3/0GlNc1mOsclCM/v//zN
//MkLNNhuTZKUziSzHakLDYqtx0FC0xA8erQ2QUhw2aNnAyqmR5Jdfo5cxo3QeYkmmlnMv2sMBqh
faE7h106O6fKXD1GpWTFp+x0O+jUieaFXrAX+gvSb6qCJ1c6G2bQ7DXGCjT+UKP/I2y8m/8GyAI/
jNDth8OyMr73ni2jx3IPWeIrtp0gla2ix5hGjZNSYat1U1/faFZI9MrK4LnO4HktSfTyebfBB1YF
FJtwd6lo/RgZrBwn9m073QDQ6BM1lU+w22Mqu4VapnmwaDYVzOa0qOorHAzO6Ie6XJBlYNOqtLic
A7CK6BjwLEhM7s8ncVf/mArgChoY28gDZHkBSNeCTO28JiEzRXdeL3itSzrZEk3AoyLPyDddjeI9
ZyVsKCXhCuEhuBd0ktBxN2c+UpUX5pk6twYDSNAKKZZzUupB8DAIDdo4RGXEoEX2S20fkcMHIGAs
+cvO2BoeI4M4GpA09AB+qyciWEBYFs6BSAvN7/pbOZthWwBDlXoZcarQcS2+/dLooriBdoxVmPPP
b5Qnz4QZIg0EkK3By1YDo3zw2k3f2e/o6rxuralcwsVACPd3WU7Mqob+83MDhOo3wwarieQNfAsI
w8p/lmc58CeVvsYmpgJ34Cn5AVIMyQQOHblp8HVFfjlRVdcXdAsaWmVANH8cQU9/GiVwT0/IdWwS
wa/8Vk1xCvS+Hy3qosVne0xwD9/dSJ5PGaFeFQMQS7RcIcCJ2qdVRF/wcibwu8IR3oQ3GD9zJvEL
BpeP5UKkAmqA5nJRNOwMccoxFrKt6lRVFBeKFtrqhjEcJC+p2LtoSEQA8spXfBflfGUItmxONbET
gbqbPUEI9Qd0U1iweKFJkNeIMRRU7FB2LRLZHhPhETRYHccKmZuZvd8RkGedtTHteNev8NOkqeuO
pkaYFqWAw7UWfvp8dJlh8ajX27s51AGmDv5XuhM10H95pk9prFFjV6rzxX+3KpzurLFxDBBOfEWn
b3/eAoppQR2PxDGxWuqgjV9Rakw9AvjDsWFYbLbHxzGfG3Cn8xI4NJz4G0ITc2C8OmwoTUHnC77c
rFV33qYYvRDUPbnNO1Ltl0zSXoXB/5aADulu8GZDoPSgBsSEbcsKlU5hCSqm6SaLRky7OwSftJQ0
mBvCOi6b2t2RvsYjnVxCEEM3Ig2/970fxLX0+DE1JJHCzBrnKn0fZ+qMnYz2dKAV5PHXPkrtQoA+
4WkM2kfTTGpKYV8JFnUznyb9c5e677/e2oIuf5ZLlVnwhO/WgEXXtEHGY/WwXlvO63JeWHU4LUrx
acT3hJW+WyJb7OW6r/GoYEp/ejp5EIQiLY4PT7bWeQdF4RRfz/HUXaL5Ffm19A2BxZQrSfxlLJjT
E5kAv96/PmZ8t03uNmmsI7Kd5VqmAPt4qvyaHnXMK3UEKUobv6A3H/gBPU1DZMHu2yjm3NZyysqH
y5A80iAXTLvjyHwKrXUVz2fo9QeKdIeSYkLSslY3HrE2BtdAwUlYlY69JKUUnWlEPtYxdpaqpevO
mbApVYNBTteycSPiBOccZKG76YiFfoc7IJdU1QEVCKUE8GwbskJ+OHfs8IUmjvIgA2RPdKfE3Uy/
fabrHE2j+HOc3hdx6GpjVr2r8ZwrxYqma83iMXzyC84PUDccPoA8GD9O0jAHJfxk8hTdNzjamu9t
jY1sEHlqZRtMR+EIAY/XKh92iyx6H8HW8dT1S4eAsx8sNqtbUgG+WhaoB9yCCL4haS/h4Gj84HV3
2SXH9o6epLtIAqa6fZN5lP03WPb1upjP/iAbq5G+ApY52xJWISc2YGhJ/E02hYOx2kDi8B2BaF1k
iPrnOvZVuAdVXBsggrbeNHNC/11yJ+PqbEmqDBU6BAxZuQ7V2xm8yZiHsckf2DpfPF76A+9FkAHC
6Dz7PZf+/+2lbr8inLXKq5/eatRnzccOGgL5baxQTTd/7J8ygrZchu0GH72e61WlPDlu5ot1HAGo
jpQPahvf5oh0sQJgJZNmr8MsTfdaifBjVzQJxEIR5w0tkiwgyubG7lgPdXBmvW4HK+y4EWwBafsO
vy/MN2zAQ+ntPCdbmJE52pCjBimeDgHZ11zYUSMQoUcr8VFDn/YvpYdB3Azs7Zh83JRfYeBNDFFt
ekx6+HV5vTOaOrPjGd4QDVrAZ1Wx7HBA66MG623j8Br07rdIV/ToncnJ/rmsvblNacUM+eOg0HKm
jBslzYQeSoeZxO0eSbdJaNapogmpA/xotdjn8EKzfQ+uIgEvrX0vRJpEsqAY1qftgLw1RNn2DLTz
nUe7weKyQkHWro92nmCrceAEu6c3cOTiBN9hY36c5BwB9vTRwy9OY7VVL5p9dupF8/9v1I+ySYCW
bXs0uoMGjrcr9Pu3Xnum09FFUazzqvxQMJ7J/N8qSzD8ts4x+p38/H4rTzMg+gKtwX9HUYxUZzEV
Cjie6HbPVh/QPxXaJf+b1yqVZr83HqUjndCRZ/qoQYfBEFX1KYttCI6Xl09f9nKm5td0D+IJXO47
KSiwWWZQ7YkVh5F3u/+2E+btLiYzx4+7VvAX+3L6w18qI7bzClmrw2soKpXT8PMycBz2o/9Hi4XQ
f+LLDPd6d2xqHYjXm9OhjgdbO36zqYY6frK141flh6GO97ePWA+u8e7Wji/rq6IZmOrwXMN8gPfo
j8IIaMJBRoDfpL22g4yAlhmGxBjot74NU7FO7M4DG2Q7OPl4IgseZiN7w6MVAEBZiQXvj8mXSGim
ffr+QjOv7E+Lv1knxZiyHudVhdXS99KApa1r7VDJ1baZOqwXIQtV4mGEENL4+xovbncr+rOY2rrs
H9kMIr5UAWZADltOuyAbGJaNP3AVz9/ah3G5io8YFi//94H9c5onsSNr51tK1rsJoHI2SP+K0/kF
ZFlJ9Ifk5lr8dFgCfdVLChR2eDfQ6BXHh+PiN3dPaT7IX2GRykxuYeXuAm10+FyIKHZ74CfH0u2E
FhCW+tV8BxNPyX7cm+pJgOw+iUOmjp5mkg+z7YGkSXqw+G47vdtOyAgpc5yoGaR7Dc4QPAADfF8l
48KAoFmfovTH4ROiv07DvW65rdgv3rqZBnJgUy0cfoJK2PC2BbFGfayphzZQoWsxgK/FDoQtBjC2
+FiUoTPQdpQt9sbZRyGNOi12oC1sP0zutmnfesh81rYcYrhBQJV2d4XWkcGcOKoNJu/bpxV75V/c
soMWGrbdjbushyBPuwzpx35JFTMT4cx6C2HyQRuEbbsn2SFkum/0YvrvqTuE3RhTYP72LpI7/vZ7
4joNyJqTKPCgx0LQL8TBaQ8ZSJr+YV4BghcwtWZuyrcuTGf789hOItlLOf+DvMH39lJWmvTN987i
7bBYyVqtfebQ8aZojQ+xkkcm7IGM4h/euZw9qu3sOCi9AUmsHlg8XE3wTQBzTs9mY36/iwOCqLxr
+ruoevb2cstTHi5DB57p7VRicT+z5+12+4fdbn+uTupFsnNa3/+ROAAZel4VB6U+pezQAbPbVJVx
wSDbj3p0oDCNvd4dqOU+PiAUYB1kFvhN6rQLMos7ESVZwbDyWKL+Obv81TnQtfw+RU/p2N6DhAEa
nNih2DH1Amyq3pxYrlfVxOlvrjxEvl+W0m9O4fvuR8cP/vro4OGJtTLO7GAFLeZtpFf5udXV8lpx
uR6NsduxR8FEGcKf1mjrU4o1gL/ikBWDOWEw7OcPby7QVF2erfakami5D1V//ytw55tJaBeByPEH
5o307o2Qz9UBVVRh4uqUE35Oa7y0K/0ikmwEGKWcDcHk79O311tM9bJebHfTgiFO3PbbHLP2cMoC
CCGfrMC1Yjto/ZFFAiHIr8p2njd7ve9K0z9dkuzRocrngNu+xwKx3T6rI2dbaLvt9ZO+72EAPkx7
zTDpplo/uwRLXkJV00+N7a2Whs16zncmGa/5MPimi/oYxsVwbk3//LoWC68bu/FKDqB600m5sDFX
wqQE+SRQYiyj5f5cqIBH1yxhcusmrWDbzvasHmv7Rhv+mKMmMcCD/k4epL0GKunQU2pg0ZoQKnkw
J5TcDWYo4e1yTfWyq7tuwNw2aE8kejT2RIcfBA2L28+6OefSzok9pc97EaTHD+wECx5VBfY5eO1a
hktfvdZZoBQFmKhbSYgWKWoYoFak+Tsf/x/Ilo9ePovuR09WgN9oXYMQ08KHHw9wxHVFZCO1RC9v
Vu15vam43KYUYTiSoEMqFuWTgBCWwIiR98epRROST2t8BkhnEOOJ/DLqG3dlDkDNgPQMYxdaJuk3
8Gt6tD/ZO6QosWsWF/o+NKaimXwyuxVpWwTp15ozt6sqBkIoxFoghvVZu9RzQ07GiU+kE4pHphy2
WGIpx0AtEliQ0/cT5Y5pRPbswyjXYlFiWmfibVylaVFyemFKWRtFrzdnZ6j11ivgjwF4GN6OSrRw
HCsw4bRY1k2hhCX8El3X4TI/OFjVl/lZOU/HoXMsa+XQCkl8f9meJZJk2HBWt4wNp7Hwg4hUamdD
UDpx8zOkAAWUKVqIlJJYUi6R7pSSdnWndoNt1HlHZm8OIQJQ9zDf0BQtr1Ima1o4VuY9Y/frGhg5
0ypmmmHCg7WQyjVFrvlHHdoHTjsF6o7CIVjXmF2z8csORXgsSd1IxnoU2ZoCCWSlkl5wzjwAkzqp
2q713n1PUYAysPQyS6sctI5iCtpth1qlXRMU2x0fnqCldBxFn3+uHEDVfZ4OyAkIhm24VhpWTIfE
puAjA8eTE3xzMpqbsPierTa7Ct2ROh+xo+9fs1563R0/+BvJYKIiv+BDkbZQ0PsDyx3br4vQTfEj
smxfLBiNSopIpt1A002MwYDlajaLjyTniIRCm7QXy6Qf8PHXpuxG4NtPTYmyJJCwLKYUK7GpzZCM
YYzoE4SFc/rrcWp/R9w2SfsfJkvx+cd+wDwPvTZLBnem+2KOnc/sFiV+34ON75DwIXU+dL+yGMPD
e5/e+wxoq6rzDgEwBcK2jYn1uP2u1bpMKyFqWR3QRV2vW5UvjFvA5TWJsETTg0n0MPwNT94eCpMC
HSNEWPcJreEzdy7xOeYSj4/xeyKBc2fU+Gxzwe+x54QF+O79v337bzH5Cub0yyhi4P1fvJn/jirP
jujvCL+KKEsxsmJmOCP8muzGs9lyg5Xngebk1seDPtOse6RywVAhTkyXI83kk0tgGWdF42SR4Uys
GEhdXJcd3M21VatW54tR82Jgl5f1yv0u4w9Vk7KlWp3cBqenvng9e/b66+e/mtAvXz17xb+8evIL
mXrdZvZQ+akkQMTwcv6tbOFDZIAl5bAs20XZ4A8uSV22wEcv6HjieZayzpz06oso+XSi8i3hqbrM
17O8nVEEMuYVogKDTf+gUQNobDdKR0acsOBQJqC1Uavzzk0+E460XwXjN92A97pFPIpPUDRuuxnq
pL0ShE5qHcaY3dNPtEPInlJDL+uB3QvTIlh/DhVSrK9W/TqKJEERNYQsLs/r7pmi9mIhkgAWs6Ie
VtY/RZNXxuxbkD5DVwglHM+k7JWUr7laoMVpfbUpF2Jy35T9FLgEBGOmB9bEpZa9NVnJurg4M1nG
0FiPda914qkffvlnzXrP5UNLrhR9ppd/tnP57EwFB2ogPlEObYAiM6xpQzexBQixsQUSnPp9IeGh
7tW670y9d+SnCCJJQwMBw9k+EJ/Wl3VbXr8EWAkzswx//3muFVi2wQCRy+nCfGoTJgBkofPpoUci
8/N8dVbwsWjPsVBzbSVuo3RsdO1S4W6HOtzvgKXcYDoXydjDCYNyzHZ7ikmRXFM9f4XGCUnoMEfp
jCoG+/PxHMTmG2CWmFgms1ehf4fTw9oKTq5cJPjDoPtMfUszhq/pZ+pWAJkf9VL2XmsT4QcQCroE
8ciFu6Pro+iaiRqF4wusMR/M3es1CseAhU8JXDe0o6ScgIw1iYhbOCdln55EmnZnK0NnvghRr8VC
+O0XuEcOd9blaV2Vc1QoLlxGYhKnhmejBpoox6UGj4U1k8sL/LqrhXrrauFdOzilNZ4BWNINiN7n
mFEP+yDdqgRCTFn2xAZnJLMB9KixLGQ5E4NlO3MTGyhc9HUFWv/0gX+wKAebjy+/bDHjNSGDFyUG
dleR9hi1Gs9Lmj+0Ppk1L4o1Rx/5w85tlH1O+BezGwHhpeugNfGuGAmxjeL7MeUmq7DsNSBCxC+E
6p3qyij6TqYgGQ7opgK8Y0c/GUK1KAyPHWrG+a+pKaxjs5LSyG2x9pyo8waYg4IGX7PWm8RZFk+i
9JMVCDOJni18kt5yE3iAHvWLdcDmWaK7ugJF32LB7B1oxyI+4svTSMsWQEcM8/ihozrjZ3pslyM6
o6v7vD+6XCvu8HwbTCN9ueP4DNabAH04Gj19/XOmM4ZOUgXdK/quQ4XPu+7Uffg10hvdhwzGylQq
ScPqpiTBhS1Ey3xOmT+VDE+J1ui4MeWiOABCuV2im8pNUH+d29vN3SnJVQhb35QtJdxhMYk/8wsz
W6XTa2SWksk8FXecpuAEj5cCigrczXBioLm3Oi1Zy9X2WqRTaxqPifaaVskH6m+7lDClQEJJxOP3
w0/wdg4g7EkJwEKv79vqAQ+CNwK9hu2ITZ7U1M8+zMfuydcvXry8PfRqAPzAoh00BsTQQVGUh8ks
uVFbXQIy6KAcOgyG1c0tgGwAbtfQ7b+H/BqSYbvMk4wD+t6UH/OL6zWcP+RDziOMOiPPoFuZV5i7
lks0SuZhPqnEGBQrysxy6HPJLagqxSihFqRHzOiI2wasoW4sKfLZUuwabCUk8z+nNWz93ld1Q8nx
w1DMopRMC3y/hMvngL5pJbOmzv9pOoOiJTnHnbXpG3SeNw1mSdQyAM3Y6w94rBkIPtJI3oLcupcV
X6RcleQHdePXxqC0iNc5NGpRZ7R6ZdHbFmthXgG3AqzgbZnjn8Ce1wMieekiNuBvKRfhoJpYUzmS
+dXCKwBm16MkI0JYJRoaizlL45gVwnD3KsRd2lsfKNjKo8q9k5mmiSuFBWYqBqZkl7jG6vo/osgk
WjoeA0ojShUa+dB5tLWjQPaY/JBXB8XluruR3Pd0IPTFO3aO+nneng8mWMMvkwHJfzYr3mseQTey
rUY/0EKZt2HtQyvTPXUjuxM1GitAOw0dDP9BVmG0ZbJPZB0O2z4MdtjzJnSCnXxG/QCNte1DN7Po
IHqkF9nJOaXoVK3cAlB1uwA4dPf5brTa1sLbAf/iFsCRB+kbsg/eUlLJgIBVQqhNHHGFWyJzyvOL
zAwhoU9kjNDiMMtSKdFvQ1csvMJRTfwJ2hDVUe73d/YteHv7LsFhk51FVaCZaXESUyALHvfT1nv0
qjiXAuNqMf1ta4rL+oPaNLSYPABN4WxVN8WMhmuDdz73wrSDuE/AfPQlGyUUQWf+pkyFbLIB5Ttz
UGOPY65h+fvqHGHTSPY1Xkq9AQ0IBAgGtBi82Vg1JssOgMElBg08IcOSeFbWqPfKqms2xRDHJkUE
9QPY5EV9FcwzFKQA5xKZn4Mwlnz22c9kC1IYsp53KAsc/u3h4Wg/C5R4qrTnGxBksuYSMe9tf3AK
7nY7f+0TVzdsRrqk95R9zRjbMOViaRt6thi1cPMGTVriFaLvvgldg+j1Mx1fLv4amMn8fLO6oFIb
f/3ws4c/+1mYuZ0X14vyTNxuEQSbjrjQBiY075n1ezdX8CoTLR8h4mNcTlmwQxeapcwGTZxqWVST
tT3PH4zDhGnaUbO+AMKuv6A20IBwr6geLtHgpxaThl4GtWnSc71N3At5EoWV87Ag9RVVTcdUk1Tx
HeQf5exzt6FBsWCkGtyu0rlUgku9LlZJ3JzGWzyLmTE9CGSu2SCcJdlPE00u6VDuMWg+VA1Oy2GZ
pqeA6ksNNutF3hUJALOWg7UDK98hOptXdVvYidBRQ2Bix5znQ5ZR2DJ+KpAnUJ2H3dVrlEqyrLEu
NrJsFf+bN2cbjjghUDcYQFnWGwaA7qBde3Q08paXH91v68viPra539X38/t0dNCRxG14fb1FMKa0
9b0O3n9Oh7IJBmr7/1l9UdfYu49iU5um2Luf6kynpAvLQqe/6Weod620PTno4sqTfk5/Y6kzQTne
ok47uT1heaJwN9EYmTjrnLCDEM8K+MHpDb5CeSLMmGEpULqvD2jsLCdWX8WU0Pyqd7PEdn9pRAmk
AFSwT5jBlCtg5+VCF7xg3yt2KLu42na7rdGF7uLKrkjqzsnF1V41vzoCeUxrOAndBeE0/6zntu0e
dztpyx3ZdZBifK+wLB54IuO5xRkHrLgnFmesV43h04AabuVEccVCAmqz1Bywu/rqxE+Pb3+HgdnO
6Q9kxnf3Q3XVlGx2wWvZFut4EvWfJ9wjpDxNXBIf300U+PZugt3hh9731icl67gZRc6cHmbf+KvH
vtuCLEAkeVyhoSnGRrHY3PwHQS+tBvFkjwmIt6JZ9MiKm6cSrSWWFJEIIBxM+TdO4jSyKnFSXDtn
pZYgdyfYgxwg0BoWSr7BcUNAWrS1cZ/6JGqeFnB8eJL2i1MaELLdg0Dc9yUGiTHt6T5isdpRDniA
nnYwfGhF+kyEj5TMKcwghqehnOxUv6zB6jR0dMNtlxG54B08OBq8lxxeLGfd/B3HYZYwOL1dILFK
nrlTjstAbVwflw5nHR5VEcsg4w0TDvLbnUChUfoRiBi+c5CKghePO21TXsPwC6JfEfRMjRuv+KZT
NhA5B3Ex9AGhBVFxrKqKYuwWowLiSAYoaMPBByEvUw/g0weo02/wAQ3jEHH65JiH1RqoiVRIIsYp
MYrISfBZbdB6brMtfltTp9Ty5mmEIx0rrQP+dgxD8HfKuX6aM8VrTmwA24QgzGxDyOPy2zBR/xoE
7KkZnPTcVGTIBj1uATeJTDfskMK2KpzugPnAmJ+bs6EWMheZ1Z55ja11ZOWKwsMPGXGjwGpklCM/
nwpPTMIAei/521T+O0htoMRtVtqLhP0VCipVqS0uWZ97e8PG9wO8zjTS+dzu05W+9mMGNS3Ib/eI
7u45CL+V+D0gJPRMJKGbn/RTPsn4fjeNsah2scI6uGyow7/mNZ5W27veP98rAgQHk+x3nrMLArZe
6vCxjAYwFjpRDbnwmmOZQzZZFIsttjiGFXjzATCEhzTjJmyqS92+vLSgMTVs+Slr0ul9Myiu0UKV
+iXd00dqAKJta1/mbWfxXc9Ban5eVov9SYaaDyhnRJDMAsWL118E9d7lA6nCgMyXISsZx42GTAYD
SqVyDjWP3n1boNQlAnCBkkRYoxwoYTyopJlXFRD/qelJ9M+W72l/PAUWiX9fuNSWAetXjmzoyHLe
f/32ppL/M/YIb1KzTtI7zs85jR2oGMn4ky+P0R1bG/IVR2IHgLZT65rg4ZUn+rZuutB5Jz9yY4sn
3/0V+rqv67YtT9mADTNQZSOZAcBnVBzXZeYgs+teON6WM44A1As99mvZkT/wsozk2m57UpYVD5mR
LaTbLvh8jtQR5NPXeg+MJtCzrPZ/PNbZnpy9y8hrqeWSiQgwcF2rI+/OjRqH9W46Mc7JT7faC4+p
7ckQJfuiCOzRVL3FP33+DfowAdE60/m+u9PTK239joB75dPDKOrL5R6NYbGismJcpaGjrLOpcRMv
PgHJk8imwYhS/Gu7OI1W3S1OuFTGWQTazWpRNNUN1eukpzL26gh446psf+g1ROWfjVNrV15uG4/K
ZbKRVvQC7KBr8SIT2TUgDWE/hqxvhMcoX0iSNkJPgdiWVQX0v6HW2fDzGyLB8cFVAYfc0XqjC+yj
dvyU1vp6zcLam8RzYYufTrmPVwJqfUNm+mLhrLZHa7j4wIGG7vjNQN++nsfvrfNknQ5eN2vXAX0v
H/fGsf1Y/pAiLFyLx22//oTI+NceSqVruO1OOW3Inf7BAPcioLbj9rV2QE/SsAqN91i52hSjoJ5+
vY3WQnt/PaFJpNvBDVGlXsNOdITJyCElPRXrgR4J26Uxnw0QS9VPMtZBdB8a1fFRWrE625Zi7BPx
7lABq2aTloUVFL2IxeZyrRxDsNLuabnqeeKvy/mF4ZBwoda8GvSJQ65mL8V7r7va+l639bWcR81w
gjK3JU3v9o9plxdGMvskYFsRpemvdEw0lmTWohkePNDHuJoj6txuYIRaL53PT9zspoObQzOa6O1e
Ozu9Tvu7bBZDCUZmp5RgW9a0yLvc1TW9BVKfiPtQa7OJWUiPFIyYOOuSPSH7auUPr0wSnj0KQpvW
0t9mzrSCq0l97GAYrIcb1iL3wBIFy7s4ija0enKB0tFZCmQ2rI4rwvqjoi8OKdO3RKiDS7ZuXMV7
4PJPnNToeZJesfr8XxQKW/ko29ObrmgZQ/u8SRrv0aaeF20bUf/xDleb/rBEzgOj3n6etLO4MZwV
hPd7f1O4dLUH7Go9wwlFZJ/pBzlFccmgS8elk2i+nyzOIcvbcv4Zbzzse8+tXwhs6pGK63bNTbjN
YERAv4sll/iebNLAnli6xSHucJ/oEurDl1y4AIQO8Hjy7bPXb0IWXcwcg8LboqQ8aaQS3QeAcjw5
GFXi6rpzlALvC1FnAWj45FDlcK5Qne9UVVHyPQwR8I4197Mj+RtgMiTzOd/nQUXYEHn65/zOcCBX
PfF81vKjBBYutAxM08vsj1hKM1zCTb2RuwEjc333GnrRpMw5se+Us9KqIKKJvBkXKCaeFo6v5vBT
y1YBxHsQAXjAtg/DwS3rAaIMeExYnDXcx+ZMu2T+tb6pUn2AQ9OztHwVejUhmsCbjbISuRv8yljU
W9GmOaVTLyRUgOBBRzjhGCIz5nCYIrTxDC67C6VagNU8ByAY15HTOm8WlDKn2QSTXPp9dKKq4BTc
yuNVKLKth9BqGKO3wFgVRJnZ6KIzJpYJm1tCxlRoFzCzsGenMp2KBIJ3PsGJlY1KQ0pI7ueznpqz
iYQbtzw49sHB0H9RgpnwY+tNxsQgSZZXfCqlFGvo02RGBv7YYfwSDIjmL7wThuQbPXLfSLtLCdxg
1957CH64mzBvBzQ5eKAg95O26vvn2fN/fPT1DzGapDBH2kjNuNZrSCCw0MrWYDnB1xw5YAXI1dWi
H+i23TG4toyjOvZ6Cx6ev3jy/E0IhEOQW5ISbDXtqlWMJN+JlxhIISlvZ/OrxRbzpfSLpCO6187P
BYWtfQZQUFhsGlGSjCxP/RbRGJWSsQKXRS8oPT2yL8xxpFLBcYwJogHADcSVGDTzDm+h4puygNaI
ha1CIzTQsOx8DhW//A7ixnLEELTA6kDk4UgRO2NBmw2ZeA2pqCclZ+AQR8z3sTjnc9I2bm1rzl1b
82yG+fAHg9FiLod9tzGl05U/hgWi7QIQbJM8Z/7DkYpWZcNSs365Y9beaOub9cWZRh9cOxdUzmXL
czsNctOdYxh5Pr8AatW+NVVdU5AquVK44g+DzTjmlf/QDx7YTd88uBsjz4dZ2MlmDXLZohXiaTsM
JtUklK900G+2vuk99l2dg5Rk3NjwNqE1HLDDP6bQc6XyV+aeJ/eeSPBELofk8I4XH+bwRPl3QM6U
Lh6PwqWKnqTs3+RUl4gXjS+ZKT3JRVtQr3V0qR0m4wGrs0m5oDGTCFj9MHF0C7cbMyuStmNrl2L1
ijyUm0bjjyHcbiHqlmQg+/kHewtF8VbQvvWx0pueN7I52nBgUXIBfd8+cu5baCtGhp5XQRv9lJMx
4jfHhyfujFQ6YBlC8rqo5mO4+Xrpl4nBGW+o1hEGMDrJuOZszeO0xkPAlendTE7ke4MnHn9hTziX
qOE+w0uyVEJDdcOGNwmerNsD5SJIIDzF2cviJNHtuxM29ZOK8BLdnCLGPmXlYqWF3G2iy01LHCBf
qUVQNlKCk35ccqfww9cWeYaCj1R6JtcktE8vSc3kefrozCB67/V1oIyXQr0h/VEfB7T7E09maGgg
4CtD0o8G3iRt3jZx7wRh8mzjpCunx9+hCSxqM2fDxHxeN3QEJFkq3T1OnzcAVr6kpASwl+tNdx+H
hclu1rRBcEa4TbuVkCxbTJB+PBnWe7pUBxekMTm0jLPxxLWUDVwmfeQdjcI8lC8afd+ngftF2VRw
VYN3jEMF21KfeNxO4BtDSN+EasXfqdbDrszQQrLTyiYd+3363iPKB4RrLxRX6Ng8jWOV7WmPKSoI
9LMXRTDgxt1ywAf+hp7zFK2o7sGBiEWGv67XgVA5teEAJRtn2qtkrwDXOyhsK8GtnddrVBRJlLrM
LygFifhFFT/k3rsG420rYiodCiizCES6uLA4jNOmCWl2MgoZUO17fmwJJuNBlwYc4E50BbIfRWfS
qUff7+6cXMpb/OoSqwsP+cRTZ0qPQlu2RiOy2g6si9PVwADmF6RlE3R/feR5M6XaMypDU4Cb4hfH
B58dneBYSQxrmlNZxPVNHQpzcuBS3yM/woPcB+Rbq4jU/4KJqlEl2xfs351grUiU/gambYBbXoPQ
x9mg8LOOu6k/3bmpg0t/eDLaI4q5bS2a1SkjBMyOZ4WAOcOB6ebocMSWNiiO8l1DUwmlBZNzMFGr
FDfOUZjGt1iytybr2nXwdoby0fE4zykdwxyErPoy0jNf1KiGtcVmUYvaNhDG7VS54LJLKMOFWYZ0
04dJxO+wf1FogUw5+2cds54BMVU7O+A6ApmuPDiUf2zgmbGodu9A2OYdoABjUb5pcZ7WC9EHfCFa
4xPErF537ZCVAsv7sG8ARcogkA0lnMT8lJjuTlwk5VVs4sWlF1K9grOmSHg1FWlB3sjQ+no7Cnbq
7RgkGa1XlKsPJNqpyEspJWLm0mLa8rCcx9m6N6caLqcveMme8y+fvXxiRw9+4FzXxn23ayjU5IMl
n2vcHceMJ44ddT8GTkEfOwPgHPAzegA61nRzwjEc/s5YUUA4Cl71CBbHAjiUYHOzwheAws0QrBpc
5WXnPfAGXs0ZeC+NG+1/8Nlbz2bnwzdpUB2y8sO+QSQ8FVhf4HYwCw9OB77bZzqOUK9ec+eXC6TG
jBltg05o9I851PsFxYd2atJ3x6fvneNJQZDzqmXX8AnLDUUjWhsFGm2Ln3FStC3rzWphG/PkWYZP
iWtEsPyBXz5680s3no8Uf9LeeDa2XuHuZKdVMHVI4XyLKz/dYKz4wSrYfpiLW5br/pqvxCpHK5iI
5a7VRVOc1vjkdAS8oiQmARt5mmMkAEJQmfbQvkkP8pwcNLR+LP2DKaixC7af35xB40FdkcPfAga0
NZOlKX8QdhLf+Sysn323Ostw4Nk2K+LWHD/cXV+yIKMVqw9lU6+OY7Q9xycqUPo/DQflxjGLMiuB
RoV5M/fDLeG1RAqqovhQ7O/g/Sv3OOwmxW5Sfjy9hNf/9PrNk29evXjxJj4ZSAawQ4IZTEqwZ+yw
oPe4KTK4cpL47mua6yuY6914Ys1cLIe7eQvbmykDH4M/uYXv0rbthjNvtvso7qV5yBeLftToNvqS
PvfcPcFxnnz7Rg8lakE/lTL1VoQxHqejsOFtgLzo1WKxQHkFGjGwAZz0zut1atRwys/N1jG8lAni
7Sl0n/PutRdGe7STIkSTkPZBD/5bWdq38qCtWtCjx4+fvN7zDNmeF3KG8eLDuB/UPy+L7hxt1vxp
6uaXOK+x1FqDl6Rd5MnfgGuPG/zyxTdPLD6w9ewHd9MDOEaAX7169o9PxiccFecMxQfqdgqTjxU7
UrJqE4kDsHDg4cv6RnB2R11bd+g+zyvJ8qxNrJjPDg+Ln72aD5YLQ1J9z+DrNTItbwe8d0WGE4OA
X6CITa7X6vnPWXki6gE046R1Ofy1aTf4WK3972y/93DFAOsEK51RIKIciL/TKzeDsZfhIdH+ynWi
x09ZNMPv0KVtmyD20hLEnPT8oIIU7Tm9n++BGnwevSo0HupNwyGbYcFEyiTKwh3Jm6c84EcjXyIm
SGw2e5yGKDH8JqHxLbiibDVTlJwFfirYl+89xMunNs7zi2LGFUBgDDnzcFU2xbK8noIuSQ9VB7G7
IZPooijW00+3SepAJxczfO5ntebB3z782eFhekRGi+6qjhb5TRvaVlCw3m9s/xkOi1BlSs5ol/Ch
I1/ZKZ9du19+XV5uLkHIxDd01HGlNz6qte3mkoVmzkuhdd58iYB56b0nF1wwdsc6rV4+Snt6FblF
4NwSmAR8eIAd3YtXie+clXA4kvbj6clJHE6l7nCPk0AYKQXfYAPCpSoFs+m4jgiJQQm72JJqIS+N
jKPUmTBJ+CuVtTlojgZApys7iZTAuUUOelOwMjldHWPkuIJxMph93jjMD8lsbTsaub7Fm44xoihJ
MCPERgYSqg6q0idY3XlCpqZj3nYAxX+I4kSCfWdQppoIk/CMhqRS2XsVgRvyB2AY3tYH31ug4dYn
LGdSWM2O/5hgT09kvMOPIkRJwHrDDDe4tRsmZbUw9jRnRGKKA8wdSaPee5D+UP7nYV9zfQtIhFYA
UMl857LIV+SZCQyGwus3fP/kZ6AFhzCtCWEq+Dy6haXRUBH3He0lbbKLiU2bG3SSxzv8gmOSZTHO
drFzqcelorxzaJyxhbX+nFQOFtN3l4ffyMMBbrZ4t9BMPE3o8mZNRaA4pzxWPOmp9ud5S1FtCugk
iq0g0tDTimrpBJsSXeFoeyX10iAkYGmgM7JcoPEZ422meoUO6R1+2YKjfZGkUVt2GzIFTTikR7l3
aWRzjdwQaXNuYuxACJVMz1elsHUmdAEDLBwLLnfnRQhS2V4g62+LQlwt4Vw6IhT8r0WVMW+A8J9S
6vmr8LOEPytFa+LLRWtMygwO0FUht3IAkHZ0JuN3QxkqViUId1bBZoaYZqFTZ1ERmkVpx7ZdLftf
RZoeJI32HtVPdtxBjpcG1dEE2GcYndUkAZJKnZMNm6p9W5VwMvIEmKM/xpXCbwjw2edTJRRFBzSd
AUUas6yzFDHMJPYyCnRYM6BaqkgCyXQw2BoLBuhN3a+PmAYcWRftIqdt0j086B6k0edbeOIQD6cN
bS/KtSNostsBQisW+5kLdlvcaCQ+e3zSUNCDi63V507npo+rZXz7LRCfYzognN58h5lwW+DJ7mvS
evMj3gFTJ/cCclhy8DKJfs05xOgv9CXYblYZeUIOVdazOvWwgIVDxEnDNly8ff3kVXxisziAtLme
RFinpfoetpMt4z1/hHYZHCuUqX6nzcSCHIsAHBt8tM08kkfgDVlFzEXIpYma+fER/KOSTx7E9PoG
P+FfBXpL3EObbVaUDwLh9QIeXrwOTNphpiGIIgIkMK1JFISbCOBJ5OckD9RaTQPD+zr9RgmTPY3b
19H976X6nMk4oifNw7qp/00zfE5r5lIskdpbkHTqEw+YzlwOs+JU90Dw5zmGrwBjOEPZgF4OqfES
95522M+d7iB9KZRAxf32SdqwK8k6UdP3zLMe9r6mqYqAt0dWdS5XaEUU9prRXHXQMGdgszzFrdct
8X3rJVWjNseHJxnIXdX6POfi6vIh14yP0+EKI04+J/bD01kPx7MxpmxNQ1VquBSxFOrEofHKT0fv
//Ltv6di3fKYqxyG3v+7twnaEs6B2x5UxQf0rdicHijR9RxkgAolSrQYvP9v3/4Fwihr0/2/e/s/
YfdyhW6lcFGionJeVGvd59+//cvZGimvy87r+gJtre//+zevE6p6HuFH7nsq21y5R7SuNmflCst8
y4spuShcgqKWrW9IPpE3btUyY2PM6E508EP9B7B0tTSa4Q8KfMTeyLjaWb5YEIoSXozEVpmyi2hJ
JIVOVgsyRb7gWB40nlLmTYCBqEe9kGBFH8ocfYwwC2xXM9+xoWtBlUdmn5qU4pKcuWkvvsTMR6gO
fXiQv2CTgy/ErMtxypf5oojOqvqUDNb5h7ys8PhEomiTDsA7reGT3Q+uf6KMso1k0aI1oCcHbjvQ
JFA5xa0yS0PDOIm4C1NA0p7//BLpuJhRGxfB5LXS9pZV6hJ55Ha9LM/EZD2hgfppP0wGmNCY2bJs
WlNPnSoOBScIh5zmyGP6k4MLAVqgJ8siVZUZHKSYug2CMsaSmnPWwwxQDDdJaNzGRgSTBsKk7w44
pavgnc4fYEk+FRxR6ZRWwl5pJzErIU1BRROU5hpmRfqoKZZH74SoP+efdbMomi/e8SC8x0IK9Wpe
KHeNU5jiijzmye5JpAQKlQx/hAljeVVH0ZsaD0eQgiYEWnPTo/XNEU4apkR9M4MikBxBNVAMjZec
vfRaffHOiJcyKqKJrCiMHj6PoXGgIQ2iAQwPBk1xJGr5QrYEo7YqsrNj1AYF+5FNjJ3AlO0ABybR
5eid7Jo/ymP6AehXJA+Eu8ZMiRhuImU/DqwFSDfQ2AVRdEEOgDXNYABhSnSHgbokhgerJnZgw7Jt
E0AM7hqdUIc8Sg06UnkbeGAUUviuocgXReKkYCrKLlyuQA42OaE7b8i5CCk+X91wcVwQL6Qhs2Rc
5Lt3MrN375iHKYmVivKqMps8wQU6XXMnXpTqCZO67jBdBaqvsuEEDUdxuIXqqQfdzinxlhUmZFX+
lTNIdzQHIilGiO09VjRnB6k3JnheFT/m888RnWSw5AtehcHAmLgkvsEo/nqzosnhIFVdr4N8luSC
HWwWr8yZsPYZjoSfUrJu4AJF3lQ3M8V4fXZo5s07gaCE8wjASAGUEmP5OdARda+XIToeZsoaB4M7
wI9eIW7GfsWnRbGSG3FkRxAh4xFhSTHw0NSpO65w6EKFU7dzjsrUV1jG0JKNzwC2D1N2GTc4kRfM
/UlPOqNDH3tlJApF0I0vILZAwllwyMcaNUg9P6wsaU2HyPtHkybNQAFcqiBCPm96RrBJXQ1/ewky
pL976HSvnZKNbjljgaVED0M1qUmkzht9PEzoBm25kKeu8zsh90RxpYSZN8UBiQ9ayCTQQOoHpD9l
/dOmZ8gkMkx9e8yGjpWqv9U/2lKrT0Do8rreCbLLbmKSNIyZXnXknFLCypjyVe7IZW1PKTOilxai
oRu6+vLpQRYgDGrlmJ1hmBI3vqbVwhhs+MAzrANUiWgNSwguak+KmOmXK1UMmN8DBvlIk1M1M3Kt
N69e7up5ZSFSNcPtOz96/xmYmk6EoQd+jkGzMBeynYcyzDwCYQAVsxU0JDiYdZ+8/Ilju9rEu3c8
JNzwGGKqQrZFu63qszPEA9+QLgYCK6Gn+0T+qO07TX/GThWthhO6lfAYyffFIsG/LEhXRfQbFPB1
AyWPY7vQDUfNQNvBMFz+EZyXYtpbZ7Yo2sKaVhu+NvR02sh0QCdwycrVh0tGPUUOMtcQGhU/ffdO
f5upE56+e+cWWX/MX7wicA6lBob7A1xJkuaCb3+lrFF9d8DOj3tJrW/UanHpbH3YceLyiEOdLBLZ
cug8Xqgexy2iKPL5ufG/JyRIxLQNoAjxBoapT/FpwYVzJOnbVY45oFi9kXSfFnQ+tcyG6c1mUZP9
jmbCR95q3WO4IcTtYmtuHzwHhpZVTAXoKAEmxxFq2OO+dTCpaLAEvOsMwFx7coNv+7It2ZaJE2/Y
OW0kSpUAGH/3OA8p01aQhkPImTu4BrVr1LNiVTQ5ZYxFBeGy6HLsaw2rWkTJJcAoQTdIkWgBjaCy
kdUIhmnZr9Gd0o9goqTJwPWspOAf7/h69wFfLfjbhC42C0GW9jkhpytX4AZd7Ix6p9mQJjBTAqka
oLjuvM33lDLVsQXKXd/HHbjfgVq3qK9c6VaLhswr9M2ApuZ5tSHFbp6vO05dVagkeSwy2dIRc2l9
F9uWJ4R3ZEBLVT/0WrVmamaF0pySIW0watFHLCHM4S6E47U46OqD0+IAv7XGSBQvLLnea+gNpWRU
YUG/SxCfQPJboUrEbFInpjT2DLwb6hAgy4DlbptCuTJPHQkjOa3rqshXR7qc/KqGU9GQmwvLqo5p
QLnOWDFTPUboU8quc+0TX4JEWy4mOmW5RVstiLWgGhPayeUUq4hv2N6YRyiIVsU2GcchxiTAtYxs
+85HIRdCwi7v3g1DNq16gHXwKEuWNM1377DtNoBq54YPnKMIBaf97t3Hk6+iXUMYIcIzHTDrogLZ
p2ExotGtHCZhJbgV1zm+Wsjq8cUJHZzkoLNgW6zIiEpPhcDfm+DZamu+zdWq2V1Kiwh0VRE5HKjb
oA3bV4zF66IQwZO3BEGEpCLLqKvsnkirRZO9gd9Z2FT23JFyIzIc0OouvZ8hqSC/823GHvjHMKFn
q2X9bvBsmjXc4nQOaQbKkCSXaojRcx++Azm0QLN7Y8eO1qC6k43Zjtimhf0Yb4gyWTIr/IGEaxmN
2VzfoiGcRxl3yN6bmie3Po+Qlq6JZELmPHS62tha19V5rXgjei2KEid6+Q+NW0sLJkm11K9zUk2G
XTR+PEzzMJQTEZ3Q88Y839VruFqKJT6hoENT75mxuF5X+SrXiWC5f9ni7QfS9DIvK065QguB1o3s
qvBYO4EfVWqqlbhuQbYdSzXDQJdBZZzmXIutBF3IX1pbkuTXp2S+QFNzSzHB+cp8QIA+KVef4MXI
iRtV76IFQYrsvSYDLrJBBMFZdBs026IFG7sQJLrwF/xE2FblWXde3UzYlEfV0BBbnBTbB6ESZLeb
y8u8ubGY649Fc+VqWW0K0Eo4WaVIg4njsiAsc8aJFPMq/dFIkWcww5eHotFUSDwAfZICF4dsFyNu
UbZANTf83sRAcIG12E949maZPVOqDE9KE/GEPgM31bngEjhA9eCsbmB3QdZrugrj0RqSsj8UzSlm
s6R86Usy69qjDg2464pRi5gJhSTqA4bkvPjg0y2+tCK95Xhxs20EHaIVKgSKNbn+XQ632VXeoNgI
emTb5meYvIBy8Snhc9kGxE9z00lvyzyISfcjDSwncLKTHNyVC2xCpQEfJac1Bt8tRRNBwzlsuw5x
KTmZgRpQ3IG7kgVKMkN2ZZU3BP2+nvSPodIu6jndHz/uJSmjiN8FWt+IwSTys0+3yh1/oSoziiGE
xSgBZxHowAB/ALseec0aFy5+7ZO36EVxurFMyD+egY+eGmfKlaRYiO/NJOq5N6nEybbrV3RWoxyn
OgdM0rIgWmsCejpewiiTzJFrhc3AGgnUqe0DvRB3ZoaO/szJVoCqfaTb92FqEwIDBW2DNE+W5ydR
j0vCiKCblKQLV1RgDNnOw+yzVI18dV6wG1W+MgYKOLkt+1gvVPZp+Hpd06VJXlinctBlFuTRwUSi
zQ7O2xEpMDIkFQAPDiYCdm5KeqwsPOsOVXlRRGN0kc904YFxWAHiLODrxWnSR/pmjY+Vi9OsxfjJ
htzSyNvwP7z9d+TliJZb7aP4P7ylvpsVs06y3Eood74u2U3xf3z7F0qtkQP7/j+++T/+I3spwqU4
rz/ItYTirDRppUDQRr2lmQd0Yoh89c+WG3TQwITmHIOsqqRLkr+RCk3GdZzm8wv1Ac8HaySMbCEi
E5OrtHrK/h+vivcbav0U9Er5DLUy7opIwSsio3tCer6RO+wVHRn89ylM/Wvh6fs4ibl+S7Fiddx0
WeVnLWbjE2cH+pvOi0JfbOKQMYnZdIxuGOOJchaZHo+ffP31s5evn70eS5ToWVNv1iqapUFXHvok
GYtReWw1s9y2xgcHMuaBbNfYDM1uKtMxCM2wSxgnbE2B63PrtujaOh37JIDSLDqk9mGjo/F0LG3V
1zvniB6TQxO05jbGxp9k3TUmUUUD9Ye8mY6B9sf+hPVkifbtZEN4QjVENlYKxPAaaGo7F8DPwZK9
7oDZ7PfCOAPUIgFn/nviw3Vmqt6kJZOmTCK91UOveHhNdcJx+ntkZRjK0H6FaTXdzIuYpoS9zcRj
zSWBoNv3V9zkm8Aj2Ehnpkz0oJzwEraKEl6CLIHF8LDeM09VqfWoC5cdZ6aligH4LPPdyInzkfwr
KyKMqXGlI2e4xN36frZ/mfcbmFQfj6MRZ9Ug7gK6rDRObN4jILkqAPNsVcMHvlUy5oT0OzurBiWB
tJtgJLL1p9uQ9dZpZNRVivislSSvqnXa9dEp/QdqlSqPNXX2gnqu6FOsl1qkw3PLrJG6K40XwQfa
2hLL7jaMEn6mY+xOIjbBSW6HBXb3E29wWXNrGFXYXMO1IfoLIPCIVvrF/ZKGw/AQ/GlVyGATnl/Y
wYKHPxIDwqmusW5maPIwpbpcwcvK5CFU6ZT747YYLmZlPU+kZQZIQFPoU4a/T7Y61fPtCqUx9BbQ
UkvqxW5IU5k8oEVNhg6RF7iV4zMHos7plMkXo56pu9fSWTkHsFSS4pdNmOrvXs1m/BJJdVUP50aT
76f9JHYDEdaqvQ39nlqn+cDNjCDaq4UqFAKALnHeXrZf75T7skqiljuRuUwU9NSrVc45+jQ+sxcU
svJYUkS5rV89efni1ZvZ26+ePX1qdbE/7u2B4jJu1h01vVRVSS7aZN74taJV334mMbNhW1KWS5oH
aw8m9MGhQkp0ED04hAvgTvTtt99+GUwMppidXspxecSdTwZifrGRzkV29/DTRXS3pSIB5b0HPPBA
PegS85892JO6TB68J98++ubl10+ir188fvTm2Yvn0dvnv3r+4tfPJ5yj/ry+Il0Xw11I4qF6MXmn
iDEQjsyGRgyS/OKLL+KtaFEU3dabZl5wnjbezXQP9MRffvklYAf+PyYE0bjbcaSnlmVZL5F0mN2F
uV06gFfcBDkVGYdvzRblcgnKNsKS9Q6zSo8tnWG6Pvt8pJLLbvzdarxProYS74eZLIyPEaktmu2i
SpM4rBW45nxGV8TwCo/Hb58/+fblk8dvnnwVPfn28ZOXSDpHTKo78rytm8SZFY+angyPphW5DJ8W
c0v3Tz7ZZ+oiV/kiU0gY2pICU+VRHbr/nbtW3bLOVUz2zGU9UOSJLvBly9IeCyDjY6GHE2ECXH8A
r6GRSlE3QzVwhllXQW1TIZT9G92KLUu+evH8zQxT6L18M3vz6u2T2dMXr2YPpprQg1/3adZu9/Ov
Hz3/1dfPnj8JQtHf9oE8f/Hqm0dfP/tfn8x+/ctnb568fvnosYER+rIPQum0upv6oN/02S8A5JOZ
ptjZV0/ePHr2te458H0f0OMX37x89OrZa2jy9OtHvzBj+1+IfoQbZWnxiaMVWV9gES9fPVJBOOOA
OUCYgEUDMwqoBRh94jBN8znGKx/qZ2v8EPmxNxNLcFe9fjftD3aMn5zYZKaae1K5Vmmc4yNiOsof
lpg+IPYOCKt3ohbOcbu8id659pt3dv5BO08BH7fNao42EkDGb39vFnupk/OL0jeToCC7rp+aKMJI
0i2B91zZQiAIH/5/mXvXLTeSJE1sjv7oLLSSVj8knbPS7kaDywkEGQlmJqurazAEZ1gsVje3q8gS
L1vdykqhcInMxCSAACMAZmZXVx/pPfQwegj90kPoGWQ3dzf38EAiWT1nVDPNRES4m9/Nzc3NPjtb
4qiqd6S8HLKIjhSH+M+dAKBRHFvUcnQxDZOI4cp9m0ocVdw7GDDA664Avz5Ijq1YLOSlDkKLbA+N
0jalkq1JqKbhDgC+Gd5aeB1D18MJfcTmBR+LUGMRLJFhuJYiWNkwz6oxnq/rIXE819fDsEn4iTuL
96G8Cdo9Kles7iB01txcpKG46c9vo2qITmhjvrzfhDZRKbiT/IgjxrWoGZxCDG1N4EZmH9Ytq89l
mUdvFO4aucEvSWppo0BFscmUkqk9CkeoLEl3aZ/StgBwPACoFu+l2xUxRBsgyVT9fmWDKYZ9cDu8
sY+68lflPjF2EeigexiJ9eQ0h2yfvLRNv7I3OdbhritFNX+LaFruLky0itbsm31MlgUGfpvXSw0m
MfNOj0bGpvdqFqnlDqmjDMCJU0bHYoni1dwbetuzSzf3WIr6nXnKKjaMW0lN+4TcYsyxxUjcnK9v
Z9aaK/m934zaSDWX40I9sDhIxXK9ubG68kaJHIlV68CIDp/kRQVIHZEnrCn68G/f/3fW/IzuThfl
+Yf/6d0//td8UwNPcOZF7XZxQHcEvJZ82yu6csWZisK53OePGQW/w3E8yM+XLqTsVcw+1yGttxNN
w4kuSMu2AeayUxw4W/Tr6cGBzYE6X/N4wM/BLYELtasuBi6Unp023kDNbr3JTB+iuoK6UHoNiuo3
dOhRN1DXuGGgCrdfOhZ3k3zbEGGGbWhMThiaeoGuUSuKldi7ns0N5A8FzDDpTLATAy9orF5Syk3Y
JJr1QhZBNCRQG0sHMlxBRx6hhoRO/JMtnYSdA63ZoXTj3tDvb8pzW6zQz8JsceCNXoNotp+jra4E
TDW/5SM9VUgl3Og2faKTV32puoeNY2LuNOq5u33blW1h2DJxVqBrWm29DPx2juo9sqZG8zR6IdUw
8f1O6CUC67ioP7wh4kwz3/iN/VhtvFhTbCg3I50OZzkaKAXXqriyFDGZpmYQflySoRTf4IlccHND
5uB7Rik0iKikIlXevcf7JHWsiGZ6P+2joHhbNFmkaBAi0xmeWlnQjxDSCPoy7Z2kscL8+H5eBfwQ
IK7bTf/qY1qaMrLgOYs/9mrJLEA2bm6/OwkXaHA7YW/71DWfuz6yTMP8YsTgoqKQKj6/sBUgRCq0
/0JzhurG3DHBuma9CZu7sS3Yolyh0bNGJVNBfCh85qjXJf0hKVh1XkMR5CwKo6CrnEWvs0xpO5SY
kdKlbEweLck2HJtcbkF2XxbuLg+2vlubbFqimJrJmbJFm8fQlPhhono21eOKpjFD9xc0NSIcq1tG
SQeTbbFd1w3PBn6wJ6oGmvBQ/D60a+mKlyh9YdebmEZPc+3g5IQmO/1bDDGHUh3FvGcFdUx9okIh
0gX2jM5v6XUaQgJzFwh6oNTYdYxjPZrKH9qppKmfZ0cfqFwKw1zn4rP9L6ixfEX5dX03QifHp1mD
a9hlYKbxbfPI94xun0MrMo1r7andPaJGvvt1N4KZvF+Hte9IYrDud2ZrFd7urAIwvcH9majrk6Ay
vrLmUzvdtyO0V88Bc8LHaTWuNXeyNoep/drgUE2GxgnhECBSeYOjtXMzIdadXs0G3CP+ZWO2m60x
S0t/lTLApGkknK3+5/f/Cu3EqAs+/Lt3/8V/RWeqjvguw+tV6QKV094rXswvX6NRScmeI+iYgNnk
5FTf1HB2RXAdzG/2bI4h94LBTtHGUeNx8s5N9xOjlssHmBF93AkrnBQJ9AErh0YjIj8awS/R0t1+
zm3k3PN83MgHx9w9snUTiWW8HK85aCIRIpi320Po3aMLKZfj1kO8jhCEOvJml+Iu0j25X5/KAuvd
vWGf3KZm2L9OZ3QF0gVOFiCGqiZKcjzgCSQRZZnm49jLo9/YtxRTQN66tF++f/vHHKQ31k1MZ8ms
mn8E5oD+KEDo2xdfvXz/LSKpL+tkuyLQwvnYBIo+1hV599XLN0z++DD++vPfRN//2r6lyFA5oTmb
6MoTkjb+ofOzt1i+RR86X8Ilhcf4T3MQP9dV+XGOZ2R7UeotUMI4MAGvk+9ev335B1mPNpzOGDEx
zsjPZ12wV01KSdLEgEImyTP0XtxOEXmcb5vVrcF2IrUNFrWz7qXLL7JJH/JfjtlEpRxzZtIsqgkr
HNOzfgp2QQeo2g2DjHMISx8IODhwsN2K5eVYFSmlo24pjJyF9aE0VNseZNbMlmloMy2MAhUJEuQH
7VIEZd9Zlbuid+n7V915J2ilsR8QNdRBTnQ8DYnl00U71KdHliPd96vLVXm1eoEJ7s+QL+D7MOIC
ZaQOQuOdHrPxntDPE36Rt/CAn1LHpuEcbIC+0xaOkQonSgeEBQ0SEhv9Q3f9nDXFgEbfcHOpvrEO
td/cFYgO4eTuujg6cI0xaC+vFDynh0cxdlAqJNbTVByvZBt1FvFoUr+GI9iUYvx6kXlGI2tJcAGL
m2I2e6fytqlBV3xhHcOZQX0UmRv+VUEDVrtlz1YzK2cQRTiUTiRIvTFzaAYE0Vo7ySVcJ2sJ/e29
jfVQRGWCBOGL2Fx4fK/1wsiGIbeGNVI/DT+f7bhtSV7BCnBsLuhQ+7ElNjji2WL5MEOkQMJTgS/o
ETNGho9sWoalE7lZbeNZ+HMfAb41PEA7bb1zn9CP9uBOuyOLcg82TSf5PcZZAwnwfkV6EJztTmpj
vNzs1mniLST8r9Mp9N7E+y1Ixf/+/f9gbhz4TIM6bIRs//Af3v2fD0hEfg9P881cNlubyjlZKXl4
TaoUfbFgO5o8NRyOlw3A/RYvtGAGdtouJk0Csm5GW9wvWW3zzNTE7LpKoyN/676fqLONxo5G1zT0
DCapXFycNV4pR4jBXjG3aHB0IdiFyY0NVGQ7pnOPL9/WVSEYoHQOJHhK23lVQScXgqObwTRDeAZC
vOO7kqtxTThoiFAprm0J769k+3dh0EONUEMRMcY1OTCyUWAB+emu7aWpSwHCitdCsYBlrbU15rJu
1T3128lkDHpMiWt2l7KJeGt5hd8IEBtqWiwnxQzbYF2rsWDxmc6hEVfFR5bHKuIK4mhVFYVCpRkk
P6x+yuGfn6krflj9RTy52TEaw/YhVfKdnskxnD2aoED0sVZ1rFF4L/zdy420TmhV6ebeL+n1PwLD
3IwQEgLvfPnJCmC9LJN6UXwkjlxtqZAyDN9pl3iLnFGvx4ggzUBLbP0GklL/vO/84MR3GWVotGGs
fclTUx0mpGUZr+tidIb7hTeSHW2MOyL9aOuoq5sYTCxzhNWpykTPBJPopT+sUgmiy7kymV27KmSn
1nNJg40ZrwgGAOcyb3nMCkpeJegKvK4pIiC66XdFHUMpYa5wXFF6gmr3+32YOtxNEwxUE3QdpzYG
WLEIeAyPM7QpVe35ErprSjZldnNO7Ck9JDsG0Rt02qM+cJwCUx8zf+Ygb5OR7mq7JGAu3YcnVMPB
afPmekpHh5+6EQNZKiZipWxz/dya6yDMpaMOYYp9oltEjDr5ROOx7F53u5qMFyijALvFjQ74geyN
GgMi06eeGWn2cNAeJnMV3FCGbzXz/SpcP2LWg+SII1jAZB4E0o1eZN4ADKi4U/ILCIbl4dGvB0D3
AKg+7Ow0SPDq8fAoDB7J9ce+/41edh4H5tW2e1XbBfdGg2Uw0vNsDift7XghHEKCozWYPO0CBIFR
hTQ4I9UAd07oxZ/wPh+WYMoh4oF/p7wCETKUo8DZ7SGA1WD+TIHdTF0YfLu2cO8L3EahwjhceN2J
br9FFazxanw1MjxP9wVKncC20syYchMH81jkic0LB/FTy88XFJ/MfvJuWNGngWq+kJvDk5T64Gf8
5y/4z9M08DjwTOkXuy44uTyaTThHsdUPk4WeEOwDJlMhwrPt+H9N3wR4xB/55jATuDBrTqQxPJx/
0b2OE6DcVslyvpqbroIiCDME+vx8iy4xJjqpEfNTntjoBAB0kSj+ZOqyZOgFimpw7hpfMnxdVciU
tVjrY9SAC8LDjY8g4+an1KiYSZ/788TsddzLA7kzgnU8vcS5cKiep+S7YF55t4/xKQFvdZhZ6MYm
vzaUcYCbTBTnI0xX7JkkSfc550gO7uEgB5Vl55y1FckiqaRGze3CfDVkDkNjf6RoPkJFkodphhG2
4BdU5wEVSzSygyP8UONkho468bgfbUqN/vs57D9ukH8d71Wy+YkreEIpsIXURFOBHatQJA5Kada4
WdnNsuN9Z/1fXCdE58lfoMuwBjrdQUCr2dHYvVJC2KWm8o7c0NTNg68hKKvvWPmaoNGrgOlgRCLy
0n/sjnfq81A9dOIaAS81/BBu9SlIU7LAJcw9QUNoMZ4WZl1i2Kg1yk1lVT+iX6tZbSXA+Ywunb44
xH79NfyDXVOusZuPUWUB75DH1Zrb5MlRQjF9bEQMBubHSo7kag+OmvOyX4/RbmJd9bj+y/E1Bgcb
YrRoKvnRsSw5allLXvrmMlPGA6ymLU+IGMgeWvdkT4EuWRnbVEhS7k5XnFyjzKH73Vl7vJzMxsn1
IOkpNdF1DmTmiMSw2aLvlD2dZ7sva4wRm0/KTQPJjhZV1xuv9NYckoECNLVnwM+Zbd2ulD34nKOa
4k/FCn5m6m4JD3pkClP0HIRwVI2E6ShNTHPJHYAt9JPIGnl3s45pzby4aRz8RLCjrO2hi0yTyjdh
9b7Eah1evWpjONo1CbzDYdrYkKS+OHUyCVdEL/QqbJeRyd1uZCnInLNG+3s5zZ2peenVw75sq0v0
YExsZVR8GHk0d9fM7kE0h4JKbD65/M3diiaTcF02vfi0wi2t20ufe/Nfl29f3laHoHCP2O0VaJ7T
bo8NH7QeMSVwY8RXLbFeW70Lmh2JT75sIguIFvjqr76IgOx8ddsqEiZir0gHnTi1Ez8ebC/teTCP
FCiFlJADMggpaiteg/A8Kzbj+aIWF6M+CBlB3VMQFyZkMzi2sOkXhPdOdtk31vShn6XBvdm2F/eH
zbLTTkcpGFRzGpYTrNBUL05kOwxO5uZ0tJs9ib9WKGoEJ2KL04jE8H77qiDM7WsKusxxSYnC+9UC
Ae8ODgz/NlrkjT1mkVvBouDQS4S5VDFEprEgNucsRr8itSdKJPMpIjmXFL/a1QVPYcvxQk5CL8/k
pIdRLDEnVQ3f3dhjN4obCIhVrqCOG4bFw5b4pyRS5WMJi/nEqPFX+BzZc8Tdn5mI2W25s7W+Hqui
jdghAUkwVm7KQHwFKTqzYUIrPKqTYiHNIkXISO4og1K4Qngt7lMKTkIZQrWLosYOZUVCU0bL6zFJ
7Gc0C7gsXAh/KqoSxctzZaBJ+j3WJ6zOix4MW89IdllOsihXLmueFzHNyfwUzSwpDfyOBKj19G7Y
T8nT5LPjgBhplg6bNjn2BEBSNAH8XW926MVOgKe8RYs4nMMgfboJaiZ2utuuJ1XzfL6iicYI+wcf
SVd+UV6RPDv3T1oyZ7hHBv43M9bSRwPPFNT2NfJw19vNsPRqjGyeLCqxUCUO9LgczE/3DfnbMkDh
IO0xMOHgPNw1OobV3DY8v2SMwnEaHEQS+INlk4QNkXNyhW6FdnnuwrcgFkWj5pmF7+vXS9Xxcman
t+hh95V5vI2mjcOYHQ0G8L3taJD8GSN4Cwwc25dKpe4ls5vVeDmfGgZNt0xFMduuxSrUAydgZt7p
qAHiu9z+mhVmPNu95nf0YAWpeQk1kzdZxdem5gNT9YjcpsY688ZUKt53Y2sUBNn+Y7PzJOCNTXRv
+xT2vQ/rjqzcZxtSQVzjyqU+S+5XSOB+le4+fyMiS26KzG2BAdCH40UeW3wa5YqR6n2DMwcZ0RgD
MFJoNYqikstVLflcSuQQVEp02qsbUZ2cuFqc5mauLLyaPtmrpqEE/Ibm8B3qHcCqxFQ1J7ZStq4R
THRYpg+1FBpbc+386V7gaB1bgll0vm8+capTTRUDTw6YhPvoMW/SpJ1tzIyyuaNDo5SXL2x/2yso
Kg0qDlzCd/nhQBmrGPGWAoLhIhe9zKsjVf4TKsmNvrWWAfk7V3MvptZ6wr99lDHuKj1bZUefLFpq
G3HTzS5SObK1JZXYuxQ2c3nKnPoykci5bA4o31DU4nVyeWr7nugYTWFj/4vwm9fLOd8teYKMLN2m
NNK6kEgTjmaxiqng887Cn0ubsDS7d0XSBsuSymlujHRg4x6/3NVvv4r0G+0Eu6r6FYFekeq+UVtb
lk8lRimYkT9dDky1fsa7mxQrl+6+5E4iNEx7frb7Pywsw2Wcvu0gcfOuY3VCJultq7Vta/JWakTy
CEbPR0Vsn/CuYpk/2JlqouGVrl3SSMrnt3EvltS6kf1VWmnnXdhM4QWRdrZzKqXTQvyAXG53owIx
STxDVkSQvQ3m4AIwiIN8OhlQQp5DqKYy7+n1Q1zjlG9gmFyFAVNHcuFAhB5SPr0e1d6sMwTVzSTg
6ZXkOuGrF45Ni2MhJnoDOhMFS4Cbby52PjvOmhfK/tKM3BGqQ10aHkmxCvPVtrgl/0HyqTkfNnNK
VwT3keYi8ti/2w0Vr0FmBz1rrlH4e+dD8v7fNY1XEaZFLB4//Ord//Ff/s3fOF8tZZfqYY77pq/m
UBS1M201IRV3lsB6KUqj3Yf7QWBwH83ej2bSg4Qv/D69h7Y4HAr1uP85WVoXfANMmYfDY5xwA3M1
fJgnvev8Jmun8Rs2zZmUHwuy6F5cjW9qn6i7VVbG32j7gV/x1nxSmEhGVBhfKfabKi5D72lyFLHk
Lol3wahgon1Mv70Mxn5jp2E4mYMv63PSFkr+KKpSRPsepRHdIrtPTiZVeVmsrJIckQaT8Sa5f3g9
e9rttBzspELOqS0HYchWM9u11M4UCuXXGNyzhx4NaPh+Rk9HwbVuvHcILxRx1JgQ6iL4XRRYilPf
eoPTaqHvFSlYpVBh9AOn4Iq9MwsQa9c1YYW1XPyI2TluEjts9r1SG8jB+90iuVqjJxXXfFawTQFa
D4uCIzaxjPeQOAPuGIRo3e7xHQNGy4CNtUzK6XRbJTOO/6k4plg2kLGyMbDz6UzLkUVjRkvcgjRL
3SfVdvW021i7XKudi0GVLn0Cp+5cYmMShGdk8FqMnRzV7pMpxdbEOYb29YhQVGhYpGA5idLNOna2
EBeAsJ4pKYfjNTYTFg0Fb6cAKGclcKre4zw5FE7eYN/GLQwralwVui2UoLTjPPmckO3xK0hSG2yF
3oS7/zT+OO5mytGiubPBrmk72uxw7gWGOnED0XH920KtXMz2p/ah+/7fqPAeaJRffbj37v96x7BW
vdm8RtUouVMIgFOG0K5E1QB/UUgHjp5DgQB0IB2JctPc6Ev7CznT1D5VhY1LMl/a36ZrWTiQiAGm
dfJoC9lOJFaS9nbxYpvY4CQyxIYSw434EsiURHX+/ruyvEQfAwQGG89mI47vN7qqUCCqOn5GDGzo
XGkkaOGLP7x8N3r9+46DO11vJ4v5lNyI6p6KsPka498YuQo/8oCTtnpDZq44wcjQ3l7fUDDqGmvc
d3aTrJu+puG6JiNIXFPXuM3/ipxXTzu/CGRsfbOYT9qxw1blwaYsF/VBuTpADADfy7IlJAfG04BV
j/nKVQAg1oS9JBCxXteGy6F8JBI9e/c78hnGI0QJ/V2yWFNRRFXcXpeEit6X6u+FMHaPjW+XdKJz
Md2tjwjOW5j2NH6b8WXhIgUyE2YsOxuaD/dkCwrjba3RFO1+fGFyxBmrGaBhPKk5EgfPzBFtFKMR
Ru/Afa1bTrtKnIgQCl/ZW9juf8ReROEGkeUQaTOzAvd3OFNmuF5uiSrR8GYVJAA830Itt+sZelWE
/qScyCBEEdjzbSgOM+tAaGhPy/VNgDc2O0mJqjIXNegPT1yTkvtV78GD+1X21EEXcGtmrgeQX7wp
YD3CsmztAw++rNEJ3ldcfvo5wJGiuO+kK/SQd9F7GR2j0DFtKtxLIpNG+t4nZ456ruGKTN8bPl8O
IIjFoC3Ankpmlmxe2mShPc1hpd4KzsQDesPc9cV2Q2GjIwUoK0EOAwuTAocCwyHGgoT4uXsqO/A6
6gw1aWv/xkiZN9BXwjgJT97ExHmq1uLU0PDSPyE3QgrIij9M/BceWvT/gR8y6eeyL5y6irIUMDJq
JuvXD6vcq/Gtjqlzz8tJCIibZ8/Qs9+RDmHA1OJ4rY8p/bMRf1PePujIJVSafZSz/zu5A1ESMg4P
zNc5nt4st93kvK9ch6E5QdwKQHXjkEekBSFX8MdePfv2xbfP3j3/XddCLHgzMzSI+zhe9KgVueqc
XIoV3pO1H0dMsc9/9+L571+8MSWTUorIZsBsD552d1Vj94HHNuz17jJ2FhHVRPlGEQ+HOEyNGCzt
JhZ+5Zr93kWpJ1qreINly8PTCkZrMycPVFbiDS1PNfFaU7NP2xjD9FNIDAEUyK1LybgI7pinEefA
PeYn7lV6rkfMNQrrmdzxnJRwkwg7Y8qbWg6VHLjeUNcR7JQjaumTbmJiLcCJ+Tpz0qWrklIleqPw
w0pQe9jpqclo4z29MJu3Zcc+XorynViwhis3Q7po8Fn0/+qIOGdcjsfAkW8cnDFDeNVe7eQdVzA2
o4kjD7tt0HxRrLVuE2fq5LovGfx+9ZpeZ4r10xHIw26bYxp0Ox92FSzvJ1WQ9Q5hiC8vmjPmEhJX
F2hCOSvJlcucrFCOpDrC7NNgIws/8JDEW9YNNn3ONQ8339uANEhhIgCHLbiHURg7p1epoYnrR3CE
qFDSoJgFLuR2TRfpmxQ2Zw4VuMHGd/bmlCLO+1hBES6mAyU4p2VumYJ3y8llb2WA4LCt+BzD4ItU
xmCPmImT0CZrHtCMBjqLwTBNfJ3BoBsL8WPERsiQhdiBi5hr8X9GyA1xKw4Yk1qMLoQkIrB1d5uH
wQHSW8gJx3//FUeJMs3S7snGYCWiz27UslF294x8jY6x0+h6z62I2q83o525GgB72sGfhPNIxJwd
rEeWdnqnpZ1GeA+uv9vWYSY4jwLy2KxrMVOIIrtC+ei2pfEqKnhXkD8FYrFBlFexz04E/zHgMVS9
vfmO19fdWA1bkVi7WQwbs5XfzM/8tdzFtWtAGjmkBfKtFlGViEYXnoYYjeOLqr66nUIbrqe4HsTy
mxjmVMfcFJVLFoV8BWuodYDN/MQ12pA4vJmRNc5EPiYw1wOjG0hN8CfXZXjolThemDrjb1tvfLDT
KFJ+KJSYuSlWzJZsI6GdsC6lvGokNeW7lPxGLZfpAnat6GGXZcbBqagLtNpLgleMyDMdzZZ6EtnC
4r/TMHwDX5/j1yyeG79/Sxt+1U5AEsQp4NNsHubeLNfyASXt5fpdkEoX4dJ2OlVxbdBZ6W6jj03D
qDvd3g+zh1nS++HqYdbNDLj3dsX43jtixALnBoIGEqaq5NdsW2k0B9vlFfk4otbde21IoLpOfvoJ
DGWGkYskgAaiGnioe7RnaGWNtEAkSGvIBmlNQ9CATH6qQxEqhNpWqraQwCgLqDvqea1tMsa0hsm2
mtWp8dNvMhhTHHWjHU0yPMFTgx8JVztRSr4445z5cWVCLQOc1/D0RvLPbkJE7ASS4qpCH2LIGj84
myhzRk/opvGO6aYXgZpUJoqN/PI/PzfQ8iY4jQmmYC6kVXCaP/zhD5BsiWYDsy0hA9LgMybEco0L
jhVwSu8DsvFyHSHP6S9AOkU1YbGqtwhztaSQJNROFZVRNLiGhDmNNQOyBtbcR4eH+5wDpOpDU9v+
8hLZBZX6kE6v8+yWi13Cc91ThvY1GjQItgb8w/8o12SB2EL6yPqGtPYyJs0E0wtsCEYNERsPehGd
H6gsPcNIMPM/FSasmDxmnb3Bj5+4eQqC7FMSpFULc1/jStRjnMFJWX4TAx30jVxe8Meef5Aw4I28
ONJRuZhBDdKYHlu+me5SM35WwNGtEOS4muO1XmDclwn6CQrGG926BSNHfIE0C5CX+RUGjoIaC60+
2fb1Im5hkCAiOa0UGCykwBbJHU0D01vROuPz39lK3zjbuHFc56xFzUjaI1fhEw5qotQJl8UINf2i
NC+qW24rRCjRwJpeUoo7tAZi3TZ9Pn+WWOr0Q1+f+NSyPWY5k+mHuv+GUMrpNPoqzpNg3sIcMpIe
92tjOgWoovtPTNz4F0oGHmHfUzxAAwe/YXTJyGUN27iLXp6/2qm32+zMWqU0LeY4ai6r6JoB7cbr
XjOLgb8EjtpGz+eSNk6fP4DtzN+2tg/cEmYahpfuGTK5NMc/R4cmN27VEsAnrls2wfVdRoJxpm7g
INqr4qoH7RnC/7LWziRDq7f0ohcEgLOL68b0DAqOK8gB83LY3W7ODr7oxi8sdK/Oa3KY7dUtS7uO
j6ouy/zI2nTGdaR9o+1qbgInWB3uiWsMyV+e7CeWVCTxnWZ7zz+/uKxh+crYPCYYNptm9alJIM1L
L5L/DlbTN3N02eWXuQPPYsnXHMWgJ2nxRD7pZs0j/aY9pj3Pw3WT6aBhqua24Ypvx3r2gLht1iab
8Ol74SztgolTxfTrGyJrsg0lg08U4+3uSY+opbB65ykGQL1uEuT4vdOz8zaSdmnaooVEqGa4wWJE
Fn25mrMUDBLEiSiY0mCnk7Yqo907dHzaX9+kO7t+I+F8P5U+5G8vQEQo4YpGv7G5CLX1+8T2UHyv
05DDHN9FEYOiPmW7JFajCFKpXa9cmj22cc2jO0FYsJPYPRLrm1YibqaorG5V84Gk1zVnLLZbCWeR
IJtK/Mah+aXnKyrDgyBfMDzeIS3IbWyJQsEpHQxS0b5T/yIZrzl+sBXvcyQMk5RLcmFPHobyN2uE
cuLozeuiQhs7o9vsndDIZac5Bs6jnVKcP7RldnvZLHiFhSN3m0toqKFYw0VEMv/aDRuuuppm0yCo
ghlxUkw40y0/Yu3tw3GtOsQEFSZF/oItqDxy/1wdf/0v3+dcuJ3bC3qMTmzzEcMf1mqJeKiNgapd
8rAj3SIihQlqntw2m46y1ZJMWawVBI9nGlJtKUfbjnKPYV7ojCcBcd1tcOIHj6UKuytoJhtsQPfI
2I9unVizw/COAqwvloJU2tV4tSGAaQmhwl6qVAUvJh2RMcZFobhs3jeD+jay0o0GB/dtdhq/px5V
0GorlNvwUoNb6fUh7F9iOxnsYd4ebfbVoJcW5sSiSYD8dbI+jW4Dria9B4sdNTxq8dUR23AqMrAh
NFIonZx8EMo7tSTeBDnHxhqhEtENoTn26punbvu9UszqQbLRTdHj3FI2N9g4++w1duQQjIkxFF/Y
vQEveLBDaJmvxPKamul3THpwIPytXC1u0lNvHHU2LoalHdGODT1NSkun+jnkb9Y4Kp9c0wnZXR9J
p7NdhelyTCLVLWbdrDEvxZPZUxyoytwu6U19psjc4jmXOGicGBsWyddG4IjFK4EGGdnLzN9AlxOL
nJuFihnsLdEpYHierrztksODqrux9JcypRG9xmFc2cWrubFjyLy5PQ281pS6aKo3R1lGzduWFnWP
khd2s4+T0NIJ3/vbmn0dQQ/GvB547MEBaaSL5ToWdOY2RHYaFunxriM1FKMsJT3D/0h0SV1xalWw
eY5zdvAi0FrJyv/eH0EXIpqBdBtPc63O1jviFdoCrdJNgnGdYEMUXGvaJAndbFYa4Gk4m1yMF2cH
xLDCcLj3DGodoa9dbGukjHM7QWEeViRKihg6za6SmeBYL25oXFY3ihaBgPUI4/yKYruM1xt2umLz
k3ojwGvooYBh2lTAicwzr+bDhBcvu6n29UOE90PVqKzsmPInGuR6Vsbic3duzSd1tWbTWWcPRWqz
gc3gYjxUsUWF9Yuvq1a5vRf4zUpToNGqi/eouMt3l7pb2Y4qbeUdlAvxHDns2lvqbkQTq8XDuiEv
aMCRiNSLluQYg9wYfpoyB21qOkwdsks4oHfvSzFkREV2SVAgq/oHP6zu1z/wlm5skSJqRFO0E/j4
BNDop9rvKNUlUB5soapPuALwrk1xIwlF3jhhAqd+mYqIP0A8jjhzhhgDBlkKnu49wAL87/IKL3x3
q50HoSZXKmy0jT93YpoTJSjinu+HGJHqRO4BVDajkEAV671utjNO+I4DL0rntjfUTSsrb81w0Ina
qC38g62FRZ4V2jiHNvbR5IZ6SfqfB6mhgQlOeZyqP1oWy9IccyNmz5yhv9vw2a5bSuzZaRdG9IKT
hexKZBxBf4uqYmFMA1CtPrKfVMHx5UKHIHh9kn73x3e/e/0KncnSU+dVVRdrVoHDyCG2NGrV8gDC
Crf9ksSn6dUMJCJUwX8kaUoTzZM0zU7Vrnx5dZJCQioN/jaWifVw7H9HbTatvRUvj3tj6HfK0Osb
7+xsOLcU0KarJPnbpFE3WTspyJuYZCWflIh5FLuV6XI79I3icVs6aKBOJx4GePLFOGlqwnSn2wpy
Drt5GN9YsfEjWoXot9rnSYeasiP0mLjqBlc6X6hiz45j+Y5vzdcwbEBJakiehn38J/BMp4Vg2YM3
Q8wMODuyo392nEfi+JUYEWoGrFR7NpPt9NV89fi4mzVw1FHCx7L6V2PPs4n240XQgLOjPpUR1Pzs
uPF6R19Xn9jX1Z36mm2roMIgkcMabgJEWV5BhlXQhF0Jf2Fv8HqbbZdrCRzD6/imFiuwHSlpiUtK
+N1g9tbkrWdM28iqLdfT7AAmnl7gijyvc7GBO1vvguJvRGSJue3YW0FZq5gQUf8WxfBs3UDof893
hS/oEjBiRE9ExLQczyIMNLIpEcZjUkzHCElWntlZwQEe13nms8P1zWRuOFo9BUlgwyJSKNmim6zw
NhuvkvLiy57LiYoRDxtFc1ViqoZS5sfuVcSCygRRig1yQmCgBSTobroXeFlnITYNGoXBMatC7Oly
JeXA4xnMDPKlLukwJwhMSeGitIbwMnjEWmF9sKu7PY7PWky3GwQYyJlwFtzWuqFBOzQKR0rFSTXg
uEiR+MY4Xeh8ChRjERrC0hr+zDneF6/KD+MdJ245AqM5LkwkbguMEXqR0fG16epOJ3E3MsFUQrQe
b/hgw4FjNRzoGVbDv7rjL4H0qOcX3r1xoma42xhoBhUp99u1XH6jQRy/ZxaWtU7MsENl/PxZqqoU
mNNgnT59WlLkeAcp8ffNK8r7Vfb3xjkN5saIrgqtJmQn8A4R70biwzcGbzS11yNLOGDPvKMtvWkb
H+hnSbF3B3cPpl1XUlgbd6//IKKP9+QYVgeutstJURWzEV6PQr3O5tfDrqV10PVFAgSvH7J8W5Ul
2v8NtYlZqCnTuq3h/TpFZLU8aTC7ey0qs3vGIhg1xqsz6AK0oDIXo9dZkLKpLbvnj+q9hgfVvaCy
rphhP80bFVU2mteB24H55PuAX7P/t4dlLimbWjyuwzq1itCTw9PbNgbeiLrC9roWacxZBazHV6uR
NzM4OA/eBa4JLA82dJQEjw77h3+1lenxyCZDRDb2scCLLxG8kzXX5oDqyxdu3cw3MC6WoUkWX+p3
Ja9KP199LC/JPikR8yQ0V4OG57F92HafRuICQRnz38doEGoOi7Nrj0vIbc1Mt8Y5JbUKZe9Gr/uP
4cC5Q+xtwyV94LTrzBbLiofAdBHwj8f9w27chPYGBLZ0fbO+GWl8o5TjoaSff0ZLz4IbLcfoydbQ
cXojj8QOPv8smczZIlQQg/AmxZto+miB7k1wRoFtvhulfM3Ow6bBs7JgnLKrsrpE+WROsU0w+jUR
+YdftZflATSdVUUxqWctM3nvUi0ZdzdYnqO0Gj+Q4ij36ezKhxPfsAx6mOxNpVg9j4TqUP7uowaV
pHyWCArpy7zi2DlqojW0pJjaohbBqQrk7BmePnYJ+pIXUvU5hzXPa5Hfvypa5Hdjgv7y1bsXb149
+wYH4QBPbgdMmDdJvOGejtHtmFcl6Tpp3Xb2QLzFxuQOvMU4OLW4ZISeGFzivLQB4t4V15uXrzWO
SADP0Tw4HQfe1aLRJcN0TsB2BBa6tMfX54ubzJ6ljvqxjnNRNgkYlh2KSDPc1yXuD2sAu4GZ2abl
bpPI4l3TN+Oj0Tz8FHVRXOqv3CQoCQgbn2PPmrjpTMawAgg8wrhgOsLqcaZHVzK0+twkoauSePqt
BAtlpU1mjN19xG/CGsu6/IGkvHMmBBF66HMMYEZmSMMK2bR8Nz0/Uxtp/mGNbn1vNB3l9lj5guhx
GGF8x3LZ1lZbkN2v/dFrVSEcN1UM2jk0qmQQ0Fzceq7xTlsqiuIb2TLvxoRhNIPZIDGoKZQn2xPn
JI5W0nSGpwrCEdO7yQFOtYa9Axgb+7zp+xHp5vHZxvpsnK2C5ggmienAAJOkxUGPqZC/Kf5V/SWq
mVVLp2mJiAPwzh/qGK53brSU1DLD2qYWLdKL8gqvq4/IwOS4seeHSieXVFRPMf3ZvnPW8jLVE6ob
VsCZuYd9twXFsD2Tkb2QZ3atkVVJvUZNJUdmDmSpkzBUE1e8ObCqxrL1RJCagoVm8rS4LuD4dIvr
8VSwHgaftLisu7qZoabUnQubC5cs+xTMGbCbQJK0GUxZd6xtG1qTaPH8wdqBZEt1ktT7NCI6E7x5
FG0yHg72abKbvcZaxSa+K3pTVSwJPH+7sqz3Pp3sCrnKIEmPhhkq8uE/vv+3TXRUs8N/uP9u+z8S
yGlHw5lKVEU8mXJARUSwcdC0uJLOJBaXpVn3Owri1EGNItipDwhari6Lm7WGLlWvWnDPTUqEP/9l
OJ2zYrI9N3YmbWidXG4av0n0wDq7eYtfIWJ3MpklyBMtyaYX5Xxa1EMcVdJBdslBkJFp8Tec1eAk
mGdtpTAWqMsdTbaEMfw4robdb19/9aIlDYGHwgCiUF2VCzeuCXXYOWu5y0WNMTWpVrujsIkxNx4T
Y7SQimnnbkIKp7e28BAGShve4AbPMSIFRWMXMWVo7CoFJ75KzLjVJO/vJJRKj6dJj6JZGlBgeW1q
2tl9C21bMWelj/XBdRN+U+7uZq81bU3BM80uZNr9p7tg0+6cjU1k2mBZrEoucOdU/OrFd29ePH/2
7sVXCZzm53DwKfiqyqzOIa+NHe1ajs/nUzQroEbK0z97G+9a/UZghrc4LywAMj3ZEK5hDFwCOG47
uBmjFmRA4fFtyUYw+Cdw00dpykXupsc+qrvZ+tVWoJt19oEJlnIa+lPFGm0gmYiSVSYK2dpGPtOY
dn1jK1J8cud2nLc5SV2WT+4KkM7rbgyrsXEZJkj8lCKinTGFWwa+437lXvJlublI/hNdoZCK8TlH
rkiO+5/3D9kB/9nbdwkwTPHJXwqKckDHTQluHXJqNHOG9Atr0RWeYnptisAUkdpT7O+44BPAv58M
HlOoLIKAR0D5iFAZ7xYzLL+yo6XQJBF0QEDo697S835d0qy18kKvYR85QrifFXIDFrSWfUS0VQT6
tUAM6Ni7AnIPzMIHw48yDLUn9YPIKd6koWFQ55jdU1FSywdH+I0ZVgQC6GWNaUn4BdCkUeCfiPS4
2VfjajUaT+DoOFrOCbfPBdNW3Ws6kL/R9gT18flSz2Mqbdn60hb805qGWAsaI1g7ZbpAph6SsA2C
SIqv8ixgOBHL34Hu9bZqmZGgZCrKBy1Cfrny+3TQ0tUCycH9HDBDA/pmnLYsajqbd0vZ6ANi/KE4
poWWAljJKd4Xkr0utQm2cZXErWE5xrAFiZjEoY5TbjGvZ3MBxLmX9Dgut1XgC3mv3Ez3ovE5u2tv
+tMal5xxzrM94neZ8e4ho3d2wnJ7IBnCYyO9UPAE0yIRBXmqUYZ3kM4LJsEAl7SqbN+SyT3aVOCJ
ok+IL4bSnMOEz1Gcgp4j9zRMOd2CeLAUHyHksh/nY8uPd1esnzBafk2m+WsJdkL0TEBDSDuvVbB2
Ov94Uc3J+BTmymS+YkX9mgO7mYCyntKcwNlv7zEOVcFuDOx7pzwHuTM5CHzt8PuNOyNDwlwoQhuc
wmwnX3s71RlMs/IKryLsqwfJ64/kk2CCzasYbDz1Z1VJIHBbDEufHHz8CHyMA9L3FZUXy0kxQ4Qa
GHJWD2Heop6O1xweHuUxHmUWiEeqIM9GBHZDOCkq0hKQXk0dCilDvpKFKfb+gYo0jUVLTAB9EOFj
xwYXGhWCyjuqC06jvu5sMwAUCWdSmGQceqS1EZhMGXUWm80N62eJfOxiQ/ygxC2VrI3n2pG2OV16
EbeKIWU0O0K5HpopOVTzcqgiIlr8keKKWoC8S1WloWw16aJK6169XVIg43XmAJVMjpOjwSkCmH5x
+OCL1hMUzlnVhD4fHvpmRj5Jjlt0ZLqUhANPhugdrYWmX8VUKvbOjuOCdmG6d11o0NNOWw0SE2jc
hqL4AbEyuz/g9ZAfXVxyxACy61YEnL8YCByTParW1J0Yl/FbZJ5mNeBf15b72JT797utAC2V3Ls0
ufhQ88r4VmO8QPVuEyNE8ptHIepn7osff7WN0ziYq1pE5GO7V34z/tMcPb3MMcYdDhZ0JUy3W4YV
SBAWGZfcC4pEa3WnMk6l9oTS3WLubTSZ2Rp63ODbBdhBM3CM6EScOt+gTHsSu4+c22me1na3hqTC
dTGdn2FoX+h4OAu5QzJ6vqO8hfPIiGpawtuN3dy9Euc7BtXWVhURgz2/El2joCVjIhAObwx4NOyh
At7d7XiCLWuPxRay+/2zN69evvrtICFzSE18R/zcrjVVjWjqpA/YnA9m4o7Gd2G3w62XIjktbrAd
cjq2Mw520V0EeljcTbkFbiqBuTD3wet/yH5g1cW95MX1Grk8yWKsTUH8chSrscLMlo00dovc2f55
yByl+aXz4W/f/2sXJKu6/JC+2z7ggGPnxaqo5tNkWYBsASyG3bQxEancYdeqCzpfuJaZk74faWx9
Y9VL30LuouKbRRsFEpaN4Rv0BSeo2ICPTQSzJWV8NDbY7Kxy0tyQoLDXyLB9aNufUsybDqjs32Kb
MGQXuo39Ig0+9c544Wn+FBE7KdKDS3V6D7T2ooq8LG6uymqm4mqlae4U5i/+8N2bF2/fvnz9SukD
WcFHRzc0fOYFxuucA7LhhDmffyzIRYltOpRmth/O2+4zXwddc9/jqGIEle2YbF91kpAAw2ugxM/R
dXDi2aIP5JooGZ/jvZGc9ThhSEiEeMQ4BRorge/AOF4v+MA5SA4uk5SGjUMWJSZOREiKfIpTG+yA
jiOYzk5UmsdCX4IlkMNbSMhGi9bFplguv+CCIr06m80ZwQQGSsaZu8Z0CIhXpnwCkLQ1a6kCj+Om
Gkvv8cEbOivlMNNSyEjanOJxL2/2MXpsL/CGRZXI04eUfjKEMNLnaNs0m6NLAQZRIx+DZb9rYrTt
nv3dg2X3ttlPSxumlZr+3a6a/t8+e/N7XAK3TX6L68+zHsl6E77RB4WdTcuEmMQRjQBuD/h0nMZa
GW3kAbMnjLu2U4nP9UZpWhhanfRE3DUWx3wWLqoDOKuRh32JBiB+TYQZQVUQeK3rCjc/XeAUO7jw
NUUhHE3P0lBRxNAvI0KD8LRYTh8upxIpYLCnP/jmSpngYXTR8eJ73Nk18I0+Hjjp3WtZFotMVTHW
MofalggY0MqjIBbvlZEl/lFtc/37NQXWYUqTcjGLhE+FrEi8V1ko8/BLzC3ewx4IzbEOO5Eu74PA
yDoNuexv0eKhrD0/u2GHbAE98UZLlj5Oeqd6lIGTb3wdQqwhlsqsxY6ydtBU3foQCqGBnWj4ZNcm
JQWwjDEqXjCClZy7z8coNUha2M4hzQxkw6si+adtvVHJBZ9iUsDKWJHFPJ1LoQpdE0uyY32aYMVM
yARYUJqJwGKxKWE6AXdO6lLQsKBhqlGeBfKBnmt+f2KpJIeq1yfGdopFITjSzRee8Y5f1MkBHdWB
1qDrXYWpvJ65h18DTWlwYOCB2BSDo/iJSRIw1oIVtu5d4AQeIB749fTHWT4YoK9cp8xCfBxTsLn4
MOhgOw4N7OZguqB5LPc7oRG5eX4Wm46ByQ6lwNntWmEztaIy39qWWxHI7dg0SJjZ4UoZ7MKUc8l4
5Q/diwBWiQNJ2II90ftbjqdoj+nf8cU9ynrkj4SIwhQP0kj6uJewXEfzZjylUL6iJykXHwvSS+OU
xbbwBQDmQvlR7Hf6nuo4uBw2EpG+PL1BEuxOtwm2CUiuAINNZoOy3MTvcJa1lCmnTni5OivpKjf+
Gc3e6YgQu0TkuuHO24PSQwdfV3X5pS/FBaLDC7fatE820SQ9gnYMf88tvtMw2l6i1L8l2chZoEuE
TMiAmDzqqADf8DqqpFGtjUAog928EojEkm0GkfWibu7qG6hGBMLC7xsi0mQknNME5Gw1gPW4rLxj
rs2bb5Rf8N7otFzvKBIn9AmfuOgK/casAOlZOLGOjZzvjmQGXCOIDE2xKk05efLTz7letqYqfbtq
Ml3bPdh0s85cHUmpT3eQwlaVgOIYxUbuCb5HnZBtNbaLer20sCEY8m662M44xrOZPiUqaWs51smM
pNjM5oCmkiGEj9NbFXPawsfJgFbC4Mfn+OdHXMas7jPvvxaB90emrs9fuS2mXHmHp11HJ6hMh7X0
Nwk3TB2dmIg9HNnj0qYMpG9/weDyLGZ2OTCfEzkGKux3GLl0bys6CDMThr0ZayOHY+qpjjIakbDq
IcSRmTy48qcXKOk33dMVR+S2isj8Ut4GLFG3g3iiBYxpac6YLqug4/gAa8/DeAaWJpZVs2F68etm
EBVDRDenUbFddQoWqKmIGTsbopyHWQbZetV7eGIy9VOTN23hYKYRCuuI49De0r+qGUsbudnfFHo6
kzVq6sL/iepXcQXtlkvKVdg6CIvNiuxY6Zq0kSBNX5LKlkM14tNR/3HDN1uqdaJKOe1YE/A2qZsk
a/ZxjNf05LPBaXOvJLmuUR6mPW1wVZWAGevSRqbew4CsubQaJ2PcNzVcpD5tjgK4KE8oszpJy6Nh
J0KGy/FPDWPzZJMfBX8PQ14WpEHGZcWMKvnxR1X2jz8mqHteFBvSWwsAbWJVaQOnfnetdK+8Q3MN
U2Kj4xrR9m0h2PgaOAQzr2Wnp2sEZKjjJDWEUq91KJiZhlEW2Vp+/NEr4keThiVLX4zAhdgmYs1Z
3Dk55CPYqBsLK+lH/+x1v3W7ebLEw+mr1+8SmrtsKESXA/UUFVo7g+LwwMdjj1CU4wAYXeauN+oh
7LpkjDa2aVJ4FgqXoSoncpLXF1Y7wqJ6Dv0edVTQREV5G5aCe2AvvY9VA9yu9kHocJuc0LA5aY+S
aoxSMrog7nqdhVPFgsC3d1h81oi/1Bj68hw3KrQr4+wqnLME68Pv9QZndw//8W8vzDTCL4hNOcLb
r9FImOUPtiomYJFNx9h5qBN8shgvJ7Px0y6FoHWPmgfZKeZ40LNkZl5G1IlsaWOxtVE5n3xPQXDX
0NFidzIXUNFpVaAVX9tad/cWNzBmLM1VxdngR+gDkFY/st0JnoZRI80CjRUdnsD0ZG8BdHd/KmKf
1ybL9ljxXp5tUKylOs1gHl3y7ZvmhKwMHno8HMOf++vkntkavdI8IscRIjT6R0OOR4O6snEFL6Cl
f4Lq+KQ6xj4EDXHIrglBULmHsSvsCNVNkVO35x+pLp1P4No8pi39iad8sRuciz8+2WeFdlZHfbRi
MhaGyDXHsikJ1jKNqhgiwmYmEjv6+m1R0BJ1lKvi2Er19uhi0tYIjmmEOiILa9fKdlRaXcJrR4yQ
am2nbjfofyWXzCwHyTwf18ZNm48NY5n7xrbpuL2VVpbco6Ed5Ql2h4aOrW7DQPNymzseiuiGIhCo
myRjoGepwhvEG7pJ6AJjxlfdfH2log1SHm9SmF543Oc5Y+YFn4npeoy6kGpt/XJAFjhDg/IpRmmo
tiw0U15l8Tzvg8Ay3whLxEyr4qptSgp6MfrEzs/xvsHR8XKktQ1ftF3PiBXYrK4zJFYsdeVcxWHC
GspR+FW5KQZsNMf2shPUPQu4SlBLw+/I+gQ7mDQtOBn4EBtOF1TZ0pQlkK8C2zeuzd1oZPpQC2Sh
RKZOGc1V9zv7qHMYBl6QhxiKPAxrQ9ukhBLDP/4HwdehP1CTEJiASVqYc0zy089cs39k6OjNjYrk
U11a3NMWJEyqyD3au9AvF6jODsjsYZP0jvuf9Y9gc52x4QVunXvEXnQYUnJcCxFKpe0z8r1N8SnN
mpEb/Tlx38Vv5G6eeTXh/f5WEHvcsSmqEM1Xx14hN93bm8EemD2j4N0ZTd3xohStAH3/DAfHDTOJ
S35kCobkB2Qcx4okya8dvcKAd/ZGgQkEAAEw32VmhJKZhIHnyAWIOU+EeqHM1OYpEnpFedJUOhoh
dFA9GqVZPNJbe/pW/+AgC2/+ZIGStTsKo4iMGfoueWvapqpmkZM6dwd908mKPplsMhTTaWu+dk9o
JTm76Abxi5HdRCLVitepncxFuZhRoBVfCLZMoC1qp7V9pOytUeMaxZiNtrfbsdIVb9lfrllda+52
3/C6rYFct0/pf85J5594NVutXrESPpC3yhVyxsurPm+zvZCqxuak3zvQ1myo5J5qO+1EOiDbkJCT
1ekGB8sdbL4VCzYRk8xBAI4YLaoWe3zo77dHRmKD3htYBbk1Ybt924RcsV0eF3ueFMv15oa5AZyk
xTZSGfyHe66mGsqSCW5nWEp14xHekD1NC+1gww7O//CK7phgPfd0p8jEyE7vEOrYSrawV9IAw18Z
arN1dj5t6WlAKFkBrWNIIFCzmZa0rcTIVyV2o5Omehuh1y2Ga3pdE4bw49UQjJ5eY8GK0jMThPZm
h2ILbubFYhaeFeqkgFMEW2cLkuzYWHoekKzrNQVFKl1xpyKR5vnsxi8yXLoujOKH3vt/Y2xR1+hH
NpmvPmTv/vKv2R613k6WILOgSTHGGDEqibrpas71NyQgRfVxjkpPzyaVFJn2idCYzSOCCSLCzC+z
EN2IwZPz1WozFk0PDkxltd1nANkglmUpHcxSYz1nMirrORLROyG6w0nK0AToig4jmp42zNIKND+k
NH/GYyF2KPbkhIror4pNs0vRiNlTEW+qG+Chm31U6qEC3RJH5SxUwPOPdpZKfhAVBJGkF73U9Dd3
d1GlOvANsbMkGBJ0VGSjO2eukFu2xyjJGBEHDSD+3osrR4oP1M7hMhanSXxpT25iX05LA6TxBdlU
ajf3e5zLGoZbH0kvuo0WozdV3ENDe7aaPhSwQzOR++8KnNnA3r+mKKZXD1N/WwchgD0eIE/VHxmj
uUbsq01RjASmOHoMCel5ydrC6Xi1NijIqDoPIGx0zahxUpf9HHIbgX9QoNclpx6Y7Tn6/prpIpoC
Zu4SqZ4YxM6WhPh5wqkW5blyDPZyMNLzbqohCj0Gj48l1HEVVzCJyy3erIqJrNx7N9bDX23FYa1k
vMxh+SRlsJJTvQZoLOvtEpjHjSqcB3lUF8BHMfpD9y0wJ664x+W/wwYnb5kb6ZgS0hHbCo9WLOKN
0HmMPvTcSGTNQsmys6tYHaU9gMQYogeRue9r+qLFb5Rh5owzvaC/zymdUWJRUmECis8a9toR0wYh
NZD05tmoykktNkiwsaLopHQzP6GpgfVx6ig4Vg2nkDxJeo8RRiFwlwL6i/nEgiBVC0RLzfEHx/uO
uC2pfAYPNcgfTUp7qUoo9I3x85iiO/3kfCzwazqwzXU7W7oARlzBp5R9GR6nsQbLjeBjqrxJqrw2
0uJ6Pa9ukMzRFSxq+cSBh3iKpRebzboePHrkRjE1dnLrclWTDw63uAd/YbMeb8ZD27QetyrLNAtY
kh0fsJFxNb3oVelFVZwNu4+q8dWj3g9XD7Numlvy9qZ/2ZCf0/v1IzQ5f8To31z8ss9SypHwWH/Y
TM7JeGZLgMbD0cw8eTzX8IGRLOReyBgcCw6/9FtkgF+J81gx60ZNjDccWNUnZgoxkg6xObI02CiN
U6MKv5DX4HYNtNitLiBNRXOMIalTsFk2rmlp4OtzjkHaX5QrDDNaoTNqZSEG6QkmOppynRwcndIz
8vxFOfWB3G6/unXlIbuGmop8fQHzUKzf16Hxe5tNv0GOjdjSY2PQn5fT9zZXWQPoH7aKXei1ypkS
NYKBOHEbw29IEprXE8D4U4m/Bp2Re/wdDiYP3v/3GLkAUXOlBZS9+vDw3ckhI9d1fgdCtApZzD4Y
Bgpn42RPcfOB0ULFO7uxOZS6PClr5Te3vnlMbrwRhvUUGBbj1DmfaYvXVVyDHDnB1dnpUIygERQ6
mm5u1mRxxpaO03IBMuFybIyqYyjgHGAo4tcqJRkandugidogjrhOPtBls8KeueZtCEh8e9kxeL6z
+bJYURzwnm/NQyNS1vnZdLVZ5Hz/1DG3O6iRxfcwzaebRe8ol9T9dy9fP//t9y9fvf1f8+4Ph4eH
3QdfCFZJgRAD+dV8xlHoiF5/u1rDiu0l3Qv4D6NGIO0sORkcezZKkjmh3B2L7e1YK33oxTyMvawY
HzRosx4a7V4PY11cD3zkV5VWC8NfP/vmmy+fPf99xw0Rl4WBfFTANmJ0z19/8/7bV29hc/riMLq7
cCQbFFNMHBmvxmIHkEzK822NSA6btE7q8Wp+dgNS00Rd3VIoP6zIk+Szw0Ewh7iCXxzqXpbe9TuV
FceNnkbnXKznlgomTc/IAHrQhcUE2O0VDdQYKj7iqEa88iCdYNTydZmgghBfhA+LbX3hhUJEdFQ8
sTV00Aa03jpG8/BDIbT9XW/6lcA/GzpQtL1rUXcEVKPNFgOmqoB7NV670LdG/pZQY5Z59ed4lrrR
lgNSrV6aMljDSfrD9dHk5H69RLkDxBzxQaFbYSjnNEsibtxEpfmaaR0u00zm0LNXb18y+yE0AXSL
r4uNhfWSLg9q9xAktR9WaSdsbYPl7GgmZDuSFgTmSeUiitfK3Uyd39NBS/A9ELvG7j063eUhI5R9
TGDIzj65IAojIFcySL5+/ebFb9+8fv/qq9H3v3v57kXeUPcjoAHINIvofUDv8VGeeVTevPgqjyS8
Bwtq1kLiOCDx2zcvXryKVQQEGyX++0Qex4j8uVGxe8lNgbYmLVQ+C6h8+c37SJegC9qiBWK39/jX
ERrNiqAZz7bSChufyue3UJFOupdMb8ZtffKbgEbrCF9dhKobR+Tv9iVCqylKxMVbRWUKBquQiUjs
nxhNWIBnWIeTOYi8g3T+PNTZMLYELPB3f7QJ3777avT6/bvv3r8b/e7Zq6++eQElHxwded9fvHnz
+o3+fKwLNizWcVO/Ghew6Gk5/bbYvN3MfkePvZDurnXaTsGruSMBXUEsrOY8z2H7KxcFqcmZVta/
stJ73Qk7rOfy/21yeH14prRNby05DH5hiQjdXGweAnBxEKyRTyJm0ePj33z+RXBP7/RzmOpkQGlO
A9nabU4nTMMLvYTvd1LdvwW28TEho0HVbrS4+wbp6F1PNDlwVt4uZqNZSQafcEam2C52ow7Fne/+
OAKJ5/WbtykZKKRHaeMcbXeEPbIfNrM7th8ahc6xG1LeoiRGkLdpKQNRMSQMi3/34s23KRmGprPt
cpI2c6AgcStQp5AemWDB6YqczPkKNjgl8pWTdOhI7WKoJuxNFiAuDx8fompjNoQNifeJIewrwuyH
sDvEr7ORjQ+B6wsvHgLzJoY6BP7LXHEIXDSe90sq9zMo9w2U+xmU+1sq9zMo949c7mePW/NCuZ9B
ud9xuZ9Buc+x3M+g3O+p3M/ayiXf8yO04ECUMihsAmLL5fDX6ED1ERF6f2P9VlAaneHJHyG6Ehvj
xlzitl1MKznUHtAZOdgFXg0MuVrlUaMxFDqRUIZB+B0RYIMoPLebJ7h8wKn7LqZqxwspoI4zxtKJ
V6ubuZ24ARCtnC7jKHHq4BOxhG5oFyTVoplMVymc2vaj+RF1BRDKvDi7seVKpr5yoo4WbD72n8GR
4135PYqt3GLs9GK89O+JTXUodLX8BOnT2skw/zCf4MTWCAnF17pSunetQF8wZufOQ5SXGjqCWSoe
aaJcNigXrxIXFD75UN1uyylMprc9cfnz155jvIIHsa3qr3JgceeTkH3j945nuQit1XUPw6DTcUEF
F9GeY5dXca8Oz1HB8NR9YtpsV5er8mol9Row2K11UAiKurw6wS+nTcJQpLYEc3XgDPGQfHYkuR/4
hIQjqWPwFaaz4BeCXGIM5I3laHYCynPQl8jI7BSNcjM9g/0p7V34Fsl5OV4gCQTzxNtcfEdOLBMy
ykVNLaGalXU9n4TXbxwdAC8o2ChXwoiBuEXRN5Inw6RZ7m36OO92m/AvMI6yXO3h0kF2sV2udMTe
q4J9IFbJOKCB11Hcni2BblQFXzcnV2MyhIfNaH5282hVbCmIyJ+KwCRakH0LUt+QpScGRseqcAgZ
ok0NzIJMdSlKngnFMEA10Ef2QfxYzmdij8SoqtI4Dj/UMo4HcJr2JE6cLu1X5wQdcjWGnjtOHibH
D3BQgBctMOwnycOYvWWEpPfRPh0dAGx+marZg1d7EzH/NQjoPMmBpnaQHLcQoVy99mxZ8uhR0vOL
8kflVfILCWAX0pKij8mD5FUjiJnE9nQBPTGPLG8SWxbZTux6M247OqxlpIJehbZoGrGKunb0WvN5
9ibA4krgy/V8sxU0YbumqrJkLL3xShyhDXWBuyGs2tyntgaZez7dLiAVr3Z0mZgzYxlvrL+JECKv
pu4o6XouIMDIKgFkJKQ8syhYH45dKRVASyBxPcf2dwJrbOZZD3XjrR7Sn+TNyHWoggvzdJpB/SSO
GbFztxmIbQhtB3RP02T38Lq5Q/q6UMrZo9sR3oKj8QL4TgwBaOFnFgGs0fIMG/1FqNBnpsURL3j7
d5XfQxbWROBfX89N11jAD3tOUstdDtV31KuykxK+YNB3lN8z2gmlMfI2xWWpr9NUNtSs6mjYhSpS
DWcDdWCXiNYUWFTkegFCPZCtZXOBwVONCNqN1lFPq7bW+QGxvAZqE5dQRLUrI3TQ5S6z3aAdSuZn
Z8e0Uw8DcgeKnOoul+FpchhxJRaoM1j3D1xaexCn44KoVOQw7p/Ns8H/35aaVpDFkJ/2XId3Vsz9
QgWdsplDk8zRBFsYKOsOvz5spBeNqssW80q5vGL/IlQdwDrWF0lRem2a1V20q+ITSL958VU89J+t
MSzju5NFZfluuqQiujth0rrvpswqp08k/edb+6bNM8NQDKfM4W+ao3Y3Re+OTSOyzdnyB3cuVmln
DfuxWhNPBRhRWVEFGWO1XUnVpuEIGLSiBYnUU1ysQLssn8155QRwd5AYlyv86Yshl5ea4uiRo1xD
paIq0qMyVQRV0kZHnSHQVuSscVvJqXxWjlqqBrvvxA0vyIpELj7lKwYuoe+iezTXogQAZ6wMv5dD
5ZTL6Nx2TdO4olF3NN56f/b899ToIU/6Q7qiG08vWZfSSP5eHPMl+REKuqiUMbfDAt2L3Kcf5qaF
qnMft+QmHtPIDss68Qr/rCV7ZQKGhHdjOvNvwhSWWZsUX/jk5zU5aMNJAP1zuAA01NjZk9CRaPxn
dI3ctc2sfq8eRbNG+lbRCPv2eDcN1cOKSNjDn+0mUkW6Iezn3xyGKcJ+/iJaSNjbPKl/9/rNO1TN
0grpT0f1RSkxF5ntPX/9+s1XPfn8lqyCtpVmZMB/i8WsHpGvVPoH2GyIZku4zV76R5viVBXz9ttn
33wDvfX83f5lfVOcbW4t7l25vjXNGzyx3prqy3KzKZfR2j9//ert629ejN4+xzkz+vL911+/eAPD
8vXr/Vszu3o7/xPKF9TjrbWYXT3fVnVZfSeudbdmUAJemlvO2P9+V566YuaIjbUDs6NK346v58vt
kjN5zRBHuZGWXN10Q73eYtG/LKpVsXh83NepmvnQpcyY1J3YhnyFLTmNpEbgY0iB26ZJy4zbblWe
OH0JdWm6742aaWThxAWI9ra1ZNhFLN5gbkQwlKc76US64svXr79xYyO53k6RiX25PTsrKnKwG6ob
1fYxa8l9G/WdzbvVIV6q891r5H5veu1LMDu9tSJt/aMmSuTspOQs7qsdbMAJUDvqYaVPadvkpirO
ekg8a1xB4Ft1VI9ain7S2VHaEm+yUsa9JbDVMWO8u0BmpDjLFf6CoBLWZE88XitPXhPvLBGL9R8Q
d2XLoQSU3hzltdm8BkH0ph/rhT5zzv4fc+/xD8lBciShb+yJAc4KfFQYqFtpVHVu5hTLAT51Kajg
WUITQs7+LgVJLwT4ZETLcrlezKfkFYNtsSrTPixHuiVEdeemoluIAm3ypxiTZY7i9j80zWHv4eXG
+ub4NwyUSWgsxgyb9ENl4q6TRW+6ah4e7skNZbKtt4R+c1VWl6S1laITaEw9PodK9wT62J5F5nqI
puM12TcQqGmm7U5EW2PPXWKj+57pv6DzRGDebBr3OaJ3LHHTJY0L1c0EvZGWFddr8uxlK3D/Zr/l
OBN1RvAqak45mkag+N+jFUqxbVpRb/C+AH0PkmZvcpBI7sdmWHqNPbfToMCa+dwCeHcP6rFYoPQ3
kBB+BJNkxt1F2uv4GizVP11JfMD5u1kfYVdmFI5sOp93d84DXdUP+fv/Fl0PFuV5H+NPQYd8OHj3
//6rv/mbwHVZzthfiX0ILJ/vOXmv+ar94C3OD8h/kHGsykbE5oYWml5jFvKBVlcuVpe/KgWvT5CJ
9vH5v18P7s8G4pJhi8g10YdHua1TpgjXm3a6Jr14B4zXc+zUHlk0iYOFdAK8ml4uio/FAo1yjFeH
1kHcYwtwnJ/Lskb83+evv3sJpxVxykAnsuP+Z49k2Or++iatExNITKbhPdwm2HHteqM9jjpNDHNX
JbLk8dzzGD0CGRv58OBzT0EiUq5EX4wykNmQs/bPRhSRbVqSLmhFr+Z4l2+LJBuxg6PAYJByh67I
AdVQpzQtd5czxHKicDxU57jCzBEgTTz8jSabVMX4ch9uIb3TsOXmIh7qntQDYNo9MSfqeBFebZkS
Lyy8D+3BiMMOJ9MRlkBm3ZL0PHVBlXvhhHU/cY3YuWt+ZC6cXSPrUXyukx2FuLZHb9LNcDqbLZi2
uAGhmeEZWUrZ0vV1IPEGyEHTD5JW4yveYExqnisVrC/HQPwObexaBD1XNZaDmmOhK5G7X/IpcWTC
2pASR+bGnSY14mjHeDs6XDeYIfIqTsumUo02vo0G2pO8G4VKIwL8lgRW+Xri8pzuqKTN1n3CBnxP
u7HhFaJsPjqidYyAWNqPxUfeP1stxHaM1juwTC/8D+ZZoTQxEzBo4JPTLgVoaLwvuxF7PKmX+cmR
UTxlvk/nP+J0w81yD2I2CGhqcyF+BrLyrDVKI/UiAgxTdJ9RiDPcJmWZsnGqwbnuYwiFtqcz5z0X
FViFKYSNp1ylGzZdMiXg/on2PufJvS8e/93Rr492VSs1zUnD+9vmkAdZuU/Ee5sFhRvaz/sUrqJn
kjqWxhr9iDDTFFAk8DXxHLvVUqhODBQ7n843PXmNzneb4rysboZCLm9M8CECT0h6qqLS2XCBQ/OV
H3MlYiDULxAPK2PRAms4YvLNnm+WCjPFJDZECLr3p58zE/brQ/99B+XA6+XivFh9ePTu//kP7Hcq
8+2MzDoJSNxFZ6zmaKiFz5CNjVw38KM2mrS6g/GcCXxAkGwl4p+gAvU7nd40Q4iwc+Cnl1VxiaKH
PI5BdC8q6ITtdVJs+8nx4eHfdRTCDjmzVkWnE0MYeDpEiIFDJYhue3VEXHOfWZ7uXedJgfO+jpgs
Gzvba4LqkxxNsD6hfd1XaZqRunAeXGcdt75bK2mqJkYR5pgwNL8sFNkrE6Hy2wK2InzVwyOyJ43v
CWE+OCLEgnSU7oFG7QOLy40WZrcVus3mwiYUbS1CZoruQh1lNuNz3PIdSKa8UDsmnu0klRIj2Fpb
mbFKmju3jWond+g//dy4xMOzwXx6ecObYSA0mKwnKSwWgu48DfHYprSF45gJVGfPNVSA6fIsd6T0
nd9GocFzZqDWmHXwznpNjM97CtaRX2Lr/dvTloNcCzaPj166Az/Mo9WKOlpv12hDMj7nk1jWtzl9
wB+GM+Nxod/UDgcMpEp2S3LHCc4sOTg6FKvNULye5OiHLtCOTKfBQrhRklXHVl/4ZtdviSWKHuM/
wzkMEfgN3qUhkPU/4pcA/tJwhl7XBC5f7AEva/Z/D+mwb2S3Jize/QoXisExvD/TILJzQeSEYt3i
HcZYUGrfgWQjcytPYBozvkqqlnE6SBy2WKqnPXzBWWA+6GUGn9jbpPOztQT4HWxDOL/hf3oLuOMA
IZW2cckTuoYjHai4Wt9hpO5RUI0KIXRA/AOGRPumjTAsrcCXPdt7xqFJ9QpUV1pq56frF326VH1s
/KBOYBs7ypyzsIooPM7Hk0mVj6dVubpZ5uPZDIM25QhhXmzyMRxx80k+mZX5ZH6ekztR7mS2dAIy
1+WHbbkp8kk5u8mBErDTTbnKp2MCYcmnBcqN+RSDseOAwD8LTQEeCTwG3i/R8SifzfIZSAazs1U+
m1fwv4/5DB43ebHMSRjVufnKDip6Vq7wn2qZ0/kMX10c5RfH+cXj/OKz/OLX+cXnOcKQ5NjRmsQ8
n1OWfL48z+er9XYD/9b55WSWL8YTqMmiOMe5sJjn1HpkoyjtKRLL8TpfjqsP26LIoQ3bHFH38iWI
ZnNs7aqEblmVXPlVyRXU+VdlPa3m600uCwbylGtG/ssZyyZf5yC95h/yOpekKjvH7svrJSKdw/RZ
IfzE/LLAPyXUtN7cLOBhO4H/rXNypNDZNzRym1mOWiMa8M1ZWW5yEIs31GNsQ72p8s0m3+bbRX69
XHuTAJHY8R8eBOrMiypHZdOsuM4JjSivx5Dp47jifJmE0EjzNCO39VNhaXL9jDXee2sKT144y/Pk
hj1c4vH88D+M6nHtzmQjPIsdpFmnDSeXC0TKDhC2Gl/51QSZlaOOJpPyWnD9xytjUQCvjUQnkfHE
3nqxMMEqONKZjpLsuSnswIwFylCVUMfKb1mAhB+m4tH9KGwJMDS8KkIHio+cBC88GGZN2rETxZZS
WuZ7mOPFk3tQPJUgfmLWw8bIyv80xSgFvlRG76mSFKb0p58FrH0G51WBvz8zzSlXfjauEgGQzIzr
oyvLVBk1KeZ3qK+mCHT+fsK+i7aJ7KRmHvhCCM41/Ij8GlUUILC6jd1tMDmBPQbQq3gFR5HlKgpA
h5hlNa+eR4QrThuMj7TKEicmdRJBu+KLyrembq7fT4DMaajz+n1xE9EgUMCv7UTEfBJIoeRlVYby
crO8c2/RGSJWfmlD4Z6feXRa/XTuqsKNdMZopIJLNacndROkVTk7EWo9GlpjGIfrXA719tpIrozJ
YIii/szQN+vjWBbFPefbhF2E0TsskDAKdIwU7WrKy5RfSO/6XOOesv3GLzHbdb6I6hkZC5NpNzvk
ijHSPmVhTZ5XQR2vk0B1gRyK7z3/RFnzUUZwIhlOg8uKCkaZ4xjC18iZhpceplGVQxFTHC2toOnX
Dd/vrFtjjUEO4R5mVgkvOfHUjgJRhbtpZJV5RDyz9rAvsIJ+X8Abo4mVBbfB85c9tVJjI+eHhgZA
8UnjLc2rBKUAct2QorJ2RwNEw3wI0nOagFDwICCbBcf+CBlXhYdDzdnbCoSSntyv79dPoTg460gF
c3fAZDMt6rbgXlqPlXFVxWRNQUQ0EC3O7TzTrncQR8PxbFcLHpkGmA7e1TEH8Y5pMj2kJGdsofsw
1i1RDwoYcxslsGXYoyPxSAbClB36DMdZc4PMU9cljpTtHQ3S7toSW9j3rNKzUEk92BQBpQdRDdvZ
99U0JkG/hp1ejeGi3SHbZAl1jDamAZ31bWVsoaLaC0cDM+7cAm3AjapQCpZFHw4C1vkaI1qROE7L
YE+tRFBPgcKH57pdC3l8Ssgho1ANySG6HYfSZGLu6+52Jo3GVlHBEfzrneYluUTyhv0sFtJlRYx/
yBWU3WyPCWvzBbsox3DLYurlNLlfD7v3626qlDJERvW5HajYZGZpnojZYeG43fV2zpgpJK0BAbyO
8OTGxrZFxaBigblDn56bN5p73DFJlU5Od95vA3UT/uH6YcrYsTdyzqPzka2QOe2dRkvBrYWScl8i
h4BXfw/bDc9gW5IXUEJzM9u1wSSG7sJjXo/h39DfewJS18eiquYz4LRUR5Fhi1r3rVZEugOCV7rs
n/9cRUugcqdLM6fB2BExkziAEhFKqZcSpV/qNI2SJxXpV0i9wAoB1IxcVKwqIcUKqRHSqJiesl6G
VAup1h0IFgR30R2qM05Q65WI1iuZJEZ9kUxmZTKZn8PJIEGdFcMBzs7QAjKhBJEapvMEGpdQJZPL
ySwhxVHyIUGoyeU6YQVNQgoadJamCyF0qo7RYqUNjhlqxBOjlEk2m2SboALFNB+mbXb6i3gu3fqw
aPcLeC6nbY14FNjDmQlPyn413YzSP2iFV/Dd1qRBTBER1wjlnPEuK6yFEOeyqiDBH2mxhmu4mrEp
oAYep/u+Xop8aYA//hb1qn+fZjk+PLFvF/bdU/vunN6FlP7WfodJKJm6ade+XJd1I1ugUUHzxOJs
VBXXDB+O5rVofwOE/mz2fdWe/mVxA9xXC1kjUbCZo/wSlRYtNzFM5ISSCKT4oR/bxwtgt2UdWrDL
wdFFDCVVMB1f63bL9iZ03aVrz5TUidulflW02KW2UoIjFoEjpfYGOpV+SuPoNm4g+sAZ5KrQdK3r
ik7HTiyZj2j2efj+vzGhcKrtalVUH47e/d/POBAOML75FF2SiEXhJgJJKBbOuio3JXxIaB9A3bxg
T6h7+clsoq7oGUEaLbMtKjL87igIaj9gjgGcNvaCM5vPuB++AW7gocPba5OeH9pY4fej3W9qh2BA
dsAKfh8j++rP+Kw+cwVgRCjZINHPHoj/fKOp4LMB8u/c69yT+pqQMBRv+K8dAIgdQu3D+AzSDLts
uLHwwgN50YFm24oHUgUlMPGAuhQPCIjhvfRwjldfXiAgF0/olQomZGL+XJRXyasE47FzLKXNdv2I
esEWmfReDQ8ZrgQklX4XuM+nYP87esPbggDYpMamzOVtbFl7hQSYGVGUZUwB7KeXKkJAn3ZF795B
QftLhgbggTE6ge85YihyTWOHmJkO/GVR9cWGauYTV82auQMpsOohR+9OrgfJte2oTCXEuLOVDRBj
qJsOHOyKsWJmgT/+3Sxqc9ma+34dEgCZ3T6oCDHmJI5/TwY2hWwequuDvkH163wmARL4wV4JdQcD
GD+MBw+/uq3BXe4fHvePz+rk/sEXAj7kjRaOju3cnMrBYE25FG0Ct8sKkAAxZJtrIsvI8MtTf0QL
C2cZsvm3+PAWH2CUmoTOQAYh/+9bKPU3xbialVerESzMnr1bfwV1dAEPI1c8aHO3cZSdqb68RwNq
+em1UzaZkdlkenhwQ2zz6w3+MpbC8LM/R+bZD/ItynPuo2Akh5SFf7va8VvzlIsdHK0NptdSjaGt
TydEYKXGxHND3QSc3dJR9mWwxk3wmqFd8JyxO5Iv3UxFmTKpLRA7tsWkVDI9vcbBkU89U+m1GAGT
kz/zMlMgTYAuVdmmhs90lwU/TztWk7TuczhgzVYoqWFCLSXgaynAK+GWbGY6cla31TRHxrg+0PYn
EdY5L++6bKU6KWwYcUlPaF9oGW4DLdqYQhKr7rxMxlfjm+ZQhJ3uxtMHVaevtgilnJeJJF0RXRg0
Mr1wLcRW7hrIgHDIaaO0sN2alB97A+lKSj++wwv6A8VpnehbFBASih5jO2qMsCYIVrhZkgYDGMVk
e36uPeBQnsgT0adtJmKZDAWRCafSjtK3zaS/mYxwiKlIEL2SBwgQ+IB9IhyrARoISjQSd1H80/xo
DrP0N5LXRMKhgv2gYxPjVFPNZ+x0x9fx81oZ8Hg2hdEBMDNyB5fbyZMJ9DDM7JdEU2mEvzHvtu7x
G+dywc+095AJkF37bq35GgLJwKGGBjFwQGMBLXf5cBxHUx90NpmUdXGAUWFjerguHXmw5Bf0D4J4
dH0jeykbhe51WLihIh9JXKBIS79/+d13L77q7lAlmqyYnP7XYWH9pXfCkRhsUcbEW7fj8Gh/MjNW
jBLqhXKaoUce5OWVHLwXwEe0O7DbnOKQuDtENr7l+LJQNRoyaSxyiP/YjQMjzDmv4/gOKnT4z1Am
jI2niWhjI7L6x8PBx2JUGH5ATDtPvBkWFmMTWxJ01R3W11DxSvfYo4zG3WpjLGAx2iwkJEbVguKs
gJyZBEI5j+trnPfdaCx4TbWPelblVQdzsm/55l1zw2m6/+Vs8r9s5xsjFu41lwZ2MsndRjcYa7xX
xRw8tjLrjG7Pzb7c0vDG4TnUgbzO+bgwYAp6+kk9uFpD/MdKkCazMyp6U9Rwnnxke4m3kbHyCVt9
FEnNKWrusbUoWsnYjCrmcN/sW+Iub3faGIQ5xTunSnoxtY3fJLJ5W4MBudPDK8cuWaLRObW00l0W
y1LUKUG8ZmK9QzcQCuJ8TOsdtSV6K2zcu/AVIHYfuhVBO3pZxBJnUkJtXuJSqbbrTYQEHPLXzeLc
FuaTjBBwHW287bxBCaG6/fJuNV22MFlcSsPbEXY3nOOWAYhnL3lZ6Ix8Bs92u04KMe5VQawWU3x+
l2dNY2kzqXm+36+gAk9dPl6bTDuz0bcobC9Wi06NyqLLG2Ypgo20bBayKOrscac2MwZeNqszOiIv
xu79uk//Twfpk1Q70qSnJ4PHp96BKqwDym1I5eR+fZpQpLvkO/byccC7PqjZSTqfpac5/gCJy8BP
45uPKCvAa45ujFeQaQTa2/CRL8d18Yb3K2um2NnPKLTFT0HNRBVMUMyPtnpfM4ETzS28eY75C3H+
lAzyAr0NEGU42sZs4CjmOCWiWP+mPKDrahpSt9EdVWOwGTumf9R50NbSEPxkRAZLqftkuyI8cbpn
NnSfdv3La5B8Odih9CGca87m14ECTbxdODaru9ORnFE4ex1ipEHT/Mdh5fmzpW9ixOK5F3ldVa6L
anPT02ozaOS05Ku0LqeUwzvLzftkk9CknE2k2n3yGQFYuvEfTQaH/Ycmohe7fF9E48U3iqTlQkfR
2BEmkDtZhBOqsrGRZGN3N6PyMh9wEziQX2PBNLssbvCQULf5KZBoYhKdZsEmryUp0WtNjVb45FSr
LHVK1xG2L+3IxVa8FSUa3rlhVDymn8f3xCYvMGXL8EcXM1qJqAba81FcfpQ7nkAIbS3YzB9fAnBF
krs71mJajRtQJ6qGFF+9EgfXpDK+rPBLUG128aBbOmJ+5qYVTXqSviKWTK46rNBAyUKiz5qRiViT
oYOzkkP5zgKFTCPR3VrQSJc0Wt/YaXArYlbDxoW1lP71xWZCnzKnxmbpAhaFxwZtXVAL0GSH5o3R
uvW6z8dr9NqdIcKP2bt7RJVKyCz1zNd9wgR749iA1bRGFKzN/8xKzs2o54nb2qhdLRlN9fPE3VLY
eFC2Qk5EUC4MX9L9Jl0iyLlaXNx640VdIvI3B2bhkScILKM+xKlWwwxkKe2iuCGenvW9AOgR8SPU
PueRhjcbGvSEbfOwl7lmo9/EgweE8+8fXjgQ4/xPFKedDh9zcQtI5oFxtL3y4B8dTWZMcRMKkNTQ
EEl5xkMVSvTYp3XEwVXQXQKRttiNpfDITDfb8cJ2AJ6fxjwGOE+SAwfVhFE65gi4hvPYoJb5pzJq
DbQLD2JF/7yPPGGcOAv/+eqiqMjvg/KPFUH2Z+/vcTXh9QGdog+eimUL7kyQZlzdGDcecopfLNz2
BdNGUyAz/apG0/ByOh9j1SSkAveBO9/6NVPbofnp1Yxy26UzXlyNb2p7PpUtLLd8NHccPijH8Vz5
5ZViztnjRLgaWRYVtVGPNbrTMkQrF2tyUr+UFhj6WOGywr9mnaWoQpa5VDjrAxB+w5HTB2hdBF3+
QRmw/qYsIBIIssTD0JHXCSouWRXFDHGHgzGrLyTsV3B6dVKFOChM5fbRGxuyekBEuPISGwTtSMgT
jBwx0NrBI6qkI/Ozs/vAwou+szfo1RPHGeFUa0+qMuLw82nztKb4Q57os6yaNYrz8viR9N/Gglu2
dxlJpzhpv180XLHJ9lqnX9vwhe5ZbV2sxF8Ue0fCg4wmWB7Lylf7WjWZ/d4fKWVQoBriXTUmZ36K
zBqEq0Q+KBY/xqJmVSrFDvIFp0DFrum9ZVaRYygWTNunE5mfzgfauUUNqvJ9ggiq+jAUsHS50Km4
P3XZbpDk1H8JKZUuNMypxNbck0F1rbNYoAtz2Le08GamdAFIImHSVKnPuVSzFhWZVrHdplDXxELm
TTDRLU9oym2WulEnszY+ZeVZKo569uq3r+5I2MWiMD02goSjlcF4c3cA1sLSr1yD1eyQxCLipij3
Oq2yJsldLUwnFKFu2V3vxrCsgld+wFbc8Ctt3ZL23kKa2gIjDLVsKKykYpUCj6rsC/zqLjuTN5C4
OS2KFTd2CEeRnTtUY5fisFG227KWzaqxPrSF4U4Iyx0bjkGu3EOFaHVhHg2OS0vez9Yx29nzNNyx
azQCnSVs+iOHFhK/yc5uTKamdHKxoja5FdtlXO/w1g6bSQB3sT3zDNsIBw2KYvPTz8qdajaz32y0
WHnG0+RCXXmbFgHHGE9RMLYpTTgIttuVXH1v3+F3xnwv5+t48qGHKvi0LsZO7oJEbAASBibzLJ/6
un6hk6cp2priuC1PPkngySzMGQ8uK/VUOM5BQVbBSaNB4Q2X5HvPhnUGiI0Pr5xo8Kh1wDRmmK3v
yWlmtAKqQm6SrMs1XYNbG4ZgtpiqDlVNA69FroftZzwLmVoxxzGTQwe7gl5yNZf51JxE3mxstBer
YRvqe4zDVhy6ywoKqc0eQtCtCMvPI57tA8a36kVV6BHzGmdfgUDYr1eLm4TgKpWSSrAicONcJYS4
VlR5hADN+FlBaNPkdsIA1JNC1CAmskUgiWC3tLpyuH5rMd5hAn5GNfNhX6ALxh7sqlM92DumRtvA
S5w1f5bYeRS5RLHzIGrvomayNbtRDePQvf6aHLXNlHAN1/ZIHV/WP/gdTbFBWvmK23A02wrWJM9k
V0QMt6uxrv3WXhbkWWdd8nVzozhgO8YQaDUYovMQd3QjjWPLJwFxihlPsVFgMStmI7fPoWQmyZhX
y0MfmzO9GOOSjMlVrhGb8gp+1b0G6ei0NalF6mzk2XdkzM23bL7DJqmTgRV4KFEWCZ/cBLVoHXHb
FmNE2LbcSPxg5TjDOQJTKVnx5+SLhLrW7OC4QR0EyBc0AbSOFcr9OC+3NXApj3xf776x8TWLVY3o
p46lQOujhZHBizcBjz1FKnASNpCYEcg/ipIhg/BXdqt9P/o+JulIOh1PyjEb/yjnhKx9nTGLVaE5
bYJZMziNmoXg1m3scsvFHpsb1oYtVe+6xYXtaN9UIuaV4YmxoZIxNk7uBOvM6zx7cknAtuQ29dDR
U4djL+NOxVCYXw69eP61ZzIUH4x1tBNNRK9Ci2QvmzdtN7OP/Z2tWYsRXtY8fd8DTvQL/oP8qIpM
XvOR7IVTOxGIbLFYo/RGsxcYl7lOAJnfHInCjL3wbhOZU5iIiM+BTykAIt5Sa6ds1np7A9HF1mgT
jv4qZveKE8jBsr71FggOh+IzBSOCJsYhypWtaj9y6NwvUAITJjMs+rW/bVUj3KpCqvXjIPgoka4b
n9pgCk14mLyJocnVsadc1vv1wlHzAsFcADua1xd4hZA8vsS4LWew6HB7WSAkmeCiCYusJSOC+lQz
PkeS5b8UpURoMkUhnGJyuK8ereZTce4ajfjKiCqdGtKpqfbXpPtrqzVhLPD+wDdRGJGbUaN5UUOV
5IoetYIggpjpc0uxL67nm17DsC9SKoqOy2UxwzsntB85r8ZLck2sE1j9ibV0rx+xg928qLNbpnB3
u7pc4U4Na7MuPZm5ZWo2Khqd3wKLidVmFiA3euINgZXuOe0LNhJf0ZLEoYPGUfDIMV7QNotMruDD
ppqfnxcYw091tO2Di/ksAM5kTOAXpuROB0t0hhSQFL9x/VBx3aP+6WoFDXoq0LCjSRtxMmQhFpCL
lRqib+4nwPc3xQC4VFqbyEBEakKG9DhdthKxSaYNXiySeQdiw9MlI0VjKmCaVIW5XoQ3giVobIm2
K1wQMMeJwQGjm82ZAVNk9+W8Jo9r6laxz6uN5fKsQGmgWE1hqmAIpULXh+4H1pyP3FwFcQ8md8Af
9+h3wxKwU6n7fTMWdHXjFDwAtIJ4AKIclmcMr75iwVf3ODTTbYVXw4ubg92D9K0MErPUwbg6N6UM
KMoAOvpQclwA9oKUg05hxHp7eRqGYfH/Y02SgUp3rijmmO8f1u/QncKqpDddF8nfrIP187qXc3S4
e7Xbcw+YE1+rLucrMRjVHm0KW4azwZQTJPk5ucyg4DzeJIsCpequI9Kl5buppU3GFnXkIGpgyr3E
6xFVMp6nEd2WwmZSd49pondEx1ei1sQufSPQS30QKQVPnpPCVpV7Flcig3GaYhDqADhkjXv+SkJ2
Cbae4NublOx7z62gFdU96h/3H3dha+JzLj/3Z8XHoy4eOyka211WiIFdsAOR4gV68RGtQBkBcIor
GCRokGGuGVGgaVsMbJjaDIxYCHnuXi/pY2DLSePfnZLuiDSeS9HZUxgZqY5c7ywZHBGkeO7t+kQS
WFs8fxRjztdIpCPmyoI/Zi6Wlhi/OVXTJNWCM03aFcaz6WGCEBnCQABBkt61sjC0aY0NZL+b2eqa
OigNjiuCwL6SJ+aNa1oW9p7MvfsVLQQ9z+/TDcCH7Zxmai3m58GNhx10KVQvQ984C/vuw/H7f2/A
HljVghLmqriy8Ts+PH73vx9zvIev5ysJ7GGEBpxTW7bzkPsnQu4UpQ3dPCyFz1Xl9vzCiF/Js7fv
+p13qIkXTaN4UWPobls0SKxQAuSnva5PER40foT8BDahQSP0dbZrk5mKwMIXLUn6VWGLNsm/5Po+
M2lovndsYAmzAWqj4e4/jT+Ou1Y0xT3wYrNZDx49mmzP6/4/Ee/ul9X5o3ldb4ujz/6OQy9jzBDS
VfS6X5bl4jV5j3w5X/GP92hRxD+/IQNf/PXy7MU1vfoKDondUK/b/WZeb54DL8AUv+UAHWUlOf6I
Fsz44zkxroJ++q4qQgWvvPDrq+2S/OY29GRdCujddsI405QOZl+8Lvj1HeolxZwB5P3lhlv8tXjz
fFWcUU1QkJXfb2i2UiuLRcEFwnjMz1fNUp5tz82npPsdCu7442t2H/welWncbfQIg0X0cQNsknpX
3fB2R7Wubr5GvePiRkqH2UCUaJa4X1/DxGqSegGiA40BhS8iJ8Zr7tPvMBQQDjPq4Hg08KyxtT2E
c4J0FnxhselZiKV6IzhHHiQgTyLVvXfKTOORaayj0ZiUA+uqF4AKe54n5kRna8ClNggh/f0JueoH
oVD2qJdSiWOCHJlEH9Nnd6lUlAqmd5DeX4vBRuR0R7xCwrmxIteyFxQjkQUSR2s5TqE4Ol6xHWe3
cZDieJutWjBxbLIUyEzbPhmpzVSnV5fbagrtYzR24clkjCIBDAZqy8ZOIMAZycalGshvDJ0EjO6l
i//UI6pZU8KQLAJrCsQ9AUN6Vl3eiYgcNvpEgTLKwGHgNRkY+Wv1ZaptTaOFbs9tGGy5k5NYxm7Q
fE6sigO01sOF022I6F2rLfD2SYZ3wTuJZDZn0YgEajgibUHyrNn+OkYPZTkEX5QtFacwu36DUHpG
vvUX9iMGiIVJcXDAz0OKH5F1M4s+0SvPzjhcL4GRyHiHwfYonmBD4DJhBo27wtcc2C6I+ugJGG5+
Rct1zm7W8l5q4U9c3LL7fCoaqS+9Qk9wmYTiu1/w+jAzA4cd2B47+A28oFq1vzIMWt4Pq9QPFlif
HJ4iUHA3SZ48Mb5rDFCgb4l1vZGIYJ0iAVkA6DNKjnXGD7FnKnt4qgDLG03Dkwnk1ZJG6oslAzMP
9O2AlId/To4+H5zq8cGXnQ7a9aBgMFqO1xZhDtf4l/PN6yqBWfln2dLk5R9Kevu/+W+fAZeDt3+r
3n7z9mJ+tsG3T56o12/s66dP1etnMyLwUL0CuQJfHahX36KNE7x7oN59Nf+Irx6pV18vyrIy7/WH
b0sq5b569eIDvhkO1Ss43/HbX+m333BbvDcv6JVO9VtumveGUj3Vqb4rr6gZuh0va3w1r71XUBV+
i1xDf1nR65Vfa37LqOLdzs+dzhblxsbQClFMd98rDk8K9Okv3vv3ZiT8t2bI4C2WZeMAB/yfS9So
oLJD2kS4GSYsqSCSChz9l8jKzrYLBw6iVSOsomt12a58REa2O9NhUrUnIcjF8+loR/yEe4jTwuD6
tA9cFTqg4ZjVC+MQ+M4zQtolsfgb6wvL4AXr378Fs2Yr6oy/x8WbNRJ0XdHHs1tvWt71Js7pi5Bj
h7k1/1NlocarJyZ3sXsEldHYWvoebLuENb8Dv6Xjc+8EE53u0305+oaDgL5HN+regyyjv3L3SWfw
NZBnaxTBJuGIdN4GgBeEPC8xBIQ4aZHg6kFsmrZLsAwR/ZZ43dfFSdFtCsI2iyTuPlEHa7OIafiA
7xGpLAhNMsKJ7STbqsXDVGIpQALjjh+QkeGVqwGY/L7DL84PPDzAlz4GKmoN1yBSvE/8FYk0seCH
kQlq5pLPQXzMWz7+i1JxJXIVBR5VvirUpyQTVGRDg5/rXmaw2CXobS+zIdb685kSOiKzWsvl0clM
ZdzCDnbP5XvM/RDOqa637Mwyr2H2RcvxJvP8TN4rRZm17+HKwGrfVFt0Kod08RiapiNCdrGDowSj
LZoPa8Sy1Lfji+KMYvjA2z7+9j6MPOr0xp8TPBfwg29wUa7ZImlUErrgn+ZocAgllOuaa9BnPfI4
sAMyWGZ+wfQmVrAU4XOXcj2qb5aTEsdDy3wn5dodvE938PMuOX4aqbXZD7aA/Z1ZwzYF0SjsyhjR
zokCwIgG5j4qRu2rCn0DqVKuCrdy/nCNfMremSdBxYZqLtzBpTdsy1CN7C/bYFr7NtpU8S2JLESp
y95hkfZCl2lCyLTByMRLuW0p7lwxHc2P6IhJRge8/or2QBgcXqORQU09XQuvyFjEjYifjrfoKhVD
ZH/Wxgpk4WwTeCg9y596RIoajSVFafrIn1CIel1FD7V1MxYJcRK6FpByBMp20MZIwu2TOZaQaRUn
rSt664Tm7iTCQ27hbVNI0K+4N7ANdNsm/9Bzy8G+20OELNqA6Hiv64kOcd2s+wljJhp+G54PntSY
rcd4yU/qEHOiO6Ff/Tj7lg4NOTS/jI2AIRaMg99yUwtitc0CdgolKq/m5wWpwrMdwsq+DBh/DP0m
7ivN7ME077D48M7GrL35qgzFij2lB8ra92UI2h38/PyqnQB9V4rhqAjASeMzKZz7RgrIWsSAu8kA
jRZlnU/f/ht7/6fIxv/M+31jr9cD+C8yX58blwAPpYcY/HY19QcX3/izjDxV8bWy2a/OR+27Bj3/
pMcUc3cReh/+/qypbJfNSJmEBl+dU4DI/4+9d11yI0nWxEZ/JDNotas1rR2T6YcsBzw8mUmiklXs
y8zgNNjDZrN7uNNNUmRxLlZdB0QBWVUYopAgEmBV9Wzv6+ivnkh6Bdk+gfwat4wEUCR75qytZvc0
UZmRcfHw8PDwcP8cm0aw3CCgwm++Rw9iCwSe540vFRPR6Z/ReOcZCPhAtZzXR/oZJsuzbYU182B0
H9Nv8k1d90pHZ1liLxD8wVBEwSDekyryeQiH9/PSRxodOofhenBbKtYexebLp2pQDXyrbYfZ4doo
Ha/B2+SaYe1ok/8APvTq2IniWLj7Mdiwe0dofFM6eR9uIc+byw8lDtewE2kQWvQjkeb9abMDcXBA
/G46JwA3vP5nfTKst00bgzWS6T7cFNV+A37DYXM88i1bLyGIantYy8+50d6544/7A3dDqz4D0X6Y
//U2kgB//eSq6osdrNatCjFl4aD/vPd+jFZu8ZAihovbIt2xLDWDrLES0snFPldrYrc5t07fjTFN
fnzoxFqD7nsZF/1cc7628jCSsizfBLSHBYrx6opPtt9Vo0ne3l3fmOtn6ORxB8ouP4tqF9huiCkb
rl+BVs1idVMFsS4E65Ks5Spw9JsPXJsfZAmjg5hHmagRa1OmRzYxfz+6PhEPB5OFkkS+QjKhZ+7F
CDO0Tb5ss2YZknhOesMhoXU7b00eUHoXt3Q1rVX+rKiDVTg75lhFLql8x48+uWnejYS67WSrpKJU
qWyCHzgpdIvGpByKK22XkMCdRu4mll4bqNDCm27tvfxDxv0xGPGDt5TGWrbbSsE7i4F4bm4nN8N7
TJKPVc1uYuZW8sgNb1WvZg1TUtHafofVsg6I+zmgEbj/rz/FdiVH8f5Isgy7PdQ+/5xMEzYU3nr5
753br4jB3Svb2LAWO16v76L2/NxKjWzbdF+se3Zde5kgJATVqUSylER2Vfy0wNetK1G8oETqNOrO
O9uvO5zm8102RpIWcV/xj357E6O5098I4adnc0t4+MMZEuknPun5UQvt4estOk1RFMRj1kGuhfqy
bZFzEOq6TaOwUdiyDYKOgdkGbt8UrK31m3E1G1anp3W58r+zz51ulpdDLiSdFYLKh6BSID57T6DG
/d5s60d7f2I9ifi4mL4dbxSRUS+XeL4I37ulKRhd7viZrZVuU523n8YCVdxokbefHf747S9+oalE
NTxKI0Mw2VPdzC1aX9eUHH1RjiXDqBBtqSEg5OpMvIhhcej7CoeEthASvBfUD5suq722eBIEZ5QQ
Gcqd62I5QA/Lq8DdfCOEXSN58NVYYM4koRb93pRVy0CmCWadtv6HaXnZwBnDh5QQmzVFCYF8cpo8
wi1uZCPqq1OqABG4ynnyCAOrKIS/xFKLZXV1rWDLoyU6zSuQsj69KpLkENURhgs2lVKEKX0ul7aP
WAWhxKa8+NCHe5Tc0a7cwc8eYewpgbCVE4RGwmqWGIcGh4pZdYmNVcnoXTXFgP756brWkMzLkuOz
3+HAuReEa9LsT+aP/hGeF5UMTG08rMvwIjVdCTHNSYlBECT0md3IKbxAgtu5VQruXa8qdJUcE6IT
UBkBkbE+rO4ZBhTKBZAN7FV0uJHTGNQEpZCRCVHZNoLrQXjQpSGGjluyyHzp9BE7vAMGprwOit7M
9YlPKVUwHMIXXlgnUwHrcmjOadIlOLA5lcMhloVqCAmJCadg0YyWDjzEhd6U11COqQp9/upas9gS
q0pDULPT+LQ2lV1UmL2WQ0zH/nwnl+dV7XQFrWxE8HCWZcXMK/geAzMNCEXNE6wdGS3hLQWz4/nX
xCCjA8KczZkuM1EM9jegX5ZXI/RN7cGio5hnjhdmrLAZQp/QpYppliui/mMLpvuDJIPdnR0Kegn8
ZGMTxbty6O6kKtEHDFHMEPXmWlBrpQXUduM1UupgrLCn8zRP6AUPpwe/lUaYY/Z6ReHnGDbg0vIR
sg+ILsLuAzJPJxTTy2YCLCbTqqtqhlruCKFSZtdM4Sh7YRQwAo8vKdIb2Gs0p5Bc4FfaNYRarqSi
LWVFq+40mOwe1lC9QwRcDIhwWZDHiB6DBCgksxbiD2LQpItUbjyhqSLNu/PXnzRM2lDaw9xE1WGY
LatqRV0jSveSO2TW9ROF6YaAd8acPavxdQNIlBcwfRC+Mh9RAfNXx7kJMvqLJc3WPFc2dEw/NtQ4
ghqOI4mvImanlqqYF3R5+ABljvXQpa+cgM3GHJgpb7litiHHGZ4/OZ+ChIYVf01kYgmMW4dby7Kk
9YWwDQv9nKcpxVAl3ScjAVJ0QDczKg+oR6bTqcxkmssA3BHaudmQMUYqsDR1ayAcCVuFpKlQMpe1
b9YVJQ3OaHYW+uElIp5vtSAM6WJZ+bPVdKSWj3wmcSDt8H3LvcXRIypI6obtNfZVvn1U6Po7jvsq
xWysAqUUc2o9OnbIFzCloaC7bG2nBPq3nK8v7NOsuSZz/245GFs/BkXt2NrtgBWBsDlCWwaFkvOF
c4Q5r6bj0mJ0upwS8kh4mpdv20383nD9KAO8wZbvc3QjO4jWIiUw9VE7W93Cc8QJKCSMJwGTOUVZ
Lt/GqsUDSpZ+mQrlTEfIULYzXHKKd4LLPDW2R2+4ToCZuzxzCQgNuGM80yVIrvX4gsL74R/40hZE
8Ry44a9PHCCtsF6uKWANTqnlfNixT6G0iS4yMUMEikuatDmKPOTDGmwRZckgfJJI4xS2dU7vILqz
AQz0rRdksbMAEeY2riWwKH5PRUbF+Ae3EAlmtkexREgnRUtywjpt5HR0s8OSzVAhfzLD8gUmaRFz
S/oFdu9BGtv2WFRvKzzmRI5yEHZ68QieGJgBiuHIUQbj40agiyal3Hg76J2K+zfMPOkbHWixbARO
3xQ63XiEcd3W8uoCils2wHjbm3GBwcu4AROIKYhjhihmEiOkjtwZPc63sQR0dfMkcyu7T3BbjNTP
NbGG6IjN5gUJx6VkJJo4nGMb4k/WSU/sSIXOPqZWTE940N17W0yi3nekW6J+UJKzPcZNZd525x5e
IpjlJbckH3kqovIPWufe7zj0/7qHunmH8MYauGQEnhoeGWKJdhiaQX/m/1UQaMNeWK8X5TJ7akaV
c+ccsIjwjsYsqbCqTWKbeMmkdeBYjR0WsxTdaSQijn3NRPvjR1Iso4M0gRRsJMZy9w02cLWoW5GB
5Zgm2VPymA8FoVaO1ysBpWDQZQK41H27bvOc8BnI3eV2jwPCkYSkoafNPel+lDYtc+sHo8WCaJZF
M/bBoW/x8cLOTNU7RhDH1B+JrH2vmDHyRKcRf3jkGdNmg9fOBiFxs3iuTQqas6qoQ7qAH84nuyxe
KLbrwt0Q2kQdmDqpb+uoFtbk7Yi61cbZLSFOyw3c6XGQM+udrSvYKZxvC46KLLk0S5O7SUrbVhqP
fEpzg0L7bLnLTD1b/v8T9bNMEpBl0xwRSndCIWfuRdBg0HlTlosRISYTnelmoFYjMfxS5xCgt48n
grinmMl8Fd4ZpxF8kTT7z0GpXIr9VFAmokwTtjM3UU8fLtE9L8ZVTc5iE4LNdB/hL3c4A/sz34F5
Ipv7Vg6KTJZttBD8wyyNE+9m/9vMmDfbmGwf329bIa8Bl6n/5puK5A4XttbFazkql9Xw1TSyHHbj
fwTCYf7PQp3hbmOPzVMf4ajlw72NH36/nrV9eGfjhwiK1PLhvc0tVq1jvL3xw+fVZbls6Wp7X+Ny
gOfo7yIIBBEiIgj8GE0t2yoIaJjxmoJgTVP6JkLFWbFbF2xU7GDn054MuF2M7FwfjSDt6Uic+v6e
csmEb34EpZlH9q9LvjkrxZqyMNQToVZ3OgFLWd/aUVXbzTrOhZBDKnE+whry9EONFzfbFcNeDNyz
7N/ZDCJuVhFh4ON/EUBqTAy068bvRpx10V2Mp/O0z3Xx8H+KzJ9XPEv90FujaDdj+vCS1om7YXv0
7zmgMqLLatwqsJtv8TORavQK++KvjcZlm1+b53at9fj0HQWBfK3yFQapVnI34HWCNjq8LUQS+1/g
kyP5jMINW7R+E7Xblj5P5uPuwHQCdPdeugkvxMYLt4vtOPVsYykG3ta91Am87WkP8p0a5xqCClrk
vpPCbDlscpR5HF8h5nUe/+qG04rfpRsn09YcmVSHhnfwENY+bVGq0TdO12MTqOSatNBrsoVgkxaK
Td6XZOgntJlkk51p9l5Eo48mW8gWtx+aWFY/WpjkrGs5xECtyFHanxUah40hg86H9mk/wOWov+fA
Fjtk2LQ3brMegj7tC6Sf+yJVzExEM+cuhNnHw3VT3SFmul+awTSvU7cou2kQ1kJSB9Mk9JLIhR4r
Qd+K79MOOpAU/dvcAkQ3YAmgM1dj2J3N12PvZXe+8Yn9bxC8mRnm0UHHIjg7reFy5is/svPvGdrp
xmt+yCTsGKwZ55iPFbD5cXjqb+Lr0RAasqSy5j2RRzM3JNIJhmRmpsR+1o9dFd8ee8ETEvm0ZnhU
TkPWXOlZqjd5jUjJrkZKdvmiOI1wrVygN1lSYixDoXGDIEn//NVMU32z2f640x321SQ+isVZ/p22
GrIovij3fHRcdOCmdF9OapzBwNxucYzkLhdcVHIXXyNKVhbdlfwYSopjiu1KtzgBIofH8GIf1cB6
EmX5hTNsx4fIlw1U+XY3K60TVTruj5OyYuPNltPAzxCv+fNab8zcY1ThbnMPJXeZ+w/XSLZeYcVm
kSNAcRJD6RrzgKNkGsJcKw2X4KDGCw1qYUty7Qdg2tu+ng0ibVyf5BviJ0OnOWji2C+/yU1uBxc5
xKeOeMhFhO/moMu/6capKP7Tejxa7nTdLkX/9bJkgw81ryxO+w4DxHK7jM5FIG+7jKb3DQoQGnlY
rMCWNFUK53Ch/I7DzflSSmq2aLhCmjwizsN+Wxg3RjD1ky4vYm/9+gak4DPWeTGstF5NqvWquFxO
V2Um6U5w35SUJ8CNbrYTDU31rUScvRuhKLL6vZLVhOlptmSzcXhNGFXyHJn0iXKpRduUY+tklc13
yuayUfMu8aM173ryIGrn3bzW7TpvS520LWlSwFWReY5uu44dObR2MAeUy6VyQJhYqZ8oN7RwK/L8
rff/H2hgD58/Se4lj+dA32RRgRJTw8P3r/C9c2ZhOseQBdwEWshdIPvT3OGJW6x1dc+A6FwFJ1aA
H52mrV36QDHuzMuH8DPv787vHg9KeKEjfj6EucKMXO/F045Gq5H9BZEIU9dn/01n92ouVieRiMmx
dVGfgdwZ4wRZ8emJMHrXjNuSFw7zGJiHJzjbWimzrXAiIl9wMtbVCf7OVidugU2ceKuRPczPGUY8
cEK7OjVcGIY4UpOqtbXCyXp1UpjTVl5g+taF8MsVBQuG6xnKR5Y0xU134lFvVz1sKA+YN8ElKHlZ
bF5RHlyJXDLf41yynMY1gWr89GlXO+ZH27rfIxVkh/2Y2c900/7w/GeBMvA3S31mtLm/sXKxeU+I
bQc/o3gO935KbquzgVaMFOMvp5hBWXPaSmS6RSE5zZoxNp85SbYirz+xr8+zq0gg4xwj/VNx/iEd
sAvNJHewNuzWZyL65B1J3SxvPsxOJdQCvwMhuh+UOeXqzsy3UyDYp26JKb5v1I3Xv/CQPt73Xzmy
4f7dT+5+Cuw1q0YrSsVIQ4SZ65L08b+70nHZUsLXMjpgjapa1Kl8xiVgE+sliMp90Evux99w592m
LkZX2RHWCOM+pjF86vclPS9nsyo9wvfEBedeq+nZ+g1fg58TFeDd289f/fcMiPP2V4f/5b/7xS9u
Jc//fPi7Z0+HD198++jZ98+/e3z4ePjs95THmQv2k/Ucdk2cnVPJADyake0H5SIFbz7nfMn00XA4
ms3oVHaUIkumx5uYlW22uJuPCIeGMhQjB5xcJ+mCqk32LhLuSSqsfVlqNnjaewkKJuni9tpFTInJ
lISbYIycVkCiS9IHCPKH2UchhrDajt0OXl7XsAk9vgIBK7BB2FHSc2/R/o1tU+oRSTXf8UGGEIRm
qi0l+G0veVWPzniP6UG5JXR2NFnM1mfAfjDxFxMUvF4t+rmTVrzTCb7MJEn8olqsZ6jfSAfu0PVN
vcA03XWVAH8sZCQ5p7vvdN7++tW/sf1dlm9/c/jbbzh5uLT/nNr4HuQ7kLiHuEHTcYIQRtPRbPoj
bx0EAQKbOuZZp3mXTld1JPu3YjdZZKdbkp7PmwjKVMVgPtPlGMal+E7Qa1ljIK4dsjgZ3o/694+T
B8h0B3iz82l63EtwT4clPptxlxfLCiT4hSp5GJ2CEDFVhQnNERPvonpHUH/rxdlyBEdCYMCUdj+/
VeSGxfUnIrL1MelfD5Lsk96+sc8cjs4OUfFZtmBBhShQw9Xo7D5Cx1hwEPOOjmjL0HWG723nEwxI
BRHn5kxcxZGJRXSbrr1cn0jBTPGz7ZbBIZjAUMjDUgz6WDNydYD94LgVwUtU9HoJXnI0nLUxxps8
Zuoj94JdPBhqfefdvjddYrQeIJRj74PNmkjRVeTsi9EiA+VKeuxciyvRQF/qwo7lULLT0L+OvJa7
aCVJjm7Xx3Q4zvirnrbeS7p9aRxp5bR53PHMUBwtKLdQcx6Q9agNAfQsdDk0Dv9j7UyMNdoFp8q8
gX9CtdjZJYyiut5tdl0edPIukRxo+rgo3Riuzucip4XmdQ63kKWeUroVV0aXzRHWfRxtYSOkzMK4
HyNd6nIl3WCS8B/hWjXrkX94nwtlq6VHVPO0dd1wOf+cQsMe0apgzAn8QwVfv5tvwk1pGsG4BT4+
djYREVVj7W9DnIHMaEW3QyQLHnFIMXwjcEL+Cx0e/OPUOiY9Qmq9E3CkqbCIcHERMoBMy8V1ODGx
KbFVN2ayCCdyZ3HrgxZlpo2eQ4G7jgSmY89oMhkyFsbwcomLf4lwH8iQ9AeB/VtgjQRmBXSyFdoq
SPERZY16NEm68hU+Nge5LgJXWSSPLlTfNXgc+v3qHArh14Ljhbh6sFMTihGoYVNUB7Uww1294EGP
5qA6Tipb0+X5dHyuWF1YDyN/LUugtAPBBY/WYliiRmYKp5eMzkClEmgwlwqEJsb1GcC1Gg94NkHN
iHvPFbESqKOi3GirWq9OoS5ofsIaKZYYomBfgh4J6/1Urh+xNVQqN1BB4Uvkbsrtr5kABiabTYio
AwP5RNNsvQGQGagjeObMHACwYFWclfOgoUbhkDG58IRWWwbf95LZ6OJkMuprrxo1iH5Qh13t2S56
JhKtL/KBNCGXCKSCE6fjSTcLKN9LLuozGStd6wXvi7PpEE/yjjb/Yj1fTS/UvhPOJDD17SXuorcn
bOWIuRxl4wqUZM4skvBvvW60fy/rlaDIUh9lOB5lG4MJ1u4f4b3lqcpdfrIcz/TGUlaR8dAi+B2x
LozGq9l1UiHuN2MxMjYPGdYZUFAPTi7/Sk9Nkwb1Uq18K45KeLZeoT+NLn6nbmnN9pHQAamLvIwr
hBuc1udJJgetVbV4spIgrhy1n2o54eXldUmFDRphZ+Wq9FeWfykD+lNI55zvsQlE0aUT6QJeHwLD
4AY27Kr9mCrUAzluVpXQZ+BSK7N5t7zuhuxbo2rnVpPv3qPzEWIh4tHX69TmcS5U6itkltM2XogL
qm/mR6bIqCznKlNUpy7TYmW9pJwyBuNcuiKwPIQkCxJKBC7P6SOHIV+/tu2/fq3Cn/hXbYPW06XC
euxVBMEwmtbo+YRvD7xNEe8ReMcoEoszpVZ0c8aKZa63K3cXB2AUwruCMCsUsPQiQP51j2UG9161
rvCCbxf4JG+0rirj1h05BuiX0bAIV+FpZIlvnuEschPVGUZgwDm7zUWnvDo6OC5QOg7N7UIGD+8H
GeVdnGZhlOxOeWXZ2jN3+NDJkek/r6o3aM2oB+yasliWp9OrQZdNGF5WElb6cK+4z2abyLF+KAYd
PypIocHnp1jphiL86j52iriZg4nCNmDjnsPaitVApEP/PNXtM/KGybpc8wVRpZtHmx1OpmgEJB5q
9Kw+X68miJjVfIe9hce/g39elLPRdWZoiov56BiIejFQBZ2oy//k3iFrKOanjac0Gp7R5uVYJ0Ud
T6dlyZa79bKkbj2yO7FgK6uxqxOcrXAj8JFeuqysQ9kG2fSF+kN6Ea/DSTW2WR4FA6+XNJQ8c7+q
Swept0Q68mibBXn8YvC4O0gOmkUyB74olsjtpMKVardQZ4nCq80SwTbSZSWg23PRkrp7ew+6Paom
5uXZHMEejsCUoRPGIHZW0onELVKIG0wIfqR8mmFFDodNqqHhCZN+Hv/2QOJQ6AC5JqSgwAZ7MoVz
+bVrj4IvCq+ugIuX5RmsIlDXsX8kw2yaT/zTaW4Dy2kt+lWD9cICQDL96XhNSSE9HdMqZ2XdSju0
QbH3BFMEJZTnTeFuFa70I7nCc053eHgVsBeF7wzzT9A/IBr0eKQdc5358QJ1OpFXeSR8bVrr8MpJ
5g4tj/nb/QFNaHJy4A0iGc0Qo/06sdWgAW6ANjg6P2Rb4l156NpygzxOl29Z2ZV1tTkUKD2vEvcw
d+acHluZQgjf8TBuzzblUT/zKYUsfzG6ZrUcVg8JbvOJvwsRlq0RULAhwLlfNo6sMeK23eyIH6E5
ynnavsMeaUgb/+mluRRejRkQg51WLa0hmZsKjDgayLJoiekZbql2Q8in9zn8H/qe7zfnH884c0oj
cDHCNAFrcksRUHJNRSB+u/PyUr5PVlh4MVquPLO0JTSeyxyy+/2zLwqaWmkma5gY7FaHMmY9j0qZ
rRq1oQLf0zTnZjtmeDjLLTVFTPOWlz2BJrb6vMET/OFA2bA5sZNy1sa8LcupbYUUi2oRHcIHT6OZ
Mm+bjB6CfJVP+ZxPvjYub4586dfk1HF5Pp2VQU2BC7aT2No2huPPG+WyvFXP3lm5dqd6PCtHS5co
3nbSsl9GNkSQ0SIDWaBukL128QTPZPWqA7MZGkx6+CoYpTenJLyl66h9bz/LsCC3n7ofNVR0M9J6
I/i8dv7uth7Xb6aL6enFtK6t0h9s3y4QJWhILqGDJdq4/7enRboYx8b0DIRmQsznwC2TN6z1ciPf
GNPUpisAA8sR8kCMZu3ji7o7hQ6F25A7tomejcK0rQ4+N6prAbqlO8peR3w30N8XpRNQYwUqG5+H
+J09cZfzd0biohReBnQA8YqJK+oCCk6XFbcsxSI6Dhbfts6gjHrW9dLGFuYufM1agr0MOZuCC2BV
2AWoQ+k+//Ph45eHw+ffvfr2ydOX3RgyKnOl6khYT6xZODSsFytgphoqhwlmr+mwJ9HgwWTxhvJq
kxdlrasAD8Nc1ZDr6mFgCUdQTqv502r1jUn15DDHE/q6nT9uJX/605+A7jXshKNETBA+JiBlw2g0
n6XMQgcHYXy+HAfKRROLRJaGF30cEWORg8XRr/qNzA/Sgp1G14gj6kH0ZT+SMhW9kdfl9og3YyOC
1meUUS8W6Babmh1bjdpudLPOoFV8ZhSyKFKiaG7eThdIQ8OoqBSztyRnV/HNGIynuzrowX8ISPdH
kLh81UTeHwf946ZehR+gWtXdW3RbtGXbPPUR6sqwhWgHTQnTQ1/YwgOPoeZVvxtnSSh59EnAR2Y+
Y0K/YeiUjbtNTJnxOfpz7BzRfiLaO9h8jgl6icTYAHzTlFf4QYTIup87NhT8k/1jIwqSGZ1fUnjN
f1gM6TZuONwANWVMFOQdHpG6pqdcZ9BubEhSUAx1lYubJIH+ehLnpI4ih0SjgVN45psnsOi2rC0U
359kM1qh7L8REWvSfBZBA3C3pmaDu+1BwUsdf7A7N71OpAy7tmxQibWubbt1+y7H8aPcz6DSiHJz
Uo2WE/LvXq6jYZc773kwFmln1x3I1aJ8ukpFuC/lN4wV3exHvlnzFZ5TyyKX6CUpqsKpAYpxo1F6
rrZsI07yXcKv/YsJ49qGNeG9r9G8JQIhszxUFnTJvskq4wsSWoQNLmhd/7igLcvjgnOMaorSoLtk
HTnn6YkmKjzDY+i244/xqPMPqJR8znsiVu8I9LI9E0qlO0TfoxXAkWO63zdBKm4ANxPVSRygfGwT
+A3PljKYGFq+jlNZBr8KTWnNSmGQ5AUQqxFpeePa8KNoZY6VLlZZzMo3i7c+g2W8wsfYVuSxUqLp
5+nsWXip78lrnUpsy7vvaRxYv8eEWHiBkolNiCEZzYqwNehaUGttJBU0NzLgf3rsksHXPbwxU2Q9
8E6mzm+eDBc3cNyN+q5PlX0ejcAh3JzgPOpA0IrYHfKSDwUziFGWCPUR/Hvc2bYNOPU53d1UrS12
rHfRhubWwUIIA2cosrrDCamyucokRELdGup7mjvLppyM3GEHl4v+dIRXOTTpakMWs778GdjYuDYo
wz9iL4+6wyF1nr07u8ciCGN+CzGbnNNRdK6wf21PvoZcu64prm0iDhEw8tvsW8hRbhgu5HYg7yXm
kQ65MaPdL8ykwV6lNB3cXj7AfYtb7bk0cE2iwvRBV5E2hpjOlzFbqfQrUIPYY2bgFYlYSqWJI9ug
hQUkJFFB+pvO1QpTy9znx5vEOKm9jiDfoAB5bnL8Lbsc5gazh2jUyE4G/faKR7I37nCckplWCbws
6zyGcxYyXws+mUkMGTVJtn7f9JmpRQoaquMK7xH05Aqvwa9cvQOlhFSh/q4sIOmcIJCwKiJ09fc0
3y37AmJwo6YFR/etWjy6BFQdXQC7KkW6yl9eDl9248JUxios8Lapi4Pq4iUTdMgJHpjOx7P1BFOi
zkEU1sl1teZU1iOysooLIrw9oaSrxgXMRJt9kmTsCInOb5SUeUHSVLK1uZ/JlKqbl6bfVeWGSWux
vDiZY2M/MW5ymK62O9SZ6Xo7Q9NiatzsIulRt7mSyYWHeAwz8Ts30Lqs9dKFXWUGStQPJIohYXuq
fMOdJbdQv4AswmAsTvd9Oqfq2Z/KLZJ3HrTdi2rPXvdhJ8zi3dDgzNHlmKE9NMoXuiIP29wysWIp
gs69OstHtuW+8xqW2xgYbeXxQNuMXHGC5ybNJeV1badanzjTDdTRpz5NsNqro/4e7lRaQqSzN7Im
4+KHbsdNMHy7k+iV8Zoz3lvbPeXQn2vj5VLT3OF8iwpH3ryzZH8x2LvMHw3fuAuc+Yu4v9viwvUN
o5sLrCj0m+FOUzAM/mhcsKpFxXQidu9sLsy0lBLDTRkbXMg1ijrawQTl5kDwyw0NWaY01gVLGq0u
j11zqzSnTd4pG49Fi1jaG100bXjqmrGKyQaUOq9BGrDi2ajjfCy+guzL5dyM+Xqr83s78rwoNvUg
UGu8LVaIl8dgNV2jAPflfNwsp1Pl3eIapw2Gj8lgi+TMb+iZQUzoxn4464Nq2+asZNBNpiipocbb
wqkJ2VOifko8fw7TuXeSCJ8wbpJezny5K2DI2SD0Q/KHYg8SukjtgdI/TGoDLhWiyq7jv14uC0x7
T6KOVcpy2TyseB+42bodD6UW3xRyv0Yn/Iv6rBmV5pxMN7mqxez2PJHsaQbTOZ2MDHCGcw+tnmay
buszOLW4QTnuusb5nkzNLUHUwBwuZEfm5TsacMSZ9yyyIvyLWO9cEpIneidCVbdeQUzrYbUQNIKI
bNF3WEm7cDE7D4WugDoJpcUzw2g6tp241k/80D0lhVPTBGM9fVxusaG10lKOXLHjVj/WdSwrmUnJ
NUOl2qZ+mhOCZDUdvYPHqP3/MO+2i83uaLxaA6GB/wkIoJpL8PGmT7Rm6hwFCaMxd4NwhjI9CTqH
hnToG6U5EFhirT0KBDKbQ5WwhKe/8I6yUYEhx+rIdmPyLdTGnDLwPMF812zxX6TffhHnJnpXU4fv
wyzdQFUoNK7ES6r90NAqvLmRM5twlmkAE5WOgD7XDD/SZiCSX06+54ZE9qxQTcNjsNkXzjyEWXZ3
3PTNdu8Rws6c2SK2WpGM0cd2MlFTj+mb6+Lt+tTFMB+CDdE683tbo5NSuDVE+k6bIZd3Wie8oAij
C3yDMQERhKbC96g+uZu0N7RruEObMdoYL9un3dUOQ27xDc5uIFC4+4b3WmhtkQ2ZAerQd0i+ELMr
+wc2tpS+e5LidT9o3A6jQeaHTqBQxC4V8agIBEDwcW7RCk7nLK78ipAVfiINLw655+Bi8MmBa0MK
LzDmWB/jPivP5KiNH0EP3vZf/QPipNDReGhA/GDxv/3nw9f7DDDzDSqlHi4fomiuBXhbzb4odFgS
ORDcgj+YSCxX8vDlYdE5PAf1gCHgEklrlti2q9mkWFxDE1DBGr2jGakmAlAzqlcdB52GfaV0MBaQ
UIF5mqnpey1I1YR9hDZ/KLnCz7yZ/AtskQId34CS+SLJ7veSz3rJ/VwhvV6WsJWuVov+vXsn67O6
+AvDLlXLs3vkZnXw6W9+xdC4CFKIIiXrflVVs2cL3CW/ms75B2VV5J/fUWw2/npy+viKHn09Ha8a
e3T3OxBEmG4bSxjsZPnizxRyCj8kHzf9BHI3a3kBIhXfPl1f4D8vV/SXsZfQs/UJg0BROeDNeF/w
7SFaN+XAPkQsXh7xN2Kw+ro8pZ7g0pbfjEtAoywxmphaJ6TqZisP12f6Kuk+x5Ma/vimoi7/Ee3v
TDb6E2aT6kclvlnV4fKahQj1enn9DfvqS+vALlQT8Zb99Q3wYLOqxyC0aA4oET3+QjBb6iIMk6YZ
s8/ybLCGqRRCnhgS2DHtNatMVeZRvdLwDvfswEzkkPdGH9N8WPSCIWjRUJTqJEjapoQngEQDkWF6
wK02KsL6d6/Idr9jTW879suxSmEBhhC2SNg7dipaC0NK6/ajEN+RDYeEScJHDL77MWiVaDhAWbjp
vs8RVINuw6t7PELM0ka48WasbgvT/d6QuwLc7aLscqvvgFgIIjBIvi5B0BkARtAH29Bw5ZOC/rX3
uW14iwKleENQXPn3vykg23klWLa5qhkILVidnsIJAvo2dHBfb4bM6QNvhjidHoKIZa5ou3kDg1Un
KobEGtm8tXwzMZnE8Si/8FJRJhHU1p0xW+lF1rDq0eNNcF/dNpxXC/O6GeS1C+f0XUBe/VHuH98Q
77XbgvfavRHea4dzO1fL4cVogcHsJkfxV9PVs2UCrP2fuj334Z8qevov/tOHICrh6T85T797eT49
xczm3S++cB6/MI8fPHAeY75oeHa362eChkd7XS/HM316p+unb4ZH95xH38yqaqnP3ReYsRme3XYe
PX6LTwYD59HTasVPf+k+/Y7H4j15TI/cUt/y0LwnVOqBW+p5dUnDcMfxpMZH09p7hGnk6Skyr/tm
To/nfq/5KVsSup2fOp01Kp+NqZVKsdxtrznNRd/9z97zVzoT/lOdMniKbWmWjHAT4RYn5R9407Db
rCmEO2rC6s6qSs5m5egC5eHpGi1gUNsZi2UWJfbc14YYEmbhpbPo0kkx5kYA0aFxyBuZWFN9jeIW
GjM5iJs2k8tSwDTPR+/QTQgPZ1MMeUHIHDw8jSwmlSzdTWqPvzvbXAmZn+vW4DCp3+LFYqrek9uT
+6hZy5KC8ymMq6hrYQteSjM9St6WjtJvixJLxdAGRBR5J7sGisl0s8YXzWNzhIWOdyEfqO5omuju
mnjbT0zzMcnnRJT1/Liyph8j1+PvAFl3PGK+nE9AZZWrC9R+vfslHbtEIor+CJQoB11kim5Tmzaf
SOHuF84h3cvm9YBTMrh2M1paQ2Rsqx4vW8xavAwpFwjv82E137fHAriZZjBP0Uk1iQU2y0rno4Bf
OaVLikYCRhjUBpq4EiTMkIc2BMmSp6GEMzTEFc3Ed13OXEevEXBYPHfO6OgJD0hnIOvUdOLoHhGu
dpX7KDNTG1vEwWZevsXSD6HT6nqNA8ObG+C+aDvB1bkoYvgyerWogwxFwQZpEcykmEZMEMzFwtUY
y1OcPXxa4G/vhW9loif+fPM844s8CK7qESbasFpoeBW1UC1q7kFBmCWkazUuMfE7r2F6EmtYmvAl
R7UY1tcXJxWFaTr63FG1sCfz4w2yGjFu6f+LX2ZIB9PA7knmwzHl8dS/bqJNmhjohvNoOT07X1Gn
bBe2SvWQ/99nX/QyjmHHBg4v7E6FxlgGzsx+2ObRStvoUGXRbXKnPEFTVqxLW0J34hEnbQn/mq1s
W3YbV4e9iF/i9DsnXbpR4jXnyR561o+4t9ILh8/cbnhtxnxf+5vywDMk3HvIMTYnixjDiPbKFWR4
VY5mG8fYxWUKFEaoDT1bRg+pkagVEht0WyDt8AZc99ukRrgPsniSavKtacpbuZfJSRUPeITbeEiD
E4gaOAa61JH/0N9tB/VMoxQiuczhTdfJV7z7nIm9XyaNDmC56+G3gkWErZuj2RH9KuKyWggaimN+
GJsBrSyYB3/k2guSq80GNmoXzrdhluRubxN4+K7SlvI7+kPcVS3ZQULeYPHhDY6uvem8CnWIHVUF
+rTwFQbaCvzv+VF7BfTeMRNH93suGuekkPd1y89b9vybbfiNEUWSuO+81zc2+vdRcn/mzb2xsbsT
+Hfh10f2fp/A9/re3bM/uS62D3MZflL4HuJ43d2+a9Dff3XnFL/uJn2q/Ce3FsEEb2w64nxFTTez
GgTNkwtQdIFQKHz4pewKbv9u27AjEPCBHjmnWJyhhu/btsKaeTC6j+k3+aaue6Wjs4zUeFNeX1bL
iaGI/P2+VJHP9SD9t6GPNDp0TrX14LZUrD2KzZdP1aAa+Fbb7uW7UTpeg7fJeVcDRHG0rn8AH3p1
7ERxLNz9GGzYvSM0vimdvA+3kIc9gD6EODGs0xbSvLmc1B+JNO9Pmx2IgwPid9M5OZ9bF8mw3tYL
o9t1pvtwU1T7DfgNh83xyLdsvYTXpu2RT9fPuNHeuTOvP+JuaNXnLvrD/vU2kgB//eSq6osdzM+t
CjGUxh12EbmF23U/RnO1OE4Rw8WNiu5Ylur+acx9dHKxz9Us2G3OrdN3YzmTHx86sdYy+15WQqmA
zX+BtmLckByoh40xSligGK+u+GT7XTUKvbTd7vpWWao7IFyg7PKzqHaB7QabaGP9FiIoYnVTBbEu
BOuSzN4qcPSbv5ESTGcujwgfcdE2qGUXbsFrl0gXX7A3sLWFlzQfVM1uE3kreYTO2WrjJ0Cgac3o
byO8HGHmbTf3q2NXMBcUwcIBsikFyEbWvaPafCRuwW4Ptc8/J9OEDXkBW+TM4rwPOXqx4+XiLnvF
z70TiKyj2zIVdHW9XHk+GnVw8qYnUVGEnxaUXtKlVliD5yVCwhdGT/8mA/F//aEJGR9WAhqQVPHD
PAQUCAuTj4n/6Ojgs/7e/VbzgziriLhr0KDhtuPQZAc8HRZKcR/hj25zj/GB090IM0zP5pYZ4A8f
a3MdiB1+1MIP8PWWzakoCuJ767nUQmnF1kN3DVRamtY9s/NmG+Qp52YauH0rJF9Tpx01bjasTk/r
cuV/Z5873Swvh1xIOisElQ/RX75cmZxXfm+29aO9P7GeRLwOTN+ON0riqN9BHIjb9zdoyl+XO35m
s5PbVOftF6/+NwX7QTk9Gc2qebkqL9D1vnw7OOz/m1/84tYvk3vrennvZDq/V87fCX4OZvf93RRT
WC3LX8LvP1drgrk/KZPLaj4h7P3kkvHUYTM9m47mq+QEFGPEyDhBCMnrZDJajRKsALbbHsFZXKD9
DfMGvyvnXNVyOcWMd5RPr0yh6HqBN9Z1hTEtHAF8OlpOq3WdZGcVTCZaYbAqQswtERfjFkFpzKvp
5Jd5kVD8wxTz1J2M6vLzT5NyPq7QP5LBO36cLhIESOxxv/VPCj4cIbrYLQIGQdx4J7Uz5/uVlLzs
jF90bkHZ55KeGNvj/jLGPDysoVvjN6Mz9EzVLMYrZBcuLS+5PA53D6rD9KFEo+QS6QmEkHS9sL2J
DxN0jBWZa6TWrKreSFYWdJvldqeEPwLVYbVOQ+V1WG2hAzinzH4zEOIVojpOSs4wNpuVY8pzjXMm
oEvURo92qIpyWdXr8Tk0taRvu3hiAIphzOesnJyVXR4hNnBSwqyXcxwoNjFhqNua28PZKpKvSna9
JmB+oNNlybTncTNACk8tzMe4orTJ1amhLw2cv0ReLJI/oiMYP4Dmue7pKpmVq1QAVnDcmJUaZ79c
YnXLZLRYzKBNks4T4NUZpea6PK+IfjCxZYUoMviA3cwmJQyZkFgEfeoCabKqkGzQVei7nR0sVq1X
OgWa4EvTuelYZNqYzZ6cYmdxwu3iO68ueWAIObMsYUFP+jQkTCbJD3Co8GTJXEd1dG6pT1zYoJcY
+uQ6WdfcNUK0ubig+Z5rV/FPnEcU1v3k9evFNak8yd4enOp4gQxgyDTiYnH9+nXR6SjG8oA8A3/7
8tmrF48ev/xtS/wRr17968fZ9MQ4L4LKzQEhO+SmkkZDV0PbF/nlpBKbzic+nimKAwnEH63OI/CG
WoCC12BvI//9bjyI6cEAo5h+lYe+OpeYSH1pEgZq2s4vmKHuF79ScJ8FNFZLFCo1lAc1kUs952SH
KZiTEAVewQzlSb2aACUTCxM0xXyrlFyU1mmxFT/dHa7CPQsNW4/60Y/vJqlB8ElvXpXbNwtNCXpP
fO48Z1FGmeg6pfugOJmyHfcIlKyuFxYYnJUIRIXZoBKoKiFjOdJ6A3ywerh4c9bwvNqctKOtap+a
LQ35CbZI5THHTpfLUxT1qadDwaAlMkSQ+jB3mEDrZPZLSx7zMA/rMQjFfOi7J1qvwS22U9b8EGes
XNKnIV/xKBsup/wdrlv6ypkLC2BRcRJZ1NzMF3yuz4FZ5tXb0SbIwqBGzRfIM9SW/iCcSJMIKshE
VLclNmitwOeEhrKIPi4YgKlxDCSyhkPcAYfDrolUiYqtT3rJvudwCUTrSmIwQ8FZNc77ifcnnk07
IdrudPxmVgYGE0ceF6S2wQFnVI+n0y5OBCdLga1p5fjp2A+5RoKJrzPcL4pJieyNvtoZ7yb0ZFJS
DZnuDHkMf0p6OX5OlaKWEHR42+BxebFJ2Bv9+3QXwe0xvYftb8d2kdBlvB1Ry3FKYmTUcjWiFeDg
sOrHUhflGsD1+NvHTw9f/Pm3bHzRkdHbnjFvm0Xx9sGrf4chwVR3/W6+Xs7efnn44284sFmkBsNj
wB4Em8/6RHiKNtKE927a6Y1ugUAMS1C5O6pzpFBxKmq9LLlkNL2gKAQ4PrzhbRIKJQfFJ7TlnoMC
DZocboOsS0OvK/aBHo1xa5yJrlaOlrMpEFA6Vfux0BVCVF5javgpirZl2YyGNsw8QgxcyYFuQ6Vp
kMpJMBw4VUXfQecvx8hj8CNWriCoiPVqOtMv0GoyGVf16uEYm3yE73vJQ4zBoN+dztePv3r17UCs
+qw3vXw3f8REfT5CYBttrIAX+OSrUe3CJ2oPnYAJVuUx4mwJk0BN4zRkiwrO0yeo5ZYX1arMnZmW
aCYogXEl09KFYR3O6mX5TrEHY2PKLkZXyH3w3eDg/q9z/WxeOR/aYXvF9/cRb3h0xVmA68Hn+8W+
BxWBhoAh597mCYQ6JaffaN3U80iJHgjTFs7nub8NmcsQrhQLhML7Hdswzwv46d9yrgljmt7h79BW
al6CTMB/befMZBIQycloMj5H4DIs5N3MmRoQ9mG6yNJ7aRDXKlVLuRCF9h35Sb4LYFS4216PXZ1t
K4SIzXTwrj0FIokCkDHZ7WWegsrg9nZTGEb4cS+RCjK3hp7pgcskuDRBVMBT9dq5mDRBrjxM4jkJ
IxFfvYTvqRMQygmiIEEXJI84mgdhSSkEZ0iGrZmEqW+YtjVz+rSBDgpsm+4tUzvYY9AXCL+4eem/
vTGfTvT641HJJQxBveMMJrdrnDmo/7jTGBhok13DGMOyHo9AD0XAPg7Ex19ybnKAEWcO9k/tQoqL
g0FYK6klE+Sk3E82eKUnedonS2NugKMYx/yQteQSrXp0bHMASJecALerXgIzT6SQKA8g8/EAk5Fx
Ej9lFZrrsZ+ikWFgpw9ohuVxkWZhUZlq+MKfU+8bndmdpjRJ8XAJk+VOI3yMPTKyCojGxMrQlRfe
NlYCSZZWyGKs7+4gSeH/3bXlC0z4CK8YoiySfVHsvDw2ZGgv9Eoex4bbOOoxiWXjL7wag/Mc7OVB
sSJyvtshWcVpkpVFuVwWaKfI0m/QePn4ClZwnebJL1F2JsEFSsPpPfxcs7iWXjVtOStwKCV2vXj8
+E9PXh6yLN+QlaONuRYVLLD3YK2eQXYGJXrBYM44AFwJ9Qr++dfLczTkkOP4YYwGSjnCKW9+6Mii
TekERRC6m51bC+lSw0k5IzVfUR6xkIOvyhu8JQ+ZtloHjzZUz2jEHXH1tsI0iMKI9RTWu9xASIcy
FPyYevkvXJDvOZWFcwRZ80m9R1l8NsWbBvy2CHdafKieO1mXAGOWr7q9ramP6Ttx+hGE7XJCJowL
ieMMsOMYUQTVsgw7NzjAAxTx82gl/a9xAOXFYnXN/TdbxfurBnZ1pdqW7mTZjn4W7ezTSzYmm9mh
L3tL9Cx/rx4BCXob+ubyz2RKzxQOjjM7RGDbHE5CroHPQMev4EjM51R2HBmvl0tEiaRnuE+XfuJ3
YrrR/FqYjopZT0e3LU+vcTsv+Rb5Ubnw1AB0+Z3lyRfJpzEOtUL5ydM/PPxOE17j2VplGWGydXM/
altqBaX70/Y5JKUkpN6O8w/HJDJnUK6yNM1bKhM8L1hC09NrlOQKmJfxMe9iXa/wxnA6F2g5dbPE
ab54g6iuu0yy3Iv8U3y2eVJlIuHjwgP9hv0mvajPUuMEbnBD8cKyXJm7kekKel/XdHsTm3UuM0Ss
Wk0MQuY7qh0mjYaDVzCL6wSt9TDmim+jnNnjkchJLZilWClHhU+pAWgp3btIe053wgTBwdbgVKYL
q5n7wxZy08QtdGPhq38C6h2k8Bxvf2MjDeU8VsFLCojNlfA8kWDFcHKmOFZcBJJe/be07RRGlQoW
74a8atF1pXV09QBX00XL9OJivUJg1xAtPdSiWQDiYPYuSPzZ/4AgJAzlzs2Ech4IQ+5hnm+ZTC7m
TqSZrmXpOMS6E9blN5MW3gzlKSfcQMPdrhOXtMwcd+VD5k3k4YfMGmZGN7O2twdq57j0Z2/zzBE+
9seaPnoYWYWthb35pSTvTBKQfYMDWY/8YrcFyWXRWYIUlqUjRjPKrmL/Xi0ZYYGaymn2OzssW1ff
+RtP9fLCW566MiOzd9N5cvIcoeaoi6wKFG4+/ZCtF/EbBKXBSlVzIqUPFe8TqUSHODQQwCSgD4dx
MUAfHKNtBgm+CxLq3IjaL6Nbl7Q38L/JpP/tJpkswvcR/7SW8lJ9z8mwtBN/BCYvBQ1GGx/avZoZ
uaLMIPPAlqcMDn4mlAMDsBobIHfWmWi6ttpJO5EbLrKyj9hQtSc6CZGdD+hJButT7l7hjOGPmJZj
XojHCAMfnV7D84bSkgJXUqK51OcoxE9CTYbt/5Q8aIwuWhWqX6NAN44rOH8fyS2bywZtCNUqX+Ni
PWjfD/K7wiOh7I8L9NLNoPFyWZexhLkw1is53eXbouDbMILskJ8+e/z00CoZnMhpRacK0nvpsuyX
Xe9gcIXaO38SzWig3cP5hv9rtesgdfBc7/bm8OsnL7IrOtA78/KSn8Z0/itHVIiyrZ2DdTdbVe53
ei7AIlI6OAH1koP8aP/YccS+WPAsegKoAI0WX2UbPDXl00LWozbYS5QuDTPPgC4N9M4tpq+1HFy7
spDFLuuLMm62xTnXLys9JlZWWuUtX14FxdpM1a2yzrNatu5nzuydMmZvnMqiYzSlI7GCHvfI5n/a
PPU5xrFltcCVus2Pwr2pSeWjNMw0YX0gjvp7B8dohsFLMOCV0XQm6WbQo63jt0/3Iw0LW3vTWD7N
G+iXS/SaIe5Of5hH3h9dFXwnl1spxOiWB/3jBgirtWg+hyYRABq9AIROAmpps1SjLTkYgW89e4JO
HtD8erxCztVL9T0Yz7sp+2oG0H72aExWESGGUXU8l3UOpC3f0qUiFi+GSCQoPNTXzhcnBppOuTmS
oYm8UqZzr/poViaUjifz1mSL+G1nszgW9dkyRaPv7aSFXq1LDKCpuDd71G+7icp1Nb6mC2aXstgW
hihOymW4t7RBJUV5clbTWf9dGgUz2uFaYrerCaG5e70wr+Z7pLQgh0zFf0B3cr1oaEkc2ToVPW6i
mUCGDFpO893H9/fhf7/pd3/ulvgehYCHEZD6Zx9Zik2hasJO7ca/4jrNfzn4Odt9NafsNaAMokn3
52ttVl0i00urkiOJNKEZvprcpOWHjx49frm55fATsvRHym4T5RF5d3TcyGc0q7EeI96jQGBcJj5A
EiEDEtnWpyfjL/L4F7wFFBzED7yZFmk/kWuIg+IzFAKTNTqmwQuUThtyiLjj00vyzNbOkjlvp0mB
oQJxWE232Ee7e9rNkuDeWFi3JDwkVEuSwVmLt4hcWt00dlUF+87XKN7lWXu33q8zTnesLzVvcupH
PZ2JbxROX+gbhRselnc2NgyowYuPXmK8w6AO9FeE06u9moNnFGjjn2LxFKJf1XTB1nrQdByuoC46
5Yc5V6cz9nRGZembp9+PVkDCJZZuVVKE5O0qitycuJ/RJcpBM/NdT9erW9g90wS6iqf4JIz3Bovr
DeaJgdcpHvDTpmC45aZVtge+Xa7x5Zjn3+MjO5FWak/TNqdF5qz0UAnzbDSn0wA+1FR7ZZVcfsal
4YARJi1FBiAVnMoxAzZ0enrXcSMBzoyB8x1huKyEf/FvkFlo9IRD/UkFx/owv7urGjNfwzb7XXX2
mNxkld84r7Kxbxcd0xLKGWRxSZx5Zr29NB2txGxhIcrAhJFfOdeAZyL5fjaqV63f/u7xw6/hEyCb
DAO/QgNFz9pwIn0mZ1iMzpIgEGSnOhmfj+ZnuMrc1dW8v86TWyhvMa6oJtfQpdCgJFcMZ1KUEoPE
owpOAeP9YfcJ/8+8975GKgwShx4tX8Lbju8yb1oeaFHxt+M6G2sU38DuQWFJG4WxKUZ3132JWzAN
Gs6yvCkTI9/pNNEg9t4x+KFt8upiRu4sg6T14hyYOtnbg4KMCqdWiB2lfSZD6Ln96iX+5bkf+wJN
FRP4V0wgF9P5FP50bCwl9lceS9oSGYe7QiUqOkAWg9GUwpyyNWS8QrFaTkz26Bx2JeA9+C+i09dN
25tWUiCINgae4Aybh4+/e/w9qJ3Dp8++ftwGLas6jD1Q66rJtJ48ZkKgsPFb+wf3P/n0s89/9evf
7PDr8191bkEd9+9/9jnXdr54oxUffP4Z8Pi75P6nycGv+p99ZgLZCgpy/NOf/pTUi2q14uuZb9dA
8V7y8g9P0e292KdYrskUPbPxqDWaTc/Q6b7HBsharqYn5S9/+UvqwsEnB/eTv1Tn8/m1Q5CDz+//
Kvl+dJ3sf5YcfNr/5D7HyGMYAmMiU1/EndxXPzUhFzaU7n+Z8ulkymLoYjpBtPNpzXgbE42tBKl6
eV5SECkWA6IyXMa0ltpm1fgNZompgc9oBZyXswXoxmyyntUWPgJzdEgsk52r9F+SO9mXz78Axn/w
w+RuntzFv3A9VcsHxd0v8cH+l1ymnv5YUqH8y8S3iKf0Hl0OHvxweTe5+8Pkr/d/Su4e/TDpH2ud
KEUfFHfyf0zztkhE0s7dKLSViVReXS8wxJWK8MLj5V5rbrOiKGyfbg1prg5gruh/f1lf6Kv95D+u
ZzC5ycFn/fu/hskHmX9+z8ZRoeqj6o2hXkGPM//0QLHbA/6iOFtW6wVGRGWN2y623mLpI9ZMmlc0
VOgI7W2ovtxLY/nApR6nPBromgVZC6K7i3SDsPbKUrcs9Tin5Kgm0x6qIjP0rsv2SX9Ln6dBJiiO
rB2y5zt63fJY8ULjOKAGbtdD5i5DE/4zDXQaZDZTBP9Ij0XT0/r5IZ1m9oPsmBiPwoY0EOH4xxAt
PcOLaY0Ru8PrcrSUSpBnG72Uz5267iQYLAH/cx31y7eGc8kbrc27UCLlkHhUzjwBZezW8D3+RwHP
uM5RuZAQ/Po9q8KhbKQTPrYnBtT7RvPR7PpHzvxE1CFBxmBBCX4IgozWKQa7yyqFzbwjd2d0oq7W
q8UaDkNViXD7eDt7Se+wyQQVKBbIqbTOLDe6OJmeVWtxOlI9TMOHRnDcmIzYsRfztl3inQl8XpzR
HGZycFvRfZW8g6rFSpFLgrE5HaXpO9zx6Uv+RNZAL0lvn6TGtDehNL1byk+gvIDIkMI6SLwiIOlo
3BUmXV8v+6AtrFdlM18ZiItuv0tWEahlixem5WmqO+zjjHLwpbf/nHq3R9g+6ytrupfadzOi2dr7
3gc9Wz7eyO/6t7+Hdj7pf3bc6BXOFPbAqkwWJCbDQj2elR6Suue110v2e/T/fBwh/f4BV+7TiZrd
AxW380Ft2WzwxHQkMs4uMoucq9lhQWUkTak1/H6BnNAIvrcRRvjaVfVQi6OJ0PjP9NXhN3u/DmOU
RmNFgqIKzsqVhapL+WWat1ZhHL2lFhD7D2O7ErppEcqV11u/MS2zh2U2tOm269XbksLGlLEbz8bm
cT9C95K3v331Pyvki87F24ev/tdbneFQA3oxEDq9X/yq2E87b7969e9tjlr94NGrgkIAGcxCEt3C
p/cIqkMgCpB2D58/IVH19utX/8E0O1qeKbjE28eH/9e//cUv8FhJllu88j3fk7eUB5AhhklIG8dL
A01BdkuCpSgnnXlZTmr39YPBfvFZ8TnVIV39pLh/75PikySr0JplIjspW10HDs1oT2aVdHQ2HUvI
DCEgDB+++PbRs++ff/f48HFSzt8VmG+eDIZ494/+nYR50iGtDSU7WVuwTya7FNGw03EHYAqKp7j2
837xWZKNZogEcsae6nwCV5vCaNVBNwdYndUSDvkdzeCKItOlb0aEW+bqAXRScrrqiVjiZteSybBD
bcArNB08FB+Q5/Qxn9uw1jrDtg7JKDEra22EyEBYP+hSMsdrQIRjuaQWCa1FPEbwEoIAczoWUJo9
UETjR5MRbAQ4NZhErFwlXegn6nv3YOc9xabowReHD7960O3g3JejiTpgq8+LfgP7BQlWOMYyGYrR
ZDLUprNH1fx0elYwrkC1BLVkiRfCMMxBeifddmjOCzP8gU+NTucZKjo9DyZGh9kzuCpLk4b5rNKD
EFbUmVQaKeZ2lyanxihgAuohxmKfG6ka/oaXtledkYoB+grDmoske3LqL6BaQhxkEfW4G4abzktb
DczflNJKV7AQgVHwQ2geJniCbPHy+ePHX7963hnw/4hNBMLTa5LRXxifyV/snexeuRrfw6dD+7SY
3ONFsefUUtTnSd6BpgkqidcM6Otny9EF9m+CBLhALYxML+uFtmrRU9CHcbooOn8WRBygIjyFko7w
QXwk4msoTOvPIN5IdYSURKBDHTyxPf/z4e+ePXUlxfDZ7zs12/FoBTQGskeGs71yVF/vySzsSeUq
GzrOXMvidSUV20hobkwkAkphvgaR1FUjtK51LkZLOC5LJEK8s50nT18ePvzuu3sUVfjtk6ff6oTa
/3UOzbCFGiK8MHhyzqBPLu8bgCcQkusF5hc309Dhsdb9zh5yO1s1zcVWG0URQG7ENoFVtdBRI0aF
8oJDFKhZO+usMuP46Ao3bpV0+aG3SljJDmRrL5CC+HFPZKhfEo7gFJ1AKWPdBgUma8FSEWQm0KDD
VxKueBVLsJWfNm4k9YQE6L1PThloDDchWqIEgrBkoIPL0TVSGjEh4aBfzb2FadM0En9kZAnGgxbm
cKS0iSyZxjAYMvJZSWbTxlKy94cvHg0ZYOAAWluuxbviRtz/jwJnBIxknXTK8XmV/OOXnYQlEP25
r8CtvGXT2cugidF1ey85EF83HDsix+HhB43UOLrarCbEe7hGqtC9R80IV9x1lwMHB94ItYfYKeqx
SPXV+RL0mZFF6klSquHJ08epYjXh1cASwcPwHqtcydUX1MQQ0hG5mfsgFA4gViXIEvipciz+trmy
6xW6IdSPdNp4aCm+UJA75Tj6JG3T2vUWD3EUApdHdfLR92jFsH+5BgRcKPYcsCxPp1dtjq/VHOgj
oq/CbOVzWTOOsK6Dez8MiqSIqZJyuXED6CnVT24FsdL4Zsheg3hpp1+KFpFxAYwGdercdC/r1bjv
hvXoxuLZunGSTgh2/sgzjoESovGHXCMJi/TL4GnD5C1DFYOa0+e+4Iih5gidWM+R4jbqfF7RhtxQ
fKR/GgePfwpNCEvoThpc5uu7QeKqUe215BHvWr7UzeSjpllfRzWt8Qr6KnKJyR24lwbgZwIkQI50
C9WzpMfZbPqG8ATPe5Jm+g2vCdiuwoR1RjuR24GrIzvr/eM8ln1TlJwOdx+OENMlfE9uxq540UCK
W97q55OMqGuUEW01PdFIVntoKD7vc/D/PXxxj13y2kCbjvr3j5MvEG4u+dxdwuTFNV1lB5Gs8SJY
XFXMbgOuuYTxhgJ7ial6T+oONrpBREpljs9By+mm7wYWmH6N1qsqLNmxq3VTfRTPF+8g+Y68/ebV
/6LH2Ytq/qa8XqAh/O23h//lfyd0o8R5SuBGwHMX1ZiwOC1u6HR1XbApLwAVMvg+koFAyT7Ee38+
lHIGd+4DpYPATBVDp9lsia6F9craLg/pDgX5EaTN69dO2devE6kiEYdOOmZIfLEYWzXiErZLdsgV
rJsaNwQeEbsqLh3+7vc7DkybaRDh4cj/H6pQmF/OyE62PBgfbyzRbyflLPx2+0fQIAJ8ZBcjWvde
o63N0Bc3bQZGnnkjAsmAMkLcCNraany2Q2PXNSHXSQMBmo9bcnyOctIJ7X8I6hPNoijpNWtUeJ6b
T+gqnU7bfO1GfIQ8p7jh9oSImhYCeNXnINsRDhgYSzr++nVH/H9hVFiZOZWRM4fVjJBjjM3KAnxr
h8i4O1EtDwh8D6nFF4QLNTRxJIRGEngm8ouFXGu5i0MNmTQ0tAuQt/r0R5A3XL5AOnjmTn7e6fDK
Q5TVd+WQFya5NPPPoROpZQDBkGadeEoM56ueu77paqdbdHWzd8r1A39kvNZW8AEJgx6d1NVsbY8u
HAUm1izKGb/sbnAM8HqVKxVq55LeBNM4JXnWzlGTDMNNmsgoJ3+Bj4dCP9ADNabFps7uIfDgpBoP
h90GXEp0d9FbAku0WNQLqfPD1XI0LjEh/DkIO7J4evkLbOgbyXg0D2bdsQmi011wfo1QZnQF1Odo
jo1mI5dDfHdtGbwGu0g8DF/OwHgwzCUEeDSOCX4MQwTYid10lsqDG33EefpQDcbyefIgafGipTQZ
A+4IXuTEHUt5ms9cYe8n34hUF/q+kfa4uQbhCl+IfKxpFxjC20sVMtYiRqsocXoVdVulrB5ICpQd
fBh7Wq1qZcxNwGdSQ/eLOX3wADQF/kWaCP7IzG2LI97sdeUz7vybslwwyOISnTUmHE9ANL2Hmxwi
u9+T7URNzTaMo3EEDA98Q6nLP8OYV4SsFXk1vpyoRmXaqb3AQHdhyMY4YAIE+6PvnPcSCGRniT4j
I7TGqpcXcBz9UdX/ClY15yowlXxlIyhZxvqs5eaK4TYmUye8trA6zzdQeFzN35XzKYV5XFtzowRe
CrwXsNbr19xB0MbIPNCxyCG8ExJGJWz2K7b1jpIJO/x4MJMG24YuWxeu4xzh4QcdpyxTSfL4aoQ6
riWjpx8ada1bUWAkTBzIpNno4mQySq76SfdeN5ftnvpLglIhL16/5i9EyVTPHn5V1fBYUHUt1QI9
QmbQ1x9EGSC2E5ML3eK4l6HeSOXwpGhWiKSKdnV1myTF1pMGbKO5KEdzMhbRLBA35HFv463iReZJ
7ss8v0QeIXexbgnUdLQG5WRXYWhz5HXUA7yMqQO0C1cPJp0jJsi74VdSnpmNdQHmLsOU8XrUbkVd
7vrikmnAjtARN2mjcjRVLwFFsBSFJQ21OVuPP1omso+lzMzGBln+umU2YgIh67ZuD17TudNHOOC/
q0BssPSGIxcZ+NDwSIaHGjFVx3zoukdl+Lfn3M6cVExrKqB0CFQtpYWoFeqAYyChm/TwhLoDCxyQ
kWv2IA7aectBXZLzW1PE7yLbvy7JIGKp/Po1forSBQ/NVoj2kpNAipt6Qmm+apHmC3Surtb17Loh
2J/gmrRtT2v3Hm4+cYU5+fDJqd1M3uomUt0IdRsUqsK9Xah7fXWl6bRm2VwxtDxOv2hQKJqjx66O
q6cZcSquUu8rDSWm4W8h9JTlvLXYKu18keV/e3OZt7sAvLmcEyKB5GkOr0FO4YA2kgUSbduxYjcZ
sZvv+UYpHSjV0an09Eey2uhNxdiXQk090diuruXW0KxpWCCsFnp87Wm17ridtuBnU7rmLtDE+Ehz
4FITnnCM93+7SPQFIY6rSF7Q1BpLC+4Z1vEwItM+lpwAzXGrhFABIId16PBNWVbHtTuzfsCkCfPZ
yfOYDs13FoSgYfuLBMQh84mVlBw83o2WU4qedRnw9Wuq6PXrQqZHKnS0bdpeEM57NF6h2wmqtx4g
YbQRZjsvoo5rFsVcmk1GExf1jt+ZPthmPXAmVeQQUScwsPL9FLWDbWqaGMdgHFUJ+d+75tu7zhdH
fu4UgQZhKWBLbVBGwqm76UpjLwxD4+aic3w5rMXiZ1t60enethb53lYt3g2qKUnkX1feBjboKD4s
ku65TB2wD7xU7sYbIflbA+lkh5wJsFjdQH1zdzzBDxrWo3eldCXNo2vfFhCkAvx51D/2LqeCNBgG
etYZMJvSW4f5iIwn3vpDDwxUExzEM3bosBqjh1HPRpjRnAGVxD5AmGk+EhcbptpEqzWxRPFgXQuM
OdT7EaxKY7YrdGngYRZRzj9BJNkkf6GFxhUEEhON7DF0jFfw3Gjf1iDFaT7IxWYMnLG+4Hsq8ylW
hwen8ZsieSQOPrD6gIiU5IEd9uWYVp6eomVsPZ+VtQXJQvvMBC+40LsyuMMzNyLUDPbCk3p4eR1e
Z5m8VaooNXY49+Af1YPdc1VYfWAFbZDd1ZfMty3HPFgKzRg8qxy1DwtFxsZhRYcUtT47W6w0e9RM
jOUYfONZsDxQ1lvJJR6qxOhDN0cUMVahJwSlumPX0RoW54QO3eUuRA06aPS4qMLRIK2zuHYRYFZY
WfwWW7yhoDRLtMqFONqBWa2mbN4qO/g+/Hev/r3eh5sEf2+fHP6f/5Zvw9Xfz4TI7JG/EdazJ5k1
gfia/McmbMTlZd2bmj5PizdnmPam46Te0Z/SHb2uo6x9K0zL+K6cZc75CLlc94Xp3NC674GQNQAN
5QsXpOkECmHZe575CkNQpycbAu7Fwo/5C+3F7UTq4rh6zE2/uMabhkYGMCjqQ4dHK78QqxErzd9V
1Zv1wtWb2ZXgzRm5OymRYNOvqpWkLrL7nAYQETEY/S4/ovsiKa0Pc9laNb9KIbiIR9oAQiweXRWL
9bI0cAZOzD9Ucmy7BnM3lKt7d/a0LuhQZIY7eqi+Xy/HUOSvP+klrJYLiMcI02snj5d+bXrt9feY
kOnkBUqYzEkWhokEOUmnJ1IX18T707n9kjNBp3dglsO1z5n5dG7EQGBmiOtqGhGox/DRMTEulnF7
56CpYEGmseYWM1SWf/2rbE481pPsZD3Ou+nEg0rWssn6YlFbV4v7uVuGkpmZVGb4sJf8xish6c04
VkbSm+ErrxD+IxnQspSywKXe8PB9JxibZtwlQqqToXTT8oeCiClyhYPAI2WL9QJjJ7MYY/ohUm1U
5Y6pXByyxNRMatpNTVvKA26OwpTjtXCxmDUElaZNzC3yGi/DtJnwmdhPfLjoGhoee1xNT+G/8BA+
GAPdTZbWtJfYGYoV5MRxUIwj5l0iQcGO5840mghej+/gRWHGGtm6RJWVnmCavZP12ZjCOcSiRi8K
W0/XSTyLOdBp9x50a9DGS/hbrMSMAOLgq9WrQdf9DlPlwZFq0EVKdm1J9JAadAXP0hLWzwidjFYC
aiMWPpqmbu4NfXwxQQ9mSriY8ZB09NoPYgUKXYFKSOvInE7mHYOwKo8cL4xL5o9pVRzSTdpo9kdE
fHd190lJQc/JUQrcgIoJdwx/yi8/gjtMBPlFLH0tVqruCjYBbuADcUmp3WksEqMh4+WLt/Ucr3A1
3e3ne/zrk+KTu3e7m44ept4/Pnzx9MnTb/tJvAFUhcJGWgy63cmaMM5SHUqKI4TBgcS6LpJXdbm9
CopvMlqRLe74dhq2A8008AIznBBKEB+ZwHNA+mfZ/19e17DiH6MbpqhswnQFMV2ep73G5Nkntg8u
Q/pCx7yJFC4kD1jwPjL7unjschJ+FtxUU2XTyHxSzUKKiazZ9yUNifcFSijhWCn21/R0WZY/lkMJ
HanTfhI8+Uk1S/9xZv0t6d8XCqlt7BuS+JKapugpxlPnBNmc7l2D9Xgtc81UbHw15PZ8FzcTx0s4
SENc0kN0r5eEmDDP1okLC95tLSnL3d/FFM2Exhupnbch9eLHLB4+DZ6saErrpHonJ2gePIITsh8i
pe2ksYvdgwI3NGCHpKZon9LUshyvQeqAFnQtVrSGB0P74JK9Bx7LHKmUK9hXCv265+WluZgLFoQR
isV4tEA41Pb3VfNlUfhJtI/9eP5TSiitNDVo2LVrOtB8ouS1PjCp4DWP8dH+cc95KIl80Vc/jaSy
NckcraqKCtBQD/xTo3CQXs4nroJoq3Q9wjqO/XyN9rN+A+CQ0uw1p0ed+kk/4TGSRmUYy+QvTiP3
ftfTcjYxwQfJxYYtIShKB7a3//HVP1BCVYE7hX6+KSfosPL294cziVHuiEsPvMGIW+ssPUouR2TW
wx1kZP1cpvQXfWDyv6q9/YyclJIpHKdkncmdQb2aYOI4CuBbTcrlUgEjJIyTa6jhKA2nAowJxdgl
EOdovFjX6yBPLZyJNx2hL2D/Oh/NOqqQrobr+cmagIImw2mVnU44zs09MqPJkDJvrQgmr5RZ6l5a
7eN0gjCSpwW9rlx4WpjlyXpxP7PvoAExMLArxMP1qvpmtq6dQzj2zU0gSbpmwFiys1g11L45xdo8
JQcd2uTW0QQhRW5NZWL0fvLUtaDJO9Nb9MTTWCvDIlzd4z89OXx5+PDw1cvh4z89evz88Mmzp0DE
T1zQYD/ECjioRwD/gkrHYVHyx3w6Lod0BBzsR25YCQ9rCAqZC2tnHiKnhNdR01POhRC1FGuWBE/v
4/7EP5B35jxlTEYwKDyor+cBWgOXbqZbMBXxjwCnxkC97w76Th++eHz4h4ff2e8U8T1dkjodYkm8
PPz62avDSHFeppHij1+8iBeHpZw6l/eLqRjfUUD4pnd4RdFabAmhrGKO9PAa5Frgv77Ak69p2rd9
7OGhOgY+/DazzNYLOCtgKi9LJn0qy2rnClz0K45YJRzC0kS7k+Xfitdegkm8GSDRlnIqgdM2aRhP
nrFywfm+z+kk4t37iMgdJOZHUxIe9FyGyMPvUU4PEvOj+f39nsshnmYKbHeqpl1mz4IEbPfyxDnb
WCnixRU2LOl4RWZo3twogeHwtTOx+Xa/8OlpMHft1uO4GIo4iPPAddwwqdkdIw8wFNSRAW0fq9CX
XUwMT/wuzzeOAjnuBoOgsLWY13k/djkhkK4gjwj047Fe3T7xYWINCxHX6NEINhqposA9Cj3Dx8sR
bjCRIXlMQRSLbTabkzIw0xfjWVWXQe+kZ7FXOgPhO7w9oEcHkWf3vWdMVdthR4RcjqYrDiwSMYIP
yuUAvsJfILgccQF/UdwcyFxWgoAWXD5TMedlcWHDhSkdCy/945NvXj759unD7x5/nbll89h8q+7F
4vyPh49ffA8f+9+Bonlw/9c73C01qrP08WvcoOF6dTgAcaQx8v23pdU/JftXvzoNb9idKshTAw8h
9Hm/076IXeGVLk/SfJesBFjBUIyc/JdrddzIubaCDSxKOIUsH2bVaKLyYehrilF8bf7az1Vrtgq7
GYT9tduBFfiNMrTHch6UQE0RzfIFHbszOxE9mYGedE2PCj1p0N2BnYqD+3W9A1QFJZqcyCthkrU4
2uqknDWDQbRqVCtc0Yp7MQfxz0o4368pJJ/xOto0E58yqlgLSdjzIW+FFbsByYJIFm/h2D9CeEZd
RvTDf2m4hn/4L10lI2jYUR86b7979T/hYRQGxabet98fPvkPfJd6sgS67U3QyYsxlsUezjns59O9
enU9E4SmopM9ypMXFcK4Pj8dzef1+PxiOoHR/66ancE3v1+Wb8pZsreXfP/kMJmBRjCvywmdH308
sO5+cb+YlO/uw8FySKf2IWVOezKfMo4SGogJL4ouFtPjTufRs+8RXPfR7x6+wL2pe+ufuxYYTgtm
Zmtsn002BaB5cF5RJkJn0sznxdaPfHobs4dzgc4onfQJ2dLwRwCuSTlZ4b/uUqhXGyK3CAlaTJaZ
abnntnVXNUvqpZDoJSOq/XGJyupyK8czmzTOsIIVh+/MtYH/UkIOyQbiYPvPy+q0LamSRfeUewin
eCHek75Trs3Q9Ka8Dq5bODwKzqk+Mn+kGa1F2qCq9GP519Yt/3oy64zdMQzRoIbNLdY8CfWRafb4
CD469o7taMaKhMUpBGGsvnAoR8e+bUII6vdvZ+LjZ42MSA6qhvSCCg748zxmIPN5AilXh8P0aodX
bdUwg6vbrEGIFtmxlbt5yeB+HVou3LVsvPVgt3Fi7tzdibSMqNHCGrW0xh10l1X1BgSmccphgHNS
O0/zHbWXpv4cucKKNUP33xQhTFliMrrucKwLmhwBEbNjxhjlRX7lI6+L1DSMErh+cX/6TfBKMXrW
LQcq2djxFirTNtJ5ZT48h02NcAJO0R8ojSal4/EceV07bkrrIMAk3h+3z3OfKv0N6feCAUzWDHVW
mspuL9PbdpVFDm1eUzqWY3+S2g8IZlzzloreq/NUJfWcZ3vnbrdFMkhTFt28uXvzhaSjDTgbpLd1
Own3iPltpSR//cR7jWwCVhh7mrzL6ppxqZyvL+iyKotVboN05LjiLEi6vsT/aNfzAHpoXl4GToLN
KCibxkIlVbt5wk9PkLUsWjrkxLoi5QJ+29ad1oVE7qvw5Q14D1T71bWZHPw4jfGczl0jLHQXIlg4
iQYR0PFwOl+PNlPhBhMiVOAu3YAO63l5hWGcpYShez2LkERQtGXocfAFwppnDqCfwq/4+2jvfj+a
mc980z7R7z2G1vawYy2OwNzl9Hb9w1ySK5svXG+jdnag0VImz7sJO5q1Z+rwBQwtZitkzJJ2LdQn
s9H8Dado8y8+MU1IOV8ZgRAIEBI1W7LJSBnyylhK1s/QQNNMACdDsjzv9DZc7lPWMo/2Oe3CURpW
Re7z2onGFe4YhaV7wmvOoTsGRjIZI9p6czxudwS1rnvcbc3ISeUOcFp7zZsLjwQ6iCJs8VYgiHnF
C02xj9Pa8wrZqAS6ypE33HSQNoBbfCT7doG5AdF+Cyb9Vgf71v5Cp6L93aGxXeQC8StqGLCSm6kI
HadUnS7pZGT2oqK7NUEeM4PHCPGTrrd1hicNYxfwFFE6yWXeh3keLjL4bFtmQSl2NzmInZrDPX2H
83Nj+o1HKpXONulyDbileJyFOtpybzYdtAMTQhD4uUHx3hjmKe0HlhKnxe3H9Ojp2OkNn5GNrabl
oLxbDyTQoTZUGS3PmuYHxOALacK+Ga0by1iX8UzY+6h/4PneN4R15+3TV//OwPozK799dviffvOL
XzD68fB0jY5Nw6FBkmW3vGoZ8yERGIoeHwynP5aO7wlHSF4vELSFH35P7j4YoN8ZL66HkpZJHJwR
AL1jeFdjOUYCXSXQzLVB4Hp+/eib4bOn3/15+PDlIfrE4L/Db757+G2nDSnMlDA3I0PWpxiHSM1v
RBPfMEHpX6Dli/WKYioFGOq8mk0YBUDSUxH89ulydIaTZf0zTMbKSTlFV9EVR6f5nnxKjnG1njPg
5H6bUeQOxUhEMXMNw9bCGoEDx6Q0ThUMQKpdgg0rcJvkDOh+aX7YKEvRDZLWi/oWBXQgsLQm4tZo
uYp01OhUBgRiQX4DPD355nooH4avn5nNvlFjtoKTMG0T6P24pYNXqsR0f5h3cz9q5XiHxiyyZ7EZ
zKIxnLaswhovT92K788M7WZq7LdCM0xPbSlUyHTX7G8Ec6BMcq0lTIWR80rcyuEOHbiO15bQT9l1
MCnzyAyjoW9LjVpDZh5GMjkrWLFTZrcMWG2br7suNR+W3/0dkOx8hqo410Qzra3brMQZ5qbVWOco
0nx3c7nfDTLGT+erfMu42XzePvPoWlZeA8eVC1UPJOtje6baJzCTVwL2Mh7N6aYar7JAIrOckMQx
CdbazdtzKOOI6UsYMveCuI1/Vgtv9mflvPXSZ6Zm5CbTOC2IasdtYF5QW8+8vJS9ZKA7Ud58aSS8
Q16qrg+1HYedMp85sfu0riPh1/YT3dNkr2MQCCD7zAUzUo/WWTmindA5GSd8jduSn9kOHje6niFc
3vGFFnscfZEY2AhNZOwMukVM8ad3NeeVrRPreiCvI3XC61bRh5/uuTW2T5Y7U0787rbJCmdqASrH
El3ihWs4l8QghT2YwtLpl8jENEmTO8mnbfmKMSWU4u81J9ePH+FmUgaep4bS5JK0XJiHJbvor+JT
K9kuDEn4b0sYjqY3r+nPfLcFYLbhJOMhw7GJ1GFj0TUaNBU93rB0uFvy591E/+XuBbK5fSHJXsJz
89GnAT0YuWNcNYeC2Bzp/HSPlYj4bNxQoBwJYe/uRtZ2wqC4w5D6C0seNQ2YUItN6bVFfps6BJNR
j1Ie+AOFGRFBqIn1xQkwWMaK9ISPDvv5DnKIRuh2fInoE1mj33ns5thd01EqcGXvQYrM2SkSUlY1
38hixJSgRMsXjsuJDE1ItyzPCDzGpaBCxzl0q9rBmkBCZvvJF3rZBgLZCOw8dm53N2b5BB1gKsR1
Aio4WzGlSvRmoUEzPKU59n13kzBqjn7vwvt4a7M6Pa3LVQyNySzM5qandfC64yr8GJ4np/LY3BhQ
ivWzdVlrTmP4XpMaU0HkR9+2aHK9z6v5nt1CiyR5uT6pEal8vhI5wHNI6BUjX22tLjEdW7M5hQwC
sTNlqIUTeH8BhA87cU35QYAl1xcm7RSd6jB0DdPLs2ocZRLOQ30xuua8YJKhiW0ClMbIZLs5hy0F
ONhU6FTBXLukHEblalwsFl++lxzj7dZjAH6hbJDvItlrug/A876GnujZIwIPJfXgG7pc1nTViamk
l5yX6yWcMTGh8+w6CLR07QIOemWc2E2/bwWMRHewnRHT62tglasheQHSjmxhfTVj9lUPdv96gi5e
5VU5TjcirYXVSbR2vYazhAdEosMNvtfp1UNLYWYw39hu+F3eTqhbOjD+6C4ep3tJ96oL/8EBBmcE
f0juRzFD6Uk1Wk6eoPFmuV7EoGPDb4wLW7/9VLbVxk3Imrv6pP0w73KQXfSQYojDgUgS3yXG5otq
AmoncUEz8uh0NjobWEuhZlxfDvFFs/gE9qDhdA7H0elqANo/HI7mp8uI6d1ZWlLlhI1sLKALCjiT
fqoE9lcGOgOhmg+ybYU5LzAdp/mCpekElvpsdN2Qh8JZ90gB0hRsJtMeJqVaJtTrjfumaYwQQNxg
aRNA52KR7DfROw112m9nbRFGOMPTJv2dHeSUUF477F/nzXvmbsPUUJyybXhcDbWDPe9txN9GkHK6
X9ye7OHHUDrBQG2JJvIMmxHXniFZXofDsKh/eIvQNOJUpdRmLBGMGUU/zMkDukWGAUddNOJ2qFhd
y8StztAnVrGRS9FVh7WBHGkXVWOcFcdMLtLHrktekj1aew1x9JIkV0Tol1fCIlCQcBsyk9PbCcEq
9zD2hGHSWQhqajzOTwSSnVIocvop2bO9Si7qs4iRol9eietrw9ILb3hzbk6E1mWAJJLuHVMcCfkv
gU208UFGQgPxwIwYAYXkZIRGeRoRiCK8oqxzZFuzMH33n/KSiOeQlsyxPLXaZB75SHs6sIOMFDJL
0fyOFFqVV1IP/opo3lRss5kNGSb5J3sd0mo9HFdNyyidETF2OjCAx06HLUgh6HnwYJB80myXtcjF
9SdpbRDOjQUYZyXLE5KVnA15FGI22XrQbjFKFuXik/37aLOr0L9wOMQYfAwYhA0oXYkavaGSFW82
wjN7BCLI+HEal3g6elNyPsllswZMfWHvvLLucHGN9Ul1w0VdrieVBMF380guEowuLJQQEix/QqhO
R8qjx7EoTudrJL58eYTlsPxFs6eFQ6NQ6EpNOLnjEZC/oP96PcAQSXvZXvdalpDlq86tzq1ksT6Z
TceUKrw+Bx11vLb5pWoo0XGUkmFD/kX0EmLteuAbBtq0kkALca769GLSno9FlsOEj2CdXbo6SM+D
0sXQFjxe89kP9jAMFyF6ibJByc7rBFGXlq56tpyWGDzi24X4PrFa2ty+AfInsfn82gXFSBiMyNOT
6iKEmuCr1MDVz7HqK7GxXAGCwvN/wkUqSCVqppE8wLCicF+AD75sJELccSur/YlhBrqZflPbMyK3
JBgD5MOhXJC1NG51UfMrAPayiAmnNQtqvC12+OeFa8o1cLZJ5rq35mZemcNEoe2IUcFlAr7YOHHS
oEyMaagLJxdNqajT66kSxAU2HvURgqZhb92MjQYyf8M91ik6YmaBSPJlM1abVMvITWDzM/NBQ3OJ
dMY9wdB4Had1xfU9nTOUta9hzwnq35OPZpvdO3BVda5oB6evYc9WgciDtfEXiDpqPXnW4jhlEk36
e7QZEE2XF6RkmqVXZDFy9ARJ9uWsYTWZ4eWcy8HchA6DRbFkemxI32CE/SaD8VsR+6aLwYzHCCUE
cm/EYWmXV4GTeCcWcO35d8lExs1DoX3bc6z07dt2IJ4PjQorJZfadc1wGo4YnPtMlxwGco8ux97C
8ym4WjJKZWNZNYj1xNr3wgSrWkn3h+5X67Oza1XOFVsVYZOnGGSxXpwt6baup6IF8Wi4wR9EhDSZ
ievn62aXOipn5bWhhOZKFAuOZ35zT/nT0IDab3jp4+REHBdcL9PyagGrfzU6qcMo9tBDqqGcRmAS
VFtHGzfdg+yRtbupofmeDxHwa61pPxjrAB41L4+nYueWlkCVDxxjkKQYTEwRCkaxkW92Jhy7rlGB
4BDK96NoUYp9kaYsdzldX0svmuytRfGrYq6nli3uDt5HwyF+Nhx2mpULYBX8/6xmf+1hDr9L+3to
4pamP+JiFNQ8jmTKtJ2m1YWqSx4YJojF/9P8g/7xeE559aZzxGqCZ6DuTTbVpzObb/Sdro/4m73k
4Lidwx1vU8PkfCePHs19ZrhWv+ZIs0dyDD+GgX0t67fp+iwjMA735NbbcBafIvHl4ounpIxQm7XK
r+lMphJDPX1RneRVUJ1GPjLXCBoql02LsnAei20i32kE9dH02JO3WShwradjcYg/6KGLR6G7+q3k
4YR1c7m4odxSOMK6hA4+Ls7IeDmau9kdRrVkdig8AaDeSNxFn4GOfWRfeW4xvcxVGoOxaDWoF8wr
TPtg8kSTHeRkiq6nBvSX/hoSIo+MSnZ4qqzGE/K+dPVqZTZ/o2+xPDkIXamHBIhI3nT8YO/AGhK0
V8h+dEjBq0QgoPhrrJzDKadrGFIJNNI5vc2clpqmOU6gZr99wLI+IEi0Gc/7g3z8zJWo+f7I/eJY
DTuwjHlh+k27G8Hm1pyNWaqEfu/QqHNNdrWa83kg/lWnXf/3Jjj69d0Dt9XGLZS5EQ7TSDgYEBi2
o3U7zdgB2HfA2/pXpuNqBMpoiYhjkN4267eGE3vOUCP+RAO8a7e30fgsj82QOC6ZG3Oa+aycm6Qc
iYlBZPemIEhtAWJrlXR5XWPCMPeWPOrI5tyBy+K3BELixB2rZ1a70DaxdJfFQy/pnuLeX8vfxZD/
hOfcd3iuEHlS3jzfzw0fENT++kT97bsIkYlXbugdjv+eVJNr/JfvhpfYWrdaoj7VpR7MRzMq0vey
0TlJHrltaUKi/jyMOyzeGonhOzRy0t6IHCByqSyOpCKeOckduBIlRrQi2XdsRY05N/yLKVddMT+z
p5ANnhJybgl8TPABcUaghcvTphqOGzFuyHLB6hhUWuy7cSRoreYuX4i0SxrtiAkLyORToCe3bm5r
e8nB/v1Pc9yX8Afx2cOXh50d45e2uJ1Us0k7MfP2eKNgnYatbNqS3UUrdBALyS0PDa9aYkIhyT81
6UuJPaTdnl7OkC5IFx7YVm3K4LKyzkE1Jr3hvEpkDK9L+B6jwh0JiF+h73ApMHon0CpF8FovObzA
mUcSDvGo6fjkHMTzjueN6esHyGiuM4f4Y84iwZCZRu41z3BLDftCPQUd67LurYiNXcspXH03Qebq
kr90lw503Zt9o3AP3W2eo83Toj1NqksSc0AvJttvwquec9mHOZZ9iFMZX1CBxmt1Ad6oQXBjo96I
PDho33zNBugVJoSaVWfTMXIdYUwRpLaD4v4pbbon5ay6lA8PCrJosXl1Jb5N8gc3blVhtNhUC90w
9RoCl9NIVjZtayuJe/F9BVGP2DsIgFZ1PqJ2DI/n2a85uJUlP7EK7w/soQg9tuhGfA1Ml+C+ijmO
OG0W+2yPR8sG7kBarxfo0Sv2AfbyxRu84JHGHZnHuzjiYw7jGeU3X5/QxX63YXTpMim66uhvWuxS
dd7zZpNyMizdLcTcNTeo2FeopWPnBuPtejp+AyIP/kO+aSj0SnOtbfz2JHbVd8y9FfICnLczXgZy
Rw4sKJt4F90e0eyGpps6b/TYyTiX4sK+urqCE3rqFTR2zfQHTKZDt836fR54z5n//UtCx33/XjLi
Y+APxLTWsMpnzq13L3kG2/0p8KH8affXyJ5PM+V0876zBEu2mzQXoK6ukqHg3bWFmFl2P8nvHjQj
ta1HGP2IedsLI5rChevutymextvaNa2CdQgA5Rljl6cTNUezMBOXeWKm2xPyjVFRgcg9nbfPX3Uw
KnO0mC7enL39Pw7/n38gzLcOP+gTJZfVjKl2taCA1cTkMCA8fTX4Kv57p1OXwACr1aJ/797iejEt
uEBRLc/o73tceaeTjXOMZURYuDcEC9dL7u/v/ybxsOE6rSm5NgZ6Bmhy6UHxCaLJpZJSYHE9HJ3Q
fUTmpJpSSU9pEkczNz+YlGZLICL7kn/rrETtBV+Ih8pfmBZ/gTlNKIVEeAFKt0SOdgCybnFN4oqh
9BFoPjTQmob9vVveKoi9Nx4J4AUpCpTOJJtTT6YQZUKPziYDTCuX5c4WjZ/A0Kc/6m6qUPw28SjV
wZhJ0yBfJqgDnEvMuf6nCE7pgeRTcjIfOYcp/hgzwEgKodQ9UeFdmWN/RdcxZxLF+XJyZD9G/4BT
/VTPR04bhj1cbH+qwL7BOviTwnm8oVJ1ZmjUaV54VerTDTVatvDqk8eU5s/jZr5cIrxL0wwXPtZW
4PNJNcbtVfY7yxaaJyDsBZeX+Qi6wu+8cdEjnhHNXGXPm5FxduET4MTh0FVfbW30SisSiwXz2cPF
lBd9nMcRrlJSOZj3zPdyS+d4qUgBclSpROFluVKd/CWDR3x7iZ876AKSW2WAsdJUeDi0ZS0uSi9x
KGXIgOTX+hrrWtyDOj6srPUZ4owmA1OBYmoUovdYy6afUczUpdOsOKVXQQoWAhOV6HRLZytjPfQ8
GJqiR+walkof3TAO1bKuSQiKmc6Gw/atUwu9KWm0pobc731tsC/CvO22s17STakHb1wpxmB1nSkZ
eqbKvDXfguQ6IZ7B/cZjVmYZ4tQY/KBJsRJgVMlLA5Z6ZZnANoRkvMJcGUAJBENcYvpahMRF1gyr
ItpGsPyI26W3VMD+mQjCRWgHa2ZlMbgwlLUEihQM+Ri9phEjFdB0JWZJlxIOYmBrLtsgWXoTsbLn
UcmlWbxjvv3OnUqUWJFRqC2SMKYKgZiKjKNpLwCVKRR4xuoY56EIsJkj7ORjFHZc9U5U46K7xtLX
zBZKQgES6EdwBERgCuQAR+zvR4ggkk7LsecN/kLXTpjEbjd2RSm1K/BUkcavIm0nHF6+q487beBr
Tfn9cWbcn/XZdFSH8y49i396s8lW6VoYlaB9SDdhjnYGCWWMwXSM7LSuDEX/8GacyWxrtuO4uudZ
xLtSZNCFacd2dKLMp3m+sQlRPNvqT0mFToPKJVumD+EU3TTTL8ziTygg4EGMm7qJOP3PmtBzfg2x
zz1amz0ziqkEOv9s9OMUPT/hOA87tiB8GYQj+JcPqv6ejgNcz9/Mq8t54YfyiYjXZuMy3uoWnNMy
cENmHSHc1zZrC6YmtwgJoUhVeSQCDhXEO1xLnm0IA2uwdtBmkHt8IxYWWhG436E7LVUc399jGys6
AV/3Eq+oyVvzDjFmWcjVbRrWWUMY5JEAAZ8JAmCtWJSZoKpuUsE722QS17IDrrLNYh6dia2zoVfK
kgRpZD2j99RiMV0xfBy8nFTrk1m5h42iuTlID9SCFemkD6Pjhi4TL0cCnZRC6XgL1ySik8xm4tHN
V2JiApMjDAfe0ol0klxiNLHWhxyGDkv22AePJ2U9XnqBBuakRkcd+uUolPgAo2P1UwY/0c56YZpY
svXSNJC6ddUAcQ0hwlUjvgFiYXhoiG/ROxxZou4GCmM3FY96Q2f/KEHPcvH6djUAmDBPA7Arwj0t
sOZw5KMCwMMswMUDKsNTv9tX8YXn3287un2I4NskwpVz4LxqW8J43JfN8iqPnIdBheu4afscmnjH
0iDtXlxn0F6pgtcyrNhQ8C65QC1BC7Vs17Z/uGEjX+qua6bwKu/EkwQSR23KFNiW28YVyTrjMbG8
LUw7uF8Ou1lHchk2z86uYPY7o2XDekEobsuRKEUao/OSJLoaM5QVgoNO9PbFq/9Bkt++ffnq/3bB
BbF9PVnT/olhNWbb9HIbG/NR6aQ1lnTGHbud2dp74YeqnafSGbKgSZnOLU5bioA2GKZEKcyTBah1
cOpB8zrK7RSL1qljie3Q5qOGhmJx3Xl7+Op/RAO/PHr76vD/FQu/5v/l6IXZ9KQPGzWm0cGnfBs1
garflbNqQRevmIW17nTo/lO2jHWNV4FkzYfG2P939OP1Hm4i5AqwPtFcux2sjpZtWRdJcnheqnV6
DwUddH4EI+RLU7a/n07hK5yNvQesWEomewSjQJ8AQX3m69rRnFDG7NVEx6kVQ/bmFqxgdV7w3YOf
kgY7SJXUdA3x6d79/YNPI4lp0oPi0+L+52mHLyBg3Ma3Se41bmGwHl0SAJHpbq64k6idgO9OF9Px
mxle+rgXGiFXlJzYBnd7IbLL1V0tgV4AOMf6R8rfwXrQz/QewGrSZFr6ayoF0r628JNrPx38Va6v
RSnEMDRQITg/9hKvlez0EnpTvZpAVWkxhB99/KMXrQBj2imwd16lNWJoCl9wJdx7qoZ+9vlBj5c3
ZjqeTJdpQgWGFyXe/59WfX7K7aUyXVjJ4rrvnv16CKM9GmOwwMRkPp/OpqvrjnSVV+De/WIfVQ3M
/QjCxvJVD/1hEGlkhFPhDf8Wju9NWS7Qcg1yCPMw0oTbDOvcPVrtSd/kt+/Zx8W4ms3KcftrTpzu
v5bWz6vqDQbdVIKCstijXIvefC1hPXB1kgcLa/qrka7WvCr/IypL0b65BrDloT8UAhkrX8jL/4+6
d11y48jSBHt/rRlmbH7sn/kbQi4HAQoJMqnenl1YQdUqXmpoLVEykpqq6VQOEgkEMqMSQEAIgJmo
GvUD7EvsY+3rrJ+b+/FLBEB1de1uWbeIjHD38Ovxc/3OSAqpevflctlTJN2rBy/h9whLqVouy20v
ruXyJ49UOar9i+we3u/+oPl49KRFNn7KY9UBKz73vJLucfA5m04r+KJLYqWGbwuPVI4rV0flukrV
Ua+DTgDFO2mZIRAtscb1p/XDrBeuFdox4c3ow6f1H16+JFP+D/Atv+5+q1baq2veQOWGqhhKl/ws
vhl9C/8NK5nmvtnDcJv7iu/9KToTGzZHQhla8QzccM45bAOkNnMN03TCiyPTKR+GoslTQ/4PyfIM
jjwSHwlvdPhKV4vrcRlV6yV2OEvXwi5iCX3OIPq011aDSqgqXsLRXqqKX0JV/bg1dBWIcq/pa66E
qqbiYHtNk6HL+FU5NK7X8EVVQtUzG2l2N+GwwbqXqBeUUHX366h2UDcqoWpPvhEnLksBXG3n4BWU
0g1sC4wL3xj+GLz2eukGwlINLfTCSUu2ENTebPEi3BYttb1iurqh7qvpbmKYk+V0TTk+0g0kCoaH
HXYSmiUojn0+JzcG0IfI3NNR579OIZ5cNHXabYafXqK8e6lqbAvgWIp5L/UF+1KTWXTMCc6PVOCX
qvh0fYiJiBSHl7qsf1EHZf37ubY7I9UNf0MY5vnP4N+zS82Ke6lq/G5qLjghIr2ghv9S1fq9BAq+
NoJaWMt/qfcbhgQ3TCi/1IQBNNeThuL80j8MqBRNrq99qSsY8Y8EhV6ignupd53huCa9hrWgl/oD
DhS8F39AvfQ6VQEYTMM54Je6fFnfAMuaHrW89Cu0fIBf6vKAH70if9K4vHsZVAGWEaTFXqqKfRlU
0pdHVCm8N7wbI6yQIvewPgvFIUSLhy81R2FOIoiTyQr2pe5S40pEy6DXwCup5t8RVwxGPa/2O4hJ
hfB+wW3uldVxtknY2ipFSOf7zSJgm2z54Wy6gQQSIymkGQzTz7ffp1ggVY8LaXoDExHWC6tJIc09
vXpJL3st9Vwhzd/t5nHVsKYqlKz65lXveFVTyJsggPeYLv8A6eC3Pb/yjl9irvjtKCjr3Sp1OUFi
l+h90Ioq6/NlEyk4eSjnyMg3tJAoq2+iKUjfm20vtXbycmRLhZu4XoGeAoF1IH3042r57G63WmZO
HqAtbV6csKfxu6aoqZ3a1tBysDm9Kvher9b0NizulYf3mpmYPrQWh/eq+DvRdPTSxd17Ta9qs8VS
B5Mr8ftAMF1WgVx8BoEVt4jk88PbLAcdxXw/M9wOA4IDlgSwS+Zv83PN2d/KKQDmP1TbufJLb14I
84nUKoAs/zDdrnuJ8kN4Ybo1soW0XM6dTFaEj9kCPqckg+klK+kCHl/CITi9ho/Z98F91VrpNlEJ
JevUtrHDCkVvTEzfa67ABfwqr9+/b68CBXSVQ43bprkKFXBb7Zd+5+f/+uN/wOzmlhT+/IePj+d/
93dJv3KXSoh/QUr4hZ8nCFW/ZSWq3w/IHL39vjEHEJaXUlGtTkM041cDgQcjgxfdUbnU0464xSJD
wiiYwZCsMJkNR3msUV7DPaiQ5ilsBM6HyAW4uG/t702KtXmJnGHW+/Hjm/P/vdcfgASDMWqBZVY6
Poy6qkwmNEjA07HT0zTtfPk2zjrNGpf69dP2GVNGvg4WSwriMnbZNAOeigwWkI8OErOHySGPTE8H
VQNmj7AZ/S/PR6huLwFH+IJ+G57L/PGC/ii2294v4r1rmQ3n588PMjPTu+oZzutUIMFqVhzDnkdb
fbkxIhJ5+Te5s5IvymJuftFhYR/WdfWAuNWYUhtHkIScr8HTRNrI3EcHZJbZFAyU7ltAC1hwsPAs
GDZvC+DlbzG2j7thFs83RrJpCWAa5VwPP/KPvG9hy+GTIZjKOkMYTIBta0D1B7cBO4qxHZC2t7t+
OY8dW+WLcQjLAIEOXj9xwG+gs72Hmy+DAyaNjzNmg/MFRA3RCR138XwGYXFxOnAahW2Jf4Vpvs2C
4Riremi+lXtD99w5ZN1HCQTeajk3b5RrABlK1V6/lDavApSFh0R7GFejky3Sg2NO6WYIC7j/cj20
KBTt+w8peLgwa2YXas+Do4OEF8OzBlnsotqdl/PsUO0RSpC73M92D+Ws+G3Xj3j395fZLH4iGV6q
YAfxKmGu+Xnxab1fLsmdwzz8fvL+FWSz64cTYtb0RQ7H+Xn0ivbLYh55goQOkbS6vZQjdONye0O8
GgA6z+59MZ2/MRTqLYiW7aDs0nM9HUNEXKzAT6Bhl/4b9l93RG3MecXpfgM6yDrgjKzlIHvsNwP2
aaDIaO90IpgkYzj2kyTJmxDe2o3zYBc3fRCiHRiTK2/e66K4137cJ0/xZ04vt5JMjyKdcVOPl2u1
vU1f+bAIWAKzd20xspwMqtW2vAVhky4bd7jTqRKAZG+aaXe/lRolVowatFu5H12E5iWxDX6easEF
SJUXws+Yee7GWAlMxyC72S+MjAy3x/MBUjv4yYAe9mJRXmdRZhO8u9U2RccIMPvDB5flDDAfqkW2
cH5AK4YDse/nwrsifzIAfRp3qicBqu6Sp3dZDRGMOaez4FU0XIGtSZBIPR5RT3OEnPCkQIcSDyir
FJws8o/EeO3AuE9z7kaLqThqqAKJ5SBhIn0RXvA0Uh4WVV+q7mALWI6EZrWYt4CuIqlf2E3iz+iY
/jG30QK8TIpT4PBcf9vSSgi+p4CHab5A6EdKwgEYKSfiWDgP2lTN7oRqNA4L44awMHxgC1lsUxrT
VcSRs4s5XovYaQHktdXcDh9keFYW87FLQBrgp9KQW9vrf944uZevoWAxfxNwc/14+pnb1xW8dLIJ
th3SdhTTlWo2DDWjAuCDRL/813YUY9tCQGklD5JFcY2FKsQyTctUBGlq/ktf4pspnoMw6yk2Gaeo
9NxwE9wDfg3cBT1YXT0RTGTxvT9QBEZQebYCJCcWpnuMpGBL+F/RJLwT+YQ2OUUmHPGHdmXZN5L2
Bot98b4A3jOfLQFCfb8GQFVAU02m+vUofL5FeNU9gBNstw7NJYSlhyxcQHBNocyleMBrFS0Fz9DJ
i//wnT33eIIFITgD68cefvkJsWbi3E4yrC1OiTjNCJ7RQKIUkqwQ8vvBHzUUOPU9T+RDYORl3Xaj
bzFOF7qUh9Pafk/LvCKkxBAmbZdH595bgY6dizFpP1bmGqvmOTxSO4qaSubhhHAPoNSkUHhGqgSc
KQEipwmb84yB/x6DXQ7DPDQht4et9/rHRCjAo5wuIeLgQB3qhmeRuhlQddMfuPlwHlh2pXROyGsb
Poo1D53E9Hbx+tB959asgMV/E9M0j9YJqBSXgI4HYKleZR9LSjrLpwN/fzGOPs+vkp+nIUiJxOe9
yvEOspvHyc37Gr3wmzO1zvbbLWZpWU839Z3pIG8LsxdXxcrwyob9Et432Bjmc7ynYXWgu/Qk75+2
lKn+Y+8lk7qycuX8S3GnH8GBmUpyUgRANmMqgPQXn7x5dYGT/+bViyDTwkGMD4b+vfvx229Z+wRV
nmc5+lODj4aClIExMkAIH61y3SdNFSC1chzO88HF4EUoXTiCVUJuEsJ4KRl+hPH55UT6cFWJ295M
FCvjzHzxr1X5WMyZo1cIp5NQa0d/ijovYhOqDfnQjJU5B3tjvtkd4Rbzn5semOfmv8Fz7I95g/8G
70y3zBvz3+C5dNK8lJ9BCdNv89L81z3/JaHRyk9VMsFtqW9orBycFtNPG9/H03PZMw9VUBQRjqAI
6HFVEKA7Kq4IaHddEZyquBA+VsVkauKS8qZ35UPVTk6IOMGmaKhWzQy4UrEaONhjPl/Gu0x+JENV
khq4iK0j2j1q0IvGmdoUGgnQwR6SgJTSxzVSBTxK88TQAruJuXATw/8O2nLcu2k7NkXRSeR9JItt
/hiGmtxfM7Vm50X2EWHucMO1zVt7TzSOKdCFk5ZCeJ72aPOW9W9eOzp5bu1e/A3XDg+4zJj549ev
na8LB7oVkqmITwMa1U9QPvPcqtiba8O2S9WGdT9eGwaeqg1zEKn3ITx2VTTwseYN34zAyqMcYFV4
wJlkCdakkcBbfe0ATVUtqVjzkHchTVO9qzauOxFD5HZzap82TLKv/FfnqolJDY9hwE41rIb/GbUZ
m5hR/RkofvQziQ0XEA3YenEzsBQps1JwlTs20VuYjpfbgnnQZo7XcroUeFbBidIikl3cY3IQskT9
lAQhwo3hFvljebi0rcYPbKT108B19VPSQ9On1XK3fVqkqHDOL0W2UGHI8VcGmZ7zRWQ4YAE6FG2K
9UyZDnF4rBjqJrC4zbuUWK7zAJHPKuk0CtCCdFm/qATQxXC3NZI8IJE977f2WaTzhGDy68USJ5r/
D953his+R5Hr0PUEFZQQSLh2+KCQ7w3bgakMJJJh9naHgLZab4xhmPFX/wfWyGygMrnSz6sClZlO
FVIBwu5+PYcU3ECGQSmZvXLSTZZDLJ2W2Dg/2XTX/1XijJNctGDTIrcs53z27BDjEnxEqITmOGwJ
MxeuRLnuJLgjArvWnCb6qSYJATv6BGoPq2Y4xpA1MGI0jJB5Rb5Ld+4I70WtpHtoNRGdhHhg/vvr
pKsWzkXmJUwJLmupbruoWjRzboHV7RVVi8Qiu+qZuq3gd8J2/f8hjsJuqYiLSF3r8ZTy0TnCYyTV
YAnDsN0+EbPR0htPNl7OI1kgYkWSWrGm3rQs9Fif/M/hM/5W/Oq/KUdDmyC68huPo38OAQ8WPxim
TLOcTuJ+9aWI5u3TyNrATmr7MPI5bR9GASR9xzv9Kd3f0bm3t7x1AjAzvb+h6x70gPMClP9kVaYd
ZpgBCpoHXPXdXcGKfdkenAb1psjIKAwJ3ga0aTZVXZeQYBqsAGh1Ev0gJybMFtNtRvYX8mwwt3FR
rBGVwXzWNU2RxfstxI5AJ6r97R2JvDfFbAoXN7AA+121QiM9wkmYKaxBP2kauil2kEwd2JjttL6D
S53OCqDEI9I54lYUy0N80yOnR9TxaWB/YuT379lYACXRQIS8DM4dJXhg8wRwVTz+rgCcuhxvgmok
z2r90LAbO85MjM9s79jAHpys2JBhvlluyUv9g6wqZV8mAgEAA+tKmut3HSRpCVfxIfoC7TfkaZzl
znrshPnAnN+scvti0zw96LTY/4F1qYeEUjbOeutdT/vN6vZ67378tpewiQelnpm/n8GDXufnP/74
v4BDth+8+fN/+/h//3vrlO27Ynd+R0y6H+Hq8+8SSTX0C3U6OqqUb0KIVAKysRtYBwvM5Gs2/kFy
KUkoK7KkVL2sAQ58w4nOEsGmufrtuHs8WFS4JpcSW4h21Tt4t+RDW6xuijmgcNlMjNBviqEw1OCu
eoAQUE6NImkWdnfmEDtflXqU/bT+y8D85xe8Un9a/wvDriDMW7Z7qLBVGKE5iXPOVAztrjGbg+5j
PUDQGWtUxbBZ2TJeQYs8XDxOVxtzX2X58FNZG6b9Jd5Qg4z+shsu7/e5X5CEK4OVKV0rSBDhmfoG
ZqMhWWhjphJT7oHRmVN9FpDhDurRxi9rUxFFDyPKLIKE1Nvpw0ROvV44cJLp9foCWfrTuifJcGgR
3MJgDgzMjUE7x8w4zfe/UJYryaNpv3T5/OrKDm9JSQbk1cXoymNulx5Q/F96mFTYf/hL6uG/RBCU
XsLBNjmeOnJ+cQUYV72fzMizLyHtkwfJR4VGnNkCMAPvYYzP1d8zTNwkj6LEoPFQg6Q5MNo4NSa3
DP1L6GyBxsDUZ1nvBJUwFickoV6YnWl2byerWOcMItiPS3FfYK4ukm+lmef9BLihxQXNvoRZ7pl+
P8UPYu3++UXfPK1h/iE75sXoqh8nAAw3w6gpa19YMDFkALxMDzLxioZwiSVgDnASpKMtG6ypS//S
a58iMzduaqI5kVZdibEsiaCxma1MnjeCCWmhzb3LIk/eM80OVCF3kqw+TFbyALLNg1ONbav6lr2U
oJY5Y0ljWUt+67QXn6vb+t3uby5vtpCC1ALrXWVPMCPnk+eP868htiWNZUx9NVOB0w4gVOXcjqCN
Ji1UUmvEHQG/4CEGZ+NfF/0TAD5tbpYFNQSJi+hZCtiZS7ciGrYCSHqf5FQvOntWvhhKyiaXLsv5
FjYgTTYls0x+NbJ0tdPDuNc2697QptM2l3Uqn7XbcoR29vrd96/ffWxZhGTfzii3FiC6GQa3yqrZ
bG9dsRSsCF7RA+INRDLy25lVCKssqJTgK2TYhe5vjHDydXfYSS5266ZXX88lzRkoblBUmiym5TKx
eA33jj5KMxS1YI/NCwpmhjuSGEUzyK+7qQg+bCEmGLZxeAUMjXwJQJ2T7rfgfZuJ+21EsQjaDKWf
rnDWzDb5IDFQgBDP/CUiePRPZnJAgKS0JIevuirUjwH0WA6Y2JMArTHrb5sEP7Pgw50mp+IXg+wf
kC9CSmHYuh1MqZeU70+mY91+v70f4L9+pB9qNOqpv2Wwr52f//nH/zBhoEiCm/r58uP/+X/8T4DG
mP1A+FMoQhueFRnxA3Ddu/2GnO/2mIYICwgfzmoZ7uFivQLrchy+yqBWyRBXRJvE4U8shuX23uZT
Mr9fFbMKIVIG+GexJZnKn7IZuuxTJQEIeG8uh458iVvvzPZGDmaxzWF55fJxC4c+NKUwn43Zu0As
IKISoinMPzlZdt59//HD64/kHFxAEqNOWVsBZSyDHrqH5j1d+PolPukIQIYvTsrTzhll+DMk5KEy
k8NJtSTx1QsdIAA7cAnpFM2zCQTwbMPgtp590xsYmWd1M5+O2FglORYFsip3TtNnkKLNtAyEgtej
zordjOMZMe3dFJP7ztHVXCW6tDYHcInuTiYPW2Cq5kGuHetnrQpE2XqghR4axMwZ6SWry1s6V7VN
3SwXeDg8zb4p/20pc3lxBTadXR89yDVDJyXsrJUr9LCF/3DPvNtZqpnXsM3MPzzAIyEQTSlssKFy
hS19ThIbVd2yoG/KRzR+8U6lc9bIchrqsEGnEsP91AkXFNDImQ0rpq8DZE6e7R7l73JeJ5PKYLNw
78O//iv6FKXdMD/8l/w5SC5Bv/zX8nnzXn4GxqE5Hsl5rR3eZ4jYLcZhnpcwgAAnL0+8TWrjkhxP
lxZgQQtA7F6933CiuxycPW9QbMUgnT3abLUxmL89ZALG7Sg6BCNI7UFS/XRIm4iVcpz4cVfedmWJ
JRuQv6zBMsIlkotbft9RCaBaSNKn8p1sMZ2ZFwfXeZp41FG5egysi5QvZ2sE/B/8BIU+9g2YlroP
36C4KTrN7kvyBdLwUFaIBQc8+UXwe+aiLrbmq3zOGUCVUg+TvnmHTrDmziA91B1YsU2L5nakJLCA
iDzKLM4yw7NCvxljFr8DTUEn1JU3NM9kF+T8w2Fmr/BEwjC8+xcbIxUvpYG1w6LkXNOa4JA6LADt
KfNxadPXTrkjUk3onw32cmn3uEhHbzuruizXf0I1My/lyHyLzvGIFfxwtEmhBt9FPtt+E1T+0y1o
FjE9/CIOULZb0kbUGXpMRwf85mmW4ZeZYODEuroftIlHoPYk97bpErOcw7q5TcQ9wwHRWotiL5bP
7AaoG3cTaihNU/we90XU0J4C8Xa6t3zKRkBhyI8gbJ9MM7vyExo9YFIx9AS+gH4Lcd7Y6RrVp+ZD
6PKNBzjLoV2ZTfjIWhTD5azcJZDY+TSgUFoUczoW0hHdSz0cQyVGdrY5nhUIbjHF7Mlbs9s31Xqu
wlx9Ei9A1ubNIeNctTs9q6a1ISMuQLtYZFt9KucOPAHryk5lBCXAXyDDUTlDvbfd5/R9HoNKnCnc
GJHJvuR+gntJQynY22hM86yzYNBJFRonIE7asJK6h/OW82BvYflun7vn0n1KD/0ULT4QizSSw0oN
KIiq71lz+P7FBO30l5esL91xn1GQLtLVUQJYEl4/eCtP/laXEH7tvOkqwlby13/84fX7t9+9fvfx
m2/7+nbaTe8LjoKHq8RRVENoR5vDCMGfr5mq8zeusxsmwOZsmeWvE/cT9A8xvbPra+zg9TWSYr5d
4DGN6vpaVNycjRyzOON2d82aU/6hILJls+BijyABbrF+BrdcvXuGH5IqiC5mIf9BjBj+//MAKH4T
NSL6PCTT137W1m3zt3b/83oQbHcgt9B7s3coxRtd7aAY5BPAx2JzMALHxOZjURGXLG+EJtc1hSUj
T2teg61xvctF9qMtwNGYQZTJvDgaBAyFhiIBUSbKLmNWP6ldZD02Y5qDCDpGkhceImeGDo/rEDNB
qw9hAlGwYm3z/sAjLXYGMK+lzmhquiHYATS6yXQ+pys+x6TWoja/NQLzhqQH8xAmB5/kXboMlsxQ
48Oha6N3LnSiBnAp8xetmwanFJEH9864WxtqUkx2ZtnNzpybLplHd9WDNIMPcQs0BKMAXiVV0Yor
rm0kLxILbg7ZZrm/BecHIyibLbq2HqI8RMAUXpd5VzGV5tugvRh3YQSqI5dXrhf0ebmuuYQTTihD
E3LkyJDjepv1gcVPf5+2CCpVwg5oxAPqCGYYmDwdbg4w3U8nrKnqRR28XVY35/XusCTcAghvMCzc
Gi3cniaL04RYhVZrJ5lBb5ynLvDe3UHUGzrGxORDz5j3DvtCgt5pXbEMfnNndv+qznh8aoAI1KWQ
Xa+r+pBxCgjMTZOTi07femzQ30M6QUO995W0r55KAyFVfu4fbOHacE/UOWTdUHoWVtyvyrreFMsl
gxrMqtXKjPCcJfGpQG/NyB2ZkoGA2gfuc7Opt6G6Sb4ylNkC5ZMIDOWfPedUVqj/5fkv6FTjlxtk
tl/OuUmKYIlu4MSjNJ25aXpIGvm4RzY3IZN0T1lENwuwbXE99fFTlFC0KtaWzneWIUD8DfVR+ZT6
Qv6U5VcJPuc/JQbd213s81X4W4v3FZ0TdKJDn4W8yx1Q9KSrv2w+gCdgAMwa1qv7IwpQnwZHwTpy
uMXo7ko4PeDMhJtqjU4cBQYee+I0zM7QfYKlDJArVGPWbwTyBVmhiGuQDZZOq00/hN7wKBWj6kK1
BVKfbQL59fpoY3aA+GKom3tN3jKj7B/15PVMIxdm915eDF5c9bMH3LpLYEpBVnuocB6tDKyaYwZP
fPLc+YGhCCLCxZgCnCV1knv+YqjaAsk1ZmOZh1W9TbCwAXXrik8QN9//7J3VoKO5GGTqrxeDbDgc
ml2GEgfJ5VOSmWETqf4oBYGDBZQPDLPWkWvG/Uz1K5Ox6UPFahFBo8M/+GDxX0MRwFbT9fQW+VFm
jL+jB7Zap/OPWm1lKA44Tumv2VxFkukPaVo9tPD4Ns/YECyX7rlm+xSkMfesN5K5UXCsnvBoSnh/
eyDE0AVA+6VfXjIUynjkwShzOp0Rp5c0lP8l3N/mAf47gBQ6JDybR/JTY3Dzljdv37jbw8L9m8f2
t5dlolwuLZcJ4zV/8sJSuPgvZvoDUVl0Xn21BrSpjSj8syEHOyUdO60jb3zh3en02TRZM8qJQ15T
5javbapcG1eETQ+plc7xXbE5oM0DcV7oN+Q0F0sJ6K6d0tq9tzIH6DLc00lZ40pTviXk1jXAomot
f6qrOb+XAD2J59z/tjz1GuZiKiE9quC2eD3ob/EyoSwt1DiEQaPmLs1/rhihxf7dMJqnUskTkhCH
xL9GcVMj550T2iIJhDxNgCM6plx05qfMMD6F3JKbQzdK0kn1h0IwQH1U7tBGilnAR1EOVPMY54Tq
MZ0FQ5nhdHtaMkhF+RBw592QrcrwjX7aaeRmC/bOo44PioXB9jFb2Tgc1S3wvtW9EWoeD7mf3Zyn
GisO7aaWiYZUrSRuYLWxnvMxT33HPw8NVYOV4s9+lygQHDroGNlOt80fgr2Z8wOwqVMqTGforfaG
eGBiaDxgHReoyG9giibkw5ZbNSOUSKoQiJ39sKs2b3cauf4Myt5hoImnKoClQ6fIARibe0ZgqsCb
3+I5KbtbhOhlRzXEAjAys73MZ/NEDs+XbIp3lTBxxt5I8Ctqv4tFuhGQDk4CJjSWacCCOc0kr7ht
ti/kxusgnOewf8ithMZyskN28eVPtidgxJbrGtVVMBeihdMKt1U1uwc+YkX+yqCdyPbrG/QfIDGv
znIjNu2Lv//PX0EeU3QDB+wcZQIg47ookqRnZMHu0t7pRCDflmL5y0Qcl0wDQNnnYJYfd1++6AJy
T10bZmMcK/xm0zW0y1Uhm60NAbG5mqfO3heD3D7hne51NRwTnhcYUxAMZ3eeCKLVNjEud1Ts3d60
KU7w06KWUOHu7dG1VQ7k7uyesEnNP0yAvGEccVdQbhOcj4xqDMHFbTk1V+F/yr56YXaabdHXpafl
SsiBRU4uypaJyd4BItOQrZngQ9I2NscLotMM87IiHM2dMuVojCIvQIWdG34ARepLSDn+uPNx51g9
NA51rV0itnz0Z8s6UUQTB2s1jYsJa9jte935rnws17numdLtQsrrACN+kdD3Nvp/avTZifBPn+Eh
ogKLxa3GBhyafmD3mj6qP4cdF5y3ptTM7jP4Ptp/MpMLVP8uaqcE7qqwblZF0+ryqtjOe0BJPIB0
YJMHYMicASE5wgOV5NllM0cGiP1izC033dGmstHgs+V+zrd7EnjenjMcAIYqbSG9dPmpEIsoQMxO
S/QvoIb8aMjZ3dRFhAK9wAdqifDvIadtU8/BlFpTinbNtpE9YE3V2nIRrBFdVHZ4ghhCx8v13reg
U0AXGRSi1OWpD9BhTDTPXltoOijWc3asBN413p7yVfPP5ej8q6uk07VavzSriaBUakWbvZhxbiV6
IM593sy7tlSkV/E6whp2h12KLsBCMRK1A6odXoKjxGU3OBTWKa7xZHglhj4AqWm92u5A1Anrn2V/
/OMfs9l0BszebwOeQlpKAYaQsx+hG0/QoXpb7ziVZrAZgJmsiyCdsieN1BwDqDNck7OheBlfWd/P
cENSZX+HzbqJ/Wi/Qj/MPru4ioKb0BkRPp0cWVsUQk28v20k6cnI15oMl22BQqdcmdjjUcIBwNkx
chjzvj2QL7gr7aVwKLm73iTh/NC+NHPmQlh9DrgNVpYuCRQDi3pCFo5JtZ2AfWMitrrQctKzwLMu
TtUXCv5an2S7UfzBI/UZQwT/mBArF3TGp/LF7B41uw5kFokaf4Q0O/ghsf2cs12MfeNUFhBwFC5d
eHUzuizcB5W4FtOs+HK86n4i3Q9QZuX0ToX7jRyLh6bKkRQwbHIBwtEujYxbm314Dyp0tgOiU9FN
sQDhBuQljMZmZ3S/NZEZ0FyEoR4YcYvxomDpom+YkbJ7kkilRCQGQWsgr/F30InNsNwP0FHw81jX
fIHDJ2K4k7z3tAff4bmFcM/fhg8u1QMSDn9K3iHcBaspYangc+baZ5vtgaHxJ1BPEAtf80qnCFCC
yHTVUZfDu98NIaH99FNVzrOtGWS1kpYJ0mdTFPfi/zqZQA6GyUR8Y1Q7OdwyAlexPAAVo6xn7H0w
NUTqhwMq4YFRA2cwsNL/1pE6aBq5oXhgPfmwOeN/+aXvs0uQmxREhNJm70aKu3X3pAs5C2YEPyl3
PbczlG9pZJti7WsclzHXZiqRfco0GavirKTIZYagBqrztN5NDgF8N83hJBk76eol1L8KwZF9sZZC
g0Qd5eTYVHdYt9QcBNfYmzjjGHpRBMDoce8uzT8xh7gETSmsFErS7ik4ZeT3xWFMwRQZDGmE/x0q
tqh/OXpxFV2sS3fa7Gy4eylErT/jW5vc1lEPKTwHT6lc1Cmvb1WrWTOYVtSMXZ/GtmNjnwX0VRN6
EPDUH4gVvANnJWb1PZbQnAnTj7gsCuB9XYyA0KEwZa+Bn9AUUFMv4G9nCE29KLYTNvHl3EPIslAP
uHfKyX4lX28wlikQdavyBzZwNXSKOssb2/nAr/Ujw7mp+B3/dP1SjQz0NdzukGbaH6sRjcOBRdoD
UQmaI8CSZtL/4upz4k1Um1cpPh872E1+J2SyuSmhmIh6n+5hAAvm7Xq/JEZZmYO9nebc/MAuxdj6
mbgr640fWhFrjaVAN1KKWtcI1OoGVBq17bZ1X2/I28btgbH63Ra5fAbJ5a1ZKe+DHZqrmiOLOuHA
SvVCvfbj5ufzCTc0IQgZOQQTU5Z3tozQdHal5kxuIRy2uSHRcaRtMnDh9jcstHef1JdP6iuI7KZp
kXaG5Ty+MRITOea2/Ak95sspXxnLD/owUD45l8eaaFiyo/Uknez4L2qoI7hPf2Ed7pHlUAYetSYe
0fJ9p/yIDiKQ1XaFoQWODyZ/D3CccnpbSbu9ID97YK/ZzdcaVfNyWAzlqXPZIIyajs+aP4Dz6HrH
oThoqLYZMMR35obg7rcFOsd0hBs3Bc3NTiiPONJXiN233c/I2ZojJazjzrDjFBQPBYZJbLbVzRSx
q5AhxamZLm8NY7m7W+FBNgURlfKQAaDMB7M0L2QeKxeKM5vyB2fbYkrpn1x8BB49EIiAZTQ8p5C1
TjOtAOACmc5IWA2XEL2bBAqT1Orpue9ItDoZVex6gcs5hsHQ3gPeFtfcd1mSoKIXNjqAOVR4JsF8
/Ojk08+2dDqt4k+F2kg5CNLLNAvLHBI3QeyfqhzED+pjzu8uuSowsPL9ToAjwSOe4HyM1RPDIOzY
RTO3o7gM8DHckV4bTrzwvggAIsEH+m1fl6tQehqlFAwmwi5Ngt7C4/UeeB3bw4ktb14AM2NHdZoj
fNb0Eauniem3/aSeF3xQX0p1x0jE2wJDm0Xlf2bOyK2ZJ0ikZhjxRTkrp0tFHnq1jTAiggOhRh7N
kcNdrgohEkx3IAAbrROa3JhzglQIN7KEeWAU/dCeDU0GlG/mJHX9eucBlxg3HS8l77roJJwxpcKz
P+XwOxyloRN35e0dondNdZqiAmVj1QKGzNybziP+b0GuiIj7hTQNgj3AX8hcU6hvsf3OVFTHGcxR
sT2nDhgJt6yH2R+gK/uardGUAml2VwQUEsGC77QREO0SaBuh0DMbWOhUV0J14j0URklEYKRY84ux
Ci3xjwjXA+cHLDuBB/qeVXEmieCKtPQq8hx1eywhjTgpSmpVX/kuYOHd9GDIIr7l7Mm2FlFfaJkX
Iiczz0PCTEKjtFWjOBF02GSaUnJYCGr/XgiuoCzgKKJbam/rs33Z0ogt1wbppDbNOFOHMfcZnkHW
6w2yUwgYLDLdmBM8YZa1Qj6qrWa88VqL2wM9kFyMQYaoY1Pnnl817bxmAKFT5t3FJZmnHYt10Dw9
gc9h4C2I5S3qgCnOq/XWcMWNeAPOhxv/mQDgJHh34p96akJEAesDPbZt+AW89tg8aP+OiwaUO3xE
s3NcuWA5b2A8dwry1Eildxh/ns3YBgs0Dt2IOd4HocnpEbfBL7iFVbG9LSQ4p7BWZSwztBfKHWIV
Y0LQoE9JrQDJzdiNMdcdumefowuINHW+vdc1mlTSSeQBOVpK0Xhfw1OZ9Tbp2LXhSvPWFG87MmC9
QbBUZeVSDrUvPSrrIn0YkT6AyjnNDYGtT8WqKv9czNFjtgdOEj3JN0ygNth7vg+OKe3b1FdDDEKi
UNiSU0jGFuS9uchz8YbGIkP5ltYChl2L7cGg7id/ngdEDyXOgk0xFbjRAE9FtVtUVZXNOMUG2s2B
vp0Xazi+9QEfB7hYvEk/HNa76WNjPnVslle2AbxDHKOs8/pb1PKio1Sx2eYYnoahfNtdtx91QHf8
LXb7u7JG+02iUwVbrgG5h5XJgWX55G53BawJ4uZW/MnRT+tuU0lr/QKfOwhtQoIltnKHwppuI8ue
1MkXhLbA15MLqK5dXD/2ULQB6JGO4/rM7/yXt+8+jsweXlWfwH60OSCvaTr+LAMzPmGaw0l9Zk4v
hWAnWtmvS3N/oakHOR846odqv1U9Za+CuHL2JCuGUeZPtyHONlsjXqvpVpGLXaTN/iFm+yvRdDnB
5mFdzuHSoUMHFD08wdCSS2kAIklMJMzDiVXOP5ph71xBNvKZBz9uPAc5C4GuaidZ3lPbt4XaP5Dm
bM7MrTrSDiFm187uYSLJIItvaPYCXSlcRjuO8Z8CjHnJaCFm9QaoqXHqIQmioCjUEE3wDKTKhwID
x8iFi7NVcufJTy3yfHLmQ9gv5neuh9y/fJ5CnVUllGfOKTiAqqKXB1Yjhjkb6M6I1a+qhzVHxiQS
3CwIYDxektY256ZN7kZLo3/rte79v7DYZqjpNcbcvIYVgKgsQWwzzz5ruXUbi2A6iFmez22Z3P5y
MLXk5H4yEyRKTY/tafYoMDcL8PxuSAk/UfQW7768AP+1wB9cRVj7ruFwYyVJusgYwJOSjrjaAiiB
MtpzZG2DE0P4JG0Ncv6/ZIno5v2uZ4boX51ElAU5MEkze6pML0EyqXLjKfK/4I6pfQoZ0khl1xvo
0v3PbkbCBk5sJ3Q15I2c+EhMUmg20hTl2IykvmGfebNhn/Y/qwE1Dw0txGdSnfxwdqzcItstcU5b
xA7ljOmcnhP5627+9LeSM2xEZbOksS4erOCY6kSr17oWtHBYTiOBIDDOPR9/KlK3gr85RxjL6Qw0
AAmYNayCDQJxSDrtJx2aN4NHuxsBr9JnAHhIh1EfTXOO6e23n4o5rWaU4kvNTFDUB3hXornaHI0u
3/E20muUyqXTtkSJ9IKpbdpJ3KmlZJ1vvE2AwrB1mYkn1egF8ZlMS6So41eC0nFnU9+QBTz1K3F5
1w54kzJQh3crcLiCfDxxGwS1j1wLfukEy+Ezd/h5b0Sfy9O1Ej6P5Fmqttnu1xCfNCtuDPvHx8AI
y+ge0ZZoFpUqQX5RH8EEYNqx5VCRNS8UqOxLsAWkuTHyx1Iu3qheN3XoBWHBJxzA2U+JhgTZrmg4
Q/vMt07oovb3cLbf6WhU9Z2x+h0bLVRrqrmEBll/dt3w3aRT38mfOPYZMy8Qj4FfI3DnCBrenze/
CfYF74fiAYREfvUPf88gDmANv9nvOHUNxsVDshiE/wd/iKA2KrQIxCkD4BAwyUAcPYc3kr8xeBhj
AxEcfLwLdzckeoCJCKDIukm/TTAeR8PtZ19nL9LTit0wc3FAj+ew4uXFKJnmxE4s1ASDN2VhwA7m
PdR39fpeslXzFnAewEKwOfinc5CxnmxZrW+7/lmVHhXbbaQjDoAjEq7n1c42gFYW0FjH51jzApB5
UtdoYE08HkH4k2iUMr4gnQmMFf/rx87YquHsUAq9EC6ZzYb0UkRuIxShDzQ93ONGQ0Bdc8VDUrm5
ytpHm6llp3m3RtvWk6ZoAdOK7Ka5CeaDeTAXOxvMdJqlbeBDz4hlciGiqGlGT2oXbw6P4VgTuOiy
rrQBff2pumfT/TO50MAovak2++V0KyYt7YNersnj/ObAzCHyhV2CNeuCvYggoADslow0FG+dpXwW
LRONXUDEx6FZSuB8As75DJFivNkaOuB+BoCRNiZl7Zi+ycVXfx/kkg0YwhaGK3BCj73Ugb6Ug+wR
yct6v0KvS3tN5v3g1DrfPjT51crlF6EnwYPxsZ866BaZEn70G/LMZADhz3nxnmwzhg8x9de2Pu2Z
J6AHUCIl9qjf7yS95Jss++K6ePlkDo6LWXmCisbW6T2pe1grFR/Z7p0f5wA0Y3WYt4RGxFtvfc56
bWzSDTsCiTzu2b8URySctsCL1Vpx6zEBgIkbJS5Xm0+8XnoOlwpBHwIHf1yxHAGx2IaYFFKwnD5B
hHu84zPaYJ80lS6fXw281EZgE6AQpMR2plA/yUIWJSs4kqqKW/BcVbzjIf0JmxLMzuBAsfleVG1u
KuHPkIfviYLMU6GgkFXufBx8qP2F5MGwqaaSwT+EUyOw8w7cKATRB1EY3iOmH/oXiW8iOkQ1ycMB
ggESaMbVIbQ8TbSP2Ku1eR7AuaZL4vox3qJkaDaC9CV/JHIZ0x5L1g5fFX+q780ssSE+g0TnewSD
Z1MxlozghijE7IH9pdCZy+YAyW7KHUEYLg2fCd0c+gELdtDHgxaSsQoaKhClfvsA94cv1LkZDkIg
ylOWw4GNvacHeeweKB4kE2/b6COMUiDm+vU2y59qH3x9XTwotKfguhLiuigbIJtUbUsN7Xj8N/Hk
qO+qdpLQxOnBdjoTQ+/QwbDWmWdERS8eyT7MRcKlJgDQtNe/CkKRn36B9NSxV8KcHWI9rkCn74iq
QASnkfJBh+FG1lBmYpmCXZj2ncvgd9qa0u6s2B12nKMQ0BpjtW6Vl3QBSaUo31bQlHjoJwbFbr30
RvGpVvZJTP4Mirvl89ZCxTjVzs11v5lbbkoeeiXZN1SX08DtXErG4ZWTh15JHpZXkJ955XyfYV3a
e+PXsVsH8W70dvLLqf3ib45kqYlmI/2nifKye6IdFd6rM50pBgOUgVZUO8bpKObOb83nVRgkTp8J
Bx3nreSRVDLd+R7YFfDCJbYNrJZ9H5sEP5AOcm9KKaQ6FpAwviv/qTgk3FNEZ+HNIziJ2GN4Ap8a
Y2R4MqReDiz1j4IM41JWzxvcqbrnjI2xmm5yw6uBIgy1PiQ2e7tNT6IhNAiZ6vBrAItZ+yN+Am/N
HSC8D2zMzsA6uSdcP8lfEpIZPvajoIeBaQ52wp/LTR5+I4mbk9x7sOs6gTer0nnyKPqCOWj+jltW
kQlXknWJep3ohT7VUkP+DvcJjrB9Z1CjxWpjuolb0SKjBnKHXjcRP3RAmEfKhBA50qaXGTzsXGwm
CSmwptiBYKFQnE2f05ZVeeyn71HpV0zCgbO22vmmCTs2B5IB5Fg7OrC4+RgnPu2TTp9cKlzMtvlq
2z8TlarKmejkcLxEnw9g4J3UADDoBbu729iO72wMnGeh85lOTm2w7u0wFAJZelDSEnoGQRwv1Ne7
PuCMc4XzM2fEhEo3krIBmksKvBZsfnOYx+trhTBcX18TwOa2OH8x/Mrvh7YNahKq61s3Vwk0Ts9q
M//oEJ29yGQOSsbAYyKuHHmcyvfG+LNjrhUwouIkFviB2sPDMbge2JwroMbqPNhRtdvi4j2Jg3PC
msnwWBqNxJ8HbVLIZMwTTyjtnFIlEF9rZ1wDcYd+8BZoHOB6yBU/zIgz8INiwhWAffbNfJ5hhlGV
z4rjahS0mg+STimroAiQHOdBsFDg52IcJ5Aa6bWRen/www+BuLC9W0Uecb5dUqCimhaKbu6mdUHp
qw7V3h5QUnM6wJUUtw4BoyAVmrI7QnHnGYPbBKM2EWKUne8lAIxahrCvofM9oQxdIhgaIRxhks9r
0Mqifo3BaOaFoZnwAxOaMSK5xJ02WVR4bS2y+zPUYvHUYk1uvk70iKZ+hGnuJEzTrYhN7ltnYJWq
FjtYmAiLGBaE9N6sno7R7jn2k1ZC0OnDcBWUogHX1d3dri8QUesygpUAyO41+i7VWp09FFuFkq6O
QLba1zsfUv/dOSHi+wwYwsoSScfX58WScig5hPwp9YQ01zvIs4sOBWZnpToVLoNsLJu7jT5mo1ij
dGfTtZutMoAJuiGHQpi62g+fgZp+52wrOkdfqH2wl2G5o1Bn9jTBvelOEFkK1CmMfQLoMFJiKtw2
fHQwonC608c2dX4aUsLxzhddvJ8l+62UNMVwUjG/RyJ7nNsWYea4CGUvzCIXflB6Au6gEl8DDqN4
qO15yMXEaz9N/gQB3DRuGxlttZWbGagypq+j2xJH4KWlc6r7cs6ENcjwZ3siGe5AnxENJpEe7/S8
eD6h0LMcri3nfASJ0B79ckf0UDKnYQgiz73Lv+j3+KMtaBbaKmgwQZNNs4lpkKS+xWANGnrL80IJ
3T+BLRmjGA9yVs7t+WCUNElvi4pVbQvAweOVY1ZyfgBLwYw7ybcio+oDnCtm/GzGV/MMjfPSrMfe
SEae4skR/WJXSzZUTo2M8b/mBbjRqaaqhc354qf4fIAbHeripPMBwVDOzWZ58EPKWPG8c6wycH5a
fZrWfdEHIG+zO4MRSlUphNu3CarcLAHJLtHV31pgqODATwyesPiZXnK8mleS6yNc+ueFsXNF1O6C
X4SMw+awaegB63qtGKoAAkoEE2useMk/bGevqClvofzuWcgC6mcIP5lYIR8/QMFDqiVMVOu0WOAc
l9pmhlOhmZePQ6CLm7zv5GvLNNYbI9Pk3UG3D5+yJaNoVwZOwEp9cFa4GDVMEu/LHPYRfZG3o319
FY7O8Ved5vacmGyafep36KoTx5qnI2Q4gN1FoXcS+AyEhjBEhUwexJ3LgggvEsVLyFUGwXdAFAlh
UXj2aoOxEAXSjd9G4ZcszqdtI0rLyXATkTDWgLqWMlqjfWxtAYpY0XnagfWkRFLEuhliDRjk/OZJ
ojxBpG3qAuUWOV+Fc5XzxTqBG2BdDyDBY6AWpRqQWNx/XCfawcsZ/MrMtoGmwKDKWyhJGaMp6z0R
6767eZGHfwKsQ907Onf+1wa2J/14b83rUWJI5RzwIrZ5Wkw1k9HXl0hCLg5BYrQAbV5d+vaR/lUq
44hVVIri9LSLRunAYNyscwU64p3jFLG2PVa4KdbY01pjaBXMrarl+lIN6+rEK0vfOYDb4s2MuXZa
tdSp/2nNdeOgLG61G2S/QR8iRZzCYzqnUGdfEct6pHI+VgSWeqOeBJqN3Hm9DTCqU3FWfdR7TFHz
gUhJkcrDy7zj5KcWxYSWhfZg339X7QqfVRcRTkbZV/izqmXmIYPmN9sSiO9aQ/iaIZqpNgwiMHCY
iHvPnmQOvMb05L9Ve6H4KK5jjKv0QZ9sT62CJOTgMsa5ZHdupNasHcgAVtfrWFjeiohOCmF22wNn
EYWs46hxiBhtGwbc8JVyPnKCgeBs3hJyCswKYP2Qlmmz35o7TdQLZpg+BisqfOs9MsKGyb4u59co
mIsUlLGTVDmPZaWwU7jNQC/k5BKVVv4G5eyq5szpjDgU5oL2KatVzZkZMdLPLfmTQDpopRG4vg6V
zyGOM9M2a72XhM3AkjvWzRk/YKF8l31VsyFmSrtPJNX92oHt8xgE5fSby90MgX7QCm4ChMpQGGhD
8Z5riydJ+zJYu0siF3LEpxiyhPOIXCIIjMW8mzDhNJld0JANN4yijao6mrl329CiU87d7RjdxW22
YlMRuagsMpNBO0Ca8FNtXgmeOZ4tZymbWYrqW9zOWtI2m8+aew5ZcYdyVc4fmV+wWWXhrxbzNXEf
C7y3Iz/NutnjOzRvW3/yACfEnJ5O7A2Ivc4Xy2q6Q9R88GfeDrKbqlqSexQ4nPYT6B5mRW1H1TOe
gP6XtOSPfT1HYBzAiRG+wHEKaqrgDExw21wemdpOIhZAlxU7dPg5lrxCyzp/WBIMHeMEtTWCmMdL
N0zzj8fBRWPtxP0+ieW76ihhkSbUsLfZbyyrq53/ICFwtpoeyO3O6sym2c10Tqw9UfZVMV2TYkv0
a0iU8M7o+AzyJS5sP/sSbzbW55UD+cvrNXTHm2oQI3ByU9mTqeMIdD6xGeEgyIrAPkB+lxyguk39
PBfb3UR/AJM8y0aMXtg66ZSirGGeuOBNpBv7rZG6yPSF8CdLc8MtXSo5m18QQO387Pbs96PeAy4m
vUl40aGS11VAC2aJxupdrX1ybdPd0Sj0FLmk7g5vwMm3WHK6w+3O1O9fmZXE3xejKyUlUXM24Yp8
nsFe6mIzyLrPJP3K7oFmoqyGH9FEM13+YVu6kKRPxfYGQAzESgrMPG7nvMuvpCVGqU77ehL2oM0o
b8UuBS+I9/YqMoKOOjHMWh06jMIzB3hJExDlGtH1FRcRSpfJbyfBozzcN42QphVfZneRa/OSGanc
lRySTwAtseqvzJNcWDkQCFUNfL7NANskp+AjKtFMW63EZjPdPvVDdgHcM3FnckNCtHp7EsG2Z03t
djUkrQFc55z6IuM+qctETT+ohOQLfkpG7xNfiDk/Ga7FiOMqX0hXiFkq287uYYh5mvupN+aU5eZy
MifN5kUW9T5SySc1KJN4bP24iXBqAi8E8GTig/mbcfZcoyIawoCxUpPucegzaePr7HmaMyW1QvdJ
nZ2fc5/t9MuCnMLhUjtctRPOoCo1yG63RbEOQLN+xRmaY5X4FJjnkwmq2LwIdvM4liYwZhLyVVQz
0fuKPvindduW6BIm1JfYglRtmyev4pMaIGXgq2yus1sb5t9MwSBxIM0gedIEItHNldVD8iQx30N3
M8es07lbR9eigonBVL7WMZ6GY0OKdUgKRM1IokYsO0wkmQKZDpLCLXdV7vXLdiR87TF+ZpKezGE+
FmtJC/XlhRm8S+zIR5fSbbNbFk0OPcpfc+yE5boH2VMSI54+ZauKc+viOwQtulMKxLgxM3T/zEe9
5q/9Y9Q2UUGwy6J8NM04HpFZf8y5A0bsB/OWpfqPIFCyO9mGksmAh8D1dQP0m5HCOQSG6r9dsF3w
B0JhfDH839DEfVN9MscX1C3AXtp0RHYg4PslmenRlMiX+GjkJLOvv/6atLs8l/9cbKtX5acSLn8U
+9RiDodD+Ofi2XOq/z1il6FFUfQ8zuxORlWKuZlmu+r8pjhnLRWH4Qe9aOrAwCIVmQ+7s/cbb9Kg
b19Te1WiV2BouCl3W9AX2Q5SonLRR4XdQf+6/LE/kp168exRz8SJfV8MsiOdPrmdx/Epw/8GNsF2
DkhUtTgUluiORaB0TIoIAJ8jjeanr0V3kT/vd0/oxw/Es2M2U1Ba0DY6D/9HhT+UqxJCYTHN1f72
bqdPEx4FNETT/h9wjF2J+PgA0i06P6R0Zr3Q1WiGAfW8embXNRw3e9jQNI7NwFFFQYzPmxHJi+1m
i1o4s6VMW/sNOs7cml0F0dFOzcon9iX3yrQC17LqEeZphNNvn2Wzw4w5gvz6Ouzb+fnX8ZTAQ4z4
N6sJQASwXDIH0G+/pKCI6RrwnGbrk+koHAfgDGgzRp3oi4SKIGc8KfdFsUE4A5k9O6C5kyGRyuL4
IKYZ8BWQHJha4ccZopb7yi1QMusleBPtDdOzxCJroGTQZjlLLACpzT8UhfU4qhYM1s4dv77ebQ9m
ZjGIGZXK5oJGGkDq0YW42c2LnSHtPBxw3NyupmqNRSs6mVikgzsj2RumRLmXm8sxukHgOH6Dt5Ac
qwDU1AJVYkK4aVBYlWXJMAMAyXp/Q664NlZQ7k8tGlGE991ut6lHz54Zknizn90Xu2G1vX1G9+z5
vPjEP58heMWzi//8D/yAyIK73LWaYCqdHO5hpZgp+R31wR9BkH7OXFPpW3ZVg1Nv3rXb2bnoVcv5
OSEGSOBjtfWw2bpm+bZm4zCb/jsjhCj2AKSEJzbpsqeMSzAT6H0QOO+bPsGejUonBRIHJ0RwW6by
CcHlMPwnGZjzsIJK/55qM+qJ1hc2txxXY0HPeSzEeULe4z6QTNhxE50U8BN6kkCQcb3TTMUMU/OO
My+WKk4qSsWgqtp+FNYMgLaYeBb+zi9CaQMfDxcTpDg1WVS9MjaiI3DAETxVZgWZfJJfGmm//aab
1coByM8H9CzGIfUxe2u5DKVPxYXDHT3h8csHKVgCMtBVsxBxBjwz5uV83fuIKsXMlgZpUvqKOSYh
b8ytxCT/NmhGtMAE/WEmvjqYw4Ser5r7sQrv1nMQ5IAP+N2Exo1DR3nPNM8sxhwgsw+6M8Xw//U7
6JmOXr19lb37/mP2/pu3H167lOz+wTgWO9t2ZNHaEV8d4wZSaaswJIooYdSnC+Bi+Nvhh1wtG3u/
Lh5M4eSEpNHvuA3vk49qtE93G/XZ1luT6c9uAzqQpAGtdTFSQ7NoArnpRth/uOduFBKihTBCV4r0
UoCIaoRUummntWFGGNvVeTsa9sKUCRABQzAaHslbU3iQtUbPoPTqmocaMDnkumx4/SW5m0t0AqO2
sAkf2C7iy4kLCkziHkNzW6xRi23VeIm9q7OgMSQH+VeQdjmIHMk06gSmQuPCjN9hVu/D648u9HEs
AZWohG5qTCdFC+OCPIQlC64kY/B7z10+JRGi5xpGnROTRRwCVIfXGrt5YfZU1lvjwEdNiIZcutMe
iqiQZsOMr/xFmPLERxIeR3Gh8GvW6SgO7jZfk4ej4w0FEZQW0s/BPjRgQGtciFaY0ACIIh0hKrvA
4fRIGFa8FawvkQX5m5Q1AwDJYdIKQioSIlnYv/4qsWawpdmJw8+foJ6naH6j00LjuJKRnpQZToMh
dmWHdPuDLJE4QACTbEYL7WXVzxiQGa2l1nuyTecaoTGqDsRcttrz3hlo8uITWCgvN1hDrLEQTpVr
MeW3YhOW2nSQ5DLabcgl7KLpddt+iKwApDXAoJDHRUsYqU0krx3gVPAeJ4DpWSRR5hSiENEEkrKG
fPZQmFuQhBmyyGbMcRuU8+WY47Ke593Lbh8QWxLpCEW0A2PR+UUSEAnNLKPyqnUMDrCM4Q+aptFI
wjBMm3pZz2ROkbXgOEeu4igSEOgPTy/I3i+GLxgIT1aStSvJSFx92KPTmkYw8EP4u3hLdRmDx7Ol
OlDB/XqXICHAjLD2sDXO04cYTmWORlxY9CJ08zdW+7UR5fiMPc7LRbj+5oShvZVVvOsMQQD8QNPY
90dTgyRuwOckKLKuPg32tHrgTC4Wt3hR07NIGEQWsb4vN3n3FvAocTjONRB9W6za/wmEfGfWrtJp
9Wm36PQD1Sd9gyUZKVoPDd2r0IV47TooKr+AYwJ3DHAm4sIwhjtSvb7scpaOK9T3kIMD2Fxt8g6v
MOW2s9H9URtfmjbM9d0dBPVEmRBXlRahpi01yIL6NpQiqq9ahiZsQTME4XK6bNDDekI7CMqfmN15
NdNcLJSdU7hRgXXyEGYJ3a4dIQPDB9tGaUWdCRWCwMnvKDxDyinT1kOfQDU6YrHgwVWjBxzK4bGf
Oime/HOSgxUaVUvWi6SETUspBedirAL9St6QW9SCAVK3YuQ/O7887AGHRdJ/tQXZM0Z68+2AIvyr
9KiM9o0l3gQxZ/1yQRHJLs9V7K47FPuNT/ox+L2GBJlFbaPquUgYK2htk5isYp0RFOt0CbaP0L2X
stnWhTUaSH8gll6HD0psy/KQgJ8PpEJHwUNuU12OqAmTPxw1H7kZqcTbGRVQMh/m502Bl2UU6h/y
z2FcjGn8AwZEkfVrQOH0i8w7qUgyLM3BBEJIrYLrqy2yith9YaxTOGW+75MGjfOEBDdBnvjwr4KZ
CFWfrg10ekz09ldKJL4U5o+m1a8s5qnWVUyz4IwpdkPZfjBTZU7xDICraE1fsoWIXDQBm8AVjCUs
C8f0peOzq2CBcvPqY2r0m9zckivmZUJOwdt7jm6JtEWU+Q4MHaSalYQ1GFyhoos5PiWoDCwHbVby
BJQxMOgzRxwYWrKemn5AFHfQAJDx+zXgGU8xJzhYIZ8FUfsNGVbdnDQK7egM2DkSTKsuA0VjbN6I
wIOw8eyk85TqmGjddUqdbBZltdrv8AqjzE3gywo4TTDzBWQ3rhknROFdyWELjp+/FZ73s/Ps4she
gFskP6f2vs5818K6n4zz47vsW8OB7zd0HXtz2Egd9Pz44xEOQM0lBZg1nWnSs6VONWca5RsNMH34
DjTXcDUrEVSAzZvuXvAlDO88q/3A0EDcJ58J6x+XhKFzXgwXd6xc6PuYk0RBomoHloGJpU/qo9X0
Bf1j9tR109xUqR4Su5JzhD/JdIRVEgVB4anmg1rM/e6Z1s0bl9jFdZDAAM25EvD8l366oXLBlZPG
FXqlRhkD7zWk4YHBybtgfJW4z588vDPwqd8RNM9+g1ZqyvUGlnGxZkuR4Ufzn5eGTXoTBlW1Qh/q
SZtAO5bTOlF8VMBUgciuOBb/vlCsvHgZNGdsdvYu7exXruAjCjO3/bSk87Ji8iHSFnl5637NVmya
02gjcjbzxiOkjjgKl03UBzNw1oca2FTEZ9bQLjwchsOQHru8Sqf1nTrQdAREXEx1UN49E2wOFQUp
YfeKMcKs3I1dwrfSYlNvJKQjvcSEboUlxG540gxIAIkO2HVJejhq1+YN9D5rSrpXLpuC3Uvk1sZ+
htPFDsGE3MnDZCvYb7hG2OeoQZiC75T1naf2sdGwPkEBk/u+3iPMDe4GlxGpvpvOIZgP3CGVnzvr
lGYgiETYwsn8iRxnPfbYUsuVts0g1/R8LpYsiKX4Xh8Gomn1vHQS3ufD/HX0eKz6w98f879qIAgj
Q9tbMI7xjxBwjoJs+SUFwTJ4kk9bXNirc1n46K+QYZcX+6UELlMUr3hgoRFiKgePz53DqlujK1vi
YxGcEdWEgN7R5jDCi3p07SKstvdDD17mOpXrE/jJHe2kqQ04B69G18i7b757nQ+Hw/71dTp8N631
9GjBJfXVQ4sJBn7CBRZFj/I6KsUylCjwHa90fRsss7hYx1yr83IjbEBOiBTQH5fLOhQyEnyww/Cl
rqjTFas3VWyKPkgJvUYgQ8ttXqK9v6vfdgex1N0PMVAdrnxTqDMBp+jYaLp2miOp9WvN50vSlKSM
qADJIRH2fOJ09Jw2zqVvE3M+EjCnYDFEdjs12y4FKemZBN/TRUK6M6DTgs7IHu14Jq6v8avX19l/
si1dX0sXzGMKHYaH2BFQhq3BLVi6YR5YeEMCFNBXh9eU4104HJ9vHIGKq/c3Ndwqa7YdatHb9fMB
wRS2BR1svotoYKabf4Adbu1CXyF1QrxFFRqDK4xYC9fX3jJADIFhq8QRXjR4IJUuIbR0SovhkbJI
I8nwc7dG2C1IkSmZfxlCQI1JoVK4mwDJkE8IZRJH7iuma0X5ieIEzJp/Kqt9DWnPECHQTkgADwcv
gZyuq3MLDuEiOGBCqcGm+uTGZ93hCVcRwD0Nw3d9LS1dXw9gZoFc00/au9fXfhqWLS4qXotm3hHz
mr6PLs1mXeD3slwU5ApdLfy19rsm23EEbBEhPKADOuhxpS3zGlrJFQCw3PJpus/enNpOPpSY2x7t
Gdw/vRQ8OAeMqmKU8oAYn4dier8tFr9VmWVMCejhOMtDgjZo5jgcPej7TQX5xVQ3mq81wjDDQpfS
nxNB6I96pmnwrVVZr8BLW2kLY3bLv49QEf0d12OLCGCrmI3IeB6k6p8CLgXbRLpJLVjXkkGyc6oK
vg2hEe8qlx4rW0r/6GwgQHBiukOroF1BtwQJnHhQoQgf3yqzUqofS1vSvhLzYtm47D5KLX0yRno4
mRH3+WQWdmB0OnmCd3c2aYrhwntFmlqUIbZgeymQ9YSS8wRlZpqnyOs3CKnjLgxEGzUTCkrIAjF/
rIxyU+x2zDbTXsOItZBlEXhHc7cCl4+azF1GlqVyvdkrLF+WpELUWOcEIHDLiByEN5KRf+YCqEzR
bdM1dsN+ngGU/ZS1qFJuC3qjkB+avhLd5PE7jX3Lbqr5IU00Q9vAZIogucqGY7mhId++5kTulxDT
71sMEjWb9kK7XimlsT6NrEXtRoHvdpyBfcNyfLomfyvmo5uQ/MgsNwaihaSpmz6/pMP8oS7284ob
f1UsmpOHevMuN84gw0CDWD8WzGn4mTTF9gJuVhXsK4TLgq0G/Ar+JM31XbX+ykVfaeMB2H6X5qYk
ewCbceG0q93GQ3DuIJp2qFD8BnujZt2psWbbY9rc0azTD7azCEM7yZKslQtsc6OBNCV5JEDWi1Gn
cTuKpMTtgbeSe9+o/VRQAk0ZFZdDWLRtnYr4t5kD/QoM35BaA3/I/MtIgqietGnivOlL3Amq5ZGX
iQzTdAKfur8J/AJuigVEqcEewrx2TehfZ1m+nCK9V8JSaiT2kCagIDyyeUzkFQu5asaXNiJaJM6N
zlwjflg2/1BEhZgC5b4WYqB0Dv30R8jnutOASWgKPG9x3CKYZYjkXJUQlAnsyg2CncP2c2FpknNL
wfSBnhswxlq+HI9fkmUF1sGwTwsHFQdeGxrujWCmGSqWQawFvzpoBlG1d4hVR+UF6sJqW1ox5shb
zqHLxqPxU3qpMUUH2W+rlS+U7UZgtpdeTYWXq07QOPuwv9GusQPB6sbKAx/+U5+VlPPhNLszW6HY
ni8NWVkKB265GOSwMOAfUN22GZUK8Goskm96mP92MskZHEZimtAU5uLCIAp6sV/G5BOzP4zVfJqb
ZQo5kA+2kynK+lnCD+y1o+KPA/Q9JgANmpqAc7LEKFEcQVnUP60JHqVVXvL9DvghYIkQ6BNOUf90
UUrvKogcx5BGQ98m6A3EhF7QLb1zmLOl7WZ/+1udwhIcn3c1Jy/3g+UJSB/JADAkDdwIfB2Lq6WM
yPYZq6GPEQYSD9V1wJ7DOa/S2O0l7VO6BmFlFJO6SBpaYJw2DIuIMqrtRXFVKItRIvHUUXOGhp5B
i9DpWO/qU+6wgLK9XWyMj1PAYskRDCCPfRwlJ7Z43FpAA/hTMcROs7uydlTm6oG/csp3IEAv08hU
iwAPmaNSJMKZAu8ZVyh/qoBs5Jn0wm8GZ0lYN8KZGdEEP6kbzvhGhjmQiVH+0RhXGieZhq/4/J3v
QxbTXkvvx9qLcBRdxqUwGQivjolpINf2wosAnBf1BmDW4bg7584o33eDLdbDmqO1nWDfJtbzTheX
sfh6xbnFoEctgxsb+a2EA1uYYw27Gsj9QgdLwhi0yKD61dhvG6WjmBnJWg3PlKequeES8a+CQ/Qb
380XD9ST7dc2gzsdW/EJVvyDX8+L1TSn3vMFvjPTg8z6Lcdohs7BnQgs+hkXcJathNstf+NULiZy
zfVkFZQeXR67BvfaFhEhTBXYGOOYTvbnXlm2OJU20vPF9cWLUNpNy7hYSlP6YFTqTZsrs+WBUoJQ
5A/sOKYwkizlHqyyNzcki0u4ANtKwasWv1+vSmKaAxumFPcSAR5xAQ6H4vn9nnQ+3ZEDNs8dz2Yj
g/JGdwc3ZkDD8Hvy4Y+YDGBRFY6Sy2gWlUQHoQ5rOWZ37BVI3D/t2LwY3g7Nsw/iPbMu2FA4VS6G
ot9Aj3uSbsDvXFxu2B2JxmWvEQa249AVnfkyzPfhrp6+i0yxwgNrMpiirIsHfYvJbaKbs0Wyr/0X
uqV+EFShDfDqt1qJGRpJgY5sxQrMe6mweTez3HS75oRYhk+G5Mz9VoppGXhLOlf1bTK5pNOKhHQO
X59GNAl0SZ2CmC0LslYi9I35rzsgxAbBIQmPyO4mwRAa+oWwh2N5zZyQ+w736VKPI/RHvfKLG8Z9
B9wUoNoSKlv2OMoeGboxGrHilmhAMjZ9V8CbRsFXOon/Yr4kVlOgJ5ecPPYTWWcFA8YVu14NyrO6
JUuiEil2GaV3065gtVaPLDi2it18qDthRB/wsk1ssvWojBjUQTZRvC5wj2jSw3cN9XhpzQEGHeoT
5s0J1jPL/Y58eRFwxQwVLZCYDiiahNZRUt7HfOrrYrhlCMw4WFq65OAy+ylDALaytBicDqG1Z7Z4
r8Gmh5hpncSeSYADWMVk04Xj9TrGMQ6TEITotAlgYwKQThkkibFySeZTh8wLVIgNNNOZzmcPX0fy
bNUVSjwd2O/1k3PPbaXnOIIqjrVydIS11gVRlqv9eu6uYSGscdUvx6AhUXMulGLEgLQQckX6E1um
39wO3MK9zWGI3PL5uUXnvYQHcAKuesRzA5gfZr0wk63y5fFtEt9D74HC+odoIDQUr4mBdzG03GjY
koBxwx8taZDNSZbtZuFV1WeRsEnwWmw0xA65a4B/BUA4rglTRP0V3lhLx+PTz7CA7SCW8dFgW+5N
O/ZdteNJ4dHvHrTVw2LoJoyLvFW8BRgozFx9fGkWrCqEJyWN10svLX3zG/KaURPXjCHswQCnMIRT
fY5xqG0zEsxted2lZnTVGiC59zfkdwz66mH8sPe2j2ldEhoHZr5xcIfoxWyIDThUmzdsRNIYj5B/
CL7IzgivKAmRCgNbIJmrC0e1dNIimxsITPvIYWPhjnZF8HNrckdRc+mstxaJkgZALlO06aY79PTa
z3C0jf0ExywtfMN5ZzvO+aqYGfG9nNUcibKDcCQKuEVMJu4HoqDagIeSE8yikRriglApgVDD55T4
1zokN7Rh3lB2H3FqQPOdBYGC5molagCVM3sCIoYrkE+choGEN5Xdhz9dNH35rlrOa28jkLOG3TMq
8f0rkS+3xbL4BM7TFPoMSQ3K2R6gXZUvyDeUCRQwOSV7r220pHZArbO6QbDg8p5cPBii9hzqnoth
Cpy3uSq/hXRL5uk5+hvPVW+XVZxzyyypOdf7jezkQH99rtu3GVGnEgnyDHe9A45Gyw5X9N3QrbRP
aLfWO5RTpsJrHVybXjJKVbzaAGAvTxKNh4Jwxa3Q9hf8QnVyc0wtwvMuhUiXXQ6zYhhVdGZiBdWj
NhgappFZ4P6wHcHrPSww77UfN3QN49ycqwTQNbnRe4NGg4c9Z/OB2//YFvrAL/Fi4HGX23AjB+Hq
wLYZkcB8BrgYhh/hhZlMiCwmHM3HiYcNZeGKT5aHF40x837Kk9Au4VJxdBI+mSpRBwc4HtMORQHp
dVGsN8v9rZltcuGLIsSBEBRbQxWgaEMZ+hBYR1LfIOZ2YgjGhI+nTdmad8Xt3aUlMe3kXXXyun2V
ptUf75C6Lm79YvPRwXL0rquDuANcMEa0I4RNSkIA2JoW7Auv6VEyiRnQQO1YS41015VDroq8803j
RwzaIBaJbu7Cd7uKHBKiCh7wgfLr4OgD5TJM8uVAVW+D2VLNqOXXBNL57FC73hpG4e1EiaRJaV3P
sm67ccKCdlQdzF1s+C5douMJihj30oRIoLGiLNIBMt8vWhLYcLlcf/TXGAxDG0hkdwqInUqR1d5h
9mE4OyPvbeTeAJBJcju6YAaUnkpg/cpZhm5uNd3GTDslK3lhG5Twh4eC46jQELuDFBiEk1ksq4eo
On7enk3BhcJzPXFGXEEYwef+aaRnzuapSFrKlTJO1+Qdmdg5KcqlQY0PJxAYVGi4xX89MNQZhB8S
qyrQooV01PKTxBHuOf8jOqKEsBCwDI73AWfe0kGEKraMMv3mUxABAKZ/HjR0c+DLFRmaEEoBJh+T
GCEzqxPuwOpiXNrmkIK1s1O/Gc7LLVqB+5wkRF8G26raedlgvA/XxQbA3brPGnxJ7UeCRFoblTsr
tDppIYWWOU5DFe0yTHjIO1NCaVVUntlRKKKFe9yeAN7bXg423Cv2Xk7eduAbRyVyX2B1ByJ5DGis
TefMG4eYtPW1re5LMytBxJ6N1ALMdMXZE1Xi+FzYuX6oXsgW+I4NkkJKdlrtznoDa5GAYcctoLYo
595K+ivTu/SuKjnTJtdP77zicWdki63de5flqPzyIh2KAL2T8shPyB8cvtcdNW1wye3ipYNKTano
7e0Eal8hSOuQoQThrQIQDcjxRZKAdiMDj7znqBYIOFPvk5gfzOsVhPo5q0EahuXxCpKuDeU70bWn
P5Fi6OT+9dxq+WIk9Sjeph6KIMmDoSylZJI6s0zSbRB5GgPE8OzVnrxleOYhhmiYq1EuRaDImOxV
t8HuOL4+5vAwPQwQ6gccQewZazLwwpJuNmjE4AtZ3LuKLQZfbAvQnpaUrn5eFXUmiKuqDfQJPJ+X
Zik+FVsPf2Z6C2IzpkGYmhtJDVx7HUtV6AgqrZWbcs0KAorehZrPxI8EQDHXtXmuvC2VIt0tYqgz
1/Mv7I92p/domFWVeyh5q2J7W+QYhgKCaz9OJYYZoznFU1qbTuEOcnhTnUof5lRJUcabBpUpDTuZ
jhJuFfjAZrZEEe78InDEl1dfjDWOkNeXYDJcY40VUpOXil9unhcVOlKus8aEkifRwwT0VFPeyXhz
pIGYGkwpn4cs5dk39NJKBsqhTZnbEJ/RzvB3QrZDko4jMI3c5qLnDNKcqNm3Hu3NoefOWmULtyGe
xdEbTRYq35eHSnnJOZPrQ15ER4RtCWqYcGEnxMq/buAWBbWnfO17LNmapyBwmj8vL+HB1VWaNzBi
0n25cSHZHtiiTUfWxCkEof265yp3ZOL15fOrZibCDlF1Jk/k5+QZ/UyBVhAkUZEi4ZNhcMjntjmv
dRPlPPAhTStKhDiAhFRKlr+U1g9i+G24YSdm3EWDDMlYy8WBLMB0kvC3x2DUEMQDEf7eUpO/sYcj
UV+OKHir2s6L7YRapfZUH3whxTHjk2o7IZRqvBdtUgXBshqjs7kvOrOI1OKpZ7V/zMvyV1pE2IYa
Q10rEs+kUHCnA1q6bc4y/Z5GslXET5RHYc0+6Z8ogggVNDtZVfa/TUMWsmFLSbKMZMCawMuDtTuP
m2y80c4SunlGqwlV2xlrvXPzLyJJYIRSGEKExL7IuiK5cvNdMnv4agGG3Rkrvpsxe7wZFWcNKt0Y
j8ezEKoQrHxA3e9/5nVv+2jOzHS/FL0nNRbg2aRg4Sm1e9CFq4DGSC4ul6eKWh4ITbHJXkIUI82r
30ImO87RROLFZMK7yKyeCmkA7O89+Ag+bCtlt4/3hLUZnbaZ0sRSJUA4tjbNUZzOQOipDeRMwPE4
ifAzKBGLofzX59xGiI8/2z2Oua78fVptS0PlxwBvIG4run0cI5SGfTW0n7clp6K5vGoJZR3eTeuJ
ZAEeNfkIHQtSTS+yj2uKm29dZfIxsvGR/Lgwe26XqI8yZolmcbNdap3GUW+FGIzNNVE8luR0p/uC
JlnQWyDyiajCzKZONMAhUsr6iE0w+vccsa5ZIVkPO00ancsFOfoh58QrKLnkvTVIcHayCDQLeTnI
mtbBUsQh3zijdtVN0h8LOMFGdVerZsymoufr1wy4Z3hWXaTfT2lWUEQKvGcjHWCcoO04CPJnAhmw
7OPp7OO9zaVQDcnkqtFxrrYD0VrPoHgYp10nRh8HYTUKi0mVpDryjdpJpFteZD46b0MInFiwEIxf
/dbQas2RXK583ybTZbdx9D8S71KdpRt3G0HQROkNowZ1zjZ05Mu6o5/WP63BN6oGUCJCQyJwhbzf
hwL0VroS0+oNBckxa0tTAeyUTMUkngvr8U2ZAgf2ZpB06gv7ZBTiJ5W1SM/V1htgKAe3LEbMqdS3
Y0qKNLEOzaaPJWjJaLVdqtrSYV0q40W509ErlBMxSoSIeXM9ADpI5wyJ/h53k0k3AT+ONZJsW7ot
eNrVSgo48vAw97VsFnunfwJESVBfkYcPu2rzdgeLkbwWPUNb8wX4eWuFdg5/rayTD4R5rOgamlJI
Rw9L9rw5ScCkyXykEmFiDtV/q03ns/pKkuAcXKAlvr7GQVxfD5tiqN8a3reYzg1TCnGIhBlHGmow
Qa0L5/v1zI6jqS2ovSxYYW16As7pFGSMTn242gXCF5g/htFmaz4B3CXwE/PdIy26DIX9AH88LbHT
lLwjUsi3xpX4PgQDa7lyuhTyuFBxein+1TKngfBOXGcqTCWOuGpwZ8DS1C1TiH8gE+AX0fyOK6hT
RPlewZSw1YscPiV+piWCTyFJePFEVAX2UIIUuujBWgIH607ST8aigMPOuoD24lF9huvMOOVFg43I
YppG5KdfwPbEdSrImDXHeOh5HGXnQhcvrz4bTzloxAkQUlQFH6FvVxR4FBJsMiYE7Y6SaRRV+gf5
/qZKhZbgYT4KCwD+KSQ1YWtTiwtQo+GMg8KmuxTkCbvnerBnRzMMKtiplIMD4NBxRjFV0k2oICB4
Abye1tBsOOEDlF7Yx+SExO+If03Lo626YqYst96c2BJEHIMcNL7G3zsCLUhNXlRbM0ZawJ/DfCBh
nOAUTe6LwwDTqnthuP70edbkhyOgV4HG/ItW7LGGyF/xt4fpVfbI1cF1Wk1AKk45BCgL4Pz9jZRg
wvz6jaYMmU89l9utYGAmJxAEUW8cY1c7qaWDBo/aUnRKgW2BCvb8qakZH+w0P+Zf1mGfz6wB3WHF
gk7AheOGFhWb1oo1dJ1Ie2FaRAhdcF8G8JXStAaHSgC2HJjsunjIQu9KtUEC4tWWINWjHp0o+6pc
A6k1FsKJdFF2nowumQE9zj7o2CTdWoI0RkwuzJS48dxA6Jd4fZtzvw+QPc+ysHvWIQHTXz5U23vr
UdGFbnRNs7C6ddDOtOZcmcV8GO7h6ButW9QfOua8sLnrhk5WTevisNYXasZaLfnCErm/hqjXNcxi
2Of+cTnILv5nirefY1bz2JWkzGVj5UdpzNQIjpGQvzWdGXjoRnm/fwR1Md2u0Du/ZZ96+mTkVKSB
3EKBQEwRJmgUWCn4QYww/ALwgU6MEKWjID18KMV591lVAeH5q2p2P9mAsgmzsbqYXyeWCKDJfnUD
l/5CsQPoBLrfgIsWNKTe5HD3rQ99F/PFPjihozaZtLv2tXcLseLDvoymi8Dy8NtjXFmKjSFDfxee
dwfxc5tfhwvgB+0XsbXkMRbQHdDWbsjXEd0cpXuNu13GMbSmEcT1QXZqMwTCDmg75rvDV6/ffPPj
tx+vPLkRPmg/0medW0ok4CypTixQwhp5XKOPB9E8SBc23Bx6teE5t84//UzdHBIYXtYotQYR4eZq
Wip4By94iXhxuXl0SVT7PGyB3Z9PJl3vCHjt6T+Hqoqsk5KnIjWRJ2u5UVDeDTcMJ30971iCrjrx
hRtVsu0vx+2HKMb79YfVCUIbAnSpXJSa5u/t9AF+urb7gPPLVAVsOemDhSmzJ1Kk10iQEzUnttpk
Im4mfcz6RuMyI1efhj0qf7rD64pEp4iU4zL4Szero3NVzT8IzXVMuY65qZe31dbs7RUeTvABRViO
NadF81gzC19OrhaGNJZ154xhq1UQgigCxuPnmaQQ7lPkMZ1fygday+dMG8jAMWeigFdMkf3sDp0z
q2xlmLYVgGU6utoFJEcq2u2csYsGfkN0dDUd/ZSLxkj2krmK6onDye+IaOXgLM2lNF3fFvlz51E7
Ceh+1NalxbQEo3YsuBGCFiFpBdIaNMKBW8B/6EWwnASUyRXGFzjfRoYrKJSQdS+hHnQL3vvqNjVN
kylhwNB0DSiObOAPErFN4xkOq5a364pd3XRd23GrybdT/vU4nmk4SXBoaPmy32RfRUcEX3Wsf85k
Xq0VWEQEYkyFSHBgT6BJjYwA/Ua3UUTToRGYpn6KGallOSv0zmoYr9sQ/WQHQk8iO4mtLGEwhORE
y4fFBCSKajvepmylgPCOEJaVdI2g48GX3FMAKvlJzZw0m7L0qfX50htEMDdQQtzhdbFgDuEqcMP5
UvfCFZQ1tOvJjEHLChI86cSbT8seGEl0UwIwHkpeQN7wUFPcz55AZEN5lrH3O57AVQgKH5AuTuNK
cjNEZWEc5wE90v9QUGfZWX3D1vUzngQyga8xv5aEatNMrCu0j+pNDh/TtdVsyUkMBj6CsnNPAoe/
jFgITgfmAvitWhRaka34w2kHhi1CGYD7I46GM6R0AsgbIZAO8qYMPby5d6JxDrqLrCvBicU6JqzX
Knq6xvnXUJQWM9MV3B/9Bg9opLs4VhS508MMaYh5z/1Xn0zrWBOkg/0Ry6uWwjUp9on8t5Sjwxth
7Gi68KD2IRgtgKfYSkg7bL20pO3de83+rb9qXRvWGCEu/ekl33T79qdWAhvUlRxduP6jE0gzTLqo
6ts73axgSyzP8Sb1rUU/woziydLob+kPujUpQ/65l6jHnzK5JbWD+68V2E5nfiLxWwggrjOqypfL
Bu9tprlIIgkaAZDVKeej0GZnR+SLzvIqv0mwKnD2tcji6Yhmcl1ZpPXOCdGo1kDu75Mz0xzBzu8K
i20FlM2wrPNqldG1XS2c1KR9slBtuOYIIYWRaE3Lwm1BHBIgqtwWtkXViJvJuiKFbFHD1bXiTAbA
xW+21c30ZnlImS888FN0/6otlH4Srgs07bWPT+9MC1845vFE91zNeo7H2fOReIL7AXOKQQelVqrz
/djlVLd8gdcC6VY+p9lBppLRHvnGixEhk4cOFZ/zCf5jtgw8JcnBBa4xOqKPoBmiRFLWdZqXClQH
SsJOelaXi9gnGer1Y46UTzW8deqcyJk5ogJeEbEaE/drdyn6M8I71GTO1WHE3KBy7r0zzM17o+ty
hlCl4I1SPztfuZvKiKhvAfdou9+oy9CpU5uVuGcI/G8RlCgugqCuKOZfcC5zdMtcmL1wn5UrTBkQ
IbefOQAA0tojagwbDSJnk07KsY/RS83qviBvDSPuYsEe/tkbZZh6nJQpPdr+5iGlgean8gXznB1d
ADB70PnF3gUK6ZqAOzTGKcB0yLm3/UBWQWFgOzgPf3N5EN986rtJD0WNQRFkLlXZU7v79f0adBqk
m/CuvDXrpzgjNh6xn3/68d9tDsPJsrodmv//+erj//wf/+7vYOOBlmWWmWe36HDLczRdlrsD6rbn
rKzZmqmcFdtn1gunNpwSIFZ1QIlpln5JHmkovH3zw9tRlq+mB7PAkBisZKmhhu+AZf/wW2XlMg/N
zH6Lr9LCKFgGxqb/0P0PH199/+PHQUO2t5v97SkFZ9VqZa6vsW8ggFpohOjeFctlBQCERvhYzrt+
Ea6cKJUeEvaef9t8GmNIy9ug/Dt5GME4YD35EG7IriLOTd9RPtycDnQz7qAk/WV0+8A3RN6SZocy
hYf+MSzPqDBDzJ+93jUBOQvII2DI1rutQ1P0/I4psKCpjcsn9VWG/qjdETfnddhz5J3Uu2ZrD/kL
0cfA41TQJ3AEDjb6Bz4O/oRGiWrNotuTA4eC+cDaMNa15CgW6ykeDRdXgzJ2T05bzyYusbhua9Ny
uSYHeySm9W5e7XcDpRU3bNSWsCQBMnI3G2Y/woFGbQdEVEPqv0P2w+GHw/nF8CLAx+ItY5ZTflEQ
uOEojXRGkCOzfb2rVtqVRGb8haUTqJg8vt/4F0SqF1tOEAy2az8+TcwXrl4PQRdDqFi1U0lDLQ8E
DjtE72jb2qTodL1La7P8IjbOaOI9T3/V1vHLno6FzqRCdmX2pEYkdP8w2DF6h8EGFkk8jo8kasbe
m/SAY4YXqZzevjgRxCXYzS/eXRM8P/w1Ne1fZhT54i2M9oxhdpLbi4y68kIPDS3jMq6nAUGDs8oe
qnISrevCxrRmDhe4lso2zuu+B1Diuax5ywZXr6sVTHyAGdasIHJOCEK6/aZs+hEmSP9Ez7/DLjSQ
9yZy3nRWAXwaPM3bSGXUhtYBoO5TNzIg1/WjHRmGqYYaiu03c2waG/U67q2AozNN8DR2Biwwhcsl
b0lHJ/DKYPx/1DRC8VVVQ6rvcodwxrZJQ2Ifpst7hOq1qCHANvvNIbjT7H5gNVysW+DldjrguK/T
FCFCER3HZ0h+vuC0gJJohSkGE6l+Mt8patOccQpMI3YeB9nzQXZ+cYrHfut2uZQnoFq8SrmYpKN+
kpJ2+8YEVr3HA+4NLH2W92r/1I37Z2BnP9hJlFXPLg05i8NDtYg+xBFoObcrdCtEwgPXVO37+dso
Uvd5w7scu+rMfQ96cbpCo3tP+4GlL1RLY1ouVdvsJLotU1GwahtDF/upe+TjYSPCDSgRnljfQpuU
HTCcoToyerbJvu8eKSugKKtFZVS+iDbGOrGeyj9EpkdKmenBS6PXmM7bDSMd0QC+QxZZj9ZI+pJh
uNiyOAcJGwdpPxuYXhXBflPqQRwhmPa0XRGGGD4kbUd4HHIXIwb7G8gPF8ncZDgOVN+hxIhmOaVx
7mGhnrs+wfmHeNMhziVw/fi1LxE5G7sTc06mw94dl7NdWJ/VxlOabHF4pKptn+8vdXM1tqeLdrw7
1M+Ukm7AFe50zjpn2UvuSm3+slEqyyIWOkDctHsC3XIkm5QpjdupLzDObcEq2tWAdNCy/xdu4zsC
JgZZd84ZuR59UvTxgeqgN+lFuYpLTLawaGTb3C48wrTBFGjy6toPdhmHEfY7Dd78VAn8dJb7+i48
6KpZfJ9bDuwHAC1sXxl0dYV5ILsQ9qsmSQ9qtwcSCcQ72WU4/qeT0B0sp4diPkE35ULChG72oHkH
tUMI+sEhuNgoiO5BUhg7XA7agVmUP8PFtB+B+CD5HYZM+v1LTS5MEG5cLafYhwEvutL54HgUMLe9
aQ+jmB5cGBOEJkAzuT+oAbbxt9uYwc3CLuD4wW7/yHz8qs0NXwsW6PiehhGT5im4CPzN7LYwU3Ug
/AGhN4+aCT185PX795/3EQg8OPk24TxfB9A9Hv8CGuawbDafFqtq7RQiiVNprjbwbjswZmwAQysv
k+oCVRXn/9vvfz95++7N90HkmislP//6O9KwRWYGhzRs/if3Po98J86qmVDgbSfwBsEoX3/3+v3v
s2++ff3+Y/by/duPmVnN7A/fvH/39t3vAUDo7cvXGYwre/X6dz/+3qYjo45SM+OsC6OHyG58EBtD
RBVAqzigYgNrHPEGwG/7/ZNtnj//9x///cQCtJbrnycf/685asfNNgHXGlFuT3VWCKBzm20F6YlH
iOtoodsH2Xa/xgC+ZVVtiOu3tpGO09C6X4Qgwn2Qx1WNKtwBfL9jpwPtLQ5wqhary3d7FDu/k7u/
zvjnd+Vjue7wPLzFwmoSsLkfzT3+qoTsEtQW/MZqUTMdLC+TZYYJkbFcizs1MVt9Qrk2bcLa2R5g
eyPcZGnHAic78F/wHS0eyx142JJ1nUKAUd2NiAOd1398+3Hy/T9hgnb8/fH1h48f3nzz9tvXrxAo
HR++fffR7Mgff/iID1+oh+9g077//r15/BU9/vHDN79/Lc/+vtPBTK/sOAda/w1Q4e5/v5ye//mb
83+eXP308PR/FTrGKEvT+bxCW1qOuFzCgNIf4PqAYPbgpjDbm0dmvOBf3yVANAgs3qBFe01swaeq
JJdXKo62PqchNpLGuIug8laaHV/2hk+NcNt7+V8/wD+T+XQ7Awy63l/Mj7tf4NfTYXF722MnwbOg
Z7QA2AP+1BlSE9st8lo44O6vasKQlrjg2mVMAZ2zboA6C+sGqg3d4e7Tp89w6p4Od487XceSMldi
c4DZMn8/nQiaNCkNzmg4t9tqv6FA35oYanySdynSfAm1YdfiEQHHncLmeiFjbFe1M1Sr2Tt/hLk7
P4dNiXoYAEnBquMupsOZ7Lb7Qg0szaDNIXa7axvpRgUgPxUVkIQXywOY4Ej3gyZYlBMpxTVOQpft
SIlOn6+mj1C0R4iOn6bbcXe9X8Wf9YZiRoHrZeS2AfeY21Hje97WdUL3oT6D1wDURZhXwIhFM/Kw
eabPQd6fNU1w+qtmUSWJEebjgwaQsRtkD9PtGsNcbooZGLWPfL97Puuq2UK+jOcDzVM0H+awCAPZ
1KVlNZ1ngunNAchAPa+h4jWuLsAkwJHZHtgLFk9SgWAR7OsGlw+kaw9bggPWMobz6fJheqjP6/1s
VhTz7ufM5XvRimLlukZCTCF6ElZNMPG0CcXu2Xj2+GKAs+cuLq/r3upzmWq9PNCR4wfn/KR1IDQC
KOlwnKGvA+uAgHHWnP7NOzheJzYHBu884Wtm+WDtMMXUZlugp6qO0K5hZ95VEJY0uweL37Bh8GbZ
yIVNLRfJNHpHwgUZ9YE98eBdNo8SWeWYU+scTXfFvC/fh7wABbtdkc67xHsHpny9ODcTdW7ug4FF
mz+ARoJg/GtMywJY1twSBYLuN7fb6ZwZkIeCsayTi7xeEFfQkyPlHikqgxqyjsYlxTnAQqlFt4eO
r4ceKMGXU4hBRwHC+tyZFoZHdy4a4mn06nNdJDOSkAfL3MqF4p3S5j0OnhW7YrWxg5cH4dCbhuyN
HJNCQO1qOwVsEstMEN8Eiwo9NjQSh6z5FfTY20AWBHFz4VMzzsDjLwcXmTH6yZi1p3O5HdtfA9RH
jd+gwoSTF4/5X88nBdvipsf8r884ueQHXtoD5hBteiP+cYaZr8wk34A699Bx2l/YqDTPQ3vLKs9I
rwBfasgsYl9AdTbhheV+AOEolUbnw71hGXaQK0zxoiAImI2/skKpTdb0wWusr99i/0DftweNOnOz
NJI1vSiQt42kocjKwqOaV2oW+1FWHGnxIlUXcq5IDgjuH9qqJL5qXOtVTTX7wnsDk2ZnMKrL0ggv
7o8gnCZsPOxN4ocHXxIAdh2BiWMOWHS8ToQkRVqCLrL5kD8SIMWegGwbKtmbl8nJCQmIuia3Oy4F
o1DgaNb3DpP29I8tzT23PynlAzm3OeZ/Tx2DkouCQfwVuryudkZGmFgfR+nkwD+An9VXEddCMw03
DYk4lSb6w8FwV6vXj1FIR3o3gNgPwvoom033gK/6YWMuUQCTcA194SnWYmd6BGvg3FHIczCPLtYn
l9AMEWXyLrFoE2HRUh1tnBIl63aSMDhu3dAEZijm60fiJMHrBzIoG9rM0SbZ7DBbFsMopxpSALgo
Z3eAc+wHd9mj/zV4BJ9IUxgYo2mgQmVivbabgXE8Kc0du2gilPt1ilQm0+oUa8DRn9R3hjdBlDov
5X3UGf9WW2Ee7AnsL/9i4/rJGweL8/Woq4bJAUFD5Yxy/nW0q8w1SyZ84tPYS8tpqagbLAwTQ09g
QhsKm3Xm6sR6OuYyfUMkqrBSDI5ZXKeThFPP/fEGc85xvlI+91tJfE7FgfpZA5kfUJJH5O0CWPWe
hwkAEYIPe156ME2EfYuReRKrYTU5JAa8UKXvi4ISs6g7Dt0GiDFnpw2OuPOAwM6kNWyj9lvYAZg9
gfRy9OWyWOyaGYhgYinWy0tQJPpDcOQ/AsLaaY2x876hZk6mkzz61eR6cZjD5n01kU2PESVjit6R
dsbyo99JkGsyyBsJb5NyTJBS9k7H5HNhTY8q0GZRm5HEM7tTKQLCZ3RR/tjdKRWoiykFKIM7lfdr
Qh5OCxq3eYfZ4EW6nlghEl6NN8mWvD/NYnEsnlnn5X5emAPhvmY+JgIqt2yd3V3xURAGSy1LZO2l
r/h97OMGwUgc14SPLICirI2Eo+asFfzDfzEb4Idt9Xg46tIs8SZRNjVC7cG3YOnDHw1JVTmZ6ulu
kzqDqktfB7XEaUsytaleuL35GFTE7Y6LjkmbGCScmwjNghMQtiaTS4FBewxJ2SOLgCRCga9jsd0d
cjUEhPjD0Abf2nqWOe9myoYsPLyH5BjEroh9g2zGHd9rk74NH7O2uneGy2RfjzrX5ofmRQ6SeOFE
rMk0jKaIEA4SIhU43yf9Fcyh5HYeZ3+h1FLg/IrH+pdgFwBR0e7MbXCI2j9NPgEuOSciXQsaGPe/
MYGCj5cUObdjbesbRN/X3vHxmAaEgRh5DeghmKkioETV1LxYtkxPHNTCMLNzkLCLjCNprdcZbOuu
57sM8d8p31abltjrZr/TPJFpJ1xMDyI+pom56yfgIcCiX3jpEifLYt3ou+3SSshodKwCxqE2VASK
j68+w09cnyxKYwJnhL3Fc3tqBpmLdIAqsVsLapyoADRjFULIQYK6yCb21mrI3bZgKcNVqPc32E7B
+UoMcVzOzSwPsBlKO2gmZgXyDaW3H7ZZ5HXCLtagEQ3lP4TxDMz1ZyPDX+3X5c97ykyJeEyEpMCB
W6yO5y0TExWVK6Sjm1WVZnbQSFMaiBLTo6iNA88mXkm0IK23VUaJXWG/6utLNSlaKmRZJd0e5ILV
sbfp5OOqcYlV062jdQIFZ7rGDTeLONjynYdpLbMBmD5gFckhAJhdIvsNd7S9UPDL4KKFbwQKyeuC
nNFnQsqD74GOnjCAGmOcvHtIHTXZMBgIg4bTW9gehtXZTh25Miw73I+omuI73/8SlhcPQEovULic
6+rORUw3BAyC3AOgaDby2cKI8tOlnyHDFCHdrwpD91FeORUExC9u6mI/rzzgVnH+x8+6PsIBy20i
W76y7QEsgTkJKQ7QCF62GmKNUP+OaaU3wLS5XMUYM09bG17X2i9F31rCcN8CXlh1j634vBPZIQhL
VzqtenXG3erSMvlCMdsr3/EZsC8hLS34TJD7RzF3LUEKVDAmOvI1VKFTGHaKJ9HjsLr0gtnnlxzI
GhbC51zmraC6xMXkFZd84/DMwpJvfFjsN2Wyb29K27O3JIdFXzSPu0EiYIoAIyy6JCdMYbO3USyP
77VtCn0xTjOMgdsUxS1MppsS7Lp598XweZdSy8MZRML6BAVwx6kOEujyXce7TjYHEZwg9S2KnRCc
ifsTvRQ5zi3VjLrd8HKCi9QfHu9iM8TPuKjNEJ5s4VbO/bAp8wudUiatOJ7+bPeghoWLc53A+ZNM
03M0NaGbWHCMJT0kpJkiS3qQ8pYsw3Dxi5+ZmJ180d1P8Bz5DlMfIL7BeWzWCnE+AEqWNymMZFUv
ySS3tUvkJGo1AZpbJzOiuMeXoxdXnQC4XestzD7mCcWxj71FGPO/J+K1coo/y8GV8xMruu6O3U8h
phmhM9LNgqgcsPq4zxsuA/p26jaYZqPRuWQ+nEtIByaZRxXXrg45Rbhat2YW/I3TLlQF6QKPeP+F
yZFAWLbOxEgL5nk/9bFHLy/QfWLICYmLvwFpbkZdCS12/CLRhbtpfddIF+BlrtbYD1nab8Jq6N3Y
idKstJaarIpVBTcfXM2S38nMn0tXEYASFo/yGs7BpHhER0551tHpSCgBs3/gXPXUQeZKjRJaCl2c
K10+vxpIA5cX6veLq0bffzfUfnKT+V23Zds2Z61wWPOEBdT13zDMxWNCAdlqnnPT6ptJgz3eNONc
vd9pw1tOjRmApesUtLLbSSChzu6m5TpFDgIMJYZO8qQk8CZBnw34sD8eHUWZbasK2wgFTY9sYEd8
yDHWNkPzQfZr0lo37Tlsqhkei5vFKy/QLVHNbfHJyCPa7CQsArz2snaw6kIcvMOkkWj/OZjlKOGw
HqAGeHszFA3F+7DPN0mctub1NZW6vs5Y7tJRdtYXens/fGqRBYfpIFHtEAw1rEOy+f2qmFWY36ch
rlGyY+pjAKoF6kpwBm0GT69lbqPfGn8o3/FqJqMQtToqEX/ozwxl8ew2RN1dclo/0cNyklFtQ/LX
N6A7xHztohXFGXeytIMc6uhYYnwnAEScIYHbYfZtynk8veX8NF3KDWhjPEER3ECgoXhzDHvb1niL
7gDpTRJvFPOdQZZ7pQe2mX6/MejY1PMJEkp/955Q78+4c34sLGUKJHtOhUIJttcHJlwOx4lotRLu
nVTfRns0eCEugCOg/dAnQzcuGkqkOEmlQkRp/Ab8GSLs8jQDcvlIei5rvAn7eeVtb23jaOCNTrWM
HMllFN0unBDHt8LeFAIjjeZVT+8ljc4VlfwIBYgHRjKJHoZtjVCyG7fKvI/MPYbzM8CJM/cvaBTE
TZPVG8C+9dMk1lOATAiEGhwc/Mw0i3JN2Dk+EAKjQLGct6wTBAbJBWTwU8rNLC/XYJdDugeRCkuV
tpQwaGC8a4c7ywpSFglRh6HvX84Am7xv5aV4ymipkF6FXfeb5F/hdSvXKr1VLO5mu18XNjjFskXk
P/X/sPemO25l2bmg/zXAC/RtoIGLvkD/OMmwTFJiHE22y6aTmVZJyiq1lVJCUlZmIRRmMsgTEawg
eSgeMoaaXqifoh+gX6Gfpde499r77MNgKPNeo4eCnWKQex7WXuO3GhljEtmFYQKqFlbrcxbtWK98
qrfc719DHrZGT6bTRV5PSJ9mT7UBMsTk6Bsx3uCoDeHZnDA8AAyLjNSxSSR0iDjdzue0TpHMRFMF
EXx51t4hIbMcFS120k3NjIrCJsi9IagHdHhdjKc39KQBmawZxnBIhK6GmWsTCZ2kQBaNmnMwSES+
rh+C+8OyYdQNAmngEwAUdszA76VaI7o9jf+C8Xzdqg0mqXpIr/PmxI8fxrLe7JoAF9gjlWU05VbD
aZHZdiV3RTXEg3Gr+gBGcUXG/GqYmJD/9faG6DDRf/vuIAz1g7MX+7sHM4rvohqrnFmp+8YbhJHM
eXuT3qvK6fbE4ATUa11uz84z707klFUE8LKtJOGkIGcBw4rstjdqOf1vMBhmJp3TZKwWWUpMT81p
v6+5J9GXSBSQohTKw6BUHXCjmFUZuG43XQZeFad1Fbl6Nbdaet8iLWf6mWIG+k25MQpz4aTHcL7h
JgfmW7uFjSSY57ACuZLeBzsQqZrHwdVpKkpn1O5IjaVCOKeAgCa1TCAIQyHyQD56dNxL8jS1lyJ6
I+QG9yKly45tJG9fNICivz8i/zBPQgaUCSFkV5T0ozx1PmPuFIcLFLogWFVPZ+QsZJ1+Nh8vTqbj
gTcz567BAExvz6e0pmdR010NQEjpvd8M912YctgWdZ/zyZa9q4ZJvxqNiDd1TeVEsmHbyTLsRfyW
qLMo0LWX8qDOG9oSuJ6e923yVMx9ut296c4mb2dVDV2zZMngYby+vmaOl8gAedrRMUP7y+rGP3ii
+uOKlNAqUPxhJPwtfhbrYu4bgD82pZh4mzYQyiQ2i8cBv7USX5OoCxdzjrEuZQX82KqftR9a4X2L
sDJm+YWdzt2KW+cCXeREdEPSsex2hfFcKpjqshj2gUVtV3DEYDl4OpTR8mENVVablU+718Dp8eY8
aAM6Yxam2RFE6bPzKGZbOzkEyKvFhdGWGD/S44zBaCWK89LZcMh6BA9LwVoLAdeCTuZFhIJOfgaL
GXrwg5xGUW8oaqVarPyjHfmfei/WX87BhA8XB1L9jFM1Yu9sEAIl+r+W6DcqIaNGUVVcuQTMNCTO
8mWKf496VMVnN9Fk+LqTF/lpuUv6V+LFS6c8Td2spIapWxMOB8+b1rqLOcjY9KgRM49elLzmBNWU
cr3I4fvTFuE2KvQKCU6lE1glBVSg+BBvTdQAiCt/tz0yTVOldj/701/2SArpiA4nBHKWyEd3hdZz
DQWyq5J4P7qu6aKexaE+irqjbbToXT0SrhaabpDtcl88Oa4Fatndxsxn8cZbvzn9zfm3fYt6pxqD
TpwnwbVTBkKFWhQXRYnTHvsfSG0WkDgN/asTTiGBxp+8FjcWwwvOzoD9E4WLq+bDOgMebzRiAPTR
CNaiI4r1qkPpYlCF/bSRuNV8pM3Y82RZfY9Ckrhb5hN6GVG6YahjspckjMdR5Z4hyEOHqY6YPAUB
ZKEUG1NPG5aloZVew+Z8+VUNEf5OLJv3iqc/afqcwrIW2m6tywL34VA0rG99AhEkakDjsGrMWg0R
qnrivKNi4OWddmorToZROBoFGtRSLcZtgRNQJIyaCOr0an1qKAewmEx4XT50/CMcAH+XR0F2Pr8l
/oqodeOKETCS2Fr2QDyIQ2N9eLA9jD5+TzExauypVhSEkaifr4ZaoCFpbnAQEXhAIH3IR+3e1IFp
kNNuOt1T3GnPBFPrzvIS0ZlKL77fHhSkZxsi+AJ8hVx57RRpyAZ3L150ErhhVNKhA5+KMHs/6/aQ
H3HdPR3om+uL54eNK9GHyOz9bb4gcUCamMxRp8YczlmxJNaTiZMfHHlG1kM+mhdEc9KJijXqlxI9
u87CUaeD+PDFQFQfSoVjCXK/1UC9zVglEdMw6q0Wkrqje4kLTT8FQe5Fo3poXG0z94BkUEB5Ui2r
oeZmA4Igc/8AdNtRv20+IgLInajBbwSmwAEZOCAy+jpsOI1b4J1Qv0M1I2JUZL2pEk0E2QvD3yU/
UjpqXtuTE4bIGFAIj1aK+bKDUAmB/thZGAPEMBsYlYzUaSt6I0NEsW6CRZglTpS8HfzPEP65dVsO
7bbo6+Z2JlIVchqcWvo6WU5Sf3kqmG6DuN4Z5Q5td5clKynuUfZLtCiXp/BHj+i7tlhXOYbj0VVv
4yvIPRK4AXslcjPYYyK/+sGPP/44YF4y8Nr0L3kNqKF7n3utAU3qxUvSb9iJXLKv7wyjx3JofCsS
q+aiVma2vUHzM8iyvsQh0jc6Rkry0+s1Zg+9VcN+ylEQmyrx6K03VY3E4G4PBnG+E74niYui9IaN
TOz4z6BCbaI2TVWaCU6z4BgY9O1D0ksvLefpuk5JlSJR1etRHqgx5yqdTVl6uihEwcBvYFWma2lk
GEU3zG/INOsMNmh2dlATaZbKXUOnvqALUYMW6fVa+ywqkQv/EtVeIBvXLInEd9FRkbVZhMSMnI96
ofoUGH6QpCcXXViAYQy6b7LDUwv9rD1bAos6owSEQEjQjRynWzfuKMNGPVwCl7dBcN8hK/+z60F2
Lf2i/g463supGLZoqKojEmn62ckpGxUpI3fM/Ow+g4R8G/GHuw5inaDI+phVlNm0mnsnsZ4cnbsN
o2HBs0qMSkdUOx8Ov7jGP88sAxhwyKH626L5Gp48sTwzCczJmwPQhzYu2vJzze5UdZ/F1PM7cqs0
DGwTcbSiHI/PWRIfJR9if/xCU+bJRXR+Yx/1SKCvLb49bUs6Tdh388qG+oa9z0Nq9R38V+MW7IS2
GMEDAYtyCXeGQPBke65D0G1Vk0ck7KisWD8xPiHxqdvJO71jdHa/4R+CNSWuBxb1WlCBsXCcIlyA
7YArklTh5+NNRx0zWVHM7aDqP6qLtCBzGdc4caFC+2VXiJGBBnV0M4aeo7pwuyaEvphjfhfRqHGl
8WSzJX9banqG9J1yVLMDX6wlpvR3MOJqu1qVEvZ2wulh4BFco+4ijpesZ0nHuTJ4YM2WyRi1TLE4
hn9HZLiKrLczBFPCRWfgS9QjRivhrBa1bKENQMO3DiPtw4JBANPdebR5uPlkXlZFN3q/EaoSJ3D0
hLK24iS++7ffjF68evfy+Ye3735fby0+yHCbcKpdmHbveI8Ba30of1zLLVpOrfyqvECKbTA+8k4T
wQ8O50ARbRBRGE1XRJj1lGs0iplh1hOV6AHbkfZF4uuR4l1FqxSRh4iTEalR+pKLjVxvL2HqVMHP
GSXbD4G1ZctkjT2yY/W2PCR3zFBLs7CSJ1Wk/ZV30xDl3j5uWamlIPpXEStP601PA93MzEhe+7hn
2VYaqzbJX1j5QVZfe1hMAZKpK+fgZ3/8Ig6n7ywZdcbGCiO+Wrte5U7KD2rERFu54TS2SumHCTGB
CvZuGWDGSiVEKvkqg9FCdWCNeex3Vgi4xM2J3VETUjTwo8HjuoMQB89Zrfze2yCH2FmcJMcKFUuK
27Wwc+MKgQclDsYM3DbQHq6yC1d4PLAEDaVu3cJIp6TiufY/2OHYz3ATJh9vL/kGNEw0MRwV8JZB
Ug+jvKwn567Hogaj8ta7aJGTKqp6v7coNM7HlfjpT2t2p5pwcpvGA/oSF/3hMJGxsWnNrHbEnMij
6+O+PwgJzVF6DoFjcehoO54wd/CiYlcOBmWm8JR2t9dmHxKyjjo/zabjYPsk4LsCnwBdnh4lQ6cf
/IJgF+kFcZOE+4+HofsojlL/JZeOfCjvqLj0EXbatbWpLFVz3wSGJNRRS7alVOvzbmV9Atiq8x9k
BLb6TWDJOMBOaRTX/4Nu4ylD0uyrYQy1Bn4fuIVe8+1L6c/ucixarU8/ff8/Y0pvUQ/mk8UUwc0/
jT/8l//hb/5GIq0wBbR+3J5IUZOBhNyz/C8aoPUd5hzqZ9+9+u6lQIVx41341/pH8JncLmcEAVBu
N5jsm6BQEGWdsr9BjY4GwnDwt3rNS6ANv6HSQU7cDdVndR5x8CbzB5szc/X28no/Fs2g6UtSLS6z
TrFedyhelXx91Jta30OqjJ0dyrAlrM51lSvGHUHl+CUSSZDiEIip1C5VGi2WsBrQzUNdFw4frFy6
ojPn6qNFuz0ePsiFiIzS526JPkovxNb3s873H745/KdO6LqmIxuaYea0hbhf/azCPOUJr34YH0ai
juejZXGFtoGU6z8ncBraluFU9CW7dPw93zVMQ42LS1mL5HTCouJ6cHY7ZblhMXAEyMajChaZ1y+z
pwN4LBAt/+Ypqn4xQilayT7//ARXptlGqjn2dJElRjq5+gfZHxiifzG+kdflsrDC/B6uYw39SQYs
9y1mJeNddLUxadZQJ9ml1dN1S9SO+zGhr7Tk2g4l/zbtINzx7e04mGGtuQIaZLaMfo5B3F7yfS+X
39D17HKpfqb/0jHUUxGgPMKX6i8VNwLEjS5obv31E05LrjNSoKS79AN2fl6hP1Pd44ZXQTB9wx9N
R7zJ7s/IfWmB7BD8NwJ+om2C/4Zf8ymgBTFofDty1bejFRugc4g3+nXNTPpuQH03BsmxRAQ/JKUg
hxaccIk4pw46HYWvDO9JpxWS7WG8hz5tVfRDHvqo1TvoNNRi9BusIyOQi9kNb2bf52F1GZgw09fJ
9/8J30zxifs0+XD9X4O30g9YovSn5DnNH7WvNzAGc//Z3ZWLwJp/2iKkdqjKqf+MMql8K1FFCLEC
fzOKYqXhlFrBq7VcgHJcPRmUEwRecFOYFmqkIxqNOr0GXxsunduy3V5zGkPTuIPF7NyW6VcNmqgU
dYvUyxbbaiOgBNJuykddRqXz94tro7DqxRyYYctQ7hpRKT6lHMbga1JZfKr/hCGdxF6IjgJbqHWR
BKyMPJzCSihDpCrMvIfywrA1lp/Pvor1FVzxMDYJRxHpOMuj2XHakmqnOWvS7GJjiY1/vylXrziE
zgIGOpTMs8356Hy23Ny6RGbS/sai48QQ/7vjnsKvXToIJ0EWasfG6281lcV1oxY7kHYDa44Z2hy9
COG/u4Y2n3/e0PAaXu9pWAosPm54aJUlSscfGgYpRHM0WZCoBf/lrD+KdIjfjCuXfh6utoEnoobd
/PB5Xg1dhnL5JFd1+MhMHxqWDgNz0+yU4WGb7AIY0gC1Gg0hOCZpunvdz256TXolWi4z8+414Qrd
1Dxx0jreu/Wzq308QEeYprpbcHBlDzgb+cSwAvyHPSrHu91sZIWaLSt+8eG/tdFQaKA7sXZrtF5j
4/McD0RXyu1CWZOSvdDzl85JXLCOq3PbKVFUC106XMbuyKwrrOW8ZsWZ25tTAIMJkuMfiyXihQyj
LxpuEkvdxcbJ21Et4Musg0vLoecj26ifzSh+Pa4Kx9dCmeDvhjHEdXx53+5vGPqvXGP6EygT/N3Q
Lr+6QcnG4GfC1/BsJ+4T7qHEXaBbKX7HQV1AZNBbCL9zeSc33luIU92SkKiZoeaM4ps9/45Fuyf5
P0j+OowCuYJ+5BcPfsOyaIjkQoxjbTUiBtZNouq0WozkhcEstbCOvskn08++LRbl+kYY1qD5ntkF
jM7FK0ZhPfxxxzOiRZDNSSLIEbsGP2LSaUkgjKHICXEcPdW7T/uZ0mPkyzP0R8MnmjMSw6X5F/p+
NMT/amIAPtUaeaJoggSYijGvV7AkJoEX5jfmwGZeaAMBxaZE45tQIoIPD1/lVnk92CFxWM/DbDSJ
VO8EdWt1+5+4dTY+GVAXxbOTP+TTguXrmvzd/A40VtZx78avonEjNGbYMrda8X7WGGT4zkiSs4rm
3b2uHwrT0bUuj61IS35rPULuZNUaYsn4jaMGMSU8/tsK4WNnC2TnuglYwQhxT0uQfIF1QLgIYJ+1
RSxI2ebu0mZbw/DayTZpx+7YHtZJtYfXhPyjTqHc2bw80diAeTlJnV4qkj6VEkuO15ZmjpDP3cit
jasPMw41H+GfwBDG0XemBKN2RIqteRbE39MRpWrJcUmD1LXnY5GNIJ0kTrxtUdNqmruq3K4p1OoU
te9jC8KZDCqAYur94Q19AmBMPnfUHryw2DPRKUoHugEqB2/LbBN5CxA9605K2R/eGthCf7WFwI1U
kQCPAHC+jt4REWNS5fVxCdLm/7D3xpbXu6N3SU8PwSPwx8Y34u5X34Hv3uniy3h7P/9mzxZ0tf/j
L3ZTANKum967i664uT38SE3WR8wPb/c+h7jcv4/v6FkM6LUAsXgx5gvLaeQxuu7phbzWBByGzG0e
QYzh9exkHUt6OvAt2W64o1gdgjX4F/JZptJmESigpfNxGTYJXzc2yVVsk1ja3n064B381zRCRl5b
i37voYrb698DmeAq6eGE9wyhxNmb6AifVZuniGsd93YrtNTz2kjBnAYYozEwEiOMEhpvRhRgGVvN
Xa/1kQp3oDUTJkzEWuC8hoEbVfwbzS/82Qwn1BuYerAn0XM2Yv5E3hb/nOkf+55Q4i8bDuho5BBT
zmdT5rpjADMZQAPm95qdX259LccEWE8/NzyY/Pg19qQR7UE70bOqL2hjI66BeAC4SE+CBde13snE
P7L2E0VKnmBlgtbcnJhNunWtxUSvwgS10DG1UPEbWYigqxylMV+oqx2bd1UkDNj328a5c4yuS1er
pWe1Yen2aJTGhhw33MqoegtGrOkaMZc7cQfd+9aBy5jL2VVzjSmkOb86ouzdCE9RTMXA7I+/8c6q
WrHrdOQUVXu4RiNuV3BGao9Uo5+uzx/aACxdF3RUQQvleQ4V5yDjtdXNvO8QAD9Nv//PCg+7LiYI
hv+p+PB//Ke/+RteLQTfo3QDjJJfKUYaRao5XM9CbUQCvlD3fND6YeZrgbtD8wZ13V2j+aAyWY6c
peUHaeAdDanwIHJspBcPANJUwLkT0HyxL9zPfvoJXyW0fJ+BtM8U8aefBk6dNK7cEL1zHzkiSJXc
NTSZF+N1l2rTR4fq5qeIRd8XRXa+2awGDx9OgXXNOVNkXq7PHs5nJ5gj/KFWyM83C00TyoCRGkOF
LhUyKhnIrIgwc5rIzZP+rww5KedTxrhCcub6la+OjFOh+62aIYKcAGN1xE5tSApb3KqCIk2wldid
N9ELSh9uIIZxpD0PkVSDprnXKzgk0EJ8EFwu2EQrWCXXP3thztdisjsJuxT8U8cnMSFVTWeQRd/8
hRuKviWGEljEJKdIUEtMU+j4KmLcTz9hrRp7+dNPkoF8dnaGezjOXkhnsOeyIOGxmHuXUFhy3ItR
IbmHDfy/7hFhNJginV6QocR9v4PvnavTZ8hcxZ3XWrBDbB5ZNKDPHke9+1ZwVuOFCv6uF5US9fTw
a1JEJ/cynSr6DiPYOQrhTOd34CT4pX5GpxGOkzDR9zCP1VS9q6ZbILT1M4ceDnTMe8HlWqP+XDxK
5JpOpUajB4nmWnEUt08ML4e4oGPUspTg6cirUhPeDLWJyP9DmiOpnD9GIHHSC+6XfAwLcOdkbMEP
9R/lJ51xTKAaphzPg56RwI+bjHbn5ZVmqNl7iQLrXdCBXo5oV+oIJvtvxx6QWOxaM5+Ods8m7d2Z
niF1nQp6cnJgKvb5S03YnP/j5w6yNqDIaylqwT635uvEexvUsUW97qFceQzxoWxdJNhBGcPcrmWb
tZu+XHZj7zn1ro2hzUWyR1/FqaPlJCViXStNxNa9yv161UHDY0cCOpg4OZNoel/551E/Q1LlYndw
tBxtTaKHH67JTUe8BSNYrW9q+encjpTAXGqpXLi+RLHRSD5q2dHIlfYWIfqi6doza2RBBJVnias0
HZfEyWt9Ov3+f1LWfgt0Bz98Ovvwf/0vzNtPZ9UEnZ1uOOO85KAv0b1weigsatbWim1BsiY8I+by
ObUE8PCIiw4bJJz++KQq59tNIUKPCgAeQda6ejkvaMqr7n2i0fxKkKfr8bI65ZxXkkSyFaS04Puc
+U6ouKabDaWNRI65iccZ5TvtLXYHKFtnOv/sBDPaOvkQl2xWkSg6dvkHcfEwh9YHKP98XBVf1/kC
Z1ZyV4XNSkZc80t+nGtLzaHRQvWcdTfWx8pUbhCSoCqzeYFhsDJr9a2Vu/g99KsdBoCYw4n399dH
LiisYQH4iwx1WSqUuVxcSmXGuU9wYz0CP8LJoS6Iv0igPAYUfFPC/YAhrEXuK7drD6c9LTk0H6N8
1yyeBghCcWu7M3dx1kS+Wid/CJRcwqQSiemMRrpjo+pitiKjGGnu0r5TaISFYqvCeqRieoph1C58
+/2K1rSTyPrCdXakTaZkFl4pIYnHar3gDy/gh8aOXM0dfcESBXKX1omBfe2pUQxWHemtQC947d11
lAuPjb0ux1OTxKF52xgbx0y+PSJ8NrJMJAAxIgSCXSkkcgokPh3jJZm5OEIdbWTbmtOA8V640Zud
WmjiUgUekOwhcsk4fWkvmBu9fD7GvqY4Nroq7hub1X1443LN5KG7Av7vOsoVlxO5qkdl4VVjS7iW
vmZbEv7QQeSCGGI8oNNdqd9nhSEvQSpuSQet6VRDOhU6kSdXxuMsmEMRlooOASPyJdehAz/iiGrX
Rp2kpO5uB6+N6PfkAWAky80Vmv6mOYj847l7vtupjtThy2WjR6uLdv3FEH5w70guA94F41JbYjPL
EERE34JaDTmq34RGvZFXZ4bozknq6+ETJwhvPQzSOMLqd13qxhiMLTyHYTv9LFEtAsa3pTs0thGr
E5vhK7Fwbot23VB25Dlsj0Rp1e6lQZ9Zo4VasLmQm8oSyqZkkrvmo3X2m1JU2s7K7x4aq/AIyKy0
cnMSH3gq5CgotOj4qp6u4ADI59V6vAKRBoGtMdG2EVvwHEFt4AL4omR0UdzTf1LMyytr/7jyh09P
hv8SH3H/VycYULNa37e4AvkA4wYdF4YZ0bqpRnYLqUnhOfmlqvhMCgNSHI/cEnXvp/oPvTV9nG37
zdsPLwfZq6Vx3POeie80+8dYDOSNIZVtkCpW8/ENozFzlpbBx+XHZTs9BqEUGFHYbYvVd96jQFOc
2ZD5qDrcngScmOp+8fvZLiTnmuW4ofHB7eN9+e7d23cDYIUvlsgfNSzejsVaB+sK68SSqzmYu5fC
A62G002I27WZuuSniRUcpNZk51mPaXACUb0z8vfr6Linmih3RC2APusgQ3rSQCa4P0NS4mMvTX4T
ZLn5OY3aVt8DJ19vshhXu11ZZNVRDuhK6VZ6Y7BI08bURhlaCaMlvV5RXPjt66ATGLbbe8yBQJTJ
kUEm0jST611n7E5T+X5ZyGTes/G2YQfCCXAXW1dXDL8UW4alw31NNtz4pCE+9F2eQOHOdvM8XQ56
r0G27Zs5L2TC8t31zD7vyP6jiXJCNwlFKqQgmDzy2iB21sjInRAtriHxTypfj9cgtf7V5oCFk0na
zlYCxBwVPRK5P6O8H2hok0UK3ZL555iVjbyYMQmKkrEoESK0m3sqGZSM4SQbX3V08qSGIoDWPR3a
/Dk7SLNF0cIhygFyWCCZp9ZutS43mBCL8yvfYc1IO/ZRh9ZJizLkPGbEHhNuFAlER64FsaEL9nrH
W0qEqKl9JSOhR750McYmBXQxuXDXbDTTVGnViIYoYKiBUM6uKxO45zZR2ojiRNX3Hf5GnDP750ki
FcNkvEJO/nfjuqO8esFry82+TSIhWLZ2R+7xRlg3HXNzR3L0uNSQgNy6bny9fcbV1epmyXR59rgT
0dbGi+/XXdtMm5HsqpvPdzUh3Xk0PXOsayeSSSwdqlYghN9SKRpFq77H3Irghux73KdwAobAmrmr
Pg2OhFGHkS7uj+WqyCkTyumYYRpR8iKVgMs+WPniIRmYMWGSSq/eyRh8+JJvohsTmL4vLywejFP5
yce91qfz7/8zobIQ1Fy5WJTLT7MP/+tTsnugn5gaGkpGLOhjNNPsmgDbjAniIPu2vAQSSqMnJzMg
QHlrVl3Nlk+fyI0DYWeDohdBG9EPbVS/dFXILElDjPoGpxXGop0l+lhoxkzcoGItJHA0pdwpoxLN
/rhkS3LD77CJDlrpzGfLC/x3OlvjP+Rq25hOJoJ0FS2KIP/ZnGjQWg3joCmbpDVUzYvPqDYtN8ma
xioYZLLj9CsVei8SQql1fk1jPquL7vosrpuCVYTvGcqveSgUGDAMQ2p5Tz5j/jqv5NAbV6HWPwcv
4Ozu1pBdEsIvNCAKmuzu1sa4aFhdcHb3a0ALh03A2eeB7WpDVVt0unNXJWxpVCBRVqt1P7u4imK6
WXksTy16lzNQfQ2YkpA2k8BwqBeL46IbnzIpbDWTKV36npye4M4eDZ4eM0UpN50GQDodfxIUbqeO
aeewj54OjntNHMM+U6iFd5q5Ua/NILqRz3+y6fayxHwOEyav2fgSnk0K0MG9J1VLJvYDf5b24EgQ
WVqUIJi2eXxFESw43h58O4IzOCmBhc6+yh43AgR2CUUSqnSZkepl/y7btAssMAEmsJvFk95OSuDi
uSPoh/6ivn9et14JpxhBb96+fPMBNVb+iw8vXr2z3/z6+/e/76Vcc+iX7LSAibBHynIzW2OO50m5
RiztfqIO4zpX2cUMBI4ScTpQaEe/BUaOhv6/ffni1fffJuqKTWZMcj6p7BCDP4xFThnQmMNNvNGN
q681L66aF5kiYkhUJnow2Inxv/MgGIxRIgnwrAUubZ83OFyqnznAAPZC8v5hpPk7CjSvxaezIMvl
voObibHyHjZG3Z2q8zGidzuelrMPsoNBAOedcVXv4qRMF6yTfrQ81HR26VioEqPfGx4y0lhT9C8V
6rkYhm1BTWCidG4MzhxG2DTwAJEbl3vxETGYgnPwwWzOIg0bc3JTrYpJt6NVOz2Fl/XsQ6ZZPbv6
neb95P86/gDGXE5Go17AHjYNVn76jLFKTT9UbcqMVL4KBypfJsa5AsFo18ri75wxlzoSiCca8j4j
ts37Ydtv7djt9+EE7C+JWSQAh3DwBHSK/gd23FkXiN18O9WQBeRx95oLtOanwHGvbuTwZzhg+CJ9
KkxGut1RkSb0xqVHoKutk5ls15Sjmr7Di2VcZMj5DZEkEGHobIYQEjR3F9uXJ6G37NSXxZU79sMO
rBHd3bRDOLPC4+noZLYch06Cfjpj9gNba4gKRfhyXDHJjFhCN6V+J2g27EuAEdmd9UmnhxHMp0mv
oVMNwg6Ht/Hij4NCuG2Y3wdAjp8x1Pa6bZAfXL/7DD4YvXp+w2CGnXXnv+36Yi93Xl8C5FSf2zDr
T3qIGgtEFT2cKY8O4X7WyHjQw9jHX1qC+umgR9fFwisdCBGQWZtiPDmnVvMYWg6f5knEXEsyaA9P
BlPrrL/vJPNsSWHN//Fx2dkVYHbqnKv41MaNJlnmYJF5UeOY2ASkAP0Q5pTgRNTjaepCdn0ATs/g
b6Cj43a5mk0u5rquflF6wWrGczvp9G4NR3esLYsZEvsD31abac695jjifnZ6G4JCfap4FtSmBNSp
iFMS4++UL46J4aaUYrVDIl976T4JCujZ9FdvfvfsdZdr1TnvtqQCo+4lfxX0TZ6wnrSzNyz0tRNm
greCXmuZYt1+qKP68cXL3w2IeZcI78m6rKrDaXE5A24f1WL1tifl6qbWskXSwyW2SgPUhyYAAs0L
NlbSGdrv+RmTvUjyP1i+q7oLa9TkfBjyiN4P9BTYL/1M6WdgqQlnzCkGkfrIo02hgcEUfwASiIim
7pHsWy5ccZWpVdZkCa97hYeBiFTUoKy66isHrWTg+/CxE7E4rU661KNQEGNoga6C/xJKFI2qrmKg
DGyZ6QS+SKV2vAhGUjsffMZw3ljuQD6Hc/p9ucXEdxlyTLPTm2yxnW9mmPtFtQqwfZRvsFzC6vHe
jFEaSS1OLf1ZX0bZw+55hNkKuBKC7aYbFSxfgvLHctvFFeacVmVxNsge/yXJDKnMw0cx96oyOHxN
6jxJtBXDqjJXZw6UMlkPMaiaWGLOEUDBuRS3fdiRtjrmgPHhkh9gfcfr8WSTOGb3VS8ljaIQyYmp
gmJfR8WQccQEjrR30nZQ4agqPh1HFVxJFv1DyNGjL7hGXIFd/Lm8q/DKXTaeHiOhV3KrDwWYvSTI
X4lP3yKYus0iA8cYock1Fxo15KKVkRKMs879DhaTBOwMDIl8hjmCWC0PfGDtwBDtHPNByACT49PC
dmDlcn6TuVwQZzi3TXAadjLo37yh9C3FuquHrBu5PVjFtGQZuoU6X53PJucCXodVyB3NCalK+fzT
VPLxJNmiI1108l2Xz5jCXeIjzieqGovebeC799YDIX8yas0IyZwLuYNJ00Y3CWV9Aif2vZnXshSa
QpQE3f15dPiYMnGJQ3uUN95Ue+DLeLdMCbyKmhvuWfRJ0LOdEH4nswn9uCXCvcHSljS0kV1d7Ww1
+7qmFVtCa+RoI733rFkkJt6pSrrqO+LbuOUjAiP2FQbHIQqJFLP929Ktz2pZEcrb1rZSod1YTX0i
scdKBirkWQOWxEX6luzdYy/CN2t6uCEvYf+pDZXaA2I+/mKu9glKdTx+Gda0qDb73u6xvdvIHymH
H6xaF84i8ECa+c+Mv8cxRQwFck5RRXU+kVQFZpx0uKgSRYJJSlCo34rfe+IeoMdpAWRtQSspU8Hq
afJSTzXPqdG008bjBod0sl3H3NCJcaBnc3iXFjgFaYRlicWWYTMGBtSn7B7LaXlV7TjriXax1yd2
BEzI8ZtYGp1POdBiymAk6WKyFNRkfR2W0gv9nJMdpqvEppc9iDC300YTbONRHfeXCACu7TFwIMt6
Ppt5emccHoGUiKRlEkx0zjBIdYFOXXku/BkQZ35nbrNcm9srOfp8lVmVchGo1eD0yLbabZb+MCGw
1+Qau7bCUsdxdQFlwNxxhxz4rHoYBP2mDFbIueiVn0UJrVbzbeXJHcus6YupGsphSGDofOA3Bkfl
HKWZyABIMDNDbSXU2LiW5ZPPrlvDvaJWXMH6kTtZF+OLVvIQSp1aCoG94Zw9/rIP1yOKkjaVRHpf
Lss8mJhu6FnB36hqPfUq7BvFNDGbui5YXrQtpbdK1jqwzBOiWz+7Qeb8j+K4nfMx68nY9c+6V941
Mi7Nui3sL7XZ161UQaNuGE+njZYms3zL4soyhrxuHarQQY9lx+o7hnsvPTh9o389sIYsM8TJYrXP
EBElXLwpuocg2z7qZw8e9/LdT5uBXRcWaM2MNG+H/Pk5wI7UpHKT/czMDUNw1eLv9x17MZOebxrn
3DQJO4Hsy3AGn0m2afDQlh+9HyMni+chYsp4RfCfyCdO2zw0VlbKAi/aZ0oDn6Kn5DtYEenEgVcc
TBVYaFz4nlHoOHUPSstdlPKdTIkJBQURtdev5dC04qnjmkgfTgxJwQPCwUsoiShHiCx0vfQPA0is
YjGN0hEjitR+QxVoJhCE1+W8ApJe4AaE3mOSaLPCB3xCEndqmFFCZE6njYNYmkAhtnVTn/ATF5pe
oTZA0aHgSSDA//BJgKqoeTAybZV1T250GH3aSQ+OnmHsvPh8xmtzcoq7Q8pB2oDJGJNKjOlFmW7O
BTGvGK+R9QbRF+0f3G8qCxciYkmlPHvB3w00L0fAIyLhCDqmb+Ck4VtNpA5BSNAE4tXLc9j5eZr2
O+TO3+ENATEbtpouht4JvAd8A3qYMDClEdfkgJ4iYHFafmVJKmkicDrEb2pvhwvHhB8JMYAx3ztJ
wbHidAvU9B7sqqtgYzKBmDuuayfNVkUwv77r4hRdIuQhwVYIC1TfnHHFJK5R4lPaNxwGBItPtmxG
IwLUjk1qwpKnKnVMdqRGQ6NTgr+bsmxSf/UWRFKZBM3A3wmIeN1baslsrRgGJ40NS9wMLtggdM2r
b7SphsB1MYAVY1eFXwuVGcpqhj8GF3oICx2BGK02dAM5Gw2rjxj4BH2bXcAPhvXbBKhpl+N64Kzc
a/ZAJgQclCxqb7yZxk5gBl4XXaKWsRJ4JAieUPdoxcpTBjXa1EQB6+MXyDO0Bl15O1yQfTHprnq9
4xo7XVvj6HxyglNoGceB40wnQV3ZrKZdrtHbFbu/aqWr6/Rlyg2J1+UR19npdzDFep9xf9rGf8ik
hcC4mzrY4QwfWTCUD6THf+gU25b5pfucPtq+om0n0K2rQhthKKddOvCkiwASeUi5kY0SnQML7Hdh
ZY1S4Ba+wBailTnI3imQ0zL7gfU1/UC/j6BGID/Jb8xsOeV+1Y9aw1Mf15bjsUDq8h0OKW4kx4Dx
+XhSRI1hQ8kKXghMDyuP+cNwt+DWU3fhAvFm0VIFG7JrP6IMy+LNZT3nd9BoKY4PH6sID2QE8ihy
9u3qcvl1SvGm+uVZNT6puvWDWp89mnoeuEnyx8Aq4z0U0BdBbHnO616RU7ijVuvTH77/Lxg1Q47N
o9XNExdv+eniw//Z+pu/OQBWeDVTHwp2lT18kv8qf9qpDBDB6qZ1kD3/7bM3v3n5fgAfDynnDCbR
ROIiWwVcMoI0M4c8AZqxKeIEs60DBx12s8KkJWSQjFAORmh26hYcccUezs459BsqG6XVVPPP2IyY
j8cHKOcM5dlYNOC+JkXA4X1gCZeUxXAb2F50cuMyC6N8P3KF3TdUK6deZF9ciJ9zG5Lkr5Jfl/hd
EClItEH9lvgKyXjf4PTm8xu+3tSCMSqKpZMb+5fsvLxCQYks1AxPfANFrzXTs8oGlO7JtVJhlfFc
nJlIMOmiPCIJjzH0a9rLBPEBFdyUINSCGGfjE/Q/uOLsUchSUrfscEh9lxOSKadmExTQE+4n5ir2
yn6/F1qLVm9+Nb5h/Z3TwYiNYLZ0ayPtt9UacpA9W3oI6eq83M6nPunwGAYxRjJ6up3LPiEpWqAO
v49u5yab0mYGAyazKeHjbGaT7Xy8nt/wg8cDsSssB0da+Oqrr8QoyEUf9+XDE/zVezS1PGAZ3C7C
nWaka1xfZuNOChbXCFyOAjVdk2a3FcLwimaM2c2u4KAy++8XGI/uYlxdsGZuPTtDhyV0NAG5eOGy
cncNIy13MEhphvkfzPMWl6V7nb+Sb9EoGlQobGQqvjgcfLqhcvB1yLVrHrmRkAhysUJCMcLjG5II
AVqsJKiVfnN5bVutNFCftGCXcu/eq0TvB9krxByny0pt8o34F8L+RC+SyQYOkb86svhUhQCreBv4
cjrc2IDZXlRn/axbR1AtT08p290JO+PB48dDyx1aXjOsoIvVCR/ABIoubln7Sz6DXxnFLA5Y9dGd
LPsGFeXte1Wbx5fdm35cdjA0pwa0GrDZMvYd+Q2jfjgrM7YsVfFhnq26vZqCnZdnN2rXBB6IDeGF
Uw4hbnDNLZIL5dGAmznO59JPIghlCS8oNwIUbsP44yBfzS6Q3gIf4LPwXuB2IEUZz0EiWsTmg9qQ
ut0JsBICaE7XeUJZz7MOp1OZIHH0NVKDI48OIBAF0CYsQ3qB8YSDm7ANnuBjZJ2A84psZsktuFf9
O29Bp8OGLjOCnnUE2hKecyWhjkE7+1yxAI2aaktmiD2IQz31gDMm6atKB+/wMFvSE8z2UYo5SXEn
qivhnNIbxP0bKUfUNWH0cPCCoH/cLjx/Wm8Q3CDMk4PnGYPYaPiJO+nLDTItG8zV5ZoyKyULFY1w
kIQ4xzqa6d7MpJl4NCrOw6XY3UhAhaSBzpfbJXEkFN93rxKN1Vd41ujZ4NY9kf80//6/KobsWKHP
QZCg/DqfFh8mf6Zo8Xf8ReaKZM/ef8B7oJjoS/Q8ZVZG+JbKZsAYe1RbKLQs9Q9M2bopy7nLjwH/
6MfFeF2dj+c+SF0/rQuHnbtZbyebRJoNYZV9FHsAlutnIQW2m9mcsHa5AJxkcmTjZdhgjMUNJphb
ghgMn/DH0ShvGYUmtNPP2mcFcLzjM0Xb+e73H16+/zD68Ow3qH1arHL5vYsW+PYh/9y2qdyMhhTD
8durm9XNyKa0sEA+GOyFxxoLtVvehyb2EfrD+HLcrlf7Awku7cR90RKTlSlySUidcYqN+jzhlh3e
q+A/Mj28a9hgH1ugfML47+NjBaWaZ8SPY5FW67vfPx+9/PEDNgPyUxuWqTsaTYuT7RlmtQPi3Z6Q
s1MbFoIKf3j26jWVxrJmHPgHNdVqvXv5w7tXH16O3rz84fWrNy/fJ2ZxNGB3rO6TfvarnpNXglwi
X1L60Se91rP3z1+9Gr16P3rx8ptn37/+MHr55vnbF6/e/CbV8KNjqPhUFTMutwBfJyCivy3Li1o0
33cvv3v66Imkw8kQnUYYf7mWlVxDDuLbD1Vf8FpjSzBnDmB8GXSY/Usrxl5ELO9iPaJ8SauLM/iG
MxgGCIyYo0R6cDnr6a94GKdLELRZG8n4sSBknc7O8GbA4LttPnEjClps95qmIJ8CmPCp5MA1cdms
+UjksQyaS8ZWCzmN1qqqfFZ6HoH32+OMyPi9zmkku4Q/hQUZzqnbNsMm2xa7tGp2tjAinvKyQA/k
QrrmGBU49v3MpA1DuU+YTyqOHomeYa1H6LMW6LyZwzvI3qMnHomeqJqS7MVP86d9V3Ocjd5ozpjv
yFGjz8c1aomEskrZ98qF7JQUd81me5ZQa0ZQye8dGsDVC6RYdp0nWiLQfOUgPjTCT2feFFefjNw5
nXLKTjRvCjW3h05XXjSjqfjwxuxWTedNxnk63c2Bn05rCUdpFiv2ppocPTmOm8TfeA7f/X70/O23
3716/fJFMsA/fN/45o/wLR3RK9huUEifLmWNajW6p8veHYL2qaHT5dHAnmT31ME8vnDzeP/2+3fP
X6ZC6l+U6MiN4JRwMEkThnblKt9rFxKRXzgmVVMS975C9wm+mKT5INxRuJw9WHt86vEtM0FPS2Ai
FHrmhpshWJ1gbUAsrnikY0kyBjTx67qBBegNahRmG3KSOl3WFOE/FKzLYYdPSnSA0gZqdEimduoS
Op8VK4tmleQvHy9j5TXnXoJRTW4m8yKvm+canprmq8Wu3s7cwW9EI4yAWz6nwYU/dgTiBxRXHfPd
iir+BdztXiPcfsKTbPexbbrOjeF9iQdulxV095S6qBrhQB1UU6NLxYIVHAOebuM8o2U4EDUt4S+7
bIDZHDiSKkPp3J5NyqsgHArw9ajipWvWCg0fi7JC1v5sNkFFmg1CWJcIQTGguEZWkkqqZG3Uulcc
KA/UJ3bN1fEsO9fO8fyP5zA0Ij9hGdOavsLkxULu1bh+OMSbCbWhCojxFarEdcwlYmZw1vjy1DQH
r5r6CIlIIdNzUoWGquDI8TNKGLBmZTa+LGfTVnDhJhc3GW42NjvVGKsrDCeacXAKhcmU83l5xere
y/F6Nl5uBrh/dlRjOinQlVEYjyk19bzYsDQ8m/KU364w0Q05yiCTtNEFsDuwKRczKPrd2/evfuxU
8nfGUYzYakHU5BymeePpBO/lkMkX8MuU/Jq+HGFgs0s3zaocFLXQVBpRXE8EHCxC28hm7UBFRo3v
8cZDD4sLtMC7bpMP+dv3DY94UUMfBDknZ3k3BTqIjzD9mr98+eOr9x/StOQgezkjrTRuspmjcQEa
z9HafCPOzFk39kOy55LcRQmUFQWpGaZBOYHX5wLOxckN+XEtD3HB0Z8rz14ts+bG5mQzyxhq9qro
zOfOpYuouewqHqfWXnBH7mGnO3gkayNoOu4vhNI5blqrt8sAy4LONlBhNGVdjTXC0q1cn6jY/Kah
MX0aYW5rIQt/nK04AXGyip7tJmyieGrPnj9/+d5N7d3bb943TCyg+BRuT1pJNxEh7FntXvQ+a5yN
71oAn80HUbXdrmc1EQPT9vRYJHGU0w0i7yS+z+7G9U2r9iF6U25mmuOUQmLxXvttwSU5DJekn73q
LLKz0sZDUt4cjiImHbKhiPJeadgtUPpytcHc0Hnu93qCQJ4jQpyAKp4KIbIT+U+5XQoIEFRLShrR
ruqz0MiSCDeN1fpuLHzJGEYZ2+v7ragJS00D4bX5DkSz8ckcqYA1i8r7sili2ewWwSVBdgkUCLGI
R+4NpoWUgQfzoxWdlL19+SBOeyYbGmjuLM5ZuKhW/cH5gTHwoKTOU459IfhCXeMQpG14ZTz1xxLD
Tfm7mNUQX0KsNs06GiKMcMCmDQP/K/ZtzplDjzGSJr4QYdN51n2r4ev9iP1aFwQb0ctOtjN0zfUO
xlfl+kJBxuY3eejLpaBfKZxiu0a1dMvm6vTl4ttFJ0jo+K6/gkFV5/DPhGyzf9hWnBiWJD1aeKTq
ZGPH9SVDNCZgg2/gWUTXnE3ZslQc9VOCDgAzw0fNKTKePOiLlAXtsy34pCCWBZt3y2Gbu8mCMeQ2
Y1GE2KxHCge+LK70wIQTrvEiUCp308HziHB6tZycShe99wPOQr3p8dDh7PJEy3xHqG17zn0BPmRU
oBZACBKrHJ0cs12PurixXI1zHcSmi0HkEj1vSMqdfmOaD5aNrBqtxpOL8VnDVawt8C3KnLslJ0+C
vN2it0nobJL6Gg0Z8vqaf/vNCNifl88/vH33e16BfyXdMiem8brgXYrbMBOl9WF+ySGvspqZq6JR
kHQwHCgPTE/SaGXfwrN5okpMfyEUJIJ8Q/roJkR+FCpakH//tFhwZpt9Y0DFpmOnFHvrcUYDGLO5
D8z1IveHzt2ClkfgyQIUUMXG5KALbzK728FoGdLxntAYRSCkDJLkXyWCGOqANuQ3JJlL0W+JkPv6
keirQQTfcbJ3msB4hQQOxL6N4U3DGbgzIfebDIKUaCwsF7XeE/xsz87re41XP36zXdbID8XkfIlS
PhInEFanpIVTVRT/q8718kijm5rzCOo+7zGV7guKJfv5tFFQbjtPJXTNYgpP3mf6rEkbvMN59lt1
+6IHuYPKi81mXkjGiwyBc2hCKJC/ys5LMSN7xyYNl0arKnwUfVn0GKECW/QQC3iD3xfaiob8oTpA
cs+p2n98Ul4W8tgGR/0U6QHhLxEj1L46aQdW5VdvoyMHZ3oP+TNgmdheq2wnbQIeOtgFNJVTxSHZ
IngM8EWgHIJNFXkP424kGwGuglPXiNIlAG85EK4IT/4CK/Im4CnEwHBSgR1Oi+UMnSOtvHtSkM7E
NESjLTZGvKwR42hNcxpaV42xpIeyni8L2uwh4jh1zcnO6XvDN87+SMK+KUFf/V326Pob+V+9V7ZW
50hdu+0v5/N2n/vrU3t2FGz5zqfbxYre19NVQ7p0aNs+JAG26Ls3aBn9uP64bOcETgc7vt2cHv4T
HCT+KfFDa1KWFzPkzynEIpfD3V23//0o+7j5eHp8/yC/z0h3R4PhMX55fP/o8ONVfvwA6v/67bej
7z9880/ol/vxujj9eH1yAv9/2hEakpZXvO3zw/oGT5BBlOPDdP90ed9CzPGVmiob7XAeg/2mmB0U
TRFQwoaL8cbxT4R/1l5H1+vl8nK2LsnBKLpnRuLpe7kHHvxGw7AVCqiQgrFofJhCByJxEM70Se5R
qzL+1bTCqV8lC5nWkFA1VOYRYA25nOIFI7xA+6yYlqBMBUSefuKxrTEVbZ69H08dV3lSALmeYeR4
WQgqMQJwTkM+W48KKZbGmJN4TDHTYg7EJ/YU1VhlRskS4E8Bxrlhk3Yr0CzNloePgYg+22TzYswQ
OjeOhxcGHYaxms8m5KtL68eHt8p72Qc7Mnrc1woRx1RP50RvEwsa8AIhkpgfuZJ6WphA9SyQhNLt
CUZSk2kfCVQ4mDz7Hv2AN9slnHVeUQOscUDeMjD97YrdlJbbxUmBoQMfzres5NUXk9UWcHCBx70s
yH4KmxviPRHCV8OW6gEgLmhZmkNXuXUMBB3JLzzNs280zIEgEhERFTMOok9okR08+cd/zrPfj9nP
V+WqyAJ8gPA0Ys5az87ODa8Hx+ixI6UcjNAO0uRBgSeJAn2u+cDavxVQXMoaFBklS70gqoTlUqF3
uUB8Ud2jRwNs/rjnvND2rKeDwupPfPVecygk08A2vNhTUn63k7DguluVz729rRhfigwvzF0Jtdyp
punXlTXMFLgRpBDy75aSUTaAm+u2x9VkNms3pi8UINYXVHoHRP5B9hrdUTfEauBxBtKtqVPy2+xw
/TtY40hIDdekpeplPeLi5lMJcUNuBo/ek/xXbNYpOIaZIqmLT1sYKNzhp/ljz8tjWAxcZnQ3x0xs
VXZ/ObvW4IbKOZfH/kuD+ismy63hP+/ewGTRx+xN/UncoBvr0BA5rhu8fUb5Z5+u14WYzlflCukV
HT6lKsmtCPVzY2WSqeNBWiOX3jHlGsTm18VZJHhmUonqM4Rl+vz2C5JCGxUW7X2m+grNImzcm4uO
zTwdM0G0ReEABA6Nr6jZJlHCdVpd1lK9kotKzgjkSnuKjxvIfxRsXMznt66ezO+O6yfupaw4Fl7s
jorYmEdD7tQoWolywNf3ofx9x49FDomWJiN4WTvQmGrU2xk8WBt8lSbzbYWAViLQQ+soETGmPV0z
RenjaBWyP5r21uI3D/SRlIhim15KXDfd/8ias6eQm/CahOEjRgHFsDmMKNR5IN6WGkVnlbOavsDl
i9hY+5AvHe6l2vInHJBO2opSNBxTaQ8JmQaVofXdqwCg6IgVr/jfB+IrSWHplJRwNZt2w5CCfdZA
Wo3eqrJSDFstIMulzL8YTtSyTwU4rasPpjbvJTpD8i28EZHAmdNhLhKgxQi09hhKkNM7RZsNjiie
SM5qiS4wBlYHdi5vWfXTTnl8fZs8HltGyIHudLVDLVkXPlnujEMzAumTBM/I92s63oxRuFmxcPP4
Sd2dPBJvkKWtmciR8HT8pt2reoN6xXsVBanojha93q2OlAecaYIYRkUwxjCKLfmCTNGTUnURecDb
oa8fzo1ifh8/wUr459Hg74/VEcxI9TaGyuArogi+XVohnNr4+8ExNdsNRPJ9lqR5Crgy9n3dtSi1
00CPmWoCyGijWoBUWMDeG2hblPfsM7awDvyJJIID2Z7DLaMgtn2GwwZ5QwLvsmYOuF00lDGbsCin
NozFq6UcICdsFMe5whOORgy9+pGnNubAzNdbQs936tBqfFoQkjXmgXfd/KYgEFP4jWCu6ZF3+Otk
dHa8QRguYcNvXRFWipJc2cUgBZ/eHCj9khQAmL0IXbRma9GDKwcpOsuimoxXwDo4JeoNK52dh5PH
9qULKURQDUt+MHnjAFgvwKqCrcySMxjCq0sJClZr8gmxTFXpoHVL77OhY+JBc3Qiup8hdB2cTPkZ
RtX5uPxTh94/+PSXDk2JfTlk2qLQV3Pq//b+7RsaR2UiYPkMrdbsiDkr82BLlX8xFq8R6s+vN10s
Yw1M3ICWwiIJBiEqRWqPVoSK7nj4DUvBcKfgA37CY5cOYqF9UbNIfXuQMowSX2PU3adxGA/GbS6q
s/BMmwhy2VF/QjVKmeK/ZW2/cRG/cioqDO9w/Enljyijt31c/rWDTJpTjTSfNNJxGX1OsbAATHRs
VCHiJCkeEIf2SvqjeL97HlOaYJrCM3JAezOfnaCVpRgzMpAESCP/zyIghkorbtWpA2htOYdADUnX
VBdwIUgrIhe0r8HprNlRkoHm7LE0kjtQ/p706Zp1KwqtOi0dQmpK67k08VvtA8gPu5WRPugK/g94
bo7o8ovrQ+2b7wKeFEJrDn6i021OEd1k5KBO/mAioUdy+7y+Prwy1Y6bGbbgFBbpQVa/3G2lGJD0
PV3+Fe6p/eme/HLvXrvX84mjadDhLMMWoyYDGU5fHzKA8XngFONknzRLroCJBpsJfiRHSbZktjnU
CQuYmSrsG690QB4wY1656pobiUgJ1ciFgFv6MszaXYrgogIcvUWxW/wf+ltyD9kGKTau196xl6b0
L06DTdtmQzr3OrQdnXv3MG07rwnh4eDyo0AOz0+3XFFeCcxYjm8VtCTgFSO/J/TCCmYallAETtLQ
En8LrfR6cTu7nBkw6a+4066NJXx33KhUCtR8tUTHoec7/Exk2c457eEgb8SwXh5ndzQ79ssS/YGa
0xBFStpq9KRQFpCKxTsJT+YW01aBRIDS25/od1R9vSk3A0yLiYbOdt99/YpTQGbtvwZff/9+ewJf
HoZfPptO4csH8GXrL63WyWxZrmr9/Hq2ebuGUn82FeG7HzHIq/3v4ZfPltje35kvX78/n53icL78
0nz7Tr/96ivzrYzGfCODNt98S7iD7fvmqxezS/jmofnmm3lZruVr+/23JXYAhKyP2mjhKQ20h+dU
XZWXn6DGcGgagXWnL7+wX76mKQZfvMRvbJnf0ISDL7DMV7bMd+UVzs5O71UF38yCLa547/lABXuP
3y7DwdKXnMWCdrmlwY7kq4EcCbpJe0CJSTkfMRqAZ57ewyNM4JhaJwCGWReT7Rp1XPMbJ3wwyZ1d
39a4XJE2F2gjLSGv7ZHzeom89OhXLk0xdfghaMl3sW9rvgapXvWPAH9tcj6bk1sLrioKUyP6ZoQN
VDTJSFSkyVOZ5OxbrkzjAoUP4LRoNYTggmDHWz0tBJbRb9yzuhI3SrBpwXrGgqR3Q3lryMJbstci
CY49zdqnl2lLmVZZeOYzR5wy8beU+gQtXeSVmBZY2RZ+6GTahRpjV+y2w0bWRTFFzxaBOZDcKA6h
KBJFgVFWn5XzzWY1ePhwdXOC8cb5ybw8q1blJj8pHj559Pjxw0e/enhSnMMIDxHttagOy9NDlkuq
Q+AZD71wcr5ZzN3Th2wtkNjLWUHa73OC08VZl+sLKwDTQnIGoExdaHUZie+lZcLtw8lhk7pSLac5
d3J9uOa4BJSeaEzwQ0A5o6WFZtXXYlrJDjLCsOwhxjbhKJfy/YhbQHPxGAFLeDhiLiSoL1i6JUu9
dB0Q5gsPE0JZo/bfDJdcH1iJq5NxqEPxGRggr69HoHI+2qJxcDvQkqQut50Lrzx2R5UyckhvLUFz
xzB09NCsgk5El8EV/QGz/KAI4iRuMKIYShoagn1qsmx9Q9l1cMuj9aUAYqx7yXfVgcDB9Oir4BQg
QgWRGFUu+N3OJWqtQApDJ4Gr024z2XAP2dI15FAAyS0YN3XKoFZ+kvwYwvl4sV0zprMEwHGAOMyJ
AGwo1JFBxRz2liWzVGXgz8UgeyaEAMdizou5DebcyL60rCFpQR6W3DCFh52gE7cG6aFNbzsn98OT
G1h3KaBHnpdIvtRmSVw+uWG9jR+SyPGsaUK/0TFndMQnQtGqvQeESe0YthOPdUR4OwUwTs/5g0HM
w4u0Xc4+bQs3SJSIi6n3RYVxmraz7HeuHLKx3A6a1HHT7PSN2d+5LBKyBgy4/a+rG1GZPGrreBG9
gD3hKPpux4ahbu4ann/y+QzAo/214nMK7VVEc3ivnNHINDYBCsNgKGZh1SVPdTR0zfny0lnSbkiT
rxMwx3mkQaRrPIEZ+nejVn4up8SWJFg4b4agU3Pv0Kgv6XCel/MpAivLRZqtw6vUcrKLkobKeVuy
mRkFN7p0EZHTKTp7ga6VHlIMNFAxVrxY2L+HWiSgO2nA3b3JBU6aPujxTK8NLUzlojGpQx1HvtpW
5/WOcQHSQxJ3AozldMKcH6YA1LmVLaYmDxC+bhIJ6NfJ4STSijgyhEoeMlQSbrw8RdFDSDSxYD88
XMeWXCSfpULocBWjCpIyfrv0iW1Df/NvlLVJ6uHvQ/n7gfMfOdQm8o6hxv6knN4MakERDISPRuoy
3wWZfJC9WvIQ0Jav2mcNhRed56Zc6QEQr13Uco9PN/TOBy5T5UT1njj+0eh0u8FEUyNt0g9mPJ+N
K0asw3eJ/uxa1QT/6/3O+0xv9Pt2r59yefFttRthndrcFP44Gq/bPZOEizxRRm4asYoA4c1sUiAn
SzwKGP7ZhtSyDfuDlrxaR5Q+yluTZuT1TkIk0IK611YCTBGr5MTPcMX3m3UqIRl0iSZWVzivUrG5
bcESevH2zYeReOCQSATVm5yTPvjzgUrbKSLYhly2kyN2eSsl0eNgkR8MCesEBtDLDqMETg17Vw/v
pBDUbmS684vNsQzfrMuFRAfDKlF+geyr7FHKnppxGZn2F0NULuqZTzmyuQPDTYcSaDPaAp47mP/j
VhgE4m4Pj7t7RCf/WCXCYV0wHD5KBsQKoOCY5QW+mcc2rouO8REMYwD/LxFdOABr0y7hYaTkAIzN
1LJyMo0U2okTI9FvdbGa9HlTCY/r1eDD2auF2NdArKZvkhJ1KmVAweJ11QSegW+Cw9NMrdis70X7
YrldkDTGDe/A4wiHIVK+Z8131JRrth4vK2LBeKXzneVhEjk7K3NaHZbkqNcdeB/NEdC2Xc0lRY3t
LN085/cfbpmwO0F7dFdJDkhWi/ApgYH20pHo3fpp0CE10lsPp8OKK/GmsgwbO0cT6+/TpzY2Vrst
8cw3WXqY2GWv6eTa9eLzaLIFKXNfT2zNtns891oo4DoOsu8pgMwmw2XXj9lScMAxnG25EbZwI2Ae
lNHmPCD7EjRvJAh1ylqSuYqOaiT5xHHLTtTRmYZxi6r9wphRN3noa3amfBktYbgA6Gx3H7+/jwuB
iEZ2ARTZ3fbejVHavJQlw5K7jf0SsUa0MEFd53e6XKOL07EMqDaDRJ3XFMNsNlXAtxsn9pyyOAWW
RGR+Qz1E3pTO5byYr4p1t61V29KF719KWOy5REpRGsVYCjugJQnatp3DqZC1dpO3HFu4CI712xCo
Ki236hK60hJa/fCE1Zcwsdg4zC62xs9DlybSh5egb/xKbd5SZknTwbceQFblt81sqWh397Hw/WDq
UiI5f8/83joHvwK2wX624wzVxdmm0+QQcZFUcAB/k6wrqdBXa5NpmiQxVjmgP8CtVWP/YkUWMtVc
YEaRn+XZTz9hxN6jXvXTT6yqtM3687KccvOoAeFsBC5GJOhBW3eqndnGYIBQYDCJ2XKhkIKxJg0I
YFLINWtht95J1IxqeneKmBbPj9xHZNxwxrXUwPfIUu37RxN0ZWTZlBBffz6eszaKj0V9c4ITUMRr
a3OaU7KzcWCNJveMANhA/E90TQQ02esFZCvrKpIMZF5vTqazqDpZbkO8yaa6uZxVkXRjrl5K+YOO
NhxS0aDbkDnaVOMa9evSfp8Gyg3VT1cyCK9YSY+g4WA1H44E6ClpfPTx2lHVpmitz9bRDsTXjgmI
cahKqXJQeZa+gtEZcs1rPDTu1lLFX69bdtpBCd43248jCHRzqgtt0PCpHhK1jp6UTBV9TU+OBAPI
6KvxzM0OiYJXkgacpontVLrv5yAamHi1GrY8zFArexpKUw4IllR9dRzDxgPjawUAqxfFjRNIYfW7
8DdjzMMHgh+QhLZYrmuULnJwSLs6FKv8ZIPVK3mAtSbfBni0ekFlqfXr2fItO+fQkeir1QsjME0f
vSQTygXuTnL9Tt6d3cM+P4/dOyuWxXo2GdnMn5HUC4fhty5I1AknIQ6BOJfQYYXBBHyIqCaN8MFS
lZM9zLjdoRD2moYSz4vzJMYHyzMaufKwULAXZzdVGxhPlcc2auSyIptDaFS0Rk+dW8yc3G5VNBZF
f6E+z7IYxYp+rnWRXrGAsDVbGJMPhFmxQO0RCls7ftM7wuotxrWX3Ou9xBOTaMnQxfqPSUuCEU5D
jH4NIDrZLifnuHvWtOYf4NXIYUr2I/c9oy+RA0dmU9ulcDyRncm3j1ozl3rQzc0ToOJMOyOXKvTV
AhImzjdIt4IB7iVpvjrtarN96h8FlzC+SWezqM5Csu/cn3XMItZ1YvfoTt82EqNEBj6QH5dfyYYg
cTU/7sA1s+PQ16Td3tnNLX3AQmEIXhE8Fa6bvrqQ4ZJrh9bNsmdT6bgsf3WmR7sx79NiEy1mu+7X
jUAYdhnhutflv9Ba2SABFtcTqURCrDYEZwAGclyXYS0vkMDu/2qYPR3U0alG0gclk4aFmsTN1bd0
V724Np5aPdJcL7hznPQKUZPWcF+MxRtt3AWLHOIQRA3nNY7H2/+DMQY0bsfT3ayvVOW0w3J2TYZa
5AnNYZgZXsGV7Js9x+GHO91L4E3X6AA1b9fsm9l1CK8QmKuqzcKjTxuni6gr4/WHNTwJUDOD/h37
ocXqJN9F/MrTrBvgBF9IfjnmszmyiMRtWu9ThnIdk5fh3EuFxmZz0Ow0jim+Z8uLShqZoM8wqT+9
pa+cVPZmcZJTUfy0qVM0SdIFa7xkS9uKeAQruSFl42yKTbA7ZhdZxCOsYTyCp6X4+4d6ufS02v0o
hwIxMkIB4XGWp+btGsneEY+uL10cW5qy0rP66vTl9aqLzQgrpzwb9eNpp04mqY1t5AIjnSIfCRko
Hwp2wA/yrVWjy/F6h1KW5AHK3BTyqHSnULjA3UpeMLhMNXWia60PTw5dtgZCzE7/HMtqeGieQF6u
uNZbK08os4Cc+bB2U9yFBR5iZMsk+AoyWeJBQ4OpdMn8aGg63Yut+k5irckHiqDrltM+ytvrzeFk
tp5s2TH0VByOQtIy62eXoXksHE7NMj5L4OPjjGfLJTGWCXMcBRcR2CBla0O95GqNaD3zslyJkyRy
CSfFvLxKI8+npThgpbDlvhkBM1MaO3ZLWxiC7GrW6fbuhZfTLWESATd6GZayz2VS+JRLUpM/WdCK
mSoZ0g4ux3JIUQOk9jRkEoYFS2aa3UEeb9kNdIToYvv1RH2z7Es584kEfXg2hkFoSGCVHDWhrUjF
BsYcf60zAjtO6W6Wnc+ZOWK31cdsg0wvwtNVL4a0onXLCQ3K0G4ZhjnFvPItbvczQxBpdbYLjnfq
hRR4x2kKumvWhgSHWKjtrpdkXntJdBf5KaHAmGQKbxcyc0SfgFgjZix6z49Gfl+FEo7ojuof9buq
TdDvu3Qm9pxppb7tpZfWqOi472VdO4p+/SElmUfeUYzWsZGJN4uTEkfuwniO6FPD3OfF6UZ0bPox
mjbXxh/NqBH4Sqq5z8l69Gt4gGxU3b0qo//rEf6LG0FfpmFbv23FeVHMfHTappF1w8pbOTNaa8ci
9okimpVGFOTT7RIkH/xvYgWwfI6/GUZkfUYFI90LNoWmyfq3F1fp7xF8Bl1rtEicoobcgNZnmRgo
KQ3sYJ8XCQqGFEvH5sQ4OL+hiK9z0hLhbona+KpcT91o5O/9RiSF8yibZLhClghLBVcRfk+9kLVx
m/JoNhuqBiKM4sPx4+rXV1T3JTWJoNot42jfb+6ZZztIr0Jjx1zr1m4T/UqD7XtVV2+pO+39rAP/
x3GvrjWzyDiumIvwl0bPVd/sYv82RyVZX1djp+paug/KBNd0L+21eyW26Jfm1+Pj8k/3sEv89Bda
GG2+n/lPMeXyNMe3V1OQOxlFdOSbIC12HbAEC+STzbV/UXtpTHucXWhwoLbDNL2SSTZB0Kif6Bqm
ibGfgrqxYlV2xEhKaD9ra1bE4wZ7kvO2xOcYSzbslJ+0GW3zc1F7J0QJIE8F/BUnjNwtJNDDRa03
vMPYZPwMMyPlNV2R5MxyG8ELuqBsaoYisw0TgCjOmpXRCxuXwV5RY5eMVWi6NZp9FEeam1Fp5dZ2
ZriZYSS5HzQvbU4B6vSJw7HH6LofUq3YvHGziL6RuHT8Uhf/OJZ4kXtAC+JIGDIc9mwTk99r2Tn6
VN8520BNKsQhOK83aSkSpG4WATuXZuR0jvYhfE/ZdxcJyZCvC/NeCdbLTKVet9ZFQniUa7WONGT+
kMMsUH7UGR/vpQy1IrE5bUez42N3k9fRSNL3KrFnkW9kEpomQgw4JR8iVDNegugVahlJFpLXLxC7
YrCFdv9Wp1mc94ftCh09YYdDuekOlf01/+wmBAriM2s7JIjkEyAZUN1dz76Kk6DyWxPpPJ8tydbj
J7fTdkEt+LI2keq+xu0mwVcUGp+W3/+Pq5t8RBmhqsvl1eRT+eHr/51yj7fg70O4DAskJZhcaoo5
F0wYNgNpvt+eiPkm+6FcX8yWZ8/L1U2GKWnJ6vv+cvnDc2kGv8w0lI5Ct4k0QDmbtLzEOaETBiPC
YaYZGC7cqfHaZBbX5OPbE0FlZJgmnY0CM3HmtVbr4PDz/9c6yJ6PJxTahMqCCr2wVyWFYCG4EhJO
xHtik3t5iN1XUKd7Zq1CqCecYypUtKZbZ78wth1OxAHU/fHHHzPECc0Wm0N0f/1545cgfEL0H2i0
+kq00+KHRdm23V8YCT6zuamxAv8ljb2Dqb5cOliUWibsfrZdE89yyRsJFLLOZkAR1Hys5+HXUIl0
Z5fh164Z+NF9tom4q826nodbnQ1xbzDH87/AQ8IvSVfH0Hfd9qOeena6z/36wXklTzD8mpMtwPlG
ODl0iZ5gKHoxtaeEz4S6f3IMpp4cjDF9dYpOIRgnPqapIaKhS+QgOJZAnikFm3HXcodPjFLrsgam
KY0NsyePMLFGgSq/SqLoJJCVvHDOCGYYWE74k2ByKe3VmA47d7JnynOaNXMvrgJZBvcuvdpumo9Q
Iq84Hpcd6cRtUX+E0jmooxOWMw5oK2C1GCVBDZc0gZo9gQ72kIviCUtkraSf0kc6XbZ+KxwXsFoj
ZEMbDRE4HkJphyq3hHjVnxye2tBf7m7qEtfZH1oEp1LBimEhHSCXkKvhRmn8y8zGW+1VeRVtyGfs
h8VAdvuSjtfC/r6s7dADTxvgRjWEI8pEHZgBkHd1zXq5WS9vGrfGYijp6Pp+51u3lz18HNM8/brl
KBHTdEfM8GjDawMvPTykK3kwEbbl2evXb394+WL0/LfP3r1HBnyUHT78+HH4t/lfH9xrZwfoeeri
cchReFngI4yeEeRGvaHUxa0IhxrvBOJPP33S5vUL+3kAvw7aYeej3759/wFGEJXMOv860OQmCOd3
uRQupAv/Do+OZWMD2C5ZFSigrJSCCMOZjNHILyVNODMX+WQxRdSAbhvX6vBTdngo/Rn/oEvEPJ9Z
X1dspJOL2gl+hjd304UvekeDJ0aMgKb08lzWpPhLmSWDP42ENUdWTueIfuxDciClbw2GW7WZ5sH6
fwHjofXvGGizqL5DYOv8LerMPn78204A7EMYbwLGhoDCyGCOTjAjExyIqssBERRJX8h3w2DzDCbb
hK7tJpiOahDzWTWeL7eLbi8G6F0CVxUCqE04gNR0eUsdm7GnlsCI5kZTg1nxpDypAL7ufHXRI7XW
py06/lHuNk77wu6PcCngDVvDlQPW9Ww7m5bZVf61slGbEsnbjPkeORLtAYJ+aU4N3DssR4G9CMfc
9rM5L9HmBfUlkz180mP1sNO3WTkOOFetT7sKk6E763LSYM5h6mhSck7wKoB9Tu4u91+/ob3sYwhJ
vk6fDhl51EQcUUjuV9nvUMtF17Lb1sHj2AWBn6gbAbQejH7e/5TJA1kFRZRfg7DTZcEh1789PlWG
f0ZcO62v1I4Z+tzxYlWBzzzsk+GkzvDgzRt4VlEtUB4FBmglfpnT8WxuulLb2rX7WRsLET+IIgfI
cCTctXu3Mso4NzV/atQAyv4E6MvT7M6Wk/l2yr9cHrJbVy9rCuDToZuez8fVeSOPjj92bU0z6GVx
JbzB/fsXV9GwJxoCtCinGC8w1TwhbiFoEbJnWQfGjbDFZ9tFnHdwtpzOJhIchqFEyveGQbJhznc1
FmmDFY+A3QPLLffLd2swaEWPuKB94ZVC1eBDfKUfYoWHhMGuUJr+f39uEPD+bHAWOP/0rYqOqMk/
Z5o++i41uV8gdcWda7sW7CztOSpP/sC4tZhmbDRCAwkfG69Q7NnCwh5fXCEWfZe22Yt1Ycnxlkin
FsU/tSx+Nm5vmuRep9cPJovKdeewgnfx5AYDVrrhIrS1FVctaAOaaAd6pY7+hDmJYZA1XrZj60sh
pLcdaCpZJ01RNYhas1zwDWfaenG1SzG1OuHVA3FDUql1wzGFyxRrZTf12jhyWs2ajw0WV7RcDMQy
TDzyUQm5itqH38g8GN6gi6sjv7oYlQgz4VI+migcmOwdDK6W2D0qCcRdDxF8DHY07DZK3QHnUYgd
6bm72k+F6okV/uPWseKt2bEzza1FVYXgIix08BbJCWZCix8jQotaAg3KmjJdU90DU9isA6SwQwlX
LBJ3ZgGq8Gkd42uIthBqRmlkzlGz1CxqrXmYxTSi08xjzQUSaY1BuxbIXg9zHhLcm3JLeea5zE1E
yZlGhzXuSKB/cfL8ucT555LmnYR5HQfj8HYNgydfeNL6dQhc1WEjlHntx3dZAhHIpTcWq1RMpyJw
2fWWJsieh2nQsbAwRoM+Ghw+jp3eCKPDteyu7c6mubHDx8dxUyk4E22SLLxSr66EQCuaFMzXlH0P
yV3SlxAHevg4rY9IPVf+706ntT8Ey66mjgYIKO3+mg2Ok2oVXdXgsRg0qEL86ja+JfX9wofk1gZr
78zuifPTieZheTlf+FxiuIPyZiZhQND/03C/xSenHScktzT3vQGplITAMUXtTUnB6XEq2SWa0h+m
OG/Md8v8Na4IJZyirlLgMl2vbh/yiOgPDM5I/dALhIhl0TgZg4GPBVwjvjpdQuHn64gdXg5BRpyk
qq4znFDqamXOFbkPw+nLBYrc4ST1XcBRUDtM8TnQGt+qqsPrTLWXYZxeAvsu5ZvmBJ6W9Z1Dkrgh
U+IR/CU0zBFE6/SGhY97NXoaSEIg6QKrgiWty5I89CFbnKJ0vZqJI+R0dcO4SaOKB1HTa2XrGCNh
XuAl5pPXJafMJE5WzYPFZMADHbh2krxBVDQcEAVnJ4RXMmgR4ox0SgOuwq51lLZzarBXz29EJX3f
lPIr0a8JAaYcYRoyj0hj0aKQWbRRXubs4ZRazPfKuch2d0t2GpZ9hY+nnGZmILd2St20NP/6OWag
JcScbFFszsupJWOsiVTvocW0fvEjZSWWaUnbojeczaeL8TUcRzuzg+hUQYnZYrvwZi5WOOC8qIUq
61pSRVdUfumZLDI0rkvdcVWmH4SwbmT/VCMCuZYDV0GGd14gM0LfIAywS1cKy+QC4TByAudB8BJc
2hWA8862ZJ3brnUwVudDtxanCte8Cs7UgdcT1dRHB+KAlqAZTo0kI/flKeE8BtlQlBviZIznFwQy
MRNcZrQ6HuLg9LFic6FrgTH3Hkcr6FTzByE1DdaV5hYcl16vXmmFfaK2hZzmzhMFeM7UGmwr/tuN
2hFTQFcSCvazSPOf09e9xJC9Ue2gySB5EPISro82Z8DGh2VaYBQ/BUsWV7rbWbDb7P5ED03j6WJF
JiX6K9aVajH1b3NTGb6rZpStGU1SJIPWUejGBZ4CYcI7kaRPK7q6yRnmviHFsG1Y7x0B01u7K92Z
4Md9x+0HLNW7vd2jfPP25ZsPjcNMYXumGEdrYAhmgcT4l1t0bK3z8+cTjLC4hoWqfu4Yf86g0meB
nn/MYTwar9YjehXZOKu3cuaM6OtYZkrJSXWVmL+N1I9cOO2k5T0E1r4nFH1awrYB8ym5RMlWjaTX
DhPu+73fH95bHN6bfrj328G9bwf33rdD0xpWW1xQJd+ec0L5DngVDB4loBoClfFWiXGG3wKpYBMs
8sSnBYgIIC8IJkj3FWzM+8ul+nSpSza8lfPxH2fzmyARSujLwyzoRXHDXmuGjMxIPRsUPupey1tC
ZOuadJJS9ThCkzCU2coW8DwiVK1rEkF9BjX2UTpPFbZ8OxdPOnwEjCidXmVGg0bMTKt6Z+KP3cS6
8q2/pmDtQhmJ2DCeMsxKM53Xz0fPXr8ePs869qyA8I6mewTQXgL7h5a+7ZIg0xUFqirnl4WXIpEp
AHZULSP41adtyWG0VQUnpPXq9euXv3n22ln9O/ezP2cfs4fZIPsy+yr7Ovu4yT4us4/Xj07wP5Ps
47qjCpwMbhpMqqxQ8sAdDxrjSQVfASO2KC+LLtfotV69/+HVmxdvf8COY58BWZoWsFZnI7Lzjqaz
6oLcYXLNPL7u/DuIWod/PP44+Pix9/XRvw+OH6AFG4q86ll7NT3/ZF6SvZjPi7MxckzBAI9Ei1Gt
lHWwvBTM1Y3YGK65KZ1bZ9DpxRJkNIecBPlutbrNBNqhjUQFJraiSTwxiescLXODnnTFqX/YUlqt
Qps6fs15lRTGi/xVBLLAVZNZ3DYgXTcPzHqPquNAKbUt/tDvOdK9OR9tytFp5da/jwhm480QX0mZ
fm2Ldm8B1aejTL8i4/VFSOWpaude9a+abnfVd2U1n7g2lKj125fPXmi9MCHgiqcFt2qEnqe1U8Xz
lHHXJk7vLjeIl7BgbxP014AG57OTnL7dcdJY/zNsOE7cl1G76mD4g3fx+PgRfTwehseU2sjP1uV2
1X0cnUvXUufhvUrWNCyfaPx2x2uargz7CF2rwzZ7A6unrbNcblS2nVS6+B0FReOWOkR+0v4g8Xf1
w5TsLThKUjM4TsTJDR4+DBvvGc+EZ1s4PGwPNc++0AGBLSXLps1zRn4J3kN7xwu/rQoxdq6gQzRq
99n7dISN0hXtM9QYXPXZZWEvrffnlUbQMUU+xs89t03Xgj+GBXyXmNjM/REWMsNgrAv9y2hNxhcF
SG4lJWGsMbNbC6msI/Xnts1+T+2OVcrBaKeeU+Cx76yypuRsxlCCfojYUU1/qIrpzuGhDmbYBuaT
jgJV6YexBzyaXe3oCH07XCdqSDW0Zt13tbosD7HIIZXupFsy27G7qeWhKdqpcU8dDddcI6DPvl7e
X8pNcedveK/K8jz/yvt760HvoV/k9ehkzmch4CQ+Vve7H6cPevTv+we9rJvfxwfWX8cgqGGHt9Cq
7hIEPNppwek3JqhieBhq7kryx7ziYAq44KtZYXTSrzg3sWjlMMXzbD6mrEyk6tsuSQggHy/grBzz
F5Yz2lCagzPiYs+T+YySVlkNOLsuMasW2gDQLWOCcTZXE+xsyE5IRDMiT222BMQuHVA3OEcmVJRb
BDI0TyC28I9eg4Vs4iSFxCEng8uHxuiJUGdpizj0oNSswccRrSpcaY+nzf8v5Qa5n8ubDFUC4HRh
YlRQNwHxXXWT0sGGxYXKBvS1Zs3w7gTO7QwuzMl0nF0PyLp07bvtRZ5o4kSGPzk59zLd0jWrDeBy
Em0ZPuqxrSJoT7VhoTPbDqOa1SeYxRmiHwNnZsYKQLndj50azHqDi97s1DXCfXdGUEqrp6VNibEx
ave6K/2IdbekUMaPB+JW2NVvRNkceDXVUVNdW/0glzPaUXlX0hhpWKP1ea6GPzyn7DLB3nympcDp
J7cLhQ4JU5oTYZ1NLlBs4XztBLtJu6tocU32DrfVOTXP6badiedymdhro4rXqC119nRa+B+eH1Jy
ndBo2Lzh0p5eU+lYNzk44JJ8vuHFo7C97r01Ax4E3pPZQdb3+7r3I6oGZHOxTVXkrJBkNLBWCnbI
rGhzVI51PrDbg/XyiHuz3ePvMGV/pvpkFY0fGzHA0j/4/vmMCijoGqbMAnLwUsUT7DVcFRwF9c8G
bDPGaHyxgRsv1lEHn1rm9qHQcTxwDo/XC4Qnqxfk+CKbsW9Rk+2E0Blzx28pFxi0GpwV6ziO8TSn
s2uJXWRswyI7AdKMOQGvCkkyQ8TzCp8t0lWa4HpJhGW0XghSkrWZoTNx2jWNclLFjEjeQ9KDffvy
/ftnv3n5vu64gjjqzKIUy8vZGpixpBaPfAJcmSP4Hf0AO8/rDXLUXCIEJKafJOgxAH8ymghHlvYs
qQ8Ey97BNWVazONGWnWde9KSFQV+idINKo2YUB1FXkloqVyj3rtAHX6OPhTruksWl8pZ+Y4iwWm5
XU47vVigDrmeyC7AJKXulWUbb7988gj+98+D9s9uG2MdgnGT4Z6tIDryVBK2eh1NA3nHuleP/wGm
8mSwb4W25Hhip/fpbA0PYLm+0ZXo3b4UL3989T61FFSu5iS6tT4QVzNSVyai9PCV5J9RymD3j+/f
vc7rkN+OiHe4PHBNR9DWsaGhxHWXNhBUJAvge2JaT3oQKU95Foh5gTEwKKtw98Ew6kBVErSZfLGM
Od3GKoeWXnxXxGEqPo14wnZHYzVm/oqC28inq/M4f5pyfcZhYggdaZraO7Rls9Pmdnc0e2+KLEYc
jJimTvZZ7RyuO5LhgJ3IUoWIQWk4JZOyw6+8OSDb1XTsUIfwUHRIv7ZTR4UHhetZ8fqGH1V3Zl3c
R9alfT38KsOme+EJAu6AThBOjgZwXAM9alKEYFUHZFXThLQbl2GLHuZY2SwDYc87IB/yNaLMr8Ww
c9WJps449eqRRBwz3xAkXnAwcRWuJon76llm7i7oKEgHxq4Vt/r1IVfh6Bb75YkCQvnr3Y5Ska+b
YellBPcleRbpIYy/mxFNlhi3B+VrHCwCnXgHsdBdxOnfKDEIER30sQdxNQqR4AK2l3oGlObq9PPi
IvRWqInmZKynDuyKc4BUYu25KAf5j5mDPER+TD3w+dXKuic3mUQ1gFwZwrDRSYGLADNAb3r1pB/X
4qDIHYS3ly5hJ46hWrqYAUoSUa4xDRZlXMYwLn82aFeTDpkr3SHiKe+HnC/ukaxvXXWkvwjxK6ZD
tsbUqdgqHyM6f0p8WYURHkTKKJwI/WD62aM0qtmq4Ui4M7dKHLidh2kld9ImKoiH7Ybs1eMX6vuT
vqfCW/xd+sLKqeHEIDjx2FN2p5es3bDaIa+/Jl7Y6lDhTm+3+y3lW5xOUzI8Rq2XmD5zfqpq1jpv
Qj1ByY6hFmLO1cdmMnzc5zM7fFwjcFhSbgqyBPYwAw9X5BiCOOnAKNB6MQsv2OxsWa4l9Tcmj5pN
CUhkPL8a31TsF95VMaw8DXmUJZSd3+CbRuH8xWK83MwmDd7MojCCkfRJg4ASHb5ZMnx8kjjTKAxy
ftNOmwyiSxQ9tixKks805RCTBe+OlzcLmOTXQJ3/sK20y5B6BrpL2ki1qPd2AXyczscJto42KjIX
YkFjjKAinV7qJHC/cKPvUyXLogLrIEdiM0b8lPgOIWtBFA6V7lQiyjaZRhegeop25r35+xwozz31
gsO59PkLkiPhEt5b1Awo22dEdCZkle4yMti/i9Q9rCjhK/6KKckn8y0es54mMlwXFVxS6Clgt7be
ZdsxRNhCp1cLD5JDWgPpOMiATBMoB6KGsAcmS0YsK9vMLWmV/HaJUR+aGAm9KGB1aB5iSLHaz+2y
af7bJa+A+q3OKYMIN3TrpLnZ9LShQhAhCRUGnd4vsQov9bcu9HH094NAVpsX4+V2ldaaMjlc3tDs
KhbPGneZga98Ur3T2TVyJ6SEnt988cUXzYojls54yXuREiTmzSrjzY7IfdtKxUwSDqrhI6byjyjO
Cc2F8yrg0QwrC8wwJSCnE/yeGlOdtNMN1x3wDzS2EHbopCwvgLxND09gGSnOkL453yzmBxi/Pzk/
fHpYQYOHf58/zR+bNuz/njx59Jg/PP7nJ/rlH7aLjPNvhEvcCiNseYa32aNwa+SZgO0gAVYWr5e1
d1vB2phMTPohaavKbgob91x/9g8e508UlKYa+FGitu7wkB/KQ/dt7ANrCndCeX0S8yWToEwKhm/C
fQaPYqcVHdppWVREdlCyRFKGgSiVd72Qf03qeiFTifU/qE0iNeNAdcEHN1Jb8JdUf7triqagabZ2
xeBNwCK86dnhJTwJ14t5Rm4BPLxMcTrJ4yB5JqSvPvMebjrhu54ifGQa+izlZmLc//1HTEepLDcy
imH2w/P3nvT0ciSMrFlGCstmm53okLatH799fafmNGrAtWFl+NNTo1VJqNpcbB4WjeV2djg4G6Mh
0kcvoF6sK0JlHBIungsUwoSdNTCsKYWdaN/wFtWVdla51D5cZ0571dv9vuKsnLpplyqUHEd2gqhQ
QBouEIYBio8FiACwm6lUV11dLkILXmAGmT6lz+qlATLM4GngNJ7AjwYRTMNFozLYJi4mcQvovUtf
BPIiEC8pFBkoVQmKDPl4GlcUJOKu60YmEObiqd0yBFD2I+v7/mNcoAX56YjPjrg6uor1+HA4F4t9
oDBgIVawmpSq7B6FlyFfx45Bjq5LwO3t7h7t4npVcM5fye6KmMW0GDXk4UtNjY456hbs7lh1U8DL
epS7mNoJT7HWpKmnAo+hEh/WES4ZH6nYzym//5y+3xQ+cCtjz6dcfKdfvP3w7PXrnhF7sIKQiEV1
Nux0RCauyT/UI2kJFF2O4u3sOyqlqgQbOMvOtpQBCq2VJNc6vnCKetmTAlOQZOcgIn/9xdetiNpL
74cLBI9uq/RyOC/P2GW1Oks57/VrUkSNY8D2H0AH2eGbTmtv8l97TNF0R64u5BhA5t6a7e7fipvE
c0b8a8j0128JD8VvvFwWKJtUn+ChWnhv2zDetgoCgPuKvJ/SG6EYE0TfInnnIAuS7mrhitRP7K6E
WiFMzdPdsX6o/puyKqlDHUR6IdWLJTJxGJop0+vo1FAXgCFVLNKues64v9PQO/Xaif0jp89qS9W8
QiZxgR/3mRt36vYTtgWG8qKUCMs3ns3xDi2LKyQY4TjhLDaPE34sNsXPGyq08QsN1cV+i4TW9PQu
gFxKYvswGlwyKPM3HBmVt16RZIC8BPs5k4La8DkusEqbBcaecZ5JrbUlPAb6BRqLdKEJkSOxQsQq
HL5LKzfRI2XkSCaFrOOY3DrsASOe6C7qSx98BMqQmJ6Py4YyR9eqffARXvTb0ePB8XFqCkHoGo+b
X3irx7r0ubbTm4sFvEsKekcuz5SxcqdxFm3mFCOhWkadWVNXp7aIWadAEUi9B3uUXO2Gmp2db/T/
OyDu/n+EuzsAKZHdKDw91oE3smzi4aBjsePwNVe/xYrqyu2wmMaILY12xv+HYrfssQfecGUm/wss
LXlcuTAcdoB+3LCqywzDePGt3E42aM9l/vqSoFwvZ2hpMQFASXdU7YPNTI4HzZVd6dU9GU7LPdz0
RIwKaB9W7TQFg++hu9nPOU1Vmbl3qvpOTMu08aG7hzPINful7fYf63NPDWBUZhRtGQAZ5xzBRveJ
dTlv/9K9h95bXpB6/7s32eP8KcWNyB6V6OU7RYc+VNSAJE9C72aKckyX8TpAeELZN2pPjuGjL9Dq
U8LKnkA5ij/uZydbyh4A536LQcmldjbTbqO2kHWiQeR5XvOX4hqOzUD3pE7KMc4fPPVJNN6H48yZ
J53BobO/m5xdc+6jl/Lnl7h6FxHU1bLRfJ+T294aDsn4BJGZJTUPZk6BEZdXFd1l3AKOC8IFIvcw
EH9rPgx7gnvbyBq840TYvogp292PYLtpeQft7MHON7KN+tYvhhrL4kbVj8ZUS/EbysoSI9GqCbLw
nTdACuQHjxl4DnEirMr1Zqdqsyo+bYvlhCCUkJJUBktSGuWMHArDP0NfaEzegao+tvur9s/n/uBh
oRqHRJNlHBc2OS9nk6L5ETPxHTAXklHj6NwZeipKNNo3b75FoR/uBHzdi7Qr2yV57qi/DrA2OCZ6
TF7jFnxnIFMCeBDYeKTsJtI5djPBmg7lEA8lKg+NYYEFp0AveUp5s/XhndVCNXQjufNeXRWw/6tb
VxLSdAgfL+uSzzIsIowH/oGvU15BdCAUnbDmNQBni6RUKsZnre6KQ2fCnVMQR5W3Yt+9dafx+cey
qFtEGu34KvZcxLpJpznXvoeTi1z3bgN/TUfQ2FpYPolBFKp2GqCIEngjKZ84CnaxmCFJqB8FC6iz
v5QDUWnW/hhCDkxLu98BctONCGY/9rvu3QVc6L8Lr5Rkkzq9L4aNzEnTeIPG7/Yef0ZPdYZnTzAm
dk4583YuytW68S7mI7ja6OQFwz0BuaTmIJg087wuz15KLhpB1olA2lquJ02CRn8InL4o372ZzAX1
ztbONiZj0/pBgqa4LnstIyqLTCNScGEDiTHDVEtGNxUYIfQOFl3LNFRviRuZcQfr0QNDqcuAMazo
gVvLMgAvMVvZ8AJdjGEWLAx5XSM1bpMjuzjW8+9BbVyIYWaWpKFmYZzr2WyoHQ+1pCDXcZPDLBVO
Ar+WK0re2t6pAHLF0OhYDYTPcZ268xXkf8HtkXq6WTSPw0uahemSI6qggcswwsq87wQKdAJsIfIk
jtX3uwbvoPTVEPHm28Jnd9kX0aCvQgMHWq3wWXnaNQN60OCckv5fh72nzsSPQC3xmYMsuFNruvJ9
u5x9N9n+3RpriI1j45Kd8R1msccgs0T2C2I5cKlxv1y6w/w7etQxwDBJOHnHhrbCq+9eNpaFXd2z
7HkxnzMciPvdsEDhORnywFH3twCGE1WP3bgwG39chPKmpCyVrqEbcqsWwgYseYm8s43JBL51Ni0X
/ZfXsGb0KqJoQNkfYT+6O2MNC3wupYGcghjfs88Ed1/zN/F93IaNtBSzgNLmW+zyzP+euVRmQHM3
8Hbye0Ruw/QIPEcgzJzgMN8A/5aARdBG8iX8/uFmRbDY7suXr19+CyzJ6M3bFy+TiObG0KwvQ1dr
925VYP9/BSB331Q2EcsdyigWhxnRcZlrVjcebhDhBTRJdbejmv9Ov0Mu1Wi1huU7nc8maAnsbJfy
SOMf6qfUqV/jDpv0qBgag0a+YWyEXFzpIzk+jVzC4FRTsyWqMbA5rIG4lItZRbZm/Fv82TuMsHDB
n8TsPq2H3PZaTehEinihLkkkv/g/6PFapwBH8gjkY69Eo9w2kgb6UEfNIBrDH1rp1AZUUncvFmQs
bATbnY+sP+14PjdhVKSrYK4tMgtNfXrWu/SvePkCBMfJYy6ujvDL4zpVwGZVKj+rDb3XEJh8hFVQ
SfM4CHuf5hfFTRwLBROM7Bg5flcPYJkrPjUqMFj1WE3QLAvMrmgdkeUpMMs1h0w8ATl2jEztSbG5
KuAJdQhVGnB5INiW5yCsXGJOVBSpSYvGCeXI2sttzLi62pGxJ1KRLjsbxc0uOJDwhA118HtVYo4d
IKnrElH7B13vkeO89yLkoQfof/Pnwx59ev+A/s0ffA3//ulJ/y8KRKSHxTj6wW0d98mp77OuS812
o7TI+TOj7zZ2AjxPJ50bJOngGI1IB6Pj8NssBIfvHo4ufB/RPQv2AEdgLdSDlN8XFlblcf2I1vIB
0vZJ7k7hgXDjObMDeSHUwkfI8I7POP58NPinY7ZoH/1TlPziQOS3STnfLkLX+smj/uRxf/KkP3na
n/x9f/IP/et/7E9+hXw99hA2g5mf7nfU0h779COPyMOnqu0+pW7rcswKQedUG/0SP0fKaQSHfIRt
d77+8VVCfXy6lInKwvM5etykXIC2UGH/dUMuDkeT/clg29opiBrjk2r4uJdWBrjjlcszpcxKjG8U
GGRkND/eYTRek9ioyzalIwuhn0UzOBRpJU0Tdd1kYtL6pt9l1q/+2+2BvO7xaJpvW3hmdZR46v76
RYcASP+exvy+kzjekoal3Lgs9MVU/DfXxaSYXaJSFI67XNrJo2gkC0OSckOAxTOOL8V+HqQ47l/R
SO83rC7dF2wymbvol7wHEY9229FopH4qP+AdDxV3sTf3YJdmMCDh7KpabZRaQ00gGLokv8jg5LUx
8SWdXvZVozqRWQcKYSTbOcZCw3s9LcmNNM9zDG05H68qNGRejZf4a0ND1Ybf9wVp8TaFtaRSYKPM
BN6RPiZIXs/OzjcNbaGybbYhtRnr9Tbl6nAO/Mjch82gv6BEUl7NJkVDS90SrVbQndbrZ/oNyKTr
BaxP5uQECsXpNbTk40xpRMBOkSFZ8oFWUTzP3fbyILsoCnT1u4mjAdIO2jEwu3hq6+Pc20sHXGM8
+nxNG9yu73o5D0QZKkVFHdpKv4zfJuhGqj5KpviOYBbJKVqP2bc8iCrmnHqyoypO43Gu+6obwqEy
3y6CYd+RZ0ygn9IfDzrZYFfjdE73bflFZ2dbIqzu29rz3a2pvLxvc3/d3ZwVePdt8ovdTXqJet8G
3+1uUOXtW5sjXPFHzVxzwH6pPWBno8mL+DPfcZz348ZLZMYYqDZ2jVMD+AjZrEQhEGP3GAbVxe1x
nEFtJE9oJK/5cvwD/fFvu4fFipBd49nNXtzh8U9jpmLLnqbdcnRi/UiakiS1JSm6EOlOEm+8ZyAG
e/I+3Ln/43Zprx7+Rjybk6RR0Y4uGRNG2e36XxwCsI240w9yZX55qZwfvQ621Mm60LVC7DmHrg07
RRLyw6bH2D7jrHLie1Ja53OPorULauwHsCzVOQa7E7sxIDbCVKVXx/MA3uWwT4YrYjqozOl2zr/j
aGenFmbwvGDopasxOSQTe0LhQU7QAYbMRhciE1LaJv5v9t5+ya0juRecjdg/dhG7Gxv3CY7BZQAg
AYjkjD1jeKAxLXHG3DsiGRR1HXarL3i6cbr7mGgcCAdgszWjB9gH3OfYV9jKr6qsrwM0RckzNzwO
iw2gvisrKzMr85fLqlxZvxV8aMVUFjB4sxyooGB+i10xoZ8xnAvkLNWIi7SF81NutfjE0colCIRm
HkqM0g9KTqJq1mQo4sddZT1pGxlgcWH6QGNKDeP/6a0n8kRS3P2NZNmcZ55IgBqPfiA57JYQCX0Q
gKMD2/bgRo/R0GZM8Cb0jKMo/+n2TXkJ6TmtquIjk3PFXPhswEaoMCRlhT6eStZN9OIPX3Lw6MDT
SLVCw1R2XFhoEGFEoXjJDQS9IRZxELaEw61Wfp1EbzfnEypr1K1H/iojNQuJSYNB11BmEF8euuoc
3gmsK0fiksoKz+yTldZt727fSWoYKMwEw7Xa4XFjPWD/ydt+9PzS1p/jLD8fYfU5ei3kVeYn2LaM
Sejjh+qel36K0R4laudNWJxVMn2MEvwifZLoLa9gcF/1g7kJ04mnodfBo/j9zEpig9/FP1rZK/Uj
YiLOw4TQiR3pizbRT/lFtu0RCgDb2YWH7Rr/CnJLjjHwh9kdFYx5nYjS3E5anLRLJsWO28V4UQcd
vy/o3tJdpC4vLJvpCEDVbFuzrFkZkz8hPrQtbJZFeQzMOn2zeToPzVjRgQCv4/QDX5JgomEcmDK0
ghv3beewjho/jigYee/u7KKXMMmoIwDuzPa5HM5s2lwzTDy618tv72jAiW5hMw551IW337gTlk1n
njNAXMy7vNWbf1xS7CVU0HkbpLoGbiXszC/wA8YWuWUcqyUNl3yH2RXlpdlx67E3/VFUqVP5xTjv
Q4IPForZta7Lb+ZDmu3YLvjo4y0Rf3VKurbNEReAf8JgF0N8y1WFWWxbFkcF9wScIa8btJhfNEHA
s2xNe5Dt65bjTXMNJdZOSdKuXOr62Hry8rZbYE6wbF0fycb67VgeMpr92CeeMAT03TGrRxhykfdY
tR5yC6OPMGJ9SgNLGFQ1y7kGcbCVRs8CJ7JZCHv2zes/ziQgGTJktkbVfzddVzvAYPsMgqkwMHm3
Ndzws2Xd7tR3fkuvgfJqZN3ffPP8y1lxsXy0/PXZxZPJ8uLs7yaPfvn40eQ3y18+npz9ujq/qP7+
78pyWXr1+SGtePL4bzWeG9xwxX+tzWTd7aB+/tpcMsv9qpqxqUT99Efwb/uCr5CneG7NZDfvckXM
EKD3R49yBb40JGdKPHr0y4mZzZNfmz9nv/rl7PGvioePTLVi+BVYesz3L81lBsW0//Erwleoq5Ya
/QYpeCntPTZLVDz+1exXv5796jdee+b7F817bq/Lz0l8QSRK8NN7g7i8rr7nw2A2AMeHsKwpZP5r
HycttEwBhz04aNIq/pt8IF5KPoj3ngCWAT1EdPrlyQDyDx2JIUPWFu+N7UUmPqMfGMtDQ824yFZl
E37sd0f5q2HMIKvBp8GpJBHn0Fy0IiKYMkhZXskD6+Henk0tK7+fjo5bGdUE2tDS6Yo9gFrTDZpr
wtzG6Ouqcwujf6xnmxqAYyoLagjbAGakxIAI/WG58OYW1D3NtsyaRa5xKLmwt77fMFc9zTWNEnyu
4WvOhk1Zu2/O4b5HZ12/D2zjNIHRw9VVWw+Kx4/wfx+RAGyxANAUyhSH5ew3Ore4GqWfXdx5FLem
PcMzMPsemLnNdXBuFIhv3nzhnIjBqlyCbeEjmCihnIlfygDcASf8/4X5/xn//6gYnjycnOJf0weG
z3iJymPvlfhZnSuQp1uAdJbLfE7dfA+BNtHT+T14RIMWWPizJREoHnCTxl5ubIXoZRbv7lnUi3QW
dQjOWC/LLdLP5bWfSV2Sg6bwdG7OQWLpzuhHN053mW31wXfr7KsbsVkXA3TinPVHEWn5aEMcPDz5
XKPnOKQhS2wOlsfB8cQ3I5DEB85VD524W5UC/rEVlaWqXsvVh6E+w0cq5oJw+JhEfReMtNvUYZ8O
hrSaoHdj0rmDma8jOi2yKz9v2MQOQyB0JE71KgRDLQJnDAM8o0enHqCy0XNDKz63FixV8lq3Pdvg
Yf4iyuRnSxpqvwZnoqvyfUXJlAS9ytDS3yjobtjRE1oEEBw8vCV5PrKtescFq/boZLg3IUIhOTl1
+erxm4i14rdWvC9M1ekSXrawIXk48n/H/d6CYdsMS0q6l6OeC/fnrGYniQes0+DIwyhYdZDIlazK
YCNaZr2M5GAjZnLWQP8RaLXB0MUgbsc20hmwA1X9aB38pjtUx6v4AskPLMl0Wac1Sj94gWqroKK8
uQ+twInertvLTFe2vGs/b7ej2729vNug8ublRLsJK2VuUiiLZHwH8SJ/9OvJk79/Yy7yR387e/x4
+rd//5u/++Wv/y1ZgS+su0+MEs+QbYWkknKzXXgyydETQqSBLpLg8KSAG0YRIGkKx/6y5B0a0iJS
3xxB6tkBCxMFbZ8i1bC50ejY1JmD3/5RQu7AC8PIE+yCcb9Fk5b59/M4glM4xVifqLHbM4jl+m7z
zf9udITF4rqC7OwXzXffffNrZnSb297mFvBDQjUCK4AFebEYqSww322/+S+LzS3YIKaQJRVssPXl
d+2b//m//OIXIDkIrBD0My6gSGFopC0v4fbYbctziuiHWvsto0Kh6OAGZP9CSwd/aloMv2ThGH9a
lEtOyklClcjGeCELsW6BaRKU6GBZne0vqW9WgvGHqWtnMJnwBAB0GUWfeR99axeQ76TvS1owu3l/
WRuxprzlQZl798wuAtzcPH4Ng9XXnatZ9CdXfXPFTibQcD89AENK7W7epxKJ0YCXjLfskr7FLTiO
JTeGwWSjpk5EbXvdrPaX9brFz5STCY5pJH4Cnb0vt/M+XO/96GcaaFVuV7eTVVMuGS+EGi+G14AZ
MCkJXA3ynCUPYf9NU5Tvm9qIRqYJAdek8SFwGTq9vF03s7fgJXxRfwCkqstsc1By2ZzDFr71N8ij
DqDgikkIAVrxi4l8E7ad2UFcO4JxyKwONmuz7pSURb25wI3Ew7e5JSgEM9ZxbrBI70cPCjYYaxw5
RHI5x1xx4JokZxub0OSOIRV0OPC3KaAewxL3ev/InOS63L6bXjXNu5stUNxWH/LzawT2XuBpFoOZ
0SfOGxRzb+uKk5TSHhCSOPwIB39BQcFDq4VSoSmt0RTHMwtFes5UPzWSOnK9fjB4ZYPCFPIQ+U4i
LWS5U9nssasF1sPo4nnhvMUuJFOWdemkXib3W8BowX9iUoWhXDVr8+u36/Ob5Rz+xYTD8Me3a0iE
E+QuwvVdLLgXCPRFzu4+e+X7U86Ka9S6Iaq34CclhfGFJHAkaTAi3oxlOBrbKTfb+hLhCKPVoBN6
Xa4NZ9pOW7NDTDdDXhBltTI9MyIbLxX8gync3X54hLZrClgHsfKNAgIzVyvYeTRx7dfCGTGPoPnL
aRhXZYuPfvQ9pFS3O6nVrmibp+erpvWQBRITgQenw9PoBYbhoCNfWePfkXNMUes0y4sdkoA06vVS
5+raiATR3P1jwvs/87LdheICExqLDB2Tf8N+h3xfyl0JBE93Jbhcg3aWWoRhkqIVnRGBMRaeoUz6
zIXR2Y/n6mXns5UCADNl3M+VSU4SKz0s+mYCUdwwvXKQy7y3zMBWI7paNgtHompZzTUP5aPZuGqa
thODQFoIm6Hudze0v3UzfVNtrwFk/F+Ijtj4duNSbyJJsqRl5sF/ETEvsOmRrQSrMkx9umc+ttVm
WPTnIAAxJ0eeaWgYznzb9+v1T4gQTs3W1BOGihFX17XR1APmyTx8akr/edd8wH9N0+bGPL+gnmb9
cGS9MI49mC6o8UFEO0zYiLlGodAiUqIeGkgwxl3TIVRMJ/3FX+ZFnwykKkvgBhMC9Y2SMLzfjhDc
h4L4ocYotJ/1i+L+5MmvbNK0DSSRgUErp1uePwUSm0/gMn1TL3dXggpgV6j4h8w+mm0MtgvQNSsj
i8qzcQEc2RxKHOYMiA2ZwWTC3x+sbwQ6AJyNG5AfwhaG5D9enp832yVjUlHK0JoB7My1tQBVCNyF
2MUbPgaEJEkHpeqIOwJCkVxmjlb8+27BBRTF2PHxTzOBouTP4xFYqTmfoTIyAUYHy4ILIX8jjGOJ
IYtbC5aHEfFC8B9Jyrbo8yA9D8ydfkoMoYtjznzzLhtvNb9NT5o+LuAV24JkZpguGS0lTw4e0F3T
rFoz6UtTHfNP8qRmfd/qBc2PZXod/HqFPuTi0UOl4HLnm2vAYen0Qwj7ye5QeOCMEFaUO9ku6B5c
a8C7fkEnEL/idQT723kCvt7OFaBIH0oHnjURy3lXNzibbIG7lubK6dydtMSLFO59rdSa3GbsW6RO
K6Wav4yqO8HpH7yifZlzpJ7TzZd3vKy5xiyxmGyh5RLcS7QGR0yWkl+nSI0iMHKkDhv/hL6bYskg
HRERhiNSLBNZYUXyjIkz5QcDdzX1KMWOiXvDNxNH552UiZfGo5ZZE98v21GKSnvf7b75P8RAxMah
/Zsf/h8yDnFeSrqNmGuN8QbHu14sFaLgOuyLqTYRGc0Cb1L53F6tqg/yATf3rDx/Z78w90MrH4T/
9pS56V7x5csXA2v6Z7EU8VQk6rjeFfhHK4XOb8/hRdGo3OYusq0bMXBsFKIeSq+8CFIDtFu8rCnS
CCkV5d5NA/7H5nTDowDm5NPVzRqCkk1tvMK9+oqIrWcGzl3gm9B+07vHTw5f8Bo+x2q/N1cmyIA2
aeYo+xRBWmz1AV+dFa3ZqtPOCmnvhMjfgouDVxj91ZOHUKOJoDZL9lw+fToGTaHCVB9gV5olxKpd
QOolTgOJkWhwuCZMPuh6VWz3a0ZNnpkuCH3fYmV6dGlTS+jywgmK4B6lcKpWYHb2u2airqrlfpsK
KQesbrPX5ar+ngyCPR0M5/mRWKPGYmOOa7llvjUk2H8eFS08O5IkN9812B37lJf8nYozSvo+2XMn
sr/N3zp8UE1jkgpFzemW3yhDcccTkp69fv3y9axwr7VoPxSlf1gh0SWbYKr5VS/mifJqRxwdDuo0
ryPP5Ubis8ZFZuBvgBjqu/qsXtWS4AewmsiNwjQROhrCd7ahb8BYS6/R4UkFqrdJfvnM79G2i1+9
bwhFDgioB3QC1EUyQI+VEJEEAbmN5t0HQZtGteOdhjOyhoOENqZiszwDXO4ddndebjC/Q/uuppxD
LBX3d9cbEJKvm/W76naDWekMkQKfhQiMXXVmOnAPBWbP2oqflYH5XRomeG623apr/X+HLgG6kix3
AGXJZtn+iP0H2G8CZgrbz1MT0yC/WQNxyFoQyhV/kHsNrIMsmfL1DZqqCMHRT9Y0Iu1E9GM7UO4J
9wAjk+DbYQ1XK+QUuOoI2G7z6FyY6V5Z2FslWfMA4BEdN967A1i68IUQyzK+IFbh/ToSvDJwbF9X
1bJa/s6qL3g/gfeUTzSzwLgiHdGtxKUYclu/5OvCbssCJpbn9RBkAnk2I32Ys28CRzJ/vocsT86k
olDpmUdqO9UoauUEIjvRVqkaQcf6sKEhoumP8QbQQOy58jEefux2QjDU7PUEOP2CgA+diN8JtadU
EVkAkHv4TLjkLyHZpIg5vmesPtbGFrBQp0o4sfuEITdgJFymKCPJcMnY393N3PtEOVLnbh1sDDDz
0hwVGw5kGNuivdrvls3NWpvKENCb2XPq9Hmf8lKViH7t/ET+PNXOHvsNtBW3z3lynczlGrJ/jUKf
EV75KHNOY6789ft6a6QNiH4ZvPrXN8++frP48tk/ffOHUK0gsFx34/s/MuqtUqJNEcD45B8QrHN3
8ZvAzziJZc+wvChwLPcb0AKoNWlsLn8k0e0Tm5sN/6PXevXgYK21Ok8ik6E1nNIOeiq2srKWSzO0
Gr0HFySgiPkqeFYxk0d/ixnavcyRhw0ME25c7Nf4MkdYvTtEgzKqQmjIpNN4XZ5fmR4xYGsFr2+I
OwlXDGgr2NlngBwAr6wAFLBpjCIF8Rr90Y+ZBTyg/FyTgL5gDiu8ItNTCKxu5FobW9nUbOHHIegN
8/7zx/2xPJPPuZIDv32F1longdFniiZM6gv03C/KLGdzJLk+xxpQipMrkNQVoxYnoWMX5bpZ3143
GPzzEqn+D+jW0D/ftzujMbI+3R+z48Pc5wHUCDkr+t419IvrHk6k/RCUIrFzTgMPfmOzth8fYX9S
To46hyZ3A3oCO01ryZeZVzjCyGJirVvtLvE6E9QechduHNZFRKPgmsZQNMW38j5rmSlAFMBCGwJV
oDA3AvAQ08CSB1XgLqmMPTP4dcaZSwnOjEvSE74rqEZgFM8GH9ftN0iI5Coi7rWuJo7V9YEO5tQ8
+kZgMmgjbywrFDyCVmwzb64qvrJNDfKqYfgQSN5llOy3b62/wdu3BWk1hZdez9FBfbkuUYUwVWeb
2xkwitlbfhKxzdjyv/WNSNNXQcHP31KGL+AbZxWDw9D7j8Y2h/BWsOTR6Pl9SNKOXZgJcC+8Bm/f
ptMdwXLZJvRJiiiR9nDNDm+41bkMIFjU/iheS/poR4SYOdu1qfcozHt+ud34mc71uEeJgbthE/lE
4z4zBP4uyVCmRiqEdKn1w8djmkkk9NFsHay0dVThLI7mEyYyBIkiTuJIYo05WgkToj5b0MpMzhP7
LJ8bSiUSAT+yLZ0k5pb6zEDHM0oKWYqXbctpR9CZCKidH1uGI0Pw9q6jY6zX660YKcEfbFuqyDmg
bQ78M4ptOyVte9psLz978hkX/kxqT69216vP33oWXUjHttnpHGBP0f7lrQzUrt00C0jt5h7PGjoH
bEqgE+ayh9MRf1+XZsa+5fzF06+emXnfoHH07Vv+aBS0fbuHgAp4rbXtnN2iEIb2OFMYeLMpbFd2
zFhPGJdiKymOMuxPJrBV1jkJujMfptPpKHdOg5tSuUMF9KXuIFQ1iAiD/KGeIXZhfhSAjsIam2/1
1/4w7Gu4zWwCLxz4jY9xpJsY+lXT+TzdOzv745phD1GR/aBSIMO3p0FSLe4+8EA9OE/0NJMvnTqg
Z/jVLTGuV6oHn7e5xM/88WFxEuzXaZrhdrJapvCYWwHbBCUFS+mLU5qeJjPomukLK3brDAdf5Dwa
/hBaHCXx6Zn7yMDzI4T/rakfOF3IrjLgvKUrhgScK8aD9wY8fLBGwtdBsWdle2U4xTuyWfPWilSx
NNpDMSQ7GRzfwWdKl0svyvD3z//4bPHy9eLL569BgAJdfPBgMJpaqplniMgmheNmg5O5MCzFuyjI
uBJJiRRWxUtvwxzhaPt5cd0rHAV9gVVDVbVRePJ8l86W0PBtrFqKLzx5YlbNjwu1TiETWiDuBZ3m
JD86jqNow1bED7LLPo2GgJ1DUJO+s40g7wnJzuGFtRjxHE3C+cstvnZqUnyFx+IxFIb83nB3uXLk
+IGOMVxOytgr34h35e4KbGlv347hbjFTMneQWZG3b+GapF88iRtHP7POO0wkYOILxwwvUK3R+s4o
qex3eyNMQ0oyT3ym+s2FN4dWxmc2wdQBjFG4bUvSfBXGs5OT+R42+w07gI5/kYj8hS7zeeaGZPM4
eRqtCw6d6Ms6gfsVrBL8KyvUH6UVPfJfgieFhNvTKKMBivmdfBdF137KbCT7/oH/vgaD3BINw2tb
xRrNMXM3RSaTDlKvzerXS8nTC/ha6xYocL3Tj3zSQ05Hv24vE/xGBfSY/0Y3P7rLLvHBZxvrm4uF
+TqOBBFN19ZPZjTtM/ndb62vkF9rbAc36kwN5wQKmEG4FXbp6Wu05FzX1/V5yz52oE1DXOhZdVW+
r5v9FjVN5idTkgbs1i0McSyuy40HFDSo17vBDFzInaFpQMZw8zW4H9uvf5A45jUKsPTii3Dnl+gp
vzGHr1lqczV5vpIN8M2/vnq2+Jenr1+Y7il5Y26zH7DOkNRDyOPdzO8aXQ032/o9mKbMsabciugq
sC70vZgQTqFdwLOBf8NsraCigIaRtNCAMJz7cUnZP7BRMvDCNz70JBa0S3FEplG0CnCjJwP4NEiC
mg3u86HH2HvfjdXDvGK7HNngsrFV7L2GOPTodGG1oaIvPfVdtPYg35DFsdo1pqokpByZQzPIx0z/
fg9min9h97t8RN+uPH+HqPjzXyaN02lEh8g2HSeCu1WLDjz1WBgNr9n4wN8jxNdrQP7np6gSYELo
ZZkxZQ3/bgDZnbMNmBtpx2bcCDfQvXeZQY69tHmQb5kRdJLRojjFeTGgFNP5uM0uiv0IwkKawJvP
vrMBMrHVEUdwhdjnuPvb6aC7qeL3mI3KUCa4iAieLjVWrtAcfAOpTdv9ZrMCL9ND7TkEghJv04MD
GIpuYW6CEUYH4l5QWHU3LMCRZN5N6o62rL2DIOVangH5Ue04Aysompn6mGdCyXKUpogxutK6jj4h
gM9g/hjyl5y7fHBq5NdUdGouVPevkub+aqkm3EERPaYiLZyYf08T+TrQhI2eK+DkDIsBzxaTz9UX
C2b25i7U2qaP6EJO9WlWmycUXdf8k+fk95y1i5SFisVUGCbN4AsJDYgfXNTdLoEEPFK5bT/mXmBB
o0IxY0Fk1w4VDoNyabCSRWR6CQSSzBqxUBIUNkfy5Mns1KFSTeDFeDE4Jl1DUkxJ9+dkKejw8Sym
I1605+tl9SFzUzuvDV9DyYsc4FMDWbgbcuhgozOetwE/6Dvhkyw+6XjwSCJ8GC6kUszRJBS0c48x
2M09dKsN2mbv0TYbU5yIpkJgaHdFeW5gXa9CIojJg2RQVvNsGd8Cg+5HICqjzf2InPdOdD6B/54q
7wM2lexCHALeXguYcYwwZklbCdI4/0COKS2Z5YVjKFM6KrcysiF2JYoOwrw9QS2zTZdUBf6KyicG
AQ2kiYlUDqf6hFzAPsAGeg/hoK1uyVAsKl5PIfqDSAkc40wrhmVrZSJYZBLD3btIucN84OEzG3Dy
Gg1TscUXECHND6Pit8WTXC6XIw5q31oGSEm1l+8sFx2OqDdwwe7MGABEcHfTgHJhdE9QMvG4gyZu
s2YnEtrYwRvJ90lSLIYVhCKGYSG69YQywcM3j08B7Nd8M/oETMrOX3Onw6vgrYSIac32uph8GBfD
D8hnIF/GEmzLsDgjWZJOWJRR5rQ71ifMBFbviEvCW8nZE15LtZhPfpLFVM+Kd1lLdN4ng9VkAo9i
BHwALFoW80cto70uvFU8BpXETAsuA1mH4SBmk26PIuMS1H5oqusyRvZ8SGEnYfUR5K4fF4kuMkKG
7sEVCTuwv0Ttq/p4hQU18cpKjElCc4hVksw4yo6MTNR+yxiXebhluS3yjVtbdThyulkSXVDlk4mR
vlD9Nj+j3zqnJ0YCrtbN/vIKODXaYIZ9u/39YpQaylyanU2enKaWdzSIM7XDj9biqDwesj6UaZcc
doZIuUsxCpr3xCc7a18C57rFlA03ZWqzTzDhe1XKpaHLnAgVwJLNPGPHKXkuA38hMHcTp67AD0fi
5ZsLK2bye2Or4llrco1xj/EVWiG2VSuq01WFkt208PFNbpqbZrtsGdzkppnQR/26BX75rVj60PMG
Hwl1cXOvA7wiPI24NkFic6IWuTGYwZi1WS8fPMARwWMkoCCco+hZr0uZGb9fTaWt5NuGff2zNJvY
gtAcK7NfWHOWPNXhmu8Bj4RwUDTXvNNe/3TjohiXxLBcLe1Vl5hT5MfuSsSe4E42w6fSHPd3Tnks
yRgVT0ky07plZOKu61e7yWPx87JlJx45m5D8c/u+WvZjtWSjnDcTp3fqex7KA02KA6hrkx5xmGsF
Dg/WRi17/EpDUiVj7dTvyk0x4C7qVTfV/jTpUCpAA/jJFxvgcQLxewjap6BoLSPEUo6h+ZfbZvM1
MpvtHw1z+WdT9PdSJHqwDp6q+QnYBj4lHoER1qlAZXInoE0AX84WU3mV814s8cEfIj28d/3wqVr1
65EDVIwpGYxnxFPeJ2kXYwQNxZp/HQ1nkrbgI6Ddn8VwsF9DiMvluv6+Wro5gbUtk0+FwiKR1qG1
++ZKHxAODwwwCEIXvZcmrf0JJP4ZK4WXLhQX2u3aZEfJ/t7b90C+ioj7XwT+dPROuKwvLlC5Mzpj
jcmQt2Vxdbu5qtasf04gA/2q3Jgj/eABNGAuAK8JcFgTJ1JR5Tk5oGqLmmJAXwa2Uka363JjRUD8
BaivhWzLeBOia9pyydTXE3wANxtpH2Z8Ds/8qPSAK922otmBHmh97DDToKhG12zYJRWL899NCBuU
L1Hww+Mxs9OdUpnBFwDIRzTOJ4Tg6D2II1vheEtqaOHCAeVgngcv140hBMSBTG/09MgGveASahIX
AP+EM/M3dGaMfIkr5Z5F5ECknqOpuhY2lKWHegZjoXCtZTxKI7kQ2pEn+LbJx+9t1QaXNHRHYxCw
YtOd1xar8VCWVHmc9lB9DW8O9AscDvX9Y/5+FJmJHYkBjQ8mV2N2SEa8eSNalWfnKJKVvm2fZj/t
Wg87oaOWG75dYIyzJ/iyUEEP+PF+BOfMjPRPP/gE4uon4W289nX0AEoYeCSjrAvOzS7pYOdtldsP
+nxCVgHDZo9MwMDCEUs+GoR5MkmiMiRMCAPFWtQ5QN2/MVsPNopZcXK/Pe1+wJH/3ZeHnFHK5vTh
gxU7Zc6BDRwQuD/IjMi8C+eWswq5LUmYY602MndN+Pb8dAoaV5F7URtsNieZeAt20RU7sU2cghkQ
fpQRZMTJZF01cGUwB48DNSKIcjirKoSZBV4Mm4RaWoW3BOjHaLcBG43iJPesSnNTcWp5YPloJG3E
BZVDL9Ciqh1GDpB4ce/TELk66aGAmwytOWGbgVsdNI5ntn50hw6D7CRzjWZ9HHuDaiQtqU5i+edw
W1a2FwyFIV3LKgwfRRKzcSsAhwHHIxQmzxmmhIO+OBDMXKbQhQKA2DU3JSjCAnIC+a/24A+IZMBu
LwwpMY1v+iDV0Jq8s9PRYuDQ9wT6MV20qZAsqQ78gf8MSsgosSnTTKoVKGOuLUIQdilKEFsO/RAB
5kLZcEg+980giOHARd3SoLswQ8OWBQWOg+2ib5vpTz0qe9qGla/LW7SxmE0DWK+bdRKAxk/Lh9Ee
iG/v4vqAyV/fAsocupxRNlw43PTwAfFf8L2vZIB6xbrNe4wDfAqRSuQhgwTyy+0S1KjdrYRVUzAT
9O21hBwI1qhaQizxTfUeQ4MbnExjWQPVBrYKqwCuaOCSt2ou6/MAJnhbpV1DBfQsxHYcde05V6Lz
Z7dm6spQxgM0VBTfHrzX6osi1Qha+v000kKZLVv5qIrCcVOcdL9elutzwpon/z8vvh0TjK9di8nM
yC26cofpWO8VkD3OUAsAiU/gSeV2vSs/+EyUUXDM4ABPaTbrx0lYQVqePE69TloQnZNZHTwwypS8
PcimmzXdkMfykCqOQDS/kTwO6EtJ3s3IfzKvCRCuArRomYM0loiE0KsO+x9amlSJhKEm0RXPUxu8
4kKspPAUfdIFpFoux5w3Gv49c6HuBoaFr/CEEf4yksYDwPKA0AjPuoDVOeGm+dEsemxsIPd7Kgl3
kyk37GOT/bR/3IeOBg9N6IMf1xr+7HCbVLvRm7v3bOxdJSeY0+M4x5PzWJFwZ25Lvtt0MoAPtsP0
YkRMB5hn+KWRIla7ZkjNZlYsKdhLZLgcfrK82dPUV3hk/VE6o6+r7Cdezeb4FTsW3bqOvFVLaTvR
uRafTEMpX4DEbgGLgJqpLaYfLMVsAS4EhNoFN+G9vwRkI9LLPEuM/lRg42H6NWje8I5R4Syg5OgI
JxAesakxtioofohTsB7p+xFRBKltQs0SpCCLE+yXw4awu3bskQpFqxP9xbFuXe8umWA9Etzc8g/D
OHcs10iq4Oilz2AodKctF+1tS/sz9LswZcDFN1735L4RxQeDpLUc3hXWgzYoDXCnexgHIGepk9K9
CZSd1k+ESGkwwsm47BjBekv5OslJZ8ngQRo8Cu3paunANL3L0CXwSBwc8UUeyagQcceocvxd3sOV
Q4Vg+1hbh/6zxXFwOd5k+bjoGjkfL/k9bsIdf8YLzFOr+QdpcxYDHS2rFZIG850TLnra6zxv6CQm
yuH1kl5+2lg9vGpWS4sJ5wzDrfK/m3aodA8evLsJYv68ZIGcfXfIpY5NLdP/rRtzcX/7OUI/63bH
FoXuRbMzetvs+JZfvHzz9bM3n/d7vTVWRTUQ/jDHQb1NgJquZPOBUrTry3gdSaPGAAQvcwnp1+MA
5AqJnaJ+EBjpAOiKD4imHCZnXr+Rphjso643XJpVqs4h9mxE2Udc6N6lDWDtCtmzqBbwcFuVy9Rb
JGC6OeJzp2MBToNqsXNPivxU6Z0qejDs3xf/wuKEqfYULZH8n+l0eoqRZotybLoLnMQVck2IteIK
jrx1DnbQA75zb7UBlJmPKOe7Y2MU2DxRTydEAJMVo8v3RxlrBwPnkflHTCw+b5KiQRMIh5QcAvwS
x0PSU1DKNLPZPWFf5tjgsqrKNUanR34paQw2onnGv0/PPU7KIbUX8AoFqJ7ML+z3IbKbW0PEzVom
Au2Cut5hZBlOAWyZfrlgeklVgVQli53cd7mikDf00150fxp4zUP4YdTnD4n9nnpI6wuH/coDYJC6
8VEGfblIGd8uHoEbgOHbIIZ0TNiZyfA1Bm5+RUwonZwHLyZe3q0g7N0wHPoBcM0V8MGBNSdmAkMY
EBuSlWWu9EPsuqGoKFAnO7qydbArTsdEXf2gJHgvm0ZwpSng0HAcXWTuWTYiugjTy8yDCIAgT0d6
SHcZjm/eisYTJ7xJwYQdQki0w8fYJ9GDAIqZ0dQihKw1QhAZvi9gbSDr4lM3IViD/RxSwUXxsnrw
q+aSqxOAm9flnP897qhdtCs2+ovTimmrXs45UY4yoCwEHnfBuXQ6wjX8dbOA9mDsoTYIub/a9kfT
xe4mxh304C8T0koECaEoBFMYJpBgx0WqpWmIrJ9FzFATjAaczZ/EtvRVZZOf8MUVWMeQvQdFEEB3
FJbTZGfOaH1xqyCmaYKMMi3ebuHDiH3MEl80oDjIboISQmCC3N1iwq4+evZ3RNXaguCr+L7qa90d
RGeHcg70gG66IBaiuxSwDp9cwZkSje7t/DKAGhm43wZjOuaBGIZDmeN/U2LX1llq9KGSBG7kfsSj
nvO/d7q3CpnpPAKEZ4uvihEu17fDbWT30YjiYHDnYQjyM+ZTOiL9Uv/5izfPXr94+kfEC/9cAML9
BM6J2herfXulCe38BrLcr3BrF8QixCCGH7xwK/pqQGBbUksejEH+JN0eIHDAOUmXMP1E96B7l8Ss
NH/DewcNmc/BuhkqlncKVYhfZWioXgX6zhoiva6mZxC8XWGDQ2k54gfUAi3VP6I2R/h/du3gTRPU
S/TQO19Z0B7UOSNGRukhjBqw3Z/vCEuB8vxAEN3+jDWKqvWviKPBjsXMi8DYKUYY5XOTF0f2syNZ
phVP4KB0CB7EerqasG8F/aBSBHHdJLZyMsuIvErwTM34wOgf2nVpVk78DpWXwHxJ9pDQ8ICcEfxv
4fUxKEvo1EP/Sw1TnZLaZeiR0Zv7VwNWgJ3Wh1mDlqnEXOaXIFQSv8mEjvhKlkWxgSrwRS8VlYGB
XS4oQ26QdHQssDodzjEVHi41UjxMRTNG5eUvRtbxIN0w670RiQQGn9bqog3s0XkxxeYGeBi/z/qq
ccrCzx1FkgE+yvGjvm1ARoeoxItIYAh6Uw4EusLUOf4uApffzr4l/6PI7G4VsagFtgoYE7kdLARz
XekuGcgst+QYX0KiH+KY7TdD1RrkoAOTmDytuB+UoSzE9ya+Pha2jQ24T5DLbl4EppHgHjn0uu/A
3WBJBsz3wMeGXG9TLhTwK4x8cKCxa5vcCWqYT/U1wkZuq+/2Neb28nJCDnx2UIVQjY4ro3vBLDax
4Lb6u1JLoE7AFaDUCTg5ebj6Gdj0p19++fLVm6/Z28mcHPSuzjTHb2MwlD533Y/qMO3D+yHPfngc
J/XuqeOquLxqC0w4jimI2iP7M8sRlkwdSPDKuOuBiV6PEhpgkqto9oD67DiHjkgZAjwYXRndKHx2
O5BJB0t2ZtOR66CdEr7yFv6Mkqk6AfKmCtB3NvBMut6xL/BnNjk5/HbTbN8l2mib4t8hrHRV7STw
muzjW0gz5fRvxmWv1sv0Q0wmcaE88OSDWoPcPKFfF+WppHw9nK7nmHRllIrBMgPvnIQoakkzFrEe
J/ACr6Qj7TGlyHWcfvVHRA3RL6KZTAPvgOtbKhUnw8vVgM6w0m+L9yn0ZXr65PZUtqB0xPr9dnZ/
ORPO6mWXLs93e7Mj9ptBlDY6uD9wrUQUpcdV9QNcBM2Ft44ZEBkqMU6syeg4QN579CN7Bcozm3PU
MyTOQZQs69PD0jRl4wviben6GhfxQvQBrBFDGKhz1S0mgLxuwGlzTSB0/KLFHUePDJIYm6Iv2giC
P8nKKWOPe2HCR65Zh81RrEjSnGaMYZoU4YtpVu3AVzmnjZNOI+UeU6pFd6B19nV5woNF0esRQI2q
nBTaDQX1+EQoLckkADIRA4wiGiebfzDuHn08wbdSPQ0bNRakEAhbdZgWtxUCHsDbJsAKooMoxiKi
Hzj0eFZBvvnWwwB3GFZiz6gNgeKEfBX2QyAi+E4VTLAKbO0DZyByDcgS47IYajGiOQQBE04jPjIt
KW1dvYYinnXTx3ONAFuVKuk9wbLf+rqYbauL2VvTCvkO/pZF0vbzt9PiuY+K7oKTUW0z5w+uOvRu
VSnzdldbDEAHl9+62SagUD2ZsvhtLk0AYqB6zxTFUGDKafRsBzCHvlTRpnBrIqtd+vvU7dAjz3lB
TudOB55UTUMMFFRv8X1jmlCdYxi91gaz+xkN38sqEGas9lhBJl9159ziGN79GuW+FDVxoitKdzDq
kAeJ9rw7/C6DgkcvnqLZZDjbaTcoXlwuGzaRT9Kt6g4Gqe06ibN9AzaJBd8NdmgTKWuZC3mU8UFa
pZ0v2bZH8TdO1yEY63hKK2Ewy42EWayclzGqXkniXPU8/Bs7XcQXTp+laDSZJiw0cbKZEwqjI5el
cjNclddny7L4MDO80maZJFas7Moj3JPTDlu/h53c+u7o/qH0zAvKarKQre5yq4zIHp0czaKD9u8u
amsniX02VbNHHlcauTcj05A4xHo0aL63mYZThBcQHRMcjJ0cP2kaSaOZuua4XD5/nxs6tT2Xv2xA
Uf+zflAdktOPOlrgKVsqpxFYKu8lTwWXiyywK++i9dAiBM2EoNLJcWqMuTUjVAbvHo5dkoiKFXo6
BK7nEgxNi+Jfmz2FxYB7O93It74HJwo1ELq0Kt6+nUxevnoDiOkSiIa+SdJqH2yTfZ1tZeoPpBNO
XULhLijge02B3hCgELQC6zKDSrAJvhoEP6l3vWVTEV+ndvwQ0y1lLUIil3VLhcEwcoxvJUZVkbbN
v4xTd1QATjfV2QIinQ/KMzMhz3aYbS6e1PNn7hYL7uAKHd+La+UGePzNmB563q3EmWxxIyEdppEE
ZDMxPCQlFqTlC0dRlH2Lc2dKZX0WcfMjDpxKWaD8+sitTznzjTyq0dKgKxQ4kEvfzRbn+yNHgPyC
rt8jh8I8hlFjYEQcEUTXeX25brbV/BkZPG3gcsqNVjJ5uqAHL+smtRQVZ5cwCU25kDzWRqIRf3JR
9yWLxQc/gYVFqcHkFjoQG+DcTntZfVTaC4zfp70YEcSvFd1/5jv/MQ+GzlU5kIWjG0KztFRQ00XF
UM0+EQFmBgA/06WkyiajUWQ/c+7hRnS0r6C3U/tx+nxdS7rgUc6xnHHZBzxmrtlW5x3pbdTmw7gN
nY9d3RNp8TRZl/L5qrVCWZn4hfm6n+/xHiF9EOKk0d2r683u9ujxsZuckonG6r89lyOaAjAXIK9A
aoChslbdY/PQtipu4Ha7wYPLeQsoKWcyX5eoqJI9mpA5CUmLk4Z41GgT0DN+O21eMDAJze0kdeDf
hCozcviT0Wu0Hy6z6T4cGB3ljySpN8XD3XRI4RDiwGENQUUKb4ChBz8cCfKABBdUlcCyUaqN/LDz
puz2qiRPvc2UGohmkQZOwFqdamt6TFSzd+x+JCaVeC60WbLD1apbCG4cdbUXVvE1CCXrqkJ86MLX
VPsAqk4e8Qz4WtH2YbbHdToEO/u+6vgXc6P+sfYHacHTs/hNl71XkjylF5OTWtDuaqqLcWEXzA6F
72FvnCfSzOm4ONEcd1z0d80H+RP3YGpq9099tB2eUPKsozooBdZ2GocvTr9xjs6kIUBo5pRvvszF
56eu7HhospthSU/jBeXWkZyriUjJWdrwZfKUZqqMhOBEIjJTPI7u8lNtYFlsOHQioSg76zUI3VPz
/pOWvDQ1y7wNi2OiCnfdUp6hZL486hdsIu30K3QKeQOJGCQEK+luFwZfJWLtuOnpYgFe34vYkKWd
ZtRso7Ax85s04ueeih3c0c4S2Z0ij8+406DO0d3eA9X17N/Bt48WOgSGcgrSMVOgdHPspe+N7Lv3
3/xvEnq0btrqu5s3/+//+otfoHFhv2Yf7H2N+U639Q7g3ZA+m5ZfK3pMwGb37N+b255P1jqPpFD8
fg3NgVeVFZtAB3G+u5bY8Wt0JGRHVYkO5bBhQzELEZkBY4/axWzz/PcT+ABjHvhByvxIz1SHqrxq
b5Ty90JSGHxthvQG8ygFJKCGCm8NaBOTwt49ZtjTqhrq8iNeCX6sM8uP/16X7yryDh8CCxjji4W7
0uDTlN1q0UygvwDoQMUjkmvsvaISxEG7P2t39Q58UdEVgFtHSKPSs7Do6wo6fsKxJ4YqcVaLBdtY
08xX6/ggY3pjJwod0YSnkC/HDy6xs6Yun8hns4z/yA1DOnrx7kqtLQsLZlmVjNAuNg2kggNPEqAZ
9EZUZWLeCz9OCRzAvmH/gSIbPKgJAmkEx3ZVo8tH0BQGUDsYhfkTR5vCFMP4F4EPg0pTZEEDrpC6
LT2qWIeDf86/5fASov4YGaG7WyjozSUD/+E3j0vlNRzyQnzCY9dTTCtTUOsWlBGFY0FSpBKhhT7R
Z+ec7sHLOyEyXkMKJ1CyyBeAUjUaTQ5CYjjNEw4H/HJUiA72IeEtRIlm0XcVsI0LCAmpv6+2Qzk+
tkVcQCJHczTwX2IbiQJ3punO1ZcOwg04tHSuXk9fb0Lj3uHJ0zpeefcSfXk1Mt1JfLcenFfPZ73A
chcAQ2roZcG8lz/a8+yfIftr5/n3R2/rBCRGQzm4ZfcY8RSNbAVklzFqoBnReUXuYriZ+A+gI2Mw
CvbwmSwQtwLWcUzjYW9kLs3Xkr60Ap5nJ/t7zndO7s/OWTh4uqE60s0U7sQvyraytXnq/jJZaYrn
TcEEhyTaur2oP0BGKlPQXt1Y0+gnLIlwCUnWzj45IANTF0qnxnnRbKRd9J1zYSswaIhL4F48yn3O
jAgiQNaYYMcUvG7JFiVXbGXB3smEbYjnCgCGrkoNRNiCXzBkXEFDKhiGAFHWyTTY+TB6ckI2+92H
b/4XgXb97vbN//ff/qdf3Cue7ncAs1R8vQOPwuL/nhb/ZNowFFL8tsWvpmf0+R8vr8t6BaaJz6e9
nhH9viAz1cQzU0lS+94bTClITBnwbl1G4Ymhhw069Z53tIBvPzPBBDaLsARAmbPGiCBeHr0USDRX
Muu83BtdrbiqL68QcJgg/iC0BqPbJULQwvnuN3DWW3YCtZjF4Ml+jqkPjGQ04R9hihVn5YBxE6gv
3AbSOqewp1baPeYG3RmWs2VYP/CB8ZYA8wJuUdjeOSxgcFtCJ5oZL4cN289Afw97CVeO+cCMgJMm
8yDY01ObG5lppvJ4e8lZsYEBhFjuyvfldo4JW21674cDzvzsJXBFcPEBemBhfTbBmmFdV8ujOp5M
VpjzSN5HOUCr2e+4P7sgv69Z2xzcaJ9DGgJ5YK3AAFsJmLRZG5ctjzUdHhM/EHjub+ij7H6emnFx
hNngfguJ80yDQ/blp8UaBaXPV3BNj4iK+JyA7bGs1y0n0BHK2uzPVvW58GN7JvxNLyYT9GQzh3Rd
oPN2gd7bqCSlDtoUcBG1G5aQa3kGsHUQVNiOKfeC3o2R4o57Tt20aTZ7yNrk+bgxlZLSQC8BCrq0
4/SiR946bosXXfUPAQzvaAhm6d6jC8aV87/Uk/NWAL1BS4utTUNU+B3BApP3F6+v49TkCwbpeIKd
EMv7TTANhgfHeMqWenULBXcDrdK0eEYluAOMlvWiTpjt+5OCps/LPTgnxkOCta6ugaqXyoPQb0CY
oZ2/HCKY+tPiogRZ5Rb30BzAGp3F0boDL/WEq0knmnNtH0dgIhxgK7ZHBHm/3dTn6JXHScLK1iMK
PPEOLLm5CAkVKqvdJDx13kZ8miOomQvJm7l1O/DG9i2ka0akvVPNBtPew9hgJfhBxnDRdxXkqm62
1WJnbl3C6SbjzALjGAc6kTvSK3bRNZVp8c8C7wmjvcQF3ip8TtPfdf19KX4T4e7THMZwfYkdBUrR
ilyXtzobDO4trDYCRfHCJ8dpl9aDiR8Xr8ubL93FE//4xgiu/re2fxn4l8ThW68Y7J37IBNBv1sn
AJW3+BTmDTxMZeFlXNUUGqybCjud+pPEXJ1XldxEbgIdc8cVTU3fqG6gMlpW4InPu4ZzVeuZCE4C
Ro/sTHuAa7k8chUNabVOFoKg9zPAr7ZrwnNSh61xSYF6PUCrbVA0lR3ApEFO2kNGxEFB8DIFZx4a
ps6WRmKo4VgOn4KnkZkiC0jJtcqvJedZ6Kme+J40m86MHR/OgWpg+E9fPde56agZYdV122t3AJOr
mgNJJTX26aiHhk8VpQAgVo+nT6aPBz1llsYFHPg0xenFB97dEn4JLND7QfiifPbWIqyd3HUp1LGm
qkhEovLbC4nisp2eE64/fXr5ApGmvnr5+pn96tWb5y9fPP2jfH719PXXz17bvp599fT5iy/dF19/
8+rV62dffy2f/+3Z65eqydOeNTKfNxvMJLmAP+TLBt+7F00rX8Cd2wLEkLJT4zdgr+av4ADdbMsN
fi8femS0xizBH6zRWj5CyV7PvgQaxbrHD3ywQuqF7x5dL7dGZFlDcsrprzDM39DTWb2qd7cAwbHT
ihLJCaTHP5n+kvVIHAuW5IF8XeEgoGM3DJddPTMaV4Bwi/UUAH5k+ZGzcLBw1MwQ/QTP4B1MHu5C
90VWbuYYgGDLh9nEpq2PBYkpO7DB0E0VS/Nvsf5Lvws8oNXVzfHnIXE5G9WNFpkFllwsBiN2EvR+
w8WEH02zQrbACOZz+TSfD3o9OQDw0+8GPU3Q8NWDQU+dGvjm4aBHhwRzLE6n00HPHhP4Cr9ZfPPi
9bMvXv7hxfN/e/bl4unrP3y9ePrmDf6+0FmGUGg2o7hXzLv+Z37/ZkcEKeYQig4UI9Ch+gzbt7De
hP/crAwbjQD8np61AB2w0wIYsn+jrL83jLe1wIIsXYCwJl+J6E+bpbKjc9o64uszRxpfQPtAy5j+
fU5GIvWnWcqR7UVlQobbxboUGMm5xvvOSHslDXlCRmRbg5nVAuEXKdTVqKkNVgFrR4Vo/6x9k7xb
WEgIC+d/EFsRJN4FP8BiUnv8fbrA7xYL7ZwmqXtjl2v28bFhJKxWRQFa0oLzZt6QE9AoQlpQL9Wu
WYKiPKJhozHP728H1ieTnyajM2zKDVVaelqJsUryoNoeBVgIMpoMRBMxLR9Rk9/bw4Y6mjk5DfBH
yZ3URj77T6/WjCx2zKgcWjPjx/g2V149NvOIMk0Dx4oOtPnm9066hNs/UYaPuZ8XLDzjTkpDZYUM
4dCob3STNGokdVLKSKYMWvOXmEfLnBnnsA4BnDgCiuS0oprIfkbIm2I+BahG3ILkWGYvqLKh6oyt
BKJqVuJLHVGNHBq/kgFmZuLrer00bZu6RkeGXuZPEoWuyw+YiW8h1pH5k18lit3US+cZrEzE4rFt
6MQqClhW36L4RdrRIwnVTOXBxWk3XDjsg5PBFy//+M1XL74enCYBm4fi7zRWLtmjfPO/edSLv57M
iye9KCK2QZ8t808E7uAvMQ7a/yqoEa03eJ2E3wV1ZMC0sCFYA6Uj4KGYUo+CAnR9xN9z4hnofFWt
L7GHR2Hr4Gu0YOdaG47wNX1mz3Hfs8cfVFhTNxd2ZXTrHTGOBaLJoNXZXE7wCrAxOslwO/i2fRiB
eyC8DjpW5aut4f8eCpDHAfHCcB4qxdPkPDpLe071UT+uLcZ3gSZCdp7cxIfzDH0lt9aUfqyYhWE8
x/YzOdiPj/3o1/7c0Iu5D5/Tp2UFpjg0Ulbw0vRoOkiOdmJHy1KcUJPl7OpFIUJNvnDGHXnGvarK
pWE9YdiC7dcp4HNXPZmSldPOxJ4SFNxO/cAbH/0VF8FrnKQgbxKcyxDOeLgvVuO55hcSWlx4XuVj
kgIwl8QVXY64/uRlj4PYTJBl5HSqsvD1Ap0hI7dJENnHHAWxVtNOAMmbksMHvg+oOCPgWpiOUZY6
USU7+zgdfbLV4FMy6kWJdBhXbneFwj4ZcYSP3Rg1OHbjl7hBmVdXPG6Yc5ESQwlJpfpbN+tJsk+P
KnnyVje06UmCAunViS6RcLn8AlF9dzYG9x+099vZt2sUnf1qgFM09kZ1DOqKajtaPiRftWATAXpZ
VzfwzjD2FhfWRIyLqYA1IkZzXQwsVxm7XTWjN7+cBomuF/C7MCdLtlnGy+MkchalZKgqHryj+Oev
SLDFXF44ueNuJWc7gZglGQ6PXzZllpRyPLylLoEgOWPXepxiXNaQlkYxy7HhpMeIFkqewHCy9dKb
2aHdcMHMwS4FVwEV8jkHw4SgKY/nD3/6rk5k1UucUP1DfDKjBYKPvDxkiD+Bf06DoaDmozNxj92j
zOW22W8AeWpbXdQfEti0pDYFY40BVcySdTd99ESkGbwmvHnYJ6lMCmHAUKN9wsskGnN4x9agAhp1
zUHRtWFKaz+lozfOOE+jr2vZNuH+95viFLWjGAOh3Z9x6mVrzgDr5EKEgIUt0A6j+Sf6Fo4S9G9b
GUX3HWGEkgpbfqiv99fI8QpSCTJzdAqDqTM8wdyodHO3FJZnBxTc2LyItnrc5MOkrJmipoT6AqPJ
/HoXDGFvlKPclY3LJIlY63Z3NMVLwuIT+uM0Q/atR/dtADrqyKZMRT4SppE+RJIwu/uC4F/B5jJR
739HXS69I8RdFvtibVCzfd8GH0tUXiOx/jc15D5EpQ/v8VUQS8q18YWRcDjgXgcoRPg3tLOhrOBu
fCcY2xx2cI2yITCy0g3YWngCxbIUyE4BnIjOtZetgPmVTHl6Nt9Gd4snpGg+e9St4LNY+i5tveHf
jL49HGCbs2Iw0uYhfaFY5CWKIAdDfPbiibuin/nWMaIlGE4xWYxL9nIZdm2D8ctVi0GZ1gGIrHHl
+7Je0cuR7R1hCz1LD0YZ2t89B83k0ZORDiSZzeDI4bqx+t5K0WDPy9U5+kBRXx0jDberubzjwNgR
FPBp1PgoUbAbYCKLOhSLoqX0nJKYQAe4mn/rM641H5Z09IAdjNyMwgbjMO82E8yqBh02EiwTyuKI
gs67wO+jwSxB008KF+3C3091FwlRUfmhW+OHenxykkdpgpS3i5PW3dUnaDz2ujlF5fI0mh4+W8Nt
Rw0iF0TXdAjj2TUNpiP3rbtGOrXWS23LnBy+5TmpOLMkM0/4hD2Pis9Vy4GcxzsB94EcBXDAg7Fv
EMo9tmqwpxvmG6o+wN2wHXw7nD743bejh3/+9gT+ODV/fP1w0EuQV3Zr0pvBFBXWOriJ0hutOlk4
QaI1Au1QDX7shpTu92B9O7i4PhsDLR3Z8WDOd9vxwXp2HFjPdpjaSLh++W0Ht5CcttdVvIMS14e/
4pTQ4Y/sDml1xwNSgmrp+E75Of+rd012h8ZLYyBdgkOCIvFJ8TiDQNEe3xjNON9YKGt0IGbACZTG
HxaP+QhCpfAEdgnTuLDCOMW+7WgB4SJHB1vIr/1HroNUkZHhtA4v8sO5WoSHmbZ54br37OPW5CMo
rUUgDfvniVqdWXpJBToMz1niUOIgGjQpYrqDMfuPm5uAPI1zUlfqMo+ZPIogxW/nxaPpr/+2eHCQ
0qzR0iygKT7MtWi2K7uolpUdWEsGOiAmg3fnKeV5WCQ5zugACZiZibrOfEtYo7SUoQMU9Wzhjxp0
1FX3oA9zIUI2gtYPEA3ICaazPZjMISQMMGkhfhFxYY/vN9h3te1pOY5vPneLqjXoHbd4/tZkD39F
Z7gFPvn4iEU7vSuZRAQ3+skozd9ZOFA5toDGeCqOMldaGLDSqFG2HbtrPWWIWRzyElYsBwkPHUio
xA8NVIP1uIT26wnYvmHFyls6ig6Nhfg9rE59zh4kNSxUfeE89c2Mqbatix+lPw9BgGgWRKHWz6lJ
bvbUFzd3hJcEWvAhMIvjCGAXPwyx+nThjcJw/aTDhPOSOBqaKHUWDV2hhycMh/htahCjvCbXnmDl
mWkIIZ9S1WdZacbpjMfXizYK8RNyuqF646MuJFNIp+xEC4LmUNzzbjGJC9FKnIKcMShOBtkq3dw4
0aBpr6M5KUw7UAxOBx9xB3ySOQ8/7ZyHx895NMiSWI2YsRAYMRQyfzwGuh/NDjZfY+N/Hmj+xtHY
+BqhwtXAMBCa/uTWChwr67Gi+2q9v8bMlkNnKw4tOPuNYZEUp2ITFYBZCVArIO4LWG0EJQzP4ZDV
vPizYWNmJBBizraDRHu9tJlGHmfSj0l2jiIOx0BBFk8LFxSBU2rUHM2q5iQDKgopHOuUrSdsDmXD
45skUTJYKo4Apk3dXhYpI65FLDvShLUhFu/bijBlDb8hcDuYzisjdaF5BixqxLzGODAy5DR7cItJ
iB929yxb7eanbJBmyD7Daaxp+mTymL47HeTPCk8Tyz+emRqpqci7Cw+MvbFzDzARXZGCl9kzlyfh
pryFNinaU6EWw0Kcbcvzd9WuPeJO9DbWXtLBfpvlyuwZ9PtSYlcBTxiSFOzKd5C7lsG3mF/UbSqH
D0TKofIFUcyBUc4/nWt6zTXK1qzXsTUcaeyN/5ixw5jbOw0awgvcyPHjHXQClzxAnYypYVZeDsbw
NdvtU/6k2bx33atU0EINvZUa616S5xRQQEAbWkH2dhu1LKcXOIbNTtZs1WnuZeQUnr2thO7PdHDW
BIF+xKmWWZ3cb09hUviI9ZOfzHvMawv0wLLIAesK8jACWAML5L3ohpZ4IGLVLhwogeOHAzipZ3g7
n7i7+lSPAxWakh28hR+iix9el+irqiIXwZdDW9nxgRgHB3+IvY02Fb5wBpxTb/qYgLgw2kBw517v
d5Q4xIjvq30LyBKB7tFs0P61HZx8ezJUMhzG//MPpyP1A48ZjMHwaLodQNBDQScd4KC2g28fD9ij
JV+pkFAJ7OfYWuasPLCHBTrD2iOofrjyt8PhyX//8+mD0bejQXeH8I/kW9CrLNBoENxtioTqJX4X
qZGdXj7yrobQxPDzLHoM4fFk391+7MNJYBLpLOtljrmoVyuaHTkWuQFY8wA9kMMTeUa7zvnp3HPh
TQw1wkyJpiReeYQjsSrPqpXnLuB5ytfrnIOHGd6TbiePjAP+yO8tvepp93wehFTxRzvJ+FwXT8L6
4BnnssJlHY7iZ8d7xVVjGONq8w+seQH3hfgVi/BCGakQ4kt8InvpqyJ2syDQtNQkyJHTG33q0dLO
i5xCyVSDAFhKMQEDsr2azNjVVNCx05vOpjS0ctOErBfFajA8eB2jDVlv0UdNjw/BHSc7edAWhUw3
EZRDwqYXcHEP7aIdS7EGzuHtYCwH/XR71jEJ/3Ak9FyEG6grBt5Qt6nniWtVXW8op4GXBMUsg2M0
KpaYYsc2iLlY2QbZXDjIgozPYOxWtGAuTStYfdgYqiOfpJRxCGuIgZSqoKsEm1htg2PFWka9rCCk
l3yo15j2zvUGBr1RNmW9Kvd4dnpA3fb69DaSOuW3KLUHzZa5CvMTkfkV/pNVWcA0SC9C68BNxXEe
3uepKUs5FNAda9axTN+uIy8jh3gDoGeAnGWtuyyd1oxgwRD1vU/qhumNL8HF0x6YVggBLK/15Spy
GPGuaOV6Ro/nuXvY8/zMus4eaYVg0K2xJXD+YmGDA5K2iOHjZEoYrtzr4GDK4NX7hHoxq5fjlEZ8
nDZMe8zvKMmVGx0a8XHasNaEx1k9OK0D303//dG6r8PYWrjo+Du4ZwWs6BjtOVhkeQ2ygdnR2YgJ
Vh8KOzkpl3Qw59/yj+4WYcKv4PM8/u38qsEHpWxjVAAmjAcBwYrxK/Kxpr/VSnODp+kRDf50v/0B
VnUw5iVS7XdBjNsWghVKxbYNEWJ60dbfh5q2j11KbY4JkXqUDZRiCI3D9M7lbbsj8BewQwkZKY01
wTclzv4YqoAnVKGHw0wxbKeXYzpJj023iXQwVN+aw2rqsixMsEByLYpV5y5takCRjnYL+P/pdHoa
dfDk0KAdPEl+JWzzd23dQpvk2ka4k84mCC2lY2zQRNeqtnEqCkMyeNBxk+GEL9x7k+5/lDvhwvu4
rVHQv9dGDq3Gngkt9GbEBzBLYaArWjFMF1YsGhdZm4YAeBToJdDuhtRKzC7oe85E2/lwBDDLuvTd
+xKcHd0KAu4QykkKAN0b3tz7GKOj2OlQlnfm1IPRIeaP1x1s7dg57cI1wDcAWxOhY9vm6WmkWvi/
A3a9azwtZALZoBZAt61sKxl/TXOKTrIScoZoIicGzBBgq7m7c+H/cJccggDF15k0IxnOmFQD/EGk
Mrrc1tVq6SoleopDBT390BkSx2QL06ul9dAYGoFiXAoxeloDZ7CpFmFsCv8Z6q70LWiNf9GIrPXv
pxsZ9D4MusQgXt6oOdsv7hDAhUklqu/2zvoxF5ctH2BHk3qGbEVFc1aDniDTdODbDb1PDq4GvhZQ
GgUNSPnctxUh8oIKq4KvICeP6+avArDmbhQVBktZj1g0L2jTBhnz4QjRCcKXHXg9cTsSwQkOO7bp
jhujNgVgU81KWgvTX8OuHMt71BOIXmy3zJ3ojx9F+ogT6uXnRYBfD7Hpr2GJj2QqHIOomYr/kCQa
90Cea7NWSyejBtlzu8QmLGjGt1hLRlLRFsaejH96dPhVYT3xRJ9ZR/2kbQC4FuT7ZTM1uwVoR5mQ
zBxKoPme7C3ksfnUeiflyvcc3BqZz9aI4cf1HHSapcVIUwvzlpPiIMDFOTsI1yoGnzlIuVSNUaJN
0T+ZOIaUGFR2PHF7BRUTTYKh6O7tLSWRnC9s6RUJmQZlSn4m0OYKrnFNcOUUXojA3shpt8W+JfgO
twc2zMqPMBgpIEc2SBFaoLAJh6jOoMrMkMwZ3V9eE/AOGKFp0yPcYkZ8ZzxngJI8LxGYvt4dxG2z
FcfSaYhLIQUEdjFPlH49YapzaVgPwSxDDO4o6DF+j7lUiRSwaLiSjLsdHFRooYJdqftDr6NRCyc8
0ZhnppFHci4311M9IAx6vc3jmcY3mUVBPkyYRo9h9xbJA1D6WXSd/0uJyJlTSYuN2knMuMwXjI6e
A0C1o6XHhBD41I30eUSxV5D0xQ01MVC8aF8R3C5DVPMp4lHZ5L7birIwADbpOsQvh1wykCS8paNR
6dODrXHWIEjhADXco4clE1xeiK/GVHtbN0CbwYQ8090UMH1D8a66vWm22rOVfY9kWR20Oj0RrVoC
xUdXTYfE6mDqbaIAXon/yj3Y20Q52E6K4CbEDAZgdYAGvawH7EooBX00eTzONi2I2e7mvC6FHxH3
4gtW941sm/MMOIFIAczCWlwZMiH5hrIl8JYO9YvFhBKq2Lb212eAWX4RJn6wS4yJZmS8YYbm1pRa
Tot/urWo9bixdqtvAP/8rLJFyVmBKcJrjPi3lGeHTdN08RJxcllWrNfnq/0yYSCeFC+KocsDM5Ie
DatTkxmic4Gk8Clx90aJtga/G7gGvq+2DZCqnlibqvQgUQnp+yMG8FC1pY/KXZp6YXHw8d22vrgw
ci7ABJ9Vu5uqWutsA4R1gKl2HtOdWe4C2ExOLWI32h7s9NaNKcmIu2blf9zLmMcsGWrw6u9u0jsR
eNqFjKk8pQqR0vIs11hBwEJTiDNSAtAAWB+mlmITEa0fPuitGybD4GTSAt5pJCCA2dF4ze04Ocob
zkbCilMiNYsQgmMmzP0rP7ntrhmrMkhMF5BNBJgT2B0pzdLgYtWU+AcAaq6qD5hkpIyy8mFwPedC
wLUpz+EibcPHbSc7QqoyFDS9tmQAmOOqJT7ibS+/niGXZRIhRiVqo8eYKO8ZMIznF9bi6vdYXmCS
h/QygoXYNFOt3SLK7VJuzD5utrVORoTranYHcmQoYVMIlrPsoAuobHdppDbhtcDlA1aKHjQh17cu
dEAQ2731wWDavAYQk3J1U962mPXKkfcuou7wOq1Ri8bsktdVCafvYr/ykynpheql5C7/IKCCx5Sr
kJfZqHUmYOVBpheqK4qOvuHoDKEkIt4/NDRIa+QuGGJLMfQP9e6oDxsYwD06CNjLXqV1gZ4/AprZ
Fw0SBaDfxNfEC2NatffrLvej5CvL/IyJjHLt0tHK/SxER+kVEgUw41mmsiRvCyAYUDYP5Kd5sGp+
YRR45oVVPO0PYsrAf/2f6EqY08KFzYnDBP/l/4yclgDwgyaZDdk3Ff9ne0Dndtn8AgqFKlTm5GnZ
e24/AC5P2WckD4zlpf5CDgJOizQffIerF35JqaSi2rhe4dc7lzrG1ecnqOBrmHz4HU9afX0aQd8z
dL8AzpN2jXrdyHv5w0U51YeVUmw46K4WgIQVYD1dIWOfAEOapZSZRoh6LlbBakm642I4mNpORpQU
lTJcDUZO51x8DQm7WJd7KqbB/2Qpn4ilgG9bxo2Lds6FPQ+J3Onxb4diND9G4g1qroDPi0f/AC3e
Nvti0G10GFyV7ysLHAz5O6FFG2ZetHuwc7fc0aHGIJ1bYYdFDIzzjZG874SPgedUQkWjdLI4z7/J
eoVkFkaW4f6WEpJuNpB0mhLL3bdtKXPUHhzNNIETHCzkDWGq7uVped5F2kDV85i0iarxv+NeTNH4
37AhouYonZqlZRTgUo/yc/533EsSsfwxjoSOOaLZphwr59Yx6FOzqUTqDnuBShMhU/oClutn50yJ
TTqG7/wk8ogiYLUYPw8VP/oUFHwsNf7cBOcEsZDoQIERy2a48D8H+cly5sjoeDrLEJKb4E9NR0Qy
0N9PQzHevuFy/CVsXDzfT7NvaoI/z8YlBvqpd+4p+lr/p/D5lyV8kgd8VvoEOIlDAqNoz/DAgVZE
FNbEuGKtj9TT+LD8SQYuGBVJlKVN4PvXIYNqQv9PIfQ/SgiVjDiYOHUK/xlmE9YpYeFEx7d5uTJE
bj1O6MCqEff7TylXH5H/scXcv3Z612Lzx9H8F81+/fNT+yej6phs1YT+Qwj2L1T9Wlc3hur2CP9w
DM098gFsu4nKNq4ICxw6f1q6ssmNOwiso0gHDbmx/9Qk1EktSfr6CWmE6k2NzLaOUm3wb9WHeqdc
fRf/jfLO/3QbzYnt8/L/p6GDfguuUBCStC2vB610K44uIKDC1PsxrXgr8BdNLt5DkkxwLlP95NTk
etAdakVASiQ9C111Jr2wAZ25kYvgE5iLshzFZac2Gxc3l6Zw8Sp09bwENNrWsT8jF7c2eQbkliWh
/FVb7ZdNsmDGK5TesnD/gmAlgvJge0QwiGmqQ6bNsBVHqwF5npyOiQzdGGK+8yNPdyY3MW3FAhfv
Iw79HWTeRC7fhc1bk0jpq0dm6Y4+BiUx1I9/vy43PqYvQ7BwvJ2LcdOwBpm9/XmEGgpkTWuYyfn9
SAEbjiU15hH+gwf0oO3hRRlZhMDCxfETWHOL3uBBdjnDY6g+xVdCpcEozW6o3AmVOfXR+iLasLnL
FZgN+k5CUAkeuYn4gWn3SorMj+I58KEbXqRpDMlsVjwNwBel8qMUJkDpJyxMcR3FUBIhiQFFiprh
NZ+aNkL4kKev4ErVO/GEMtQRMFk7Qn1+hnazu0+Sjq1tqyhSlb/13SKC+NM4ZCE5/Z9IwBIHfqoC
MJ/KQ0r5ufDvj2ceyBFQv1pu8ma4QM3CnGXlSCJe/OgS0xn10y3cq0GP/IE49CZ/J6JY3mDXww1V
PZyG8byStT0FZKXqjVUsdLITn9qv20vOS7Z+t4bkAjzC+9uCiR2CEAi20PQ1Spgi/YAVogvTrI9U
T67sHFq2ra7ZYbXZCD48HxG78np9yeVgfVvs19vqvLlc19+DczbXZdgv8qk0G9Y4n91ds9G4YZhY
myd4XkLg1Xm9BA/REo/osrHucNe+xxCRgk+UoiPAPwtcOwaDcaXUufABLVyRQNQDiABXZ2rokQXc
4eKbF6+fffHyDy+e/9uzLxcAc7N4+ubNaz/1K4eLhzScqTuyKD1uPKN04AXEfRwMu/h9vaqgoEtR
zn6Avy8hpuCWYvIlXumiVo6wphYzmecSUgB+ntIiWutNofocgT0hQIRdDsEEHAU0UFSGH3fhJXkc
cXRjPmxBuVY2y4q8aC10z7I+pzlg1AhiIq8wLIhcLgGmEzxpn7Jnb+heiVh97IXLbpOQEs20iFXN
6CDRMTmzunGc7S8Aq0VcPKHLAcSCtug/Z369MLOEEl7HQWc+7zjUcVeUFizLfLA1zIYHlnRZXODq
zbF08IvMZy4NZC8ZRjTSkg/ODNx1y5XzZe1P+uiL2xbtbTttd8s/1etxs9/94N0EDONk5JrJIAoR
NRNy2GYw6CyUzoL7qNcxcvng5q6tmFEegeJjmbU3ZeRZuM73t8CmXbdxZET0eBQyamDRDUaM+Pj3
4uOLlI6sJbpeZUdnKeAppC/BqXIjHPtVu2GWuhrydDEIsIqD+SSEN9u7J34kkEWgWeCV5D0Jj431
mlrlt0cfqjgM27/fCv7vENjWULQXgkJx6F2jbMxuELkLtG9kzFc2rLMAXgfAeN31mV2/kAuiI1Du
a4zoFjYt/n8YnmDjwZiJWi/PtjASWLmqd7cYAWcDrMj3FkYsIiJH4EBoOcU/QBPYGJ9SGylXHseU
ElqSdnRNqRVt5B07FhUIxTAmrMXiqmyvFgtDERSt60ZQfWf7x5MTy9R4reN2A9/BT1RSN7OuDjUD
tIUlMI1g1AAHebS2mXfVbdyI+dLxpgVEji4W6uGP5P0vJF7EXeVHmzoUBkfSzgFqo9EvzCIkfrbh
p0m7nryDXaxg2FeGfFbeOsl7XTCJyFAw6kW+8haSc64nkIk7VuiBwVd+BT1ZtKK4j5HjvTcn8sH3
vgo1HyP3b6vLGg5K6FS/cD+QscVD0oSfKgEhiNzx6Vdzw+DPhvlR7Id2VR0drDNAiX3wcdUW7EUf
u1oe3QI4BdsGnFfZ0fUv4LHNNqDcm45ogQwFUFl7Vhxd0c0+enU+og18eoLK6u3viGoUYKBfqo6o
xGZjqOdZ/o+oSqpTiyscGPV82FeUVrwIMQ7gktNRyIGBcKy1uVfqIH6EkEaokA9sb7MwNFudf03D
pgcJhegnz66hCvrnLMg44IGx02+ptiV1wcKmLvAKe1DxBCmTGb/9ORyXhZiHsOqKBL110adJ9TGv
RlusakSKXVeX5t59L0HOoTGKf13Qr4JrxekHIAjSiIrD7eC/T75dPvy//gz/PPh2Cn/7qMQyjIZg
OnyEYpchE3V6TPuBo5PeVUM0Dox4pBhUCo9tG7DAgboPIX6G93L4arWsd2G0EVzy0cSCRN096i0p
WdFPr4n9EryAhtHpqAU3qz0qdH0zF79lsw6D4Xq6Nd3rVMwalRz31/aDoLU//TCKWjjBLsCgSJ2o
O9/WBvNx1/C0P0POuOgGeOI1corGab+hUdd6J1ffirUuNPscUqJABPjBvcjsjFlHe6TyoqZ/9Kb7
zRJyjYkpNwLibit3ivHwIMVbC77Vv2CpNyvfJIa2xm31vm72Dg/KM4S5gPN1dePhivvJAD1+l4dM
anc5ARrFNh9ZSaz0J6q6ii+7dKvJi4lY2LMfPcI5xTuqLD4h2FM6DbCPXReKch7t2t0FUiWzMGVi
O0Cn/OvT5VLvrSeFddeVtyFrw2IyRGfKFDUKmgruj65Io55Op6TuzPnAmS9G6QoBynTwsbshq61Z
yl83CoiI1Gfg9uiwWlPeJoyqBndYwByQ782m6oZ2FtfcXQrl2ocOGVsIFdejlgB4gi7/DwvqkfAe
AcJvCckLc2nAsgPpPSa622LCMkjxxihR2EJMtzhxU4FDqjtOV+zxi1RuV2Z3A69pFx7Ck51c8D5G
fdi7Ao6iqyRBsyFReWkN4IK+qSF/0U01gGuaSXrtAt+jhY2tOYlxSPVjRkF0JIcbslzIakBuW/0M
NyGrf3iowUonkbmyUan15zBqeRTFvToN91LYo88i4o10rVDHp3b+UueEWGVkUUzrn51MLdtZpLOm
XzFLjWo0TrxmBm9dLNyKQwA/azaSO42fNsOHTT5QC4HGGOrSo0Oe7/JoxWPt32/7YF/z2ggGaOEE
o7fWpOKBeFY+agTGuvPSy7B72id9AVb0UBwj0YnDvuWqYQgM96FjYWzLB1fl/jYcHr7e2fq99NWG
PP88zqAU3jykkhy8fyhTs9EG5FXlDx3VuhQleXvHT5FZldL7eQPNaVF5yc2O9Suu+kxq/iFV8Vit
7U4j1+ufzRC2rdpm9Z50I9GB28iZpjp/t5BfU9mwMNVLY8NovNSDnrQlEwiT+khCBQsrQ8TuJydc
Vh/0sUE0tNsQ26tG5BhP8vuYRBkdqvmJ9+2pPf8pV4KLVXmJl3M80Fj1FIXzRw/ePqBkVOop/uuL
YQmcbeYYRyiyaYDRIyoKPSCecCZLoGCnBWjfnoztaZaQOfkA4ftESVViJoUnyNLkwsdZt99750ky
b7GVBeBt691KZ6uEj5wqPXJe8xLSJzhXtM2UFh3bRJhmv/GOB7/zcg17e11tLx34wgRzvPHQMZ6t
BIi4+9vBcQ9/8BKlRhSkygoGd6JKwiFS3IuWElanKkGLtQ5ftdFs/fSrxyyk3aloNeO0QZze1Sm7
tLaoRLciCpa7sRNbgzastxpoydQWnjYJCLSDGbSJTLLBlrIceXBbDywtSWnxjZs8tVhrrloYZxL+
2kcNLtz9TpN4aOF60ftLuC2wu47vi09GvXVLHGUUCJJoTxcHsu66paMisGpdixpefnpXD6eLvVe8
ePnm2QzprUO8KCCNbnGJCOi0K4HOqvcAlmTNkNzwdI9YWgD9yT4klF2E3tzLs+Z9dfCsZKWQIFHZ
fld9WIjAY0ktKzP1siFJtMDy8W5kgKP4CYlBzTLcewQz1WMiMHTBx9vyKP2x+eOKOFQwKjsitBKV
vrdyl5id0cedkSzjCIxpsSGSySHehSCJnuYrpbJmB7l4+roowjryK4tCo/XsDm0/5SLoYINDf5Pr
cvsuZbZAzyg3mQuA5FtVJYDFol3Ic1lBAL8IPyzweSbErpEw6RySe9oZ2q4CEBdIP4d6CZJAiaXn
sMEh32Na3IpBffEuXDeBABsKYogZTZ2NlQd8EcU+hOQZmmkO6YE6rTFnsQUYxcZ6f0YjjHDu1DsZ
1Ft0FUgL4dEy32PVXkn4+MK0bMCkSPlkcSEBydTF74d+9Uru9joF85/IhNqQ2CHiycHyFsUIc7MO
DIIBBv3r0aK10fAVdOFNyIHkLxyYcPP2zgPiI3oEB+sqy4mjgimQbGYkVQ4nsSMkoVVRQtsRwSGq
h68FhRvxkZvAaZH9tovPi8dpTSnq6PGxHXWQcWaCnkoNLp5wTOt1yalnJ5OLppmcldtBMfm8GJgP
C/yA/Gbygb79MOgl7ZkURUHwhvyMEVk1UyEisFzxBFIJNNqdX8h0naipPf/z3o/J1robUiCYfs3p
ivItRVs2Sh1tqN9xcpF7wkrZ+8oxIX5Mv4tO1kHhaj5TfhQcDibgUrEY5PRxeoZcxhfFR94IyVyu
eDtExl52Q7NNJp+GrVlW06T1FYlSqqZektm2aiv5mXajZFbsDRK4h+p7StygOcVG6GwSpB/mr9G4
KiEtA+7G2uEWFEKVdPfKx4uIqdBzUoz6G90l91t04djRoHFxj/PGp7xZTLYZkk3OSq97YJMMzDxq
Dy7qNYnocnwoe4jsgMP4b3wHPSwRump8AsNcVK3D0hizCH9U9kngGDvlgbbsdRG+zvrFwjTaXdZj
AiTSfcx6XfYA7/HOuVh1VbG6UNCRJpXw3FQqrMirbn5Uy6dOssvzApYzV9QXrYamCsQ1jnrRaO3t
opy/vWU+LsdeRHbjIpECMfs/UjTDeWrH8kT4layQrMH9cFZdS830cdRie2QF9lg8tfFqd5jHx75K
HXYS2fu4H+3/5/rpJfxRAgGL7cYd92vH8cR7KRg+e35EVkmVq2Dd3CA0fqiPjWU29c5P26Jt02KU
gMBZZD+7lHRyZB7x8M1mGpje7WVpfcG9d7vIqzrvE+5GPmZDGDkRa/sXfZNUF9FIYzjUdd1iPqtY
wz0z7AsulMhIa1shoccJFc4RLijiGITwqIEaf+JOC6tr4dGrmpYqpVr4CO9VDX8MQNtwsR3mgbdP
oa97ouZQ74O2AqddLchU6MI9AuB3NHyz9TV0vdV2u8iDFV0xi+sG/LVcbiFryI/3NOHb7hFzxhne
9R+b7Y507VUVk0Xyzreqpnx7R4/Tb8MLVdrrrHinZ2UXLN9FUAm7Zcd2516Of9wzYNf40tysa4jh
O6IwvowTgN/vcezPx++Kp5LuKgpYcY+Xd0yfsNBP9O5I3YU6nI+ldJHRJxIPKY5lCzqlUOgRekXC
cyeY018GUYZj+mSE6EcwR1F64yJ1J7PT5UsXsbehoMB0TjsMvE9mtesMikbUkUQ6NcaNUilKIcDV
kMH7k0dKcgUPfZBOMaya/hSB1dUs97tmclmZaaH9AqUkS1AjP7ebDd/CFvUXZmA3AgcgowPPUVW/
2tQrng7k7L5oIBVSmG7Hyy2tV8LcYkDgpjYHspj+mjaXYGoDDpuMdoD5gyrVlgVZYi8606aXJJhB
lHBTAZoMQ9BVumBvXDr2zDT0hbO6okrNxtjIZ1O3AUsOAb+LIxuDsq3KR+bL5Mtl3dFX5NzI1OUl
O6Z3uFW6hUg/NS18nQ7Zb9iGktCCJ/jGK7mYnoIsWkyuPpvgN0rVuFN6IyC8HA4Tkn8HqpsvMsdF
iHpzvx5AjWPqBYirXkp/1eQ492jxQGznHMySiSZTFJUbXEgSHbjYvqPCAG0Gqf5lbxELXWuxPghb
2qu2vt7A85URkdfeexX8T76cwh/xo7mhFiDmPvfRV9lxmxCpooaIDrM+58D2pp4fv2ruFbyGYlx+
rkTfc+JHN326YeYqdk/oo/+iT577/VxzkOuzKpfmr3HxJY/P1PwXmvgogOvyVBR/gt0qSqeGkjCc
eIR0ILo4pqgj4o1j8urwgLGBeeBSSDAnyKwVfJqhNLw9ky8r+IuRppp2uil3V9Mzs8eY0njh3aNh
mDFWikHa9mwGw3/9n/jOmzP7OASJ6P0cXlPzkFMExZO3yDzNC4KIa2HFc3tye16MSuTH4rtMhZh1
zk2BWyQfFyO8plwQBqNRUkFMVI+v0bhyuz/jwFcPRCCIyyYoHA2EWC9Ne/Xudpj0/JT3EN8uGYTe
stc5GV2kvcglGFca8Rl4/50IA6amCqLoyu2tqjWkjOPVh405D/XOEr9hakag3W4BXely1ZyphYlt
GuCQMkFPlPzbKVdyiISmRkd0SVRct6wfCW1oBVNXwqvY46K9DKwq9/RwcOXerPi7B08epjK4Ob1m
LvHXhyFa8eoy5NYS8JBhIVr8s+CsmvZkhrynP3qC75MTtBdKdo7uyjk8TbmU9MCzi3E0YG1E7yT8
aJcjUzYy07AKBSXrtQhMKe/ztPcxVQhca8NnPrXQDC0GCT5jk9GRT3wk2bXtEY/q6ZhV+Tw6Lqrx
lWFCu1sLvHNcnO2PSNeI0JRBQkS84xKZG0V2CH9KU+wguMkSGR4DY7H/u/CRT56X8Q4B0AIO9JkC
BuIgvY8OgIZ5ufsrH0hjQxnUZZeVp0kQkYyQ7OKO2QGvDfHVADxkm/Ev1jCWxaFAkkQimYf1/WXY
Ig2EIYV6vsedjpUfaDROw7EcXNPI0xgGaPVWTpTgPuoRXc7LT8zmi6H2eqDmAIZkf8bWGk+OCDTC
qL5H7vQ6lXrf8gSRnLM5+ZLr7rrAuXINa5HLw4MEkdVyPN4u3xDEaZ4txGj7rt5stIxzL2G8QBYu
DpB9bHDW/xFguBpa2z12p4C1ked68mUywlauhVHsMOzqZF2502PD7cO5Dp3YP9ajGXMPhrSS8cAK
7TcD9D0lf6luyNtWLLWJYFG1NF68Gdc8Lm5UYcckrMNeUykQ71wAn6PbvHt2Ns7Jn/pBm/JdDO7H
RryBO0iXKTx3XtWmdNbPzcVzB+6GFT7JeD50QD2kvDSTC3Oad6D/WQaVfYS/w93tru8vtJnemonE
iH/XG1xf4gRVq5y2XbI5B1QbuefhsQPLgwVGSCDedkHdvp/lEIcVmK/l4IFPTiAnkLvZQLxyoPVR
TKeQ2DyYtRru8XO/RygNSqtF3NFbo1RfF176dAF0SF4ijHTgTDgBkLW075BrEJWVXjxYrolUAcyg
J+WT3bqf5wrrMRKewNWCid1dyGDUByVkEItLHw0f0wW8rU7SVdmG+MWqmVHWIzsN+JLvLtC7bFxw
hP+S6Ktua4YpHvqVxgVY7LIhuul+naBAeYjE8SqylMTK265jqUIMJX/LA7o6uOXHYFzkdzCzdYkJ
qCy4ASpGAlDcF/JF1WcXLAUjhsyjzbvZ+iDfrY/KfxSr4+knpp4D3054aIFIypDcd0DxHiVMCatj
K6dsicFqRM7F2uHQn4VZZ2F0pvCiXl80w9HJ49McOzcHZWj+Gnle5Fl+nYJV98ER0CMeoXi9oB4g
AwQuNnqAIQtAF4801rSVOqu7+ljw4ppUohrVLqCxBb6GBtjqfngkvGR2ek3wjRMAMvs4YLeAbWcO
bXN+vt+i7hNBssgjZxx0raNBHWL1cZGkoXuXaitwcIhCPOsxl3YXSbU2k4OX/mG+nVHGw5rnFk5X
q/S6uxgz32vJHkHd9cmsvnudGjLHzXzGFTi409O0gCG7CATmbOJotkGVbO3BJtbnV+haWq6LwcuB
Y3oUp8nNA/WvCYxjrG8BU+dpVEcobAx2jJKs8roEhBsN0mF6C56HT2LxDOB47wL3fHVEFjXp2vCP
f3QC8nG/+cQTtjWKvIjZfcBM52JHTgKUCng9SQWhedkaCDU+hI3vmqh4HJmlHKXVDG8e4ejTEsTB
zp4OIq9kBdslFjNgk85QFkVhCqEsgQMSQRKmC9rSEHPLktkRhmamlt1+g4Yvfb+KCqk2b9TLx9xR
E0mR1xkVcFngnehpHD2ZHmCWpE/qUxdohp0f6vPl4M4Ewl96rAJUHDKe1PD2ZjbrkqBSza6BcYYt
Lz5n0CSkhuQUpuQwfLtOs35fbRH4X12kTCkqFhZog2AMyndVq4L9lXmlWqsbwvDj4cj/0Zw7EfOi
gt4rKPTh2yfG7k3RCgadSYfCEYHdLLJ1eG4CnDcjEtHb7BCig6cQxOrW6fMsWJTgalaxyZKueYH1
XN2GgbVm6BoVtGzb/TW5qZU7B2kAHwQCL6XK3GNXKop0NjLLCvOEQ0JUc6T7LPv3E1zQWxOZgKf+
pCT89CZn114Yo31pcSJCdL9rdAVzpWdVw0RbuXHldTZrt4j2zW5qZLmIH0IlPhBpSX01DMaYb6cz
6gfNI6rZiBzhGGlULrM6N4YW6iURw3mFAqhVm4lWgjaG1fRyyv6YaSU1Ty95fdwzivoqiG5oXOQD
pG3YpmFWzMQ8QR39Lj2gSp9dAZ8B/HXTn7uUMI59gVdfLExcMnCYbZGcKy4QsHRHZx6r9jruwvSt
o/o9TQQ3RTis4oQBGkh4X4UhSjTIW+2T6Vxb+NLBxEHuBkjSwOTD7feY98iWb813xeS2mHzPGJ0W
4NUBxXinCsCW7NuTewHxvk5MnyYWCJLoiHcFih649wWbZaOzbsCNdI9yTUgJ9MoUxll5BqZuwcOo
AdvS+Zk72vO2M324rSePLYiJp9OT0GK7db9x8jvggRtaJ1jpMLTMfzwbtII14emdiMEL9JtaCo/W
OlFIRSSRjNv+xlrGFYusUUCdW37IFLO+XFWTZdleWUUHhBGUQ9ZNpiF1d/L6kBXpGtLLxU22RZiV
SS0ejWlX1iv52wvvSwNqHYQXTmgevHDz4hFOMcKdyIMMZw+NDR12d2fIVfPXD3QU8qwQdiErUkMQ
CtR/6FFQV3VIqB5wNq+qUa9BWX2hHenTQzCsjYHnOoK9uoy+USi41263sde+UXp1DoV8+5zFW4Rw
XbJ188pODAhwuW4gmCjiJ2n4irsJJXqwRx51UwViWKoP5jMDgjsOd1MN3leZZto9Iv5f7EG0JRDN
pTqj/0BWYvhi1TSbNCNfhefvcX4V212DdBVw7iwxcCIyvSSnH3l0ySB97OE9M+L+u9ziW03FCHXN
/izgr0ZERF7DqkWujZB0enenyR9Ljz+aFv0L1kgJn/6CtWGuLtgoZX+xo3EUPT5AvPm1JZio48iU
UtpWS7EStJTrKrQskFQzO/3Yaz/qZtQ7dL7MkG3DvY7TFcleM2glM9JPdMQSx8s5qNmcfWxJweQ0
cKUrnxLaFKM4oHU3VN1UIx5NYRIgM+ilLye3Leo/em4ZrMXkHJXGTZUTOKZpm0ysoyXF3MaDZrRL
YsSpV9pZqLrYMSYmSmvLf2CtqoWclz604T1EBmAFTjm3KKPTnVy07qpa6iCCQJ7QqqUema9d+hvO
SWEgO8z61lsU851ppK3PAlskHTOw6NUKbsY/fK383us68F3nnYYbnHp7KFt73LmfoeeIFnYU48et
IJtFc3ER2xuVORKIF9GGX8WZPZzlWs6e9oRdLxGXGM+NOopdR4PZmDkQ39cbfzpu2pkH3RQr4gUM
OLFtKmZSXsF5BwPMncgDq6xpCw2IRkf+d4jYpiMXsKnQmuAxrbC9wUFGpdbzZIbp7NUXnGHFrvFp
r1tlDuzYcN78k2Nm8tKaOsoVkKChrdXtuDAdGG3OzBxckD1XA8w2jYSCAKRJRY80/yCpm9o2o8D1
csqE2HwCR6nywyIqhi2ZX4bJBrpc/vLNTZwMQNYTPe7fznM1I2OYrHfIrSBWsFpKQPUaYqwTz0bw
fW6+AMAU2yEiu5p9H6R3qnV6lVMGDarxuSccnUY2Tb0ufzPPjjgRjOFWYwFJHmVih26ElGxKhhlt
vwhWFayQ2qrLnSSaiswWxXK/lX3SNyV7H6ZWLj21z/VadYijdiWSzeRgyXf1el/dVfI90FUvZSfk
paMH+3BR8HcI7Eefc8yFZBSk8BbaKcGuuUikqCJF4wb+gzwk4eeRID82+xzBRmgBxFMldxflaDm+
i9jEyQ4P0ft8es1zzedYSEzSeJ2Dq6LVofCk++vtd9ppyk9dFOgXp3bYR4JA5i93R0/rJh9xnu8l
z6ilM6vp2pfNkEzaXnpD/A2WsQVOKF7foNFLv0pQYjQQr+dd0xQX1Y3vIOsQfzkF17SX5g9d0T/c
cjbMxyGcw1At7o28euLoPoGnahrixn8A4KOnX5LTbCf31Nf1wuevinpZ1Piyg1Fxvwie97JLxLEl
V+USLWjMfVJr9XFuYDYHRn7tPgbVP3h0+ZgX24y1S7LHebRjDsESE20CwgOcBNyDO9wxNlTxTtt9
hyXqru9IGCOCg9fWtNnDGtqQLi4Cb9qQ5A6Ra+Caj+sxOgRK7HzU2ZkkewfGvqnMBjVcU8L/MvIe
9VxGKc2GmrV4iCJ0Q61NJmBx970+A4TZrCtXJChD6W11uV+VW/0ohRoXq6zXxVl5/k6QghIqqueY
prJN5p1ZY7oNphQ/W45iCE7rYptbMGXbrAlhZxfagdOnKBkVLWo07ia+n1Tai+kE3StTlbKtxd67
J3nju+wrBuNwIgEcyhSIDcLizI23g1/b4QF/f27K7hFbkRbS/GLXWB9z/OJAe8E81OaNjpz3sV7L
XW9zegSxBJKue1Gv4V7p3B5e5fNV01bDUUIIRVf05y8z0e9HO6MfdkqPeJeYHZ2Q5h1OYVT+4jg2
ld13x6mC3bfRavyTjncLDOs+cm/CghekjeHXC+u8eIH5b+pWJfHK+YdiphNlLnS2VO+X6PqjTilL
PCW288qnR51OzolE4LyY4CIHgRYyXqKPJLpaU4daJKURZGKmYCgULgKuzBGtYBW4M+lxslp6z5KD
UQxTIXlm/FpGL0To04O1XzyTHDJRAy4fTq6FH3rpiCPd0H3nlThsRyhasiSx9qI/nMSgF0n54FH5
TNjSgUe53FljEI/mQkNJ0oNueDzAUog/TSmA/bEXVZKxgXsHpj18YhBmpGpBGF7dUjqbau2lucI3
jLNbNLAiOjWZWP0IAPdA4KyOSWo1fdGOBbJGDdfItlxfVkO0kHJ8xLh4NC4mj4Obg39dkM1X4iQw
pKGX8WAW1PNjD/VRUq03jtOEH5jHFbr5QSAK2SPdleQdllIuKTQsS6IXGKaT3BQRmUs9IV4EeoVH
sPKE5Tgm+dZGgiwNJ4p7craKQG71yFDgLIAJXm92t9aYxI7511W5llezZPJzl8ecm0+CSoUgVeT5
L3nXvbRDJGgmB3BM98el6skPK7Zk1q3ouOLQKEASarcC/08/2OMuyR4ChNp0ggfXdvIVI761x+kd
MBtPqgI7rbmMSndY/5UnR48SHi53We2z6gKc6OCH/ryvFv/Acg/mA19ZmuVTQEUurq4WieFD09q4
eDz6tDk8jtraA45cwQ53TMvPSlxuUT4zrOjyCq068urL581oW53eh/wKqjIeemdBcvdRk0bXrLam
3V3p2xu0J7H/aO79klYbkV4snI+MAlnsOEFHGFV0fVZf7pt9G5Kr110iPZe4bd4hb4d+6O0iNmdq
p96DW4GyqumDm4olC2x7Mk0uC8AH/KBAtxlEE0Q51u6R+63yiCPyZKFoTNJ7W12CmFOKx+xl0yx1
+F/b6GMpj7z+6UQPuMSyx4xCFxgHrudtiqWkfdPvqcgyuB3ISVuHmY2Ls/0OikAybs6sVdp83Koh
EhuP5Yb3jBBmRtqqoHFpc0ItTbCrcE+PzNudvMPVLfhRCbs7b2hGoUNPabKV3UEuGBSdLDnXcaID
FWgB+6Y96tr9+VX4CHyPmJOFfCqGuytkfKbt6/ryakej9rIkwkAdSFiEBEL4HukLNUCwES5Gclcu
17qWxyMgqjunPoSnXH0q4RKTF0Nzl/WOdxzPpD6kikm/8bTPuLuSvQrZcE4R+9LBJwcv585QUQve
2c3Cg64jT/Bk8q87SQDh4k5xf1vY3KE31ozF7lNIEOqWOUKISIeXoP7DZsLwVgklSXuZnptjjMj1
a8CjXqPVXiy8dRqUAA67vfLjFjlfLwL9IRI75XRSjXkykDnaplP/WrojredjJI4ntUNkhraARWd7
J7Mnp4k6XWfn5ImGCvppyBlu9MTo/wNpOV6XYyk66cb/P/7hxQQToJNTCvpqV8Bdewj/zYqi+7Wz
ZuqbbCYyaD7kUgPiNauV10ggAOYMH7F1KQOMhwICKDNG/r1GZeSsWjU3Y/HZA26Cgbl4SACowcev
EdAMikZkBbVCEZMBqFMB9Wvx4tRG0cBxWSyrQ4w4xxi1lhyHRNZi/hpxOITxyiKHhTb2wXDy4Onk
wWjgITrhlL+vtk2BCa2rBFPGc8AzmTuT9OHOftfdG4bOHepOJ0I/3OXJ08lpsk98pT6yS202PzzJ
bJfgEORM0DYscHuw/9fPvnr6/MWXz14f6v1k8vRlfrr2hYk8kQw9nd1+/KBePX399eERwXqkB0UA
BTqfDZgMLqtt28FpUh3cb4GqwFtg8oD1dECjeUCFR5G1idz948DtMaC3+EgKd0TRDEfnfVYplh8M
YujUY+ty1TTTDLmNdXNQbR1CkeSfMXsWPy8iAvpxWJE+I2Z4i+w7oseO0Qnf9HZrtgiMgEbuIxrj
KBDEjIUoWh84yG0NkSaLZydUd+wOz2kX5tYJeQC2gbMFauXgnQtdJpQ0hRYMrznLPSCrWBgDc0NU
6yRKdGSyJlnTm0iWtR5HjPA/wuaYq2yd7TG+E0G1FO6HD1uIFQ6gFUqjOXxC/DTKQMpTxudkBe9C
rpjY54MHgwLiuJVZYuwC8dBJa91IErMJvIMHOJ3jEPaErSHIOphECWJld1WufQkeGeXwmA3WlxkU
CKafxYId5cExPcDK9OPVEft7gDCUK8LHblYgwxRWE8mcKS7vQ4hYs6JmKmhUjJcbmAKZcORYZTnC
uGOOh+g4Bzt1l7WxPEs9VNs4MXN1CniJzVu7jsgvpLSMDCGzOckeS3rQfB8wRo8XOjbdMVbg5zhe
AYOAR+qt4kfJYaekjI8f8x32w8+MpCQWyqkDwhINkknUUKjnuBMfoE8zbFsC687uwi/zqhbulpEH
/cPFRejLxMWevdcVd4LFWkC4o/IQw9S6twuEy+dcQrzt8Mn7MApvzMW52QhwyRnalkcZROj7W+GC
UmdwJy8SeC2xfSQQzXDtaFMTcGZQNQ8Ray2/tofcg5cM740p+Iw8iTj/YBUC0QaIqrZGjE5rOhZI
WG/lBwt0Ml4sBui97P0aRCnTMoN3W+wT9/HOOmqWhs5dBlew87XNoZkPbfWxqjv6CecPuAJrekO4
3/KRjPEEDhMaJ3FRNPCRhzWyh2hO4J9XunZmMWFL2y7NrvPoPr9qEJhzCILxpjpH38VRQhiXgkoQ
oWxg2DIL6X7ZWeIVlCVL9wR7XW6GsDXjoPKh7aFisDnF0PwNOVURc+w+e6nt9puPoNpDuYwgyeWE
k2AcDbhvN49zZrhkHGq3js8lciizhxfKMu50wjoQyhG9mGXSf/TCGeLXd56gfnL1chL+nDP2kPdt
dpvMUHbVB/KqnXqZcLRLoA4HdyhqcHKMPrKdmEUzuorE4SSCk0L4Zf1tm0lHQ1bsRVvpUG/GTMbM
PaOOVDE4J69KMstPXNGKi37tAKQ50wCEWsp4vQUMMlDm1p+KBdsncMkY9ILcgaoX5VnzvvpY4mYD
SkjfufyvHblfB28Q40W3yhzFT/MKhqydlyB2ECQW68wXaxTjVbO+NB8gBg+cUpfTIDVZKmXrj+VM
bnN42HdnJ0FP6TwtqayjQ8ypbPOfdnN3zdtt5uwjOLtLIgKVFF8aY3BLCDBr7lIMU0mnkqIAFpS9
2t1SY8sxGDL2wemvhmrO3O+IOlWLRzUsK/55B0U7mRmTPkGZYX3kYfLa/ws8S4eWTVYmXrlEpbFk
Q8tuLf8eGZfyW57YdiObR1QxvdnWAMZP7eeOF3/77EN9+EjZiQKAFU/PXGO7fTt/ZCd67BRTy2zX
Ss3LbQwrPdA1dqpWnuN+9IKrIfT7fY5w5IEUkdL3CsYBJi8/hV29Pm+2hoQo8zzGDvEvhvJofJ4J
EUbXTl2zzy+K22bvUukynCy5PCEXRM8ufo8BCZ0ZJjyLqJNMv1c1GkMQPgwCEKMwmqmecphi2vG/
5OrSFQ2r+2SMqnw7o2WFN+Rv1yiwDy23dss86n33/Tf/52Jzu6va3ZSu83L13Z/eVF/94hdmGIV8
xbG5FFJBtqg9BK2bavCdaRUA0szivZHnIvR6LNpmvz3HOxi9BhGJRgwBW8DZVg1fgVvhtAezZ/ZE
43Kf5C+zBPLnrr6uer0e8j4sDZnUSA4cslsa0ZLkqqYvIXCGAlX68RT746KvP2Cqgnn/slpX23LV
H7n2pqq3wQSS8w4mE0MvZ0ZdslaaeR9jEPqR4Ax5deZ9Lt53qXnNecTsun1DwVvMM09l6t3ttM8x
Ronuv6Puv9vX1e7YzrFwqutldZeuvUys0nG7a/7/2r61x3ErS2wRIB+iPBZY5Adw2GiIbJfYD6+R
iTDlpNfuTho7thvt7oyBckGmRKpKW5KoFqWq0nqcZHeTIP8tH/OHcl73fUmp2rFhu0Tyvu+55573
2fK0sCdeUDICsvpj+TIgrhLgAcbKn01bPBjKMcyuybjFcP5Wq3J7SFCAgvYDmrGmCGRkiZTN8xLw
aAVb6c08zV7lNcs8sjanvJZYLLvP51Ij+yGXqEPZXa5vxO59X/LC4yiXzQyjLnWtxmS33ZslMRWs
FXkNj3V8BbgsWUpsy1mNLsow0WrRoqyO5q6cv3oGO+KNOHXDuvcKWoxsVWYoAdYIjbZ5z2B206HV
ZLs7LOsQVOPj202luB7gsNzvmjAJtgg8zi/kezJECoMSvV7TSiTDNb8GXoaeyAx4eBm0xFPUi8/U
EHqk1kmGbT/Fhp9Sq0+xsafc0tN107sjiEqpUYYjfBzJc+9SGGiKg449aM43MNvvyIDBgR+jAcPo
evt+6Jk1y2Zr7xm9OHXPpPDpO3aoW7M/VLhrV6hpc18BLb2ByWbQACz/U6xK87JvCgz6v7jabzn8
/0KJ4flBFIiFoOdkdO59INQ5YD6JCFXk395L/+/klbR8lhgSP7c72Sz3V4v1qlzDbbwtWNQPtVSL
MGk1I/VKRKWSV8EMp6qn+6tEUjeY17TT/GrshENcHYjE3Ga7UlJMtlHFQJqkRpAIpJcU9aNK8uCY
bCU36Sy9sPq+hHY+YzmgFUkKx0mFim3T7DBhlBASQO6lvEljLgYwowYsewgXOPcKE3X3T79GOwUh
peBJqZPc9eHCA2OZ6amJNoeC0kEu1kKFZenXr96+e/XVy/evvh4LhktQsV9jiJd5ohCsCKMK72gQ
IxEj4aK9K1EVrMzOhL2mQmLXnZ6l8cx+Ow74Lb90AuGIOaouDKsl12E67rCD1Cv7GRZO4xafdoP3
fKme1uB9am2gMruP7Zcxv4furFeONJSDnaOnbqyAOkRYRqTrZjQxjw97pFhr4GgR1FcHwfDrCf5m
rkcOtuyYHnvB5IbpdVmLPCgteEloXaWw7FGsdBuW5vWPFX6d2sAnxckk5HfwFXWA3q6ZqvM0nH0B
uG3WoFaGy5157wvg51kMzcmmhXdnTDk2fPhksV7sJhNhCGdwuVp8+BoeF5WQIvMWKSK6Zzy+VUJ8
VHzeq9r9oNjAc9Wu+5n7QJMy+uF+NH1itm79MFCz8i+AnonxxRCTLKgIJVBAHwD3o6bKlcwwemm5
dZCGvAYsZQSNppUv7QCZuvS83ZS761NLI/HDDCFQ5WEdvwq6WRmjZWzRaxJOjJfQTr3f7qoFTgKQ
Mw6wIMI4yx8kdOuUue3u1OD5vuF+Fk2htvZPfG+y/Mj3RnP3gYkSzFuHhExMhrK7K67LFriZG+JO
MXPKyW0CVXRCk0QVeqC1327r9U5vr2OMRSVcFBy7a93ydofhKIysB15hG3IEoHkL6iW3xc9DuTOG
42R4T2Q6Izx8boe/UMwLLCsN+OJqhfX9eZhBMJHCs5+wAlglPqITj5jLJobcYyC7gRQLQKGE26J6
6koej9OczGFseOQ2fnce2YHIJvo7xD9cOHZGpU5FMUWLQqCI4VvGRfI4jBCR1vGNpY/Sw2dIA+aD
jkK4VP7S1msMhCeeHbK0xlcNKL7zFAi6J09u7jyyU0O8M31YtJhTzNGZ+IvIjfTNWHxR7PHQeMd9
laiENZ3jAxm9sKSfvFbr+o6m4Su95qfBy0OWwXXC5DnoO2lHUQOePOGD699JZtJhUR8I9HSAIFjg
dR1pVIyULCJ7ggzQ/S6LhOCiKGDndmFd8kwMXEhqgYbfPrx6qxyBZnrvDdRMaVvb69Q5HxkiWvnQ
BLpWL/1xi/xQZ3e8gm2tNEvw6ywhzbKQPl0bdHye3KhqLtq70hNz31IO6pyn53JsY3ARNgzlTKMW
OAAb2N0MlTMlLNUW09QUJwFuYVtrUN/P0MTFNYZIVNQ4H2BUaYWuf1yneeRI2azsm2/fv3r37cs/
vnr37rt3XyZq74K753kw3GVzJRJEh6A1lGNA3FpjUbJHC8m7eZVTVSJ1syrLa6jnUNkZdn7uj+Dc
GszRmHdKUeRT5DL2UJEoTnN3SlPnLxBLQCZK9FFX6r6g9zFUeFy+YUkv3v7xw396821imkeNSIrq
EO7gLPdiLP7www+s6alvUUm5Kg9AtOAcVHRuWP795imw+9uquVuzAsJtguZPvm57IIW3uz3HuE9m
5QagBG1h91sloHI8Bf3qV+V2unTLc6w1FDt4QNoLwMYAy1r5qlbZIGTJofiq9c9kAHNDU29IUKeC
OnH1oJvtfq1OAhEpHqmlQM8NDEpojJMhiJ/ONYe1WZDPoAl4Y1V63u5YPYABoFAqu1fB9NsEBrG2
fYoVMPl8S/TGYdykBkoLqkb/RA+/a/1dWogxfprmA09i4nBcHsPSWtyoR2N29eoSt9gC99qzOUwq
y+44cgqRf8AYLGmZWOV6VDHq8Ioj0o9z+GOJAcudkRjcNduKummPACHVQtgT9OI0KXcJ9jvZlusI
W4VEh0gzJMU2de2ngNlsm2k5hZMr6iD36EcCcwTwZfjfPwCHGw1AAiwRGdZu8e4c4i4PcxpVL1D0
7jc0VejzRdM8JfCDR6P4FcNKrk8TLuAZex1HJJL8VfOI+HjCkFguFYjH3NB10uTPQwDhej0c42b/
EpdMYmO+QKyjMYy6fKypQBLX0dahRvfIWHPdGMbewyf4EMc0PUDUCzAxtMR7ZAhCv/ojD0L6yveA
WCeNas3ptPnEKOuLxy0qGx4nagGLK7h978pDsajyYzB/bAX8ziKU4MmMn1ZCkQgsZvBoBSgy8hWN
GeJqqi+D+FBm07NUdQZkTFEUCVCN02ZZnSNc5l0j678XugXN3cibDXpSH4FjwwP/jEUF3j1NKwVG
b+NEqaBn0T0xCvdG+F+Io4mL2u7PZE2KN1Azv+wTZH52Tv51Qgz594HeR++q6UPlOCyFCYavhhFK
Q25aGUTm8K3OF2UQCPwTa4uP0tcKrv7g315y4Q2seM8ctZdCe5rNIa8l+sjUonVN81Z1VFFCP68S
iXZh+FFCLTXbkPbcW15pPA2py8J/xjx8uL05ScNocx3tCc8+0g2qtZKnyeNKiiBm8pxxcOFjEO7V
V5ANDcjPU6ErvmZuF8AEx+BKSAG6HixUcSS0icV5MHLsRnoTiud9oBX1EWAMti1U9R/lTOKUgQ88
YIYYv6MWowWjF7dhPORVIHuwiyJTh85m8KfA/2V5FCsbjcq4jxi01gSFJCRFSdmQifsULiWNrjGc
QNGopIWvikclhpi0kjfZxfhz+7AID7xZlju0f0WXq9EoeXvYXUOfwgZjE6rAmerMma66i6EkIKDN
YXOY2H36F/Ox8QYNeINWA0ewvMDCo8ct/HtJo5XGu1r6/NKdPIE2TJka4R+47iMlAjgUE93ARCN5
613eQ9F/mTxLSDcfIE1ti/GjPSvlQRdWwGUF4K2ni/WQg+RFTDHUXJDkaHk7gPOuZ/sdmn/lg17e
3z7wJ/BsDNSu/TeXPrdqniVK63Du6CC87uAI43rWriODFs/Nl+hXsCZM0eafgk3cQdvqVft6Wy9E
85c6mFsWQT57iFPX8ZQ+tpLFbSBc6otUaqHk6Uy1KWKozG1Yf7ZoDJZTyZGKmg0pGVq1YCGGkwBK
1/YulzDzBm4KNqFkbyQ77ahuOYZiFTQrxnxV5A0ao+fxvR37S6x7Rmnem7wJ/1z8u0iG46Ui7TgP
k3+nabFjyhNoldQPMS/jpGUeSG+pZt9dBbfoor3uvEc69bVYH534O43oNX2h7kFquvCISEdwZSgl
0ZbmPRw9XTu/w8mb2SRYD8WRaR4NzIsSH6VGPak3ERMVu0bZrGVqAHlXBNXnseimz37NZfuIAvSf
kSRQYgfXQMHVGMbQQb2e6vFRVPBPtzQxb4+V5DhbfPZcN58HHiTBVsYkq48ofSRZjzYUUxf5EcnB
tNiijTrKnhXkbJ0gajWlmU1mJWWEwzQUtzW18kbYFWjppkwsbPsomdazEmXGgGrrbSK3G0dBvWrI
O6KhuHXbRSX2/Su0EwWu+VPZg2PFR89DANIJjH/+JZ5YFJYJ95OWqxdvYInCEZgOx2MKgOrLTN2+
L7D+JeFZfCSIZ0/zZ3k0WzsOiwvoJMUtOS1l0gJDQX5MeMHQhm4bj4nCz6xG81MkK6euj7Jtslao
v6CsogqolY7HWT4eIypBEfQJs/I1QzECeYfW1M6FhHkTKXe9dVsfmeO6rqu6mphTo6aJcUZm1yUg
/RwzVmEsHQqNhZcuzc9pRifhnd1EBZIy2POwv4sxp7WA7/nluCdHfbBgWKXYNL6BJhkxNkucb9iZ
6Wt8Oe5oUm5BqBWR5hHxsyz4yMBCZ3nESvORkH7xpK96lzIzmmQExyx5AnRqkg6OArxQ4twQAvwy
j7F3SKPebXE62w4Wz7mbyZWL/czMlMQOEUZ7WNTLyn+Ph11JXbrsA2zFzWJudYNblD0DBHOWvDhL
/jpGxorTimTuiRlqqBLqYu4ro/SrUYOPkK5XN/JE6me+WTlR8F2TA+h4EaNdhPC+qQ/TptxWpJrf
7jc7b1BVvZQKQcnJql41g+gMLfVkHi9BVEkW8gOql4kZkLYTIO7WZ/3jwwJAkQoFG8BtMww8Q+HS
4hJT+KpdCkJhruajaSzDjl6H+QNX2s7A2FHIG4sYQGi9w5FdYSmCVMKLYDvblu11ETWetcQcSG86
zCesQfq30tcb1VdKSX/aq1PIB+0oM46lpKPhHSU+49dnOLtoQ2atHV2Nq9FmAS5bCa2bs6RqVqVj
yUDuFzfAP2aBxYevE5L5z+4q2N2lpCegStF7FZaNeyNiEhvCwA/McgXD8PrDPxfjESJyKRphu5Df
o49KD30RoQGWGBP8nP6YQJwFWnICEZZT6AjyRlLpLOEb8Zrltly13TLKC825xZgGLyShXJQiTcfA
hUAXU4gKUvVXYmFxjQkukvRpOlqjg/hy8fd1RfT+IDTeDCJSoQG5vY0ukZSSxp3UyEjOt7qH0gl4
otmciHYfLR+5b9P0jz9iw09jjDMOiaREfxAp0cmGoeGh4OmlFwrOL9NI5CQ2DXXDScjlhZyadTZQ
Px9FhqzgU734yC9+lNgQQStEY9DABXvnR+UcMWzK5j//wZ0QoYPOqUQilXHQDJ5bgcYlLmbJg/hj
sA6L6X4XCz4WzaPa0UV+Mf7C425O6MFeDNnKRxJBgO/ZRS2hXm06yyrnOGMphpfEMpZdZJhD7z4i
XGCm5+Iy7zWUuMfLc1NNUSCwjgkitHDoPhTxWFaOPiXVYXorJuvpnX/k4tZ5OA9jlhdV1quvXRvR
JQI1ygNl3ydzSEO24Q6Xt7sjj6z902PKOC3E+B25zhyzBbwrbCPGu8LEFAiWWBO0XdbNrkkfO/aS
F9K6SccRT0O95hbgaclUbMml0Kev+OuXb/744d2r7yNLLYKyzi76Z4mcF25Ch4ecYyDiYCPH5Om4
0Dw0YunKpS6RClWPPkI/pV9at4lP/4UACKwXNKeZEmo6gB5hmH5T2GEF9W8DOmQ03AM4BnlcyDgi
/PynbErUNggd/Lqkt5ps6oELnk5i687Zs7fbTIp8Csm7FO3n0vHx1ssd29qhMPL05pVh3ok9aBPe
nk5OhukT4Nn/GiMnjsrP+SaeKfEgOWEI9inEbr/t8A+hCYzSM1U/gHZp7mKEQjJc0B9jS6k6Pdfl
x6Pnlz0uKVIscrKZgQ+0/kRnTKr9VvlZWnr4ZNShrjcEO7CzeMazlC8Ebbwp5iZGrhDKp5S7cnIv
lcxFLqS5uxPQlUfGYOdZSL9gQfEyxhLhquJbRbbAb9PNpmSU5ZFP0nPYGHc2puRNjqG6wn6Uq0Wn
roWyZ8niai3m1KtYCPOQwKFapKeOIZ3bmKmJnotWzT2uFMGBPCdUys9wOHnuu9MYpR01ENg3PCbx
2+PiBbmuNMDwcqtkteKDkxU3yrLQRLuL4ThxDTQxHZsQFe4uo2pfULX7wdPae9ak1Dyg4Ghn4X0c
WrZGG+izUAhsjmMXlDjb+DaPYXOAEkbPx8ccDeIuRaFQL7zPbc+CvnWN6K5v4n7QAE6BpTEekB6e
IR3dMFTeeCYp8R5wpvX9ZhukxO7tYpU83mIfqwAHL8dH7yAiKuDwkIG7jc6mB2k2i5OSjpXdhb3a
l3D0Uksz7tqFiiHhFuOEXesEJ9mt7baOxz70t74VR2vbSGcQ8LWK/XzcFvQvG/zgRG4HFhd7NFq1
xRffyrC1cYkU05zn0oE819YzW5IVBJuy52E0joNtthLLXEb6heTQc7a5xP1g8PHPH/5SxV8T/6CP
v7z/h7+m8GuDTb0dsYSCPNWfStQ6Lsgh/2bXJTDkq2JAgdNIwjWZzPfY0GSi4zwudtd029aU/ioS
R61pByIdW23Yd57fv6/xLxzd1wvMFhuEYpMQbVyZ/ORNzfvdm+8G+/VCAjF4fniDwQYTtMIYqsUM
yYmfn6GD964iI6jn/BsmDQ8v+AGmPvzl1wV7OxLDTe8RRjri3bAiqUksI1uOS1GM0nmV2jKtpoXT
VAGlm9POA917sCNl6dBJHK8xPbPiHc0rcng/qKhHVrQjDnOkIcKGAWxmrKKOz6s/Q/0/r5vihFm2
/vQmlPMmJdoSIzYBE6WDN/GCpP6QKN4VRpbCs6HX7ZwHMBh06vEcL8myovAYi3JJYZkIt2VAuiwP
E2VspjLIW4YblG7aLlXcrIHWmVD+JqRxKQHvwJhRicmUX8v5yPGZys2KXIe+4ul8w9+ydauOaR42
awI4cXW0t+HCUiDVkTxXaMNhnNzcYuzq1sAiLQ8oZ60pLy3Z8Cbt9X6nvY/c3jEosPosIyio9kQD
SxsZwLK5ukI4qpqaY89zbEodmLJVIRtrCZZJFzqgA0CXE6k8KXema/dWT6UIKXrwMmDPPJ9LMR8u
dI3LgobyyozEDmfRM/me0ekFoOXExCrbxRWuuQK7BIFRJdRiYwBz1rJF2+7rf/95bgFJQXFdzBIL
jxBTN8M7clVH5onrtvsWaQLZ/0wH91JKadFAxvMTmRgm4uChw4tZX6E7FagA402pQDUuXHeGqWHk
EmhMJfLvuXz3JOYymY4WKKgsV8dQQlUaFaB/gylsZZA4r3OmfWEO8ku+nb/++iv7QGrRgNUH4t9f
18n3h/ZoLxF5U7wTjtNHvcjPxXoi/hh9Kgs6lobyydL9mtBd5C4Q8pKX3uyOD6keDY4873bnCI14
D1NTKYgEYjq3xZQWCCho6awo4fT1s+vA4uIvf8jwRfc7QUJiMiFDGnvEPquKog6o1pnyCz5iGyiv
gT2a7JpJAzjCM2rAQnBjbjqHvQfazFqF4AiZ6XPheEMefhBzQg0uXqNVjZf5bbnjHDl4DWa5t1a2
Gbq7sw9dJ14fxmUwibLiF7GF4llYU8QpwH9RVS43Y9YgnJOECDjgCzTxcgFCihHJqUpEoEOKdUxb
NdIHJVKmmLB/STR/mT0c82AmF9uyEMZNGx2g/v9vMrNlQ7b6DzLCQmpCGadNHI9BbbGWd3FcuoR2
wEBeI2YG456q3AfGqHWXd+2KAVHk0nU93XuNwswdHQ0UqAWxiuHPUkH/lTBysE14VQctwhg/pUWo
ltIE8/wYz/mwbVXhAUiUaNkyh05a8Y1xtyKy9hNsjZwlz5R6IP+0IWK8wk8YIX0MTyA10TGNR92o
ljKnwZ4QN9wJZcfWgUIvfuIyKHHvb75ZWtnS4Y1nchX+Csu84PZ/SGcdkYIe0k94A/MamMU9oySq
rpXpabiGbiXkWsRsT0VdwvZgfQ2WOLWGwQKDAc0ZJwSk77yiJ/J4LdeI/9EVgL+RnRgi/0oxeS2a
A6NCJbV49/niHkdPLDtXzLb1xz18kpljloZ1ScFqDD3YSCArCixteJWnhjGh7vHuaBU/xThNgt2U
t+WCEhEmt4sy+ekn7tohNX76SZHgeGq4GTbtEmIC62VqU7A4ifUKNWrFb6W0CCmr0mhmhT7XloSR
SHBVgJ4YuiLrnUuIVC5LkQg8asBIF17z+mY+m6FCGnbuxLx68EaQdE+lnEIreU79+uLhezGvfrOt
QI7t2F48YBc8TTgK6IbVfmPrwWV9UUuYMTAkaqvQ9r5NmrYgmd4nbKvHoppddXlxKd4dMpaLUZ0g
Wqz1jSlN/Wgx58YPuoMdQfrpKFOs2Y/j4cLkH8VGB0ON84R1H0fI9OkD2cD6wcyNVugbFudX84T6
rHij7zQidNYkymGdYOHnNKZ4K0IgbTmvKY3YBCAb8UKG8hoKDyey7Um9njUoDTtPP7x//fvUYBlz
qBsMjoatMGpppugJSmLNIeV2h0POSUhROI1Jxu2wXa+/Hi3r23pJvqpN2y6mPkZQI7BYV+Rb1Wtn
Y52FnKN8al7goNaNLJksl5YoupLKaaqU9HZPuCBizIsoUiERZxC5HzrqDgO01StEuKxw5xVCE9xy
RVGZYHH+DvOF4mW82MV2bD4ICfl1fUfzYmyUzd1cotYMcNSePhnVMGgfPLVcqripeYWbmFHjCgKe
ke3yukmm+/kc3e6vbOT1CmdeV68FaPQmoQ+xBzpG5mhXYjCRddPBQ1L0zJrtUkntiOBwEN8nVG9u
Dp9TAEWRw37xxRd5F7LkUZuR+QiPv0OX/MMPo6lhTv2MR0yFWXTytfDtLBH1lwcf8AkXfvp33JPI
yVRXnH4oFsaUx6qEvdC3NyryYLaClKIi03Y8KHcl9DuU1LC6RMSFI8OytvHSRI6EXmDPxFfgQh0c
3t5CVyPkpOumPJE0l2Y0hLh3jw0i6OF6LoQ1XEBBHFtv+7vuLVesG6Y7gzKRe4A7VON65mZt8KUA
VIOHq2o8zwd9XD7vPk1N1XhhcyDeZdhhCdk59MIXT6kanUPvrNE5dF3DcGHhbekNHG8RKJVIlKyk
XQPldt2QT/RTo+fmPMjz5b69RnSKLQkebQsnV5rPekUvzL4dY9BvtleuIqVvy0wVJ5uHkWTSiAJu
0tvNuEj3hG3i1h62UbE6COA6slgPIAV1DakE3+UzxXSikIuePPw4DHdPlVt40ExjVXy9hj3qaBhg
nHbQkOvVaLdhn1qHEIzAPpawuDRcf1b2mncC5w6YR1wJqZpv1Rvqi+7Q22mJx+JAnWM4KZ8C5xF0
ZCLo3p6qWT9sczoqdMOeVDiBnLaoU41SlBkKZwFq5l0WNR5KkWYyc+KgwSy3F8TJ2M72HmE6LOv4
uQ1g55EGdJaUbxu5EXh66pajIOOAcMkfjz0EGAI5euqeBOHLcjWtyuQJ8c1yXXKrmiE15Ly8SN58
B0j2KQkwy+RqcQu0fdMKjY6ktJEc8Ep1XcAYHaPeITm5Y4uiaJITVQqNfOVnN1fk1Ji0SFZrQtj5
FrBH330f4YuMe5xeq7GbCsPo1XiNo4WicVHNvAITTE/RKqtzpoVzePzkJdltJWQSFbrSqzLnxIBl
uA717Xq/XCL5mHZYqLeH1jD6RtqUhSvX7SOC01NGYl0pbhVv4ZiPZXFXEJrkfNwpSzAT7eRXz9O7
6Wdp1BhZ7wR5mBqDs56AHMcWSYP0Q7xqwpb10Y5dp2bS8quzyIRPDz8Ybtc6l2S7OenI7p3+QSMD
dAFrltW8On/cfpnqlLFm2uEB9KnUGCr+no6YJY1cm03Zt2y1sAKqDl1v1TQcJBwgAuRY0Zoxiw3I
O/qZKxo5U7jg6GWJrVUeyqMTC1fIosLwOCs0BzQdMiZ6kXmb4y2brwbSEBFS0HRVeAs6V8StDxbQ
VF3f2LwJu/AyGezetFvf+go4z1PEKzZBDDWQYLH4XWgV2G4Uo3b4JjsmoBN0c7nfcS1oLcr08sR2
WxSu7hzGKz5l1z3ZB/Xh0FK7N2GylZAmo0xAaBapCLEzTdkDF0L2eOo8ExK39ra+QXBuUIqxIDeW
v6+3jQfX3lUWEV86RSKw5nzvhDQozVLTjqPiQ6JHm9nw5poHWBxNVKpsQ3fAOrjnpXciHh9xrK+A
Xj/1aLrSFOTqmJghYYgLKFRC8aMECZ5ipfCJ9kiWG69dS0aDf5RwKN3v5r9Pc0lOgfJCI26bm/4Z
RO05G0mVv74i3RFy0NxxnbqHHkJOYm7Zl+vFvPKDJMPFYhsfYSxR1wmNc++ZuH+DU+kNCS/Iln5E
K417iKWvm/XuHWDE10Dhvllv9r5dR/wGN/XZrL3jdFjXdf+92NrroGd+5jT2qTeBwvuw1sTt2D5r
UVwauzo0Du3FmL3TwC3PByG73PhWBg9HLUd7PYowHrT8/dhAd+tITsOvJMHKzKkLIFEzY5pqBuDZ
TxMqXiTJm6omi2VyeCVIxwNRzjBjLqbbVnoVS3xw3eyXmC4lwf1EZe4cxk8Et5auQ5OMdJTGl9po
m2ReoqvpeodIjiwDZuiFBuN4j+lxTNMq5hEqh3EQzf7qmm9CFXYQ49Hsd80Ks4JzihaAwjZZEO88
5TwcmHsafdtRccTG7fD3iiKMYcgJTKBj9EK+YsgVBRPBw1v1xIvyyGTem++ExsOS2ALb69CKssW3
yrbT6lXRil/2uCY+u6ycd639coKWIlvi1OmdHp1Q6D41HtCf0Odiy45d36u93rT1vmqYX8TQNetG
NZfbyV0WKB06dBD8bEDfr05F79fBx//64d9ihGTJyVzcLJZL/P3xv73/3//sL/4icAVCrTp6FA1Q
tmFFlSbEfLdYf/4iRfWQ1hCgEp6cNNDXZZhzSsudpLR0aH7pYLY7bISqE+L+DX3weHtGVjjabBME
V8KUoTKf2arC+MlIZLU3WDx5+jp5+vbN1xyDESufiUGdeyn0dvD23Xdfvfr++8n7V+++efPty/ev
AADcuJEA1xUHFab5FLA01XJZ3NRwQpefvyi+A5z3lseYdTLFQTdnidiSb/ygUB3dSDrTXa364nGd
JaPnJ9X/CgHnP1MdqZoPzDp1rlHDgESrmzz/Qvz0vIJIYNGOyF6h7m9RGYrKannw8b9/+JfKg25T
TT/+w/v/+6/Iey4hAzCy0kO6bLpnNxfCfm+//hvClyoY+tf0uRaircOLrpy2zRLVWPysXeCqqe1Q
53nGmWPyWzquDUcjGMYwkDu6vl1w+dfanQsQM1RJtaq/w6RDfLyIh8cFsxdVFq+SxUPOnjW55Pll
zVa7YmU6Rzvhop+HaHdHId2GYykMW5PlhX7/i9OQia/nBPg2cbw10aPmZxuJxIJna5ett9X0zfq2
uYExYjz2arqgp6HgVaZfM3hvxnZmhqyCcEuzuYXuHYeoE+ujVE/IJKcG8dBsqKReDYz1UrSdbhc3
/jCZLetyDeAkpsUwYC191s2aY/mWriA8QOy5BnOst62iEeCqW9KI9VENxuPe1XoaMWacd4D6Miug
TzJA7rbZbFh1ckDBtXGpcHiv+Za5CryUKHgKPgOMzSfTcnZj2xCxy58j3lWCem8efZb03EqkFkIn
v8hCx0A/GAe1Ewm/EfUbU6pttxlKZR1NXu2XiyWPVSFDvkTjF3cPsjffjQyVaWjLPI1pBt01sMOS
1ohMMJaX1TVCz1t8Y0A8ox0zlLM+qmPf5lc7LU4UnrKCM56RYWIkiZHeMzLni6KJU3auY9dO3TFr
NcxolQOgHvVDEqLG7J3t1AvoKX9CNlTfj9BPh/q4hUrJ4zDOlFVTMT8auKaw3BNgQnYrjD+74o3G
85ipMRuIaNDDhIplu2kuF2mwXPZKyRRYYnKHeGmEjAiiKHUS3knk2yEgL3nFCd+lpmJeKBko2nIR
kitvm0UF3NHKEsJQZE7kbOBWRstb5WAq8uJZs9osa84ACqgW4z6VLfpyDPQZPQnu/JC9aY4xdgbh
AXYOrl5XzcA4EYZUbCGnDq0szgxOvVQ7db/8vVJd6oB9tmpZixk2ak97O1B7+hLovxldgh/W9f2G
eCRt0acwM2zdfL+kTbDGVEgTHwQY9sBebpcHnGttWlgDQVBKNiaiBaU/RQhGuh2E9l8qeDDRI2ex
SiFz5tTBs895bV5cRtgQr8pEfmBhWU24yqsJpkWarAEjXi+qql5P+PaTyOE8bPTjLe8xfrUXxntg
AqIv2D4Ev10sLjG065yC4C4l5OHEbBh0BFRzKhyJbeWRjBQzJINfKKrYBhupoXF9JleCF0IXXbNo
PEpECRS9j7rw8xlNUNooorWCdFndmk1s6/jCxkOC8mD4nEAzb/W9t2EPFP2kiWwg7TnhN4zx4z9+
+DeK0dmtNtVi+/Gf3v+ff8G8TrvfEGgSvG+b2wVhpp0WJLEsoaFonkirITSjzTp7lbFURaB7W3fz
MMwZKe+bZn1TH0juq06G9Urf1ijNgu6ZVez2NA8y9ljRdjQZ6wkOhSQWzEmPDI28PrZMhLNO4oKo
/C071vBg+qLnHuGZVXAl1ignrXK1feFaHjgzCDKULuEEiIUtJ7iPpGzpHYxvscRnDGOF8jwSMO3b
PUr1nBaMTEzdWkOe0VBvGobB2GM0F/axcIUcBACYCESF+dqvFx/39UhFDxkhZc2hO81snCZYt5dc
7cttCcDIuqxpzc0V9mIZvzigNJcNMBTlZoFx0+AyeV48x/uEJkHjD4efBlJnFZxxWra8X7mkDM3s
LaM8UwMTTtvaWazI8tz1fjXFpN9MaZk9Vk1boSBNb372GdWIJ0hC02GpU6xuYDiZ6rfPCXPDFDnF
dCb0WZDrrOpjgu1wUtZzPY1IsLh6cw7YWrJGnatxUDbtmwl6b6H9q6u4NScmS3m5YGs2wepvBnZU
X70q3SZWWCR2OI7o8B2vBNXNQ9wa/D1043IFLcqGqvchYrfaczYoUx+i5iUaBGbXNdwpHYE1dSlg
G5rbmCGMB0qn6KS6BtwNUUPJuhXLITqxmtvZsI0sPQUNj1mpMDit6ztdPg3uU4U/bRG4ztURGIRx
i1zCkyJ1CH9oyNbFo6hgMT8yV4+qFpV+SBL7C5gxEIXARaCvHQ5CUgqu0AmbJdcqINFwwkiNpZ+Y
33R3RoJ89JexwnlYVXkmUNVcR1itMI9BC1HXP+449P3TTjmdtxUb1VvOeXgF0e2AGL4u4Y1DHCTm
qmBEBOwicTsUOb/dT60OOHWUhxF41w1WSN6TnEgJDdhbCPVYyU8OGP80YWLIDNr1CxJVseOJp/XG
+iOw3dMsvfjxT5d4ESFRajD0Ny9/+C8v/wjFPn+myHekf6lA8qV8DvTclJNuzB+ZLL+3hqHAyoGM
Qq6n2KVkU8P3g4//48Nfkc4Fl2EGm1Hvd4vlx//5/p//a44IR7o3DpRE8VdLYDeSFXpN7K5RzzRC
G7mEaiJNtqSAbyVTeYPBy+Uy+Qq/cRhQPpGA5pstWiZUHNeTfhoRJiwJIG8OSjpgB092Gy4xqbnR
/hGwkGqSOaDdoqTwwAgRPB7WY5o4dZRlVQhH+g0gBYPhaK5MO/5N2S5mNGLXGyQWqai8x5ECgXv+
/MXvfbxivjKzIw9uoc12v8ZkceRissusOiOrztPf+9pdFcDuF1utBpx+3NMSSxf8PXcDJ/FK83Rw
vcdR0ogauIDvl3buub1fG62q4EXQPU0Sdg6g9a5eXF2H2Y50++T+BG3Y0UqWQTcuvurxajQmAGYG
3oX/t/UhctWjyRp3EjEsQ8WpQ7MoYLaGqEC6Z3A0LTuEkQX2Jw3SbaDrPHl7sTfdqP0K+Ht8ywIB
J9egv4sx5xosRJ5TCo5J+cXVEq7nkmlrosnlmOiAobRfLl1syn157p8w3xeyvsXzUc5mzbaSZIM0
qWErY3A3WyVnz3jmXIQPRKc6VEIiyxrq4LF81iQH32XYTYHJ+vzALuuqRoxuTXHkogcfNrnGlzFD
b0rFYCagM9ddjKnSZVccdij3zfukXez2jLs5/h5j82SF7SEzduUbq3vBoJYWcPlUheBXhM1Z0+5e
khUJY1qDdC117Esu+x6Q81MuPKLshLihsevG8Ig4dF4DJBBKccFA04pmhQegUvwxsK3VfoalBm5i
v/1qJFYu7aiZj8oRN/GEbo3RrhnRERtBGyPrnOA/SGvQKwF97AYwL0ZU3QOlw8PiRJgEpW5SdXN1
WVcB3rPtNaoDUS68B5Z6hu6pZr6vMU2hsxbJfFnfL6bA/AMrvuJQ28CaU8BOQ44Rqai2VkaDUTlK
jn/uED+P+MLURJpIChDcVjA91svIqkBBOwbACchROUXIZWwdEI4GLjVsN/R1FS0vOOxPtM519ZVA
zCuCS2gMDyzQLNifUe/ESx+7/0XMCovKuxpcfAw051zQc3ll8FTIW7eBalj12ywhK5hjd7vU+ixo
KH6Rm5GosXHUzx2tzd4c05eo7+w5nEQVMqQSkmoJ6fMLA7cIS5Q0tkHlsgtQRwkqepbw6ufPnxXP
7LnjKcjMINmQLi90g6apPCDLpE0my+ThVLLIvXNDqoIM1fESARrhfgM8SJXFfPxcIjd0C1AXvvvd
vp8fdr66TxFPh1bSOiP6XKmkXmaluoZjwc2Djg8tEyEt7NGH71OOkNuCbZSmtsCPftGHOXaavpCD
9PF/7Yv/B3sp11k=
"""
import sys
import base64
import zlib
class DictImporter(object):
    def __init__(self, sources):
        self.sources = sources
    def find_module(self, fullname, path=None):
        if fullname == "argparse" and sys.version_info >= (2,7):
            # we were generated with <python2.7 (which pulls in argparse)
            # but we are running now on a stdlib which has it, so use that.
            return None
        if fullname in self.sources:
            return self
        if fullname + '.__init__' in self.sources:
            return self
        return None
    def load_module(self, fullname):
        # print "load_module:", fullname
        from types import ModuleType
        try:
            s = self.sources[fullname]
            is_pkg = False
        except KeyError:
            s = self.sources[fullname + '.__init__']
            is_pkg = True
        co = compile(s, fullname, 'exec')
        module = sys.modules.setdefault(fullname, ModuleType(fullname))
        module.__file__ = "%s/%s" % (__file__, fullname)
        module.__loader__ = self
        if is_pkg:
            module.__path__ = [fullname]
        do_exec(co, module.__dict__) # noqa
        return sys.modules[fullname]
    def get_source(self, name):
        res = self.sources.get(name)
        if res is None:
            res = self.sources.get(name + '.__init__')
        return res
if __name__ == "__main__":
    if sys.version_info >= (3, 0):
        exec("def do_exec(co, loc): exec(co, loc)\n")
        import pickle
        sources = sources.encode("ascii") # ensure bytes
        sources = pickle.loads(zlib.decompress(base64.decodebytes(sources)))
    else:
        import cPickle as pickle
        exec("def do_exec(co, loc): exec co in loc\n")
        sources = pickle.loads(zlib.decompress(base64.decodestring(sources)))
    importer = DictImporter(sources)
    sys.meta_path.insert(0, importer)
    entry = "import pytest; raise SystemExit(pytest.cmdline.main())"
    do_exec(entry, locals()) # noqa
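# Editor's note (not part of the generated script): this is pytest's old
# "genscript"/standalone-script bootstrap. The large `sources` string above is a
# zlib-compressed, base64-encoded pickle mapping module names to their source code;
# DictImporter serves those embedded modules through sys.meta_path so pytest can be
# imported without being installed, after which `pytest.cmdline.main()` is run.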
| pombredanne/pymaven | testsuite.py | Python | apache-2.0 | 260,904 |
# Copyright 2020 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# --- This file has been autogenerated --- #
# --- from docs/Readout-Data-Collection.ipynb --- #
# --- Do not edit this file directly --- #
import os
import numpy as np
import sympy
import cirq
import recirq
@recirq.json_serializable_dataclass(namespace='recirq.readout_scan',
                                    registry=recirq.Registry,
                                    frozen=True)
class ReadoutScanTask:
    """Scan over Ry(theta) angles from -pi/2 to 3pi/2 tracing out a sinusoid
    which is primarily affected by readout error.
    See Also:
        :py:func:`run_readout_scan`
    Attributes:
        dataset_id: A unique identifier for this dataset.
        device_name: The device to run on, by name.
        n_shots: The number of repetitions for each theta value.
        qubit: The qubit to benchmark.
        resolution_factor: We select the number of points in the linspace
            so that the special points: (-1/2, 0, 1/2, 1, 3/2) * pi are
            always included. The total number of theta evaluations
            is resolution_factor * 4 + 1.
    """
    dataset_id: str
    device_name: str
    n_shots: int
    qubit: cirq.GridQubit
    resolution_factor: int
    @property
    def fn(self):
        n_shots = _abbrev_n_shots(n_shots=self.n_shots)
        qubit = _abbrev_grid_qubit(self.qubit)
        return (f'{self.dataset_id}/'
                f'{self.device_name}/'
                f'q-{qubit}/'
                f'ry_scan_{self.resolution_factor}_{n_shots}')
# Define the following helper functions to make nicer `fn` keys
# for the tasks:
def _abbrev_n_shots(n_shots: int) -> str:
    """Shorter n_shots component of a filename"""
    if n_shots % 1000 == 0:
        return f'{n_shots // 1000}k'
    return str(n_shots)
def _abbrev_grid_qubit(qubit: cirq.GridQubit) -> str:
    """Formatted grid_qubit component of a filename"""
    return f'{qubit.row}_{qubit.col}'
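# Illustrative example (added by the editor; the values are hypothetical, not from
# the original file):
#   ReadoutScanTask(dataset_id='2020-02-tutorial', device_name='Syc23-simulator',
#                   n_shots=40_000, qubit=cirq.GridQubit(2, 3), resolution_factor=6).fn
# would evaluate to '2020-02-tutorial/Syc23-simulator/q-2_3/ry_scan_6_40k'.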
EXPERIMENT_NAME = 'readout-scan'
DEFAULT_BASE_DIR = os.path.expanduser(f'~/cirq-results/{EXPERIMENT_NAME}')
def run_readout_scan(task: ReadoutScanTask,
                     base_dir=None):
    """Execute a :py:class:`ReadoutScanTask` task."""
    if base_dir is None:
        base_dir = DEFAULT_BASE_DIR
    if recirq.exists(task, base_dir=base_dir):
        print(f"{task} already exists. Skipping.")
        return
    # Create a simple circuit
    theta = sympy.Symbol('theta')
    circuit = cirq.Circuit([
        cirq.ry(theta).on(task.qubit),
        cirq.measure(task.qubit, key='z')
    ])
    # Use utilities to map sampler names to Sampler objects
    sampler = recirq.get_sampler_by_name(device_name=task.device_name)
    # Use a sweep over theta values.
    # Set up limits so we include (-1/2, 0, 1/2, 1, 3/2) * pi
    # The total number of points is resolution_factor * 4 + 1
    n_special_points: int = 5
    resolution_factor = task.resolution_factor
    theta_sweep = cirq.Linspace(theta, -np.pi / 2, 3 * np.pi / 2,
                                resolution_factor * (n_special_points - 1) + 1)
    thetas = np.asarray([v for ((k, v),) in theta_sweep.param_tuples()])
    flat_circuit, flat_sweep = cirq.flatten_with_sweep(circuit, theta_sweep)
    # Run the jobs
    print(f"Collecting data for {task.qubit}", flush=True)
    results = sampler.run_sweep(program=flat_circuit, params=flat_sweep,
                                repetitions=task.n_shots)
    # Save the results
    recirq.save(task=task, data={
        'thetas': thetas,
        'all_bitstrings': [
            recirq.BitArray(np.asarray(r.measurements['z']))
            for r in results]
    }, base_dir=base_dir)
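# --- Added by the editor: a minimal, illustrative driver sketch, not part of the
# --- autogenerated tasks.py. The dataset id, sampler name and qubit coordinates
# --- below are assumptions; substitute values appropriate for your own project.
def _example_collect_readout_scan_data():
    """Sketch: build a few ReadoutScanTask objects and execute them."""
    dataset_id = '2020-02-readout-scan-example'  # hypothetical identifier
    qubits = [cirq.GridQubit(2, 3), cirq.GridQubit(3, 2)]  # assumed qubits
    tasks = [ReadoutScanTask(dataset_id=dataset_id,
                             device_name='Syc23-simulator',
                             n_shots=40_000,
                             qubit=q,
                             resolution_factor=6) for q in qubits]
    for task in tasks:
        run_readout_scan(task)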
| quantumlib/ReCirq | recirq/readout_scan/tasks.py | Python | apache-2.0 | 4,314 |
from symbol.builder import add_anchor_to_arg
from models.FPN.builder import MSRAResNet50V1FPN as Backbone
from models.FPN.builder import FPNNeck as Neck
from models.FPN.builder import FPNRoiAlign as RoiExtractor
from models.FPN.builder import FPNBbox2fcHead as BboxHead
from mxnext.complicate import normalizer_factory
from models.msrcnn.builder import MaskScoringFasterRcnn as Detector
from models.msrcnn.builder import MaskFPNRpnHead as RpnHead
from models.msrcnn.builder import MaskFasterRcnn4ConvHead as MaskHead
from models.maskrcnn.builder import BboxPostProcessor
from models.maskrcnn.process_output import process_output
from models.msrcnn.builder import MaskIoUConvHead as MaskIoUHead
def get_config(is_train):
    class General:
        log_frequency = 10
        name = __name__.rsplit("/")[-1].rsplit(".")[-1]
        batch_image = 2 if is_train else 1
        fp16 = False
        loader_worker = 8
    class KvstoreParam:
        kvstore = "nccl"
        batch_image = General.batch_image
        gpus = [0, 1, 2, 3, 4, 5, 6, 7]
        fp16 = General.fp16
    class NormalizeParam:
        normalizer = normalizer_factory(type="fixbn")
    class BackboneParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
    class NeckParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
    class RpnParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        batch_image = General.batch_image
        nnvm_proposal = True
        nnvm_rpn_target = False
        class anchor_generate:
            scale = (8,)
            ratio = (0.5, 1.0, 2.0)
            stride = (4, 8, 16, 32, 64)
            image_anchor = 256
            max_side = 1400
        class anchor_assign:
            allowed_border = 0
            pos_thr = 0.7
            neg_thr = 0.3
            min_pos_thr = 0.0
            image_anchor = 256
            pos_fraction = 0.5
        class head:
            conv_channel = 256
            mean = (0, 0, 0, 0)
            std = (1, 1, 1, 1)
        class proposal:
            pre_nms_top_n = 2000 if is_train else 1000
            post_nms_top_n = 2000 if is_train else 1000
            nms_thr = 0.7
            min_bbox_side = 0
        class subsample_proposal:
            proposal_wo_gt = False
            image_roi = 512
            fg_fraction = 0.25
            fg_thr = 0.5
            bg_thr_hi = 0.5
            bg_thr_lo = 0.0
        class bbox_target:
            num_reg_class = 81
            class_agnostic = False
            weight = (1.0, 1.0, 1.0, 1.0)
            mean = (0.0, 0.0, 0.0, 0.0)
            std = (0.1, 0.1, 0.2, 0.2)
    class BboxParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        num_class = 1 + 80
        image_roi = 512
        batch_image = General.batch_image
        class regress_target:
            class_agnostic = False
            mean = (0.0, 0.0, 0.0, 0.0)
            std = (0.1, 0.1, 0.2, 0.2)
    class MaskParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        resolution = 28
        dim_reduced = 256
        num_fg_roi = int(RpnParam.subsample_proposal.image_roi * RpnParam.subsample_proposal.fg_fraction)
    class RoiParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        out_size = 7
        stride = (4, 8, 16, 32)
        roi_canonical_scale = 224
        roi_canonical_level = 4
        img_roi = 1000
    class MaskRoiParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        out_size = 14
        stride = (4, 8, 16, 32)
        roi_canonical_scale = 224
        roi_canonical_level = 4
        img_roi = 100
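    # Note added by the editor (not in the original config): RoiParam drives the
    # 7x7 RoIAlign feeding the box head, while MaskRoiParam drives the 14x14
    # RoIAlign feeding the mask and mask-IoU heads; roi_canonical_scale/level
    # follow the usual FPN level-assignment heuristic.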
    class DatasetParam:
        if is_train:
            image_set = ("coco_train2017", )
        else:
            image_set = ("coco_val2017", )
    class OptimizeParam:
        class optimizer:
            type = "sgd"
            lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image
            momentum = 0.9
            wd = 0.0001
            clip_gradient = None
        class schedule:
            mult = 1
            begin_epoch = 0
            end_epoch = 6 * mult
            lr_iter = [60000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
                       80000 * mult * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]
        class warmup:
            type = "gradual"
            lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image / 3.0
            iter = 500
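        # Note added by the editor (not in the original config): the base learning
        # rate follows the linear-scaling heuristic -- 0.01 per 8 images, scaled by
        # the global batch (gpus * batch_image) -- with a 500-iteration gradual
        # warmup at one third of that rate. lr_iter converts the canonical 60k/80k
        # step schedule (defined for a global batch of 16) to this run's batch size.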
    class TestParam:
        min_det_score = 0.05
        max_det_per_image = 100
        process_roidb = lambda x: x
        process_output = lambda x, y: process_output(x, y)
        class model:
            prefix = "experiments/{}/checkpoint".format(General.name)
            epoch = OptimizeParam.schedule.end_epoch
        class nms:
            type = "nms"
            thr = 0.5
        class coco:
            annotation = "data/coco/annotations/instances_minival2014.json"
    backbone = Backbone(BackboneParam)
    neck = Neck(NeckParam)
    rpn_head = RpnHead(RpnParam, MaskParam)
    roi_extractor = RoiExtractor(RoiParam)
    mask_roi_extractor = RoiExtractor(MaskRoiParam)
    bbox_head = BboxHead(BboxParam)
    mask_head = MaskHead(BboxParam, MaskParam, MaskRoiParam)
    bbox_post_processer = BboxPostProcessor(TestParam)
    maskiou_head = MaskIoUHead(TestParam, BboxParam, MaskParam)
    detector = Detector()
    if is_train:
        train_sym = detector.get_train_symbol(backbone, neck, rpn_head, roi_extractor, mask_roi_extractor, bbox_head, mask_head, maskiou_head)
        test_sym = None
    else:
        train_sym = None
        test_sym = detector.get_test_symbol(backbone, neck, rpn_head, roi_extractor, mask_roi_extractor, bbox_head, mask_head, maskiou_head, bbox_post_processer)
    class ModelParam:
        train_symbol = train_sym
        test_symbol = test_sym
        from_scratch = False
        random = True
        memonger = False
        memonger_until = "stage3_unit21_plus"
        class pretrain:
            prefix = "pretrain_model/resnet-v1-50"
            epoch = 0
            fixed_param = ["conv0", "stage1", "gamma", "beta"]
            excluded_param = ["mask_fcn"]
        def process_weight(sym, arg, aux):
            for stride in RpnParam.anchor_generate.stride:
                add_anchor_to_arg(
                    sym, arg, aux, RpnParam.anchor_generate.max_side,
                    stride, RpnParam.anchor_generate.scale,
                    RpnParam.anchor_generate.ratio)
    # data processing
    class NormParam:
        mean = (122.7717, 115.9465, 102.9801) # RGB order
        std = (1.0, 1.0, 1.0)
    # data processing
    class ResizeParam:
        short = 800
        long = 1333
    class PadParam:
        short = 800
        long = 1333
        max_num_gt = 100
        max_len_gt_poly = 2500
class AnchorTarget2DParam:
def __init__(self):
self.generate = self._generate()
class _generate:
def __init__(self):
self.stride = (4, 8, 16, 32, 64)
self.short = (200, 100, 50, 25, 13)
self.long = (334, 167, 84, 42, 21)
scales = (8)
aspects = (0.5, 1.0, 2.0)
class assign:
allowed_border = 0
pos_thr = 0.7
neg_thr = 0.3
min_pos_thr = 0.0
class sample:
image_anchor = 256
pos_fraction = 0.5
class RenameParam:
mapping = dict(image="data")
from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \
ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
RenameRecord, Norm2DImage, Pad2DImage
from models.maskrcnn.input import PreprocessGtPoly, EncodeGtPoly, \
Resize2DImageBboxMask, Flip2DImageBboxMask, Pad2DImageBboxMask
from models.FPN.input import PyramidAnchorTarget2D
if is_train:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
PreprocessGtPoly(),
Resize2DImageBboxMask(ResizeParam),
Flip2DImageBboxMask(),
EncodeGtPoly(PadParam),
Pad2DImageBboxMask(PadParam),
ConvertImageFromHwcToChw(),
RenameRecord(RenameParam.mapping)
]
data_name = ["data"]
label_name = ["im_info", "gt_bbox", "gt_poly"]
if not RpnParam.nnvm_rpn_target:
transform.append(PyramidAnchorTarget2D(AnchorTarget2DParam()))
label_name += ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"]
else:
transform = [
ReadRoiRecord(None),
Norm2DImage(NormParam),
Resize2DImageBbox(ResizeParam),
ConvertImageFromHwcToChw(),
RenameRecord(RenameParam.mapping)
]
data_name = ["data", "im_info", "im_id", "rec_id"]
label_name = []
import core.detection_metric as metric
from models.msrcnn.metric import SigmoidCELossMetric, L2
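# Each metric below reads the matching loss/label outputs of the training symbol.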
rpn_acc_metric = metric.AccWithIgnore(
"RpnAcc",
["rpn_cls_loss_output", "rpn_cls_label_blockgrad_output"],
[]
)
rpn_l1_metric = metric.L1(
"RpnL1",
["rpn_reg_loss_output", "rpn_cls_label_blockgrad_output"],
[]
)
# for bbox, the label is generated in network so it is an output
box_acc_metric = metric.AccWithIgnore(
"RcnnAcc",
["bbox_cls_loss_output", "bbox_label_blockgrad_output"],
[]
)
box_l1_metric = metric.L1(
"RcnnL1",
["bbox_reg_loss_output", "bbox_label_blockgrad_output"],
[]
)
mask_cls_metric = SigmoidCELossMetric(
"MaskCE",
["mask_loss_output"],
[]
)
iou_l2_metric = L2(
"IoUL2",
["iou_head_loss_output"],
[]
)
metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric, mask_cls_metric, iou_l2_metric]
return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \
ModelParam, OptimizeParam, TestParam, \
transform, data_name, label_name, metric_list
| TuSimple/simpledet | config/ms_r50v1_fpn_1x.py | Python | apache-2.0 | 10,314 |
#!/usr/bin/python
# *****************************************************************************
#
# Copyright (c) 2016, EPAM SYSTEMS INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ******************************************************************************
import logging
import json
import sys
from dlab.fab import *
from dlab.meta_lib import *
from dlab.actions_lib import *
import os
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--uuid', type=str, default='')
args = parser.parse_args()
if __name__ == "__main__":
local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
os.environ['request_id'])
local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG,
filename=local_log_filepath)
notebook_config = dict()
try:
notebook_config['exploratory_name'] = os.environ['exploratory_name']
except:
notebook_config['exploratory_name'] = ''
notebook_config['service_base_name'] = os.environ['conf_service_base_name']
notebook_config['instance_type'] = os.environ['aws_notebook_instance_type']
notebook_config['key_name'] = os.environ['conf_key_name']
notebook_config['user_keyname'] = os.environ['edge_user_name']
notebook_config['instance_name'] = '{}-{}-nb-{}-{}'.format(notebook_config['service_base_name'],
os.environ['edge_user_name'],
notebook_config['exploratory_name'], args.uuid)
notebook_config['expected_image_name'] = '{}-{}-notebook-image'.format(notebook_config['service_base_name'],
os.environ['application'])
notebook_config['notebook_image_name'] = str(os.environ.get('notebook_image_name'))
notebook_config['role_profile_name'] = '{}-{}-nb-de-Profile' \
.format(notebook_config['service_base_name'].lower().replace('-', '_'), os.environ['edge_user_name'])
notebook_config['security_group_name'] = '{}-{}-nb-SG'.format(notebook_config['service_base_name'],
os.environ['edge_user_name'])
notebook_config['tag_name'] = '{}-Tag'.format(notebook_config['service_base_name'])
notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
# generating variables regarding EDGE proxy on Notebook instance
instance_hostname = get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
edge_instance_name = os.environ['conf_service_base_name'] + "-" + os.environ['edge_user_name'] + '-edge'
edge_instance_hostname = get_instance_hostname(notebook_config['tag_name'], edge_instance_name)
edge_instance_ip = get_instance_ip_address(notebook_config['tag_name'], edge_instance_name).get('Public')
keyfile_name = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
try:
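# The default login user of the base image depends on the OS family:
# 'ubuntu' for Debian-based images, 'ec2-user' for RedHat-based ones.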
if os.environ['conf_os_family'] == 'debian':
initial_user = 'ubuntu'
sudo_group = 'sudo'
if os.environ['conf_os_family'] == 'redhat':
initial_user = 'ec2-user'
sudo_group = 'wheel'
logging.info('[CREATING DLAB SSH USER]')
print('[CREATING DLAB SSH USER]')
params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format\
(instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem", initial_user,
notebook_config['dlab_ssh_user'], sudo_group)
try:
local("~/scripts/{}.py {}".format('create_ssh_user', params))
except:
traceback.print_exc()
raise Exception
except Exception as err:
append_result("Failed creating ssh user 'dlab'.", str(err))
remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
sys.exit(1)
# configuring proxy on Notebook instance
try:
logging.info('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
print('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
.format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config), notebook_config['dlab_ssh_user'])
try:
local("~/scripts/{}.py {}".format('common_configure_proxy', params))
except:
traceback.print_exc()
raise Exception
except Exception as err:
append_result("Failed to configure proxy.", str(err))
remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
sys.exit(1)
# updating repositories & installing python packages
try:
logging.info('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
print('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
params = "--hostname {} --keyfile {} --user {} --region {}".\
format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'], os.environ['aws_region'])
try:
local("~/scripts/{}.py {}".format('install_prerequisites', params))
except:
traceback.print_exc()
raise Exception
except Exception as err:
append_result("Failed installing apps: apt & pip.", str(err))
remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
sys.exit(1)
# installing and configuring jupiter and all dependencies
try:
logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
params = "--hostname {} " \
"--keyfile {} " \
"--region {} " \
"--spark_version {} " \
"--hadoop_version {} " \
"--os_user {} " \
"--scala_version {} " \
"--r_mirror {} " \
"--exploratory_name {}".\
format(instance_hostname,
keyfile_name,
os.environ['aws_region'],
os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'],
notebook_config['dlab_ssh_user'],
os.environ['notebook_scala_version'],
os.environ['notebook_r_mirror'],
notebook_config['exploratory_name'])
try:
local("~/scripts/{}.py {}".format('configure_jupyter_node', params))
except:
traceback.print_exc()
raise Exception
except Exception as err:
append_result("Failed to configure jupyter.", str(err))
remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
sys.exit(1)
try:
print('[INSTALLING USERs KEY]')
logging.info('[INSTALLING USERs KEY]')
additional_config = {"user_keyname": notebook_config['user_keyname'],
"user_keydir": os.environ['conf_key_dir']}
params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['dlab_ssh_user'])
try:
local("~/scripts/{}.py {}".format('install_user_key', params))
except:
append_result("Failed installing users key")
raise Exception
except Exception as err:
append_result("Failed installing users key.", str(err))
remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
sys.exit(1)
try:
print('[SETUP USER GIT CREDENTIALS]')
logging.info('[SETUP USER GIT CREDENTIALS]')
params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
.format(notebook_config['dlab_ssh_user'], instance_hostname, keyfile_name)
try:
local("~/scripts/{}.py {}".format('common_download_git_certfile', params))
local("~/scripts/{}.py {}".format('manage_git_creds', params))
except:
append_result("Failed setup git credentials")
raise Exception
except Exception as err:
append_result("Failed to setup git credentials.", str(err))
remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
sys.exit(1)
try:
logging.info('[POST CONFIGURING PROCESS]')
print('[POST CONFIGURING PROCESS]')
if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None']:
params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
.format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'],
notebook_config['tag_name'], notebook_config['instance_name'])
try:
local("~/scripts/{}.py {}".format('common_remove_remote_kernels', params))
except:
traceback.print_exc()
raise Exception
except Exception as err:
append_result("Failed to post configuring instance.", str(err))
remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
sys.exit(1)
try:
print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
additional_info = {
'instance_hostname': instance_hostname,
'tensor': False
}
params = "--edge_hostname {} " \
"--keyfile {} " \
"--os_user {} " \
"--type {} " \
"--exploratory_name {} " \
"--additional_info '{}'"\
.format(edge_instance_hostname,
keyfile_name,
notebook_config['dlab_ssh_user'],
'jupyter',
notebook_config['exploratory_name'],
json.dumps(additional_info))
try:
local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
except:
append_result("Failed edge reverse proxy template")
raise Exception
except Exception as err:
append_result("Failed to set edge reverse proxy template.", str(err))
remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
sys.exit(1)
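# When shared images are enabled, an AMI is captured from this instance the first
# time a notebook of this application type is configured; later notebooks reuse it.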
if notebook_config['shared_image_enabled'] == 'true':
try:
print('[CREATING AMI]')
ami_id = get_ami_id_by_name(notebook_config['expected_image_name'])
if ami_id == '':
print("Looks like it's first time we configure notebook server. Creating image.")
image_id = create_image_from_instance(tag_name=notebook_config['tag_name'],
instance_name=notebook_config['instance_name'],
image_name=notebook_config['expected_image_name'])
if image_id != '':
print("Image was successfully created. It's ID is {}".format(image_id))
except Exception as err:
append_result("Failed creating image.", str(err))
remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
sys.exit(1)
# generating output information
ip_address = get_instance_ip_address(notebook_config['tag_name'], notebook_config['instance_name']).get('Private')
dns_name = get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
jupyter_ip_url = "http://" + ip_address + ":8888/{}/".format(notebook_config['exploratory_name'])
jupyter_dns_url = "http://" + dns_name + ":8888/{}/".format(notebook_config['exploratory_name'])
jupyter_notebook_acces_url = "http://" + edge_instance_ip + "/{}/".format(notebook_config['exploratory_name'])
jupyter_ungit_acces_url = "http://" + edge_instance_ip + "/{}-ungit/".format(notebook_config['exploratory_name'])
ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(notebook_config['exploratory_name'])
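# The IP/DNS URLs above point directly at the notebook instance (reachable only
# through an SSH tunnel); the edge_instance_ip URLs are served via the reverse
# proxy on the edge node.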
print('[SUMMARY]')
logging.info('[SUMMARY]')
print("Instance name: {}".format(notebook_config['instance_name']))
print("Private DNS: {}".format(dns_name))
print("Private IP: {}".format(ip_address))
print("Instance ID: {}".format(get_instance_by_name(notebook_config['tag_name'], notebook_config['instance_name'])))
print("Instance type: {}".format(notebook_config['instance_type']))
print("Key name: {}".format(notebook_config['key_name']))
print("User key name: {}".format(notebook_config['user_keyname']))
print("Image name: {}".format(notebook_config['notebook_image_name']))
print("Profile name: {}".format(notebook_config['role_profile_name']))
print("SG name: {}".format(notebook_config['security_group_name']))
print("Jupyter URL: {}".format(jupyter_ip_url))
print("Jupyter URL: {}".format(jupyter_dns_url))
print("Ungit URL: {}".format(ungit_ip_url))
print("ReverseProxyNotebook".format(jupyter_notebook_acces_url))
print("ReverseProxyUngit".format(jupyter_ungit_acces_url))
print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
format(notebook_config['key_name'], notebook_config['dlab_ssh_user'], ip_address))
print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
format(notebook_config['key_name'], notebook_config['dlab_ssh_user'], dns_name))
with open("/root/result.json", 'w') as result:
res = {"hostname": dns_name,
"ip": ip_address,
"instance_id": get_instance_by_name(notebook_config['tag_name'], notebook_config['instance_name']),
"master_keyname": os.environ['conf_key_name'],
"notebook_name": notebook_config['instance_name'],
"notebook_image_name": notebook_config['notebook_image_name'],
"Action": "Create new notebook server",
"exploratory_url": [
{"description": "Jupyter",
"url": jupyter_notebook_acces_url},
{"description": "Ungit",
"url": jupyter_ungit_acces_url},
{"description": "Jupyter (via tunnel)",
"url": jupyter_ip_url},
{"description": "Ungit (via tunnel)",
"url": ungit_ip_url}
]}
result.write(json.dumps(res)) | epam/DLab | infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py | Python | apache-2.0 | 15,517 |
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Module used after C{%(destdir)s} has been finalized to create the
initial packaging. Also contains error reporting.
"""
import codecs
import imp
import itertools
import os
import re
import site
import sre_constants
import stat
import subprocess
import sys
from conary import files, trove
from conary.build import buildpackage, filter, policy, recipe, tags, use
from conary.build import smartform
from conary.deps import deps
from conary.lib import elf, magic, util, pydeps, fixedglob, graph
from conary.build.action import TARGET_LINUX
from conary.build.action import TARGET_WINDOWS
try:
from xml.etree import ElementTree
except ImportError:
try:
from elementtree import ElementTree
except ImportError:
ElementTree = None
# Helper class
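# Caches getTrovesWithProvides() lookups so repeated queries for the same
# dependency sets do not hit the database again.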
class _DatabaseDepCache(object):
__slots__ = ['db', 'cache']
def __init__(self, db):
self.db = db
self.cache = {}
def getProvides(self, depSetList):
ret = {}
missing = []
for depSet in depSetList:
if depSet in self.cache:
ret[depSet] = self.cache[depSet]
else:
missing.append(depSet)
newresults = self.db.getTrovesWithProvides(missing)
ret.update(newresults)
self.cache.update(newresults)
return ret
class _filterSpec(policy.Policy):
"""
Pure virtual base class from which C{ComponentSpec} and C{PackageSpec}
are derived.
"""
bucket = policy.PACKAGE_CREATION
processUnmodified = False
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
def __init__(self, *args, **keywords):
self.extraFilters = []
policy.Policy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
"""
Call derived classes (C{ComponentSpec} or C{PackageSpec}) as::
ThisClass('<name>', 'filterexp1', 'filterexp2')
where C{filterexp} is either a regular expression or a
tuple of C{(regexp[, setmodes[, unsetmodes]])}
"""
if args:
theName = args[0]
for filterexp in args[1:]:
self.extraFilters.append((theName, filterexp))
policy.Policy.updateArgs(self, **keywords)
class _addInfo(policy.Policy):
"""
Pure virtual class for policies that add information such as tags,
requirements, and provision, to files.
"""
bucket = policy.PACKAGE_CREATION
processUnmodified = False
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
)
keywords = {
'included': {},
'excluded': {}
}
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
def updateArgs(self, *args, **keywords):
"""
Call as::
C{I{ClassName}(I{info}, I{filterexp})}
or::
C{I{ClassName}(I{info}, exceptions=I{filterexp})}
where C{I{filterexp}} is either a regular expression or a
tuple of C{(regexp[, setmodes[, unsetmodes]])}
"""
if args:
args = list(args)
info = args.pop(0)
if args:
if not self.included:
self.included = {}
if info not in self.included:
self.included[info] = []
self.included[info].extend(args)
elif 'exceptions' in keywords:
# not the usual exception handling, this is an exception
if not self.excluded:
self.excluded = {}
if info not in self.excluded:
self.excluded[info] = []
self.excluded[info].append(keywords.pop('exceptions'))
else:
raise TypeError, 'no paths provided'
policy.Policy.updateArgs(self, **keywords)
def doProcess(self, recipe):
# for filters
self.rootdir = self.rootdir % recipe.macros
# instantiate filters
d = {}
for info in self.included:
newinfo = info % recipe.macros
l = []
for item in self.included[info]:
l.append(filter.Filter(item, recipe.macros))
d[newinfo] = l
self.included = d
d = {}
for info in self.excluded:
newinfo = info % recipe.macros
l = []
for item in self.excluded[info]:
l.append(filter.Filter(item, recipe.macros))
d[newinfo] = l
self.excluded = d
policy.Policy.doProcess(self, recipe)
def doFile(self, path):
fullpath = self.recipe.macros.destdir+path
if not util.isregular(fullpath) and not os.path.islink(fullpath):
return
self.runInfo(path)
def runInfo(self, path):
'pure virtual'
pass
class Config(policy.Policy):
"""
NAME
====
B{C{r.Config()}} - Mark files as configuration files
SYNOPSIS
========
C{r.Config([I{filterexp}] || [I{exceptions=filterexp}])}
DESCRIPTION
===========
The C{r.Config} policy marks all files below C{%(sysconfdir)s}
(that is, C{/etc}) and C{%(taghandlerdir)s} (that is,
C{/usr/libexec/conary/tags/}), and any other files explicitly
mentioned, as configuration files.
- To mark files as exceptions, use
C{r.Config(exceptions='I{filterexp}')}.
- To mark explicit inclusions as configuration files, use:
C{r.Config('I{filterexp}')}
A file marked as a Config file cannot also be marked as a
Transient file or an InitialContents file. Conary enforces this
requirement.
EXAMPLES
========
C{r.Config(exceptions='%(sysconfdir)s/X11/xkb/xkbcomp')}
The file C{/etc/X11/xkb/xkbcomp} is marked as an exception, since it is
not actually a configuration file even though it is within the C{/etc}
(C{%(sysconfdir)s}) directory hierarchy and would be marked as a
configuration file by default.
C{r.Config('%(mmdir)s/Mailman/mm_cfg.py')}
Marks the file C{%(mmdir)s/Mailman/mm_cfg.py} as a configuration file;
it would not be automatically marked as a configuration file otherwise.
"""
bucket = policy.PACKAGE_CREATION
processUnmodified = True
requires = (
# for :config component, ComponentSpec must run after Config
# Otherwise, this policy would follow PackageSpec and just set isConfig
# on each config file
('ComponentSpec', policy.REQUIRED_SUBSEQUENT),
)
invariantinclusions = [ '%(sysconfdir)s/', '%(taghandlerdir)s/']
invariantexceptions = [ '%(userinfodir)s/', '%(groupinfodir)s' ]
def doFile(self, filename):
m = self.recipe.magic[filename]
if m and m.name == "ELF":
# an ELF file cannot be a config file, some programs put
# ELF files under /etc (X, for example), and tag handlers
# can be ELF or shell scripts; we just want tag handlers
# to be config files if they are shell scripts.
# Just in case it was not intentional, warn...
if self.macros.sysconfdir in filename:
self.info('ELF file %s found in config directory', filename)
return
fullpath = self.macros.destdir + filename
if os.path.isfile(fullpath) and util.isregular(fullpath):
if self._fileIsBinary(filename, fullpath):
self.error("binary file '%s' is marked as config" % \
filename)
self._markConfig(filename, fullpath)
def _fileIsBinary(self, path, fn, maxsize=None, decodeFailIsError=True):
limit = os.stat(fn)[stat.ST_SIZE]
if maxsize is not None and limit > maxsize:
self.warn('%s: file size %d longer than max %d',
path, limit, maxsize)
return True
# we'll consider file to be binary file if we don't find any
# good reason to mark it as text, or if we find a good reason
# to mark it as binary
foundFF = False
foundNL = False
f = open(fn, 'r')
try:
while f.tell() < limit:
buf = f.read(65536)
if chr(0) in buf:
self.warn('%s: file contains NULL byte', path)
return True
if '\xff\xff' in buf:
self.warn('%s: file contains 0xFFFF sequence', path)
return True
if '\xff' in buf:
foundFF = True
if '\n' in buf:
foundNL = True
finally:
f.close()
if foundFF and not foundNL:
self.error('%s: found 0xFF without newline', path)
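# Final text/binary heuristic: try decoding as UTF-8 and then as windows-1252;
# if even the legacy fallback fails to decode, treat the file as binary when
# decodeFailIsError is set.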
utf8 = codecs.open(fn, 'r', 'utf-8')
win1252 = codecs.open(fn, 'r', 'windows-1252')
try:
try:
while utf8.tell() < limit:
utf8.read(65536)
except UnicodeDecodeError, e:
# Still want to print a warning if it is not unicode;
# Note that Code Page 1252 is considered a legacy
# encoding on Windows
self.warn('%s: %s', path, str(e))
try:
while win1252.tell() < limit:
win1252.read(65536)
except UnicodeDecodeError, e:
self.warn('%s: %s', path, str(e))
return decodeFailIsError
finally:
utf8.close()
win1252.close()
return False
def _addTrailingNewline(self, filename, fullpath):
# FIXME: This exists only for stability; there is no longer
# any need to add trailing newlines to config files. This
# also violates the rule that no files are modified after
# destdir modification has been completed.
self.warn("adding trailing newline to config file '%s'" % \
filename)
mode = os.lstat(fullpath)[stat.ST_MODE]
oldmode = None
if mode & 0600 != 0600:
# need to be able to read and write the file to fix it
oldmode = mode
os.chmod(fullpath, mode|0600)
f = open(fullpath, 'a')
f.seek(0, 2)
f.write('\n')
f.close()
if oldmode is not None:
os.chmod(fullpath, oldmode)
def _markConfig(self, filename, fullpath):
self.info(filename)
f = file(fullpath)
f.seek(0, 2)
if f.tell():
# file has contents
f.seek(-1, 2)
lastchar = f.read(1)
f.close()
if lastchar != '\n':
self._addTrailingNewline(filename, fullpath)
f.close()
self.recipe.ComponentSpec(_config=filename)
class ComponentSpec(_filterSpec):
"""
NAME
====
B{C{r.ComponentSpec()}} - Determines which component each file is in
SYNOPSIS
========
C{r.ComponentSpec([I{componentname}, I{filterexp}] || [I{packagename}:I{componentname}, I{filterexp}])}
DESCRIPTION
===========
The C{r.ComponentSpec} policy includes the filter expressions that specify
the default assignment of files to components. The expressions are
considered in the order in which they are evaluated in the recipe, and the
first match wins. After all the recipe-provided expressions are
evaluated, the default expressions are evaluated. If no expression
matches, then the file is assigned to the C{catchall} component.
Note that in the C{I{packagename}:I{componentname}} form, the C{:}
must be literal, it cannot be part of a macro.
KEYWORDS
========
B{catchall} : Specify the component name which gets all otherwise
unassigned files. Default: C{runtime}
EXAMPLES
========
C{r.ComponentSpec('manual', '%(contentdir)s/manual/')}
Uses C{r.ComponentSpec} to specify that all files below the
C{%(contentdir)s/manual/} directory are part of the C{:manual} component.
C{r.ComponentSpec('foo:bar', '%(sharedir)s/foo/')}
Uses C{r.ComponentSpec} to specify that all files below the
C{%(sharedir)s/foo/} directory are part of the C{:bar} component
of the C{foo} package, avoiding the need to invoke both the
C{ComponentSpec} and C{PackageSpec} policies.
C{r.ComponentSpec(catchall='data')}
Uses C{r.ComponentSpec} to specify that all files not otherwise specified
go into the C{:data} component instead of the default C{:runtime}
component.
"""
requires = (
('Config', policy.REQUIRED_PRIOR),
('PackageSpec', policy.REQUIRED_SUBSEQUENT),
)
keywords = { 'catchall': 'runtime' }
def __init__(self, *args, **keywords):
"""
@keyword catchall: The component name which gets all otherwise
unassigned files. Default: C{runtime}
"""
_filterSpec.__init__(self, *args, **keywords)
self.configFilters = []
self.derivedFilters = []
def updateArgs(self, *args, **keywords):
if '_config' in keywords:
configPath=keywords.pop('_config')
self.recipe.PackageSpec(_config=configPath)
if args:
name = args[0]
if ':' in name:
package, name = name.split(':')
args = list(itertools.chain([name], args[1:]))
if package:
# we've got a package as well as a component, pass it on
pkgargs = list(itertools.chain((package,), args[1:]))
self.recipe.PackageSpec(*pkgargs)
_filterSpec.updateArgs(self, *args, **keywords)
def doProcess(self, recipe):
compFilters = []
self.macros = recipe.macros
self.rootdir = self.rootdir % recipe.macros
self.loadFilterDirs()
# The extras need to come before base in order to override decisions
# in the base subfilters; invariants come first for those very few
# specs that absolutely should not be overridden in recipes.
for filteritem in itertools.chain(self.invariantFilters,
self.extraFilters,
self.derivedFilters,
self.configFilters,
self.baseFilters):
if not isinstance(filteritem, (filter.Filter, filter.PathSet)):
name = filteritem[0] % self.macros
assert(name != 'source')
args, kwargs = self.filterExpArgs(filteritem[1:], name=name)
filteritem = filter.Filter(*args, **kwargs)
compFilters.append(filteritem)
# by default, everything that hasn't matched a filter pattern yet
# goes in the catchall component ('runtime' by default)
compFilters.append(filter.Filter('.*', self.macros, name=self.catchall))
# pass these down to PackageSpec for building the package
recipe.PackageSpec(compFilters=compFilters)
def loadFilterDirs(self):
invariantFilterMap = {}
baseFilterMap = {}
self.invariantFilters = []
self.baseFilters = []
# Load all component python files
for componentDir in self.recipe.cfg.componentDirs:
for filterType, map in (('invariant', invariantFilterMap),
('base', baseFilterMap)):
oneDir = os.sep.join((componentDir, filterType))
if not os.path.isdir(oneDir):
continue
for filename in os.listdir(oneDir):
fullpath = os.sep.join((oneDir, filename))
if (not filename.endswith('.py') or
not util.isregular(fullpath)):
continue
self.loadFilter(filterType, map, filename, fullpath)
# populate the lists with dependency-sorted information
for filterType, map, filterList in (
('invariant', invariantFilterMap, self.invariantFilters),
('base', baseFilterMap, self.baseFilters)):
dg = graph.DirectedGraph()
for filterName in map.keys():
dg.addNode(filterName)
filter, follows, precedes = map[filterName]
def warnMissing(missing):
self.error('%s depends on missing %s', filterName, missing)
for prior in follows:
if not prior in map:
warnMissing(prior)
dg.addEdge(prior, filterName)
for subsequent in precedes:
if not subsequent in map:
warnMissing(subsequent)
dg.addEdge(filterName, subsequent)
# test for dependency loops
depLoops = [x for x in dg.getStronglyConnectedComponents()
if len(x) > 1]
if depLoops:
self.error('dependency loop(s) in component filters: %s',
' '.join(sorted(':'.join(x)
for x in sorted(list(depLoops)))))
return
# Create a stably-sorted list of config filters where
# the filter is not empty. (An empty filter with both
# follows and precedes specified can be used to induce
# ordering between otherwise unrelated components.)
#for name in dg.getTotalOrdering(nodeSort=lambda a, b: cmp(a,b)):
for name in dg.getTotalOrdering():
filters = map[name][0]
if not filters:
continue
componentName = filters[0]
for filterExp in filters[1]:
filterList.append((componentName, filterExp))
def loadFilter(self, filterType, map, filename, fullpath):
# do not load shared libraries
desc = [x for x in imp.get_suffixes() if x[0] == '.py'][0]
f = file(fullpath)
modname = filename[:-3]
m = imp.load_module(modname, f, fullpath, desc)
f.close()
if not 'filters' in m.__dict__:
self.warn('%s missing "filters"; not a valid component'
' specification file', fullpath)
return
filters = m.__dict__['filters']
if filters and len(filters) > 1 and type(filters[1]) not in (list,
tuple):
self.error('invalid expression in %s: filters specification'
" must be ('name', ('expression', ...))", fullpath)
follows = ()
if 'follows' in m.__dict__:
follows = m.__dict__['follows']
precedes = ()
if 'precedes' in m.__dict__:
precedes = m.__dict__['precedes']
map[modname] = (filters, follows, precedes)
class PackageSpec(_filterSpec):
"""
NAME
====
B{C{r.PackageSpec()}} - Determines which package each file is in
SYNOPSIS
========
C{r.PackageSpec(I{packagename}, I{filterexp})}
DESCRIPTION
===========
The C{r.PackageSpec()} policy determines which package each file
is in. (Use C{r.ComponentSpec()} to specify the component without
specifying the package, or to specify C{I{package}:I{component}}
in one invocation.)
EXAMPLES
========
C{r.PackageSpec('openssh-server', '%(sysconfdir)s/pam.d/sshd')}
Specifies that the file C{%(sysconfdir)s/pam.d/sshd} is in the package
C{openssh-server} rather than the default (which in this case would have
been C{openssh} because this example was provided by C{openssh.recipe}).
"""
requires = (
('ComponentSpec', policy.REQUIRED_PRIOR),
)
keywords = { 'compFilters': None }
def __init__(self, *args, **keywords):
"""
@keyword compFilters: reserved for C{ComponentSpec} to pass information
needed by C{PackageSpec}.
"""
_filterSpec.__init__(self, *args, **keywords)
self.configFiles = []
self.derivedFilters = []
def updateArgs(self, *args, **keywords):
if '_config' in keywords:
self.configFiles.append(keywords.pop('_config'))
# keep a list of packages filtered for in PackageSpec in the recipe
if args:
newTrove = args[0] % self.recipe.macros
self.recipe.packages[newTrove] = True
_filterSpec.updateArgs(self, *args, **keywords)
def preProcess(self):
self.pkgFilters = []
recipe = self.recipe
self.destdir = recipe.macros.destdir
if self.exceptions:
self.warn('PackageSpec does not honor exceptions')
self.exceptions = None
if self.inclusions:
# would have an effect only with exceptions listed, so no warning...
self.inclusions = None
# userinfo and groupinfo are invariant filters, so they must come first
for infoType in ('user', 'group'):
infoDir = '%%(%sinfodir)s' % infoType % self.macros
realDir = util.joinPaths(self.destdir, infoDir)
if not os.path.isdir(realDir):
continue
for infoPkgName in os.listdir(realDir):
pkgPath = util.joinPaths(infoDir, infoPkgName)
self.pkgFilters.append( \
filter.Filter(pkgPath, self.macros,
name = 'info-%s' % infoPkgName))
# extras need to come before derived so that derived packages
# can change the package to which a file is assigned
for filteritem in itertools.chain(self.extraFilters,
self.derivedFilters):
if not isinstance(filteritem, (filter.Filter, filter.PathSet)):
name = filteritem[0] % self.macros
if not trove.troveNameIsValid(name):
self.error('%s is not a valid package name', name)
args, kwargs = self.filterExpArgs(filteritem[1:], name=name)
self.pkgFilters.append(filter.Filter(*args, **kwargs))
else:
self.pkgFilters.append(filteritem)
# by default, everything that hasn't matched a pattern in the
# main package filter goes in the package named recipe.name
self.pkgFilters.append(filter.Filter('.*', self.macros, name=recipe.name))
# OK, all the filters exist, build an autopackage object that
# knows about them
recipe.autopkg = buildpackage.AutoBuildPackage(
self.pkgFilters, self.compFilters, recipe)
self.autopkg = recipe.autopkg
def do(self):
# Walk capsule contents ignored by doFile
for filePath, _, componentName in self.recipe._iterCapsulePaths():
realPath = self.destdir + filePath
if util.exists(realPath):
# Files that do not exist on the filesystem (devices)
# are handled separately
self.autopkg.addFile(filePath, realPath, componentName)
# Walk normal files
_filterSpec.do(self)
def doFile(self, path):
# all policy classes after this require that the initial tree is built
if not self.recipe._getCapsulePathsForFile(path):
realPath = self.destdir + path
self.autopkg.addFile(path, realPath)
def postProcess(self):
# flag all config files
for confname in self.configFiles:
self.recipe.autopkg.pathMap[confname].flags.isConfig(True)
class InitialContents(policy.Policy):
"""
NAME
====
B{C{r.InitialContents()}} - Mark only explicit inclusions as initial
contents files
SYNOPSIS
========
C{InitialContents([I{filterexp}])}
DESCRIPTION
===========
By default, C{r.InitialContents()} does not apply to any files.
It is used to specify all files that Conary needs to mark as
providing only initial contents. When Conary installs or
updates one of these files, it will never replace existing
contents; it uses the provided contents only if the file does
not yet exist at the time Conary is creating it.
A file marked as an InitialContents file cannot also be marked
as a Transient file or a Config file. Conary enforces this
requirement.
EXAMPLES
========
C{r.InitialContents('%(sysconfdir)s/conary/.*gpg')}
The files C{%(sysconfdir)s/conary/.*gpg} are being marked as initial
contents files. Conary will use those contents when creating the files
the first time, but will never overwrite existing contents in those files.
"""
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('Config', policy.REQUIRED_PRIOR),
)
bucket = policy.PACKAGE_CREATION
processUnmodified = True
invariantexceptions = [ '%(userinfodir)s/', '%(groupinfodir)s' ]
invariantinclusions = ['%(localstatedir)s/run/',
'%(localstatedir)s/log/',
'%(cachedir)s/']
def postInit(self, *args, **kwargs):
self.recipe.Config(exceptions = self.invariantinclusions,
allowUnusedFilters = True)
def updateArgs(self, *args, **keywords):
policy.Policy.updateArgs(self, *args, **keywords)
self.recipe.Config(exceptions=args, allowUnusedFilters = True)
def doFile(self, filename):
fullpath = self.macros.destdir + filename
recipe = self.recipe
if os.path.isfile(fullpath) and util.isregular(fullpath):
self.info(filename)
f = recipe.autopkg.pathMap[filename]
f.flags.isInitialContents(True)
if f.flags.isConfig():
self.error(
'%s is marked as both a configuration file and'
' an initial contents file', filename)
class Transient(policy.Policy):
"""
NAME
====
B{C{r.Transient()}} - Mark files that have transient contents
SYNOPSIS
========
C{r.Transient([I{filterexp}])}
DESCRIPTION
===========
The C{r.Transient()} policy marks files as containing transient
contents. It automatically marks the two most common uses of transient
contents: python and emacs byte-compiled files
(C{.pyc}, C{.pyo}, and C{.elc} files).
Files containing transient contents are almost the opposite of
configuration files: their contents should be overwritten by
the new contents without question at update time, even if the
contents in the filesystem have changed. (Conary raises an
error if file contents have changed in the filesystem for normal
files.)
A file marked as a Transient file cannot also be marked as an
InitialContents file or a Config file. Conary enforces this
requirement.
EXAMPLES
========
C{r.Transient('%(libdir)s/firefox/extensions/')}
Marks all the files in the directory C{%(libdir)s/firefox/extensions/} as
having transient contents.
"""
bucket = policy.PACKAGE_CREATION
filetree = policy.PACKAGE
processUnmodified = True
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('Config', policy.REQUIRED_PRIOR),
('InitialContents', policy.REQUIRED_PRIOR),
)
invariantinclusions = [
r'..*\.py(c|o)$',
r'..*\.elc$',
r'%(userinfodir)s/',
r'%(groupinfodir)s'
]
def doFile(self, filename):
fullpath = self.macros.destdir + filename
if os.path.isfile(fullpath) and util.isregular(fullpath):
recipe = self.recipe
f = recipe.autopkg.pathMap[filename]
f.flags.isTransient(True)
if f.flags.isConfig() or f.flags.isInitialContents():
self.error(
'%s is marked as both a transient file and'
' a configuration or initial contents file', filename)
class TagDescription(policy.Policy):
"""
NAME
====
B{C{r.TagDescription()}} - Marks tag description files
SYNOPSIS
========
C{r.TagDescription([I{filterexp}])}
DESCRIPTION
===========
The C{r.TagDescription} class marks tag description files as
such so that conary handles them correctly. Every file in
C{%(tagdescriptiondir)s/} is marked as a tag description file by default.
No file outside of C{%(tagdescriptiondir)s/} will be considered by this
policy.
EXAMPLES
========
This policy is not called explicitly.
"""
bucket = policy.PACKAGE_CREATION
processUnmodified = False
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
)
invariantsubtrees = [ '%(tagdescriptiondir)s/' ]
def doFile(self, path):
if self.recipe._getCapsulePathsForFile(path):
return
fullpath = self.macros.destdir + path
if os.path.isfile(fullpath) and util.isregular(fullpath):
self.info('conary tag file: %s', path)
self.recipe.autopkg.pathMap[path].tags.set("tagdescription")
class TagHandler(policy.Policy):
"""
NAME
====
B{C{r.TagHandler()}} - Mark tag handler files
SYNOPSIS
========
C{r.TagHandler([I{filterexp}])}
DESCRIPTION
===========
All files in C{%(taghandlerdir)s/} are marked as a tag
handler files.
EXAMPLES
========
This policy is not called explicitly.
"""
bucket = policy.PACKAGE_CREATION
processUnmodified = False
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
)
invariantsubtrees = [ '%(taghandlerdir)s/' ]
def doFile(self, path):
if self.recipe._getCapsulePathsForFile(path):
return
fullpath = self.macros.destdir + path
if os.path.isfile(fullpath) and util.isregular(fullpath):
self.info('conary tag handler: %s', path)
self.recipe.autopkg.pathMap[path].tags.set("taghandler")
class TagSpec(_addInfo):
"""
NAME
====
B{C{r.TagSpec()}} - Apply tags defined by tag descriptions
SYNOPSIS
========
C{r.TagSpec([I{tagname}, I{filterexp}] || [I{tagname}, I{exceptions=filterexp}])}
DESCRIPTION
===========
The C{r.TagSpec()} policy automatically applies tags defined by tag
descriptions in both the current system and C{%(destdir)s} to all
files in C{%(destdir)s}.
To apply tags manually (removing a dependency on the tag description
file existing when the packages is cooked), use the syntax:
C{r.TagSpec(I{tagname}, I{filterexp})}.
To set an exception to this policy, use:
C{r.TagSpec(I{tagname}, I{exceptions=filterexp})}.
EXAMPLES
========
C{r.TagSpec('initscript', '%(initdir)s/')}
Applies the C{initscript} tag to all files in the directory
C{%(initdir)s/}.
"""
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
)
def doProcess(self, recipe):
self.tagList = []
self.buildReqsComputedForTags = set()
self.suggestBuildRequires = set()
# read the system and %(destdir)s tag databases
for directory in (recipe.macros.destdir+'/etc/conary/tags/',
'/etc/conary/tags/'):
if os.path.isdir(directory):
for filename in os.listdir(directory):
path = util.joinPaths(directory, filename)
self.tagList.append(tags.TagFile(path, recipe.macros, True))
self.fullReqs = self.recipe._getTransitiveBuildRequiresNames()
_addInfo.doProcess(self, recipe)
def markTag(self, name, tag, path, tagFile=None):
# commonly, a tagdescription will nominate a file to be
# tagged, but it will also be set explicitly in the recipe,
# and therefore markTag will be called twice.
if (len(tag.split()) > 1 or
not tag.replace('-', '').replace('_', '').isalnum()):
# handlers for multiple tags require strict tag names:
# no whitespace, only alphanumeric plus - and _ characters
self.error('illegal tag name %s for file %s' %(tag, path))
return
tags = self.recipe.autopkg.pathMap[path].tags
if tag not in tags:
self.info('%s: %s', name, path)
tags.set(tag)
if tagFile and tag not in self.buildReqsComputedForTags:
self.buildReqsComputedForTags.add(tag)
db = self._getDb()
for trove in db.iterTrovesByPath(tagFile.tagFile):
troveName = trove.getName()
if troveName not in self.fullReqs:
# XXX should be error, change after bootstrap
self.warn("%s assigned by %s to file %s, so add '%s'"
' to buildRequires or call r.TagSpec()'
%(tag, tagFile.tagFile, path, troveName))
self.suggestBuildRequires.add(troveName)
def runInfo(self, path):
if self.recipe._getCapsulePathsForFile(path):
# capsules do not participate in the tag protocol
return
excludedTags = {}
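# First apply tags requested explicitly in the recipe (self.included), then tags
# matched by tag description files; excluded paths are collected per tag so they
# can be reported once at the end.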
for tag in self.included:
for filt in self.included[tag]:
if filt.match(path):
isExcluded = False
if tag in self.excluded:
for filt in self.excluded[tag]:
if filt.match(path):
s = excludedTags.setdefault(tag, set())
s.add(path)
isExcluded = True
break
if not isExcluded:
self.markTag(tag, tag, path)
for tag in self.tagList:
if tag.match(path):
if tag.name:
name = tag.name
else:
name = tag.tag
isExcluded = False
if tag.tag in self.excluded:
for filt in self.excluded[tag.tag]:
# exception handling is per-tag, so handled specially
if filt.match(path):
s = excludedTags.setdefault(name, set())
s.add(path)
isExcluded = True
break
if not isExcluded:
self.markTag(name, tag.tag, path, tag)
if excludedTags:
for tag in excludedTags:
self.info('ignoring tag match for %s: %s',
tag, ', '.join(sorted(excludedTags[tag])))
def postProcess(self):
if self.suggestBuildRequires:
self.info('possibly add to buildRequires: %s',
str(sorted(list(self.suggestBuildRequires))))
self.recipe.reportMissingBuildRequires(self.suggestBuildRequires)
class Properties(policy.Policy):
"""
NAME
====
B{C{r.Properties()}} - Read property definition files
SYNOPSIS
========
C{r.Properties(I{exceptions=filterexp} || [I{contents=xml},
I{package=pkg:component}] ||
[I{/path/to/file}, I{filterexp}], I{contents=ipropcontents})}
DESCRIPTION
===========
The C{r.Properties()} policy automatically parses iconfig property
definition files, making the properties available for configuration
management with iconfig.
To add configuration properties manually, use the syntax:
C{r.Properties(I{contents=ipropcontents}, I{package=pkg:component}}
Where contents is the xml string that would normally be stored in the iprop
file and package is the component where to attach the config metadata.
(NOTE: This component must exist)
or
C{r.Properties([I{/path/to/file}, I{filterexp}], I{contents=ipropcontents})}
Where contents is the xml string that would normally be stored in the iprop
file and the path or filterexp matches the files that represent the
component that the property should be attached to.
"""
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
bucket = policy.PACKAGE_CREATION
processUnmodified = True
_supports_file_properties = True
requires = (
# We need to know what component files have been assigned to
('PackageSpec', policy.REQUIRED_PRIOR),
)
def __init__(self, *args, **kwargs):
policy.Policy.__init__(self, *args, **kwargs)
self.ipropFilters = []
self.ipropPaths = [ r'%(prefix)s/lib/iconfig/properties/.*\.iprop' ]
self.contents = []
self.paths = []
self.fileFilters = []
self.propMap = {}
def updateArgs(self, *args, **kwargs):
if 'contents' in kwargs:
contents = kwargs.pop('contents')
pkg = kwargs.pop('package', None)
if pkg is None and args:
for arg in args:
self.paths.append((arg, contents))
else:
self.contents.append((pkg, contents))
policy.Policy.updateArgs(self, *args, **kwargs)
def doProcess(self, recipe):
for filterSpec, iprop in self.paths:
self.fileFilters.append((
filter.Filter(filterSpec, recipe.macros),
iprop,
))
for ipropPath in self.ipropPaths:
self.ipropFilters.append(
filter.Filter(ipropPath, recipe.macros))
policy.Policy.doProcess(self, recipe)
def _getComponent(self, path):
componentMap = self.recipe.autopkg.componentMap
if path not in componentMap:
return
main, comp = componentMap[path].getName().split(':')
return main, comp
def doFile(self, path):
if path not in self.recipe.autopkg.pathMap:
return
for fltr, iprop in self.fileFilters:
if fltr.match(path):
main, comp = self._getComponent(path)
self._parsePropertyData(iprop, main, comp)
# Make sure any remaining files are actually in the root.
fullpath = self.recipe.macros.destdir + path
if not os.path.isfile(fullpath) or not util.isregular(fullpath):
return
# Check to see if this is an iprop file location that we know about.
for fltr in self.ipropFilters:
if fltr.match(path):
break
else:
return
main, comp = self._getComponent(path)
xml = open(fullpath).read()
self._parsePropertyData(xml, main, comp)
def postProcess(self):
for pkg, content in self.contents:
pkg = pkg % self.macros
pkgName, compName = pkg.split(':')
self._parsePropertyData(content, pkgName, compName)
def _parsePropertyData(self, xml, pkgName, compName):
pkgSet = self.propMap.setdefault(xml, set())
if (pkgName, compName) in pkgSet:
return
pkgSet.add((pkgName, compName))
self.recipe._addProperty(trove._PROPERTY_TYPE_SMARTFORM, pkgName,
compName, xml)
class MakeDevices(policy.Policy):
"""
NAME
====
B{C{r.MakeDevices()}} - Make device nodes
SYNOPSIS
========
C{MakeDevices([I{path},] [I{type},] [I{major},] [I{minor},] [I{owner},] [I{groups},] [I{mode}])}
DESCRIPTION
===========
The C{r.MakeDevices()} policy creates device nodes. Conary's
policy of non-root builds requires that these nodes exist only in the
package, and not in the filesystem, as only root may actually create
device nodes.
EXAMPLES
========
C{r.MakeDevices(I{'/dev/tty', 'c', 5, 0, 'root', 'root', mode=0666, package=':dev'})}
Creates the device node C{/dev/tty}, as type 'c' (character, as opposed to
type 'b', or block), with a major number of '5', a minor number of '0',
owner and group both set to 'root', and permissions of 0666.
"""
bucket = policy.PACKAGE_CREATION
processUnmodified = True
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('Ownership', policy.REQUIRED_SUBSEQUENT),
)
def __init__(self, *args, **keywords):
self.devices = []
policy.Policy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
"""
MakeDevices(path, devtype, major, minor, owner, group, mode=0400)
"""
if args:
args = list(args)
l = len(args)
if not ((l > 5) and (l < 9)):
self.recipe.error('MakeDevices: incorrect arguments: %r %r'
%(args, keywords))
mode = keywords.pop('mode', None)
package = keywords.pop('package', None)
if l > 6 and mode is None:
mode = args[6]
if mode is None:
mode = 0400
if l > 7 and package is None:
package = args[7]
self.devices.append(
(args[0:6], {'perms': mode, 'package': package}))
policy.Policy.updateArgs(self, **keywords)
def do(self):
for device, kwargs in self.devices:
r = self.recipe
filename = device[0]
owner = device[4]
group = device[5]
r.Ownership(owner, group, filename)
device[0] = device[0] % r.macros
r.autopkg.addDevice(*device, **kwargs)
class setModes(policy.Policy):
"""
Do not call from recipes; this is used internally by C{r.SetModes},
C{r.ParseManifest}, and unpacking derived packages. This policy
modified modes relative to the mode on the file in the filesystem.
It adds setuid/setgid bits not otherwise set/honored on files on the
filesystem, and sets user r/w/x bits if they were altered for the
purposes of accessing the files during packaging. Otherwise,
it honors the bits found on the filesystem. It does not modify
bits in capsules.
"""
bucket = policy.PACKAGE_CREATION
processUnmodified = True
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('WarnWriteable', policy.REQUIRED_SUBSEQUENT),
('ExcludeDirectories', policy.CONDITIONAL_SUBSEQUENT),
)
def __init__(self, *args, **keywords):
self.sidbits = {}
self.userbits = {}
policy.Policy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
"""
setModes(path(s), [sidbits=int], [userbits=int])
"""
sidbits = keywords.pop('sidbits', None)
userbits = keywords.pop('userbits', None)
for path in args:
if sidbits is not None:
self.sidbits[path] = sidbits
if userbits is not None:
self.userbits[path] = userbits
self.recipe.WarnWriteable(
exceptions=re.escape(path).replace('%', '%%'),
allowUnusedFilters = True)
policy.Policy.updateArgs(self, **keywords)
def doFile(self, path):
# Don't set modes on capsule files
if self.recipe._getCapsulePathsForFile(path):
return
# Skip files that aren't part of the package
if path not in self.recipe.autopkg.pathMap:
return
newmode = oldmode = self.recipe.autopkg.pathMap[path].inode.perms()
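# Start from the packaged file's recorded permissions, then splice back the saved
# user rwx bits and any setuid/setgid bits that were altered or dropped during the
# non-root build.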
if path in self.userbits:
newmode = (newmode & 077077) | self.userbits[path]
if path in self.sidbits and self.sidbits[path]:
newmode |= self.sidbits[path]
self.info('suid/sgid: %s mode 0%o', path, newmode & 07777)
if newmode != oldmode:
self.recipe.autopkg.pathMap[path].inode.perms.set(newmode)
class LinkType(policy.Policy):
"""
NAME
====
B{C{r.LinkType()}} - Ensures only regular, non-configuration files are hardlinked
SYNOPSIS
========
C{r.LinkType([I{filterexp}])}
DESCRIPTION
===========
The C{r.LinkType()} policy ensures that only regular, non-configuration
files are hardlinked.
EXAMPLES
========
This policy is not called explicitly.
"""
bucket = policy.PACKAGE_CREATION
processUnmodified = True
requires = (
('Config', policy.REQUIRED_PRIOR),
('PackageSpec', policy.REQUIRED_PRIOR),
)
def do(self):
for component in self.recipe.autopkg.getComponents():
for path in sorted(component.hardlinkMap.keys()):
if self.recipe.autopkg.pathMap[path].flags.isConfig():
self.error("Config file %s has illegal hard links", path)
for path in component.badhardlinks:
self.error("Special file %s has illegal hard links", path)
class LinkCount(policy.Policy):
"""
NAME
====
B{C{r.LinkCount()}} - Restricts hardlinks across directories.
SYNOPSIS
========
C{LinkCount([I{filterexp}] | [I{exceptions=filterexp}])}
DESCRIPTION
===========
The C{r.LinkCount()} policy restricts hardlinks across directories.
It is generally an error to have hardlinks across directories, except when
the packager knows that there is no reasonable chance that they will be on
separate filesystems.
In cases where the packager is certain hardlinks will not cross
filesystems, a list of regular expressions specifying files
which are excepted from this rule may be passed to C{r.LinkCount}.
EXAMPLES
========
C{r.LinkCount(exceptions='/usr/share/zoneinfo/')}
Uses C{r.LinkCount} to except zoneinfo files, located in
C{/usr/share/zoneinfo/}, from the policy against cross-directory
hardlinks.
"""
bucket = policy.PACKAGE_CREATION
processUnmodified = False
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
)
def __init__(self, *args, **keywords):
policy.Policy.__init__(self, *args, **keywords)
self.excepts = set()
def updateArgs(self, *args, **keywords):
allowUnusedFilters = keywords.pop('allowUnusedFilters', False) or \
self.allowUnusedFilters
exceptions = keywords.pop('exceptions', None)
if exceptions:
if type(exceptions) is str:
self.excepts.add(exceptions)
if not allowUnusedFilters:
self.unusedFilters['exceptions'].add(exceptions)
elif type(exceptions) in (tuple, list):
self.excepts.update(exceptions)
if not allowUnusedFilters:
self.unusedFilters['exceptions'].update(exceptions)
# FIXME: we may want to have another keyword argument
# that passes information down to the buildpackage
# that causes link groups to be broken for some
# directories but not others. We need to research
# first whether this is useful; it may not be.
def do(self):
if self.recipe.getType() == recipe.RECIPE_TYPE_CAPSULE:
return
filters = [(x, filter.Filter(x, self.macros)) for x in self.excepts]
for component in self.recipe.autopkg.getComponents():
for inode in component.linkGroups:
# ensure all in same directory, except for directories
# matching regexps that have been passed in
allPaths = [x for x in component.linkGroups[inode]]
for path in allPaths[:]:
for regexp, f in filters:
if f.match(path):
self.unusedFilters['exceptions'].discard(regexp)
allPaths.remove(path)
dirSet = set(os.path.dirname(x) + '/' for x in allPaths)
if len(dirSet) > 1:
self.error('files %s are hard links across directories %s',
', '.join(sorted(component.linkGroups[inode])),
', '.join(sorted(list(dirSet))))
self.error('If these directories cannot reasonably be'
' on different filesystems, disable this'
' warning by calling'
" r.LinkCount(exceptions=('%s')) or"
" equivalent"
% "', '".join(sorted(list(dirSet))))
class ExcludeDirectories(policy.Policy):
"""
NAME
====
B{C{r.ExcludeDirectories()}} - Exclude directories from package
SYNOPSIS
========
C{r.ExcludeDirectories([I{filterexp}] | [I{exceptions=filterexp}])}
DESCRIPTION
===========
The C{r.ExcludeDirectories} policy causes directories to be
excluded from the package by default. Use
C{r.ExcludeDirectories(exceptions=I{filterexp})} to set exceptions to this
policy, which will cause directories matching the regular expression
C{filterexp} to be included in the package. Remember that Conary
packages cannot share files, including directories, so only one
package installed on a system at any one time can own the same
directory.
There are only three reasons to explicitly package a directory: the
directory needs permissions other than 0755, it needs non-root owner
or group, or it must exist even if it is empty.
Therefore, it should generally not be necessary to invoke this policy
directly. If your directory requires permissions other than 0755, simply
use C{r.SetMode} to specify the permissions, and the directory will be
automatically included. Similarly, if you wish to include an empty
directory with owner or group information, call C{r.Ownership} on that
empty directory.
Because C{r.Ownership} can reasonably be called on an entire
subdirectory tree and indiscriminately applied to files and
directories alike, non-empty directories with owner or group
set will be excluded from packaging unless an exception is
explicitly provided.
If you call C{r.Ownership} with a filter that applies to an
empty directory, but you do not want to package that directory,
you will have to remove the directory with C{r.Remove}.
Packages do not need to explicitly include directories to ensure
existence of a target to place a file in. Conary will appropriately
create the directory, and delete it later if the directory becomes empty.
EXAMPLES
========
C{r.ExcludeDirectories(exceptions='/tftpboot')}
Sets the directory C{/tftpboot} as an exception to the
C{r.ExcludeDirectories} policy, so that the C{/tftpboot}
directory will be included in the package.
"""
bucket = policy.PACKAGE_CREATION
processUnmodified = True
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('Ownership', policy.REQUIRED_PRIOR),
('MakeDevices', policy.CONDITIONAL_PRIOR),
)
invariantinclusions = [ ('.*', stat.S_IFDIR) ]
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
def doFile(self, path):
# temporarily do nothing for capsules, we might do something later
if self.recipe._getCapsulePathsForFile(path):
return
fullpath = self.recipe.macros.destdir + os.sep + path
s = os.lstat(fullpath)
mode = s[stat.ST_MODE]
if mode & 0777 != 0755:
self.info('excluding directory %s with mode %o', path, mode&0777)
elif not os.listdir(fullpath):
d = self.recipe.autopkg.pathMap[path]
if d.inode.owner.freeze() != 'root':
self.info('not excluding empty directory %s'
' because of non-root owner', path)
return
elif d.inode.group.freeze() != 'root':
self.info('not excluding empty directory %s'
' because of non-root group', path)
return
self.info('excluding empty directory %s', path)
            # if it's empty and we're not packaging it, there's no need for it
# to continue to exist on the filesystem to potentially confuse
# other policy actions... see CNP-18
os.rmdir(fullpath)
self.recipe.autopkg.delFile(path)
class ByDefault(policy.Policy):
"""
NAME
====
B{C{r.ByDefault()}} - Determines components to be installed by default
SYNOPSIS
========
C{r.ByDefault([I{inclusions} || C{exceptions}=I{exceptions}])}
DESCRIPTION
===========
The C{r.ByDefault()} policy determines which components should
be installed by default at the time the package is installed on the
system. The default setting for the C{ByDefault} policy is that the
    C{:debuginfo} and C{:test} components are not installed with the package.
The inclusions and exceptions do B{not} specify filenames. They are
either C{I{package}:I{component}} or C{:I{component}}. Inclusions
are considered before exceptions, and inclusions and exceptions are
considered in the order provided in the recipe, and first match wins.
EXAMPLES
========
C{r.ByDefault(exceptions=[':manual'])}
    Uses C{r.ByDefault} so that C{:manual} components are not installed
    by default.
C{r.ByDefault(exceptions=[':manual'])}
C{r.ByDefault('foo:manual')}
If these lines are in the C{bar} package, and there is both a
C{foo:manual} and a C{bar:manual} component, then the C{foo:manual}
component will be installed by default when the C{foo} package is
installed, but the C{bar:manual} component will not be installed by
default when the C{bar} package is installed.
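    A further, hypothetical illustration:
    C{r.ByDefault(':test')}
    Causes the C{:test} component to be installed by default despite the
    built-in exception for it, because inclusions are considered before
    exceptions.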
"""
bucket = policy.PACKAGE_CREATION
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
)
filetree = policy.NO_FILES
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
invariantexceptions = [':test', ':debuginfo']
allowUnusedFilters = True
def doProcess(self, recipe):
if not self.inclusions:
self.inclusions = []
if not self.exceptions:
self.exceptions = []
recipe.setByDefaultOn(frozenset(self.inclusions))
recipe.setByDefaultOff(frozenset(self.exceptions +
self.invariantexceptions))
class _UserGroup(policy.Policy):
"""
Abstract base class that implements marking owner/group dependencies.
"""
bucket = policy.PACKAGE_CREATION
# All classes that descend from _UserGroup must run before the
# Requires policy, as they implicitly depend on it to set the
# file requirements and union the requirements up to the package.
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('Requires', policy.REQUIRED_SUBSEQUENT),
)
filetree = policy.PACKAGE
processUnmodified = True
def setUserGroupDep(self, path, info, depClass):
componentMap = self.recipe.autopkg.componentMap
if path not in componentMap:
return
pkg = componentMap[path]
f = pkg.getFile(path)
if path not in pkg.requiresMap:
pkg.requiresMap[path] = deps.DependencySet()
pkg.requiresMap[path].addDep(depClass, deps.Dependency(info, []))
class Ownership(_UserGroup):
"""
NAME
====
B{C{r.Ownership()}} - Set file ownership
SYNOPSIS
========
C{r.Ownership([I{username},] [I{groupname},] [I{filterexp}])}
DESCRIPTION
===========
The C{r.Ownership()} policy sets user and group ownership of files when
the default of C{root:root} is not appropriate.
List the ownerships in order, most specific first, ending with least
specific. The filespecs will be matched in the order that you provide them.
KEYWORDS
========
None.
EXAMPLES
========
C{r.Ownership('apache', 'apache', '%(localstatedir)s/lib/php/session')}
Sets ownership of C{%(localstatedir)s/lib/php/session} to owner
C{apache}, and group C{apache}.
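    A further, hypothetical illustration of ordering (paths invented):
    C{r.Ownership('mail', 'mail', '%(localstatedir)s/spool/mail/.*')}
    C{r.Ownership('root', 'mail', '%(localstatedir)s/spool/.*')}
    Because filespecs are matched in the order provided and the first
    match wins, files under C{%(localstatedir)s/spool/mail/} are owned by
    C{mail:mail}, while other files under C{%(localstatedir)s/spool/} are
    owned by C{root:mail}.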
"""
def __init__(self, *args, **keywords):
self.filespecs = []
self.systemusers = ('root',)
self.systemgroups = ('root',)
policy.Policy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
if args:
for filespec in args[2:]:
self.filespecs.append((filespec, args[0], args[1]))
policy.Policy.updateArgs(self, **keywords)
def doProcess(self, recipe):
# we must NEVER take ownership from the filesystem
assert(not self.exceptions)
self.rootdir = self.rootdir % recipe.macros
self.fileFilters = []
for (filespec, user, group) in self.filespecs:
self.fileFilters.append(
(filter.Filter(filespec, recipe.macros),
user %recipe.macros,
group %recipe.macros))
del self.filespecs
policy.Policy.doProcess(self, recipe)
def doFile(self, path):
if self.recipe._getCapsulePathsForFile(path):
return
pkgfile = self.recipe.autopkg.pathMap[path]
pkgOwner = pkgfile.inode.owner()
pkgGroup = pkgfile.inode.group()
bestOwner = pkgOwner
bestGroup = pkgGroup
for (f, owner, group) in self.fileFilters:
if f.match(path):
bestOwner, bestGroup = owner, group
break
if bestOwner != pkgOwner:
pkgfile.inode.owner.set(bestOwner)
if bestGroup != pkgGroup:
pkgfile.inode.group.set(bestGroup)
if bestOwner and bestOwner not in self.systemusers:
self.setUserGroupDep(path, bestOwner, deps.UserInfoDependencies)
if bestGroup and bestGroup not in self.systemgroups:
self.setUserGroupDep(path, bestGroup, deps.GroupInfoDependencies)
class _Utilize(_UserGroup):
"""
Pure virtual base class for C{UtilizeUser} and C{UtilizeGroup}
"""
def __init__(self, *args, **keywords):
self.filespecs = []
policy.Policy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
"""
call as::
UtilizeFoo(item, filespec(s)...)
List them in order, most specific first, ending with most
general; the filespecs will be matched in the order that
you provide them.
"""
item = args[0] % self.recipe.macros
if args:
for filespec in args[1:]:
self.filespecs.append((filespec, item))
policy.Policy.updateArgs(self, **keywords)
def doProcess(self, recipe):
self.rootdir = self.rootdir % recipe.macros
self.fileFilters = []
for (filespec, item) in self.filespecs:
self.fileFilters.append(
(filter.Filter(filespec, recipe.macros), item))
del self.filespecs
policy.Policy.doProcess(self, recipe)
def doFile(self, path):
for (f, item) in self.fileFilters:
if f.match(path):
self._markItem(path, item)
return
def _markItem(self, path, item):
# pure virtual
assert(False)
class UtilizeUser(_Utilize):
"""
NAME
====
B{C{r.UtilizeUser()}} - Marks files as requiring a user definition to exist
SYNOPSIS
========
C{r.UtilizeUser([I{username}, I{filterexp}])}
DESCRIPTION
===========
The C{r.UtilizeUser} policy marks files as requiring a user definition
to exist even though the file is not owned by that user.
This is particularly useful for daemons that are setuid root
    and change their user id to a user id with no filesystem permissions
after they start.
EXAMPLES
========
C{r.UtilizeUser('sshd', '%(sbindir)s/sshd')}
Marks the file C{%(sbindir)s/sshd} as requiring the user definition
'sshd' although the file is not owned by the 'sshd' user.
"""
def _markItem(self, path, user):
if not self.recipe._getCapsulePathsForFile(path):
self.info('user %s: %s' % (user, path))
self.setUserGroupDep(path, user, deps.UserInfoDependencies)
class UtilizeGroup(_Utilize):
"""
NAME
====
    B{C{r.UtilizeGroup()}} - Marks files as requiring a group definition to
exist
SYNOPSIS
========
C{r.UtilizeGroup([groupname, filterexp])}
DESCRIPTION
===========
The C{r.UtilizeGroup} policy marks files as requiring a group definition
to exist even though the file is not owned by that group.
This is particularly useful for daemons that are setuid root
    and change their group id to a group id with no filesystem permissions
after they start.
EXAMPLES
========
C{r.UtilizeGroup('users', '%(sysconfdir)s/default/useradd')}
Marks the file C{%(sysconfdir)s/default/useradd} as requiring the group
definition 'users' although the file is not owned by the 'users' group.
"""
def _markItem(self, path, group):
if not self.recipe._getCapsulePathsForFile(path):
self.info('group %s: %s' % (group, path))
self.setUserGroupDep(path, group, deps.GroupInfoDependencies)
class ComponentRequires(policy.Policy):
"""
NAME
====
B{C{r.ComponentRequires()}} - Create automatic intra-package,
inter-component dependencies
SYNOPSIS
========
C{r.ComponentRequires([{'I{componentname}': I{requiringComponentSet}}] |
[{'I{packagename}': {'I{componentname}': I{requiringComponentSet}}}])}
DESCRIPTION
===========
The C{r.ComponentRequires()} policy creates automatic,
intra-package, inter-component dependencies, such as a corresponding
dependency between C{:lib} and C{:data} components.
Changes are passed in using dictionaries, both for additions that
are specific to a specific package, and additions that apply
generally to all binary packages being cooked from one recipe.
For general changes that are not specific to a package, use this syntax:
C{r.ComponentRequires({'I{componentname}': I{requiringComponentSet}})}.
For package-specific changes, you need to specify packages as well
as components:
    C{r.ComponentRequires({'I{packagename}': {'I{componentname}': I{requiringComponentSet}}})}.
By default, both C{:lib} and C{:runtime} components (if they exist)
require the C{:data} component (if it exists). If you call
C{r.ComponentRequires({'data': set(('lib',))})}, you limit it
so that C{:runtime} components will not require C{:data} components
for this recipe.
In recipes that create more than one binary package, you may need
to limit your changes to a single binary package. To do so, use
the package-specific syntax. For example, to remove the C{:runtime}
requirement on C{:data} only for the C{foo} package, call:
    C{r.ComponentRequires({'foo': {'data': set(('lib',))}})}.
Note that C{r.ComponentRequires} cannot require capability flags; use
C{r.Requires} if you need to specify requirements, including capability
flags.
EXAMPLES
========
C{r.ComponentRequires({'openssl': {'config': set(('runtime', 'lib'))}})}
Uses C{r.ComponentRequires} to create dependencies in a top-level manner
for the C{:runtime} and C{:lib} component sets to require the
C{:config} component for the C{openssl} package.
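    A further, hypothetical illustration of the package-specific form:
    C{r.ComponentRequires({'foo': {'data': set(('lib',))}})}
    Limits the default dependencies only within the C{foo} package, so
    that C{foo:runtime} no longer requires C{foo:data} while C{foo:lib}
    still does; any other packages built from the same recipe keep the
    default behavior.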
"""
bucket = policy.PACKAGE_CREATION
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('ExcludeDirectories', policy.CONDITIONAL_PRIOR),
)
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
def __init__(self, *args, **keywords):
self.depMap = {
# component: components that require it if they both exist
'data': frozenset(('lib', 'runtime', 'devellib', 'cil', 'java',
'perl', 'python', 'ruby')),
'devellib': frozenset(('devel',)),
'lib': frozenset(('devel', 'devellib', 'runtime')),
'config': frozenset(('runtime', 'lib', 'devellib', 'devel')),
}
self.overridesMap = {}
policy.Policy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
d = args[0]
if isinstance(d[d.keys()[0]], dict): # dict of dicts
for packageName in d:
if packageName not in self.overridesMap:
# start with defaults, then override them individually
o = {}
o.update(self.depMap)
self.overridesMap[packageName] = o
self.overridesMap[packageName].update(d[packageName])
else: # dict of sets
self.depMap.update(d)
def do(self):
flags = []
if self.recipe.isCrossCompileTool():
flags.append((_getTargetDepFlag(self.macros), deps.FLAG_SENSE_REQUIRED))
components = self.recipe.autopkg.components
for packageName in [x.name for x in self.recipe.autopkg.packageMap]:
if packageName in self.overridesMap:
d = self.overridesMap[packageName]
else:
d = self.depMap
for requiredComponent in d:
for requiringComponent in d[requiredComponent]:
reqName = ':'.join((packageName, requiredComponent))
wantName = ':'.join((packageName, requiringComponent))
if (reqName in components and wantName in components and
components[reqName] and components[wantName]):
if (d == self.depMap and
reqName in self.recipe._componentReqs and
wantName in self.recipe._componentReqs):
# this is an automatically generated dependency
# which was not in the parent of a derived
                            # package. don't add it here either
continue
# Note: this does not add dependencies to files;
# these dependencies are insufficiently specific
# to attach to files.
ds = deps.DependencySet()
depClass = deps.TroveDependencies
ds.addDep(depClass, deps.Dependency(reqName, flags))
p = components[wantName]
p.requires.union(ds)
class ComponentProvides(policy.Policy):
"""
NAME
====
B{C{r.ComponentProvides()}} - Causes each trove to explicitly provide
itself.
SYNOPSIS
========
C{r.ComponentProvides(I{flags})}
DESCRIPTION
===========
The C{r.ComponentProvides()} policy causes each trove to explicitly
provide its name. Call it to provide optional capability flags
consisting of a single string, or a list, tuple, or set of strings,
It is impossible to provide a capability flag for one component but
not another within a single package.
EXAMPLES
========
C{r.ComponentProvides("addcolumn")}
Uses C{r.ComponentProvides} in the context of the sqlite recipe, and
causes sqlite to provide itself explicitly with the capability flag
C{addcolumn}.
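    A further, hypothetical illustration (the flag names are invented):
    C{r.ComponentProvides(('ssl', 'ipv6'))}
    Passes several capability flags at once; every component built from
    the recipe then provides itself with both the C{ssl} and C{ipv6}
    flags.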
"""
bucket = policy.PACKAGE_CREATION
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('ExcludeDirectories', policy.CONDITIONAL_PRIOR),
)
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
def __init__(self, *args, **keywords):
self.flags = set()
self.excepts = set()
policy.Policy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
if 'exceptions' in keywords:
exceptions = keywords.pop('exceptions')
if type(exceptions) is str:
self.excepts.add(exceptions)
elif type(exceptions) in (tuple, list):
self.excepts.update(set(exceptions))
if not args:
return
if len(args) >= 2:
# update the documentation if we ever support the
# pkgname, flags calling convention
#pkgname = args[0]
flags = args[1]
else:
flags = args[0]
if not isinstance(flags, (list, tuple, set)):
flags=(flags,)
self.flags |= set(flags)
def do(self):
self.excepts = set(re.compile(x) for x in self.excepts)
self.flags = set(x for x in self.flags
if not [y.match(x) for y in self.excepts])
if self.flags:
flags = [ (x % self.macros, deps.FLAG_SENSE_REQUIRED)
for x in self.flags ]
else:
flags = []
if self.recipe.isCrossCompileTool():
flags.append(('target-%s' % self.macros.target,
deps.FLAG_SENSE_REQUIRED))
for component in self.recipe.autopkg.components.values():
component.provides.addDep(deps.TroveDependencies,
deps.Dependency(component.name, flags))
def _getTargetDepFlag(macros):
return 'target-%s' % macros.target
class _dependency(policy.Policy):
"""
Internal class for shared code between Provides and Requires
"""
def __init__(self, *args, **kwargs):
# bootstrap keeping only one copy of these around
self.bootstrapPythonFlags = None
self.bootstrapSysPath = []
self.bootstrapPerlIncPath = []
self.bootstrapRubyLibs = []
self.cachedProviders = {}
self.pythonFlagNamespace = None
self.removeFlagsByDependencyClass = None # pre-transform
self.removeFlagsByDependencyClassMap = {}
def updateArgs(self, *args, **keywords):
removeFlagsByDependencyClass = keywords.pop(
'removeFlagsByDependencyClass', None)
if removeFlagsByDependencyClass is not None:
clsName, ignoreFlags = removeFlagsByDependencyClass
cls = deps.dependencyClassesByName[clsName]
l = self.removeFlagsByDependencyClassMap.setdefault(cls, [])
if isinstance(ignoreFlags, (list, set, tuple)):
l.append(set(ignoreFlags))
else:
l.append(re.compile(ignoreFlags))
policy.Policy.updateArgs(self, **keywords)
def preProcess(self):
self.CILPolicyRE = re.compile(r'.*mono/.*/policy.*/policy.*\.config$')
self.legalCharsRE = re.compile('[.0-9A-Za-z_+-/]')
self.pythonInterpRE = re.compile(r'\.[a-z]+-\d\dm?')
# interpolate macros, using canonical path form with no trailing /
self.sonameSubtrees = set(os.path.normpath(x % self.macros)
for x in self.sonameSubtrees)
self.pythonFlagCache = {}
self.pythonTroveFlagCache = {}
self.pythonVersionCache = {}
def _hasContents(self, m, contents):
"""
Return False if contents is set and m does not have that contents
"""
if contents and (contents not in m.contents or not m.contents[contents]):
return False
return True
def _isELF(self, m, contents=None):
"Test whether is ELF file and optionally has certain contents"
# Note: for provides, check for 'abi' not 'provides' because we
# can provide the filename even if there is no provides list
# as long as a DT_NEEDED entry has been present to set the abi
return m and m.name == 'ELF' and self._hasContents(m, contents)
def _isPython(self, path):
return path.endswith('.py') or path.endswith('.pyc')
def _isPythonModuleCandidate(self, path):
return path.endswith('.so') or self._isPython(path)
def _runPythonScript(self, binPath, destdir, libdir, scriptLines):
script = '\n'.join(scriptLines)
environ = {}
if binPath.startswith(destdir):
environ['LD_LIBRARY_PATH'] = destdir + libdir
proc = subprocess.Popen([binPath, '-Ec', script],
executable=binPath,
stdout=subprocess.PIPE,
shell=False,
env=environ,
)
stdout, _ = proc.communicate()
if proc.returncode:
raise RuntimeError("Process exited with status %s" %
(proc.returncode,))
return stdout
def _getPythonVersion(self, pythonPath, destdir, libdir):
if pythonPath not in self.pythonVersionCache:
try:
stdout = self._runPythonScript(pythonPath, destdir, libdir,
["import sys", "print('%d.%d' % sys.version_info[:2])"])
self.pythonVersionCache[pythonPath] = stdout.strip()
except (OSError, RuntimeError):
self.warn("Unable to determine Python version directly; "
"guessing based on path.")
self.pythonVersionCache[pythonPath] = self._getPythonVersionFromPath(pythonPath, destdir)
return self.pythonVersionCache[pythonPath]
def _getPythonSysPath(self, pythonPath, destdir, libdir, useDestDir=False):
"""Return the system path for the python interpreter at C{pythonPath}
@param pythonPath: Path to the target python interpreter
@param destdir: Destination root, in case of a python bootstrap
@param libdir: Destination libdir, in case of a python bootstrap
@param useDestDir: If True, look in the destdir instead.
"""
script = ["import sys, site"]
if useDestDir:
# Repoint site.py at the destdir so it picks up .pth files there.
script.extend([
"sys.path = []",
"sys.prefix = %r + sys.prefix" % (destdir,),
"sys.exec_prefix = %r + sys.exec_prefix" % (destdir,),
"site.PREFIXES = [sys.prefix, sys.exec_prefix]",
"site.addsitepackages(None)",
])
script.append(r"print('\0'.join(sys.path))")
try:
stdout = self._runPythonScript(pythonPath, destdir, libdir, script)
except (OSError, RuntimeError):
# something went wrong, don't trust any output
self.info('Could not run system python "%s", guessing sys.path...',
pythonPath)
sysPath = []
else:
sysPath = [x.strip() for x in stdout.split('\0') if x.strip()]
if not sysPath and not useDestDir:
# probably a cross-build -- let's try a decent assumption
# for the syspath.
self.info("Failed to detect system python path, using fallback")
pyVer = self._getPythonVersionFromPath(pythonPath, destdir)
if not pyVer and self.bootstrapPythonFlags is not None:
pyVer = self._getPythonVersionFromFlags(
self.bootstrapPythonFlags)
if pyVer and self.bootstrapSysPath is not None:
lib = self.recipe.macros.lib
# this list needs to include all sys.path elements that
# might be needed for python per se -- note that
# bootstrapPythonFlags and bootstrapSysPath go
# together
sysPath = self.bootstrapSysPath + [
'/usr/%s/%s' %(lib, pyVer),
'/usr/%s/%s/plat-linux2' %(lib, pyVer),
'/usr/%s/%s/lib-tk' %(lib, pyVer),
'/usr/%s/%s/lib-dynload' %(lib, pyVer),
'/usr/%s/%s/site-packages' %(lib, pyVer),
# for purelib python on x86_64
'/usr/lib/%s/site-packages' %pyVer,
]
return sysPath
def _warnPythonPathNotInDB(self, pathName):
self.warn('%s found on system but not provided by'
' system database; python requirements'
' may be generated incorrectly as a result', pathName)
return set([])
def _getPythonTroveFlags(self, pathName):
if pathName in self.pythonTroveFlagCache:
return self.pythonTroveFlagCache[pathName]
db = self._getDb()
foundPath = False
pythonFlags = set()
pythonTroveList = db.iterTrovesByPath(pathName)
if pythonTroveList:
depContainer = pythonTroveList[0]
assert(depContainer.getName())
foundPath = True
for dep in depContainer.getRequires().iterDepsByClass(
deps.PythonDependencies):
flagNames = [x[0] for x in dep.getFlags()[0]]
pythonFlags.update(flagNames)
self.pythonTroveFlagCache[pathName] = pythonFlags
if not foundPath:
self.pythonTroveFlagCache[pathName] = self._warnPythonPathNotInDB(
pathName)
return self.pythonTroveFlagCache[pathName]
def _getPythonFlags(self, pathName, bootstrapPythonFlags=None):
if pathName in self.pythonFlagCache:
return self.pythonFlagCache[pathName]
if bootstrapPythonFlags:
self.pythonFlagCache[pathName] = bootstrapPythonFlags
return self.pythonFlagCache[pathName]
db = self._getDb()
foundPath = False
# FIXME: This should be iterFilesByPath when implemented (CNY-1833)
# For now, cache all the python deps in all the files in the
# trove(s) so that we iterate over each trove only once
containingTroveList = db.iterTrovesByPath(pathName)
for containerTrove in containingTroveList:
for pathid, p, fileid, v in containerTrove.iterFileList():
if pathName == p:
foundPath = True
pythonFlags = set()
f = files.ThawFile(db.getFileStream(fileid), pathid)
for dep in f.provides().iterDepsByClass(
deps.PythonDependencies):
flagNames = [x[0] for x in dep.getFlags()[0]]
pythonFlags.update(flagNames)
self.pythonFlagCache[p] = pythonFlags
if not foundPath:
self.pythonFlagCache[pathName] = self._warnPythonPathNotInDB(
pathName)
return self.pythonFlagCache[pathName]
def _getPythonFlagsFromPath(self, pathName):
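        # Illustrative example (the path is invented): for
        # '/usr/lib64/python2.6/site-packages/foo/bar.py' this yields the
        # flags 'lib64' and '2.6' -- the libdir component and the python
        # version encoded in the path -- optionally prefixed with
        # self.pythonFlagNamespace.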
pathList = pathName.split('/')
foundLib = False
foundVer = False
flags = set()
for dirName in pathList:
if not foundVer and not foundLib and dirName.startswith('lib'):
# lib will always come before ver
foundLib = True
flags.add(dirName)
elif not foundVer and dirName.startswith('python'):
foundVer = True
flags.add(dirName[6:])
if foundLib and foundVer:
break
if self.pythonFlagNamespace:
flags = set('%s:%s' %(self.pythonFlagNamespace, x) for x in flags)
return flags
def _stringIsPythonVersion(self, s):
return not set(s).difference(set('.0123456789'))
def _getPythonVersionFromFlags(self, flags):
nameSpace = self.pythonFlagNamespace
for flag in flags:
if nameSpace and flag.startswith(nameSpace):
flag = flag[len(nameSpace):]
if self._stringIsPythonVersion(flag):
return 'python'+flag
def _getPythonVersionFromPath(self, pathName, destdir):
if destdir and pathName.startswith(destdir):
pathName = pathName[len(destdir):]
pathList = pathName.split('/')
for dirName in pathList:
if dirName.startswith('python') and self._stringIsPythonVersion(
dirName[6:]):
# python2.4 or python2.5 or python3.9 but not python.so
return dirName
return ''
def _isCIL(self, m):
return m and m.name == 'CIL'
def _isJava(self, m, contents=None):
return m and isinstance(m, (magic.jar, magic.java)) and self._hasContents(m, contents)
def _isPerlModule(self, path):
return (path.endswith('.pm') or
path.endswith('.pl') or
path.endswith('.ph'))
def _isPerl(self, path, m, f):
return self._isPerlModule(path) or (
f.inode.perms() & 0111 and m and m.name == 'script'
and 'interpreter' in m.contents
and '/bin/perl' in m.contents['interpreter'])
def _createELFDepSet(self, m, elfinfo, recipe=None, basedir=None,
soname=None, soflags=None,
libPathMap={}, getRPATH=None, path=None,
isProvides=None):
"""
Add dependencies from ELF information.
@param m: magic.ELF object
@param elfinfo: requires or provides from magic.ELF.contents
@param recipe: recipe object for calling Requires if basedir is not None
@param basedir: directory to add into dependency
@param soname: alternative soname to use
@param libPathMap: mapping from base dependency name to new dependency name
@param isProvides: whether the dependency being created is a provides
"""
abi = m.contents['abi']
elfClass = abi[0]
nameMap = {}
usesLinuxAbi = False
depSet = deps.DependencySet()
for depClass, main, flags in elfinfo:
if soflags:
flags = itertools.chain(*(flags, soflags))
flags = [ (x, deps.FLAG_SENSE_REQUIRED) for x in flags ]
if depClass == 'soname':
if '/' in main:
main = os.path.basename(main)
if getRPATH:
rpath = getRPATH(main)
if rpath:
# change the name to follow the rpath
main = '/'.join((rpath, main))
elif soname:
main = soname
if basedir:
oldname = os.path.normpath('/'.join((elfClass, main)))
main = '/'.join((basedir, main))
main = os.path.normpath('/'.join((elfClass, main)))
if basedir:
nameMap[main] = oldname
if libPathMap and main in libPathMap:
# if we have a mapping to a provided library that would be
# satisfied, then we modify the requirement to match the
# provision
provided = libPathMap[main]
requiredSet = set(x[0] for x in flags)
providedSet = set(provided.flags.keys())
if requiredSet.issubset(providedSet):
main = provided.getName()[0]
else:
pathString = ''
if path:
pathString = 'for path %s' %path
self.warn('Not replacing %s with %s because of missing %s%s',
main, provided.getName()[0],
sorted(list(requiredSet-providedSet)),
pathString)
curClass = deps.SonameDependencies
for flag in abi[1]:
if flag == 'Linux':
usesLinuxAbi = True
flags.append(('SysV', deps.FLAG_SENSE_REQUIRED))
else:
flags.append((flag, deps.FLAG_SENSE_REQUIRED))
dep = deps.Dependency(main, flags)
elif depClass == 'abi':
curClass = deps.AbiDependency
dep = deps.Dependency(main, flags)
else:
assert(0)
depSet.addDep(curClass, dep)
# This loop has to happen late so that the soname
# flag merging from multiple flag instances has happened
if nameMap:
for soDep in depSet.iterDepsByClass(deps.SonameDependencies):
newName = soDep.getName()[0]
if newName in nameMap:
oldName = nameMap[newName]
                    recipe.Requires(_privateDepMap=(oldName, soDep))
if usesLinuxAbi and not isProvides:
isnset = m.contents.get('isnset', None)
if elfClass == 'ELF32' and isnset == 'x86':
main = 'ELF32/ld-linux.so.2'
elif elfClass == 'ELF64' and isnset == 'x86_64':
main = 'ELF64/ld-linux-x86-64.so.2'
else:
self.error('%s: unknown ELF class %s or instruction set %s',
path, elfClass, isnset)
return depSet
flags = [('Linux', deps.FLAG_SENSE_REQUIRED),
('SysV', deps.FLAG_SENSE_REQUIRED),
(isnset, deps.FLAG_SENSE_REQUIRED)]
dep = deps.Dependency(main, flags)
depSet.addDep(curClass, dep)
return depSet
def _addDepToMap(self, path, depMap, depType, dep):
"Add a single dependency to a map, regardless of whether path was listed before"
if path not in depMap:
depMap[path] = deps.DependencySet()
depMap[path].addDep(depType, dep)
def _addDepSetToMap(self, path, depMap, depSet):
"Add a dependency set to a map, regardless of whether path was listed before"
if path in depMap:
depMap[path].union(depSet)
else:
depMap[path] = depSet
@staticmethod
def _recurseSymlink(path, destdir, fullpath=None):
"""
Recurse through symlinks in destdir and get the final path and fullpath.
If initial fullpath (or destdir+path if fullpath not specified)
does not exist, return path.
"""
if fullpath is None:
fullpath = destdir + path
while os.path.islink(fullpath):
contents = os.readlink(fullpath)
if contents.startswith('/'):
fullpath = os.path.normpath(contents)
else:
fullpath = os.path.normpath(
os.path.dirname(fullpath)+'/'+contents)
return fullpath[len(destdir):], fullpath
def _symlinkMagic(self, path, fullpath, macros, m=None):
"Recurse through symlinks and get the final path and magic"
path, _ = self._recurseSymlink(path, macros.destdir, fullpath=fullpath)
m = self.recipe.magic[path]
return m, path
def _enforceProvidedPath(self, path, fileType='interpreter',
unmanagedError=False):
key = path, fileType
if key in self.cachedProviders:
return self.cachedProviders[key]
db = self._getDb()
troveNames = [ x.getName() for x in db.iterTrovesByPath(path) ]
if not troveNames:
talk = {True: self.error, False: self.warn}[bool(unmanagedError)]
talk('%s file %s not managed by conary' %(fileType, path))
return None
troveName = sorted(troveNames)[0]
# prefer corresponding :devel to :devellib if it exists
package, component = troveName.split(':', 1)
if component in ('devellib', 'lib'):
for preferredComponent in ('devel', 'devellib'):
troveSpec = (
':'.join((package, preferredComponent)),
None, None
)
results = db.findTroves(None, [troveSpec],
allowMissing = True)
if troveSpec in results:
troveName = results[troveSpec][0][0]
break
if troveName not in self.recipe._getTransitiveBuildRequiresNames():
self.recipe.reportMissingBuildRequires(troveName)
self.cachedProviders[key] = troveName
return troveName
def _getRuby(self, macros, path):
# For bootstrapping purposes, prefer the just-built version if
# it exists
# Returns tuple: (pathToRubyInterpreter, bootstrap)
ruby = '%(ruby)s' %macros
if os.access('%(destdir)s/%(ruby)s' %macros, os.X_OK):
return '%(destdir)s/%(ruby)s' %macros, True
elif os.access(ruby, os.X_OK):
# Enforce the build requirement, since it is not in the package
self._enforceProvidedPath(ruby)
return ruby, False
else:
self.warn('%s not available for Ruby dependency discovery'
' for path %s' %(ruby, path))
return False, None
def _getRubyLoadPath(self, macros, rubyInvocation, bootstrap):
# Returns tuple of (invocationString, loadPathList)
destdir = macros.destdir
if bootstrap:
rubyLibPath = [destdir + x for x in self.bootstrapRubyLibs]
rubyInvocation = (('LD_LIBRARY_PATH=%(destdir)s%(libdir)s '
'RUBYLIB="'+':'.join(rubyLibPath)+'" '
+rubyInvocation)%macros)
rubyLoadPath = util.popen(
"%s -e 'puts $:'" %
rubyInvocation).readlines()
# get gem dir if rubygems is installed
if os.access('%(bindir)s/gem' %macros, os.X_OK):
rubyLoadPath.extend(
util.popen("%s -rubygems -e 'puts Gem.default_dir'" %
rubyInvocation).readlines())
rubyLoadPath = [ x.strip() for x in rubyLoadPath if x.startswith('/') ]
loadPathList = rubyLoadPath[:]
if bootstrap:
rubyLoadPath = [ destdir+x for x in rubyLoadPath ]
rubyInvocation = ('LD_LIBRARY_PATH=%(destdir)s%(libdir)s'
' RUBYLIB="'+':'.join(rubyLoadPath)+'"'
' %(destdir)s/%(ruby)s') % macros
return (rubyInvocation, loadPathList)
def _getRubyVersion(self, macros):
cmd = self.rubyInvocation + (" -e 'puts RUBY_VERSION'" % macros)
rubyVersion = util.popen(cmd).read()
rubyVersion = '.'.join(rubyVersion.split('.')[0:2])
return rubyVersion
def _getRubyFlagsFromPath(self, pathName, rubyVersion):
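        # Illustrative example (the path is invented): for
        # '/usr/lib64/ruby/1.8/foo.rb' with a ruby version of '1.8', this
        # yields the flags 'lib64' and '1.8'.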
pathList = pathName.split('/')
pathList = [ x for x in pathList if x ]
foundLib = False
foundVer = False
flags = set()
for dirName in pathList:
if not foundLib and dirName.startswith('lib'):
foundLib = True
flags.add(dirName)
            elif not foundVer and dirName.split('.')[:2] == rubyVersion.split('.')[:2]:
# we only compare major and minor versions due to
# ruby api version (dirName) differing from programs
# version (rubyVersion)
foundVer = True
flags.add(dirName)
if foundLib and foundVer:
break
return flags
def _getmonodis(self, macros, path):
# For bootstrapping purposes, prefer the just-built version if
# it exists
monodis = '%(monodis)s' %macros
if os.access('%(destdir)s/%(monodis)s' %macros, os.X_OK):
return ('MONO_PATH=%(destdir)s%(prefix)s/lib'
' LD_LIBRARY_PATH=%(destdir)s%(libdir)s'
' %(destdir)s/%(monodis)s' %macros)
elif os.access(monodis, os.X_OK):
# Enforce the build requirement, since it is not in the package
self._enforceProvidedPath(monodis)
return monodis
else:
self.warn('%s not available for CIL dependency discovery'
' for path %s' %(monodis, path))
return None
def _getperlincpath(self, perl, destdir):
"""
Fetch the perl @INC path, falling back to bootstrapPerlIncPath
only if perl cannot be run. All elements of the search path
will be resolved against symlinks in destdir if they exist. (CNY-2949)
"""
if not perl:
return []
p = util.popen(r"""%s -e 'print join("\n", @INC)'""" %perl)
perlIncPath = p.readlines()
# make sure that the command completed successfully
try:
rc = p.close()
perlIncPath = [x.strip() for x in perlIncPath if not x.startswith('.')]
return [self._recurseSymlink(x, destdir)[0] for x in perlIncPath]
except RuntimeError:
return [self._recurseSymlink(x, destdir)[0]
for x in self.bootstrapPerlIncPath]
def _getperl(self, macros, recipe):
"""
Find the preferred instance of perl to use, including setting
any environment variables necessary to use that perl.
Returns string for running it, the C{@INC} path, and a separate
string, if necessary, for adding to @INC.
"""
perlDestPath = '%(destdir)s%(bindir)s/perl' %macros
# not %(bindir)s so that package modifications do not affect
# the search for system perl
perlPath = '/usr/bin/perl'
destdir = macros.destdir
def _perlDestInc(destdir, perlDestInc):
return ' '.join(['-I' + destdir + x for x in perlDestInc])
if os.access(perlDestPath, os.X_OK):
# must use packaged perl if it exists
m = recipe.magic[perlDestPath[len(destdir):]] # not perlPath
if m and 'RPATH' in m.contents and m.contents['RPATH']:
# we need to prepend the destdir to each element of the RPATH
# in order to run perl in the destdir
perl = ''.join((
'export LD_LIBRARY_PATH=',
'%s%s:' %(destdir, macros.libdir),
':'.join([destdir+x
for x in m.contents['RPATH'].split(':')]),
';',
perlDestPath
))
perlIncPath = self._getperlincpath(perl, destdir)
perlDestInc = _perlDestInc(destdir, perlIncPath)
return [perl, perlIncPath, perlDestInc]
else:
# perl that does not use/need rpath
perl = 'LD_LIBRARY_PATH=%s%s %s' %(
destdir, macros.libdir, perlDestPath)
perlIncPath = self._getperlincpath(perl, destdir)
perlDestInc = _perlDestInc(destdir, perlIncPath)
return [perl, perlIncPath, perlDestInc]
elif os.access(perlPath, os.X_OK):
# system perl if no packaged perl, needs no @INC mangling
self._enforceProvidedPath(perlPath)
perlIncPath = self._getperlincpath(perlPath, destdir)
return [perlPath, perlIncPath, '']
# must be no perl at all
return ['', [], '']
def _getPython(self, macros, path):
"""
Takes a path
Returns, for that path, a tuple of
- the preferred instance of python to use
- whether that instance is in the destdir
"""
m = self.recipe.magic[path]
if m and m.name == 'script' and 'python' in m.contents['interpreter']:
pythonPath = [m.contents['interpreter']]
else:
pythonVersion = self._getPythonVersionFromPath(path, None)
# After PATH, fall back to %(bindir)s. If %(bindir)s should be
# preferred, it needs to be earlier in the PATH. Include
# unversioned python as a last resort for confusing cases.
shellPath = os.environ.get('PATH', '').split(':') + [ '%(bindir)s' ]
pythonPath = []
if pythonVersion:
pythonPath = [ os.path.join(x, pythonVersion) for x in shellPath ]
pythonPath.extend([ os.path.join(x, 'python') for x in shellPath ])
for pathElement in pythonPath:
pythonDestPath = ('%(destdir)s'+pathElement) %macros
if os.access(pythonDestPath, os.X_OK):
return (pythonDestPath, True)
for pathElement in pythonPath:
pythonDestPath = pathElement %macros
if os.access(pythonDestPath, os.X_OK):
self._enforceProvidedPath(pythonDestPath)
return (pythonDestPath, False)
# Specified python not found on system (usually because of
# bad interpreter path -- CNY-2050)
if len(pythonPath) == 1:
missingPythonPath = '%s ' % pythonPath[0]
else:
missingPythonPath = ''
self.warn('Python interpreter %snot found for %s',
missingPythonPath, path)
return (None, None)
def _stripDestDir(self, pathList, destdir):
destDirLen = len(destdir)
pathElementList = []
for pathElement in pathList:
if pathElement.startswith(destdir):
pathElementList.append(pathElement[destDirLen:])
else:
pathElementList.append(pathElement)
return pathElementList
class Provides(_dependency):
"""
NAME
====
B{C{r.Provides()}} - Creates dependency provision
SYNOPSIS
========
C{r.Provides([I{provision}, I{filterexp}] || [I{exceptions=filterexp}])}
DESCRIPTION
===========
The C{r.Provides()} policy marks files as providing certain features
or characteristics, and can be called to explicitly provide things
that cannot be automatically discovered. C{r.Provides} can also override
automatic discovery, and prevent marking a file as providing things, such
as for package-private plugin modules installed in system library
directories.
A C{I{provision}} may be C{'file'} to mark a file as providing its
filename, or a dependency type. You can create a file, soname or
ABI C{I{provision}} manually; all other types are only automatically
discovered. Provisions that begin with C{file} are files, those that
start with C{soname:} are sonames, and those that start with C{abi:}
are ABIs. Other prefixes are reserved.
Soname provisions are normally discovered automatically; they need
to be provided manually only in two cases:
- If a shared library was not built with a soname at all.
- If a symbolic link to a shared library needs to provide its name
as a soname.
    Note: Use C{r.ComponentProvides} rather than C{r.Provides} to add
capability flags to components.
For unusual cases where you want to remove a provision Conary
automatically finds, you can specify C{r.Provides(exceptDeps='regexp')}
to override all provisions matching a regular expression,
C{r.Provides(exceptDeps=('filterexp', 'regexp'))}
to override provisions matching a regular expression only for files
matching filterexp, or
C{r.Provides(exceptDeps=(('filterexp', 'regexp'), ...))} to specify
multiple overrides.
EXAMPLES
========
C{r.Provides('file', '/usr/share/dict/words')}
Demonstrates using C{r.Provides} to specify the file provision
C{/usr/share/dict/words}, so that other files can now require that file.
C{r.Provides('soname: libperl.so', '%(libdir)s/perl5/.*/CORE/libperl.so')}
Demonstrates synthesizing a shared library provision for all the
libperl.so symlinks.
C{r.Provides(exceptDeps = 'java: .*')}
Demonstrates removing all java provisions.
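    A further, hypothetical illustration (the plugin directory is invented):
    C{r.Provides(exceptDeps=('%(libdir)s/myapp/plugins/.*', 'soname: .*'))}
    Suppresses all automatically discovered soname provisions for
    package-private plugin modules under that directory, while leaving
    provisions for other files untouched.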
"""
bucket = policy.PACKAGE_CREATION
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('SharedLibrary', policy.REQUIRED),
# _ELFPathProvide calls Requires to pass in discovered info
# _addCILPolicyProvides does likewise
('Requires', policy.REQUIRED_SUBSEQUENT),
)
filetree = policy.PACKAGE
invariantexceptions = (
'%(docdir)s/',
)
dbDepCacheClass = _DatabaseDepCache
def __init__(self, *args, **keywords):
_dependency.__init__(self, *args, **keywords)
self.provisions = []
self.sonameSubtrees = set()
self.sysPath = None
self.monodisPath = None
self.rubyInterpreter = None
self.rubyVersion = None
self.rubyInvocation = None
self.rubyLoadPath = None
self.perlIncPath = None
self.pythonSysPathMap = {}
self.exceptDeps = []
policy.Policy.__init__(self, *args, **keywords)
self.depCache = self.dbDepCacheClass(self._getDb())
def updateArgs(self, *args, **keywords):
if args:
for filespec in args[1:]:
self.provisions.append((filespec, args[0]))
sonameSubtrees = keywords.pop('sonameSubtrees', None)
if sonameSubtrees:
if type(sonameSubtrees) in (list, tuple):
self.sonameSubtrees.update(set(sonameSubtrees))
else:
self.sonameSubtrees.add(sonameSubtrees)
exceptDeps = keywords.pop('exceptDeps', None)
if exceptDeps:
if type(exceptDeps) is str:
exceptDeps = ('.*', exceptDeps)
assert(type(exceptDeps) == tuple)
if type(exceptDeps[0]) is tuple:
self.exceptDeps.extend(exceptDeps)
else:
self.exceptDeps.append(exceptDeps)
        # The following are called only from Requires and should override
# completely to make sure the policies are in sync
pythonFlagNamespace = keywords.pop('_pythonFlagNamespace', None)
if pythonFlagNamespace is not None:
self.pythonFlagNamespace = pythonFlagNamespace
bootstrapPythonFlags = keywords.pop('_bootstrapPythonFlags', None)
if bootstrapPythonFlags is not None:
self.bootstrapPythonFlags = bootstrapPythonFlags
bootstrapSysPath = keywords.pop('_bootstrapSysPath', None)
if bootstrapSysPath is not None:
self.bootstrapSysPath = bootstrapSysPath
bootstrapPerlIncPath = keywords.pop('_bootstrapPerlIncPath', None)
if bootstrapPerlIncPath is not None:
self.bootstrapPerlIncPath = bootstrapPerlIncPath
bootstrapRubyLibs = keywords.pop('_bootstrapRubyLibs', None)
if bootstrapRubyLibs is not None:
self.bootstrapRubyLibs = bootstrapRubyLibs
if keywords.get('removeFlagsByDependencyClass', None):
self.error('removeFlagsByDependencyClass not currently implemented for Provides (CNY-3443)')
_dependency.updateArgs(self, **keywords)
def preProcess(self):
macros = self.macros
if self.bootstrapPythonFlags is not None:
self.bootstrapPythonFlags = set(x % macros
for x in self.bootstrapPythonFlags)
if self.bootstrapSysPath:
self.bootstrapSysPath = [x % macros for x in self.bootstrapSysPath]
if self.pythonFlagNamespace is not None:
self.pythonFlagNamespace = self.pythonFlagNamespace % macros
if self.bootstrapPerlIncPath:
self.bootstrapPerlIncPath = [x % macros for x in self.bootstrapPerlIncPath]
self.rootdir = self.rootdir % macros
self.fileFilters = []
self.binDirs = frozenset(
x % macros for x in [
'%(bindir)s', '%(sbindir)s',
'%(essentialbindir)s', '%(essentialsbindir)s',
'%(libexecdir)s', ])
self.noProvDirs = frozenset(
x % macros for x in [
'%(testdir)s',
'%(debuglibdir)s',
]).union(self.binDirs)
exceptDeps = []
for fE, rE in self.exceptDeps:
try:
exceptDeps.append((filter.Filter(fE, macros),
re.compile(rE % self.macros)))
except sre_constants.error, e:
self.error('Bad regular expression %s for file spec %s: %s', rE, fE, e)
self.exceptDeps= exceptDeps
for filespec, provision in self.provisions:
self.fileFilters.append(
(filter.Filter(filespec, macros), provision % macros))
del self.provisions
_dependency.preProcess(self)
def doFile(self, path):
pkgs = self.recipe.autopkg.findComponents(path)
if not pkgs:
return
pkgFiles = [(x, x.getFile(path)) for x in pkgs]
macros = self.recipe.macros
m = self.recipe.magic[path]
fullpath = macros.destdir + path
basepath = os.path.basename(path)
dirpath = os.path.dirname(path)
if os.path.exists(fullpath):
mode = os.lstat(fullpath)[stat.ST_MODE]
# First, add in the manual provisions
self.addExplicitProvides(path, fullpath, pkgFiles, macros, m)
# Next, discover all automatically-discoverable provisions
if os.path.exists(fullpath):
if (self._isELF(m, 'abi')
and m.contents['Type'] != elf.ET_EXEC
and not [ x for x in self.noProvDirs if path.startswith(x) ]):
# we do not add elf provides for programs that won't be linked to
self._ELFAddProvide(path, m, pkgFiles, basedir=dirpath)
if dirpath in self.sonameSubtrees:
# only export filename as soname if is shlib
sm, finalpath = self._symlinkMagic(path, fullpath, macros, m)
if sm and self._isELF(sm, 'abi') and sm.contents['Type'] != elf.ET_EXEC:
# add the filename as a soname provision (CNY-699)
# note: no provides necessary
self._ELFAddProvide(path, sm, pkgFiles, soname=basepath, basedir=dirpath)
if self._isPythonModuleCandidate(path):
self._addPythonProvides(path, m, pkgFiles, macros)
rubyProv = self._isRubyModule(path, macros, fullpath)
if rubyProv:
self._addRubyProvides(path, m, pkgFiles, macros, rubyProv)
elif self._isCIL(m):
self._addCILProvides(path, m, pkgFiles, macros)
elif self.CILPolicyRE.match(path):
self._addCILPolicyProvides(path, pkgFiles, macros)
elif self._isJava(m, 'provides'):
# Cache the internal provides
if not hasattr(self.recipe, '_internalJavaDepMap'):
self.recipe._internalJavaDepMap = None
self._addJavaProvides(path, m, pkgFiles)
elif self._isPerlModule(path):
self._addPerlProvides(path, m, pkgFiles)
self.addPathDeps(path, dirpath, pkgFiles)
self.whiteOut(path, pkgFiles)
self.unionDeps(path, pkgFiles)
def whiteOut(self, path, pkgFiles):
# remove intentionally discarded provides
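        # Each dependency is compared as '<dependency class tag>: <dep>'
        # (for example 'python: foo.bar') against the exceptDeps regular
        # expressions whose file filters match this path.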
for pkg, f in pkgFiles:
if self.exceptDeps and path in pkg.providesMap:
depSet = deps.DependencySet()
for depClass, dep in pkg.providesMap[path].iterDeps():
for filt, exceptRe in self.exceptDeps:
if filt.match(path):
matchName = '%s: %s' %(depClass.tagName, str(dep))
if exceptRe.match(matchName):
# found one to not copy
dep = None
break
if dep is not None:
depSet.addDep(depClass, dep)
pkg.providesMap[path] = depSet
def addExplicitProvides(self, path, fullpath, pkgFiles, macros, m):
for (filter, provision) in self.fileFilters:
if filter.match(path):
self._markProvides(path, fullpath, provision, pkgFiles, macros, m)
def addPathDeps(self, path, dirpath, pkgFiles):
# Because paths can change, individual files do not provide their
# paths. However, within a trove, a file does provide its name.
# Furthermore, non-regular files can be path dependency targets
# Therefore, we have to handle this case a bit differently.
for pkg, f in pkgFiles:
if dirpath in self.binDirs and not isinstance(f, files.Directory):
# CNY-930: automatically export paths in bindirs
# CNY-1721: but not directories in bindirs
f.flags.isPathDependencyTarget(True)
if f.flags.isPathDependencyTarget():
pkg.provides.addDep(deps.FileDependencies, deps.Dependency(path))
def unionDeps(self, path, pkgFiles):
for pkg, f in pkgFiles:
if path in pkg.providesMap:
f.provides.set(pkg.providesMap[path])
pkg.provides.union(f.provides())
def _getELFinfo(self, m, soname):
if 'provides' in m.contents and m.contents['provides']:
return m.contents['provides']
else:
# we need to synthesize some provides information
return [('soname', soname, ())]
def _ELFAddProvide(self, path, m, pkgFiles, soname=None, soflags=None, basedir=None):
if basedir is None:
basedir = os.path.dirname(path)
if basedir in self.sonameSubtrees:
# do not record the basedir
basedir = None
else:
# path needs to be in the dependency, since the
# provides is too broad otherwise, so add it.
# We can only add characters from the path that are legal
# in a dependency name
basedir = ''.join(x for x in basedir if self.legalCharsRE.match(x))
elfinfo = self._getELFinfo(m, os.path.basename(path))
depSet = self._createELFDepSet(m, elfinfo,
recipe=self.recipe, basedir=basedir,
soname=soname, soflags=soflags,
path=path, isProvides=True)
for pkg, _ in pkgFiles:
self._addDepSetToMap(path, pkg.providesMap, depSet)
def _getPythonProvidesSysPath(self, path):
"""Generate an ordered list of python paths for the target package.
This includes the current system path, plus any paths added by the new
package in the destdir through .pth files or a newly built python.
@return: (sysPath, pythonVersion)
"""
pythonPath, bootstrapPython = self._getPython(self.macros, path)
if not pythonPath:
# Most likely bad interpreter path in a .py file
return (None, None)
if pythonPath in self.pythonSysPathMap:
return self.pythonSysPathMap[pythonPath]
destdir = self.macros.destdir
libdir = self.macros.libdir
pythonVersion = self._getPythonVersion(pythonPath, destdir, libdir)
# Get default sys.path from python interpreter, either the one just
# built (in the case of a python bootstrap) or from the system.
systemPaths = set(self._getPythonSysPath(pythonPath, destdir, libdir,
useDestDir=False))
# Now add paths from the destdir's site-packages, typically due to
# newly installed .pth files.
systemPaths.update(self._getPythonSysPath(pythonPath, destdir, libdir,
useDestDir=True))
# Sort in descending order so that the longest path matches first.
sysPath = sorted(self._stripDestDir(systemPaths, destdir), reverse=True)
self.pythonSysPathMap[pythonPath] = (sysPath, pythonVersion)
return self.pythonSysPathMap[pythonPath]
def _fetchPerlIncPath(self):
"""
Cache the perl @INC path, sorted longest first
"""
if self.perlIncPath is not None:
return
_, self.perlIncPath, _ = self._getperl(
self.recipe.macros, self.recipe)
self.perlIncPath.sort(key=len, reverse=True)
def _addPythonProvides(self, path, m, pkgFiles, macros):
if not self._isPythonModuleCandidate(path):
return
sysPath, pythonVersion = self._getPythonProvidesSysPath(path)
if not sysPath:
return
# Add provides for every match in sys.path. For example, PIL.Imaging
# and Imaging should both be provided since they are both reachable
# names.
for sysPathEntry in sysPath:
if not path.startswith(sysPathEntry):
continue
newDepPath = path[len(sysPathEntry)+1:]
if newDepPath.split('.')[0] == '__init__':
# we don't allow bare __init__ as a python import
# hopefully we'll find this init as a deeper import at some
# other point in the sysPath
continue
elif ('site-packages' in newDepPath
or 'lib-dynload' in newDepPath
or 'plat-linux' in newDepPath
):
# site-packages should be specifically excluded since both it
# and its parent are always in sys.path. However, invalid
# python package names in general are allowed due to certain
# cases where relative imports happen inside a hyphenated
# directory and the requires detector picks up on that.
continue
# Note that it's possible to have a false positive here. For
# example, in the PIL case if PIL/__init__.py did not exist,
# PIL.Imaging would still be provided. The odds of this causing
# problems are so small that it is not checked for here.
self._addPythonProvidesSingle(path, m, pkgFiles, macros,
newDepPath)
def _addPythonProvidesSingle(self, path, m, pkgFiles, macros, depPath):
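        # Illustrative example (names invented): a depPath of 'mypkg/util.py'
        # becomes the python dependency 'mypkg.util'; for an extension module
        # such as 'mypkg/foo.so', any exported init<name> symbols add further
        # provides of the form 'mypkg.<name>'.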
# remove extension
depPath, extn = depPath.rsplit('.', 1)
if depPath == '__future__':
return
# remove python3 __pycache__ directory from dep
if '__pycache__/' in depPath:
depPath = depPath.replace('__pycache__/', '')
# PEP 3147 adds the interperter and version to the pyc file
depPath = self.pythonInterpRE.sub('', depPath)
if depPath.endswith('/__init__'):
depPath = depPath.replace('/__init__', '')
depPath = depPath.replace('/', '.')
depPaths = [ depPath ]
if extn == 'so':
fname = util.joinPaths(macros.destdir, path)
try:
syms = elf.getDynSym(fname)
# Does this module have an init<blah> function?
initfuncs = [ x[4:] for x in syms if x.startswith('init') ]
# This is the equivalent of dirname()
comps = depPath.rsplit('.', 1)
dpPrefix = comps[0]
if len(comps) == 1:
# Top-level python module
depPaths.extend(initfuncs)
else:
for initfunc in initfuncs:
depPaths.append('.'.join([dpPrefix, initfunc]))
except elf.error:
pass
flags = self._getPythonFlagsFromPath(path)
flags = [(x, deps.FLAG_SENSE_REQUIRED) for x in sorted(list(flags))]
for dpath in depPaths:
dep = deps.Dependency(dpath, flags)
for pkg, _ in pkgFiles:
self._addDepToMap(path, pkg.providesMap, deps.PythonDependencies, dep)
def _addOneCILProvide(self, pkgFiles, path, name, ver):
for pkg, _ in pkgFiles:
self._addDepToMap(path, pkg.providesMap, deps.CILDependencies,
deps.Dependency(name, [(ver, deps.FLAG_SENSE_REQUIRED)]))
def _addCILPolicyProvides(self, path, pkgFiles, macros):
if ElementTree is None:
return
try:
keys = {'urn': '{urn:schemas-microsoft-com:asm.v1}'}
fullpath = macros.destdir + path
tree = ElementTree.parse(fullpath)
root = tree.getroot()
identity, redirect = root.find('runtime/%(urn)sassemblyBinding/%(urn)sdependentAssembly' % keys).getchildren()
assembly = identity.get('name')
self._addOneCILProvide(pkgFiles, path, assembly,
redirect.get('oldVersion'))
self.recipe.Requires(_CILPolicyProvides={
path: (assembly, redirect.get('newVersion'))})
except:
return
def _addCILProvides(self, path, m, pkgFiles, macros):
if not m or m.name != 'CIL':
return
fullpath = macros.destdir + path
if not self.monodisPath:
self.monodisPath = self._getmonodis(macros, path)
if not self.monodisPath:
return
p = util.popen('%s --assembly %s' %(
self.monodisPath, fullpath))
name = None
ver = None
for line in [ x.strip() for x in p.readlines() ]:
if 'Name:' in line:
name = line.split()[1]
elif 'Version:' in line:
ver = line.split()[1]
p.close()
# monodis did not give us any info
if not name or not ver:
return
self._addOneCILProvide(pkgFiles, path, name, ver)
def _isRubyModule(self, path, macros, fullpath):
if not util.isregular(fullpath) or os.path.islink(fullpath):
return False
if '/ruby/' in path:
# load up ruby opportunistically; this is our first chance
if self.rubyInterpreter is None:
self.rubyInterpreter, bootstrap = self._getRuby(macros, path)
if not self.rubyInterpreter:
return False
self.rubyInvocation, self.rubyLoadPath = self._getRubyLoadPath(
macros, self.rubyInterpreter, bootstrap)
self.rubyVersion = self._getRubyVersion(macros)
# we need to look deep first
self.rubyLoadPath = sorted(list(self.rubyLoadPath),
key=len, reverse=True)
elif self.rubyInterpreter is False:
return False
for pathElement in self.rubyLoadPath:
if path.startswith(pathElement) \
and (path.endswith('.rb') or path.endswith('.so')):
if '/gems/' in path:
path = path.partition("/gems/")[-1]
if '/lib/' in path:
return path.partition('/lib/')[-1].rsplit('.', 1)[0]
else:
return path[len(pathElement)+1:].rsplit('.', 1)[0]
return False
def _addRubyProvides(self, path, m, pkgFiles, macros, prov):
flags = self._getRubyFlagsFromPath(path, self.rubyVersion)
flags = [(x, deps.FLAG_SENSE_REQUIRED) for x in sorted(list(flags))]
dep = deps.Dependency(prov, flags)
for pkg, _ in pkgFiles:
self._addDepToMap(path, pkg.providesMap, deps.RubyDependencies, dep)
def _addJavaProvides(self, path, m, pkgFiles):
if 'provides' not in m.contents or not m.contents['provides']:
return
if not hasattr(self.recipe, '_reqExceptDeps'):
self.recipe._reqExceptDeps = []
# Compile requires exceptDeps (and persist them)
if not hasattr(self.recipe, '_compiledReqExceptDeps'):
self.recipe._compiledReqExceptDeps = exceptDeps = []
macros = self.recipe.macros
for fE, rE in self.recipe._reqExceptDeps:
try:
exceptDeps.append((filter.Filter(fE, macros),
re.compile(rE % macros)))
except sre_constants.error, e:
self.error('Bad regular expression %s for file spec %s: %s',
rE, fE, e)
# We will no longer need this, we have the compiled version now
self.recipe._reqExceptDeps = []
if self.recipe._internalJavaDepMap is None:
# Instantiate the dictionary of provides from this package
self.recipe._internalJavaDepMap = internalJavaDepMap = {}
componentMap = self.recipe.autopkg.componentMap
for opath in componentMap:
om = self.recipe.magic[opath]
if not self._isJava(om, 'provides'):
continue
# The file could be a .jar, in which case it contains multiple
# classes. contents['files'] is a dict, keyed on the file name
# within the jar and with a provide and a set of requires as
# value.
internalJavaDepMap.setdefault(opath, {}).update(
om.contents['files'])
else:
internalJavaDepMap = self.recipe._internalJavaDepMap
if hasattr(self.recipe, '_internalJavaProvides'):
internalProvides = self.recipe._internalJavaProvides
else:
# We need to cache the internal java provides, otherwise we do too
# much work for each file (CNY-3372)
self.recipe._internalJavaProvides = internalProvides = set()
for opath, ofiles in internalJavaDepMap.items():
internalProvides.update(x[0] for x in ofiles.values()
if x[0] is not None)
# Now drop internal provides from individual class requires
for opath, ofiles in internalJavaDepMap.items():
for oclassName, (oclassProv, oclassReqSet) in ofiles.items():
if oclassReqSet is None:
continue
oclassReqSet.difference_update(internalProvides)
reqs = set()
if self._isJava(m, 'requires'):
# Extract this file's requires
reqs.update(m.contents['requires'])
# Remove the ones that are satisfied internally
reqs.difference_update(internalProvides)
# For now, we are only trimming the provides (and requires) for
# classes for which the requires are not satisfied, neither internally
# nor from the system Conary database. In the future we may need to
# build a dependency tree between internal classes, such that we do
# the removal transitively (class A requires class B which doesn't
# have its deps satisfied should make class A unusable). This can come
# at a later time
# CNY-3362: we don't drop provides for classes which had requires on
# classes that had their dependencies pruned. (at least not yet)
if reqs:
# Try to resolve these deps against the Conary database
depSetList = []
depSetMap = {}
for req in reqs:
depSet = deps.DependencySet()
depSet.addDep(deps.JavaDependencies, deps.Dependency(req, []))
depSetList.append(depSet)
depSetMap[depSet] = req
troves = self.depCache.getProvides(depSetList)
missingDepSets = set(depSetList) - set(troves)
missingReqs = set(depSetMap[x] for x in missingDepSets)
# White out the missing requires if exceptDeps for them are found
rExceptDeps = self.recipe._compiledReqExceptDeps
if missingReqs and rExceptDeps:
depClass = deps.JavaDependencies
filteredMissingDeps = set()
for dep in list(missingReqs):
for filt, exceptRe in rExceptDeps:
if not filt.match(path):
continue
matchName = '%s: %s' %(depClass.tagName, str(dep))
if exceptRe.match(matchName):
# found one to not copy
missingReqs.remove(dep)
filteredMissingDeps.add(dep)
break
if filteredMissingDeps:
# We need to take them out of the per-file requires
ofiles = internalJavaDepMap[path]
for _, (oclassProv, oclassReqSet) in ofiles.items():
if oclassProv is not None:
oclassReqSet.difference_update(filteredMissingDeps)
if missingReqs:
fileDeps = internalJavaDepMap[path]
# This file has unsatisfied dependencies.
# Walk its list of classes to determine which ones are not
# satisfied.
satisfiedClasses = dict((fpath, (fprov, freqs))
for (fpath, (fprov, freqs)) in fileDeps.iteritems()
if freqs is not None
and not freqs.intersection(missingReqs))
internalJavaDepMap[path] = satisfiedClasses
self.warn('Provides and requirements for file %s are disabled '
'because of unsatisfied dependencies. To re-enable '
'them, add to the recipe\'s buildRequires the '
'packages that provide the following '
'requirements: %s' %
(path, " ".join(sorted(missingReqs))))
# Add the remaining provides
fileDeps = internalJavaDepMap[path]
provs = set(fprov for fpath, (fprov, freqs) in fileDeps.iteritems()
if fprov is not None)
for prov in provs:
dep = deps.Dependency(prov, [])
for pkg, _ in pkgFiles:
self._addDepToMap(path, pkg.providesMap, deps.JavaDependencies, dep)
def _addPerlProvides(self, path, m, pkgFiles):
# do not call perl to get @INC unless we have something to do for perl
self._fetchPerlIncPath()
# It is possible that we'll want to allow user-specified
# additions to the perl search path, but if so, we need
# to path-encode those files, so we can't just prepend
# those elements to perlIncPath. We would need to end up
        # with something like "perl: /path/to/foo::bar", because
        # perl scripts that don't modify @INC could not otherwise
        # find those modules. It is not clear that we need this
# at all, because most if not all of those cases would be
# intra-package dependencies that we do not want to export.
depPath = None
for pathPrefix in self.perlIncPath:
if path.startswith(pathPrefix):
depPath = path[len(pathPrefix)+1:]
break
if depPath is None:
return
# foo/bar/baz.pm -> foo::bar::baz
prov = '::'.join(depPath.split('/')).rsplit('.', 1)[0]
dep = deps.Dependency(prov, [])
for pkg, _ in pkgFiles:
self._addDepToMap(path, pkg.providesMap, deps.PerlDependencies, dep)
def _markProvides(self, path, fullpath, provision, pkgFiles, macros, m):
if provision.startswith("file"):
# can't actually specify what to provide, just that it provides...
for _, f in pkgFiles:
f.flags.isPathDependencyTarget(True)
elif provision.startswith("abi:"):
abistring = provision[4:].strip()
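            # e.g. a provision like "abi: ELF32(SysV x86)" yields abi 'ELF32'
            # and flags ['SysV', 'x86'] (example values only)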
op = abistring.index('(')
abi = abistring[:op]
flags = abistring[op+1:-1].split()
flags = [ (x, deps.FLAG_SENSE_REQUIRED) for x in flags ]
dep = deps.Dependency(abi, flags)
for pkg, _ in pkgFiles:
self._addDepToMap(path, pkg.providesMap, deps.AbiDependency, dep)
elif provision.startswith("soname:"):
sm, finalpath = self._symlinkMagic(path, fullpath, macros, m)
if self._isELF(sm, 'abi'):
# Only ELF files can provide sonames.
# This is for libraries that don't really include a soname,
# but programs linked against them require a soname.
# For this reason, we do not pass 'provides' to _isELF
soname = provision[7:].strip()
soflags = []
if '(' in soname:
# get list of arbitrary flags
soname, rest = soname.split('(')
soflags.extend(rest[:-1].split())
basedir = None
if '/' in soname:
basedir, soname = soname.rsplit('/', 1)
self._ELFAddProvide(path, sm, pkgFiles, soname=soname, soflags=soflags,
basedir=basedir)
else:
self.error('Provides %s for file %s does not start with one of'
' "file", "abi:", or "soname"',
provision, path)
class Requires(_addInfo, _dependency):
"""
NAME
====
B{C{r.Requires()}} - Creates dependency requirements
SYNOPSIS
========
    C{r.Requires([I{/path/to/file}, I{filterexp}] || [I{packagename:component[(FLAGS)]}, I{filterexp}] || [I{exceptions=filterexp}])}
DESCRIPTION
===========
The C{r.Requires()} policy adds requirements for a file.
You can pass in exceptions that should not have automatic requirement
discovery done, such as example shell scripts outside of C{%(docdir)s}.
Note: Components are the only troves which can be required.
For executables executed only through wrappers that
use C{LD_LIBRARY_PATH} to find the libraries instead of
embedding an RPATH in the binary, you will need to provide
a synthetic RPATH using C{r.Requires(rpath='I{RPATH}')}
or C{r.Requires(rpath=('I{filterExp}', 'I{RPATH}'))} calls,
which are tested in the order provided.
The RPATH is a standard Unix-style path string containing one or more
directory names, separated only by colon characters, except for one
significant change: Each path component is interpreted using shell-style
globs, which are checked first in the C{%(destdir)s} and then on the
installed system. (The globs are useful for cases like perl where
statically determining the entire content of the path is difficult. Use
globs only for variable parts of paths; be as specific as you can without
using the glob feature any more than necessary.)
Executables that use C{dlopen()} to open a shared library will not
automatically have a dependency on that shared library. If the program
unconditionally requires that it be able to C{dlopen()} the shared
library, encode that requirement by manually creating the requirement
by calling C{r.Requires('soname: libfoo.so', 'filterexp')} or
C{r.Requires('soname: /path/to/libfoo.so', 'filterexp')} depending on
whether the library is in a system library directory or not. (It should be
the same as how the soname dependency is expressed by the providing
package.)
For unusual cases where a system library is not listed in C{ld.so.conf}
but is instead found through a search through special subdirectories with
architecture-specific names (such as C{i686} and C{tls}), you can pass in
a string or list of strings specifying the directory or list of
    directories, with C{r.Requires(sonameSubtrees='/directoryname')}
or C{r.Requires(sonameSubtrees=['/list', '/of', '/dirs'])}
Note: These are B{not} regular expressions. They will have macro
    expansion performed on them.
For unusual cases where Conary finds a false or misleading dependency,
or in which you need to override a true dependency, you can specify
C{r.Requires(exceptDeps='regexp')} to override all dependencies matching
a regular expression, C{r.Requires(exceptDeps=('filterexp', 'regexp'))}
to override dependencies matching a regular expression only for files
matching filterexp, or
C{r.Requires(exceptDeps=(('filterexp', 'regexp'), ...))} to specify
multiple overrides.
EXAMPLES
========
C{r.Requires('mailbase:runtime', '%(sbindir)s/sendmail')}
Demonstrates using C{r.Requires} to specify a manual requirement of the
file C{%(sbindir)s/sendmail} to the C{:runtime} component of package
C{mailbase}.
C{r.Requires('file: %(sbindir)s/sendmail', '%(datadir)s/squirrelmail/index.php')}
Specifies that conary should require the file C{%(sbindir)s/sendmail} to
be present when trying to install C{%(datadir)s/squirrelmail/index.php}.
    C{r.Requires('soname: %(libdir)s/kde3/kgreet_classic.so', '%(bindir)s/kdm')}
Demonstrates using C{r.Requires} to specify a manual soname requirement
of the file C{%(bindir)s/kdm} to the soname
    C{%(libdir)s/kde3/kgreet_classic.so}.
C{r.Requires(exceptions='/usr/share/vim/.*/doc/')}
Demonstrates using C{r.Requires} to specify that files in the
subdirectory C{/usr/share/vim/.*/doc} are excepted from being marked as
requirements.
C{r.Requires(exceptDeps='trove:$trovename')}
Uses C{r.Requires} to specify that the trove C{trovename} is excluded
from the dependencies for the package.
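    C{r.Requires(rpath=('%(bindir)s/myapp', '%(libdir)s/myapp:%(datadir)s/myapp/plugins'))}
    A purely illustrative example (the paths are hypothetical) of supplying a
    synthetic RPATH for a C{%(bindir)s/myapp} binary that is normally started
    through a wrapper which sets C{LD_LIBRARY_PATH}, so that its soname
    requirements can still be resolved.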
"""
bucket = policy.PACKAGE_CREATION
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('SharedLibrary', policy.REQUIRED_PRIOR),
# Requires depends on ELF dep path discovery previously done in Provides
('Provides', policy.REQUIRED_PRIOR),
)
filetree = policy.PACKAGE
invariantexceptions = (
'%(docdir)s/',
)
dbDepCacheClass = _DatabaseDepCache
def __init__(self, *args, **keywords):
_dependency.__init__(self, *args, **keywords)
self.bootstrapPythonFlags = set()
self.bootstrapSysPath = []
self.bootstrapPerlIncPath = []
self.bootstrapRubyLibs = []
self.pythonFlagNamespace = None
self.sonameSubtrees = set()
self._privateDepMap = {}
self.rpathFixup = []
self.exceptDeps = []
self.sysPath = None
self.monodisPath = None
self.rubyInterpreter = None
self.rubyVersion = None
self.rubyInvocation = None
self.rubyLoadPath = None
self.perlReqs = None
self.perlPath = None
self.perlIncArgs = None
self._CILPolicyProvides = {}
self.pythonSysPathMap = {}
self.pythonModuleFinderMap = {}
self.troveDeps = {}
policy.Policy.__init__(self, *args, **keywords)
self.depCache = self.dbDepCacheClass(self._getDb())
ISD = deps.InstructionSetDependency
TISD = deps.TargetInstructionSetDependency
instructionDeps = list(self.recipe._buildFlavor.iterDepsByClass(ISD))
instructionDeps += list(self.recipe._buildFlavor.iterDepsByClass(TISD))
self.allowableIsnSets = [ x.name for x in instructionDeps ]
def updateArgs(self, *args, **keywords):
# _privateDepMap is used only for Provides to talk to Requires
privateDepMap = keywords.pop('_privateDepMap', None)
if privateDepMap:
self._privateDepMap.update([privateDepMap])
sonameSubtrees = keywords.pop('sonameSubtrees', None)
if sonameSubtrees:
if type(sonameSubtrees) in (list, tuple):
self.sonameSubtrees.update(set(sonameSubtrees))
else:
self.sonameSubtrees.add(sonameSubtrees)
bootstrapPythonFlags = keywords.pop('bootstrapPythonFlags', None)
if bootstrapPythonFlags:
if type(bootstrapPythonFlags) in (list, tuple):
self.bootstrapPythonFlags.update(set(bootstrapPythonFlags))
else:
self.bootstrapPythonFlags.add(bootstrapPythonFlags)
# pass full set to Provides to share the exact same data
self.recipe.Provides(
_bootstrapPythonFlags=self.bootstrapPythonFlags)
bootstrapSysPath = keywords.pop('bootstrapSysPath', None)
if bootstrapSysPath:
if type(bootstrapSysPath) in (list, tuple):
self.bootstrapSysPath.extend(bootstrapSysPath)
else:
self.error('bootstrapSysPath must be list or tuple')
# pass full set to Provides to share the exact same data
self.recipe.Provides(
_bootstrapSysPath=self.bootstrapSysPath)
pythonFlagNamespace = keywords.pop('pythonFlagNamespace', None)
if pythonFlagNamespace is not None:
self.pythonFlagNamespace = pythonFlagNamespace
self.recipe.Provides(_pythonFlagNamespace=pythonFlagNamespace)
bootstrapPerlIncPath = keywords.pop('bootstrapPerlIncPath', None)
if bootstrapPerlIncPath:
if type(bootstrapPerlIncPath) in (list, tuple):
self.bootstrapPerlIncPath.extend(bootstrapPerlIncPath)
else:
self.error('bootstrapPerlIncPath must be list or tuple')
# pass full set to Provides to share the exact same data
self.recipe.Provides(
_bootstrapPerlIncPath=self.bootstrapPerlIncPath)
bootstrapRubyLibs = keywords.pop('bootstrapRubyLibs', None)
if bootstrapRubyLibs is not None:
if type(bootstrapRubyLibs) in (list, tuple):
self.bootstrapRubyLibs.extend(bootstrapRubyLibs)
else:
self.error('bootstrapRubyLibs must be list or tuple')
# pass full set to Provides to share the exact same data
self.recipe.Provides(
_bootstrapRubyLibs=self.bootstrapRubyLibs)
_CILPolicyProvides = keywords.pop('_CILPolicyProvides', None)
if _CILPolicyProvides:
self._CILPolicyProvides.update(_CILPolicyProvides)
rpath = keywords.pop('rpath', None)
if rpath:
if type(rpath) is str:
rpath = ('.*', rpath)
assert(type(rpath) == tuple)
self.rpathFixup.append(rpath)
exceptDeps = keywords.pop('exceptDeps', None)
if exceptDeps:
if type(exceptDeps) is str:
exceptDeps = ('.*', exceptDeps)
assert(type(exceptDeps) == tuple)
if type(exceptDeps[0]) is tuple:
self.exceptDeps.extend(exceptDeps)
else:
self.exceptDeps.append(exceptDeps)
if not hasattr(self.recipe, '_reqExceptDeps'):
self.recipe._reqExceptDeps = []
self.recipe._reqExceptDeps.extend(self.exceptDeps)
# Filter out trove deps that are not associated with a file.
if len(args) >= 2:
troves = []
component = re.compile('^[-a-zA-Z0-9]*:[a-zA-Z]+$')
for arg in args[1:]:
arg = arg % self.recipe.macros
# Make sure arg looks like a component
if not component.match(arg):
break
troves.append(arg.lstrip(':'))
else:
self.troveDeps[args[0]] = troves
args = ()
_dependency.updateArgs(self, *args, **keywords)
_addInfo.updateArgs(self, *args, **keywords)
def preProcess(self):
macros = self.macros
self.systemLibPaths = set(os.path.normpath(x % macros)
for x in self.sonameSubtrees)
self.bootstrapPythonFlags = set(x % macros
for x in self.bootstrapPythonFlags)
self.bootstrapSysPath = [x % macros for x in self.bootstrapSysPath]
if self.pythonFlagNamespace is not None:
self.pythonFlagNamespace = self.pythonFlagNamespace % macros
self.bootstrapPerlIncPath = [x % macros for x in self.bootstrapPerlIncPath]
# anything that any buildreqs have caused to go into ld.so.conf
# or ld.so.conf.d/*.conf is a system library by definition,
# but only look at paths, not (for example) "include" lines
if os.path.exists('/etc/ld.so.conf'):
self.systemLibPaths |= set(os.path.normpath(x.strip())
for x in file('/etc/ld.so.conf').readlines()
if x.startswith('/'))
for fileName in fixedglob.glob('/etc/ld.so.conf.d/*.conf'):
self.systemLibPaths |= set(os.path.normpath(x.strip())
for x in file(fileName).readlines()
if x.startswith('/'))
self.rpathFixup = [(filter.Filter(x, macros), y % macros)
for x, y in self.rpathFixup]
exceptDeps = []
for fE, rE in self.exceptDeps:
try:
exceptDeps.append((filter.Filter(fE, macros), re.compile(rE % macros)))
except sre_constants.error, e:
self.error('Bad regular expression %s for file spec %s: %s', rE, fE, e)
        self.exceptDeps = exceptDeps
_dependency.preProcess(self)
def postProcess(self):
self._delPythonRequiresModuleFinder()
components = {}
for comp in self.recipe.autopkg.getComponents():
components[comp.getName()] = comp
shortName = comp.getName().split(':')[1]
            # Mark component names with duplicates
if shortName in components:
components[shortName] = None
else:
components[shortName] = comp
# r.Requires('foo:runtime', 'msi')
# r.Requires('foo:runtime', ':msi')
# r.Requires('foo:runtime', 'bar:msi')
depClass = deps.TroveDependencies
for info, troves in self.troveDeps.iteritems():
# Sanity check inputs.
if ':' not in info:
self.error('package dependency %s not allowed', info)
return
for trove in troves:
if trove not in components:
self.error('no component named %s', trove)
return
if components[trove] is None:
self.error('specified component name matches multiple '
'components %s', trove)
return
# Add the trove dependency.
dep = deps.Dependency(info)
for trove in troves:
components[trove].requires.addDep(depClass, dep)
def doFile(self, path):
pkgs = self.recipe.autopkg.findComponents(path)
if not pkgs:
return
pkgFiles = [(x, x.getFile(path)) for x in pkgs]
# this file object used only for tests, not for doing packaging
f = pkgFiles[0][1]
macros = self.recipe.macros
fullpath = macros.destdir + path
m = self.recipe.magic[path]
if self._isELF(m, 'requires'):
isnset = m.contents['isnset']
if isnset in self.allowableIsnSets:
# only add requirements for architectures
# that we are actually building for (this may include
# major and minor architectures)
self._addELFRequirements(path, m, pkgFiles)
# now go through explicit requirements
for info in self.included:
for filt in self.included[info]:
if filt.match(path):
self._markManualRequirement(info, path, pkgFiles, m)
# now check for automatic dependencies besides ELF
if f.inode.perms() & 0111 and m and m.name == 'script':
interp = m.contents['interpreter']
if interp.strip().startswith('/') and self._checkInclusion(interp,
path):
# no interpreter string warning is in BadInterpreterPaths
if not (os.path.exists(interp) or
os.path.exists(macros.destdir+interp)):
# this interpreter not on system, warn
# cannot be an error to prevent buildReq loops
self.warn('interpreter "%s" (referenced in %s) missing',
interp, path)
# N.B. no special handling for /{,usr/}bin/env here;
# if there has been an exception to
# NormalizeInterpreterPaths, then it is a
# real dependency on the env binary
self._addRequirement(path, interp, [], pkgFiles,
deps.FileDependencies)
if (f.inode.perms() & 0111 and m and m.name == 'script' and
os.path.basename(m.contents['interpreter']).startswith('python')):
self._addPythonRequirements(path, fullpath, pkgFiles)
elif self._isPython(path):
self._addPythonRequirements(path, fullpath, pkgFiles)
if (f.inode.perms() & 0111 and m and m.name == 'script' and
os.path.basename(m.contents['interpreter']).startswith('ruby')):
self._addRubyRequirements(path, fullpath, pkgFiles, script=True)
elif '/ruby/' in path and path.endswith('.rb'):
self._addRubyRequirements(path, fullpath, pkgFiles, script=False)
if self._isCIL(m):
if not self.monodisPath:
self.monodisPath = self._getmonodis(macros, path)
if not self.monodisPath:
return
p = util.popen('%s --assemblyref %s' %(
self.monodisPath, fullpath))
for line in [ x.strip() for x in p.readlines() ]:
if ': Version=' in line:
ver = line.split('=')[1]
elif 'Name=' in line:
name = line.split('=')[1]
self._addRequirement(path, name, [ver], pkgFiles,
deps.CILDependencies)
p.close()
elif self.CILPolicyRE.match(path):
name, ver = self._CILPolicyProvides[path]
self._addRequirement(path, name, [ver], pkgFiles, deps.CILDependencies)
if self._isJava(m, 'requires'):
self._addJavaRequirements(path, m, pkgFiles)
db = self._getDb()
if self._isPerl(path, m, f):
perlReqs = self._getPerlReqs(path, fullpath)
for req in perlReqs:
thisReq = deps.parseDep('perl: ' + req)
if db.getTrovesWithProvides([thisReq]) or [
x for x in self.recipe.autopkg.getComponents()
if x.provides.satisfies(thisReq)]:
self._addRequirement(path, req, [], pkgFiles,
deps.PerlDependencies)
self.whiteOut(path, pkgFiles)
self.unionDeps(path, pkgFiles)
def _addJavaRequirements(self, path, m, pkgFiles):
if not hasattr(self.recipe, '_internalJavaDepMap'):
self.recipe._internalJavaDepMap = {}
fileDeps = self.recipe._internalJavaDepMap.get(path, {})
reqs = set()
for fpath, (fprov, freq) in fileDeps.items():
if freq is not None:
reqs.update(freq)
for req in reqs:
self._addRequirement(path, req, [], pkgFiles,
deps.JavaDependencies)
def whiteOut(self, path, pkgFiles):
# remove intentionally discarded dependencies
for pkg, _ in pkgFiles:
if self.exceptDeps and path in pkg.requiresMap:
depSet = deps.DependencySet()
for depClass, dep in pkg.requiresMap[path].iterDeps():
for filt, exceptRe in self.exceptDeps:
if filt.match(path):
matchName = '%s: %s' %(depClass.tagName, str(dep))
if exceptRe.match(matchName):
# found one to not copy
dep = None
break
if dep is not None:
depSet.addDep(depClass, dep)
pkg.requiresMap[path] = depSet
def unionDeps(self, path, pkgFiles):
# finally, package the dependencies up
for pkg, f in pkgFiles:
if path in pkg.requiresMap:
# files should not require items they provide directly. CNY-2177
f.requires.set(pkg.requiresMap[path] - f.provides())
pkg.requires.union(f.requires())
def _addELFRequirements(self, path, m, pkgFiles):
"""
Add ELF and abi dependencies, including paths when not shlibs
"""
def appendUnique(ul, items):
for item in items:
if item not in ul:
ul.append(item)
def _canonicalRPATH(rpath, glob=False):
# normalize all elements of RPATH
l = [ util.normpath(x) for x in rpath.split(':') ] # CNY-3425
# prune system paths and relative paths from RPATH
l = [ x for x in l
if x not in self.systemLibPaths and x.startswith('/') ]
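            # e.g. '/usr/lib64:$ORIGIN/../lib:/opt/myapp/lib' -> ['/opt/myapp/lib']
            # (illustrative only: assumes /usr/lib64 is in systemLibPaths;
            # system and relative entries are dropped)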
if glob:
destdir = self.macros.destdir
dlen = len(destdir)
gl = []
for item in l:
# prefer destdir elements
paths = util.braceGlob(destdir + item)
paths = [ os.path.normpath(x[dlen:]) for x in paths ]
appendUnique(gl, paths)
# then look on system
paths = util.braceGlob(item)
paths = [ os.path.normpath(x) for x in paths ]
appendUnique(gl, paths)
l = gl
return l
rpathList = []
def _findSonameInRpath(soname):
for rpath in rpathList:
destpath = '/'.join((self.macros.destdir, rpath, soname))
if os.path.exists(destpath):
return rpath
destpath = '/'.join((rpath, soname))
if os.path.exists(destpath):
return rpath
# didn't find anything
return None
# fixup should come first so that its path elements can override
# the included RPATH if necessary
if self.rpathFixup:
for f, rpath in self.rpathFixup:
if f.match(path):
# synthetic RPATH items are globbed
rpathList = _canonicalRPATH(rpath, glob=True)
break
if m and 'RPATH' in m.contents and m.contents['RPATH']:
rpathList += _canonicalRPATH(m.contents['RPATH'])
depSet = self._createELFDepSet(m, m.contents['requires'],
libPathMap=self._privateDepMap,
getRPATH=_findSonameInRpath,
path=path, isProvides=False)
for pkg, _ in pkgFiles:
self._addDepSetToMap(path, pkg.requiresMap, depSet)
def _getPythonRequiresSysPath(self, pathName):
# Generate the correct sys.path for finding the required modules.
# we use the built in site.py to generate a sys.path for the
# current system and another one where destdir is the root.
# note the below code is similar to code in Provides,
# but it creates an ordered path list with and without destdir prefix,
# while provides only needs a complete list without destdir prefix.
# Returns tuple:
# (sysPath, pythonModuleFinder, pythonVersion)
pythonPath, bootstrapPython = self._getPython(self.macros, pathName)
if not pythonPath:
return (None, None, None)
if pythonPath in self.pythonSysPathMap:
return self.pythonSysPathMap[pythonPath]
destdir = self.macros.destdir
libdir = self.macros.libdir
pythonVersion = self._getPythonVersion(pythonPath, destdir, libdir)
# Start with paths inside the destdir so that imports within a package
# are discovered correctly.
systemPaths = self._getPythonSysPath(pythonPath, destdir, libdir,
useDestDir=True)
# Now add paths from the system (or bootstrap python)
systemPaths += self._getPythonSysPath(pythonPath, destdir, libdir,
useDestDir=False)
if not bootstrapPython:
# update pythonTroveFlagCache to require correct flags
self._getPythonTroveFlags(pythonPath)
# Keep original order for use with the module finder.
sysPathForModuleFinder = list(systemPaths)
# Strip destdir and sort in descending order for converting paths to
# qualified python module names.
sysPath = sorted(set(self._stripDestDir(systemPaths, destdir)),
reverse=True)
# load module finder after sys.path is restored
# in case delayed importer is installed.
pythonModuleFinder = self._getPythonRequiresModuleFinder(
pythonPath, destdir, libdir, sysPathForModuleFinder,
bootstrapPython)
self.pythonSysPathMap[pythonPath] = (
sysPath, pythonModuleFinder, pythonVersion)
return self.pythonSysPathMap[pythonPath]
def _getPythonRequiresModuleFinder(self, pythonPath, destdir, libdir, sysPath, bootstrapPython):
if self.recipe.isCrossCompiling():
return None
if pythonPath not in self.pythonModuleFinderMap:
try:
self.pythonModuleFinderMap[pythonPath] = pydeps.moduleFinderProxy(pythonPath, destdir, libdir, sysPath, self.error)
except pydeps.ModuleFinderInitializationError, e:
if bootstrapPython:
# another case, like isCrossCompiling, where we cannot
# run pythonPath -- ModuleFinderInitializationError
# is raised before looking at any path, so should
# be consistent for any pythonPath
self.pythonModuleFinderMap[pythonPath] = None
else:
raise
return self.pythonModuleFinderMap[pythonPath]
def _delPythonRequiresModuleFinder(self):
for finder in self.pythonModuleFinderMap.values():
if finder is not None:
finder.close()
def _addPythonRequirements(self, path, fullpath, pkgFiles):
destdir = self.recipe.macros.destdir
destDirLen = len(destdir)
(sysPath, pythonModuleFinder, pythonVersion
)= self._getPythonRequiresSysPath(path)
if not sysPath:
# Probably a bad interpreter path
return
if not pythonModuleFinder:
# We cannot (reliably) determine runtime python requirements
# in the cross-compile case, so don't even try (for
# consistency).
return
pythonModuleFinder.load_file(fullpath)
data = pythonModuleFinder.getDepsForPath(fullpath)
if data['result'] != 'ok':
self.info('File %s is not a valid python file', path)
return
for depPath in data['paths']:
if not depPath:
continue
flags = None
absPath = None
if depPath.startswith(destdir):
depPath = depPath[destDirLen:]
flags = self._getPythonFlagsFromPath(depPath)
# The file providing this dependency is part of this package.
absPath = depPath
for sysPathEntry in sysPath:
if depPath.startswith(sysPathEntry):
newDepPath = depPath[len(sysPathEntry)+1:]
if newDepPath not in ('__init__', '__init__.py'):
# we don't allow bare __init__'s as dependencies.
                        # hopefully we'll find this at a deeper level
                        # in the sysPath
if flags is None:
# this is provided by the system, so we have
                            # to see which flags it is provided with
flags = self._getPythonFlags(depPath,
self.bootstrapPythonFlags)
depPath = newDepPath
break
if depPath.startswith('/'):
# a python file not found in sys.path will not have been
# provided, so we must not depend on it either
return
if not (depPath.endswith('.py') or depPath.endswith('.pyc') or
depPath.endswith('.so')):
# Not something we provide, so not something we can
# require either. Drop it and go on. We have seen
# this when a script in /usr/bin has ended up in the
# requires list.
continue
if depPath.endswith('module.so'):
# Strip 'module.so' from the end, make it a candidate
cands = [ depPath[:-9] + '.so', depPath ]
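                # e.g. depPath 'datetimemodule.so' yields the candidates
                # ['datetime.so', 'datetimemodule.so'] (illustrative)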
cands = [ self._normalizePythonDep(x) for x in cands ]
if absPath:
depName = self._checkPackagePythonDeps(pkgFiles, absPath,
cands, flags)
else:
depName = self._checkSystemPythonDeps(cands, flags)
else:
depName = self._normalizePythonDep(depPath)
if depName == '__future__':
continue
self._addRequirement(path, depName, flags, pkgFiles,
deps.PythonDependencies)
#if data['missing']:
# self.warn("Python file %s is missing requirements: %s" % (
# path, ', '.join(data['missing'])))
def _checkPackagePythonDeps(self, pkgFiles, depPath, depNames, flags):
# Try to match depNames against all current packages
        # Use the last value in depNames as the fallback value
assert depNames, "No dependencies passed"
for pkg, _ in pkgFiles:
if depPath in pkg:
fileProvides = pkg[depPath][1].provides()
if flags:
flags = [ (x, deps.FLAG_SENSE_REQUIRED) for x in flags ]
# Walk the depNames list in order, pick the first dependency
# available.
for dp in depNames:
depSet = deps.DependencySet()
depSet.addDep(deps.PythonDependencies,
deps.Dependency(dp, flags))
if fileProvides.intersection(depSet):
# this dep is provided
return dp
# If we got here, the file doesn't provide this dep. Return the last
# candidate and hope for the best
return depNames[-1]
def _checkSystemPythonDeps(self, depNames, flags):
if flags:
flags = [ (x, deps.FLAG_SENSE_REQUIRED) for x in flags ]
for dp in depNames:
depSet = deps.DependencySet()
depSet.addDep(deps.PythonDependencies, deps.Dependency(dp, flags))
troves = self.depCache.getProvides([depSet])
if troves:
return dp
return depNames[-1]
def _normalizePythonDep(self, depName):
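        # e.g. 'xml/sax/handler.py' -> 'xml.sax.handler',
        # 'foo/__init__.py' -> 'foo' (illustrative)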
# remove extension
depName = depName.rsplit('.', 1)[0]
depName = depName.replace('/', '.')
depName = depName.replace('.__init__', '')
depName = self.pythonInterpRE.sub('', depName)
return depName
def _addRubyRequirements(self, path, fullpath, pkgFiles, script=False):
macros = self.recipe.macros
destdir = macros.destdir
destDirLen = len(destdir)
if self.rubyInterpreter is None:
self.rubyInterpreter, bootstrap = self._getRuby(macros, path)
if not self.rubyInterpreter:
return
self.rubyInvocation, self.rubyLoadPath = self._getRubyLoadPath(
macros, self.rubyInterpreter, bootstrap)
self.rubyVersion = self._getRubyVersion(macros)
elif self.rubyInterpreter is False:
return
if not script:
if not util.isregular(fullpath) or os.path.islink(fullpath):
return
foundInLoadPath = False
for pathElement in self.rubyLoadPath:
if path.startswith(pathElement):
foundInLoadPath = True
break
if not foundInLoadPath:
return
        # This is a very limited hack, but will work for the 90% case;
        # better parsing may be written later.
# Note that we only honor "require" at the beginning of
# the line and only requirements enclosed in single quotes
# to avoid conditional requirements and requirements that
# do any sort of substitution. Because most ruby packages
# contain multiple ruby modules, getting 90% of the ruby
# dependencies will find most of the required packages in
# practice
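        # e.g. a line like:  require 'yaml'  produces the dependency
        # entry 'yaml' (illustrative)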
depEntries = [x.strip() for x in file(fullpath)
if x.startswith('require ') or
x.startswith('require(')]
depEntries = (x.split() for x in depEntries)
depEntries = (x[1].strip("\"'") for x in depEntries
if len(x) == 2 and x[1].startswith("'") and
x[1].endswith("'"))
depEntries = set(depEntries)
# I know of no way to ask ruby to report deps from scripts
# Unfortunately, so far it seems that there are too many
# Ruby modules which have code that runs in the body; this
# code runs slowly, has not been useful in practice for
# filtering out bogus dependencies, and has been hanging
# and causing other unintended side effects from modules
# that have code in the main body.
#if not script:
# depClosure = util.popen(r'''%s -e "require '%s'; puts $\""'''
# %(self.rubyInvocation%macros, fullpath)).readlines()
# depClosure = set([x.split('.')[0] for x in depClosure])
# # remove any entries from the guessed immediate requirements
# # that are not in the closure
# depEntries = set(x for x in depEntries if x in depClosure)
def _getDepEntryPath(depEntry):
for prefix in (destdir, ''):
for pathElement in self.rubyLoadPath:
for suffix in ('.rb', '.so'):
candidate = util.searchPath(
os.path.basename(depEntry) + suffix,
prefix + pathElement,
)
if candidate:
return candidate
return None
for depEntry in depEntries:
depEntryPath = _getDepEntryPath(depEntry)
if depEntryPath is None:
continue
if depEntryPath.startswith(destdir):
depPath = depEntryPath[destDirLen:]
else:
depPath = depEntryPath
flags = self._getRubyFlagsFromPath(depPath, self.rubyVersion)
self._addRequirement(path, depEntry, flags, pkgFiles,
deps.RubyDependencies)
def _fetchPerl(self):
"""
Cache the perl path and @INC path with -I%(destdir)s prepended to
each element if necessary
"""
if self.perlPath is not None:
return
macros = self.recipe.macros
self.perlPath, perlIncPath, perlDestInc = self._getperl(macros, self.recipe)
if perlDestInc:
self.perlIncArgs = perlDestInc
else:
self.perlIncArgs = ' '.join('-I'+x for x in perlIncPath)
def _getPerlReqs(self, path, fullpath):
if self.perlReqs is None:
self._fetchPerl()
if not self.perlPath:
# no perl == bootstrap, but print warning
self.info('Unable to find perl interpreter,'
' disabling perl: requirements')
self.perlReqs = False
return []
# get the base directory where conary lives. In a checked
            # out version, this would be .../conary/conary/build/packagepolicy.py
            # chop off the last 3 directories to find where
            # .../conary/ScanDeps and .../conary/scripts/perlreqs.pl live
basedir = '/'.join(sys.modules[__name__].__file__.split('/')[:-3])
scandeps = '/'.join((basedir, 'conary/ScanDeps'))
if (os.path.exists(scandeps) and
os.path.exists('%s/scripts/perlreqs.pl' % basedir)):
perlreqs = '%s/scripts/perlreqs.pl' % basedir
else:
# we assume that conary is installed in
# $prefix/$libdir/python?.?/site-packages. Use this
# assumption to find the prefix for
# /usr/lib/conary and /usr/libexec/conary
regexp = re.compile(r'(.*)/lib(64){0,1}/python[1-9].[0-9]/site-packages')
match = regexp.match(basedir)
if not match:
# our regexp didn't work. fall back to hardcoded
# paths
prefix = '/usr'
else:
prefix = match.group(1)
# ScanDeps is not architecture specific
scandeps = '%s/lib/conary/ScanDeps' %prefix
if not os.path.exists(scandeps):
# but it might have been moved to lib64 for multilib
scandeps = '%s/lib64/conary/ScanDeps' %prefix
perlreqs = '%s/libexec/conary/perlreqs.pl' %prefix
self.perlReqs = '%s -I%s %s %s' %(
self.perlPath, scandeps, self.perlIncArgs, perlreqs)
if self.perlReqs is False:
return []
cwd = os.getcwd()
os.chdir(os.path.dirname(fullpath))
try:
p = os.popen('%s %s' %(self.perlReqs, fullpath))
finally:
try:
os.chdir(cwd)
except:
pass
reqlist = [x.strip().split('//') for x in p.readlines()]
# make sure that the command completed successfully
rc = p.close()
if rc:
# make sure that perl didn't blow up
assert(os.WIFEXITED(rc))
            # Apparently ScanDeps could not handle this input
return []
# we care only about modules right now
# throwing away the filenames for now, but we might choose
# to change that later
reqlist = [x[2] for x in reqlist if x[0] == 'module']
# foo/bar/baz.pm -> foo::bar::baz
reqlist = ['::'.join(x.split('/')).rsplit('.', 1)[0] for x in reqlist]
return reqlist
def _markManualRequirement(self, info, path, pkgFiles, m):
flags = []
if self._checkInclusion(info, path):
if info[0] == '/':
depClass = deps.FileDependencies
elif info.startswith('file:') and info[5:].strip()[0] == '/':
info = info[5:].strip()
depClass = deps.FileDependencies
elif info.startswith('soname:'):
if not m or m.name != 'ELF':
# only an ELF file can have a soname requirement
return
# we need to synthesize a dependency that encodes the
# same ABI as this binary
depClass = deps.SonameDependencies
for depType, dep, f in m.contents['requires']:
if depType == 'abi':
flags = tuple(x == 'Linux' and 'SysV' or x
for x in f) # CNY-3604
info = '%s/%s' %(dep, info.split(None, 1)[1])
info = os.path.normpath(info)
else: # by process of elimination, must be a trove
if info.startswith('group-'):
self.error('group dependency %s not allowed', info)
return
if info.startswith('fileset-'):
self.error('fileset dependency %s not allowed', info)
return
if ':' not in info:
self.error('package dependency %s not allowed', info)
return
depClass = deps.TroveDependencies
self._addRequirement(path, info, flags, pkgFiles, depClass)
def _checkInclusion(self, info, path):
if info in self.excluded:
for filt in self.excluded[info]:
# exception handling is per-requirement,
# so handled specially
if filt.match(path):
self.info('ignoring requirement match for %s: %s',
path, info)
return False
return True
def _addRequirement(self, path, info, flags, pkgFiles, depClass):
if depClass == deps.FileDependencies:
pathMap = self.recipe.autopkg.pathMap
componentMap = self.recipe.autopkg.componentMap
if (info in pathMap and not
componentMap[info][info][1].flags.isPathDependencyTarget()):
# if a package requires a file, includes that file,
# and does not provide that file, it should error out
self.error('%s requires %s, which is included but not'
' provided; use'
" r.Provides('file', '%s')", path, info, info)
return
# in some cases, we get literal "(flags)" from the recipe
if '(' in info:
flagindex = info.index('(')
flags = set(info[flagindex+1:-1].split() + list(flags))
info = info.split('(')[0]
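            # e.g. 'mailbase:runtime(flag1 flag2)' -> info 'mailbase:runtime',
            # flags containing 'flag1' and 'flag2' (illustrative)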
# CNY-3443
if depClass in self.removeFlagsByDependencyClassMap:
flags = set(flags)
for ignoreItem in self.removeFlagsByDependencyClassMap[depClass]:
if isinstance(ignoreItem, set):
ignoreFlags = ignoreItem
else:
ignoreFlags = set(f for f in flags if ignoreItem.match(f))
flags -= ignoreFlags
if flags:
flags = [ (x, deps.FLAG_SENSE_REQUIRED) for x in flags ]
for pkg, _ in pkgFiles:
# we may need to create a few more DependencySets.
if path not in pkg.requiresMap:
pkg.requiresMap[path] = deps.DependencySet()
pkg.requiresMap[path].addDep(depClass, deps.Dependency(info, flags))
class _basePluggableRequires(Requires):
"""
Base class for pluggable Requires policies.
"""
    # This set of policies gets executed before the Requires policy,
# and inherits the Requires' ordering constraints
requires = list(Requires.requires) + [
('Requires', policy.REQUIRED_SUBSEQUENT),
]
def preProcess(self):
# We want to inherit the exceptions from the Requires class, so we
        # need to peek into the Requires policy object. We can still pass
# explicit exceptions into the pluggable sub-policies, and they will
# only apply to the sub-policy.
exceptions = self.recipe._policyMap['Requires'].exceptions
if exceptions:
Requires.updateArgs(self, exceptions=exceptions,
allowUnusedFilters = True)
Requires.preProcess(self)
def reportErrors(self, *args, **kwargs):
return self.recipe._policyMap['Requires'].reportErrors(*args, **kwargs)
def error(self, *args, **kwargs):
return self.recipe._policyMap['Requires'].error(*args, **kwargs)
def warn(self, *args, **kwargs):
return self.recipe._policyMap['Requires'].warn(*args, **kwargs)
def info(self, *args, **kwargs):
return self.recipe._policyMap['Requires'].info(*args, **kwargs)
def _addClassName(self, *args, **kwargs):
return self.recipe._policyMap['Requires']._addClassName(*args, **kwargs)
def doFile(self, path):
pkgs = self.recipe.autopkg.findComponents(path)
if not pkgs:
return
pkgFiles = [(x, x.getFile(path)) for x in pkgs]
macros = self.recipe.macros
fullpath = macros.destdir + path
self.addPluggableRequirements(path, fullpath, pkgFiles, macros)
self.whiteOut(path, pkgFiles)
self.unionDeps(path, pkgFiles)
def addPluggableRequirements(self, path, fullpath, pkgFiles, macros):
"""Override in subclasses"""
pass
class RemoveSelfProvidedRequires(policy.Policy):
"""
This policy is used to remove component requirements when they are provided
by the component itself.
Do not call it directly; it is for internal use only.
"""
bucket = policy.PACKAGE_CREATION
requires = (
('Requires', policy.REQUIRED_PRIOR),
)
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
def do(self):
if use.Use.bootstrap._get():
return
for comp in self.recipe.autopkg.getComponents():
comp.requires -= comp.provides
class Flavor(policy.Policy):
"""
NAME
====
B{C{r.Flavor()}} - Controls the Flavor mechanism
SYNOPSIS
========
C{r.Flavor([I{filterexp}] | [I{exceptions=filterexp}])}
DESCRIPTION
===========
The C{r.Flavor} policy marks files with the appropriate Flavor.
To except a file's flavor from being marked, use:
C{r.Flavor(exceptions='I{filterexp}')}.
EXAMPLES
========
C{r.Flavor(exceptions='%(crossprefix)s/lib/gcc-lib/.*')}
Files in the directory C{%(crossprefix)s/lib/gcc-lib} are being excepted
from having their Flavor marked, because they are not flavored for
the system on which the trove is being installed.
"""
bucket = policy.PACKAGE_CREATION
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('Requires', policy.REQUIRED_PRIOR),
# For example: :lib component contains only a single packaged empty
# directory, which must be artificially flavored for multilib
('ExcludeDirectories', policy.REQUIRED_PRIOR),
)
filetree = policy.PACKAGE
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
def preProcess(self):
self.libRe = re.compile(
'^(%(libdir)s'
'|/%(lib)s'
'|%(x11prefix)s/%(lib)s'
'|%(krbprefix)s/%(lib)s)(/|$)' %self.recipe.macros)
        self.libReException = re.compile(
            '^/usr/(lib|%(lib)s)/(python|ruby).*$' %self.recipe.macros)
self.baseIsnset = use.Arch.getCurrentArch()._name
self.baseArchFlavor = use.Arch.getCurrentArch()._toDependency()
self.archFlavor = use.createFlavor(None, use.Arch._iterUsed())
self.packageFlavor = deps.Flavor()
self.troveMarked = False
self.componentMap = self.recipe.autopkg.componentMap
ISD = deps.InstructionSetDependency
TISD = deps.TargetInstructionSetDependency
instructionDeps = list(self.recipe._buildFlavor.iterDepsByClass(ISD))
instructionDeps += list(self.recipe._buildFlavor.iterDepsByClass(TISD))
self.allowableIsnSets = [ x.name for x in instructionDeps ]
def postProcess(self):
# If this is a Windows package, include the flavor from the windows
# helper.
if (self._getTarget() == TARGET_WINDOWS and
hasattr(self.recipe, 'winHelper')):
flavorStr = self.recipe.winHelper.flavor
if flavorStr:
self.packageFlavor.union(deps.parseFlavor(flavorStr))
# all troves need to share the same flavor so that we can
# distinguish them later
for pkg in self.recipe.autopkg.components.values():
pkg.flavor.union(self.packageFlavor)
def hasLibInPath(self, path):
return self.libRe.match(path) and not self.libReException.match(path)
def hasLibInDependencyFlag(self, path, f):
for depType in (deps.PythonDependencies, deps.RubyDependencies):
for dep in ([x for x in f.requires.deps.iterDepsByClass(depType)] +
[x for x in f.provides.deps.iterDepsByClass(depType)]):
flagNames = [x[0] for x in dep.getFlags()[0]]
flagNames = [x for x in flagNames if x.startswith('lib')]
if flagNames:
return True
return False
def doFile(self, path):
autopkg = self.recipe.autopkg
pkg = autopkg.findComponent(path)
if pkg is None:
return
f = pkg.getFile(path)
m = self.recipe.magic[path]
if m and m.name == 'ELF' and 'isnset' in m.contents:
isnset = m.contents['isnset']
elif self.hasLibInPath(path) or self.hasLibInDependencyFlag(path, f):
# all possible paths in a %(lib)s-derived path get default
# instruction set assigned if they don't have one already
if f.hasContents:
isnset = self.baseIsnset
else:
# this file can't be marked by arch, but the troves
# and package must be. (e.g. symlinks and empty directories)
# we don't need to union in the base arch flavor more
# than once.
if self.troveMarked:
return
self.packageFlavor.union(self.baseArchFlavor)
self.troveMarked = True
return
else:
return
flv = deps.Flavor()
flv.addDep(deps.InstructionSetDependency, deps.Dependency(isnset, []))
# get the Arch.* dependencies
# set the flavor for the file to match that discovered in the
# magic - but do not let that propagate up to the flavor of
# the package - instead the package will have the flavor that
# it was cooked with. This is to avoid unnecessary or extra files
        # causing the entire package to be flavored inappropriately.
# Such flavoring requires a bunch of Flavor exclusions to fix.
# Note that we need to set all shared paths between containers
# to share flavors and ensure that fileIds are the same
for pkg in autopkg.findComponents(path):
f = pkg.getFile(path)
f.flavor.set(flv)
# get the Arch.* dependencies
flv.union(self.archFlavor)
if isnset in self.allowableIsnSets:
self.packageFlavor.union(flv)
class _ProcessInfoPackage(policy.UserGroupBasePolicy):
bucket = policy.PACKAGE_CREATION
requires = (
('PackageSpec', policy.REQUIRED_PRIOR),
('ComponentSpec', policy.REQUIRED_PRIOR),
('Provides', policy.CONDITIONAL_PRIOR),
('Requires', policy.CONDITIONAL_PRIOR),
('Config', policy.CONDITIONAL_PRIOR),
('InitialContents', policy.CONDITIONAL_PRIOR)
)
def preProcess(self):
if self.exceptions:
self.error('%s does not honor exceptions' % self.__class__.__name__)
self.exceptions = None
if self.inclusions:
self.inclusions = None
def doFile(self, path):
expectedName = 'info-%s:%s' % (os.path.basename(path), self.component)
comp = self.recipe.autopkg.componentMap[path]
compName = comp.name
if not isinstance(comp.getFile(path), files.RegularFile):
self.error("Only regular files may appear in '%s'" % expectedName)
return
if len(comp) > 1:
badPaths = [x for x in comp if x != path]
self.error("The following files are not allowed in '%s': '%s'" % \
(compName, "', '".join(badPaths)))
else:
fileObj = comp[path][1]
for tag in fileObj.tags():
self.error("TagSpec '%s' is not allowed for %s" % \
(tag, expectedName))
fileObj.tags.set('%s-info' % self.component)
fileObj.flags.isTransient(True)
self.parseError = False
self.addProvides(path)
if not self.parseError:
self.addRequires(path)
def parseInfoFile(self, path):
infoname = "info-%s:%s" % (os.path.basename(path), self.component)
data = {}
try:
data = dict([x.strip().split('=', 1) \
for x in open(path).readlines()])
extraKeys = set(data.keys()).difference(self.legalKeys)
if extraKeys:
for key in extraKeys:
self.error("%s is not is not a valid value for %s" % \
(key, infoname))
self.parseError = True
except ValueError:
self.error("Unable to parse info file for '%s'" % infoname)
self.parseError = True
return data
def addProvides(self, path):
realpath, fileObj = self.recipe.autopkg.findComponent(path)[path]
data = self.parseInfoFile(realpath)
pkg = self.recipe.autopkg.componentMap[path]
infoname = os.path.basename(path)
if path in pkg.providesMap:
# only deps related to userinfo/troveinfo are allowed
self.error("Illegal provision for 'info-%s:%s': '%s'" % \
(infoname, self.component, str(pkg.providesMap[path])))
pkg.providesMap[path] = deps.DependencySet()
depSet = self.getProvides(infoname, data)
fileObj.provides.set(depSet)
pkg.providesMap[path].union(depSet)
pkg.provides.union(depSet)
def addRequires(self, path):
realpath, fileObj = self.recipe.autopkg.findComponent(path)[path]
data = self.parseInfoFile(realpath)
pkg = self.recipe.autopkg.componentMap[path]
infoname = os.path.basename(path)
if path in pkg.requiresMap:
# only deps related to userinfo/troveinfo are allowed
self.error("Illegal requirement on 'info-%s:%s': '%s'" % \
(infoname, self.component, str(pkg.requiresMap[path])))
pkg.requiresMap[path] = deps.DependencySet()
depSet = self.getRequires(infoname, data)
fileObj.requires.set(depSet)
pkg.requiresMap[path].union(depSet)
pkg.requires.union(depSet)
class ProcessUserInfoPackage(_ProcessInfoPackage):
"""
NAME
====
B{C{r.ProcessUserInfoPackage()}} - Set dependencies and tags for User
info packages
SYNOPSIS
========
C{r.ProcessUserInfoPackage()}
DESCRIPTION
===========
The C{r.ProcessUserInfoPackage} policy automatically sets up provides
    and requires, as well as tags, for user info files created by the
C{r.User} build action.
This policy is not intended to be invoked from recipes. Do not use it.
"""
invariantsubtrees = ['%(userinfodir)s']
component = 'user'
legalKeys = ['PREFERRED_UID', 'GROUP', 'GROUPID', 'HOMEDIR', 'COMMENT',
'SHELL', 'SUPPLEMENTAL', 'PASSWORD']
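    # A user info file is a set of KEY=value lines; a hypothetical example:
    #   PREFERRED_UID=27
    #   GROUP=mysql
    #   HOMEDIR=/var/lib/mysql
    #   SHELL=/sbin/nologin
    #   SUPPLEMENTAL=daemon,wheel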
def parseInfoFile(self, path):
if self.recipe._getCapsulePathsForFile(path):
return {}
data = _ProcessInfoPackage.parseInfoFile(self, path)
if data:
supplemental = data.get('SUPPLEMENTAL')
if supplemental is not None:
data['SUPPLEMENTAL'] = supplemental.split(',')
return data
def getProvides(self, infoname, data):
depSet = deps.DependencySet()
groupname = data.get('GROUP', infoname)
depSet.addDep(deps.UserInfoDependencies,
deps.Dependency(infoname, []))
if self.recipe._provideGroup.get(infoname, True):
depSet.addDep(deps.GroupInfoDependencies,
deps.Dependency(groupname, []))
return depSet
def getRequires(self, infoname, data):
groupname = data.get('GROUP', infoname)
supp = data.get('SUPPLEMENTAL', [])
depSet = deps.DependencySet()
for grpDep in supp:
depSet.addDep(deps.GroupInfoDependencies,
deps.Dependency(grpDep, []))
if not self.recipe._provideGroup.get(infoname):
depSet.addDep(deps.GroupInfoDependencies,
deps.Dependency(groupname, []))
return depSet
class ProcessGroupInfoPackage(_ProcessInfoPackage):
"""
NAME
====
B{C{r.ProcessGroupInfoPackage()}} - Set dependencies and tags for Group
info packages
SYNOPSIS
========
C{r.ProcessGroupInfoPackage()}
DESCRIPTION
===========
The C{r.ProcessGroupInfoPackage} policy automatically sets up provides
    and requires, as well as tags, for group info files created by the
C{r.Group} and C{r.SupplementalGroup} build actions.
This policy is not intended to be invoked from recipes. Do not use it.
"""
invariantsubtrees = ['%(groupinfodir)s']
component = 'group'
legalKeys = ['PREFERRED_GID', 'USER']
def getProvides(self, groupname, data):
depSet = deps.DependencySet()
depSet.addDep(deps.GroupInfoDependencies,
deps.Dependency(groupname, []))
return depSet
def getRequires(self, groupname, data):
infoname = data.get('USER')
depSet = deps.DependencySet()
if infoname:
depSet.addDep(deps.UserInfoDependencies,
deps.Dependency(infoname, []))
return depSet
class reportExcessBuildRequires(policy.Policy):
"""
NAME
====
B{C{r.reportExcessBuildRequires()}} - suggest items to remove from C{buildRequires} list
SYNOPSIS
========
C{r.reportExcessBuildRequires('required:component')}
C{r.reportExcessBuildRequires(['list:of', 'required:components'])}
DESCRIPTION
===========
The C{r.reportExcessBuildRequires()} policy is used to report
together all suggestions for possible items to remove from the
C{buildRequires} list.
The suggestions provided by this policy are build requirements
listed in the recipe's C{buildRequires} list for which Conary
has not specifically discovered a need. Build requirement
discovery is not perfect, which means that even though this
policy prints a warning that a build requirement might not be
necessary, Conary does not know that it is definitely not needed.
These are only hints. If you are not sure whether a component
should be removed from the C{buildRequires} list, it is safer
to leave it in the list. This is because an extra component
in the C{buildRequires} list is very unlikely to cause trouble,
but a truly missing component causes failure (by definition).
Because dependencies on C{:runtime} components are the least
likely dependencies to be discovered automatically, this policy
currently does not recommend removing any C{:runtime} components.
EXAMPLES
========
This policy is normally called only internally by other Conary
policies. However, a recipe can report build requirements
that are known by the recipe maintainer to be required but
which Conary does not discover automatically by passing a
list of these components. For example, if this policy
says that C{foo:devel} and C{blah:perl} are possible extra
build requirements, but you know that they are required in
order to correctly build the included software, you can
turn off the warnings like this:
C{r.reportExcessBuildRequires(['foo:devel', 'blah:perl'])}
This will tell the C{reportExcessBuildRequires} policy that
C{foo:devel} and C{blah:perl} are known to be required to
build the package.
No regular expressions are honored.
"""
bucket = policy.ERROR_REPORTING
processUnmodified = True
filetree = policy.NO_FILES
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
def __init__(self, *args, **keywords):
self.found = set()
policy.Policy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
for arg in args:
if type(arg) in (list, tuple, set):
self.found.update(arg)
else:
self.found.add(arg)
def do(self):
# If absolutely no buildRequires were found automatically,
# assume that the buildRequires list has been carefully crafted
# for some reason that the buildRequires enforcement policy
# doesn't yet support, and don't warn that all of the listed
# buildRequires might be excessive.
if self.found and self.recipe._logFile:
r = self.recipe
def getReqNames(key):
return set(x.split('=')[0] for x in r._recipeRequirements[key])
recipeReqs = getReqNames('buildRequires')
superReqs = getReqNames('buildRequiresSuper')
foundPackages = set(x.split(':')[0] for x in self.found)
superClosure = r._getTransitiveDepClosure(superReqs)
foundClosure = r._getTransitiveDepClosure(self.found)
def removeCore(candidates):
# conary, python, and setup are always required; gcc
# is often an implicit requirement, and sqlite:lib is
                # listed explicitly to make bootstrapping easier
return set(x for x in candidates if
not x.startswith('conary')
and not x.startswith('python:')
and not x.startswith('gcc:')
and not x in ('libgcc:devellib',
'setup:runtime',
'sqlite:lib'))
def removeSome(candidates):
# at this point, we don't have good enough detection
# of :runtime in particular to recommend getting rid
# of it
return set(x for x in removeCore(candidates) if
not x.endswith(':runtime'))
def removeDupComponents(candidates):
# If any component is required, we don't really need
# to flag others as excessive in superclass excess
return set(x for x in candidates
if x.split(':')[0] not in foundPackages)
# for superclass reqs
excessSuperReqs = superReqs - foundClosure
if excessSuperReqs:
# note that as this is for debugging only, we do not
# remove runtime requirements
deDupedSuperReqs = sorted(list(
removeDupComponents(removeCore(excessSuperReqs))))
if deDupedSuperReqs:
self._reportExcessSuperclassBuildRequires(deDupedSuperReqs)
excessReqs = recipeReqs - self.found
redundantReqs = recipeReqs.intersection(superClosure)
if excessReqs or redundantReqs:
excessBuildRequires = sorted(list(
removeSome(excessReqs.union(redundantReqs))))
# all potential excess build requires might have
# been removed by removeSome
if excessBuildRequires:
self._reportExcessBuildRequires(excessBuildRequires)
def _reportExcessBuildRequires(self, reqList):
self.recipe._logFile.reportExcessBuildRequires(
sorted(list(reqList)))
def _reportExcessSuperclassBuildRequires(self, reqList):
self.recipe._logFile.reportExcessSuperclassBuildRequires(
sorted(list(reqList)))
class reportMissingBuildRequires(policy.Policy):
"""
This policy is used to report together all suggestions for
additions to the C{buildRequires} list.
Do not call it directly; it is for internal use only.
"""
bucket = policy.ERROR_REPORTING
processUnmodified = True
filetree = policy.NO_FILES
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
def __init__(self, *args, **keywords):
self.errors = set()
policy.Policy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
for arg in args:
if type(arg) in (list, tuple, set):
self.errors.update(arg)
else:
self.errors.add(arg)
def do(self):
if self.errors and self.recipe._logFile:
self.recipe._logFile.reportMissingBuildRequires(
sorted(list(self.errors)))
class reportErrors(policy.Policy, policy.GroupPolicy):
"""
This policy is used to report together all package errors.
Do not call it directly; it is for internal use only.
"""
bucket = policy.ERROR_REPORTING
processUnmodified = True
filetree = policy.NO_FILES
groupError = False
supported_targets = (TARGET_LINUX, TARGET_WINDOWS)
def __init__(self, *args, **keywords):
self.errors = []
policy.Policy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
"""
Called once, with printf-style arguments, for each warning.
"""
self.errors.append(args[0] %tuple(args[1:]))
groupError = keywords.pop('groupError', None)
if groupError is not None:
self.groupError = groupError
def do(self):
if self.errors:
msg = self.groupError and 'Group' or 'Package'
raise policy.PolicyError, ('%s Policy errors found:\n%%s' % msg) \
% "\n".join(self.errors)
class _TroveScript(policy.PackagePolicy):
processUnmodified = False
keywords = { 'contents' : None }
_troveScriptName = None
def __init__(self, *args, **keywords):
policy.PackagePolicy.__init__(self, *args, **keywords)
def updateArgs(self, *args, **keywords):
if args:
troveNames = args
else:
troveNames = [ self.recipe.name ]
self.troveNames = troveNames
policy.PackagePolicy.updateArgs(self, **keywords)
def do(self):
if not self.contents:
return
# Build component map
availTroveNames = dict((x.name, None) for x in
self.recipe.autopkg.getComponents())
availTroveNames.update(self.recipe.packages)
troveNames = set(self.troveNames) & set(availTroveNames)
# We don't support compatibility classes for troves (yet)
self.recipe._addTroveScript(troveNames, self.contents,
self._troveScriptName, None)
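    # The subclasses below expose these scripts to recipes; a hypothetical
    # recipe usage would be:
    #   r.ScriptPostInstall(contents='#!/bin/sh\necho configured\n')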
class ScriptPreUpdate(_TroveScript):
_troveScriptName = 'preUpdate'
class ScriptPostUpdate(_TroveScript):
_troveScriptName = 'postUpdate'
class ScriptPreInstall(_TroveScript):
_troveScriptName = 'preInstall'
class ScriptPostInstall(_TroveScript):
_troveScriptName = 'postInstall'
class ScriptPreErase(_TroveScript):
_troveScriptName = 'preErase'
class ScriptPostErase(_TroveScript):
_troveScriptName = 'postErase'
class ScriptPreRollback(_TroveScript):
_troveScriptName = 'preRollback'
class ScriptPostRollback(_TroveScript):
_troveScriptName = 'postRollback'
| sassoftware/conary | conary/build/packagepolicy.py | Python | apache-2.0 | 195,877 |
import unittest
import pytest
from libweasyl import ratings
from weasyl.test import db_utils
from weasyl import character
@pytest.mark.usefixtures('db')
class SelectCountTestCase(unittest.TestCase):
def setUp(self):
self.user1 = db_utils.create_user()
self.user2 = db_utils.create_user()
self.friend1 = db_utils.create_user()
db_utils.create_friendship(self.user1, self.friend1)
self.count = 20
self.pivot = 5
s = db_utils.create_characters(self.count, self.user1, ratings.GENERAL.code)
self.pivotid = s[self.pivot]
def test_count_backid(self):
self.assertEqual(
self.count - self.pivot - 1,
character.select_count(self.user1, ratings.GENERAL.code, backid=self.pivotid))
def test_count_nextid(self):
self.assertEqual(
self.pivot,
character.select_count(self.user1, ratings.GENERAL.code, nextid=self.pivotid))
def test_see_friends_character(self):
"""
Should be able to see a friend's friends-only character in a listing.
"""
c = db_utils.create_character(self.friend1, friends_only=True)
self.assertEqual(
self.count + 1,
character.select_count(self.user1, ratings.GENERAL.code))
self.assertEqual(
c,
character.select_list(self.user1, ratings.GENERAL.code, 100)[0]['charid'])
def test_cannot_see_non_friends_character(self):
"""
        Should not be able to see a non-friend's friends-only character in a listing.
"""
db_utils.create_character(self.user2, friends_only=True)
self.assertEqual(
self.count,
character.select_count(self.user1, ratings.GENERAL.code))
def test_can_see_own_blocktag_character(self):
"""
Can see your own character in a listing even with a blocked tag.
"""
block_tagid = db_utils.create_tag("blocked")
db_utils.create_blocktag(self.user1, block_tagid, ratings.GENERAL.code)
charid = db_utils.create_character(self.user1, name="My blocktag character")
db_utils.create_character_tag(block_tagid, charid)
        # A character that we should NOT see.
other_charid = db_utils.create_character(self.user2, name="Other user's blocktag character")
db_utils.create_character_tag(block_tagid, other_charid)
self.assertEqual(
charid,
character.select_list(self.user1, ratings.GENERAL.code, 100)[0]['charid'])
def test_can_see_own_rating_character(self):
"""
Can see your own character in a listing even when it's above your max rating.
"""
charid = db_utils.create_character(self.user1, rating=ratings.EXPLICIT.code)
db_utils.create_character(self.user2, rating=ratings.EXPLICIT.code)
self.assertEqual(
charid,
character.select_list(self.user1, ratings.GENERAL.code, 100)[0]['charid'])
| Weasyl/weasyl | weasyl/test/test_character.py | Python | apache-2.0 | 2,994 |
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
from mri import MriServer
from mri.dispatch import MriServerDispatch
class TestMriServer(unittest.TestCase):
def test_new_dispatch(self):
server = MriServer("http://www.httpbin.com", "testuser", "testpass")
task = {"title": "TEST", "id": "000112233"}
dispatch = server.new_dispatch(task)
test_against = MriServerDispatch(task, "http://www.httpbin.com", "testuser", "testpass")
self.assertEqual(dispatch, test_against)
if __name__ == '__main__':
unittest.main()
| Mri-monitoring/Mri-python-client | tests/TestMriServer.py | Python | apache-2.0 | 750 |
from JumpScale import j
descr = """
This jumpscript returns network info
"""
category = "monitoring"
organization = "jumpscale"
author = "[email protected]"
license = "bsd"
version = "1.0"
roles = []
def action():
return j.sal.nettools.getNetworkInfo()
if __name__ == "__main__":
print(action())
| Jumpscale/jumpscale_core8 | apps/agentcontroller/jumpscripts/jumpscale/network_info.py | Python | apache-2.0 | 312 |
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests."""
import mock
import pandas
import pytest
from google.api_core import exceptions
from google.auth.credentials import AnonymousCredentials
from google.cloud import automl_v1beta1
from google.cloud.automl_v1beta1.proto import data_types_pb2
PROJECT = "project"
REGION = "region"
LOCATION_PATH = "projects/{}/locations/{}".format(PROJECT, REGION)
class TestTablesClient(object):
def tables_client(
self, client_attrs={}, prediction_client_attrs={}, gcs_client_attrs={}
):
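        # helper: build a TablesClient whose AutoML, prediction, and GCS
        # sub-clients are mocks configured from the given attribute dicts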
client_mock = mock.Mock(**client_attrs)
prediction_client_mock = mock.Mock(**prediction_client_attrs)
gcs_client_mock = mock.Mock(**gcs_client_attrs)
return automl_v1beta1.TablesClient(
client=client_mock,
prediction_client=prediction_client_mock,
gcs_client=gcs_client_mock,
project=PROJECT,
region=REGION,
)
def test_list_datasets_empty(self):
client = self.tables_client(
{
"list_datasets.return_value": [],
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_datasets()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_datasets.assert_called_with(LOCATION_PATH)
assert ds == []
def test_list_datasets_not_empty(self):
datasets = ["some_dataset"]
client = self.tables_client(
{
"list_datasets.return_value": datasets,
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_datasets()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_datasets.assert_called_with(LOCATION_PATH)
assert len(ds) == 1
assert ds[0] == "some_dataset"
def test_get_dataset_no_value(self):
dataset_actual = "dataset"
client = self.tables_client({}, {})
with pytest.raises(ValueError):
dataset = client.get_dataset()
client.auto_ml_client.get_dataset.assert_not_called()
def test_get_dataset_name(self):
dataset_actual = "dataset"
client = self.tables_client({"get_dataset.return_value": dataset_actual}, {})
dataset = client.get_dataset(dataset_name="my_dataset")
client.auto_ml_client.get_dataset.assert_called_with("my_dataset")
assert dataset == dataset_actual
def test_get_no_dataset(self):
client = self.tables_client(
{"get_dataset.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_dataset(dataset_name="my_dataset")
client.auto_ml_client.get_dataset.assert_called_with("my_dataset")
def test_get_dataset_from_empty_list(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.get_dataset(dataset_display_name="my_dataset")
def test_get_dataset_from_list_not_found(self):
client = self.tables_client(
{"list_datasets.return_value": [mock.Mock(display_name="not_it")]}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_dataset(dataset_display_name="my_dataset")
def test_get_dataset_from_list(self):
client = self.tables_client(
{
"list_datasets.return_value": [
mock.Mock(display_name="not_it"),
mock.Mock(display_name="my_dataset"),
]
},
{},
)
dataset = client.get_dataset(dataset_display_name="my_dataset")
assert dataset.display_name == "my_dataset"
def test_get_dataset_from_list_ambiguous(self):
client = self.tables_client(
{
"list_datasets.return_value": [
mock.Mock(display_name="my_dataset"),
mock.Mock(display_name="not_my_dataset"),
mock.Mock(display_name="my_dataset"),
]
},
{},
)
with pytest.raises(ValueError):
client.get_dataset(dataset_display_name="my_dataset")
def test_create_dataset(self):
client = self.tables_client(
{
"location_path.return_value": LOCATION_PATH,
"create_dataset.return_value": mock.Mock(display_name="name"),
},
{},
)
metadata = {"metadata": "values"}
dataset = client.create_dataset("name", metadata=metadata)
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.create_dataset.assert_called_with(
LOCATION_PATH, {"display_name": "name", "tables_dataset_metadata": metadata}
)
assert dataset.display_name == "name"
def test_delete_dataset(self):
dataset = mock.Mock()
dataset.configure_mock(name="name")
client = self.tables_client({"delete_dataset.return_value": None}, {})
client.delete_dataset(dataset=dataset)
client.auto_ml_client.delete_dataset.assert_called_with("name")
def test_delete_dataset_not_found(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
client.delete_dataset(dataset_display_name="not_found")
client.auto_ml_client.delete_dataset.assert_not_called()
def test_delete_dataset_name(self):
client = self.tables_client({"delete_dataset.return_value": None}, {})
client.delete_dataset(dataset_name="name")
client.auto_ml_client.delete_dataset.assert_called_with("name")
def test_export_not_found(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.export_data(dataset_display_name="name", gcs_input_uris="uri")
client.auto_ml_client.export_data.assert_not_called()
def test_export_gcs_uri(self):
client = self.tables_client({"export_data.return_value": None}, {})
client.export_data(dataset_name="name", gcs_output_uri_prefix="uri")
client.auto_ml_client.export_data.assert_called_with(
"name", {"gcs_destination": {"output_uri_prefix": "uri"}}
)
def test_export_bq_uri(self):
client = self.tables_client({"export_data.return_value": None}, {})
client.export_data(dataset_name="name", bigquery_output_uri="uri")
client.auto_ml_client.export_data.assert_called_with(
"name", {"bigquery_destination": {"output_uri": "uri"}}
)
def test_import_not_found(self):
client = self.tables_client({"list_datasets.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.import_data(dataset_display_name="name", gcs_input_uris="uri")
client.auto_ml_client.import_data.assert_not_called()
def test_import_pandas_dataframe(self):
client = self.tables_client(
gcs_client_attrs={
"bucket_name": "my_bucket",
"upload_pandas_dataframe.return_value": "uri",
}
)
dataframe = pandas.DataFrame({})
client.import_data(
project=PROJECT,
region=REGION,
dataset_name="name",
pandas_dataframe=dataframe,
)
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri"]}}
)
def test_import_pandas_dataframe_init_gcs(self):
client = automl_v1beta1.TablesClient(
client=mock.Mock(),
prediction_client=mock.Mock(),
project=PROJECT,
region=REGION,
credentials=AnonymousCredentials(),
)
dataframe = pandas.DataFrame({})
patch = mock.patch(
"google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient",
bucket_name="my_bucket",
)
with patch as MockGcsClient:
mockInstance = MockGcsClient.return_value
mockInstance.upload_pandas_dataframe.return_value = "uri"
client.import_data(dataset_name="name", pandas_dataframe=dataframe)
assert client.gcs_client is mockInstance
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri"]}}
)
def test_import_gcs_uri(self):
client = self.tables_client({"import_data.return_value": None}, {})
client.import_data(dataset_name="name", gcs_input_uris="uri")
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri"]}}
)
def test_import_gcs_uris(self):
client = self.tables_client({"import_data.return_value": None}, {})
client.import_data(dataset_name="name", gcs_input_uris=["uri", "uri"])
client.auto_ml_client.import_data.assert_called_with(
"name", {"gcs_source": {"input_uris": ["uri", "uri"]}}
)
def test_import_bq_uri(self):
client = self.tables_client({"import_data.return_value": None}, {})
client.import_data(dataset_name="name", bigquery_input_uri="uri")
client.auto_ml_client.import_data.assert_called_with(
"name", {"bigquery_source": {"input_uri": "uri"}}
)
def test_list_table_specs(self):
client = self.tables_client({"list_table_specs.return_value": None}, {})
client.list_table_specs(dataset_name="name")
client.auto_ml_client.list_table_specs.assert_called_with("name")
def test_list_table_specs_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("not found")}, {}
)
with pytest.raises(exceptions.NotFound):
client.list_table_specs(dataset_name="name")
client.auto_ml_client.list_table_specs.assert_called_with("name")
def test_get_table_spec(self):
client = self.tables_client({}, {})
client.get_table_spec("name")
client.auto_ml_client.get_table_spec.assert_called_with("name")
def test_get_column_spec(self):
client = self.tables_client({}, {})
client.get_column_spec("name")
client.auto_ml_client.get_column_spec.assert_called_with("name")
def test_list_column_specs(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [],
},
{},
)
client.list_column_specs(dataset_name="name")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
def test_update_column_spec_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.update_column_spec(dataset_name="name", column_spec_name="column2")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_not_called()
def test_update_column_spec_display_name_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.update_column_spec(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_not_called()
def test_update_column_spec_name_no_args(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column/2", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(dataset_name="name", column_spec_name="column/2")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{"name": "column/2", "data_type": {"type_code": "type_code"}}
)
def test_update_column_spec_no_args(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name", column_spec_display_name="column"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{"name": "column", "data_type": {"type_code": "type_code"}}
)
def test_update_column_spec_nullable(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name", column_spec_display_name="column", nullable=True
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{
"name": "column",
"data_type": {"type_code": "type_code", "nullable": True},
}
)
def test_update_column_spec_type_code(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name",
column_spec_display_name="column",
type_code="type_code2",
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{"name": "column", "data_type": {"type_code": "type_code2"}}
)
def test_update_column_spec_type_code_nullable(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name",
nullable=True,
column_spec_display_name="column",
type_code="type_code2",
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{
"name": "column",
"data_type": {"type_code": "type_code2", "nullable": True},
}
)
def test_update_column_spec_type_code_nullable_false(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
data_type_mock = mock.Mock(type_code="type_code")
column_spec_mock.configure_mock(
name="column", display_name="column", data_type=data_type_mock
)
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.update_column_spec(
dataset_name="name",
nullable=False,
column_spec_display_name="column",
type_code="type_code2",
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_column_spec.assert_called_with(
{
"name": "column",
"data_type": {"type_code": "type_code2", "nullable": False},
}
)
def test_set_target_column_table_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.set_target_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_not_called()
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_target_column_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.set_target_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_target_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="2",
weight_column_spec_id="2",
ml_use_column_spec_id="3",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_target_column(dataset_name="name", column_spec_display_name="column")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": "3",
},
}
)
def test_set_weight_column_table_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("err")}, {}
)
        with pytest.raises(exceptions.NotFound):
            client.set_weight_column(
                dataset_name="name", column_spec_display_name="column2"
            )
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_not_called()
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_weight_column_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.set_weight_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_weight_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/2", display_name="column")
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="1",
ml_use_column_spec_id="3",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_weight_column(dataset_name="name", column_spec_display_name="column")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": "3",
},
}
)
def test_clear_weight_column(self):
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="2",
ml_use_column_spec_id="3",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client({"get_dataset.return_value": dataset_mock}, {})
client.clear_weight_column(dataset_name="name")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": None,
"ml_use_column_spec_id": "3",
},
}
)
def test_set_test_train_column_table_not_found(self):
client = self.tables_client(
{"list_table_specs.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.set_test_train_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_not_called()
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_test_train_column_not_found(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/1", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
with pytest.raises(exceptions.NotFound):
client.set_test_train_column(
dataset_name="name", column_spec_display_name="column2"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_not_called()
def test_set_test_train_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/3", display_name="column")
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="2",
ml_use_column_spec_id="2",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_test_train_column(
dataset_name="name", column_spec_display_name="column"
)
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": "3",
},
}
)
def test_clear_test_train_column(self):
dataset_mock = mock.Mock()
tables_dataset_metadata_mock = mock.Mock()
tables_dataset_metadata_mock.configure_mock(
target_column_spec_id="1",
weight_column_spec_id="2",
ml_use_column_spec_id="2",
)
dataset_mock.configure_mock(
name="dataset", tables_dataset_metadata=tables_dataset_metadata_mock
)
client = self.tables_client({"get_dataset.return_value": dataset_mock}, {})
client.clear_test_train_column(dataset_name="name")
client.auto_ml_client.update_dataset.assert_called_with(
{
"name": "dataset",
"tables_dataset_metadata": {
"target_column_spec_id": "1",
"weight_column_spec_id": "2",
"ml_use_column_spec_id": None,
},
}
)
def test_set_time_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/3", display_name="column")
dataset_mock = mock.Mock()
dataset_mock.configure_mock(name="dataset")
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
},
{},
)
client.set_time_column(dataset_name="name", column_spec_display_name="column")
client.auto_ml_client.list_table_specs.assert_called_with("name")
client.auto_ml_client.list_column_specs.assert_called_with("table")
client.auto_ml_client.update_table_spec.assert_called_with(
{"name": "table", "time_column_spec_id": "3"}
)
def test_clear_time_column(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
dataset_mock = mock.Mock()
dataset_mock.configure_mock(name="dataset")
client = self.tables_client(
{
"get_dataset.return_value": dataset_mock,
"list_table_specs.return_value": [table_spec_mock],
},
{},
)
client.clear_time_column(dataset_name="name")
client.auto_ml_client.update_table_spec.assert_called_with(
{"name": "table", "time_column_spec_id": None}
)
def test_get_model_evaluation(self):
client = self.tables_client({}, {})
ds = client.get_model_evaluation(model_evaluation_name="x")
client.auto_ml_client.get_model_evaluation.assert_called_with("x")
def test_list_model_evaluations_empty(self):
client = self.tables_client({"list_model_evaluations.return_value": []}, {})
ds = client.list_model_evaluations(model_name="model")
client.auto_ml_client.list_model_evaluations.assert_called_with("model")
assert ds == []
def test_list_model_evaluations_not_empty(self):
evaluations = ["eval"]
client = self.tables_client(
{
"list_model_evaluations.return_value": evaluations,
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_model_evaluations(model_name="model")
client.auto_ml_client.list_model_evaluations.assert_called_with("model")
assert len(ds) == 1
assert ds[0] == "eval"
def test_list_models_empty(self):
client = self.tables_client(
{
"list_models.return_value": [],
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_models()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_models.assert_called_with(LOCATION_PATH)
assert ds == []
def test_list_models_not_empty(self):
models = ["some_model"]
client = self.tables_client(
{
"list_models.return_value": models,
"location_path.return_value": LOCATION_PATH,
},
{},
)
ds = client.list_models()
client.auto_ml_client.location_path.assert_called_with(PROJECT, REGION)
client.auto_ml_client.list_models.assert_called_with(LOCATION_PATH)
assert len(ds) == 1
assert ds[0] == "some_model"
def test_get_model_name(self):
model_actual = "model"
client = self.tables_client({"get_model.return_value": model_actual}, {})
model = client.get_model(model_name="my_model")
client.auto_ml_client.get_model.assert_called_with("my_model")
assert model == model_actual
def test_get_no_model(self):
client = self.tables_client(
{"get_model.side_effect": exceptions.NotFound("err")}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_model(model_name="my_model")
client.auto_ml_client.get_model.assert_called_with("my_model")
def test_get_model_from_empty_list(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.get_model(model_display_name="my_model")
def test_get_model_from_list_not_found(self):
client = self.tables_client(
{"list_models.return_value": [mock.Mock(display_name="not_it")]}, {}
)
with pytest.raises(exceptions.NotFound):
client.get_model(model_display_name="my_model")
def test_get_model_from_list(self):
client = self.tables_client(
{
"list_models.return_value": [
mock.Mock(display_name="not_it"),
mock.Mock(display_name="my_model"),
]
},
{},
)
model = client.get_model(model_display_name="my_model")
assert model.display_name == "my_model"
def test_get_model_from_list_ambiguous(self):
client = self.tables_client(
{
"list_models.return_value": [
mock.Mock(display_name="my_model"),
mock.Mock(display_name="not_my_model"),
mock.Mock(display_name="my_model"),
]
},
{},
)
with pytest.raises(ValueError):
client.get_model(model_display_name="my_model")
def test_delete_model(self):
model = mock.Mock()
model.configure_mock(name="name")
client = self.tables_client({"delete_model.return_value": None}, {})
client.delete_model(model=model)
client.auto_ml_client.delete_model.assert_called_with("name")
def test_delete_model_not_found(self):
client = self.tables_client({"list_models.return_value": []}, {})
client.delete_model(model_display_name="not_found")
client.auto_ml_client.delete_model.assert_not_called()
def test_delete_model_name(self):
client = self.tables_client({"delete_model.return_value": None}, {})
client.delete_model(model_name="name")
client.auto_ml_client.delete_model.assert_called_with("name")
def test_deploy_model_no_args(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.deploy_model()
client.auto_ml_client.deploy_model.assert_not_called()
def test_deploy_model(self):
client = self.tables_client({}, {})
client.deploy_model(model_name="name")
client.auto_ml_client.deploy_model.assert_called_with("name")
def test_deploy_model_not_found(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.deploy_model(model_display_name="name")
client.auto_ml_client.deploy_model.assert_not_called()
def test_undeploy_model(self):
client = self.tables_client({}, {})
client.undeploy_model(model_name="name")
client.auto_ml_client.undeploy_model.assert_called_with("name")
def test_undeploy_model_not_found(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.undeploy_model(model_display_name="name")
client.auto_ml_client.undeploy_model.assert_not_called()
def test_create_model(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock = mock.Mock()
column_spec_mock.configure_mock(name="column/2", display_name="column")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [column_spec_mock],
"location_path.return_value": LOCATION_PATH,
},
{},
)
client.create_model(
"my_model", dataset_name="my_dataset", train_budget_milli_node_hours=1000
)
client.auto_ml_client.create_model.assert_called_with(
LOCATION_PATH,
{
"display_name": "my_model",
"dataset_id": "my_dataset",
"tables_model_metadata": {"train_budget_milli_node_hours": 1000},
},
)
def test_create_model_include_columns(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock1 = mock.Mock()
column_spec_mock1.configure_mock(name="column/1", display_name="column1")
column_spec_mock2 = mock.Mock()
column_spec_mock2.configure_mock(name="column/2", display_name="column2")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [
column_spec_mock1,
column_spec_mock2,
],
"location_path.return_value": LOCATION_PATH,
},
{},
)
client.create_model(
"my_model",
dataset_name="my_dataset",
include_column_spec_names=["column1"],
train_budget_milli_node_hours=1000,
)
client.auto_ml_client.create_model.assert_called_with(
LOCATION_PATH,
{
"display_name": "my_model",
"dataset_id": "my_dataset",
"tables_model_metadata": {
"train_budget_milli_node_hours": 1000,
"input_feature_column_specs": [column_spec_mock1],
},
},
)
def test_create_model_exclude_columns(self):
table_spec_mock = mock.Mock()
# name is reserved in use of __init__, needs to be passed here
table_spec_mock.configure_mock(name="table")
column_spec_mock1 = mock.Mock()
column_spec_mock1.configure_mock(name="column/1", display_name="column1")
column_spec_mock2 = mock.Mock()
column_spec_mock2.configure_mock(name="column/2", display_name="column2")
client = self.tables_client(
{
"list_table_specs.return_value": [table_spec_mock],
"list_column_specs.return_value": [
column_spec_mock1,
column_spec_mock2,
],
"location_path.return_value": LOCATION_PATH,
},
{},
)
client.create_model(
"my_model",
dataset_name="my_dataset",
exclude_column_spec_names=["column1"],
train_budget_milli_node_hours=1000,
)
client.auto_ml_client.create_model.assert_called_with(
LOCATION_PATH,
{
"display_name": "my_model",
"dataset_id": "my_dataset",
"tables_model_metadata": {
"train_budget_milli_node_hours": 1000,
"input_feature_column_specs": [column_spec_mock2],
},
},
)
def test_create_model_invalid_hours_small(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model(
"my_model", dataset_name="my_dataset", train_budget_milli_node_hours=1
)
client.auto_ml_client.create_model.assert_not_called()
def test_create_model_invalid_hours_large(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model(
"my_model",
dataset_name="my_dataset",
train_budget_milli_node_hours=1000000,
)
client.auto_ml_client.create_model.assert_not_called()
def test_create_model_invalid_no_dataset(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model("my_model", train_budget_milli_node_hours=1000)
client.auto_ml_client.get_dataset.assert_not_called()
client.auto_ml_client.create_model.assert_not_called()
def test_create_model_invalid_include_exclude(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.create_model(
"my_model",
dataset_name="my_dataset",
include_column_spec_names=["a"],
exclude_column_spec_names=["b"],
train_budget_milli_node_hours=1000,
)
client.auto_ml_client.get_dataset.assert_not_called()
client.auto_ml_client.create_model.assert_not_called()
def test_predict_from_array(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec = mock.Mock(display_name="a", data_type=data_type)
model_metadata = mock.Mock(input_feature_column_specs=[column_spec])
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict(["1"], model_name="my_model")
client.prediction_client.predict.assert_called_with(
"my_model", {"row": {"values": [{"string_value": "1"}]}}, None
)
def test_predict_from_dict(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_a = mock.Mock(display_name="a", data_type=data_type)
column_spec_b = mock.Mock(display_name="b", data_type=data_type)
model_metadata = mock.Mock(
input_feature_column_specs=[column_spec_a, column_spec_b]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict({"a": "1", "b": "2"}, model_name="my_model")
client.prediction_client.predict.assert_called_with(
"my_model",
{"row": {"values": [{"string_value": "1"}, {"string_value": "2"}]}},
None,
)
def test_predict_from_dict_with_feature_importance(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_a = mock.Mock(display_name="a", data_type=data_type)
column_spec_b = mock.Mock(display_name="b", data_type=data_type)
model_metadata = mock.Mock(
input_feature_column_specs=[column_spec_a, column_spec_b]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict(
{"a": "1", "b": "2"}, model_name="my_model", feature_importance=True
)
client.prediction_client.predict.assert_called_with(
"my_model",
{"row": {"values": [{"string_value": "1"}, {"string_value": "2"}]}},
{"feature_importance": "true"},
)
def test_predict_from_dict_missing(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_a = mock.Mock(display_name="a", data_type=data_type)
column_spec_b = mock.Mock(display_name="b", data_type=data_type)
model_metadata = mock.Mock(
input_feature_column_specs=[column_spec_a, column_spec_b]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict({"a": "1"}, model_name="my_model")
client.prediction_client.predict.assert_called_with(
"my_model",
{"row": {"values": [{"string_value": "1"}, {"null_value": 0}]}},
None,
)
def test_predict_all_types(self):
float_type = mock.Mock(type_code=data_types_pb2.FLOAT64)
timestamp_type = mock.Mock(type_code=data_types_pb2.TIMESTAMP)
string_type = mock.Mock(type_code=data_types_pb2.STRING)
array_type = mock.Mock(type_code=data_types_pb2.ARRAY)
struct_type = mock.Mock(type_code=data_types_pb2.STRUCT)
category_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_float = mock.Mock(display_name="float", data_type=float_type)
column_spec_timestamp = mock.Mock(
display_name="timestamp", data_type=timestamp_type
)
column_spec_string = mock.Mock(display_name="string", data_type=string_type)
column_spec_array = mock.Mock(display_name="array", data_type=array_type)
column_spec_struct = mock.Mock(display_name="struct", data_type=struct_type)
column_spec_category = mock.Mock(
display_name="category", data_type=category_type
)
column_spec_null = mock.Mock(display_name="null", data_type=category_type)
model_metadata = mock.Mock(
input_feature_column_specs=[
column_spec_float,
column_spec_timestamp,
column_spec_string,
column_spec_array,
column_spec_struct,
column_spec_category,
column_spec_null,
]
)
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict(
{
"float": 1.0,
"timestamp": "EST",
"string": "text",
"array": [1],
"struct": {"a": "b"},
"category": "a",
"null": None,
},
model_name="my_model",
)
client.prediction_client.predict.assert_called_with(
"my_model",
{
"row": {
"values": [
{"number_value": 1.0},
{"string_value": "EST"},
{"string_value": "text"},
{"list_value": [1]},
{"struct_value": {"a": "b"}},
{"string_value": "a"},
{"null_value": 0},
]
}
},
None,
)
def test_predict_from_array_missing(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec = mock.Mock(display_name="a", data_type=data_type)
model_metadata = mock.Mock(input_feature_column_specs=[column_spec])
model = mock.Mock()
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
with pytest.raises(ValueError):
client.predict([], model_name="my_model")
client.prediction_client.predict.assert_not_called()
def test_batch_predict_pandas_dataframe(self):
client = self.tables_client(
gcs_client_attrs={
"bucket_name": "my_bucket",
"upload_pandas_dataframe.return_value": "gs://input",
}
)
dataframe = pandas.DataFrame({})
client.batch_predict(
project=PROJECT,
region=REGION,
model_name="my_model",
pandas_dataframe=dataframe,
gcs_output_uri_prefix="gs://output",
)
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"gcs_destination": {"output_uri_prefix": "gs://output"}},
)
def test_batch_predict_pandas_dataframe_init_gcs(self):
client = automl_v1beta1.TablesClient(
client=mock.Mock(),
prediction_client=mock.Mock(),
project=PROJECT,
region=REGION,
credentials=AnonymousCredentials(),
)
dataframe = pandas.DataFrame({})
patch = mock.patch(
"google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient",
bucket_name="my_bucket",
)
with patch as MockGcsClient:
mockInstance = MockGcsClient.return_value
mockInstance.upload_pandas_dataframe.return_value = "gs://input"
dataframe = pandas.DataFrame({})
client.batch_predict(
model_name="my_model",
pandas_dataframe=dataframe,
gcs_output_uri_prefix="gs://output",
)
client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION)
client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"gcs_destination": {"output_uri_prefix": "gs://output"}},
)
def test_batch_predict_gcs(self):
client = self.tables_client({}, {})
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"gcs_destination": {"output_uri_prefix": "gs://output"}},
)
def test_batch_predict_bigquery(self):
client = self.tables_client({}, {})
client.batch_predict(
model_name="my_model",
bigquery_input_uri="bq://input",
bigquery_output_uri="bq://output",
)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"bigquery_source": {"input_uri": "bq://input"}},
{"bigquery_destination": {"output_uri": "bq://output"}},
)
def test_batch_predict_mixed(self):
client = self.tables_client({}, {})
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
bigquery_output_uri="bq://output",
)
client.prediction_client.batch_predict.assert_called_with(
"my_model",
{"gcs_source": {"input_uris": ["gs://input"]}},
{"bigquery_destination": {"output_uri": "bq://output"}},
)
def test_batch_predict_missing_input_gcs_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
gcs_input_uris=None,
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_input_bigquery_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
bigquery_input_uri=None,
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_output_gcs_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
gcs_output_uri_prefix=None,
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_output_bigquery_uri(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
model_name="my_model",
gcs_input_uris="gs://input",
bigquery_output_uri=None,
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_missing_model(self):
client = self.tables_client({"list_models.return_value": []}, {})
with pytest.raises(exceptions.NotFound):
client.batch_predict(
model_display_name="my_model",
gcs_input_uris="gs://input",
gcs_output_uri_prefix="gs://output",
)
client.prediction_client.batch_predict.assert_not_called()
def test_batch_predict_no_model(self):
client = self.tables_client({}, {})
with pytest.raises(ValueError):
client.batch_predict(
gcs_input_uris="gs://input", gcs_output_uri_prefix="gs://output"
)
client.auto_ml_client.list_models.assert_not_called()
client.prediction_client.batch_predict.assert_not_called()
def test_auto_ml_client_credentials(self):
credentials_mock = mock.Mock()
patch_auto_ml_client = mock.patch(
"google.cloud.automl_v1beta1.gapic.auto_ml_client.AutoMlClient"
)
with patch_auto_ml_client as MockAutoMlClient:
client = automl_v1beta1.TablesClient(credentials=credentials_mock)
_, auto_ml_client_kwargs = MockAutoMlClient.call_args
assert "credentials" in auto_ml_client_kwargs
assert auto_ml_client_kwargs["credentials"] == credentials_mock
def test_prediction_client_credentials(self):
credentials_mock = mock.Mock()
patch_prediction_client = mock.patch(
"google.cloud.automl_v1beta1.gapic.prediction_service_client.PredictionServiceClient"
)
with patch_prediction_client as MockPredictionClient:
client = automl_v1beta1.TablesClient(credentials=credentials_mock)
_, prediction_client_kwargs = MockPredictionClient.call_args
assert "credentials" in prediction_client_kwargs
assert prediction_client_kwargs["credentials"] == credentials_mock
def test_prediction_client_client_info(self):
client_info_mock = mock.Mock()
patch_prediction_client = mock.patch(
"google.cloud.automl_v1beta1.gapic.prediction_service_client.PredictionServiceClient"
)
with patch_prediction_client as MockPredictionClient:
client = automl_v1beta1.TablesClient(client_info=client_info_mock)
_, prediction_client_kwargs = MockPredictionClient.call_args
assert "client_info" in prediction_client_kwargs
assert prediction_client_kwargs["client_info"] == client_info_mock
| tswast/google-cloud-python | automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py | Python | apache-2.0 | 59,675 |
from rest_framework import status
from rest_framework.exceptions import APIException, ParseError
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
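    # DRF may return either a dict of {field: message(s)} or a bare message/list;
    # normalize both shapes into a flat list of JSON API error objects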
if response:
message = response.data
if isinstance(message, dict):
for error_key, error_description in message.iteritems():
if error_key in top_level_error_keys:
errors.append({error_key: error_description})
else:
if isinstance(error_description, basestring):
error_description = [error_description]
errors.extend([{'source': {'pointer': '/data/attributes/' + error_key}, 'detail': reason}
for reason in error_description])
else:
if isinstance(message, basestring):
message = [message]
errors.extend([{'detail': error} for error in message])
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
class InvalidFilterError(ParseError):
"""Raised when client passes an invalid filter in the querystring."""
default_detail = 'Querystring contains an invalid filter.'
| arpitar/osf.io | api/base/exceptions.py | Python | apache-2.0 | 1,838 |