blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses sequencelengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors sequencelengths 1 1 | author_id stringlengths 1 132 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1680eee25e123cd65af8e484e82f821ffcef73f5 | 72fd091cf4f9ad8c1a6475a8344bb750889e3b53 | /cars/migrations/0001_initial.py | 6f561308a8ce2a3549922aa1b79b89063e822140 | [] | no_license | petrshirin/example-web-app-for-using-db | abe312ab9dee36e5f53b795a2a0bc7529fa245f3 | c625815525cc8427a6e0fc749afc14f126a90e05 | refs/heads/master | 2023-02-18T08:31:03.842057 | 2021-01-14T14:43:11 | 2021-01-14T14:43:11 | 328,006,038 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,449 | py | # Generated by Django 3.1.5 on 2021-01-08 18:02
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Car',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('car_model', models.CharField(max_length=255)),
('production_date', models.DateField()),
('is_free', models.BooleanField(default=True)),
('price', models.DecimalField(decimal_places=2, max_digits=10)),
],
),
migrations.CreateModel(
name='Client',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('FIO', models.CharField(max_length=500)),
('car', models.ManyToManyField(blank=True, null=True, to='cars.Car')),
],
),
migrations.CreateModel(
name='Color',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='Manager',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('FIO', models.CharField(max_length=500)),
('salary', models.DecimalField(decimal_places=2, max_digits=10)),
],
),
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_created', models.DateTimeField(blank=True, default=django.utils.timezone.now)),
('days_to_use', models.PositiveIntegerField()),
('total_price', models.DecimalField(decimal_places=2, max_digits=10)),
('closed', models.BooleanField(default=False)),
('car', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cars.car')),
('client', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cars.client')),
('manager', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cars.manager')),
],
),
migrations.CreateModel(
name='ClientPassportData',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('series', models.CharField(max_length=4, null=True)),
('number', models.CharField(max_length=6, null=True)),
('issued_by_whom', models.CharField(max_length=255, null=True)),
('client', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='cars.client')),
],
),
migrations.AddField(
model_name='car',
name='color',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='cars.color'),
),
]
| [
"[email protected]"
] | |
fab4e484e4788c8cdc5b3d600f1bcd29168b91b8 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02897/s211396554.py | 103568fcd8071619233fcea4b16b1f438fe3b2be | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | #A
import math
n = int(input())
print((math.ceil(n / 2)) / n)
| [
"[email protected]"
] | |
222aba3bdc6078a2bbd2e01c79d319ab45d44737 | 30dea47f44695f3eeacb8270496cdced39485cbd | /tonedetect/tones.py | b72f95f4783db9396b689fd9cd31df1dc93ba559 | [] | no_license | cheind/py-tonedetect | 67490d9b6f238226486e0cfa2831c4855e079c07 | 662b5d335ba9e830914cc0d0d2a1515f832f743b | refs/heads/master | 2021-01-10T23:36:59.131599 | 2016-10-29T06:52:37 | 2016-10-29T06:52:37 | 70,419,207 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 880 | py |
import json
import itertools
import math
import numpy as np
class Tones(object):
""" A list of tones """
def __init__(self):
self.items = []
def add_tone(self, frequencies, sym=None):
sym = sym if sym is not None else len(self.items)
self.items.append({'f': frequencies, 'sym': sym})
def all_tone_frequencies(self):
""" Return a list of all frequencies across all tones """
f = []
for e in self.items:
f.extend(e['f'])
return list(set(f))
def minimum_frequency_step(self):
dists = [math.fabs(pair[0]-pair[1]) for pair in itertools.combinations(self.all_tone_frequencies(), 2)]
return np.min(dists)
@staticmethod
def from_json_file(filename):
t = Tones()
with open(filename) as f:
t.items = json.load(f)
return t
| [
"[email protected]"
] | |
281d0127dbb5560633ec0da580a15c81be6ba978 | 24f29f50988c59785011f3bc2645fa5d2a7a7d97 | /wlct/cogs/ladders.py | 333e6289dac1c32fa28dd01d4fe5d125622fc1c7 | [] | no_license | h-shan/wzclot | d1c0c0f83b0b2916e0352c7cc0bfd25775a632d9 | 88d0e57c053a69a212af43f52168e234f41f6351 | refs/heads/master | 2021-01-07T20:17:04.065980 | 2020-02-19T22:44:18 | 2020-02-19T22:44:18 | 241,809,955 | 0 | 0 | null | 2020-02-20T06:25:12 | 2020-02-20T06:25:11 | null | UTF-8 | Python | false | false | 5,329 | py | import discord
from wlct.models import Clan, Player
from wlct.tournaments import Tournament, TournamentTeam, TournamentPlayer, MonthlyTemplateRotation, get_games_finished_for_team_since, find_tournament_by_id, get_team_data_no_clan, RealTimeLadder, get_real_time_ladder, TournamentGame
from discord.ext import commands, tasks
from wlct.cogs.common import is_admin
from django.utils import timezone
from traceback import print_exc
class Ladders(commands.Cog, name="ladders"):
''' Actually sends the help command '''
def __init__(self, bot):
self.bot = bot
@commands.command(brief="Lists all real-time ladders hosted by this bot and their IDs",
usage='''
109 -j : joins ladder 109
109 -l : leaves ladder 109
109 -t : displays all templates on the ladder
109 -p : displays all players currently on the ladder
109 -r : displays full ladder rankings
109 -g : displays all in progress games
109 -v templateid: vetoes a template or displays the current one if no template id is passed
''')
async def rtl(self, ctx, arg_id="invalid_id", arg_cmd="invalid_cmd", arg_cmd2="invalid_cmd2"):
print("Arguments for RTL id: {} command: {}".format(arg_id, arg_cmd))
invalid_cmd_text = "You've entered an invalid command. Please correct it and try again."
retStr = ""
do_embed = False
do_all_channels = False
embed_name = ""
if arg_id != "invalid_id":
emb = discord.Embed(color=self.bot.embed_color)
emb.set_author(icon_url=ctx.message.author.avatar_url, name=ctx.message.author)
emb.set_footer(text="Bot created and maintained by -B#0292")
if arg_id.isnumeric():
ladder = get_real_time_ladder(int(arg_id))
discord_id = ctx.message.author.id
if ladder is not None:
if arg_cmd == "-p":
# display current players in the ladder
retStr = ladder.get_current_joined()
elif arg_cmd == "-j":
retStr = ladder.join_ladder(discord_id)
retStr += "\n\n" + ladder.get_current_joined()
do_all_channels = True
elif arg_cmd == "-l":
retStr = ladder.leave_ladder(discord_id)
retStr += "\n\n" + ladder.get_current_joined()
do_all_channels = True
elif arg_cmd == "-t":
retStr = ladder.get_current_templates()
do_embed = True
emb.title = "Current Templates - Ladder {}".format(ladder.name)
emb.add_field(name="Templates", value=retStr)
elif arg_cmd == "-r":
retStr = ladder.get_current_rankings()
elif arg_cmd == "-g":
do_embed = True
retStr = ladder.get_current_games()
emb.title = "Current Games - Ladder {}".format(ladder.name)
emb.add_field(name="In Progress", value=retStr)
elif arg_cmd == "-v":
if arg_cmd2 != "invalid_cmd2":
retStr = ladder.veto_template(discord_id, arg_cmd2)
else:
# display the users current veto
retStr = ladder.get_current_vetoes(discord_id)
elif arg_cmd == "-ta":
if arg_cmd2 != "invalid_cmd2":
# check to make sure the author has access here
if is_admin(ctx.message.author.id):
retStr = ladder.add_template(arg_cmd2)
else:
retStr = invalid_cmd_text
elif arg_cmd == "-tr":
if arg_cmd2 != "invalid_cmd2":
# check for access
if is_admin(ctx.message.author.id):
retStr = ladder.remove_template(arg_cmd2)
else:
retStr = invalid_cmd_text
else:
retStr = invalid_cmd_text
else:
retStr = "You've entered an invalid ladder ID."
else:
retStr = "You've entered an invalid ladder ID."
elif arg_id == "invalid_id":
retStr += "__**Current Real-Time Ladders**__\n"
ladders = RealTimeLadder.objects.all()
if not ladders or ladders.count() == 0:
retStr += "There are no real-time ladders created yet."
else:
for ladder in ladders:
retStr += "{} | Id: {}".format(ladder.name, ladder.id)
else:
retStr = "You have entered an invalid command. Please correct it and try again."
if do_embed:
await ctx.send(embed=emb)
else:
await ctx.send(retStr)
def setup(bot):
bot.add_cog(Ladders(bot)) | [
"[email protected]"
] | |
23cc696455e578661687ff0eb2c8e466a0ecca7b | 492d3e666b87eff971628a74fe13facde01e2949 | /htmlcov/_python_Django_My Projects_student-portal_Lib_site-packages_PIL_GifImagePlugin_py.html.py | 1dc887ff9dbe11ce042ec22d45bfd7fc4da3fc41 | [] | no_license | OmarFateh/Student-Portal | 42050da15327aa01944dc79b5e00ca34deb51531 | 167ffd3a4183529c0cbc5db4ab232026711ea915 | refs/heads/master | 2023-06-13T01:03:16.475588 | 2021-07-08T11:09:09 | 2021-07-08T11:09:09 | 382,895,837 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251,069 | py | XXXXXXXXX XXXXX
XXXXXX
XXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XX
XXXXXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXXXX XXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXX
XXXXXXX
XXXXX XXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXX
XXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX X
XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX
XXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XXXXXXXX XXXXXXXXXX XX
XXX XXXXXXXXXXXXXX
XXX XXXXXXXXXX XXXXXX
XXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXXX XXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXXXX
XXXX XXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXXXXXX XXXXXXXX XXXXXXXXXX XX
XX XXXXXXXXXXXXXXXXXXXXXXX XX XXXX XXXXXXXX
XXXXX
XX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXX XXXX XXXXXXXX
XXXX
XX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXX XXXXXXXXXXX XXXXX
XXXX
XX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXX XXX XX XXXX
XXXX
XX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXX XXXXXX XXXXX XXXXX XXXXXXXXXXX XXXXX
XXXX
XXXXXX
XXXXXX
XXXX XXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXX XXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXX XXXXXXXX XXXXX XXXXX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXX XXXX XX XXXX XXXXXX XXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXXX XXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXX XXX XXXXX XXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXXX XXXXXX XXXXXXXXX XX XXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXXX XXXXXXX XXXXX XX XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXX XXXXXXXXXXXX XXXXXXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXX XX XXXXXXX XXXXXXXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXX XXX XXXXXXXXX XX XXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXX XXX XXXXXXXXX XX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXX XXX XXXXXX XXXX XXX XXXXXXXXXXX XX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXX XXX XXX XXXXXXX XXXX XXXXXX XXXXXXXX XXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX X XXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XX XXXXXXX XXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXXXX XX XXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXX XXXXXX XX XXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXX XXX XXXXXXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXX XX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXX XXXXXX X XXXX XXX XXXXX XX XXXX X X XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXX XXX XXXXXXX XX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXX XXX XXX XXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXX XXX XX XXXXX XX XXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXX XXXXXXX XXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXX XXXXXXXX XXX XXX XXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XX XXX XXXXXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXXX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXXXXXX XXX XXXXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXXXXXX XXX XXXXXX XX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXX XX XXXXX XXX XXXXXXX XXXXXXX XX XXXXX XX X XXXX XXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXX XXXXXX XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XX XXX XXXXXX XXX XXXXXXXX XXXXXX XX XX XXX XXXXXX XX XXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXXX XX XXX XXXXX XX XXXXXXX XXXX XXX XXXXXXX XXXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XX XXXXX XXXX XXXXXXXXX XXXX X XXXXXXXX XX XX XXXXX XX X XXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXX XXXXXXXX XX XXX XXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXXXXXXX XXXXXXX XXXXXX XXX XX XXXX XXX X XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXX XXX XXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX X XXXX XXX XXXXXXX XX XXX XXXXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX X XXXXXXX XXXX XXXXXXX X XXXXXXX XXX X XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX X XXXXXXXXX XXX XXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXX XXXXX XXXXXX XXXXXXXXXX XXX XXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX X XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX X XXXX XX XXXXXXXX XXXX XXXXX XXXX XXX XXXXX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXXXX XX XXXXXXXXX XX XXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXX XXXXXX XXXX XXX XX XXXXXXXX XXX XXXXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXX XXX XXXXX XXXX XX XXXXXXXXXXX XXXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXX XXXXXXXXXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XX XXXX XXXXXXXXX XXX XXXXXXXXXXXXX XXXX XX XXX XXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XX XXX XXXX XXXX XXX XXXXXXXXXXX XXXXXX XXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXX XXX XXXXXXXX XXXXXXXXXXXXXX XXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXX XXXXXXXXXXX XX XXX XX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXXXXXXX XXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX XXX XX X XXXXXXXX X XXX X XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXX XX XXXXXXX XXXXXXX XX XXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXX XX XXXX XX XXX XXXX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXX XXXXX XX XXXX XXXX XX XXXXXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXXXXXXXXXXX XX X XXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXX XXXXXXXX XXXXXXXXXX XX XXX XXXXXXX XXXXXX XX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXX XXXXXXXXXXX XXXX XXXXXXX XXX XXXX XX XXXXXXX XXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXX XX XXXXXXX XX XXXXXXX XXXXXXX XX XXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXXXXXX XXXXX X XXXXX XXX XXXXX XXX XXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXX XXXXXXXXXX XX XXXXXXXX XXXXXX XXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XX XXX XX X XXXXX XX XX XXXX XXX XXX XXX XXXX XX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX X XX XXXXX XXXX XXX XXXXXXX XX XXXXX XXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX X XX XXXXX XXXXXX XXX XXXXXX XXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX X XX XX XXXX X XXXXXXX XXXXXX XXX XXXXXXX XX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXX XXX XXXXXXX XX XXX XXXXX XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXX XXX XXXXXXX XXXX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXX XXXXXXXX XXXX XXXXXX XX XXX XXXX XXXXX XX X XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXX XXX XXXXXX XXXXXXXXX XX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXX XXXXXXXX XXXXXXX XXXXXX XX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXX XXXXXXX XXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XXXXXXX XXX XX XX XXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXX XXX XXXXXXX XXX XXXXXXXXX XX XXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXXX XXXXXXXXXXX XXXXXXXX XXX XXXXXXXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXX XX XXXX XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XX XX XXXX XX XXXXXXXXX XX XXX XXXX XXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX X X XXXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX X XXXX XX XXXXXXX XXXXXXXXXXXX X XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XX XXXXXX XXXXX XXXXX X XXXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXX XX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXX XX XXX XXX XXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXX XXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXX XXXXX XXXXXX XXXXXXXXXX XXX XXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXXX XXXXXXX XX XXXXXX XXXXXX XXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XX XXXXXXX XXXXXXXXX XXX XXX XXXX XX XXXXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX X XXXX XX XXXXXXX XXXXXXXXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXX XXXXX XXXXXX XX X XXXXX XXXXX XXXXXXX XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXX XXXXX XX XXX XX XXXXXXX XXXXXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXX XXXXXXXXXXX XXXX XXXXXXXX XX XXXXX XXXXXXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXX XXXX XX XXXXX XXXXXXXXXX XXX XXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXX XXXX XXXX XXXXXX XXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXX XXX XXXXXXXXX XXXX XX XXX XXXX XX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXX XX XXX XXXXXXXX XXXXXXXXXXXXXX XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XX XXXXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXXXXX
XXXX XXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXX
XXX
XX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX XXXXXX XXXXXX XX XXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXX
XXXXXXX XX XXXXXXXXXX XXXXX XXXXX
XXXX
XXXXXX
XXXXXX
XXXXXXX
XXXXXXX
| [
"[email protected]"
] | |
8fbd6c02b72607b5cd55525e6fb33f0ce4082fef | a964f0f3f93a84d5195042d3c1bb2288e8b62161 | /muddery/server/utils/utils.py | a69f6d5ace3c6d1dc5892332ebc02a3c67a0e928 | [
"BSD-3-Clause"
] | permissive | nobodxbodon/muddery | 474433791b75d2f2130e6b758fb3126e2d56230b | 4b4c6c0dc5cc237a5df012a05ed260fad1a793a7 | refs/heads/master | 2023-06-19T19:28:39.252340 | 2021-07-14T15:07:47 | 2021-07-14T15:07:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,890 | py | """
General helper functions that don't fit neatly under any given category.
They provide some useful string and conversion methods that might
be of use when designing your own game.
"""
import os, re, inspect
from importlib import import_module
from pkgutil import iter_modules
from django.conf import settings
from evennia.utils import search, logger
from muddery.launcher import configs
from muddery.server.dao.localized_strings import LocalizedStrings
def get_muddery_version():
"""
Get muddery's version.
"""
import muddery
return muddery.__version__
def set_obj_data_key(obj, key):
"""
    Set the data key, storing it in the object's attributes.
Args:
obj: (object) object to be set
key: (string) key of the data.
"""
obj.attributes.add("key", key, category=settings.DATA_KEY_CATEGORY, strattr=True)
def search_obj_data_key(key):
"""
Search objects which have the given key.
Args:
key: (string) Data's key.
"""
if not key:
return None
return search.search_object_attribute(key="key", strvalue=key, category=settings.DATA_KEY_CATEGORY)
def search_db_data_type(key, value, typeclass):
"""
Search objects of the given typeclass which have the given value.
"""
objs = search.search_object_attribute(key=key, value=value)
return [obj for obj in objs if obj.is_typeclass(typeclass, exact=False)]
def set_obj_unique_type(obj, type):
"""
Set unique object's type.
Args:
obj: (object) object to be set
type: (string) unique object's type.
"""
obj.attributes.add("type", type, category=settings.DATA_KEY_CATEGORY, strattr=True)
def search_obj_unique_type(type):
"""
Search objects which have the given unique type.
Args:
type: (string) unique object's type.
"""
obj = search.search_object_attribute(key="type", strvalue=type, category=settings.DATA_KEY_CATEGORY)
return obj
def is_child(child, parent):
"""
    Check whether the child class inherits (directly or indirectly) from the parent class.
Args:
child: child class
parent: parent class
Returns:
boolean
"""
for base in child.__bases__:
if base is parent:
return True
for base in child.__bases__:
if is_child(base, parent):
return True
return False
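# Example: is_child(bool, int) is True because bool lists int among its bases,
# while is_child(int, bool) is False; the second loop extends the check to
# indirect (grandparent and deeper) inheritance.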
def file_iterator(file, erase=False, chunk_size=512):
while True:
c = file.read(chunk_size)
if c:
yield c
else:
# remove temp file
file.close()
if erase:
os.remove(file.name)
break
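# Typical use is streaming a temporary file back to the client and removing it
# afterwards; a minimal sketch (the response wiring is illustrative, not a call
# site in this module):
#   from django.http import StreamingHttpResponse
#   response = StreamingHttpResponse(file_iterator(open(tmp_path, "rb"), erase=True))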
def get_unlocalized_py_strings(filename, filter):
"""
Get all unlocalized strings.
Args:
        filename: (string) path of the python file to scan.
        filter: (boolean) whether to skip strings already in the localization database.
    Returns:
        (set): a set of (string, category) tuples.
"""
re_func = re.compile(r'_\(\s*".+?\)')
re_string = re.compile(r'".*?"')
re_category = re.compile(r'category.*=.*".*?"')
strings = set()
# search in python files
with open(filename, "r") as file:
lines = file.readlines()
for line in lines:
# parse _() function
for func in re_func.findall(line):
str = ""
cate = ""
str_search = re_string.search(func)
if str_search:
str = str_search.group()
#remove quotations
str = str[1:-1]
cate_search = re_category.search(func)
if cate_search:
group = cate_search.group()
cate = re_string.search(group).group()
#remove quotations
cate = cate[1:-1]
if str or cate:
if filter:
# check database
try:
LocalizedStrings.get(str, cate)
continue
except Exception as e:
pass
strings.add((str, cate,))
return strings
def all_unlocalized_py_strings(filter):
"""
Get all unlocalized strings.
Args:
        filter: (boolean) whether to skip strings already in the localization database.
    Returns:
        (set): a set of (string, category) tuples.
"""
rootdir = configs.MUDDERY_LIB
strings = set()
ext = ".py"
# get all _() args in all files
for parent, dirnames, filenames in os.walk(rootdir):
for filename in filenames:
file_ext = os.path.splitext(filename)[1].lower()
if file_ext == ext:
full_name = os.path.join(parent, filename)
strings.update(get_unlocalized_py_strings(full_name, filter))
return strings
def get_unlocalized_js_strings(filename, filter_set):
"""
Get all unlocalized strings.
Args:
        filename: (string) path of the javascript file to scan.
        filter_set: (set) strings that are already localized and should be skipped.
    Returns:
        (set): a set of strings.
"""
re_func = re.compile(r'_\(\s*".+?\)')
re_string = re.compile(r'".*?"')
strings = set()
# search in python files
with open(filename, "r") as file:
lines = file.readlines()
for line in lines:
# parse _() function
for func in re_func.findall(line):
str = ""
cate = ""
str_search = re_string.search(func)
if str_search:
str = str_search.group()
#remove quotations
str = str[1:-1]
if str:
if filter_set:
# check dict
if str not in filter_set:
strings.add(str)
else:
strings.add(str)
return strings
def all_unlocalized_js_strings(filter):
"""
Get all unlocalized strings.
Args:
        filter: (boolean) whether to skip strings that are already localized.
    Returns:
        (set): a set of strings.
"""
rootdir = configs.MUDDERY_LIB
strings = set()
ext = ".js"
filter_set = set()
# get filter
if filter:
local_string_filename = os.path.join(configs.MUDDERY_LIB, "web", "webclient", "webclient",
"lang", settings.LANGUAGE_CODE, "strings.js")
with open(local_string_filename, "r") as file:
re_dict = re.compile(r'".+?"\s*:\s*".+?"')
re_string = re.compile(r'".*?"')
lines = file.readlines()
for line in lines:
# find localization dict
dict_search = re_dict.search(line)
if dict_search:
word_dict = dict_search.group()
str_search = re_string.search(word_dict)
str = str_search.group()
#remove quotations
str = str[1:-1]
filter_set.add(str)
# get all _() args in all files
for parent, dirnames, filenames in os.walk(rootdir):
for filename in filenames:
file_ext = os.path.splitext(filename)[1].lower()
if file_ext == ext:
full_name = os.path.join(parent, filename)
strings.update(get_unlocalized_js_strings(full_name, filter_set))
return strings
def load_modules(path):
"""
    Load all modules and sub-modules under the given path.
Args:
path: (string) modules' path
"""
modules = []
m = import_module(path)
if hasattr(m, '__path__'):
for _, subpath, ispkg in iter_modules(m.__path__):
fullpath = path + '.' + subpath
if ispkg:
modules += load_modules(fullpath)
else:
modules.append(import_module(fullpath))
return modules
def classes_in_path(path, cls):
"""
Load all classes in the path.
Args:
path: (string) classes' path
cls: (class) classes' base class
"""
modules = load_modules(path)
for module in modules:
for name, obj in vars(module).items():
if inspect.isclass(obj) and issubclass(obj, cls) and obj is not cls:
yield obj
def get_module_path(path):
"""
Transform a normal path to a python module style path.
"""
root, name = os.path.split(path)
if not name:
return
root = get_module_path(root)
if root:
return root + "." + name
else:
return name
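# How these helpers compose (a minimal sketch; the package path is illustrative):
# get_module_path() turns a filesystem-style path into a dotted module path,
# which classes_in_path() can then walk to discover subclasses.
#
#   module_path = get_module_path("muddery/server/utils")  # "muddery.server.utils"
#   for cls in classes_in_path(module_path, object):
#       print(cls.__name__)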
| [
"[email protected]"
] | |
58d9f299b2be3ac5b35e101e9d797493adbbab9e | 502fc0002d5575d0a37b4f13706c7072f860033c | /Chapter06/cyclegan/datasets.py | c851a615e497b6d1583f95c8955550f2b29adf88 | [
"MIT"
] | permissive | PacktPublishing/Hands-On-Generative-Adversarial-Networks-with-PyTorch-1.x | 665d9364af54d7fd44787d0753400d7625ac8b82 | beee21343078b607f393bbb1321ac49cf17ffb5f | refs/heads/master | 2023-02-10T22:12:08.980700 | 2023-01-30T09:26:20 | 2023-01-30T09:26:20 | 227,829,701 | 66 | 50 | null | null | null | null | UTF-8 | Python | false | false | 1,129 | py | import glob
import random
import os
import torchvision
from torch.utils.data import Dataset
from PIL import Image
class ImageDataset(Dataset):
def __init__(self, root_dir, transform=None, unaligned=False, mode='train'):
self.transform = torchvision.transforms.Compose(transform)
self.unaligned = unaligned
self.train = (mode == 'train')
self.files_A = sorted(glob.glob(os.path.join(root_dir, '%sA' % mode) + '/*.*'))
self.files_B = sorted(glob.glob(os.path.join(root_dir, '%sB' % mode) + '/*.*'))
def __getitem__(self, index):
item_A = self.transform(Image.open(self.files_A[index % len(self.files_A)]))
if self.unaligned:
item_B = self.transform(Image.open(self.files_B[random.randint(0, len(self.files_B) - 1)]))
else:
item_B = self.transform(Image.open(self.files_B[index % len(self.files_B)]))
if self.train:
return {'trainA': item_A, 'trainB': item_B}
else:
return {'testA': item_A, 'testB': item_B}
def __len__(self):
return max(len(self.files_A), len(self.files_B)) | [
"[email protected]"
] | |
e65cbf5b64816e289ebbda33044e7070ef649a39 | a5e591dc09e11e88af56fb5a881fae064fb9c495 | /recruitment/recruitment/doctype/sds/test_sds.py | c6cce9a11e7909f1af24d4e5044701cb9cfd6ede | [
"MIT"
] | permissive | barathprathosh/recruitment | 6b61dd1ee9c0b9d7851b0b3e5bab307f7ee2d1b5 | 9660944856e72288e47960e6802ec97a220a656d | refs/heads/master | 2020-04-29T03:03:51.722972 | 2019-03-15T08:58:32 | 2019-03-15T08:58:32 | 175,794,797 | 0 | 0 | NOASSERTION | 2019-03-15T10:00:32 | 2019-03-15T10:00:31 | null | UTF-8 | Python | false | false | 247 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2015, VHRS and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
# test_records = frappe.get_test_records('SDS')
class TestSDS(unittest.TestCase):
pass
| [
"[email protected]"
] | |
a697dce54965d918f7b330653707e0336ac916cc | 4bd4bacecee33cada173e427b5ecb1d758bafaad | /src/scalarizr/storage2/filesystems/ext3.py | db6c14a7eb67d5266be90987c0ab7e3c2a861102 | [] | no_license | kenorb-contrib/scalarizr | 3f2492b20910c42f6ab38749545fdbb79969473f | 3cc8b64d5a1b39c4cf36f5057f1a6a84a9a74c83 | refs/heads/master | 2022-11-26T10:00:58.706301 | 2017-11-02T16:41:34 | 2017-11-02T16:41:34 | 108,550,233 | 0 | 2 | null | 2020-07-24T11:05:36 | 2017-10-27T13:33:46 | Python | UTF-8 | Python | false | false | 1,840 | py | """
Created on Aug 29, 2012
@author: marat
"""
from scalarizr import storage2
from scalarizr.storage2 import filesystems
E2LABEL_EXEC = "/sbin/e2label"
RESIZE2FS_EXEC = "/sbin/resize2fs"
E2FSCK_EXEC = "/sbin/e2fsck"
MAX_LABEL_LENGTH = 16
class ExtFileSystem(filesystems.FileSystem):
features = filesystems.FileSystem.features.copy()
features['umount_on_resize'] = True
error_messages = filesystems.FileSystem.error_messages.copy()
error_messages['fsck'] = 'Error occured during filesystem check on device %s'
os_packages = ('e2fsprogs', )
def mkfs(self, device, *short_args):
short_args = list(short_args)
short_args += list(opt for opt in ('-F', '-q') if opt not in short_args)
super(ExtFileSystem, self).mkfs(device, *short_args)
def resize(self, device, size=None, *short_args, **long_kwds):
cmd = (E2FSCK_EXEC, '-fy', device)
rcode = filesystems.system(cmd, raise_exc=False,
error_text=self.error_messages['fsck'] % device)[2]
if rcode not in (0, 1):
raise storage2.StorageError('Fsck failed to correct file system errors')
cmd = (RESIZE2FS_EXEC, device)
filesystems.system(cmd, error_text=self.error_messages['resize'] % device)
def set_label(self, device, label):
cmd = (E2LABEL_EXEC, device, label[:MAX_LABEL_LENGTH])
filesystems.system(cmd, error_text=self.error_messages['set_label'] % device)
def get_label(self, device):
cmd = (E2LABEL_EXEC, device)
return filesystems.system(cmd, error_text=self.error_messages['get_label'] % device)[0].strip()
class Ext3FileSystem(ExtFileSystem):
type = 'ext3'
storage2.filesystem_types[Ext3FileSystem.type] = Ext3FileSystem
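# The assignment above registers the handler in storage2's filesystem registry,
# so callers can look it up by type name. A rough sketch (device path and label
# are illustrative; constructor arguments follow the base FileSystem class):
#   fs = storage2.filesystem_types['ext3']()
#   fs.mkfs('/dev/sdb1')                      # mkfs is run with '-F -q' appended
#   fs.set_label('/dev/sdb1', 'data-volume')  # labels are cut to 16 characters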
| [
"[email protected]"
] | |
e2dd3fc6ed5023653573ada2255327ccf464b401 | d694a99c910ce36c8d6981e126548fc91e74046e | /Regression/regression.py | 60bd820898953a66d0998f401bded14011439700 | [] | no_license | YiddishKop/ml_src_adam_compare | 08ac23cf1fb02222da1f04e833e296b1b75ae9af | cfeadebd41f802686828958068c15bcfdfea0be9 | refs/heads/master | 2020-03-25T22:16:19.581690 | 2018-08-09T23:37:31 | 2018-08-09T23:37:31 | 144,213,684 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,177 | py | import numpy as np
import tensorflow as tf
from math import sqrt
from keras import optimizers
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, normalization
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.5
set_session(tf.Session(config = config))
class Model(object):
def __init__(self):
self.x = np.load("x_train.npy")
self.y = np.load("y_train.npy")
def select_training_data(self):
self.x_train, self.y_train = np.array(self.x), np.array(self.y)
self.add_x()
n = 18000
self.x_valid = self.x[n:]
self.y_valid = self.y[n:]
self.x = self.x[:n]
self.y = self.y[:n]
print "x_train =", self.x.shape, self.x_valid.shape
print "y_train =", self.y.shape, self.y_valid.shape
def add_x(self):
grade0 = [0, 1, 6, 7]
grade1 = [2, 3, 8, 9, 10]
square = np.square(self.x[:, grade0])
sqrt = np.sqrt(self.x[:, :])
n = self.x_train.shape[1]
cross_term = np.empty([self.x.shape[0], n*(n-1)/2])
s = 0
for i in range(n-1):
for j in range(i+1, n):
cross_term[:, s] = self.x[:, i] * self.x[:, j]
s += 1
cube = np.power(self.x[:, grade0], 3)
self.x = np.concatenate([self.x, square], 1)
self.x = np.concatenate([self.x, cross_term], 1)
self.x = np.concatenate([self.x, np.ones([self.x.shape[0], 1])], 1)
print self.x.shape
def build_nn_model(self):
nn_model = Sequential()
nn_model.add(Dense(input_dim = self.x.shape[1], output_dim = 1000))
nn_model.add(Activation('relu'))
nn_model.add(Dense(output_dim = 1000))
nn_model.add(Activation('relu'))
nn_model.add(Dense(output_dim = 1000))
nn_model.add(Dense(output_dim = 1))
nn_model.summary()
opt = optimizers.Adam(lr = 0.0001, beta_1 = 0.9, beta_2 = 0.999, epsilon = 1e-8, decay=0.0)
#opt = optimizers.SGD(lr = 10E-5)
nn_model.compile(loss = 'mean_squared_error', optimizer = opt, metrics = ['accuracy'])
nn_model.fit(self.x, self.y, batch_size = 100, nb_epoch = 0, shuffle = True, validation_data = (self.x_valid, self.y_valid))
nn_model.save('model.h5')
fout = open("result", 'w')
self.result = nn_model.predict(self.x[:5000])
self.output_result(fout, self.y[:5000])
self.result = nn_model.predict(self.x_valid)
self.output_result(fout, self.y_valid)
def output_result(self, fout, y_true): # write file
fout.write("y_pred, y_train, error, rms_error\n")
ave_error = 0
rms_error = 0
count = self.result.shape[0]
for i in range(self.result.shape[0]):
if self.y[i] > 0:
err1 = np.abs((self.result[i][0] - y_true[i]))/y_true[i]#self.y[i][0]
ave_error += err1
err2 = np.square((self.result[i][0] - y_true[i]))
rms_error += err2
fout.write("%.2f" %(self.result[i][0]) + " - " + "%.2f" %(y_true[i]) + " - ")
fout.write("%.2f" %(err1*100) + ", %.2f" %(err2) + "\n")
else:
count -= 1
ave_error = ave_error / float(count)
rms_error = sqrt(rms_error / float(count))
print "Number =", count
print "Ave error = %.3f" %(ave_error * 100), "%"
print "RMS error = %.3f" %(rms_error)
model = Model()
model.select_training_data()
model.build_nn_model()
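# Running the script end to end (assumes x_train.npy and y_train.npy were
# produced beforehand; the invocation below is the obvious one, nothing more):
#   python regression.py
# It saves the trained network to model.h5 and writes per-sample errors to "result".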
| [
"[email protected]"
] | |
5497665447fb033386b2092a63fbef7149fd845b | dddd89637373f455a476431f4fcb7e17b4e9dd57 | /py/display.py | 85bcfe46e59c909fad72f4b04abaf084a127d399 | [] | no_license | DhirManish/Python | 35304eb47dea61934426fb6fc5094e1a83517cf3 | 10df7245d0964340d6c8d14cf26a9cf8f93ecf5d | refs/heads/master | 2020-06-05T07:09:41.856780 | 2015-03-07T12:53:10 | 2015-03-07T12:53:10 | 20,372,496 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 991 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# display.py
#
# Copyright 2014 Ajay Bhatia <ajay@dumb-box>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import sys
def main(fname):
file = open(fname)
for line in file.readlines():
print line,
if __name__ == '__main__':
main(sys.argv[1])
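# Usage: ./display.py <filename>  (reads the named file and prints it line by line)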
| [
"[email protected]"
] | |
51d3cd83c17924f57928febd4c77f7e11a693a64 | ac42f1d918bdbd229968cea0954ed75250acd55c | /admin/dashboard/openstack_dashboard/test/integration_tests/tests/test_credentials.py | 45c7f9956f3bb90d941ff841ff21f9390cc0aa7a | [
"Apache-2.0"
] | permissive | naanal/product | 016e18fd2f35608a0d8b8e5d2f75b653bac7111a | bbaa4cd60d4f2cdda6ce4ba3d36312c1757deac7 | refs/heads/master | 2020-04-03T22:40:48.712243 | 2016-11-15T11:22:00 | 2016-11-15T11:22:00 | 57,004,514 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,250 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from horizon.test import firefox_binary
from openstack_dashboard.test.integration_tests import helpers
from os import listdir
from os.path import join
from os import remove
class TestDownloadRCFile(helpers.AdminTestCase):
_directory = firefox_binary.WebDriver.TEMPDIR
_openrc_template = "-openrc.sh"
def setUp(self):
super(TestDownloadRCFile, self).setUp()
username = self.TEST_USER_NAME
tenant_name = self.HOME_PROJECT
projects_page = self.home_pg.go_to_identity_projectspage()
tenant_id = projects_page.get_project_id_from_row(tenant_name)
self.actual_dict = {'OS_USERNAME': username,
'OS_TENANT_NAME': tenant_name,
'OS_TENANT_ID': tenant_id}
def test_download_rc_v2_file(self):
"""This is a basic scenario test:
Steps:
1) Login to Horizon Dashboard as admin user
2) Navigate to Project > Compute > Access & Security > API Access tab
3) Click on "Download OpenStack RC File v2.0" button
4) File named by template "<tenant_name>-openrc.sh" must be downloaded
5) Check that username, tenant name and tenant id correspond to current
username, tenant name and tenant id
"""
api_access_page = self.home_pg.\
go_to_compute_accessandsecurity_apiaccesspage()
api_access_page.download_openstack_rc_file(
2, self._directory, self._openrc_template)
cred_dict = api_access_page.get_credentials_from_file(
2, self._directory, self._openrc_template)
self.assertEqual(cred_dict, self.actual_dict)
def test_download_rc_v3_file(self):
"""This is a basic scenario test:
Steps:
1) Login to Horizon Dashboard as admin user
2) Navigate to Project > Compute > Access & Security > API Access tab
3) Click on "Download OpenStack RC File v3" button
4) File named by template "<tenant_name>-openrc.sh" must be downloaded
5) Check that username, project name and project id correspond to
current username, tenant name and tenant id
"""
api_access_page = self.home_pg.\
go_to_compute_accessandsecurity_apiaccesspage()
api_access_page.download_openstack_rc_file(
3, self._directory, self._openrc_template)
cred_dict = api_access_page.get_credentials_from_file(
3, self._directory, self._openrc_template)
self.assertEqual(cred_dict, self.actual_dict)
def tearDown(self):
super(TestDownloadRCFile, self).tearDown()
remove(join(self._directory, listdir(self._directory)[0]))
| [
"[email protected]"
] | |
8654ed8796db644dd4805d8b68137f4e06de7879 | f2b44af5372c6318a941015f64b279ccf9099a18 | /rest130/wsgi.py | 5726a659e41211d23cf13244551e776d53d36061 | [] | no_license | yuansuixin/Rest-Framework-page-view | c459ca54c1998cde4c0fe207ba6464353471cbdf | a8663c09a00ce4f4d055ca96e3132ae0a4ddea54 | refs/heads/master | 2020-03-13T06:58:12.512163 | 2018-04-25T14:09:15 | 2018-04-25T14:09:15 | 131,015,642 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | """
WSGI config for rest130 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rest130.settings")
application = get_wsgi_application()
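# A WSGI server imports this module-level "application" callable to serve the
# project, for example (a typical command, not project-specific configuration):
#   gunicorn rest130.wsgi:application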
| [
"[email protected]"
] | |
f80eb0ee75c7c6f6668996e9f7d6ded968c8c196 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/tree-big-937.py | 1cb8651f581c5c2f244b40205a24f459938ff392 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,291 | py | # Binary-search trees
class TreeNode(object):
value:int = 0
left:"TreeNode" = None
right:"TreeNode" = None
def insert(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode(x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode(x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode2(object):
value:int = 0
value2:int = 0
left:"TreeNode2" = None
left2:"TreeNode2" = None
right:"TreeNode2" = None
right2:"TreeNode2" = None
def insert(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode3(object):
value:int = 0
    value2:int = 0
value3:int = 0
left:"TreeNode3" = None
left2:"TreeNode3" = None
left3:"TreeNode3" = None
right:"TreeNode3" = None
right2:"TreeNode3" = None
right3:"TreeNode3" = None
def insert(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode4(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
left:"TreeNode4" = None
left2:"TreeNode4" = None
left3:"TreeNode4" = None
left4:"TreeNode4" = None
right:"TreeNode4" = None
right2:"TreeNode4" = None
right3:"TreeNode4" = None
right4:"TreeNode4" = None
def insert(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode5(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
value5:int = 0
left:"TreeNode5" = None
left2:"TreeNode5" = None
left3:"TreeNode5" = None
left4:"TreeNode5" = None
left5:"TreeNode5" = None
right:"TreeNode5" = None
right2:"TreeNode5" = None
right3:"TreeNode5" = None
right4:"TreeNode5" = None
right5:"TreeNode5" = None
def insert(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class Tree(object):
root:TreeNode = None
size:int = 0
def insert(self:"Tree", x:int) -> object:
if self.root is None:
self.root = makeNode(x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree2(object):
root:TreeNode2 = None
root2:TreeNode2 = None
size:int = 0
size2:int = 0
def insert(self:"Tree2", x:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree2", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree2", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree2", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree3(object):
root:TreeNode3 = None
root2:TreeNode3 = None
root3:TreeNode3 = None
size:int = 0
size2:int = 0
size3:int = 0
def insert(self:"Tree3", x:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree3", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree3", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree3", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree4(object):
root:TreeNode4 = None
root2:TreeNode4 = None
root3:TreeNode4 = None
root4:TreeNode4 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
def insert(self:"Tree4", x:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree4", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree4", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree4", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree5(object):
root:TreeNode5 = None
root2:TreeNode5 = None
root3:TreeNode5 = None
root4:TreeNode5 = None
root5:TreeNode5 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
size5:int = 0
def insert(self:"Tree5", x:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree5", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree5", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree5", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def makeNode(x: int) -> TreeNode:
b:TreeNode = None
b = TreeNode()
b.value = x
return b
def makeNode2(x: int, x2: int) -> TreeNode2:
b:TreeNode2 = None
b2:TreeNode2 = None
b = TreeNode2()
b.value = x
return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
b:TreeNode3 = None
b2:TreeNode3 = None
b3:TreeNode3 = None
b = TreeNode3()
b.value = x
return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
b:TreeNode4 = None
b2:TreeNode4 = None
b3:TreeNode4 = None
b4:TreeNode4 = None
b = TreeNode4()
b.value = x
return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
b:TreeNode5 = None
b2:TreeNode5 = None
b3:TreeNode5 = None
b4:TreeNode5 = None
b5:TreeNode5 = None
b = TreeNode5()
b.value = x
return b
# Input parameters
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
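# Fill the tree with a pseudo-random key sequence from a multiplicative
# congruential recurrence (k -> k*37813 mod 37831), plus every loop index not divisible by c.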
t = Tree()
while i < n:
t.insert(k)
k = (k * 37813) % 37831
if i % c != 0:
t.insert(i)
i = i + 1
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
if t.contains(i):
print(i)
| [
"[email protected]"
] | |
43eeefacfcf0e69ed91cff1e139d21ea5ca3dcb0 | 55b57d64ec547869835334318f3059fbb507558c | /Fred2/Data/pssms/tepitopepan/mat/DRB3_0303_9.py | 9a0ad63b34e9c15470b38ea93c1c9309344d6602 | [
"BSD-3-Clause"
] | permissive | FRED-2/Fred2 | 9845f6678d4011cb746c7a5a6f283eea68077a02 | b3e54c8c4ed12b780b61f74672e9667245a7bb78 | refs/heads/master | 2021-07-12T05:05:54.515427 | 2020-05-25T06:56:25 | 2020-05-25T06:56:25 | 16,275,425 | 42 | 35 | null | 2021-07-07T12:05:11 | 2014-01-27T10:08:11 | Python | UTF-8 | Python | false | false | 2,168 | py | DRB3_0303_9 = {0: {'A': -999.0, 'E': -999.0, 'D': -999.0, 'G': -999.0, 'F': -0.004754, 'I': -0.99525, 'H': -999.0, 'K': -999.0, 'M': -0.99525, 'L': -0.99525, 'N': -999.0, 'Q': -999.0, 'P': -999.0, 'S': -999.0, 'R': -999.0, 'T': -999.0, 'W': -0.004754, 'V': -0.99525, 'Y': -0.004754}, 1: {'A': 0.0, 'E': 0.1, 'D': -1.3, 'G': 0.5, 'F': 0.8, 'I': 1.1, 'H': 0.8, 'K': 1.1, 'M': 1.1, 'L': 1.0, 'N': 0.8, 'Q': 1.2, 'P': -0.5, 'S': -0.3, 'R': 2.2, 'T': 0.0, 'W': -0.1, 'V': 2.1, 'Y': 0.9}, 2: {'A': 0.0, 'E': -1.2, 'D': -1.3, 'G': 0.2, 'F': 0.8, 'I': 1.5, 'H': 0.2, 'K': 0.0, 'M': 1.4, 'L': 1.0, 'N': 0.5, 'Q': 0.0, 'P': 0.3, 'S': 0.2, 'R': 0.7, 'T': 0.0, 'W': 0.0, 'V': 0.5, 'Y': 0.8}, 3: {'A': 0.0, 'E': -0.74263, 'D': 1.9303, 'G': 0.15152, 'F': -0.88385, 'I': 0.54359, 'H': 0.066424, 'K': -1.101, 'M': 0.17312, 'L': 0.14265, 'N': 0.23879, 'Q': 0.081257, 'P': -1.1015, 'S': 0.61335, 'R': -1.1477, 'T': -0.82428, 'W': -1.0004, 'V': -0.079372, 'Y': -1.0353}, 4: {'A': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 5: {'A': 0.0, 'E': -1.6108, 'D': -2.1332, 'G': -0.86057, 'F': -1.3415, 'I': 0.51964, 'H': -0.37968, 'K': 0.51952, 'M': -0.94544, 'L': -0.079917, 'N': -0.21194, 'Q': -0.55568, 'P': 0.39251, 'S': 0.1234, 'R': 0.28561, 'T': 1.0129, 'W': -1.3229, 'V': 1.1232, 'Y': -1.4223}, 6: {'A': 0.0, 'E': -0.19258, 'D': -0.78133, 'G': -1.1317, 'F': -0.44204, 'I': 0.17504, 'H': -0.15349, 'K': -0.5888, 'M': 0.72971, 'L': 0.52181, 'N': 0.49952, 'Q': -0.021692, 'P': -0.56978, 'S': -0.010359, 'R': -0.88875, 'T': 0.036589, 'W': -0.93873, 'V': 0.10742, 'Y': -0.61142}, 7: {'A': 0.0, 'E': 0.0, 'D': 0.0, 'G': 0.0, 'F': 0.0, 'I': 0.0, 'H': 0.0, 'K': 0.0, 'M': 0.0, 'L': 0.0, 'N': 0.0, 'Q': 0.0, 'P': 0.0, 'S': 0.0, 'R': 0.0, 'T': 0.0, 'W': 0.0, 'V': 0.0, 'Y': 0.0}, 8: {'A': 0.0, 'E': -0.61422, 'D': -0.86927, 'G': -0.56068, 'F': -0.1483, 'I': 0.46799, 'H': 0.34784, 'K': -0.63915, 'M': 0.36782, 'L': 0.11655, 'N': -0.68387, 'Q': 0.44053, 'P': -1.049, 'S': 0.51357, 'R': -0.9464, 'T': -0.64651, 'W': -0.41564, 'V': -0.094717, 'Y': -0.29141}} | [
"[email protected]"
] | |
fd0e88862a3552ff5a444410f25c478bc09b9ccc | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_185/ch21_2019_09_02_16_11_34_126817.py | caeb588b2448887a21cc708aae209b3242a27c36 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 138 | py | valor_conta = float(input("Digite o valor da conta"))
valor_10 = valor_conta * 1.1
print("Valor da conta com 10% {.:2f}".format(valor_10)) | [
"[email protected]"
] | |
962e96893571f492778142b708236e00d56b680e | c54f5a7cf6de3ed02d2e02cf867470ea48bd9258 | /pyobjc/pyobjc-framework-Quartz/PyObjCTest/test_cikernel.py | b0db5ef848f5d457400c0faee426365fabfe1997 | [
"MIT"
] | permissive | orestis/pyobjc | 01ad0e731fbbe0413c2f5ac2f3e91016749146c6 | c30bf50ba29cb562d530e71a9d6c3d8ad75aa230 | refs/heads/master | 2021-01-22T06:54:35.401551 | 2009-09-01T09:24:47 | 2009-09-01T09:24:47 | 16,895 | 8 | 5 | null | null | null | null | UTF-8 | Python | false | false | 306 | py |
from PyObjCTools.TestSupport import *
from Quartz.QuartzCore import *
from Quartz import *
class TestCIKernel (TestCase):
def testMethods(self):
self.failUnlessArgIsSEL(CIKernel.setROISelector_, 0, CGRect.__typestr__ + '@:i' + CGRect.__typestr__ + '@')
if __name__ == "__main__":
main()
| [
"ronaldoussoren@f55f28a5-9edb-0310-a011-a803cfcd5d25"
] | ronaldoussoren@f55f28a5-9edb-0310-a011-a803cfcd5d25 |
bdbb08709eafee58b0e2298380f07b3701a79bd1 | 605356250c655a7f98d5f1158e0ffc94175de4f7 | /devel/lib/python2.7/dist-packages/pal_vision_msgs/msg/_FaceRecognitionFeedback.py | 463e9865ee7b4e52e18bc66246629867f1f725f5 | [] | no_license | MatthewCallery/msc-tiago-project | 4d3dcf07b7bc6915d2f203bbff46f6c11720ff9f | 8c9e987c45d6152192ba36bb27781e961e7900c3 | refs/heads/master | 2020-11-30T04:17:53.649839 | 2017-07-11T14:38:47 | 2017-07-11T14:38:47 | 96,903,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | /home/mc16766/tiago_public_ws/devel/.private/pal_vision_msgs/lib/python2.7/dist-packages/pal_vision_msgs/msg/_FaceRecognitionFeedback.py | [
"mc16766@it051534"
] | mc16766@it051534 |
cbc6351fd46ad8ea36dc9847027121c21c9f0537 | c81d7dfef424b088bf2509a1baf406a80384ea5a | /venv/Lib/site-packages/pandas/tests/indexes/period/test_period_range.py | 49d34248207919814f02e980ff00b963e21dcdd9 | [] | no_license | Goutham2591/OMK_PART2 | 111210d78fc4845481ed55c852b8f2f938918f4a | cb54fb21ebf472bffc6ee4f634bf1e68303e113d | refs/heads/master | 2022-12-10T01:43:08.213010 | 2018-04-05T02:09:41 | 2018-04-05T02:09:41 | 124,828,094 | 0 | 1 | null | 2022-12-07T23:43:03 | 2018-03-12T03:20:14 | Python | UTF-8 | Python | false | false | 3,740 | py | import pytest
import pandas.util.testing as tm
from pandas import date_range, NaT, period_range, Period, PeriodIndex
class TestPeriodRange(object):
@pytest.mark.parametrize('freq', ['D', 'W', 'M', 'Q', 'A'])
def test_construction_from_string(self, freq):
# non-empty
expected = date_range(start='2017-01-01', periods=5,
freq=freq, name='foo').to_period()
start, end = str(expected[0]), str(expected[-1])
result = period_range(start=start, end=end, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
result = period_range(start=start, periods=5, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
result = period_range(end=end, periods=5, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
# empty
expected = PeriodIndex([], freq=freq, name='foo')
result = period_range(start=start, periods=0, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
result = period_range(end=end, periods=0, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
result = period_range(start=end, end=start, freq=freq, name='foo')
tm.assert_index_equal(result, expected)
def test_construction_from_period(self):
# upsampling
start, end = Period('2017Q1', freq='Q'), Period('2018Q1', freq='Q')
expected = date_range(start='2017-03-31', end='2018-03-31', freq='M',
name='foo').to_period()
result = period_range(start=start, end=end, freq='M', name='foo')
tm.assert_index_equal(result, expected)
# downsampling
start, end = Period('2017-1', freq='M'), Period('2019-12', freq='M')
expected = date_range(start='2017-01-31', end='2019-12-31', freq='Q',
name='foo').to_period()
result = period_range(start=start, end=end, freq='Q', name='foo')
tm.assert_index_equal(result, expected)
# empty
expected = PeriodIndex([], freq='W', name='foo')
result = period_range(start=start, periods=0, freq='W', name='foo')
tm.assert_index_equal(result, expected)
result = period_range(end=end, periods=0, freq='W', name='foo')
tm.assert_index_equal(result, expected)
result = period_range(start=end, end=start, freq='W', name='foo')
tm.assert_index_equal(result, expected)
def test_errors(self):
# not enough params
msg = ('Of the three parameters: start, end, and periods, '
'exactly two must be specified')
with tm.assert_raises_regex(ValueError, msg):
period_range(start='2017Q1')
with tm.assert_raises_regex(ValueError, msg):
period_range(end='2017Q1')
with tm.assert_raises_regex(ValueError, msg):
period_range(periods=5)
with tm.assert_raises_regex(ValueError, msg):
period_range()
# too many params
with tm.assert_raises_regex(ValueError, msg):
period_range(start='2017Q1', end='2018Q1', periods=8, freq='Q')
# start/end NaT
msg = 'start and end must not be NaT'
with tm.assert_raises_regex(ValueError, msg):
period_range(start=NaT, end='2018Q1')
with tm.assert_raises_regex(ValueError, msg):
period_range(start='2017Q1', end=NaT)
# invalid periods param
msg = 'periods must be a number, got foo'
with tm.assert_raises_regex(TypeError, msg):
period_range(start='2017Q1', periods='foo')
| [
"[email protected]"
] | |
820d4e8465a9ca575fe26c9092050f29834c8f99 | 5891051796778cfb44a255248ce38789bfef9e70 | /DjangoLearn/apps/test_django/models.py | 8820b1517c116ec9de6dbc52b1199d2373a5e12e | [] | no_license | Faithlmy/Python_base | cc546a5d86b123e102a69df1227cde9b6e567493 | 5a43557e6375dc9dbe5f6701d7c10e549873a5ab | refs/heads/master | 2021-01-01T17:07:04.097978 | 2018-03-31T16:44:01 | 2018-03-31T16:44:01 | 98,000,621 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,296 | py | from django.db import models
# Create your models here.
class TbCustomInfo(models.Model):
c_name = models.CharField(max_length=255, blank=True, null=True)
c_name_short = models.CharField(max_length=255, blank=True, null=True)
c_num = models.CharField(max_length=255, blank=True, null=True)
c_type = models.CharField(max_length=255, blank=True, null=True)
c_site = models.CharField(max_length=255, blank=True, null=True)
create_time = models.DateTimeField(blank=True, null=True, auto_now_add=True)
modify_time = models.DateTimeField(blank=True, null=True, auto_now_add=True)
delete_flag = models.IntegerField(blank=True, null=True)
class Meta:
managed = False
db_table = 'tb_custom_info'
class TbCustomerPaper(models.Model):
# mm = models.Manager
ecn_no = models.CharField(db_column='ECN_NO', max_length=255, blank=True, null=True) # Field name made lowercase.
version = models.CharField(max_length=255, blank=True, null=True)
p_name = models.CharField(max_length=255, blank=True, null=True)
st_site = models.CharField(db_column='ST_site', max_length=255, blank=True, null=True) # Field name made lowercase.
drawing_type = models.CharField(max_length=255, blank=True, null=True)
definer_name = models.CharField(max_length=255, blank=True, null=True)
c_name = models.CharField(max_length=255, blank=True, null=True)
c_type = models.CharField(max_length=255, blank=True, null=True)
c_site = models.CharField(max_length=255, blank=True, null=True)
uploader = models.CharField(max_length=255, blank=True, null=True)
custters_spec = models.CharField(max_length=255, blank=True, null=True)
case_name = models.CharField(max_length=255, blank=True, null=True)
priority = models.IntegerField(blank=True, null=True)
alter_cause = models.TextField(blank=True, null=True)
alter_front = models.TextField(blank=True, null=True)
alter_later = models.TextField(blank=True, null=True)
desc_file = models.TextField(blank=True, null=True)
create_time = models.DateTimeField(blank=True, null=True)
modify_time = models.DateTimeField(blank=True, null=True)
upload_time = models.DateTimeField(blank=True, null=True)
c_confirm = models.IntegerField(blank=True, null=True)
customer_info = models.ForeignKey('TbCustomInfo', on_delete=models.DO_NOTHING, db_column='customer_info', blank=True, null=True)
copy_id = models.IntegerField(blank=True, null=True)
drawingtype = models.IntegerField(blank=True, null=True)
modify_draft = models.IntegerField(blank=True, null=True)
valid = models.IntegerField(blank=True, null=True)
create_people = models.CharField(max_length=255, blank=True, null=True)
current = models.IntegerField(blank=True, null=True)
enabled = models.IntegerField(blank=True, null=True)
class Meta:
managed = False
db_table = 'tb_customer_paper'
class TbDrawingFile(models.Model):
file_name = models.CharField(max_length=255, blank=True, null=True)
file_size = models.IntegerField(blank=True, null=True)
file_path = models.CharField(max_length=255, blank=True, null=True)
uploader = models.CharField(max_length=255, blank=True, null=True)
create_time = models.DateTimeField(blank=True, null=True)
customer = models.ForeignKey('TbCustomerPaper', on_delete=models.DO_NOTHING, blank=True, null=True)
file_type = models.IntegerField(blank=True, null=True)
# project = models.ForeignKey('TbProjectPaper', on_delete=models.DO_NOTHING, blank=True, null=True)
# drawing_type = models.ForeignKey('TbDrawing', on_delete=models.DO_NOTHING, blank=True, null=True)
cus = models.IntegerField(blank=True, null=True)
drawing_type_name = models.CharField(max_length=255, blank=True, null=True)
dev_type = models.CharField(max_length=255, blank=True, null=True)
# id_dev_type = models.ForeignKey('TbTypeDevice', on_delete=models.DO_NOTHING, db_column='id_dev_type', blank=True, null=True)
b_all = models.IntegerField(blank=True, null=True)
sign_filepath = models.CharField(max_length=255, blank=True, null=True)
seal_filepath = models.CharField(max_length=255, blank=True, null=True)
class Meta:
managed = False
db_table = 'tb_drawing_file'
| [
"[email protected]"
] | |
717bd41fe9b212c3ecf9faf925de10324dd12ab9 | ef48efff7b9022f9745145509193d551f4084376 | /novedades/models.py | f731d1dcb004d9927fff918f8e0a6360dcc718a1 | [] | no_license | ljarufe/incamotors | b5ace5cfb2f5208a31859f06da3e6cf46867b35c | 79926654e286e9fd496bb1be9ce8d03ca218d654 | refs/heads/master | 2020-05-06T12:20:00.996574 | 2013-10-21T16:09:20 | 2013-10-21T16:09:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,527 | py | # -*- coding: utf-8 -*-
from django.db import models
from sorl.thumbnail.fields import ImageWithThumbnailsField
class Novedad(models.Model):
"""
    Abstract base class for all of the news-style models
"""
nombre = models.CharField(max_length=100)
descripcion = models.TextField(verbose_name=u"descripción")
def __unicode__(self):
return '%s' % self.nombre
class Meta:
abstract = True
class Evento(Novedad):
"""
    Events shown on the home page
"""
foto = ImageWithThumbnailsField(
upload_to = 'img/eventos',
thumbnail = {'size': (625, 420),
'options': ['upscale', 'max', 'crop']},
generate_on_save = True,
)
class Evento_home(Novedad):
"""
    Events for the home page
"""
foto = ImageWithThumbnailsField(
upload_to = 'img/eventos',
thumbnail = {'size': (625, 420),
'options': ['upscale', 'max', 'crop']},
generate_on_save = True,
)
foto_pie = ImageWithThumbnailsField(
upload_to = 'img/eventos',
thumbnail = {'size': (625, 220),
'options': ['upscale', 'max', 'crop']},
generate_on_save = True,
)
class Meta:
verbose_name = u"Evento en home"
verbose_name_plural = u"Eventos en home"
class Promocion(Novedad):
"""
    Incamotors promotions; same structure as events
"""
foto = ImageWithThumbnailsField(
upload_to = 'img/promociones',
thumbnail = {'size': (625, 420),
'options': ['upscale', 'max', 'crop']},
generate_on_save = True,
)
class Meta:
verbose_name = u"promoción"
verbose_name_plural = u"promociones"
class Noticia(Novedad):
"""
    News items for the top menu
"""
foto = ImageWithThumbnailsField(
upload_to = 'img/noticias',
thumbnail = {'size': (625, 420),
'options': ['upscale', 'max', 'crop']},
generate_on_save = True,
)
class Enlace(models.Model):
"""
    Links to other websites
"""
url = models.URLField()
descripcion = models.TextField(verbose_name=u"descripción")
def __unicode__(self):
return u"%s" % self.url
| [
"[email protected]"
] | |
d0da70c270dd71617fd4d3449d199f1a9f94549a | 8004cc465359aecb7a1890617646ea5b49f11d9a | /cnn_models/cnn_dropout.py | 4034ce3be9e2c7aad18f0a543a39bd23928ad07f | [] | no_license | jhyang12345/facial-recognition | 661e34d874986943f50b5b9691d186760957b594 | 2f2ba5cfcbd58efbc9de5b9f0bafc7bc640d9c26 | refs/heads/master | 2020-04-06T09:52:20.932793 | 2018-12-13T08:15:25 | 2018-12-13T08:15:25 | 157,360,076 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,747 | py | from keras.layers import Input, Convolution2D, SeparableConvolution2D, \
GlobalAveragePooling2D, GlobalMaxPooling2D, MaxPooling2D, \
Dense, Activation, BatchNormalization, Dropout
from keras.models import Sequential, Model
from keras.callbacks import ModelCheckpoint
class CNNDropout:
def __init__(self, input_shape=(128, 128, 3), summarize=True):
self.image_width = input_shape[0]
self.image_height = input_shape[1]
self.channels = input_shape[2]
self.input_shape = input_shape
self.alpha = 1
self.name = "cnn_dropout"
self.model = None
self.checkpoint_path = 'models/cnn_dropout.best.hdf5'
self.checkpointer = ModelCheckpoint(filepath=self.checkpoint_path, verbose=1,
save_best_only=True)
self.build_model()
if summarize: self.model.summary()
def build_model(self):
model_input = Input(shape=self.input_shape)
alpha = self.alpha
activation_type = 'relu'
# applying dropout factor to prevent overfitting
dropout_factor = 0.4
# input format will usually be 128 or 2^7
# strides of 2 halfs input shape
# usually kernel sizes are in odd numbers
# kernel strides alternate between 1 and 2 so that we don't miss out
x = Convolution2D(int(32 * alpha), (3, 3), strides=(1, 1), padding='same')(model_input)
x = BatchNormalization()(x)
x = Activation(activation_type)(x)
x = Convolution2D(int(64 * alpha), (3, 3), strides=(1, 1), padding='same')(x)
x = BatchNormalization()(x)
x = Activation(activation_type)(x)
x = Convolution2D(int(64 * alpha), (3, 3), strides=(2, 2), padding='same')(x)
x = BatchNormalization()(x)
x = Activation(activation_type)(x)
# kernel size of 3 halfs the input dimensions
x = MaxPooling2D(pool_size=(3, 3), strides=1, padding='same')(x)
x = Dropout(dropout_factor)(x)
x = Convolution2D(int(128 * alpha), (3, 3), strides=(1, 1), padding='same')(x)
x = BatchNormalization()(x)
x = Activation(activation_type)(x)
x = Convolution2D(int(128 * alpha), (3, 3), strides=(2, 2), padding='same')(x)
x = BatchNormalization()(x)
x = Activation(activation_type)(x)
x = MaxPooling2D(pool_size=(3, 3), strides=1, padding='same')(x)
x = Dropout(dropout_factor)(x)
x = Convolution2D(int(256 * alpha), (3, 3), strides=(1, 1), padding='same')(x)
x = BatchNormalization()(x)
x = Activation(activation_type)(x)
x = Convolution2D(int(256 * alpha), (3, 3), strides=(2, 2), padding='same')(x)
x = BatchNormalization()(x)
x = Activation(activation_type)(x)
x = MaxPooling2D(pool_size=(3, 3), strides=1, padding='same')(x)
x = Dropout(dropout_factor)(x)
x = Convolution2D(int(512 * alpha), (3, 3), strides=(1, 1), padding='same')(x)
x = BatchNormalization()(x)
x = Activation(activation_type)(x)
x = Convolution2D(int(512 * alpha), (3, 3), strides=(2, 2), padding='same')(x)
x = BatchNormalization()(x)
x = Activation(activation_type)(x)
# basically flattens a dimension
x = GlobalMaxPooling2D()(x)
# maybe add another dense layer in between
out = Dense(1, activation='sigmoid')(x)
self.model = Model(model_input, out, name='cnn_dropout')
self.model.compile(loss='binary_crossentropy', optimizer='adam',
metrics=['accuracy'])
def load_model(self):
self.model.load_weights(self.checkpoint_path)
if __name__ == '__main__':
CNNPool()
| [
"[email protected]"
] | |
7455afa728f6af5d47411cfac7fe64159c81c7f0 | 7e20c4ad991159e46462c1ef6674340d1427452e | /mfr/version.py | 3e33c308c4918e8b00b6d51b698aa58ad76155f4 | [
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
] | permissive | CenterForOpenScience/modular-file-renderer | 2758fa4142251b21a10f0f8f42fdae135dd69497 | 64a04870eb6b358e4ba65dfe275869450868a2aa | refs/heads/develop | 2023-08-09T03:27:20.875353 | 2023-04-17T18:57:47 | 2023-04-17T18:57:47 | 13,421,446 | 37 | 49 | Apache-2.0 | 2023-04-17T18:58:12 | 2013-10-08T18:11:00 | JavaScript | UTF-8 | Python | false | false | 23 | py | __version__ = '23.1.0'
| [
"[email protected]"
] | |
6496d16a1187ee2afb1d4f13a192c17ebc29b49a | e71b6d14fbdbc57c7234ca45a47329d7d02fc6f7 | /flask_api/venv/lib/python3.7/site-packages/vsts/work/v4_1/models/board_reference.py | 6e58999b0b5322ab38b6dc0d498ad651c3982709 | [] | no_license | u-blavins/secret_sasquatch_society | c36993c738ab29a6a4879bfbeb78a5803f4f2a57 | 0214eadcdfa9b40254e331a6617c50b422212f4c | refs/heads/master | 2020-08-14T00:39:52.948272 | 2020-01-22T13:54:58 | 2020-01-22T13:54:58 | 215,058,646 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,204 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class BoardReference(Model):
"""BoardReference.
:param id: Id of the resource
:type id: str
:param name: Name of the resource
:type name: str
:param url: Full http link to the resource
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'}
}
def __init__(self, id=None, name=None, url=None):
super(BoardReference, self).__init__()
self.id = id
self.name = name
self.url = url
| [
"[email protected]"
] | |
e7bab45d83b2b3418bbf9dfb6ebb11ed89573d0a | 13a32b92b1ba8ffb07e810dcc8ccdf1b8b1671ab | /home--tommy--mypy/mypy/lib/python2.7/site-packages/joblib/test/common.py | 038ab537b9faa4cf059cd0c8b2edab674cdc1a7a | [
"Unlicense"
] | permissive | tommybutler/mlearnpy2 | 8ec52bcd03208c9771d8d02ede8eaa91a95bda30 | 9e5d377d0242ac5eb1e82a357e6701095a8ca1ff | refs/heads/master | 2022-10-24T23:30:18.705329 | 2022-10-17T15:41:37 | 2022-10-17T15:41:37 | 118,529,175 | 0 | 2 | Unlicense | 2022-10-15T23:32:18 | 2018-01-22T23:27:10 | Python | UTF-8 | Python | false | false | 3,061 | py | """
Small utilities for testing.
"""
import threading
import signal
import time
import os
import sys
import gc
from joblib._multiprocessing_helpers import mp
from joblib.testing import SkipTest, skipif
# A decorator to run tests only when numpy is available
try:
import numpy as np
def with_numpy(func):
"""A decorator to skip tests requiring numpy."""
return func
except ImportError:
def with_numpy(func):
"""A decorator to skip tests requiring numpy."""
def my_func():
raise SkipTest('Test requires numpy')
return my_func
np = None
# TODO: Turn this back on after refactoring yield based tests in test_hashing
# with_numpy = skipif(not np, reason='Test requires numpy.')
# we use memory_profiler library for memory consumption checks
try:
from memory_profiler import memory_usage
def with_memory_profiler(func):
"""A decorator to skip tests requiring memory_profiler."""
return func
def memory_used(func, *args, **kwargs):
"""Compute memory usage when executing func."""
gc.collect()
mem_use = memory_usage((func, args, kwargs), interval=.001)
return max(mem_use) - min(mem_use)
except ImportError:
def with_memory_profiler(func):
"""A decorator to skip tests requiring memory_profiler."""
def dummy_func():
raise SkipTest('Test requires memory_profiler.')
return dummy_func
memory_usage = memory_used = None
# A utility to kill the test runner in case a multiprocessing assumption
# triggers an infinite wait on a pipe by the master process for one of its
# failed workers
_KILLER_THREADS = dict()
def setup_autokill(module_name, timeout=30):
"""Timeout based suiciding thread to kill the test runner process
If some subprocess dies in an unexpected way we don't want the
parent process to block indefinitely.
"""
if "NO_AUTOKILL" in os.environ or "--pdb" in sys.argv:
# Do not install the autokiller
return
# Renew any previous contract under that name by first cancelling the
# previous version (that should normally not happen in practice)
teardown_autokill(module_name)
def autokill():
pid = os.getpid()
print("Timeout exceeded: terminating stalled process: %d" % pid)
os.kill(pid, signal.SIGTERM)
# If were are still there ask the OS to kill ourself for real
time.sleep(0.5)
print("Timeout exceeded: killing stalled process: %d" % pid)
os.kill(pid, signal.SIGKILL)
_KILLER_THREADS[module_name] = t = threading.Timer(timeout, autokill)
t.start()
def teardown_autokill(module_name):
"""Cancel a previously started killer thread"""
killer = _KILLER_THREADS.get(module_name)
if killer is not None:
killer.cancel()
with_multiprocessing = skipif(
mp is None, reason='Needs multiprocessing to run.')
with_dev_shm = skipif(
not os.path.exists('/dev/shm'),
reason='This test requires the /dev/shm shared memory fs.')
| [
"[email protected]"
] | |
dae634b72dac458cfa57d1bcb809f4d6d4bedf11 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2790/60730/256936.py | ed7cd9f25137e77fa2fe3809ef5f880ec7402267 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 450 | py | num_m, num_n = map(int, input().split())
m = list(map(int, input().split()))
n = list(map(int, input().split()))
m.sort(reverse=False)
tmp = 0
for i in range(num_n):
for j in range(num_m):
if (m[j] <= n[i]):
tmp = tmp + 1
if (j == 4):
print(str(tmp))
tmp = 0;
else:
continue
else:
print(str(tmp))
tmp = 0
break | [
"[email protected]"
] | |
61786e2faf6ca3617cc2547869f76cce86441d76 | b76c6813f2ce2fd24a33175a0249cd9544583fe7 | /acerca_de/url_acerca_de.py | 50417f63558bf4aa93ed35902ad121d612af8286 | [] | no_license | adrianglez2203/nuevo_as | 0074e6d8155a471bb7d81bc3456914acdc7fba98 | df375410e9d6922ebb931645ff8f1c7b3f5cb93b | refs/heads/master | 2022-08-01T23:43:51.328124 | 2020-06-06T15:35:23 | 2020-06-06T15:35:23 | 270,111,577 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | from django.urls import path
from acerca_de import views
urlpatterns = [
path('acerca',views.vista , name='acerca_de'),
] | [
"[email protected]"
] | |
ba8c22cf3cea258a5f767182a5ff8dbf68b2b507 | 16b26e6a9e6d6a7db2a20a6327b3d042e2245747 | /bigramas/bigramas.py | dbef177920a9911443ced6ff74a1eed9c8710e79 | [
"Unlicense"
] | permissive | jabaier/iic1103.20152.s4 | 3826b8de35470acc0387c8199b6ecce50d4222bd | 63ddd5f9b73caff218b6744e7392e7a66afba570 | refs/heads/master | 2020-05-27T16:59:32.232746 | 2015-11-20T14:34:21 | 2015-11-20T14:34:21 | 41,114,018 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,540 | py | import sys
import random
class Bigramas:
def __init__(self):
self.bigramas=[] # lista de bigramas
self.nbigramas=0 # numero de bigramas
def buscar(self,palabra):
for par in self.bigramas:
if par[0]==palabra:
return par[1]
        # the word is not in the table yet
self.bigramas.append([palabra,[]])
return self.bigramas[len(self.bigramas)-1][1]
def incrementar(self,lista,palabra):
i=0
while i < len(lista):
if lista[i][0]==palabra:
lista[i][1] = lista[i][1] + 1
return
i+=1
lista.append([palabra,1])
self.nbigramas=self.nbigramas+1
if self.nbigramas%1000==0:
print(".",end="")
sys.stdout.flush()
def agregar(self,pal1,pal2):
lista=self.buscar(pal1)
self.incrementar(lista,pal2)
def caminata(self,palabra,limite):
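        # Random walk over the stored bigrams: starting from `palabra`, repeatedly
        # pick the next word with probability proportional to its bigram count;
        # if the current word has no recorded successors, jump to a random word.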
def sumalista(lista):
suma=0
for elem in lista:
suma+=elem[1]
return suma
contador=0
lista_palabras=[]
while contador < limite:
lista_palabras.append(palabra)
lista=self.buscar(palabra)
if len(lista)>0:
total=sumalista(lista)
rand=random.randint(0,total)
acc=0
i=0
while acc + lista[i][1] < rand and i < len(lista):
acc = acc + lista[i][1]
i = i + 1
palabra=lista[i][0]
else:
                palabra=palabras[random.randint(0,len(palabras)-1)]
contador = contador + 1
return lista_palabras
def limpiar(palabra):
puntuacion=".,!:;?»«-¿¡"
respuesta=""
for c in palabra:
if not c in puntuacion:
respuesta += c
return respuesta
filename = input("Archivo con datos: ")
f = open(filename,'r')
print("Leyendo los datos")
entrada=''
for linea in f:
linea.rstrip()
entrada += linea
import time
tic1=time.time()
print("Datos leidos. Procesando.",end="")
f.close()
palabras = [limpiar(x.lower()) for x in entrada.split() if x!=""]
b=Bigramas()
i=0
while i < len(palabras)-1:
b.agregar(palabras[i],palabras[i+1])
i+=1
tic2=time.time()
print("\nDatos procesados en ",round(tic2-tic1,2),"seg.")
print("Base de datos tiene",len(b.bigramas),"palabras y",b.nbigramas,"bigramas.")
print(b.bigramas)
while True:
p = input("palabra: ")
print(b.caminata(p,20))
| [
"[email protected]"
] | |
11f4ba38d434e1643f04aaf34ec8646b27782520 | 7e0fdfb76cae8145a4bbac0cac0e4cac6b8e3788 | /dingding_shouqi/dingding_shouqi.py | daaee3a0bc4aec488160ca6adf2ad171e17d3204 | [] | no_license | w193241125/dingdingdaka | 603b06323b5c5e3341e0077614e044b2c80c038b | 3d2088d2e876fc4e80fc06bea3eaa5b8833392ed | refs/heads/master | 2020-05-17T21:45:44.276659 | 2019-05-13T10:16:29 | 2019-05-13T10:16:29 | 183,981,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,007 | py | #!/usr/local/bin python
# -*- coding: utf-8 -*-
# @Time : 2019/3/5 10:38
# @Author : Larwas
# @Link : [email protected]
# @Site : www.larwas.com
# @File : dingding.py
# @Software: PyCharm
import os
import re
import time
import urllib
import json
import requests
# nameplt = re.compile("package: name='(.*?)' versionCode")
# activityplt = re.compile("launchable-activity: name='(.*?)'")
adbshell = "adb shell" # 启用shell命令可以直接操作Android系统
# adbstr = "adb push D:/1.txt /mnt/txt/1.txt" # 把电脑的文件推送到安卓
# adbpng1 = "adb pull /sdcard/screencap.png d://"
# adbpng2 = "adb pull /sdcard/screencap.png d://1.png"
# adb_use_screencap = "adb shell /system/bin/screencap -p /sdcard/667.png" # 截取安卓的屏幕
# adbpng3 = "adb pull /sdcard/667.png d://3.png" # 把安卓的截图导入到电脑
# get_app_info = "adb shell pm list packages" # 获取模拟器所有包名
tap_place_index = "adb shell input tap 78 1219"
tap_place = "adb shell input tap 363 1219"
tap_place_kaoqin = "adb shell input tap 272 724"
tap_place_kaoqin2 = "adb shell input tap 268 1051"
tap_place_daka = "adb shell input tap 185 1234"
tap_place_shangban = "adb shell input tap 353 499"
tap_place_xiaban = "adb shell input tap 353 756"
wake_up = "adb shell input keyevent 26"
unlock = "adb shell input swipe 370 952 370 318"
wrap_a = "adb shell input swipe 370 318 370 952"
shut_up = "adb shell input tap 654 594" # 收起
return_a = "adb shell input tap 52 101"
return_b = "adb shell input tap 156 101"
return_ding_index = "adb shell input tap 75 1224"
return_index = "adb shell input keyevent 3"
turnback = "adb shell input keyevent 4"
donyin_package_name = "com.alibaba.android.rimet"
douyin_activity_name = "com.alibaba.android.rimet.biz.SplashActivity"
power_stat = "adb shell dumpsys window policy"
kill_dingding = "adb shell am force-stop com.alibaba.android.rimet"
# Get the app's package name and launch activity (originally extracted with aapt, see commented-out code below)
# get_com_info = r"aapt dump badging G:\python\dingding\rimet.apk > dingding.txt"
# os.system(get_com_info)
# with open("dingding.txt", "r", encoding="utf-8") as fs:
# donyin = fs.read()
#
# donyin_package_name = nameplt.findall(donyin)[0]
# douyin_activity_name = activityplt.findall(donyin)[0]
#
# print("钉钉activity", douyin_activity_name)
# print("钉钉的包名", donyin_package_name)
# os.system(adb_use_screencap)
# #print(os.system(adbpng3))
start_app = f"adb shell am start -n {donyin_package_name}/{douyin_activity_name}"
start_airdroid = f"adb shell am start -n com.sand.airdroid/com.sand.airdroid.ui.splash.SplashActivity_"
# Get the current week number of the year
current_week = time.strftime("%W")
# Week parity: 1 means a two-day-weekend week, 0 means a one-day-weekend week
mod = int(current_week) % 2
# Get today's day of the week
current_weekday = time.strftime("%w", time.localtime())
# Get the current time as HH:MM
current_time = time.strftime('%H:%M', time.localtime(time.time()))
def isAwaked(deviceid = ''):
'''
    The check is based on a marker line in the 'dumpsys window policy' output.
'''
if deviceid == '':
cmd = 'adb shell dumpsys window policy'
else:
cmd = 'adb -s ' + deviceid + ' shell dumpsys window policy'
screenAwakevalue = ' mScreenOnEarly=true mScreenOnFully=true mOrientationSensorEnabled=false\n'
allList = os.popen(cmd).readlines()
if screenAwakevalue in allList:
return True
else:
return False
def isLock(deviceid = ''):
'''
    The check is based on a marker line in the 'dumpsys window policy' output.
'''
if deviceid == '':
cmd = 'adb shell dumpsys window policy'
else:
cmd = 'adb -s ' + deviceid + ' shell dumpsys window policy'
screenAwakevalue = ' mShowingLockscreen=true mShowingDream=false mDreamingLockscreen=true\n'
allList = os.popen(cmd).readlines()
if screenAwakevalue in allList:
return True # 锁着
else:
return False # 没锁
def sign():
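    # Wake and unlock the phone, launch DingTalk, navigate to the attendance screen,
    # and tap the clock-in or clock-out button depending on the time of day.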
start_app = f"adb shell am start -n {donyin_package_name}/{douyin_activity_name}"
if isAwaked():
if isLock():
print('unlock')
os.system(unlock)
else:
pass
else:
        # Wake the screen and unlock it
print('wake up and unlock')
os.system(wake_up)
time.sleep(1)
os.system(unlock)
print("启动dd")
os.system(start_app)
time.sleep(20)
if isAwaked():
if isLock():
os.system(unlock)
else:
pass
else:
        # Wake the screen and unlock it
os.system(wake_up)
time.sleep(1)
os.system(unlock)
    # Drive the DingTalk UI
os.system(tap_place_index)
time.sleep(2)
os.system(tap_place)
    # Swipe down once to make sure the screen is in the expected position
os.system(wrap_a)
    # Collapse the expanded panel
time.sleep(4)
print('收起')
os.system(shut_up)
time.sleep(3)
print('点击考勤')
os.system(tap_place_kaoqin2)
# os.system(tap_place_kaoqin)
# time.sleep(6)
# os.system(tap_place_daka)
time.sleep(10)
if current_time <= "10:30":
os.system(tap_place_shangban)
print(1)
time.sleep(5)
        # Finished clocking in, go back
os.system(turnback)
else:
if current_time < "20:00":
os.system(tap_place_xiaban)
print("下班")
else:
os.system(tap_place_xiaban)
print("点击一次下班保证打了卡")
time.sleep(3)
tap_place_gengxin = "adb shell input tap 115 713"
tap_place_gengxin_queren = "adb shell input tap 569 713"
os.system(tap_place_gengxin)
print("更新")
time.sleep(3)
os.system(tap_place_gengxin_queren)
time.sleep(5)
            # Finished clocking out, go back
os.system(turnback)
    # Exit back out of the app
os.system(return_a)
os.system(return_b)
os.system(return_ding_index)
os.system(return_index)
# Get the current date in YYYYMMDD format, e.g. 20180213
nowTime = time.strftime('%Y%m%d', time.localtime())
date = nowTime
# print(date)
# Public-holiday lookup API
server_url = "http://api.goseek.cn/Tools/holiday?date="
vop_url_request = requests.get(server_url + date)
vop_response = vop_url_request.text
vop_data = json.loads(vop_response)
print(vop_data)
# End of the holiday lookup
if vop_data['data'] == 1:
pass # 法定节假日跳过
else:
print('不是法定节假日')
if isAwaked():
if isLock():
print('unlock')
os.system(unlock)
else:
print('屏幕没锁')
pass
else:
        # Wake the screen and unlock it
print('wake up and unlock2')
        # Wake the screen and unlock it
os.system(wake_up)
time.sleep(1)
os.system(unlock)
time.sleep(1)
os.system(return_index)
    # Clock in/out logic for weeks with a two-day weekend
if mod == 1 and int(current_weekday) in [1, 2, 3, 4, 5]:
if int(current_weekday) == 5 and current_time < "20:30" and current_time > "10:30":
sign() # 打卡
elif int(current_weekday) in [1, 2, 3, 4] and current_time > "20:30":
sign() # 打卡
elif int(current_weekday) in [1, 2, 3, 4, 5] and current_time < "10:30":
sign()
else:
if current_time > "18:00":
sign()
else:
print('不是周末,打卡太早1') # 跳过
    # Clock in/out logic for weeks with a one-day weekend
elif mod == 0 and int(current_weekday) in [1, 2, 3, 4, 5, 6]:
if int(current_weekday) == 6 and current_time < "20:30" and current_time > "10:30":
sign() # 打下班卡
elif int(current_weekday) in [1, 2, 3, 4, 5] and current_time > "20:30":
sign() # 打下班卡
elif int(current_weekday) in [1, 2, 3, 4, 5,6] and current_time < "10:30":
sign() # 打上班卡
else:
if current_time > "18:00":
sign()
else:
print('不是周末,打卡太早_单休') # 跳过
else:
print('未知原因取消打卡') # 跳过
os.system(kill_dingding)
os.system(start_airdroid)
time.sleep(3)
os.system(wake_up)
time.sleep(3)
exit()
| [
"[email protected]"
] | |
68f57b90cb2d6b9dd5cb519f88279a89150b0f99 | 070eabad59ba058bbdf4a8d76be03c692143cd32 | /exp/exp12.py | 554b19772087d50833d69b718d2632c0dc1dfe03 | [] | no_license | osuossu8/Kaggle_Bengali2019 | 1ebaabd36a0225a42e77d07ea2da04a7860a01d4 | a32397e73b2861cb677988c7d39abb719e229d8c | refs/heads/master | 2022-04-20T16:19:36.961665 | 2020-03-14T13:02:38 | 2020-03-14T13:02:38 | 239,647,488 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,950 | py | import numpy as np
import pandas as pd
import albumentations
import argparse
import collections
import cv2
import datetime
import gc
import glob
import logging
import math
import operator
import os
import pickle
import pkg_resources
import random
import re
import scipy.stats as stats
import seaborn as sns
import shutil
import sys
import time
import torch
import torch.nn as nn
import torch.utils.data
import torch.nn.functional as F
import torchvision
import torchvision.transforms as transforms
from torchvision import models, transforms
from contextlib import contextmanager
from collections import OrderedDict
from sklearn import metrics
from sklearn import model_selection
from sklearn.model_selection import KFold, GroupKFold
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.decomposition import TruncatedSVD
from sklearn.metrics import mean_squared_log_error
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder, OneHotEncoder, StandardScaler
from torch.nn import CrossEntropyLoss, MSELoss
import torch.optim as optim
from torch.optim import lr_scheduler
from torch.utils.data import (Dataset,DataLoader, RandomSampler, SequentialSampler,
TensorDataset)
import tensorflow as tf
import PIL
from PIL import Image
from tqdm import tqdm, tqdm_notebook, trange
import warnings
warnings.filterwarnings('ignore')
# from apex import amp
from torch.utils.data.distributed import DistributedSampler
try:
from torch.utils.tensorboard import SummaryWriter
except:
from tensorboardX import SummaryWriter
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
sys.path.append("/usr/src/app/kaggle/bengaliai-cv19")
from src.machine_learning_util import seed_everything, prepare_labels, DownSampler, timer, \
to_pickle, unpickle
from src.image_util import resize_to_square_PIL, pad_PIL, threshold_image, \
bbox, crop_resize, Resize, \
image_to_tensor, train_one_epoch, validate, macro_recall
from src.scheduler import GradualWarmupScheduler
from src.layers import ResidualBlock
from src.image_bengali import rand_bbox, cutmix, mixup, cutmix_criterion, mixup_criterion
from src.trainer_bengali import train_one_epoch_mixup_cutmix
SEED = 1129
seed_everything(SEED)
LOGGER = logging.getLogger()
FORMATTER = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
def setup_logger(out_file=None, stderr=True, stderr_level=logging.INFO, file_level=logging.DEBUG):
LOGGER.handlers = []
LOGGER.setLevel(min(stderr_level, file_level))
if stderr:
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(FORMATTER)
handler.setLevel(stderr_level)
LOGGER.addHandler(handler)
if out_file is not None:
handler = logging.FileHandler(out_file)
handler.setFormatter(FORMATTER)
handler.setLevel(file_level)
LOGGER.addHandler(handler)
LOGGER.info("logger set up")
return LOGGER
EXP_ID = "exp12_mixup_cutmix_15epoch_4e-4"
LOGGER_PATH = f"logs/log_{EXP_ID}.txt"
setup_logger(out_file=LOGGER_PATH)
LOGGER.info("seed={}".format(SEED))
SIZE = 128
HEIGHT=137
WIDTH=236
OUT_DIR = 'models'
# https://albumentations.readthedocs.io/en/latest/api/augmentations.html
data_transforms = albumentations.Compose([
albumentations.Flip(p=0.2),
albumentations.Rotate(limit=15, p=0.2),
albumentations.ShiftScaleRotate(rotate_limit=15, p=0.2),
albumentations.Cutout(p=0.2),
# albumentations.RandomGridShuffle(grid=(3, 3), p=0.2),
])
data_transforms_test = albumentations.Compose([
albumentations.Flip(p=0),
])
class BengaliAIDataset(torch.utils.data.Dataset):
def __init__(self, df, y=None, transform=None):
self.df = df
self.y = y
self.transform = transform
self.size = 128
def __len__(self):
return len(self.df)
def __getitem__(self, idx):
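        # Each dataframe row holds an id column followed by a flattened 128x128
        # grayscale image; 0.0692 and 0.2051 below are presumably the dataset mean and std.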
input_dic = {}
row = self.df.iloc[idx]
image = self.df.iloc[idx][1:].values.reshape(128,128).astype(np.float)
if self.transform is not None:
image = np.array(image)
image = threshold_image(image)
image = self.transform(image=image)['image']
image = (image.astype(np.float32) - 0.0692) / 0.2051
image = image_to_tensor(image, normalize=None)
else:
image = np.array(image)
image = (image.astype(np.float32) - 0.0692) / 0.2051
image = image_to_tensor(image, normalize=None)
input_dic["image"] = image
if self.y is not None:
label1 = self.y.vowel_diacritic.values[idx]
label2 = self.y.grapheme_root.values[idx]
label3 = self.y.consonant_diacritic.values[idx]
return input_dic, label1, label2, label3
else:
return input_dic
hidden_size = 64
channel_size = 1
class ResNet18(nn.Module):
def __init__(self):
super(ResNet18,self).__init__()
self.block1 = nn.Sequential(
nn.Conv2d(channel_size,hidden_size,kernel_size=2,stride=2,padding=3,bias=False),
nn.BatchNorm2d(hidden_size),
nn.ReLU(True)
)
self.block2 = nn.Sequential(
nn.MaxPool2d(1,1),
ResidualBlock(hidden_size,hidden_size),
ResidualBlock(hidden_size,hidden_size,2)
)
self.block3 = nn.Sequential(
ResidualBlock(hidden_size,hidden_size*2),
ResidualBlock(hidden_size*2,hidden_size*2,2)
)
self.block4 = nn.Sequential(
ResidualBlock(hidden_size*2,hidden_size*4),
ResidualBlock(hidden_size*4,hidden_size*4,2)
)
self.block5 = nn.Sequential(
ResidualBlock(hidden_size*4,hidden_size*8),
ResidualBlock(hidden_size*8,hidden_size*8,2)
)
self.avgpool = nn.AvgPool2d(2)
self.fc = nn.Linear(512*4,512)
# vowel_diacritic
self.fc1 = nn.Linear(512,11)
# grapheme_root
self.fc2 = nn.Linear(512,168)
# consonant_diacritic
self.fc3 = nn.Linear(512,7)
def forward(self,x):
x = self.block1(x)
x = self.block2(x)
x = self.block3(x)
x = self.block4(x)
x = self.block5(x)
x = self.avgpool(x)
x = x.view(x.size(0),-1)
x = self.fc(x)
x1 = self.fc1(x)
x2 = self.fc2(x)
x3 = self.fc3(x)
return x1,x2,x3
with timer('load csv data'):
fold_id = 0
epochs = 15
batch_size = 64
train = pd.read_csv('input/train.csv')
y = train[["grapheme_root", "vowel_diacritic", "consonant_diacritic"]]
num_folds = 5
train_idx, val_idx = train_test_split(train.index.tolist(), test_size=0.2, random_state=SEED, stratify=train["grapheme_root"])
gc.collect()
with timer('load feather data'):
train_path = [
'input/resize_cropped_128_train_image_data_0.feather',
'input/resize_cropped_128_train_image_data_1.feather',
'input/resize_cropped_128_train_image_data_2.feather',
'input/resize_cropped_128_train_image_data_3.feather'
]
data0 = pd.read_feather(train_path[0])
data1 = pd.read_feather(train_path[1])
data2 = pd.read_feather(train_path[2])
data3 = pd.read_feather(train_path[3])
data = pd.concat([data0, data1, data2, data3])
print(data.shape)
del data0, data1, data2, data3
gc.collect()
with timer('prepare validation data'):
y_train = y.iloc[train_idx]
train_dataset = BengaliAIDataset(data.iloc[train_idx], y=y_train, transform=data_transforms)
train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size*4, shuffle=True, num_workers=0, pin_memory=True)
y_val = y.iloc[val_idx]
val_dataset = BengaliAIDataset(data.iloc[val_idx], y=y_val, transform=data_transforms_test)
val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=batch_size*2, shuffle=False, num_workers=0, pin_memory=True)
del train_dataset, val_dataset
gc.collect()
with timer('create model'):
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# model = torchvision.models.resnet50(pretrained=True)
# model.load_state_dict(torch.load("../input/pytorch-pretrained-models/resnet101-5d3b4d8f.pth"))
model = ResNet18()
model = model.to(device)
criterion = nn.CrossEntropyLoss(reduction='mean').to(device)
optimizer = optim.Adam(model.parameters(), lr=4e-4)
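    # Gradually warm the learning rate up over the first 5 epochs (multiplier 1.1),
    # with no follow-on scheduler afterwards.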
scheduler = GradualWarmupScheduler(optimizer, multiplier=1.1, total_epoch=5,
after_scheduler=None)
with timer('training loop'):
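    # Train with mixup/cutmix augmentation each epoch and keep the weights that
    # give the best macro-averaged recall on the validation split.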
best_score = -999
best_epoch = 0
for epoch in range(1, epochs + 1):
LOGGER.info("Starting {} epoch...".format(epoch))
# tr_loss = train_one_epoch(model, train_loader, criterion, optimizer, device)
tr_loss = train_one_epoch_mixup_cutmix(model, train_loader, criterion, optimizer, device)
LOGGER.info('Mean train loss: {}'.format(round(tr_loss, 5)))
val_pred, y_true, val_loss = validate(model, val_loader, criterion, device)
score = macro_recall(y_true, val_pred)
LOGGER.info('Mean valid loss: {} score: {}'.format(round(val_loss, 5), round(score, 5)))
if score > best_score:
best_score = score
best_epoch = epoch
torch.save(model.state_dict(), os.path.join(OUT_DIR, '{}_fold{}.pth'.format(EXP_ID, fold_id)))
np.save(os.path.join(OUT_DIR, "{}_fold{}.npy".format(EXP_ID, fold_id)), val_pred)
scheduler.step()
LOGGER.info("best score={} on epoch={}".format(best_score, best_epoch))
| [
"[email protected]"
] | |
16d7d093326b863bf363be47886a033059a9f1f4 | 96f181736c9975adfabd45cc776cab7a37d2e7a1 | /transformer/SubLayers.py | 636b67878e40d8e50b27f1d0f49b7d9bf2797668 | [
"MIT"
] | permissive | fangxiaoquan/transformer-pytorch | 6b43fb75635bb512c38c6f2ac8ec306b6e6ba5d9 | c9c5c81151c37ad7a088ea96aa5248fd4f4ad2d1 | refs/heads/master | 2020-05-17T00:36:59.073875 | 2019-03-17T14:42:02 | 2019-03-17T14:42:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,844 | py | ''' Define the sublayers in encoder/decoder layer '''
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from transformer.Modules import ScaledDotProductAttention
class MultiHeadAttention(nn.Module):
''' Multi-Head Attention module '''
def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1):
super().__init__()
self.n_head = n_head
self.d_k = d_k
self.d_v = d_v
self.w_qs = nn.Linear(d_model, n_head * d_k)
self.w_ks = nn.Linear(d_model, n_head * d_k)
self.w_vs = nn.Linear(d_model, n_head * d_v)
nn.init.normal_(self.w_qs.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_k)))
nn.init.normal_(self.w_ks.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_k)))
nn.init.normal_(self.w_vs.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_v)))
self.attention = ScaledDotProductAttention(temperature=np.power(d_k, 0.5))
self.layer_norm = nn.LayerNorm(d_model)
self.fc = nn.Linear(n_head * d_v, d_model)
nn.init.xavier_normal_(self.fc.weight)
self.dropout = nn.Dropout(dropout)
def forward(self, q, k, v, mask=None):
d_k, d_v, n_head = self.d_k, self.d_v, self.n_head
sz_b, len_q, _ = q.size()
sz_b, len_k, _ = k.size()
sz_b, len_v, _ = v.size()
residual = q
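        # Project q/k/v and split the model dimension into n_head heads, folding the
        # head dimension into the batch dimension: (batch, len, d_model) -> (n_head*batch, len, d_k).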
q = self.w_qs(q).view(sz_b, len_q, n_head, d_k)
k = self.w_ks(k).view(sz_b, len_k, n_head, d_k)
v = self.w_vs(v).view(sz_b, len_v, n_head, d_v)
q = q.permute(2, 0, 1, 3).contiguous().view(-1, len_q, d_k) # (n*b) x lq x dk
k = k.permute(2, 0, 1, 3).contiguous().view(-1, len_k, d_k) # (n*b) x lk x dk
v = v.permute(2, 0, 1, 3).contiguous().view(-1, len_v, d_v) # (n*b) x lv x dv
mask = mask.repeat(n_head, 1, 1) # (n*b) x .. x ..
output, attn = self.attention(q, k, v, mask=mask)
output = output.view(n_head, sz_b, len_q, d_v)
output = output.permute(1, 2, 0, 3).contiguous().view(sz_b, len_q, -1) # b x lq x (n*dv)
output = self.dropout(self.fc(output))
output = self.layer_norm(output + residual)
return output, attn
class PositionwiseFeedForward(nn.Module):
''' A two-feed-forward-layer module '''
def __init__(self, d_in, d_hid, dropout=0.1):
super().__init__()
self.w_1 = nn.Conv1d(d_in, d_hid, 1) # position-wise
self.w_2 = nn.Conv1d(d_hid, d_in, 1) # position-wise
self.layer_norm = nn.LayerNorm(d_in)
self.dropout = nn.Dropout(dropout)
def forward(self, x):
residual = x
output = x.transpose(1, 2)
output = self.w_2(F.relu(self.w_1(output)))
output = output.transpose(1, 2)
output = self.dropout(output)
output = self.layer_norm(output + residual)
return output
| [
"[email protected]"
] | |
214e9acf4f63476ff161a03f36b9d65e5158d29c | e8c5d8a473a71f88616b692dcdfd2d604485f580 | /test_corrector.py | 1db70ec4aff76eef61b92ad17796f78834360551 | [
"Apache-2.0"
] | permissive | modernYan/corrector | a6907bf1dc0e5e91048704365b4ade7327939592 | 75c86075ea51a53ebe6d649d729e690e3b414f7a | refs/heads/master | 2021-04-03T01:51:33.407724 | 2018-03-07T12:49:45 | 2018-03-07T12:49:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,148 | py | # -*- coding: utf-8 -*-
# Author: XuMing <[email protected]>
# Brief:
import unittest
from pycorrector.cn_spell import correct
class BasicTestSuite(unittest.TestCase):
"""Basic test cases."""
@staticmethod
def test_text1():
error_sentence_1 = '机七学习是人工智能领遇最能体现智能的一个分知'
correct_sent = correct(error_sentence_1)
print("original sentence:{} => correct sentence:{}".format(error_sentence_1, correct_sent))
@staticmethod
def test_text2():
error_sentence_2 = '杭洲是中国的八大古都之一,因风景锈丽,享有“人间天棠”的美誉!'
correct_sent = correct(error_sentence_2)
print("original sentence:{} => correct sentence:{}".format(error_sentence_2, correct_sent))
@staticmethod
def test_text3():
error_sentence_3 = '我们现今所"使用"的大部分舒学符号,你们用的什么婊点符号'
correct_sent = correct(error_sentence_3)
print("original sentence:{} => correct sentence:{}".format(error_sentence_3, correct_sent))
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
e0409dc7eb85b5766250889ef408b577012505a7 | 8338bde799fab50fa28b3c9e85035fce12f1e152 | /src/crystal_analysis/fluctuations.py | a31246b51e26e6bab8b8c6c145aa242d5bfe1576 | [
"MIT"
] | permissive | malramsay64/Crystal_Melting | c5941ad261ef71f1357d6064302344b093b22b53 | e8305928b06b536d7293cb751963d058d55627aa | refs/heads/master | 2021-03-24T10:24:23.291821 | 2020-08-07T07:19:09 | 2020-08-07T07:19:09 | 119,946,491 | 0 | 0 | MIT | 2020-02-12T07:35:47 | 2018-02-02T07:13:03 | Python | UTF-8 | Python | false | false | 8,047 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2019 Malcolm Ramsay <[email protected]>
#
# Distributed under terms of the MIT license.
"""A module to measure the structural fluctuations of each state.
The concept of this module is to understand how much particles within each state are
moving, to get an idea of the likelihood of transitioning from one state to another.
States which are highly constrained will allow small amounts of motion, while states
which are more flexible will be able to rapidly change configuration.
"""
import logging
from pathlib import Path
from typing import Tuple
import click
import numpy as np
import pandas as pd
import scipy.optimize
from pandas.api.types import CategoricalDtype
from sdanalysis import order
from sdanalysis.read import open_trajectory
from sdanalysis.util import get_filename_vars
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
@click.group()
def main():
pass
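# Shared histogram grid: 5000 equal-width bins spanning [-1, 1] (width 4e-4);
# BIN_VALUES holds the centre of each bin.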
BINS = np.linspace(-1, 1, 5001)
BIN_VALUES = (BINS[1:] + BINS[:-1]) / 2
def aggregate(values: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""Convert values to a histogram of bins and values
    This takes a collection of values in the range [0, 1] and bins them into a
    histogram with 5000 equal-width bins spanning [-1, 1] (bin width 4e-4); only the bins that contain values are returned.
    Args:
        values: A collection of values which will be binned into the histogram
Returns:
centers: The centers of each of the bins
counts: The count of each bin
"""
hist = np.histogram(values, bins=BINS, density=True)[0]
non_zero = np.nonzero(hist)
return BIN_VALUES[non_zero], hist[non_zero]
def gaussian(x, A, mu, sigma):
return A * np.exp(-np.square(x - mu) / (2 * np.square(sigma)))
def fit_gaussian(bins: np.ndarray, count: np.ndarray):
# Initial guess at parameter values
p0 = (1.0, 0.0, 1.0)
coeffs, _ = scipy.optimize.curve_fit(gaussian, bins, count, p0=p0, maxfev=2000)
return coeffs
@main.command()
@click.argument("output", type=click.Path(file_okay=True, dir_okay=False))
@click.argument(
"infiles", nargs=-1, type=click.Path(exists=True, file_okay=True, dir_okay=False)
)
def collate_disc(output, infiles):
with pd.HDFStore(output) as dst:
for file in infiles:
file = Path(file)
print(file)
df = pd.read_csv(file)
fvars = get_filename_vars(file)
df["temperature"] = float(fvars.temperature)
df["pressure"] = float(fvars.pressure)
if fvars.crystal is None:
crystal = "liquid"
else:
crystal = fvars.crystal
df["crystal"] = crystal
bin_values, count = aggregate(df["hexatic_order"])
df = pd.DataFrame(
{
"temperature": float(df["temperature"].values[0]),
"pressure": float(df["pressure"].values[0]),
"crystal": df["crystal"].values[0],
"bins": bin_values,
"count": count,
"probability": count * (BINS[1] - BINS[0]),
}
)
df["crystal"] = df["crystal"].astype(
CategoricalDtype(
categories=["SquareCircle", "HexagonalCircle", "liquid"]
)
)
dst.append("ordering", df)
@main.command()
@click.argument("output", type=click.Path(file_okay=True, dir_okay=False))
@click.argument(
"infiles", nargs=-1, type=click.Path(exists=True, file_okay=True, dir_okay=False)
)
def collate(output, infiles):
with pd.HDFStore(output) as dst:
for file in infiles:
file = Path(file)
print(file)
if file.suffix == ".h5":
with pd.HDFStore(file) as src:
df = src.get("ordering")
elif file.suffix == ".csv":
df = pd.read_csv(file)
df = df.rename(columns={"orient_order": "orientational_order"})
fvars = get_filename_vars(file)
df["temperature"] = float(fvars.temperature)
df["pressure"] = float(fvars.pressure)
if fvars.crystal is None:
crystal = "liquid"
else:
crystal = fvars.crystal
df["crystal"] = crystal
else:
raise ValueError("Filetype is not supported")
bin_values, count = aggregate(df["orientational_order"])
df = pd.DataFrame(
{
"temperature": float(df["temperature"].values[0]),
"pressure": float(df["pressure"].values[0]),
"crystal": df["crystal"].values[0],
"bins": bin_values,
"count": count,
"probability": count * (BINS[1] - BINS[0]),
}
)
df["crystal"] = df["crystal"].astype(
CategoricalDtype(categories=["p2", "p2gg", "pg", "liquid"])
)
dst.append("ordering", df)
@main.command()
@click.argument("infile", type=click.Path(exists=True, file_okay=True, dir_okay=False))
@click.argument("outfile", type=click.Path(file_okay=True, dir_okay=False))
def analyse(infile, outfile):
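    # Compute the orientational order parameter for every molecule in each
    # snapshot of the trajectory, then append the per-molecule values to an HDF5 store.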
dataframes = []
file_vars = get_filename_vars(infile)
crystal = file_vars.crystal
if crystal is None:
crystal = "liquid"
for snap in open_trajectory(infile, progressbar=True):
orientational_order = order.orientational_order(
snap.box, snap.position, snap.orientation
)
df = pd.DataFrame(
{
"molecule": np.arange(snap.num_mols),
"orientational_order": orientational_order,
"temperature": float(file_vars.temperature),
"pressure": float(file_vars.pressure),
"crystal": crystal,
}
)
df["crystal"] = df["crystal"].astype("category")
dataframes.append(df)
with pd.HDFStore(outfile) as dst:
dst.append("ordering", pd.concat(dataframes))
@main.command()
@click.argument("outfile", type=click.Path(file_okay=True, dir_okay=False))
@click.argument(
"infiles", nargs=-1, type=click.Path(exists=True, file_okay=True, dir_okay=False)
)
def thermodynamics(outfile, infiles):
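    """Aggregate per-particle thermodynamic quantities (mean and std) from simulation log files."""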
dfs = []
for filename in infiles:
fvars = get_filename_vars(filename)
df = pd.read_csv(filename, sep="\t")
# All the values are written to the same output file, so make sure there is only
# a single trajectory worth of values.
df = df.drop_duplicates("timestep", keep="last")
        # We want intensive (per-particle) quantities, so divide every column by N.
df = df.div(df.N, axis=0)
# Take the second half of the values to ensure there is no issue with
# equilibration
df = df.iloc[len(df) // 2 :, :]
# Calculate Total Energy
df["total_energy"] = df["kinetic_energy"] + df["potential_energy"]
# Calculate enthalpy.
        # This is the total energy (potential + kinetic) + the pressure-volume term (pV)
# The multiplication by N is because the pressure was also divided by N above.
df["enthalpy"] = (
df["potential_energy"]
+ df["kinetic_energy"]
+ df["pressure"] * df["volume"] * df.N
)
if fvars.crystal is not None:
df["crystal"] = fvars.crystal
else:
df["crystal"] = "liquid"
df["pressure"] = float(fvars.pressure)
df["temperature"] = float(fvars.temperature)
df = df.set_index(["pressure", "temperature", "crystal"])
# Perform aggregations on the dataframe, making it much easier to work with.
df = df.groupby(["pressure", "temperature", "crystal"]).agg(["mean", "std"])
dfs.append(df)
pd.concat(dfs).to_hdf(outfile, "thermo")
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
4c2de55d37b6463dc9cb09e1b3fab791b94fb59f | 1842d2e7989b9fb1bdd6edff2b2ce187ca9f27ad | /BIOMD0000000484/model.py | 9d53fe8ba1862ad4b82c03c37bafa36698e3a221 | [
"CC0-1.0"
] | permissive | biomodels/BIOMD0000000484 | cc08199b3d324bf10425829755d70e67d52b155d | 293ac221c1615e7446f55960cff130f784243220 | refs/heads/master | 2016-09-06T17:32:40.282597 | 2014-10-16T05:17:52 | 2014-10-16T05:17:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 427 | py | import os
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'BIOMD0000000484.xml')
with open(sbmlFilePath,'r') as f:
sbmlString = f.read()
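# Import libsbml only when it is installed; otherwise just the raw SBML string is exposed.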
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
if module_exists('libsbml'):
import libsbml
sbml = libsbml.readSBMLFromString(sbmlString) | [
"[email protected]"
] | |
563a9e4d56769ebc277637da90f87d84e0eb46b2 | 16dcbf88ae9514109151fe5ff447b2b653ddf48b | /2016/035-TheasGame/thea.py | 60cf62e1e123e8f303f839ac3e8286c58ec426a6 | [] | no_license | ChristerNilsson/Lab | efa55ef5e79dff84b232dfcf94473eacdb263175 | b1f730f45ec6e901bd14c1e4196aa5e0f591ecd2 | refs/heads/master | 2023-07-06T04:35:09.458936 | 2023-06-24T21:40:54 | 2023-06-24T21:40:54 | 48,474,249 | 8 | 8 | null | 2022-12-10T07:03:31 | 2015-12-23T06:51:11 | JavaScript | UTF-8 | Python | false | false | 690 | py | import pygame
pygame.init()
windowSurface = pygame.display.set_mode((1000, 750), pygame.DOUBLEBUF)
done = False
while not done:
# --- Main event loop
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
s = pygame.Surface((1000,750), pygame.SRCALPHA) # per-pixel alpha
s.fill((255,255,255,128)) # notice the alpha value in the color
pygame.draw.circle(s, pygame.Color(255, 0, 0, 128), (100, 100), 100)
pygame.draw.circle(windowSurface, pygame.Color(0, 255, 0, 128), (150, 100), 100)
    windowSurface.blit(s, (0,0), special_flags=pygame.BLEND_RGBA_ADD)
s.fill((255,255,255))
pygame.display.flip() | [
"[email protected]"
] | |
96e362619f5e1ca63b616907a81a83d7ad5268b9 | 7a17f9e6706b6e3f6d55c8e30f0dcec97f495541 | /src/hyperka/hyperbolic/manifold.py | e5bfa0abf060d8a9093cb836b10fa666c81e5cfe | [
"MIT"
] | permissive | HELL-TO-HEAVEN/HyperKA | 8d097c58e0188961de6e4ea74f214e40d9408a04 | cadaf824a739b55211997e73d9948ddbfbe7ce83 | refs/heads/main | 2023-03-30T04:20:31.477323 | 2021-03-25T07:56:55 | 2021-03-25T07:56:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 713 | py | from abc import abstractmethod
from abc import ABC
class Manifold(ABC):
def __init__(self, *args, **kwargs):
pass
@property
def name(self):
raise NotImplementedError
@staticmethod
def dim(dim):
return dim
def normalize(self, u):
return u
@abstractmethod
def distance(self, u, v):
"""
Distance function
"""
raise NotImplementedError
@abstractmethod
def expm(self, p, d_p, lr=None, out=None):
"""
Exponential map
"""
raise NotImplementedError
@abstractmethod
def logm(self, x, y):
"""
Logarithmic map
"""
raise NotImplementedError
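# A concrete manifold would subclass Manifold and implement the abstract methods,
# e.g. (a minimal sketch, assuming torch-style tensors):
#   class Euclidean(Manifold):
#       name = "euclidean"
#       def distance(self, u, v): return ((u - v) ** 2).sum(dim=-1).sqrt()
#       def expm(self, p, d_p, lr=None, out=None): return p + d_p
#       def logm(self, x, y): return y - x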
| [
"[email protected]"
] | |
4233d111f76ceabf87cbb1c1701d766aeaf5393b | 17e813f4f20a6ce2d82619be9fac517b5176a74b | /Trees/btToLinkedList.py | 84dacb4426c7a87aa171291005e233ea32317e16 | [] | no_license | AryanGanotra07/DSALGO | 0ee86bbca4b345d21f5d6eb60d96c7aff6f1fc93 | 8cbac991ceec43522a57c65d68f00b54ccb6ea3f | refs/heads/master | 2022-12-08T22:33:14.031636 | 2020-09-13T14:05:41 | 2020-09-13T14:05:41 | 283,800,432 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,052 | py | # Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
import sys
sys.setrecursionlimit(15000000)
def solve(a, b):
if a is None:
return
b.append(a.val)
solve(a.left, b)
solve(a.right, b)
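# NOTE: the preorder-collection helper above is shadowed by the in-place version below.
# Flatten in place: recursively flatten the left subtree, splice it between the node and
# its original right subtree, then continue down the new right spine.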
def solve(a):
if a is None or (a.left is None and a.right is None):
return None
    if a.left is not None:
        solve(a.left)
        tmp = a.right
        a.right = a.left
        a.left = None
        b = a.right
        while b.right is not None:
            b = b.right
        b.right = tmp
    solve(a.right)
class Solution:
# @param A : root node of tree
# @return the root node in the tree
def flatten(self, A):
solve(A)
return A
# b = []
# solve(A, b)
# r = TreeNode(b[0])
# n = r
# for i in range(1, len(b)):
# n.right = TreeNode(b[i])
# n = n.right
# return r
| [
"[email protected]"
] | |
ee8f01d9ff709d7f69ae6cebd6b938844bdd5ee8 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03290/s821428254.py | d06c09c404c991b8d3ef901f9f4a5f494259ea0e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 608 | py | from math import ceil
from itertools import product
D,G=map(int,input().split())
pc=[list(map(int,input().split())) for _ in range(D)]
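# For every subset P of difficulties that are solved completely, count the problems needed
# to reach G points, topping up with partial solves of the hardest remaining difficulty.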
def calc(P):
count = 0
g = G
for i in range(D):
p,c = pc[-1-i]
if P[-1-i]==1:
count += p
g -= (D-i)*100*p+c
if g<=0:
return count
for i in range(D):
p,c = pc[-1-i]
if P[-1-i]==0:
tmp = min(p-1,ceil(g/((D-i)*100)))
count += tmp
g -= (D-i)*100*tmp
if g<=0:
return count
return -1
MIN=10**9
for P in product(range(2),repeat=D):
tmp = calc(P)
if tmp != -1 and tmp < MIN: MIN=tmp
print(MIN) | [
"[email protected]"
] | |
bcf4f86043750b69eed83dad9603a1d113b66fb7 | a176f3705c92ec1974ada17af2a891e0bf763b97 | /core/get_input.py | 21fcd1c9c836417783cbba4c9450586ea9653811 | [
"Apache-2.0"
] | permissive | indrajithbandara/OWASP-Nettacker | 593bdf5426606e67a94e447b4a9534bf79b0396b | 087ce32f06758db03039a34e6e32fbc57fb4ffef | refs/heads/master | 2021-05-08T14:22:54.520184 | 2018-01-27T20:34:59 | 2018-01-27T20:34:59 | 120,082,592 | 1 | 0 | null | 2018-02-03T10:49:34 | 2018-02-03T10:49:33 | null | UTF-8 | Python | false | false | 514 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from core.compatible import version
from core.alert import __input_msg
def __input(msg, default):
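    # Prompt the user and fall back to the default on empty input or any read error,
    # supporting both Python 2 (raw_input) and Python 3 (input).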
    if version() == 2:
try:
data = raw_input(__input_msg(msg))
if data == '':
data = default
except:
data = default
else:
try:
data = input(__input_msg(msg))
if data == '':
data = default
except:
data = default
return data
| [
"[email protected]"
] | |
ec0fe22ab52822601adcf965f531dec7895c63aa | 66fb5bbf3cd0f2c7b00db7081271c376812b68dd | /control_planner/scripts/purepursuit.py | c4385ac0777d26b0a19d879eb7b8d5ef81f9ef78 | [] | no_license | freesloth/wecar_2 | d5e95ae67d65bcd78a60ceae95a48161656e4fab | c05888cc70ddd775a3151b722db06aa41705f6b9 | refs/heads/master | 2023-01-22T15:47:39.505150 | 2020-12-10T03:29:36 | 2020-12-10T03:29:36 | 278,523,277 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,179 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import rospy
import rospkg
from sensor_msgs.msg import LaserScan,PointCloud,Imu
from std_msgs.msg import Float64
from vesc_msgs.msg import VescStateStamped
from laser_geometry import LaserProjection
from math import cos,sin,pi,sqrt,pow,atan2
from geometry_msgs.msg import Point32,PoseStamped, Point32,PoseStamped,Point, PoseWithCovarianceStamped
from nav_msgs.msg import Odometry,Path
import tf
from tf.transformations import euler_from_quaternion,quaternion_from_euler
class pure_pursuit:
def __init__(self):
rospy.init_node("make_path", anonymous=True)
rospy.Subscriber("path", Path, self.path_callback)
rospy.Subscriber("odom",Odometry, self.odom_callback)
# rospy.Subscriber("/amcl_pose",PoseWithCovarianceStamped, self.amcl_callback)
self.motor_pub=rospy.Publisher('commands/motor/speed',Float64,queue_size=1)
self.servo_pub=rospy.Publisher('commands/servo/position',Float64,queue_size=1)
self.motor_msg=Float64()
self.servo_msg=Float64()
self.is_path=False
self.is_odom=False
self.is_amcl=False
self.forward_point=Point()
self.current_position=Point()
self.is_look_forward_point=False
self.vehicle_length=0.5
self.lfd=0.5
self.steering=0
self.steering_angle_to_servo_gain=-1.2135
self.steering_angle_to_servo_offset=0.5304
rate=rospy.Rate(30)
while not rospy.is_shutdown():
if self.is_path==True and(self.is_odom==True or self.is_amcl==True):
vehicle_position=self.current_position
rotated_point=Point()
self.is_look_forward_point=False
for num,i in enumerate(self.path.poses):
path_point=i.pose.position
dx=path_point.x-vehicle_position.x
dy=path_point.y-vehicle_position.y
rotated_point.x=cos(self.vehicle_yaw)*dx+sin(self.vehicle_yaw)*dy
rotated_point.y=sin(self.vehicle_yaw)*dx-cos(self.vehicle_yaw)*dy
if rotated_point.x>0:
dis=sqrt(pow(rotated_point.x,2)+pow(rotated_point.y,2))
if dis>=self.lfd:
self.forward_point=path_point
self.is_look_forward_point=True
break
theta=-atan2(rotated_point.y,rotated_point.x)
if self.is_look_forward_point:
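                    # Pure pursuit steering law: delta = atan2(2 * L * sin(theta), lookahead distance)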
self.steering=atan2((2*self.vehicle_length*sin(theta)),self.lfd) #rad
print(self.steering*180/pi)
self.motor_msg.data=2000
else:
self.steering=0
print("no found forward point")
self.motor_msg.data=0
self.steering_command=(self.steering_angle_to_servo_gain*self.steering)+self.steering_angle_to_servo_offset
self.servo_msg.data=self.steering_command
self.servo_pub.publish(self.servo_msg)
self.motor_pub.publish(self.motor_msg)
rate.sleep()
def path_callback(self,msg):
self.is_path=True
self.path=msg
def odom_callback(self,msg):
self.is_odom=True
odom_quaternion=(msg.pose.pose.orientation.x,msg.pose.pose.orientation.y,msg.pose.pose.orientation.z,msg.pose.pose.orientation.w)
_,_,self.vehicle_yaw=euler_from_quaternion(odom_quaternion)
self.current_position.x=msg.pose.pose.position.x
self.current_position.y=msg.pose.pose.position.y
def amcl_callback(self,msg):
self.is_amcl=True
amcl_quaternion=(msg.pose.pose.orientation.x,msg.pose.pose.orientation.y,msg.pose.pose.orientation.z,msg.pose.pose.orientation.w)
_,_,self.vehicle_yaw=euler_from_quaternion(amcl_quaternion)
self.current_position.x=msg.pose.pose.position.x
self.current_position.y=msg.pose.pose.position.y
if __name__ == '__main__':
try:
test_track=pure_pursuit()
except rospy.ROSInterruptException:
pass
| [
"[email protected]"
] | |
6fb892e8583fa7eb8e4ec468bbb83d3a7077a633 | fec9d5e77fe0248853c11084cb27ed3b1dcb500c | /users/arxiv/users/legacy/tests/test_endorsements.py | f38ff79e7bc0d63bd241498e861451266964fc1c | [
"MIT"
] | permissive | SamanthaFeidFischer/arxiv-auth | bb1c41f018a3573fa1cbc7b5def9532deb6bdd9c | 08df4e0196a04a06eac1d26477b3ad56ebf56f08 | refs/heads/develop | 2021-06-13T09:58:48.592901 | 2018-09-12T21:31:04 | 2018-09-12T21:31:04 | 148,851,802 | 1 | 0 | MIT | 2021-04-29T19:40:12 | 2018-09-14T23:37:49 | Python | UTF-8 | Python | false | false | 11,057 | py | """Tests for :mod:`arxiv.users.legacy.endorsements` using a live test DB."""
import os
from unittest import TestCase, mock
from datetime import datetime
from pytz import timezone
from flask import Flask
from mimesis import Person, Internet, Datetime
from .. import endorsements, util, models
from ... import domain
EASTERN = timezone('US/Eastern')
class TestAutoEndorsement(TestCase):
"""Tests for :func:`get_autoendorsements`."""
def setUp(self):
"""Generate some fake data."""
self.app = Flask('test')
util.init_app(self.app)
self.app.config['CLASSIC_DATABASE_URI'] = 'sqlite:///test.db'
self.app.config['CLASSIC_SESSION_HASH'] = 'foohash'
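        # Use a throwaway on-disk sqlite database; it is deleted again in tearDown().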
with self.app.app_context():
util.create_all()
with util.transaction() as session:
person = Person('en')
net = Internet('en')
ip_addr = net.ip_v4()
email = person.email()
approved = 1
deleted = 0
banned = 0
first_name = person.name()
last_name = person.surname()
suffix_name = person.title()
joined_date = util.epoch(
Datetime('en').datetime().replace(tzinfo=EASTERN)
)
db_user = models.DBUser(
first_name=first_name,
last_name=last_name,
suffix_name=suffix_name,
share_first_name=1,
share_last_name=1,
email=email,
flag_approved=approved,
flag_deleted=deleted,
flag_banned=banned,
flag_edit_users=0,
flag_edit_system=0,
flag_email_verified=1,
share_email=8,
email_bouncing=0,
policy_class=2, # Public user. TODO: consider admin.
joined_date=joined_date,
joined_ip_num=ip_addr,
joined_remote_host=ip_addr
)
session.add(db_user)
self.user = domain.User(
user_id=str(db_user.user_id),
username='foouser',
email=db_user.email,
name=domain.UserFullName(
forename=db_user.first_name,
surname=db_user.last_name,
suffix=db_user.suffix_name
)
)
def tearDown(self):
"""Remove the test DB."""
os.remove('./test.db')
def test_invalidated_autoendorsements(self):
"""The user has two autoendorsements that have been invalidated."""
with self.app.app_context():
with util.transaction() as session:
issued_when = util.epoch(
Datetime('en').datetime().replace(tzinfo=EASTERN)
)
session.add(models.DBEndorsement(
endorsee_id=self.user.user_id,
archive='astro-ph',
subject_class='CO',
flag_valid=0,
endorsement_type='auto',
point_value=10,
issued_when=issued_when
))
session.add(models.DBEndorsement(
endorsee_id=self.user.user_id,
archive='astro-ph',
subject_class='CO',
flag_valid=0,
endorsement_type='auto',
point_value=10,
issued_when=issued_when
))
session.add(models.DBEndorsement(
endorsee_id=self.user.user_id,
archive='astro-ph',
subject_class='CO',
flag_valid=1,
endorsement_type='auto',
point_value=10,
issued_when=issued_when
))
session.add(models.DBEndorsement(
endorsee_id=self.user.user_id,
archive='astro-ph',
subject_class='CO',
flag_valid=1,
endorsement_type='user',
point_value=10,
issued_when=issued_when
))
result = endorsements.invalidated_autoendorsements(self.user)
self.assertEqual(len(result), 2, "Two revoked endorsements are loaded")
def test_category_policies(self):
"""Load category endorsement policies from the database."""
with self.app.app_context():
with util.transaction() as session:
session.add(models.DBCategory(
archive='astro-ph',
subject_class='CO',
definitive=1,
active=1,
endorsement_domain='astro-ph'
))
session.add(models.DBEndorsementDomain(
endorsement_domain='astro-ph',
endorse_all='n',
mods_endorse_all='n',
endorse_email='y',
papers_to_endorse=3
))
policies = endorsements.category_policies()
category = domain.Category('astro-ph', 'CO')
self.assertIn(category, policies, "Data are loaded for categories")
self.assertEqual(policies[category]['domain'], 'astro-ph')
self.assertFalse(policies[category]['endorse_all'])
self.assertTrue(policies[category]['endorse_email'])
self.assertEqual(policies[category]['min_papers'], 3)
def test_domain_papers(self):
"""Get the number of papers published in each domain."""
with self.app.app_context():
with util.transaction() as session:
# User owns three papers.
session.add(models.DBPaperOwners(
document_id=1,
user_id=self.user.user_id,
flag_author=0, # <- User is _not_ an author.
valid=1
))
session.add(models.DBDocuments(
document_id=1,
paper_id='2101.00123',
dated=util.epoch(datetime.now(tz=EASTERN))
))
session.add(models.DBDocumentInCategory(
document_id=1,
archive='cs',
subject_class='DL',
is_primary=1
))
session.add(models.DBCategory(
archive='cs',
subject_class='DL',
definitive=1,
active=1,
endorsement_domain='firstdomain'
))
# Here's another paper.
session.add(models.DBPaperOwners(
document_id=2,
user_id=self.user.user_id,
flag_author=1, # <- User is an author.
valid=1
))
session.add(models.DBDocuments(
document_id=2,
paper_id='2101.00124',
dated=util.epoch(datetime.now(tz=EASTERN))
))
session.add(models.DBDocumentInCategory(
document_id=2,
archive='cs',
subject_class='IR',
is_primary=1
))
session.add(models.DBCategory(
archive='cs',
subject_class='IR',
definitive=1,
active=1,
endorsement_domain='firstdomain'
))
# Here's a paper for which the user is an author.
session.add(models.DBPaperOwners(
document_id=3,
user_id=self.user.user_id,
flag_author=1,
valid=1
))
session.add(models.DBDocuments(
document_id=3,
paper_id='2101.00125',
dated=util.epoch(datetime.now(tz=EASTERN))
))
# It has both a primary and a secondary classification.
session.add(models.DBDocumentInCategory(
document_id=3,
archive='astro-ph',
subject_class='EP',
is_primary=1
))
session.add(models.DBDocumentInCategory(
document_id=3,
archive='astro-ph',
subject_class='CO',
is_primary=0 # <- secondary!
))
session.add(models.DBCategory(
archive='astro-ph',
subject_class='EP',
definitive=1,
active=1,
endorsement_domain='seconddomain'
))
session.add(models.DBCategory(
archive='astro-ph',
subject_class='CO',
definitive=1,
active=1,
endorsement_domain='seconddomain'
))
papers = endorsements.domain_papers(self.user)
self.assertEqual(papers['firstdomain'], 2)
self.assertEqual(papers['seconddomain'], 2)
def test_is_academic(self):
"""Determine whether a user is academic based on email."""
ok_patterns = ['%w3.org', '%aaas.org', '%agu.org', '%ams.org']
bad_patterns = ['%.com', '%.net', '%.biz.%']
with self.app.app_context():
with util.transaction() as session:
for pattern in ok_patterns:
session.add(models.DBEmailWhitelist(
pattern=str(pattern)
))
for pattern in bad_patterns:
session.add(models.DBEmailBlacklist(
pattern=str(pattern)
))
self.assertTrue(endorsements.is_academic(domain.User(
user_id='2',
email='[email protected]',
username='someone'
)))
self.assertFalse(endorsements.is_academic(domain.User(
user_id='2',
email='[email protected]',
username='someone'
)))
self.assertTrue(endorsements.is_academic(domain.User(
user_id='2',
email='[email protected]',
username='someone'
)))
self.assertFalse(endorsements.is_academic(domain.User(
user_id='2',
email='[email protected]',
username='someone'
)))
| [
"[email protected]"
] | |
a8d0358b14e0899a93fda27fcd872490e907be31 | 5c0a253bf2fb83db01abc99097871c965f4cf565 | /study/machinelearning/clustering/flat/KMeans/kMeansWithScratch.py | 6e509df62cf33b8d8436cb1ad679ecef276bd6d5 | [] | no_license | airuibel/python-1 | 3b16553ede9d069ec56efbb12a89a4de6917a447 | 94f387e2d406fab2128bcfffce6146da720b2ccc | refs/heads/master | 2020-07-05T15:43:00.957221 | 2017-09-17T14:05:48 | 2017-09-17T14:05:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,870 | py | # -*- utf-8 -*-
import matplotlib.pyplot as plt
from matplotlib import style
style.use('ggplot')
import numpy as np
X = np.array([[1, 2],
[1.5, 1.8],
[5, 8],
[8, 8],
[1, 0.6],
[9, 11],
[1, 3],
[8, 9],
[0, 3],
[5, 4],
[6, 4], ])
plt.scatter(X[:, 0], X[:, 1], s = 50)
plt.show()
colors = 10 * ["g", "r", "c", "b", "k"]
class K_Means:
def __init__(self, k = 2, tol = 0.001, max_iter = 300):
self.k = k
self.tol = tol
self.max_iter = max_iter
def fit(self, data):
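        # Seed the centroids with the first k samples, then repeatedly assign every sample
        # to its nearest centroid and move each centroid to its cluster mean until the
        # percentage movement of every centroid falls below the tolerance.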
self.centroids = {}
for i in range(self.k):
self.centroids[i] = data[i]
for i in range(self.max_iter):
self.classifications = {}
for i in range(self.k):
self.classifications[i] = []
for featureset in data:
distances = [np.linalg.norm(featureset - self.centroids[centroid]) for centroid in self.centroids]
classification = distances.index(min(distances))
self.classifications[classification].append(featureset)
prev_centroids = dict(self.centroids)
for classification in self.classifications:
self.centroids[classification] = np.average(self.classifications[classification], axis = 0)
optimized = True
for c in self.centroids:
original_centroid = prev_centroids[c]
current_centroid = self.centroids[c]
                if np.sum(np.abs((current_centroid - original_centroid) / original_centroid * 100.0)) > self.tol:
                    print(np.sum(np.abs((current_centroid - original_centroid) / original_centroid * 100.0)))
optimized = False
if optimized:
break
def predict(self, data):
distances = [np.linalg.norm(data - self.centroids[centroid]) for centroid in self.centroids]
classification = distances.index(min(distances))
return classification
clf = K_Means()
clf.fit(X)
for centroid in clf.centroids:
plt.scatter(clf.centroids[centroid][0], clf.centroids[centroid][1],
marker = "o", color = "k", s = 150, linewidths = 5)
for classification in clf.classifications:
color = colors[classification]
for featureset in clf.classifications[classification]:
plt.scatter(featureset[0], featureset[1], marker = "x", color = color, s = 150, linewidths = 5)
unknowns = np.array([[1, 3],
[8, 9],
[0, 3],
[5, 4],
[6, 4], ])
for unknown in unknowns:
classification = clf.predict(unknown)
plt.scatter(unknown[0], unknown[1], marker = "x", color = colors[classification], s = 50, linewidths = 5)
plt.show()
| [
"[email protected]"
] | |
1fc8c3edd2c2ef3d16c220f36cb7d72c3bcad84f | 5da5473ff3026165a47f98744bac82903cf008e0 | /packages/google-cloud-datalabeling/samples/generated_samples/datalabeling_v1beta1_generated_data_labeling_service_list_annotated_datasets_async.py | 5f3fe6b186baf941dd84aef169d534faaebfa3cf | [
"Apache-2.0"
] | permissive | googleapis/google-cloud-python | ed61a5f03a476ab6053870f4da7bc5534e25558b | 93c4e63408c65129422f65217325f4e7d41f7edf | refs/heads/main | 2023-09-04T09:09:07.852632 | 2023-08-31T22:49:26 | 2023-08-31T22:49:26 | 16,316,451 | 2,792 | 917 | Apache-2.0 | 2023-09-14T21:45:18 | 2014-01-28T15:51:47 | Python | UTF-8 | Python | false | false | 2,012 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListAnnotatedDatasets
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-datalabeling
# [START datalabeling_v1beta1_generated_DataLabelingService_ListAnnotatedDatasets_async]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import datalabeling_v1beta1
async def sample_list_annotated_datasets():
# Create a client
client = datalabeling_v1beta1.DataLabelingServiceAsyncClient()
# Initialize request argument(s)
request = datalabeling_v1beta1.ListAnnotatedDatasetsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_annotated_datasets(request=request)
# Handle the response
async for response in page_result:
print(response)
# [END datalabeling_v1beta1_generated_DataLabelingService_ListAnnotatedDatasets_async]
| [
"[email protected]"
] | |
d49d4df253e01b51cbef0fd3337a5d662b8bb43c | 6a2c2af113bb8b4d55db6ceabc6e78a0bbcd1f91 | /genus processing/Shorts Back Pocket Flap.py | c5157eb6d81a0e4a0684d901d89b3339771afb61 | [] | no_license | JinghongM/Everlasting_Data_Cleansing | 4a966aca5cba102961f64338411d76e51f60f51e | 237073980b2bd1697db578013c7463dcbc1492fb | refs/heads/master | 2021-04-26T23:48:38.083155 | 2018-06-21T20:00:11 | 2018-06-21T20:00:11 | 123,861,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 495 | py | import pandas as pd
import copy
import os.path
Pattern=6
Material=7
Species=4
CGP = pd.read_excel("../Realdata.xlsx")
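# Re-label rows whose genus is "Shorts Back Pocket Flap": the species becomes
# "Back Pocket" and the genus collapses to "Shorts".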
for row in range(1,CGP.shape[0]):
genus = str(CGP.iat[row,3])
if "Shorts Back Pocket Flap" == genus:
print(row)
CGP.iat[row,Species] = "Back Pocket"
CGP.iat[row,3] = "Shorts"
i=0 #process headers
while i<len(CGP.columns.values):
if "Unnamed" in CGP.columns.values[i]:
CGP.columns.values[i] = ''
i+=1
CGP.to_excel('../Realdata.xlsx',index=False) | [
"[email protected]"
] | |
42ae99636bef2b466a152cd47dbb195677753fcc | 4960e3e005ba04ec0a8b0defc6642dff5e71f5ae | /knowledge/cron/flow4/scan_domain2sentiment.py | 82aed86bc47562f200906949fc65791e17a4ba4e | [] | no_license | jianjian0dandan/knowledge_revised | aa7d772ba9efcaa579907b0418e145d6b440a9c9 | ffc80dcca932c977755128c80c17dca603ee8a8b | refs/heads/master | 2021-01-24T18:27:41.117166 | 2017-04-25T14:55:42 | 2017-04-25T14:55:42 | 84,448,466 | 1 | 0 | null | 2017-06-15T07:32:31 | 2017-03-09T14:02:46 | HTML | UTF-8 | Python | false | false | 1,974 | py | # -*- coding:utf-8 -*-
'''
use to scan user domain to redis hash for compute sentiment
update: one month
'''
import sys
import time
from elasticsearch.helpers import scan
reload(sys)
sys.path.append('../../')
from global_utils import es_user_portrait, portrait_index_name, portrait_index_type
from global_utils import R_DOMAIN, r_domain_name
from parameter import domain_ch2en_dict
from time_utils import ts2datetime, datetime2ts
def del_domain_redis():
R_DOMAIN.delete(r_domain_name)
#use to scan user domain to redis which save as english
def scan_domain2redis():
count = 0
s_re = scan(es_user_portrait, query={'query':{'match_all':{}}, 'size':1000}, index=portrait_index_name, doc_type=portrait_index_type)
start_ts = time.time()
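    # Mirror each uid -> English domain label into the redis hash, flushing in batches of 1000.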
hmset_dict = {}
while True:
try:
scan_re = s_re.next()['_source']
count += 1
uid = scan_re['uid']
domain_en = domain_ch2en_dict[scan_re['domain']]
hmset_dict[uid] = domain_en
if count % 1000 == 0 and count != 0:
R_DOMAIN.hmset(r_domain_name, hmset_dict)
end_ts = time.time()
print '%s sec count 1000' % (end_ts -start_ts)
start_ts = end_ts
hmset_dict = {}
except StopIteration:
if hmset_dict:
R_DOMAIN.hmset(r_domain_name, hmset_dict)
hmset_dict = {}
break
except Exception as e:
raise e
break
if hmset_dict:
R_DOMAIN.hmset(r_domain_name, hmset_dict)
print 'all count:', count
if __name__=='__main__':
log_time_ts = time.time()
log_time_date = ts2datetime(log_time_ts)
print 'cron/flow4/scan_domain2sentiment.py&start&' + log_time_date
del_domain_redis()
scan_domain2redis()
log_time_ts = time.time()
log_time_date = ts2datetime(log_time_ts)
print 'cron/flow4/scan_domain2sentiment&end&' + log_time_date
| [
"[email protected]"
] | |
1d2e2eb2c10108687a1dc49559484804e918c456 | 93d700b0275bca7bbe10da7b05afb63129180327 | /cmsplugin_rst/forms.py | 52c718fb178b45fdcd2bb193c48470d6edaec1f1 | [
"BSD-3-Clause"
] | permissive | nwojcik/cmsplugin-rst | d251a4bc029b4f804ee81b8cb5a4efbe719d3270 | afc564dc32fff5fa5e0ad7a9449088cb49737db6 | refs/heads/master | 2021-01-16T20:26:48.392784 | 2011-10-09T13:29:06 | 2011-10-09T13:29:06 | 2,537,568 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel | [
"[email protected]"
] | |
b1b7fec1ef05d9659b007e38d824978221d4191c | 0aa2db201678205e9eccd3f4f2dcb6f95a97b5f6 | /tut_23.py | ca22460439ce1e75e3520ad687c01f55a1515c70 | [] | no_license | udoy382/PyTutorial_telusko | ffa76b4b6772d289c787e4b682df2d0965a2bf62 | 5dc5f3fc331605310f7c3923d7865f55a4592e28 | refs/heads/main | 2023-06-09T11:00:41.915456 | 2021-06-30T14:29:56 | 2021-06-30T14:29:56 | 381,730,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | # run this type of shape
# # # #
# # # #
# # # #
'''
for i in range(4):
for j in range(4):
print("# ", end="")
print()
'''
# run this type of shape
#
# #
# # #
# # # #
'''
for i in range(4):
for j in range(i+1):
print("# ", end="")
print()
'''
# run this type of shape
# # # #
# # #
# #
#
for i in range(4):
for j in range(4-i):
print("# ", end="")
print()
| [
"[email protected]"
] | |
45afd3a8a7c4e27c8a14cae91ba000ca278b0c88 | e0f13152e4575f09f0b1e4b1811726bbe5066f90 | /tests/spend.py | 6f12c1c3b03880c9039e75a9eab6c88f5ec189dc | [] | no_license | yagamidev/amoveo | 88bc0dea994fab72c9f430f838ffc54418e30abf | 4dfd6cc8a5cb740500a873c83ff979fa521ec4e7 | refs/heads/master | 2021-04-09T11:45:23.697801 | 2018-03-16T09:13:35 | 2018-03-16T09:13:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 728 | py | from get_request import request
def spend_test():
print("spend test")
pub = "BGRv3asifl1g/nACvsJoJiB1UiKU7Ll8O1jN/VD2l/rV95aRPrMm1cfV1917dxXVERzaaBGYtsGB5ET+4aYz7ws="
priv = "nJgWyLTX1La8eCbPv85r3xs7DfmJ9AG4tLrJ5fiW6qY="
brainwallet = ''
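    # Load the test keypair, fund the account with spends from the local node, then sync
    # and mine a block so the transactions are confirmed.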
request(2, "load_key", [pub, priv, brainwallet], 1)
request(1, "create_account", [pub, 1], 0.1)
request(1, "sync", [[127,0,0,1], 3020], 0.1)
request(1, "spend", [pub, 2])
request(1, "spend", [pub, 3])
request(1, "spend", [pub, 1])
request(1, "spend", [pub, 1])
request(1, "sync", [[127,0,0,1], 3020], 0.1)
request(1, "mine_block", [1,100000], 0.3)
request(1, "sync", [[127,0,0,1], 3020])
if __name__ == "__main__":
spend_test()
| [
"[email protected]"
] | |
c5c02b528246d16171faa687ec9e7fb3d4df0a74 | 039ba9dba0f131496a959338c32e811904f00708 | /mycrm/shopcrm/shopcrm/settings.py | bd18f1c8d64d8efad0647ac52c6bf4b7e4fd5f5d | [] | no_license | mageshrocky/python_django_projects | 49e12aff8fe08704c2d17b60d5373f0990120336 | 444c30780632ceea5c6b7377356ed2c3c5ce6253 | refs/heads/master | 2023-05-18T11:57:01.691868 | 2021-06-15T11:18:26 | 2021-06-15T11:18:26 | 377,125,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,265 | py | """
Django settings for shopcrm project.
Generated by 'django-admin startproject' using Django 3.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'kqgrn!^+k=hkp)_mxpm+9_)0w=k)b@lsjyibe$qsog*$^3%hs7'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'myapp.apps.MyappConfig',
'django_filters',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'shopcrm.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'shopcrm.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/images/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static')
]
MEDIA_ROOT = os.path.join(BASE_DIR, 'static/images') | [
"[email protected]"
] | |
93fb80555ba83304ee0774e8a8d306de3231038c | d8edd97f8f8dea3f9f02da6c40d331682bb43113 | /networks439.py | 52a3ed76800ad9bf46c5c6733e530a3fa5cc21d3 | [] | no_license | mdubouch/noise-gan | bdd5b2fff3aff70d5f464150443d51c2192eeafd | 639859ec4a2aa809d17eb6998a5a7d217559888a | refs/heads/master | 2023-07-15T09:37:57.631656 | 2021-08-27T11:02:45 | 2021-08-27T11:02:45 | 284,072,311 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,889 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
__version__ = 205
# Number of wires in the CDC
n_wires = 3606
# Number of continuous features (E, t, dca)
n_features = 3
class Gen(nn.Module):
def __init__(self, ngf, latent_dims, seq_len, encoded_dim):
super().__init__()
self.ngf = ngf
self.seq_len = seq_len
self.version = __version__
# Input: (B, latent_dims, 1)
self.act = nn.ReLU()
self.lin0 = nn.Linear(latent_dims, seq_len//64*8192, bias=True)
class GBlock(nn.Module):
def __init__(self, in_channels, out_channels):
super().__init__()
self.convp = nn.ConvTranspose1d(in_channels, out_channels, 1, 1, 0)
self.convu = nn.ConvTranspose1d(in_channels, out_channels, 4, 2, 1)
self.conv1 = nn.ConvTranspose1d(out_channels, out_channels, 3, 1, 1)
self.bnu = nn.BatchNorm1d(out_channels)
self.bn1 = nn.BatchNorm1d(out_channels)
self.act = nn.ReLU()
def forward(self, x):
y0 = F.interpolate(self.convp(x), scale_factor=2, mode='nearest')
y = self.act(self.bnu(self.convu(x)))
y = self.act(y0 + self.bn1(self.conv1(y)))
return y
self.conv1 = nn.ConvTranspose1d(8192, 6144, 4, 4, 0)
self.conv2 = nn.ConvTranspose1d(6144, 4096, 4, 4, 0)
self.conv3 = nn.ConvTranspose1d(4096, n_wires, 4, 4, 0)
self.bn1 = nn.BatchNorm1d(6144)
self.bn2 = nn.BatchNorm1d(4096)
self.bn3 = nn.InstanceNorm1d(n_wires)
self.convw1 = nn.ConvTranspose1d(n_wires, n_wires, 1, 1, 0)
#self.bnp0 = nn.BatchNorm1d(n_wires)
self.convxp = nn.ConvTranspose1d(n_wires, 256, 1, 1, 0)
self.bnp1 = nn.InstanceNorm1d(256)
self.convp2 = nn.ConvTranspose1d(256, 64, 1, 1, 0)
self.bnp2 = nn.InstanceNorm1d(64)
self.convp3 = nn.ConvTranspose1d(64, n_features, 1, 1, 0)
self.out = nn.Tanh()
def forward(self, z, wire_to_xy):
# z: random point in latent space
x = self.act(self.lin0(z).view(-1, 8192, self.seq_len // 64))
x = self.act(self.bn1(self.conv1(x)))
x = self.act(self.bn2(self.conv2(x)))
x = self.act(self.bn3(self.conv3(x)))
w = self.convw1(x)
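        # A hard Gumbel-softmax over the wire logits selects one wire per step; its (x, y)
        # position is looked up so the generator outputs real detector coordinates.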
wg = F.gumbel_softmax(w, dim=1, hard=True, tau=2/3)
xy = torch.tensordot(wg, wire_to_xy, dims=[[1],[1]]).permute(0,2,1)
p = self.act(self.bnp1(self.convxp(x)))
p = self.act(self.bnp2(self.convp2(p)))
p = self.convp3(p)
return torch.cat([self.out(p), xy], dim=1), wg
class Disc(nn.Module):
def __init__(self, ndf, seq_len, encoded_dim):
super().__init__()
self.version = __version__
# (B, n_features, 256)
self.act = nn.LeakyReLU(0.2)
class DBlock(nn.Module):
def __init__(self, in_channels, out_channels):
super().__init__()
self.convd = nn.Conv1d(in_channels, out_channels, 3, 2, 1)
self.act = nn.LeakyReLU(0.2)
def forward(self, x):
y = self.act(self.convd(x))
return y
self.convpxy = nn.Conv1d(n_features+2, 64, 1, 1, 0)
self.db1 = DBlock(64, 128)
self.db2 = DBlock(128, 256)
#self.conv2 = nn.Conv1d(256, 512, 3, 2, 1)
#self.conv3 = nn.Conv1d(512, 1024, 3, 2, 1)
#self.conv4 = nn.Conv1d(1024, 2048, 3, 2, 1)
#self.lin0 = nn.Linear(256 * seq_len // 1, 1, bias=True)
self.lin0 = nn.Linear(seq_len//4*256, 1)
self.out = nn.Identity()
def forward(self, x_):
# x_ is concatenated tensor of p_ and w_, shape (batch, features+n_wires, seq_len)
# p_ shape is (batch, features, seq_len),
# w_ is AE-encoded wire (batch, encoded_dim, seq_len)
seq_len = x_.shape[2]
x = x_
#dist = ((xy - nn.ConstantPad1d((1, 0), 0.0)(xy[:,:,:-1]))**2).sum(dim=1).unsqueeze(1)
p = x[:,:n_features]
w = x[:,n_features:n_features+2]
wg = x[:,n_features+2:]
pxy = x[:,:n_features+2]
#x = torch.cat([p, w], dim=1)
#x = self.act(self.conv0(pxy))
p = self.convpxy(x[:,:n_features+2])
#x = torch.cat([xy, xwg], dim=1)
x = p
x = self.db1(x)
x = self.db2(x)
x = self.lin0(x.flatten(1,2))
return self.out(x)#.squeeze(1)
class VAE(nn.Module):
def __init__(self, encoded_dim):
super().__init__()
class Enc(nn.Module):
def __init__(self, hidden_size):
super().__init__()
self.act = nn.LeakyReLU(0.2)
self.lin1 = nn.Linear(n_wires, hidden_size)
self.lin2 = nn.Linear(hidden_size, encoded_dim)
self.out = nn.Tanh()
def forward(self, x):
x = self.act(self.lin1(x))
return self.out(self.lin2(x))
class Dec(nn.Module):
def __init__(self, hidden_size):
super().__init__()
self.act = nn.ReLU()
self.lin1 = nn.Linear(encoded_dim, hidden_size)
self.lin2 = nn.Linear(hidden_size, n_wires)
def forward(self, x):
x = self.act(self.lin1(x))
return self.lin2(x)
self.enc_net = Enc(512)
self.dec_net = Dec(512)
def enc(self, x):
return self.enc_net(x.permute(0, 2, 1)).permute(0,2,1)
def dec(self, x):
return self.dec_net(x.permute(0, 2, 1)).permute(0,2,1)
def forward(self, x):
y = self.dec_net(self.enc_net(x))
return y
def get_n_params(model):
return sum(p.reshape(-1).shape[0] for p in model.parameters())
| [
"[email protected]"
] | |
0fbcb4dace7a1a4154e9787babcc6e51e2fb5d94 | 46769b03aa33875bf4c7b1d4a2c51635a397cdfc | /new_test_22_dev_10089/wsgi.py | 71a95a093114e20c8b2cd6e41198ec4cfed9c106 | [] | no_license | crowdbotics-apps/new-test-22-dev-10089 | 89b7e69c0dc0144f507ad6a0b84f06386d9a4e1c | ff18ba29e44581c139829607c663d731730b7bd9 | refs/heads/master | 2022-12-14T20:30:56.738780 | 2020-09-07T13:15:20 | 2020-09-07T13:15:20 | 293,532,502 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 419 | py | """
WSGI config for new_test_22_dev_10089 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'new_test_22_dev_10089.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
1cd44967f3be59b5da4887abb18f70c55b13da1d | 79c1f1e1f9a123a146a314b9e4cd7f57c2301ed3 | /visualize/CSVVsersion/SceneCSV.py | 1fa2468f95df5ef35c99938e975a8dbef066891a | [
"MIT"
] | permissive | ys1998/motion-forecast | 41a1ebc553edddcf8c463d67237f53c23aa53729 | ef8fa9d597906a756f28952a731f6bc8d178f2bf | refs/heads/master | 2020-04-17T16:41:20.276427 | 2019-07-07T06:19:30 | 2019-07-07T06:19:30 | 166,751,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 874 | py | import vtk
class SceneCSV(object):
def __init__(self):
self.sceneSources = list()
self.sceneMappers = list()
self.sceneActors = list()
self.sceneLights = list()
self.addLight(1.0, 1.0, 1.0, 1000, 1000, -1000, 0.75, 180, 0.75)
self.addLight(1.0, 1.0, 1.0, -1000, 500, 1000, 0.5, 180, 0.0)
self.addLight(1.0, 1.0, 1.0, -1000, 500,- 1000, 0.5, 180, 0.0)
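        # Three scene lights: one key light with shadow attenuation and two fill lights.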
def addLight(self, cR, cG, cB, pX, pY, pZ, Intensity, ConeAngle, Attenuation):
self.sceneLights.append(vtk.vtkLight())
self.sceneLights[-1].SetColor(cR, cG, cB)
self.sceneLights[-1].SetPosition(pX, pY, pZ)
self.sceneLights[-1].SetIntensity(Intensity)
self.sceneLights[-1].SetConeAngle(ConeAngle)
self.sceneLights[-1].SetShadowAttenuation(Attenuation)
self.sceneLights[-1].SetLightTypeToSceneLight()
| [
"[email protected]"
] | |
91b1c10e64a295b822581c8cf1127bbf61ba56b2 | 7cecdae92dc9b32df74d45a1bd529bc544283f94 | /tests/test_config_reader.py | 20ca54e3cbd8839410c09f8f079c3409405dcb82 | [
"MIT"
] | permissive | Josef-Friedrich/jflib | 6d680bebc414a7b56918fdb33694ce666e41d9af | 722f8cd9fa7008fc1db77d7e785f6bfd90b5d7f6 | refs/heads/main | 2023-05-28T23:32:33.381374 | 2023-05-23T21:30:22 | 2023-05-23T21:30:22 | 180,605,227 | 3 | 0 | MIT | 2023-05-23T21:30:23 | 2019-04-10T14:58:43 | Python | UTF-8 | Python | false | false | 16,179 | py | import argparse
import os
import tempfile
import unittest
from jflib import config_reader
from jflib.config_reader import (
ArgparseReader,
ConfigReader,
ConfigValueError,
DictionaryReader,
EnvironReader,
IniReader,
ReaderBase,
ReaderSelector,
load_readers_by_keyword,
validate_key,
)
FILES_DIR = os.path.join(os.path.dirname(__file__), "files")
# [Classical]
# name = Mozart
# [Romantic]
# name = Schumann
INI_FILE = os.path.join(FILES_DIR, "config.ini")
os.environ["XXX__Classical__name"] = "Mozart"
os.environ["XXX__Baroque__name"] = "Bach"
parser = argparse.ArgumentParser()
parser.add_argument("--classical-name")
parser.add_argument("--baroque-name")
ARGPARSER_NAMESPACE = parser.parse_args(
["--baroque-name", "Bach", "--classical-name", "Mozart"]
)
class TestFunctionValidateKey(unittest.TestCase):
def test_valid(self):
self.assertTrue(validate_key("test"))
self.assertTrue(validate_key("test_1"))
self.assertTrue(validate_key("1"))
self.assertTrue(validate_key("a"))
self.assertTrue(validate_key("ABC_abc_123"))
def test_invalid(self):
with self.assertRaises(ValueError) as context:
validate_key("l o l")
self.assertEqual(
str(context.exception),
"The key “l o l” contains invalid characters " "(allowed: a-zA-Z0-9_).",
)
with self.assertRaises(ValueError) as context:
validate_key("ö")
# Reader classes ##############################################################
class FalseReader(ReaderBase):
def not_get(self):
return "It’s not get"
class TestClassReaderBase(unittest.TestCase):
def test_exception(self):
with self.assertRaises(TypeError):
FalseReader() # pylint: disable=abstract-class-instantiated
class TestClassArgparseReader(unittest.TestCase):
def test_method_get_without_mapping(self):
argparse = ArgparseReader(args=ARGPARSER_NAMESPACE)
self.assertEqual(argparse.get("Classical", "name"), "Mozart")
self.assertEqual(argparse.get("Baroque", "name"), "Bach")
def test_method_get_with_mapping(self):
argparse = ArgparseReader(
args=ARGPARSER_NAMESPACE,
mapping={
"Classical.name": "classical_name",
"Baroque.name": "baroque_name",
},
)
self.assertEqual(argparse.get("Classical", "name"), "Mozart")
self.assertEqual(argparse.get("Baroque", "name"), "Bach")
def test_exception(self):
argparse = ArgparseReader(
args=ARGPARSER_NAMESPACE,
mapping={
"Classical.name": "classical_name",
"Baroque.name": "baroque_name",
"Romantic.name": "romantic_name",
},
)
with self.assertRaises(ConfigValueError):
argparse.get("Romantic", "name")
with self.assertRaises(ConfigValueError):
argparse.get("Modern", "name")
class TestClassDictionaryReader(unittest.TestCase):
dictionary = {"Classical": {"name": "Mozart"}}
def test_method_get(self):
dictionary = DictionaryReader(dictionary=self.dictionary)
self.assertEqual(dictionary.get("Classical", "name"), "Mozart")
def test_exception(self):
dictionary = DictionaryReader(dictionary=self.dictionary)
with self.assertRaises(ConfigValueError):
dictionary.get("Romantic", "name")
class TestClassEnvironReader(unittest.TestCase):
def test_method_get(self):
os.environ["AAA__bridge__ip"] = "1.2.3.4"
os.environ["AAA__bridge__username"] = "test"
environ = EnvironReader(prefix="AAA")
self.assertEqual(environ.get("bridge", "ip"), "1.2.3.4")
self.assertEqual(environ.get("bridge", "username"), "test")
def test_exception(self):
environ = EnvironReader(prefix="AAA")
with self.assertRaises(ConfigValueError) as cm:
environ.get("lol", "lol")
self.assertEqual(
str(cm.exception),
"Environment variable not found: AAA__lol__lol",
)
class TestClassEnvironWithoutPrefix(unittest.TestCase):
def test_method_get(self):
os.environ["Avantgarde__name"] = "Stockhausen"
environ = EnvironReader()
self.assertEqual(environ.get("Avantgarde", "name"), "Stockhausen")
del os.environ["Avantgarde__name"]
def test_exception(self):
environ = EnvironReader()
with self.assertRaises(ConfigValueError) as cm:
environ.get("xxxAvantgarde", "xxxname")
self.assertEqual(
str(cm.exception),
"Environment variable not found: xxxAvantgarde__xxxname",
)
class TestClassIniReader(unittest.TestCase):
def test_method_get(self):
ini = IniReader(path=INI_FILE)
self.assertEqual(ini.get("Classical", "name"), "Mozart")
self.assertEqual(ini.get("Romantic", "name"), "Schumann")
def test_exception(self):
ini = IniReader(path=INI_FILE)
with self.assertRaises(ConfigValueError) as context:
ini.get("lol", "lol")
self.assertEqual(
str(context.exception),
"Configuration value could not be found (section “lol” key " "“lol”).",
)
def test_non_existent_ini_file(self):
tmp_path = tempfile.mkdtemp()
non_existent = os.path.join(tmp_path, "xxx")
with self.assertRaises(config_reader.IniReaderError):
IniReader(path=non_existent)
def test_none(self):
with self.assertRaises(config_reader.IniReaderError):
IniReader(path=None)
def test_false(self):
with self.assertRaises(config_reader.IniReaderError):
IniReader(path=False)
def test_emtpy_string(self):
with self.assertRaises(config_reader.IniReaderError):
IniReader(path="")
# Common code #################################################################
class TestClassReaderSelector(unittest.TestCase):
def test_ini_first(self):
reader = ReaderSelector(IniReader(INI_FILE), EnvironReader(prefix="XXX"))
self.assertEqual(reader.get("Classical", "name"), "Mozart")
def test_environ_first(self):
reader = ReaderSelector(EnvironReader("XXX"), IniReader(INI_FILE))
self.assertEqual(reader.get("Baroque", "name"), "Bach")
def test_exception(self):
reader = ReaderSelector(EnvironReader("XXX"), IniReader(INI_FILE))
with self.assertRaises(ValueError) as context:
reader.get("lol", "lol")
self.assertEqual(
str(context.exception),
"Configuration value could not be found (section “lol” key " "“lol”).",
)
class TestFunctionLoadReadersByKeyword(unittest.TestCase):
def test_without_keywords_arguments(self):
with self.assertRaises(TypeError):
load_readers_by_keyword(INI_FILE, "XXX") # pylint: disable=E1121
def test_order_ini_environ(self):
readers = load_readers_by_keyword(ini=INI_FILE, environ="XXX")
self.assertEqual(readers[0].__class__.__name__, "IniReader")
self.assertEqual(readers[1].__class__.__name__, "EnvironReader")
def test_order_environ_ini(self):
readers = load_readers_by_keyword(
environ="XXX",
ini=INI_FILE,
)
self.assertEqual(readers[0].__class__.__name__, "EnvironReader")
self.assertEqual(readers[1].__class__.__name__, "IniReader")
def test_argparse_single_arguemnt(self):
readers = load_readers_by_keyword(argparse=ARGPARSER_NAMESPACE)
self.assertEqual(readers[0].__class__.__name__, "ArgparseReader")
# Integration tests ###########################################################
class TestClassConfigReader(unittest.TestCase):
def setUp(self):
# argparser
parser = argparse.ArgumentParser()
parser.add_argument("--common-key")
parser.add_argument("--specific-argparse")
args = parser.parse_args(
["--common-key", "argparse", "--specific-argparse", "argparse"]
)
self.argparse = (
args,
{"common.key": "common_key", "specific.argparse": "specific_argparse"},
)
# dictionary
self.dictionary = {
"common": {"key": "dictionary"},
"specific": {"dictionary": "dictionary"},
}
# environ
self.environ = "YYY"
os.environ["YYY__common__key"] = "environ"
os.environ["YYY__specific__environ"] = "environ"
# ini
self.ini = os.path.join(FILES_DIR, "integration.ini")
def tearDown(self):
del os.environ["YYY__common__key"]
del os.environ["YYY__specific__environ"]
def test_argparse_first(self):
config_reader = ConfigReader(
argparse=self.argparse,
dictionary=self.dictionary,
environ=self.environ,
ini=self.ini,
)
config = config_reader.get_class_interface()
self.assertEqual(config.common.key, "argparse")
def test_argparse_empty(self):
parser = argparse.ArgumentParser()
parser.add_argument("--empty-key")
args = parser.parse_args([])
config_reader = ConfigReader(
argparse=(args, {}),
dictionary={"empty": {"key": "from_dict"}},
)
config = config_reader.get_class_interface()
self.assertEqual(config.empty.key, "from_dict")
def test_dictionary_first(self):
config_reader = ConfigReader(
dictionary=self.dictionary,
argparse=self.argparse,
environ=self.environ,
ini=self.ini,
)
config = config_reader.get_class_interface()
self.assertEqual(config.common.key, "dictionary")
def test_environ_first(self):
config_reader = ConfigReader(
environ=self.environ,
argparse=self.argparse,
dictionary=self.dictionary,
ini=self.ini,
)
config = config_reader.get_class_interface()
self.assertEqual(config.common.key, "environ")
def test_ini_first(self):
config_reader = ConfigReader(
ini=self.ini,
argparse=self.argparse,
dictionary=self.dictionary,
environ=self.environ,
)
config = config_reader.get_class_interface()
self.assertEqual(config.common.key, "ini")
def test_specifiy_values(self):
config_reader = ConfigReader(
argparse=self.argparse,
dictionary=self.dictionary,
environ=self.environ,
ini=self.ini,
)
config = config_reader.get_class_interface()
self.assertEqual(config.specific.argparse, "argparse")
self.assertEqual(config.specific.dictionary, "dictionary")
self.assertEqual(config.specific.environ, "environ")
self.assertEqual(config.specific.ini, "ini")
def test_method_get_class_interface(self):
config_reader = ConfigReader(
argparse=self.argparse,
dictionary=self.dictionary,
environ=self.environ,
ini=self.ini,
)
config = config_reader.get_class_interface()
self.assertEqual(config.specific.argparse, "argparse")
self.assertEqual(config.specific.dictionary, "dictionary")
self.assertEqual(config.specific.environ, "environ")
self.assertEqual(config.specific.ini, "ini")
def test_method_get_dictionary_interface(self):
config_reader = ConfigReader(
argparse=self.argparse,
dictionary=self.dictionary,
environ=self.environ,
ini=self.ini,
)
config = config_reader.get_dictionary_interface()
self.assertEqual(config["specific"]["argparse"], "argparse")
self.assertEqual(config["specific"]["dictionary"], "dictionary")
self.assertEqual(config["specific"]["environ"], "environ")
self.assertEqual(config["specific"]["ini"], "ini")
def test_method_check_section(self):
dictionary = {
"missing_key": {"key": "value"},
"all_good": {"key": "value"},
"empty": {"key": ""},
}
spec = {
"missing_key": { # section
"key": { # key
"not_empty": True,
},
"not_configured_key": { # key
"not_empty": False,
},
},
"all_good": { # section
"key": { # key
"not_empty": True,
}
},
"empty": { # section
"key": { # key
"not_empty": True,
}
},
}
config_reader = ConfigReader(
spec=spec,
dictionary=dictionary,
)
self.assertTrue(config_reader.check_section("all_good"))
with self.assertRaises(ValueError):
config_reader.check_section("missing_key")
with self.assertRaises(KeyError):
config_reader.check_section("xxx")
with self.assertRaises(ValueError):
config_reader.check_section("empty")
def test_spec_defaults(self):
dictionary = {
"no_default": {
"key": "No default value",
},
}
spec = {
"default": {
"key": {
"description": "A default value",
"default": 123,
},
},
"no_default": {
"key": {
"description": "No default value",
},
},
}
config_reader = ConfigReader(
spec=spec,
dictionary=dictionary,
)
config = config_reader.get_class_interface()
self.assertEqual(config.no_default.key, "No default value")
self.assertEqual(config.default.key, 123)
def test_method_spec_to_argparse(self):
spec = {
"email": {
"smtp_login": {
"description": "The SMTP login name",
"default": "user1",
},
},
}
config_reader = ConfigReader(spec=spec)
parser = argparse.ArgumentParser()
config_reader.spec_to_argparse(parser)
args = parser.parse_args([])
self.assertEqual(args.email_smtp_login, "user1")
args = parser.parse_args(["--email-smtp-login", "user2"])
self.assertEqual(args.email_smtp_login, "user2")
class TestTypes(unittest.TestCase):
def setUp(self):
config_reader = ConfigReader(ini=os.path.join(FILES_DIR, "types.ini"))
self.config = config_reader.get_class_interface()
def test_int(self):
self.assertEqual(self.config.types.int, 1)
def test_float(self):
self.assertEqual(self.config.types.float, 1.1)
def test_str(self):
self.assertEqual(self.config.types.str, "Some text")
def test_list(self):
self.assertEqual(self.config.types.list, [1, 2, 3])
def test_tuple(self):
self.assertEqual(self.config.types.tuple, (1, 2, 3))
def test_dict(self):
self.assertEqual(self.config.types.dict, {"one": 1, "two": 2})
def test_code(self):
self.assertEqual(self.config.types.code, "print('lol')")
def test_invalid_code(self):
self.assertEqual(self.config.types.invalid_code, "print('lol)'")
def test_bool(self):
self.assertEqual(self.config.types.bool, True)
def test_empty_string(self):
self.assertEqual(self.config.types.empty_str, "")
def test_none(self):
self.assertEqual(self.config.types.none, None)
def test_zero(self):
self.assertEqual(self.config.types.zero, 0)
def test_false(self):
self.assertEqual(self.config.types.false, False)
def test_false_str(self):
self.assertEqual(self.config.types.false_str, "false")
| [
"[email protected]"
] | |
be9f7d8b3ac111643c48d86d2142203de3228393 | 7b383cab8f9708dd9bc00c939cbab9600c0ca894 | /UP2/NCS1/USBCam/Classes/NCS1.py | 87c91137d99cfd455a3f827b54d10b7e35d6f62c | [
"MIT"
] | permissive | amirunpri2018/TassAI | 3f05b1b848d6d93fe491761589352bc0521496c3 | 3451f34d8973b67a823784e7db8fde03b274a60d | refs/heads/master | 2022-12-22T06:48:46.024068 | 2020-10-01T15:43:25 | 2020-10-01T15:43:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,806 | py | ######################################################################################################
#
# Organization: Asociacion De Investigacion En Inteligencia Artificial Para La Leucemia Peter Moss
# Project: UP2 NCS1 Facial Recognition USB Security System
#
# Author: Adam Milton-Barker (AdamMiltonBarker.com)
#
# Title: NCS1 Class
# Description: NCS1 helper functions.
# License: MIT License
# Last Modified: 2020-09-28
#
######################################################################################################
import os, json, cv2, dlib, imutils
import numpy as np
from datetime import datetime
from imutils import face_utils
from mvnc import mvncapi as mvnc
from Classes.Helpers import Helpers
class NCS1():
""" NCS1 Class
NCS1 helper functions.
"""
def __init__(self):
""" Initializes the class. """
self.Known = []
self.Helpers = Helpers("NCS1")
self.Detector = dlib.get_frontal_face_detector()
self.Predictor = dlib.shape_predictor(
self.Helpers.confs["Classifier"]["Dlib"])
self.check()
self.load()
self.preprocess()
self.Helpers.logger.info("NCS1 class initialized.")
def check(self):
""" Checks for NCS1 device. """
#mvnc.SetGlobalOption(mvnc.GlobalOption.LOGLEVEL, 2)
devices = mvnc.EnumerateDevices()
if len(devices) == 0:
self.Helpers.logger.info(
"No Neural Compute Stick 1 devices, exiting")
quit()
self.ncs1 = mvnc.Device(devices[0])
self.ncs1.OpenDevice()
self.Helpers.logger.info("Connected to Neural Compute Stick 1")
def load(self):
""" Loads NCS1 graph. """
with open(self.Helpers.confs["Classifier"]["Graph"], mode='rb') as f:
graphFile = f.read()
self.Helpers.logger.info("Loaded NCS1 graph")
self.graph = self.ncs1.AllocateGraph(graphFile)
def preprocess(self):
""" Encodes the known users images. """
self.encoded = []
# Loops through all images in the security folder
for filename in os.listdir(self.Helpers.confs["Classifier"]["Known"]):
# Checks file type
if filename.lower().endswith(tuple(self.Helpers.confs["Classifier"]["Allowed"])):
fpath = os.path.join(
self.Helpers.confs["Classifier"]["Known"], filename)
# Gets user id from filename
user = os.path.splitext(filename)[0]
# Reads the image
raw, frame = self.prepareImg(cv2.imread(fpath))
# Saves the user id and encoded image to a list
self.encoded.append((user, self.infer(frame)))
self.Helpers.logger.info("Known data preprocessed!")
def faces(self, image):
""" Finds faces and their coordinates in an image. """
# Find faces
faces = self.Detector(image, 0)
# Gets coordinates for faces
coords = [self.Predictor(image, face) for face in faces]
return faces, coords
def prepareImg(self, frame):
""" Reads & processes frame from the local TassAI. """
# Resizes the frame
frame = cv2.resize(frame, (640, 480))
# Makes a copy of the frame
raw = frame.copy()
return raw, frame
def processImg(self, img):
""" Preprocesses an image for inference. """
dims = 160
resized = cv2.resize(img, (dims, dims))
processed = self.whiten(resized)
return processed
def whiten(self, grayscaled):
""" Creates a whitened image. """
mean = np.mean(grayscaled)
std_dev = np.std(grayscaled)
std_adjusted = np.maximum(std_dev, 1.0 / np.sqrt(grayscaled.size))
whitened_image = np.multiply(np.subtract(grayscaled, mean), 1 / std_adjusted)
return whitened_image
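    # Note on whiten() above: this is the standard FaceNet-style per-image
    # prewhitening, out = (img - mean(img)) / max(std(img), 1/sqrt(n_pixels)).
    # The std floor keeps near-constant crops from blowing up; for a 160x160
    # BGR crop (76800 values) the floor is 1/sqrt(76800) ~= 0.0036.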
def infer(self, img):
""" Runs the image through NCS1. """
self.graph.LoadTensor(self.processImg(img).astype(np.float16), None)
output, userobj = self.graph.GetResult()
return output
def match(self, frame, coords):
""" Checks faces for matches against known users. """
msg = ""
person = 0
confidence = 0
# Loops through known encodings
for enc in self.encoded:
# Encode current frame
encoded = self.infer(frame)
# Calculate if difference is less than or equal to
recognize = self.compare(enc[1], encoded)
# If known
if recognize[0] == True:
person = int(enc[0])
confidence = recognize[1]
msg = "TassAI identified User #" + str(person)
break
if(person == 0):
msg = "TassAI identified an intruder"
self.Helpers.logger.info(msg)
return person, confidence
def compare(self, face1, face2):
""" Determines whether two images are a match. """
        if (len(face1) != len(face2)):
            self.Helpers.logger.info("Distance Mismatch")
            # Keep the (matched, distance) return shape consistent with the
            # branches below so callers can index the result safely.
            return False, 0
tdiff = 0
for index in range(0, len(face1)):
diff = np.square(face1[index] - face2[index])
tdiff += diff
if (tdiff < 1.3):
self.Helpers.logger.info("Calculated Match: " + str(tdiff))
return True, tdiff
else:
self.Helpers.logger.info("Calculated Mismatch: " + str(tdiff))
return False, tdiff
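# --- Usage sketch (not part of the original module) --------------------------
# A minimal way this class might be driven; the frame source and file name are
# assumptions, only the method names and signatures come from the class above.
#
#   ncs1 = NCS1()
#   raw, frame = ncs1.prepareImg(cv2.imread("frame.jpg"))
#   faces, coords = ncs1.faces(frame)
#   if len(faces):
#       person, confidence = ncs1.match(frame, coords)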
| [
"[email protected]"
] | |
2de76a36c9302294182913712a4cbdbd7f90c964 | 9e1f60a867f66b1f4e4fc84fa4252c581e5e1a36 | /Chapter09/test_state_1.py | e33eaec766eaab31fbfd5891365def90b52cc707 | [
"MIT"
] | permissive | PacktPublishing/Clean-Code-in-Python | c216e002485b8cd7736f97b59215a3930f35359a | 7348d0f9f42871f499b352e0696e0cef51c4f8c6 | refs/heads/master | 2023-06-10T13:40:33.331115 | 2023-05-30T17:48:09 | 2023-05-30T17:48:09 | 145,072,942 | 523 | 181 | MIT | 2023-05-30T17:48:10 | 2018-08-17T04:48:38 | Python | UTF-8 | Python | false | false | 1,489 | py | """Clean Code in Python - Chapter 9: Common Design Patterns
> Test State
"""
import unittest
from state_1 import Closed, InvalidTransitionError, Merged, MergeRequest, Open
class TestMergeRequestTransitions(unittest.TestCase):
def setUp(self):
self.mr = MergeRequest("develop", "master")
def test_reopen(self):
self.mr.approvals = 3
self.mr.open()
self.assertEqual(self.mr.approvals, 0)
def test_open_to_closed(self):
self.mr.approvals = 2
self.assertIsInstance(self.mr.state, Open)
self.mr.close()
self.assertEqual(self.mr.approvals, 0)
self.assertIsInstance(self.mr.state, Closed)
def test_closed_to_open(self):
self.mr.close()
self.assertIsInstance(self.mr.state, Closed)
self.mr.open()
self.assertIsInstance(self.mr.state, Open)
    def test_double_close(self):
        # Closing an already closed merge request must be a no-op (no exception).
        self.mr.close()
        self.mr.close()
def test_open_to_merge(self):
self.mr.merge()
self.assertIsInstance(self.mr.state, Merged)
def test_merge_is_final(self):
self.mr.merge()
regex = "already merged request"
self.assertRaisesRegex(InvalidTransitionError, regex, self.mr.open)
self.assertRaisesRegex(InvalidTransitionError, regex, self.mr.close)
def test_cannot_merge_closed(self):
self.mr.close()
self.assertRaises(InvalidTransitionError, self.mr.merge)
if __name__ == "__main__":
unittest.main()
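# Context note: state_1 is not included in this record. The module under test
# follows the State pattern -- MergeRequest delegates open()/close()/merge()
# to a state object (Open, Closed or Merged) and swaps it on each transition.
# A shape consistent with the assertions above (an inference, not the book's
# actual code):
#   Open.open()    -> reset approvals to 0 (re-open)
#   Open.close()   -> reset approvals, switch to Closed
#   Open.merge()   -> switch to Merged
#   Closed.open()  -> switch back to Open; Closed.close() is a no-op
#   Closed.merge() -> raise InvalidTransitionError
#   Merged.open()/close() -> raise InvalidTransitionError("already merged request")
# The exact message matters: test_merge_is_final matches it with a regex.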
| [
"[email protected]"
] | |
2145526225efcedfd80d26083e233b193732814f | 8ff6c3e513e17be6c51b484bed81d03150bdd175 | /2013-01-facegif/cutface.py | afb58002b5ace80bf5625eccd582ac4bee62a9f2 | [] | no_license | ricbit/Oldies | f1a2ac520b64e43d11c250cc372d526e9febeedd | 2d884c61ac777605f7260cd4d36a13ed5a2c6a58 | refs/heads/master | 2023-04-27T20:35:19.485763 | 2023-04-26T04:45:44 | 2023-04-26T04:45:44 | 2,050,140 | 40 | 8 | null | null | null | null | UTF-8 | Python | false | false | 1,089 | py | import cv
import os
import sys
def DetectFaces(image):
cascade = cv.Load('haarcascade_frontalface_alt.xml')
storage = cv.CreateMemStorage(0)
return cv.HaarDetectObjects(
image, cascade, storage, 1.2, 2, cv.CV_HAAR_DO_CANNY_PRUNING)
def cut_image(image):
grayscale = cv.CreateImage(cv.GetSize(image), 8, 1)
cv.CvtColor(image, grayscale, cv.CV_BGR2GRAY)
#cv.EqualizeHist(grayscale, grayscale)
faces = DetectFaces(grayscale)
ans = []
for face in faces:
x, y, dx, dy = face[0]
cropped = cv.CreateMat(dx, dy, cv.CV_8UC1)
cv.GetRectSubPix(grayscale, cropped, (x + dx / 2, y + dy / 2))
resized = cv.CreateImage((92, 112), 8, 1)
cv.Resize(cropped, resized)
ans.append(resized)
return ans
def main():
path = sys.argv[1]
i = 0
for filename in os.listdir(path):
fullpath = os.path.join(path, filename)
print fullpath
image = cv.LoadImage(fullpath)
for cut in cut_image(image):
output = os.path.join(sys.argv[2], '%d.jpg' % i)
cv.SaveImage(output, cut)
i += 1
if __name__ == '__main__':
main()
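# Usage note (inferred from the code above, not from the original repo):
# this script targets Python 2 with the legacy OpenCV 1.x "cv" bindings
# (cv.LoadImage / cv.CreateMat) and expects haarcascade_frontalface_alt.xml
# in the working directory. Invocation:
#   python2 cutface.py <input_image_dir> <output_face_dir>
# Each detected face is cropped from the grayscale frame, resized to 92x112
# (the ORL/AT&T face database size) and written as <n>.jpg.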
| [
"[email protected]"
] | |
4e6ebbffaa7463d3cfd44e97df532f2dee48d07a | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2788/60595/251681.py | ce493d0bf81d1f4beb8b4ad066205aea69c75fcb | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | def Test():
    n = int(input())
    boys = eval("[" + input().strip().replace(" ", ",") + "]")
    m = int(input())
    girls = eval("[" + input().strip().replace(" ", ",") + "]")
    # Sort both groups and sweep with two pointers: pair partners whose skills
    # differ by at most 1, otherwise advance the pointer at the smaller skill.
    boys.sort()
    girls.sort()
    parts = []
    i = 0
    j = 0
    while i < len(boys) and j < len(girls):
        if check(boys[i], girls[j]):
            parts.append([boys[i], girls[j]])
            i = i + 1
            j = j + 1
        elif boys[i] < girls[j]:
            i = i + 1
        else:
            j = j + 1
    print(len(parts))
def check(a, b):
    return abs(a - b) <= 1
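# Expected stdin layout, inferred from the parsing in Test() (the original
# problem statement is not part of this record):
#   line 1: n            e.g. 4
#   line 2: n skills     e.g. 1 4 6 2
#   line 3: m            e.g. 5
#   line 4: m skills     e.g. 5 1 5 7 9
# For that sample the sweep pairs (1,1), (4,5), (6,5) and prints 3.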
if __name__ == "__main__":
Test() | [
"[email protected]"
] | |
a4b1a60545658a8d8f78c60948c9a730f08c530f | 2daa3894e6d6929fd04145100d8a3be5eedbe21c | /tests/artificial/transf_sqr/trend_constant/cycle_12/ar_/test_artificial_1024_sqr_constant_12__100.py | 2239a2b9360d2647f6e4ca61b30fe39991f9a2e9 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Henri-Lo/pyaf | a1f73a0cc807873bd7b79648fe51de9cfd6c126a | 08c968425d85dcace974d90db7f07c845a0fe914 | refs/heads/master | 2021-07-01T12:27:31.600232 | 2017-09-21T11:19:04 | 2017-09-21T11:19:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 310 | py | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
dataset = tsds.generate_random_TS(N = 1024 , FREQ = 'D', seed = 0, trendtype = "constant", cycle_length = 12, transform = "sqr", sigma = 0.0, exog_count = 100, ar_order = 0);
art.process_dataset(dataset); | [
"[email protected]"
] | |
082a1d0a91074a3a3545799fa89ac845b5d5790b | bf448de248ec95325839b5e355af6982b4e5632d | /todo_project/testing_app/views/index.py | ac307f51c5dad4b28290138bc92c859ef3195724 | [] | no_license | rusalinastaneva/Python-Web-Basics | b459b8f47918b52654d304f3db3de54156299f66 | ecdd72b44d4d0a1b107ef1f9fa10eb252bd4fd0e | refs/heads/master | 2023-01-05T13:15:58.691511 | 2020-11-02T20:32:16 | 2020-11-02T20:32:16 | 309,486,341 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 674 | py | from django.shortcuts import render, redirect
from testing_app.forms.profile import ProfileForm
from testing_app.models import Profile
def index(request):
if request.method == 'GET':
context = {
'profiles': Profile.objects.all(),
'form': ProfileForm(),
}
return render(request, 'testing/index.html', context)
else:
form = ProfileForm(request.POST)
if form.is_valid():
form.save()
return redirect('profiles')
context = {
'profiles': Profile.objects.all(),
'form': form,
}
return render(request, 'testing/index.html', context)
| [
"[email protected]"
] | |
b4391290703dfc6a10438c48de9f21f63720a332 | cc0c0f99a5cf563ff52a76f2ac17cdad09d22f01 | /venv/Lib/site-packages/itk/itkReinitializeLevelSetImageFilterPython.py | 3d980dd5b7380f3bcd09a86b369d64e85eb438f6 | [] | no_license | Marxss/carck_detect_system | 9c0d338bde322b4c7304fd0addb524d8697c8a7b | d2480f2108052af8af0aa5265a5239c309885043 | refs/heads/master | 2022-04-15T23:34:20.988335 | 2020-03-29T16:24:00 | 2020-03-29T16:24:00 | 214,625,168 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,562 | py | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.8
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (3, 0, 0):
new_instancemethod = lambda func, inst, cls: _itkReinitializeLevelSetImageFilterPython.SWIG_PyInstanceMethod_New(func)
else:
from new import instancemethod as new_instancemethod
if version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_itkReinitializeLevelSetImageFilterPython', [dirname(__file__)])
except ImportError:
import _itkReinitializeLevelSetImageFilterPython
return _itkReinitializeLevelSetImageFilterPython
if fp is not None:
try:
_mod = imp.load_module('_itkReinitializeLevelSetImageFilterPython', fp, pathname, description)
finally:
fp.close()
return _mod
_itkReinitializeLevelSetImageFilterPython = swig_import_helper()
del swig_import_helper
else:
import _itkReinitializeLevelSetImageFilterPython
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
object.__setattr__(self, name, value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
if (not static):
return object.__getattr__(self, name)
else:
raise AttributeError(name)
def _swig_getattr(self, class_type, name):
return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object:
pass
_newclass = 0
def _swig_setattr_nondynamic_method(set):
def set_attr(self, name, value):
if (name == "thisown"):
return self.this.own(value)
if hasattr(self, name) or (name == "this"):
set(self, name, value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
return set_attr
import itkImageToImageFilterAPython
import itkImageSourcePython
import itkImageSourceCommonPython
import ITKCommonBasePython
import pyBasePython
import itkImagePython
import itkOffsetPython
import itkSizePython
import itkPointPython
import vnl_vector_refPython
import stdcomplexPython
import vnl_vectorPython
import vnl_matrixPython
import itkVectorPython
import itkFixedArrayPython
import itkMatrixPython
import vnl_matrix_fixedPython
import itkCovariantVectorPython
import itkSymmetricSecondRankTensorPython
import itkImageRegionPython
import itkIndexPython
import itkRGBPixelPython
import itkRGBAPixelPython
import itkVectorImagePython
import itkVariableLengthVectorPython
import itkImageToImageFilterCommonPython
import ITKFastMarchingBasePython
import itkFastMarchingStoppingCriterionBasePython
import itkNodePairPython
import itkLevelSetNodePython
def itkReinitializeLevelSetImageFilterIF3_New():
return itkReinitializeLevelSetImageFilterIF3.New()
def itkReinitializeLevelSetImageFilterIF2_New():
return itkReinitializeLevelSetImageFilterIF2.New()
class itkReinitializeLevelSetImageFilterIF2(itkImageToImageFilterAPython.itkImageToImageFilterIF2IF2):
"""Proxy of C++ itkReinitializeLevelSetImageFilterIF2 class."""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
def __New_orig__() -> "itkReinitializeLevelSetImageFilterIF2_Pointer":
"""__New_orig__() -> itkReinitializeLevelSetImageFilterIF2_Pointer"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2___New_orig__()
__New_orig__ = staticmethod(__New_orig__)
def Clone(self) -> "itkReinitializeLevelSetImageFilterIF2_Pointer":
"""Clone(itkReinitializeLevelSetImageFilterIF2 self) -> itkReinitializeLevelSetImageFilterIF2_Pointer"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_Clone(self)
def SetLevelSetValue(self, _arg: 'double const') -> "void":
"""SetLevelSetValue(itkReinitializeLevelSetImageFilterIF2 self, double const _arg)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetLevelSetValue(self, _arg)
def GetLevelSetValue(self) -> "double":
"""GetLevelSetValue(itkReinitializeLevelSetImageFilterIF2 self) -> double"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetLevelSetValue(self)
def SetNarrowBanding(self, _arg: 'bool const') -> "void":
"""SetNarrowBanding(itkReinitializeLevelSetImageFilterIF2 self, bool const _arg)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetNarrowBanding(self, _arg)
def GetNarrowBanding(self) -> "bool":
"""GetNarrowBanding(itkReinitializeLevelSetImageFilterIF2 self) -> bool"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetNarrowBanding(self)
def NarrowBandingOn(self) -> "void":
"""NarrowBandingOn(itkReinitializeLevelSetImageFilterIF2 self)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_NarrowBandingOn(self)
def NarrowBandingOff(self) -> "void":
"""NarrowBandingOff(itkReinitializeLevelSetImageFilterIF2 self)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_NarrowBandingOff(self)
def SetInputNarrowBandwidth(self, _arg: 'double') -> "void":
"""SetInputNarrowBandwidth(itkReinitializeLevelSetImageFilterIF2 self, double _arg)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetInputNarrowBandwidth(self, _arg)
def GetInputNarrowBandwidth(self) -> "double":
"""GetInputNarrowBandwidth(itkReinitializeLevelSetImageFilterIF2 self) -> double"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetInputNarrowBandwidth(self)
def SetOutputNarrowBandwidth(self, _arg: 'double') -> "void":
"""SetOutputNarrowBandwidth(itkReinitializeLevelSetImageFilterIF2 self, double _arg)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetOutputNarrowBandwidth(self, _arg)
def GetOutputNarrowBandwidth(self) -> "double":
"""GetOutputNarrowBandwidth(itkReinitializeLevelSetImageFilterIF2 self) -> double"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetOutputNarrowBandwidth(self)
def SetNarrowBandwidth(self, value: 'double') -> "void":
"""SetNarrowBandwidth(itkReinitializeLevelSetImageFilterIF2 self, double value)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetNarrowBandwidth(self, value)
def SetInputNarrowBand(self, ptr: 'itkVectorContainerUILSNF2') -> "void":
"""SetInputNarrowBand(itkReinitializeLevelSetImageFilterIF2 self, itkVectorContainerUILSNF2 ptr)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetInputNarrowBand(self, ptr)
def GetInputNarrowBand(self) -> "itkVectorContainerUILSNF2_Pointer":
"""GetInputNarrowBand(itkReinitializeLevelSetImageFilterIF2 self) -> itkVectorContainerUILSNF2_Pointer"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetInputNarrowBand(self)
def GetOutputNarrowBand(self) -> "itkVectorContainerUILSNF2_Pointer":
"""GetOutputNarrowBand(itkReinitializeLevelSetImageFilterIF2 self) -> itkVectorContainerUILSNF2_Pointer"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetOutputNarrowBand(self)
LevelSetDoubleAdditiveOperatorsCheck = _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_LevelSetDoubleAdditiveOperatorsCheck
LevelSetOStreamWritableCheck = _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_LevelSetOStreamWritableCheck
__swig_destroy__ = _itkReinitializeLevelSetImageFilterPython.delete_itkReinitializeLevelSetImageFilterIF2
def cast(obj: 'itkLightObject') -> "itkReinitializeLevelSetImageFilterIF2 *":
"""cast(itkLightObject obj) -> itkReinitializeLevelSetImageFilterIF2"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)
cast = staticmethod(cast)
def New(*args, **kargs):
"""New() -> itkReinitializeLevelSetImageFilterIF2
Create a new object of the class itkReinitializeLevelSetImageFilterIF2 and set the input and the parameters if some
named or non-named arguments are passed to that method.
New() tries to assign all the non named parameters to the input of the new objects - the
first non named parameter in the first input, etc.
The named parameters are used by calling the method with the same name prefixed by 'Set'.
Ex:
itkReinitializeLevelSetImageFilterIF2.New( reader, Threshold=10 )
is (most of the time) equivalent to:
obj = itkReinitializeLevelSetImageFilterIF2.New()
obj.SetInput( 0, reader.GetOutput() )
obj.SetThreshold( 10 )
"""
obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj
New = staticmethod(New)
itkReinitializeLevelSetImageFilterIF2.Clone = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_Clone, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.SetLevelSetValue = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetLevelSetValue, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.GetLevelSetValue = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetLevelSetValue, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.SetNarrowBanding = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetNarrowBanding, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.GetNarrowBanding = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetNarrowBanding, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.NarrowBandingOn = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_NarrowBandingOn, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.NarrowBandingOff = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_NarrowBandingOff, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.SetInputNarrowBandwidth = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetInputNarrowBandwidth, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.GetInputNarrowBandwidth = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetInputNarrowBandwidth, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.SetOutputNarrowBandwidth = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetOutputNarrowBandwidth, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.GetOutputNarrowBandwidth = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetOutputNarrowBandwidth, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.SetNarrowBandwidth = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetNarrowBandwidth, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.SetInputNarrowBand = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetInputNarrowBand, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.GetInputNarrowBand = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetInputNarrowBand, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2.GetOutputNarrowBand = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetOutputNarrowBand, None, itkReinitializeLevelSetImageFilterIF2)
itkReinitializeLevelSetImageFilterIF2_swigregister = _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_swigregister
itkReinitializeLevelSetImageFilterIF2_swigregister(itkReinitializeLevelSetImageFilterIF2)
def itkReinitializeLevelSetImageFilterIF2___New_orig__() -> "itkReinitializeLevelSetImageFilterIF2_Pointer":
"""itkReinitializeLevelSetImageFilterIF2___New_orig__() -> itkReinitializeLevelSetImageFilterIF2_Pointer"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2___New_orig__()
def itkReinitializeLevelSetImageFilterIF2_cast(obj: 'itkLightObject') -> "itkReinitializeLevelSetImageFilterIF2 *":
"""itkReinitializeLevelSetImageFilterIF2_cast(itkLightObject obj) -> itkReinitializeLevelSetImageFilterIF2"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_cast(obj)
class itkReinitializeLevelSetImageFilterIF3(itkImageToImageFilterAPython.itkImageToImageFilterIF3IF3):
"""Proxy of C++ itkReinitializeLevelSetImageFilterIF3 class."""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
def __New_orig__() -> "itkReinitializeLevelSetImageFilterIF3_Pointer":
"""__New_orig__() -> itkReinitializeLevelSetImageFilterIF3_Pointer"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3___New_orig__()
__New_orig__ = staticmethod(__New_orig__)
def Clone(self) -> "itkReinitializeLevelSetImageFilterIF3_Pointer":
"""Clone(itkReinitializeLevelSetImageFilterIF3 self) -> itkReinitializeLevelSetImageFilterIF3_Pointer"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_Clone(self)
def SetLevelSetValue(self, _arg: 'double const') -> "void":
"""SetLevelSetValue(itkReinitializeLevelSetImageFilterIF3 self, double const _arg)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetLevelSetValue(self, _arg)
def GetLevelSetValue(self) -> "double":
"""GetLevelSetValue(itkReinitializeLevelSetImageFilterIF3 self) -> double"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetLevelSetValue(self)
def SetNarrowBanding(self, _arg: 'bool const') -> "void":
"""SetNarrowBanding(itkReinitializeLevelSetImageFilterIF3 self, bool const _arg)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetNarrowBanding(self, _arg)
def GetNarrowBanding(self) -> "bool":
"""GetNarrowBanding(itkReinitializeLevelSetImageFilterIF3 self) -> bool"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetNarrowBanding(self)
def NarrowBandingOn(self) -> "void":
"""NarrowBandingOn(itkReinitializeLevelSetImageFilterIF3 self)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_NarrowBandingOn(self)
def NarrowBandingOff(self) -> "void":
"""NarrowBandingOff(itkReinitializeLevelSetImageFilterIF3 self)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_NarrowBandingOff(self)
def SetInputNarrowBandwidth(self, _arg: 'double') -> "void":
"""SetInputNarrowBandwidth(itkReinitializeLevelSetImageFilterIF3 self, double _arg)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetInputNarrowBandwidth(self, _arg)
def GetInputNarrowBandwidth(self) -> "double":
"""GetInputNarrowBandwidth(itkReinitializeLevelSetImageFilterIF3 self) -> double"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetInputNarrowBandwidth(self)
def SetOutputNarrowBandwidth(self, _arg: 'double') -> "void":
"""SetOutputNarrowBandwidth(itkReinitializeLevelSetImageFilterIF3 self, double _arg)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetOutputNarrowBandwidth(self, _arg)
def GetOutputNarrowBandwidth(self) -> "double":
"""GetOutputNarrowBandwidth(itkReinitializeLevelSetImageFilterIF3 self) -> double"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetOutputNarrowBandwidth(self)
def SetNarrowBandwidth(self, value: 'double') -> "void":
"""SetNarrowBandwidth(itkReinitializeLevelSetImageFilterIF3 self, double value)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetNarrowBandwidth(self, value)
def SetInputNarrowBand(self, ptr: 'itkVectorContainerUILSNF3') -> "void":
"""SetInputNarrowBand(itkReinitializeLevelSetImageFilterIF3 self, itkVectorContainerUILSNF3 ptr)"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetInputNarrowBand(self, ptr)
def GetInputNarrowBand(self) -> "itkVectorContainerUILSNF3_Pointer":
"""GetInputNarrowBand(itkReinitializeLevelSetImageFilterIF3 self) -> itkVectorContainerUILSNF3_Pointer"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetInputNarrowBand(self)
def GetOutputNarrowBand(self) -> "itkVectorContainerUILSNF3_Pointer":
"""GetOutputNarrowBand(itkReinitializeLevelSetImageFilterIF3 self) -> itkVectorContainerUILSNF3_Pointer"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetOutputNarrowBand(self)
LevelSetDoubleAdditiveOperatorsCheck = _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_LevelSetDoubleAdditiveOperatorsCheck
LevelSetOStreamWritableCheck = _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_LevelSetOStreamWritableCheck
__swig_destroy__ = _itkReinitializeLevelSetImageFilterPython.delete_itkReinitializeLevelSetImageFilterIF3
def cast(obj: 'itkLightObject') -> "itkReinitializeLevelSetImageFilterIF3 *":
"""cast(itkLightObject obj) -> itkReinitializeLevelSetImageFilterIF3"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)
cast = staticmethod(cast)
def New(*args, **kargs):
"""New() -> itkReinitializeLevelSetImageFilterIF3
Create a new object of the class itkReinitializeLevelSetImageFilterIF3 and set the input and the parameters if some
named or non-named arguments are passed to that method.
New() tries to assign all the non named parameters to the input of the new objects - the
first non named parameter in the first input, etc.
The named parameters are used by calling the method with the same name prefixed by 'Set'.
Ex:
itkReinitializeLevelSetImageFilterIF3.New( reader, Threshold=10 )
is (most of the time) equivalent to:
obj = itkReinitializeLevelSetImageFilterIF3.New()
obj.SetInput( 0, reader.GetOutput() )
obj.SetThreshold( 10 )
"""
obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj
New = staticmethod(New)
itkReinitializeLevelSetImageFilterIF3.Clone = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_Clone, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.SetLevelSetValue = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetLevelSetValue, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.GetLevelSetValue = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetLevelSetValue, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.SetNarrowBanding = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetNarrowBanding, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.GetNarrowBanding = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetNarrowBanding, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.NarrowBandingOn = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_NarrowBandingOn, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.NarrowBandingOff = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_NarrowBandingOff, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.SetInputNarrowBandwidth = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetInputNarrowBandwidth, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.GetInputNarrowBandwidth = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetInputNarrowBandwidth, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.SetOutputNarrowBandwidth = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetOutputNarrowBandwidth, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.GetOutputNarrowBandwidth = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetOutputNarrowBandwidth, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.SetNarrowBandwidth = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetNarrowBandwidth, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.SetInputNarrowBand = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetInputNarrowBand, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.GetInputNarrowBand = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetInputNarrowBand, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3.GetOutputNarrowBand = new_instancemethod(_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetOutputNarrowBand, None, itkReinitializeLevelSetImageFilterIF3)
itkReinitializeLevelSetImageFilterIF3_swigregister = _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_swigregister
itkReinitializeLevelSetImageFilterIF3_swigregister(itkReinitializeLevelSetImageFilterIF3)
def itkReinitializeLevelSetImageFilterIF3___New_orig__() -> "itkReinitializeLevelSetImageFilterIF3_Pointer":
"""itkReinitializeLevelSetImageFilterIF3___New_orig__() -> itkReinitializeLevelSetImageFilterIF3_Pointer"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3___New_orig__()
def itkReinitializeLevelSetImageFilterIF3_cast(obj: 'itkLightObject') -> "itkReinitializeLevelSetImageFilterIF3 *":
"""itkReinitializeLevelSetImageFilterIF3_cast(itkLightObject obj) -> itkReinitializeLevelSetImageFilterIF3"""
return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)
def reinitialize_level_set_image_filter(*args, **kwargs):
"""Procedural interface for ReinitializeLevelSetImageFilter"""
import itk
instance = itk.ReinitializeLevelSetImageFilter.New(*args, **kwargs)
return instance.__internal_call__()
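# Usage sketch for the procedural interface above (assumes a working ITK
# install and a float-typed level-set image on disk; file names are
# illustrative only):
#   import itk
#   img = itk.imread("levelset.nrrd", itk.F)
#   out = itk.reinitialize_level_set_image_filter(img, LevelSetValue=0.0)
#   itk.imwrite(out, "reinitialized.nrrd")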
def reinitialize_level_set_image_filter_init_docstring():
import itk
import itkTemplate
if isinstance(itk.ReinitializeLevelSetImageFilter, itkTemplate.itkTemplate):
reinitialize_level_set_image_filter.__doc__ = itk.ReinitializeLevelSetImageFilter.values()[0].__doc__
else:
reinitialize_level_set_image_filter.__doc__ = itk.ReinitializeLevelSetImageFilter.__doc__
| [
"[email protected]"
] | |
4fa0efb8dc0dd6987ee766db7a94b8729fca3054 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startQiskit_Class168.py | 6263a03eb4672e046fa05de76c19787287b6648e | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,188 | py | # qubit number=3
# total number=30
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
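# Hand-checked examples for the two helpers above:
#   bitwise_xor("011", "000") == "110"   # note the joined result is reversed
#   bitwise_dot("011", "101") == "1"     # (0*1 + 1*0 + 1*1) % 2
# In this script bitwise_xor only ever sees single-bit strings (the output of
# bitwise_dot and b), so the reversal does not affect the oracle definitions.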
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename=(kernel + '-oracle.png'))
return oracle
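# Worked example of the oracle construction above, for n=2 and the f used in
# __main__ below (a="11", b="1"): f is 1 on inputs "00" and "11", so the loop
# X-conjugates the zero-valued controls and applies a multi-controlled X,
# flipping the target qubit exactly for |00> and |11>.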
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the Bernstein-Vazirani circuit
zero = np.binary_repr(0, n)
b = f(zero)
# initial n + 1 bits
input_qubit = QuantumRegister(n+1, "qc")
classicals = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classicals)
# inverse last one (can be omitted if using O_f^\pm)
prog.x(input_qubit[n])
# circuit begin
prog.h(input_qubit[1]) # number=1
prog.rx(-0.09738937226128368,input_qubit[2]) # number=2
prog.h(input_qubit[1]) # number=3
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[n])
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [input_qubit[n]])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
return prog
def get_statevector(prog: QuantumCircuit) -> Any:
state_backend = Aer.get_backend('statevector_simulator')
statevec = execute(prog, state_backend).result()
quantum_state = statevec.get_statevector()
qubits = round(log2(len(quantum_state)))
quantum_state = {
"|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
for i in range(2 ** qubits)
}
return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
# Q: which backend should we use?
# get state vector
quantum_state = get_statevector(prog)
# get simulate results
# provider = IBMQ.load_account()
# backend = provider.get_backend(backend_str)
# qobj = compile(prog, backend, shots)
# job = backend.run(qobj)
# job.result()
backend = Aer.get_backend(backend_str)
# transpile/schedule -> assemble -> backend.run
results = execute(prog, backend, shots=shots).result()
counts = results.get_counts()
a = Counter(counts).most_common(1)[0][0][::-1]
return {
"measurements": counts,
# "state": statevec,
"quantum_state": quantum_state,
"a": a,
"b": b
}
def bernstein_test_1(rep: str):
"""011 . x + 1"""
a = "011"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_2(rep: str):
"""000 . x + 0"""
a = "000"
b = "0"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_3(rep: str):
"""111 . x + 1"""
a = "111"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
    sample_shot = 4000
writefile = open("../data/startQiskit_Class168.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = BasicAer.get_backend('statevector_simulator')
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| [
"[email protected]"
] | |
c7de402e23ffc1e90db73f1063f4c6f29ac379e0 | a0f0bfa44979566732b76c609a9198457e031a2b | /docs/source/conf.py | de97daef2a9bc7e1ee09bea6783833a6f2872132 | [
"BSD-3-Clause"
] | permissive | davidfischer/sporco-cuda | 951aebd1001e9cfe58e490c470b6addb163ca271 | ffe633b9cf7055550a0f2250e29bedd99cd098f1 | refs/heads/master | 2020-03-16T11:41:31.922467 | 2018-05-08T00:41:26 | 2018-05-08T00:41:26 | 132,652,756 | 0 | 0 | null | 2018-05-08T19:03:16 | 2018-05-08T19:03:15 | null | UTF-8 | Python | false | false | 10,177 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# sporco_cuda documentation build configuration file, created by
# sphinx-quickstart on Wed Nov 22 21:03:01 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
from ast import parse
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
'numpydoc',
'sphinx_tabs.tabs',
'sphinx_fontawesome'
]
# generate autosummary pages
autosummary_generate = True
autodoc_mock_imports = ['sporco_cuda.util', 'sporco_cuda.cbpdn']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'sporco-cuda'
copyright = '2017-2018, Gustavo Silva, Brendt Wohlberg'
author = 'Gustavo Silva, Brendt Wohlberg'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
with open(os.path.join('../../sporco_cuda', '__init__.py')) as f:
version = parse(next(filter(
lambda line: line.startswith('__version__'),
f))).body[0].value.s
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['tmp', '*.tmp.*', '*.tmp']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'alabaster'
html_theme = "haiku"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'sporco.css'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'sporco_cudadoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'sporco_cuda.tex', 'SPORCO-CUDA Documentation',
[author], 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'sporco_cuda', 'SPORCO-CUDA Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'sporco_cuda', 'SPORCO-CUDA Documentation',
author, 'sporco_cuda', 'SParse Optimization Research COde (SPORCO) CUDA',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None,
'http://sporco.rtfd.io/en/latest/': None}
| [
"[email protected]"
] | |
b1baa37ccc2300a62d8d9375b75162e34c2989df | 5a281cb78335e06c631181720546f6876005d4e5 | /openstack-placement-1.0.0/placement/tests/fixtures.py | be235b355a8c4dee2334904091ad0f96969adaab | [
"Apache-2.0"
] | permissive | scottwedge/OpenStack-Stein | d25b2a5bb54a714fc23f0ff0c11fb1fdacad85e8 | 7077d1f602031dace92916f14e36b124f474de15 | refs/heads/master | 2021-03-22T16:07:19.561504 | 2020-03-15T01:31:10 | 2020-03-15T01:31:10 | 247,380,811 | 0 | 0 | Apache-2.0 | 2020-03-15T01:24:15 | 2020-03-15T01:24:15 | null | UTF-8 | Python | false | false | 3,272 | py | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Fixtures for Placement tests."""
from __future__ import absolute_import
from oslo_config import cfg
from oslo_db.sqlalchemy import test_fixtures
from placement.db.sqlalchemy import migration
from placement import db_api as placement_db
from placement import deploy
from placement.objects import resource_class
from placement.objects import trait
from placement import resource_class_cache as rc_cache
class Database(test_fixtures.GeneratesSchema, test_fixtures.AdHocDbFixture):
def __init__(self, conf_fixture, set_config=False):
"""Create a database fixture."""
super(Database, self).__init__()
if set_config:
try:
conf_fixture.register_opt(
cfg.StrOpt('connection'), group='placement_database')
except cfg.DuplicateOptError:
# already registered
pass
conf_fixture.config(connection='sqlite://',
group='placement_database')
self.conf_fixture = conf_fixture
self.get_engine = placement_db.get_placement_engine
placement_db.configure(self.conf_fixture.conf)
def get_enginefacade(self):
return placement_db.placement_context_manager
def generate_schema_create_all(self, engine):
# note: at this point in oslo_db's fixtures, the incoming
# Engine has **not** been associated with the global
# context manager yet.
migration.create_schema(engine)
# so, to work around that placement's setup code really wants to
# use the enginefacade, we will patch the engine into it early.
# oslo_db is going to patch it anyway later. So the bug in oslo.db
# is that code these days really wants the facade to be set up fully
# when it's time to create the database. When oslo_db's fixtures
# were written, enginefacade was not in use yet so it was not
# anticipated that everyone would be doing things this way
_reset_facade = placement_db.placement_context_manager.patch_engine(
engine)
self.addCleanup(_reset_facade)
# Make sure db flags are correct at both the start and finish
# of the test.
self.addCleanup(self.cleanup)
self.cleanup()
# Sync traits and resource classes.
deploy.update_database(self.conf_fixture.conf)
def cleanup(self):
trait._TRAITS_SYNCED = False
resource_class._RESOURCE_CLASSES_SYNCED = False
rc_cache.RC_CACHE = None
| [
"Wayne [email protected]"
] | Wayne [email protected] |
393ccdde7ef08c546deeb32ee7f792b458c689fa | a63419b2c457a219c010876ece3980af8cfc3c1b | /_DJANGO_/django-player/gameplay/views.py | 556971862e1019a0cf46647d9fb7fb1688c69685 | [] | no_license | thomasm1/python_2018 | ba87560a1e25343c0429fcafe51bb867dc299223 | 6a57c7603055a2511a8734ab34ce21f76e4427ef | refs/heads/master | 2023-05-10T07:20:07.911734 | 2023-05-05T03:58:36 | 2023-05-05T03:58:36 | 147,065,041 | 2 | 5 | null | 2023-03-03T15:15:08 | 2018-09-02T07:41:32 | Rich Text Format | UTF-8 | Python | false | false | 1,150 | py | from django.shortcuts import render, get_object_or_404, redirect
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.views.generic import ListView
from .models import Game
from .forms import MoveForm
@login_required()
def game_detail(request, id):
game = get_object_or_404(Game, pk=id)
context = {'game': game }
if game.is_users_move(request.user):
context['form'] = MoveForm()
return render(request,
"gameplay/game_detail.html",
context
)
@login_required()
def make_move(request, id):
game = get_object_or_404(Game, pk=id)
if not game.is_users_move(request.user):
raise PermissionDenied
move = game.new_move()
form = MoveForm(instance=move, data=request.POST)
if form.is_valid():
move.save()
return redirect("gameplay_detail", id)
else:
return render(request,
"gameplay/game_detail.html",
{'game': game, 'form': form}
)
class AllGamesList(ListView):
model = Game
| [
"[email protected]"
] | |
3110c67c6673e46cd839713921988a00f652a37e | e7290064b5df4731167bab10606f451b446a21f7 | /python/ray/runtime_context.py | 1ec4a38511968bc138256290746c4f4428646c0f | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | sven1977/ray | dce9f6fa114741837341f14aef0a8c64c442aba6 | b73a496af19bce627a611e7af2cb02a3c5d99684 | refs/heads/master | 2023-09-02T00:57:47.167794 | 2023-08-17T09:33:04 | 2023-08-17T09:33:04 | 229,269,728 | 2 | 5 | Apache-2.0 | 2023-07-29T07:08:41 | 2019-12-20T13:27:01 | Python | UTF-8 | Python | false | false | 13,583 | py | import logging
from typing import Any, Dict, Optional
import ray._private.worker
from ray._private.client_mode_hook import client_mode_hook
from ray._private.utils import pasre_pg_formatted_resources_to_original
from ray.runtime_env import RuntimeEnv
from ray.util.annotations import Deprecated, PublicAPI
logger = logging.getLogger(__name__)
@PublicAPI
class RuntimeContext(object):
"""A class used for getting runtime context."""
def __init__(self, worker):
assert worker is not None
self.worker = worker
@Deprecated(
message="Use get_xxx_id() methods to get relevant ids instead", warning=True
)
def get(self) -> Dict[str, Any]:
"""Get a dictionary of the current context.
Returns:
dict: Dictionary of the current context.
"""
context = {
"job_id": self.job_id,
"node_id": self.node_id,
"namespace": self.namespace,
}
if self.worker.mode == ray._private.worker.WORKER_MODE:
if self.task_id is not None:
context["task_id"] = self.task_id
if self.actor_id is not None:
context["actor_id"] = self.actor_id
return context
@property
@Deprecated(message="Use get_job_id() instead", warning=True)
def job_id(self):
"""Get current job ID for this worker or driver.
Job ID is the id of your Ray drivers that create tasks or actors.
Returns:
If called by a driver, this returns the job ID. If called in
a task, return the job ID of the associated driver.
"""
job_id = self.worker.current_job_id
assert not job_id.is_nil()
return job_id
def get_job_id(self) -> str:
"""Get current job ID for this worker or driver.
Job ID is the id of your Ray drivers that create tasks or actors.
Returns:
If called by a driver, this returns the job ID. If called in
a task, return the job ID of the associated driver. The
job ID will be hex format.
Raises:
AssertionError: If not called in a driver or worker. Generally,
this means that ray.init() was not called.
"""
assert ray.is_initialized(), (
"Job ID is not available because " "Ray has not been initialized."
)
job_id = self.worker.current_job_id
return job_id.hex()
@property
@Deprecated(message="Use get_node_id() instead", warning=True)
def node_id(self):
"""Get current node ID for this worker or driver.
Node ID is the id of a node that your driver, task, or actor runs.
Returns:
A node id for this worker or driver.
"""
node_id = self.worker.current_node_id
assert not node_id.is_nil()
return node_id
def get_node_id(self) -> str:
"""Get current node ID for this worker or driver.
Node ID is the id of a node that your driver, task, or actor runs.
The ID will be in hex format.
Returns:
A node id in hex format for this worker or driver.
Raises:
AssertionError: If not called in a driver or worker. Generally,
this means that ray.init() was not called.
"""
assert ray.is_initialized(), (
"Node ID is not available because " "Ray has not been initialized."
)
node_id = self.worker.current_node_id
return node_id.hex()
def get_worker_id(self) -> str:
"""Get current worker ID for this worker or driver process.
Returns:
A worker id in hex format for this worker or driver process.
"""
assert (
ray.is_initialized()
), "Worker ID is not available because Ray has not been initialized."
return self.worker.worker_id.hex()
@property
@Deprecated(message="Use get_task_id() instead", warning=True)
def task_id(self):
"""Get current task ID for this worker or driver.
Task ID is the id of a Ray task.
This shouldn't be used in a driver process.
Example:
.. testcode::
import ray
@ray.remote
class Actor:
def ready(self):
return True
@ray.remote
def f():
return True
# All the below code generates different task ids.
# Task ids are available for actor creation.
a = Actor.remote()
# Task ids are available for actor tasks.
a.ready.remote()
# Task ids are available for normal tasks.
f.remote()
Returns:
The current worker's task id. None if there's no task id.
"""
# only worker mode has actor_id
assert (
self.worker.mode == ray._private.worker.WORKER_MODE
), f"This method is only available when the process is a\
worker. Current mode: {self.worker.mode}"
task_id = self.worker.current_task_id
return task_id if not task_id.is_nil() else None
def get_task_id(self) -> Optional[str]:
"""Get current task ID for this worker or driver.
Task ID is the id of a Ray task. The ID will be in hex format.
This shouldn't be used in a driver process.
Example:
.. testcode::
import ray
@ray.remote
class Actor:
def get_task_id(self):
return ray.get_runtime_context().get_task_id()
@ray.remote
def get_task_id():
return ray.get_runtime_context().get_task_id()
# All the below code generates different task ids.
a = Actor.remote()
# Task ids are available for actor tasks.
print(ray.get(a.get_task_id.remote()))
# Task ids are available for normal tasks.
print(ray.get(get_task_id.remote()))
.. testoutput::
:options: +MOCK
16310a0f0a45af5c2746a0e6efb235c0962896a201000000
c2668a65bda616c1ffffffffffffffffffffffff01000000
Returns:
The current worker's task id in hex. None if there's no task id.
"""
# only worker mode has actor_id
if self.worker.mode != ray._private.worker.WORKER_MODE:
logger.warning(
"This method is only available when the process is a "
f"worker. Current mode: {self.worker.mode}"
)
return None
task_id = self.worker.current_task_id
return task_id.hex() if not task_id.is_nil() else None
@property
@Deprecated(message="Use get_actor_id() instead", warning=True)
def actor_id(self):
"""Get the current actor ID in this worker.
ID of the actor of the current process.
This shouldn't be used in a driver process.
Returns:
The current actor id in this worker. None if there's no actor id.
"""
# only worker mode has actor_id
assert (
self.worker.mode == ray._private.worker.WORKER_MODE
), f"This method is only available when the process is a\
worker. Current mode: {self.worker.mode}"
actor_id = self.worker.actor_id
return actor_id if not actor_id.is_nil() else None
def get_actor_id(self) -> Optional[str]:
"""Get the current actor ID in this worker.
ID of the actor of the current process.
This shouldn't be used in a driver process.
The ID will be in hex format.
Returns:
The current actor id in hex format in this worker. None if there's no
actor id.
"""
# only worker mode has actor_id
if self.worker.mode != ray._private.worker.WORKER_MODE:
logger.warning(
"This method is only available when the process is a "
"worker. Current mode: {self.worker.mode}"
)
return None
actor_id = self.worker.actor_id
return actor_id.hex() if not actor_id.is_nil() else None
@property
def namespace(self):
"""Get the current namespace of this worker.
Returns:
The current namespace of this worker.
"""
return self.worker.namespace
@property
def was_current_actor_reconstructed(self):
"""Check whether this actor has been restarted.
Returns:
Whether this actor has been ever restarted.
"""
assert (
not self.actor_id.is_nil()
), "This method should't be called inside Ray tasks."
actor_info = ray._private.state.actors(self.actor_id.hex())
return actor_info and actor_info["NumRestarts"] != 0
@property
@Deprecated(message="Use get_placement_group_id() instead", warning=True)
def current_placement_group_id(self):
"""Get the current Placement group ID of this worker.
Returns:
The current placement group id of this worker.
"""
return self.worker.placement_group_id
def get_placement_group_id(self) -> Optional[str]:
"""Get the current Placement group ID of this worker.
Returns:
The current placement group id in hex format of this worker.
"""
pg_id = self.worker.placement_group_id
return pg_id.hex() if not pg_id.is_nil() else None
@property
def should_capture_child_tasks_in_placement_group(self):
"""Get if the current task should capture parent's placement group.
This returns True if it is called inside a driver.
Returns:
Return True if the current task should implicitly
capture the parent placement group.
"""
return self.worker.should_capture_child_tasks_in_placement_group
def get_assigned_resources(self):
"""Get the assigned resources to this worker.
By default for tasks, this will return {"CPU": 1}.
By default for actors, this will return {}. This is because
actors do not have CPUs assigned to them by default.
Returns:
A dictionary mapping the name of a resource to a float, where
the float represents the amount of that resource reserved
for this worker.
"""
assert (
self.worker.mode == ray._private.worker.WORKER_MODE
), f"This method is only available when the process is a\
worker. Current mode: {self.worker.mode}"
self.worker.check_connected()
resource_id_map = self.worker.core_worker.resource_ids()
resource_map = {
res: sum(amt for _, amt in mapping)
for res, mapping in resource_id_map.items()
}
return pasre_pg_formatted_resources_to_original(resource_map)
def get_runtime_env_string(self):
"""Get the runtime env string used for the current driver or worker.
Returns:
The runtime env string currently using by this worker.
"""
return self.worker.runtime_env
@property
def runtime_env(self):
"""Get the runtime env used for the current driver or worker.
Returns:
The runtime env currently using by this worker. The type of
return value is ray.runtime_env.RuntimeEnv.
"""
return RuntimeEnv.deserialize(self.get_runtime_env_string())
@property
def current_actor(self):
"""Get the current actor handle of this actor itsself.
Returns:
The handle of current actor.
"""
worker = self.worker
worker.check_connected()
actor_id = worker.actor_id
if actor_id.is_nil():
raise RuntimeError("This method is only available in an actor.")
return worker.core_worker.get_actor_handle(actor_id)
@property
def gcs_address(self):
"""Get the GCS address of the ray cluster.
Returns:
The GCS address of the cluster.
"""
self.worker.check_connected()
return self.worker.gcs_client.address
def _get_actor_call_stats(self):
"""Get the current worker's task counters.
Returns:
A dictionary keyed by the function name. The values are
dictionaries with form ``{"pending": 0, "running": 1,
"finished": 2}``.
"""
worker = self.worker
worker.check_connected()
return worker.core_worker.get_actor_call_stats()
_runtime_context = None
@PublicAPI
@client_mode_hook
def get_runtime_context() -> RuntimeContext:
"""Get the runtime context of the current driver/worker.
The obtained runtime context can be used to get the metadata
of the current task and actor.
Example:
.. testcode::
import ray
# Get the job id.
ray.get_runtime_context().get_job_id()
# Get the actor id.
ray.get_runtime_context().get_actor_id()
# Get the task id.
ray.get_runtime_context().get_task_id()
"""
global _runtime_context
if _runtime_context is None:
_runtime_context = RuntimeContext(ray._private.worker.global_worker)
return _runtime_context
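# Illustrative usage sketch (not part of this module): inside a Ray task the
# runtime context defined above can be queried for ids and resources. Only the
# getters defined in this file are used; the task itself is an example.
#
#   import ray
#
#   @ray.remote
#   def where_am_i():
#       ctx = ray.get_runtime_context()
#       return ctx.get_job_id(), ctx.get_task_id(), ctx.get_assigned_resources()
#
#   ray.init()
#   print(ray.get(where_am_i.remote()))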
| [
"[email protected]"
] | |
ae77d3ac091bad1efdaf9614fef017f8eeb6c022 | ade9a658d227f8b91c9c19ceb7dfaf1bab24f909 | /tests/test_items/test_contacts.py | 4e77130f21744d24f35265713b9da73ed45cedc5 | [
"BSD-2-Clause"
] | permissive | spmsh/exchangelib | 3da14e89e5606178f0594ac0498063db0c3e62a2 | 9cf3d9f08926d8a923c00d3a3cb9d96203cb25b1 | refs/heads/master | 2023-08-30T23:01:03.929611 | 2021-11-23T00:41:03 | 2021-11-23T00:41:03 | 203,696,045 | 0 | 0 | BSD-2-Clause | 2021-11-23T00:43:06 | 2019-08-22T02:04:23 | Python | UTF-8 | Python | false | false | 10,088 | py | import datetime
try:
import zoneinfo
except ImportError:
from backports import zoneinfo
from exchangelib.errors import ErrorInvalidIdMalformed
from exchangelib.folders import Contacts
from exchangelib.indexed_properties import EmailAddress, PhysicalAddress, PhoneNumber
from exchangelib.items import Contact, DistributionList, Persona
from exchangelib.properties import Mailbox, Member, Attribution, SourceId, FolderId, StringAttributedValue, \
PhoneNumberAttributedValue, PersonaPhoneNumberTypeValue
from exchangelib.services import GetPersona
from ..common import get_random_string, get_random_email
from .test_basics import CommonItemTest
class ContactsTest(CommonItemTest):
TEST_FOLDER = 'contacts'
FOLDER_CLASS = Contacts
ITEM_CLASS = Contact
def test_order_by_on_indexed_field(self):
# Test order_by() on IndexedField (simple and multi-subfield). Only Contact items have these
test_items = []
label = self.random_val(EmailAddress.get_field_by_fieldname('label'))
for i in range(4):
item = self.get_test_item()
item.email_addresses = [EmailAddress(email='%[email protected]' % i, label=label)]
test_items.append(item)
self.test_folder.bulk_create(items=test_items)
qs = self.test_folder.filter(categories__contains=self.categories)
self.assertEqual(
[i[0].email for i in qs.order_by('email_addresses__%s' % label)
.values_list('email_addresses', flat=True)],
['[email protected]', '[email protected]', '[email protected]', '[email protected]']
)
self.assertEqual(
[i[0].email for i in qs.order_by('-email_addresses__%s' % label)
.values_list('email_addresses', flat=True)],
['[email protected]', '[email protected]', '[email protected]', '[email protected]']
)
self.bulk_delete(qs)
test_items = []
label = self.random_val(PhysicalAddress.get_field_by_fieldname('label'))
for i in range(4):
item = self.get_test_item()
item.physical_addresses = [PhysicalAddress(street='Elm St %s' % i, label=label)]
test_items.append(item)
self.test_folder.bulk_create(items=test_items)
qs = self.test_folder.filter(categories__contains=self.categories)
self.assertEqual(
[i[0].street for i in qs.order_by('physical_addresses__%s__street' % label)
.values_list('physical_addresses', flat=True)],
['Elm St 0', 'Elm St 1', 'Elm St 2', 'Elm St 3']
)
self.assertEqual(
[i[0].street for i in qs.order_by('-physical_addresses__%s__street' % label)
.values_list('physical_addresses', flat=True)],
['Elm St 3', 'Elm St 2', 'Elm St 1', 'Elm St 0']
)
self.bulk_delete(qs)
def test_order_by_failure(self):
# Test error handling on indexed properties with labels and subfields
qs = self.test_folder.filter(categories__contains=self.categories)
with self.assertRaises(ValueError):
qs.order_by('email_addresses') # Must have label
with self.assertRaises(ValueError):
qs.order_by('email_addresses__FOO') # Must have a valid label
with self.assertRaises(ValueError):
qs.order_by('email_addresses__EmailAddress1__FOO') # Must not have a subfield
with self.assertRaises(ValueError):
qs.order_by('physical_addresses__Business') # Must have a subfield
with self.assertRaises(ValueError):
qs.order_by('physical_addresses__Business__FOO') # Must have a valid subfield
def test_update_on_single_field_indexed_field(self):
home = PhoneNumber(label='HomePhone', phone_number='123')
business = PhoneNumber(label='BusinessPhone', phone_number='456')
item = self.get_test_item()
item.phone_numbers = [home]
item.save()
item.phone_numbers = [business]
item.save(update_fields=['phone_numbers'])
item.refresh()
self.assertListEqual(item.phone_numbers, [business])
def test_update_on_multi_field_indexed_field(self):
home = PhysicalAddress(label='Home', street='ABC')
business = PhysicalAddress(label='Business', street='DEF', city='GHI')
item = self.get_test_item()
item.physical_addresses = [home]
item.save()
item.physical_addresses = [business]
item.save(update_fields=['physical_addresses'])
item.refresh()
self.assertListEqual(item.physical_addresses, [business])
def test_distribution_lists(self):
dl = DistributionList(folder=self.test_folder, display_name=get_random_string(255), categories=self.categories)
dl.save()
new_dl = self.test_folder.get(categories__contains=dl.categories)
self.assertEqual(new_dl.display_name, dl.display_name)
self.assertEqual(new_dl.members, None)
dl.refresh()
# We set mailbox_type to OneOff because otherwise the email address must be an actual account
dl.members = {
Member(mailbox=Mailbox(email_address=get_random_email(), mailbox_type='OneOff')) for _ in range(4)
}
dl.save()
new_dl = self.test_folder.get(categories__contains=dl.categories)
self.assertEqual({m.mailbox.email_address for m in new_dl.members}, dl.members)
dl.delete()
def test_find_people(self):
# The test server may not have any contacts. Just test that the FindPeople and GetPersona services work.
self.assertGreaterEqual(len(list(self.test_folder.people())), 0)
self.assertGreaterEqual(
len(list(
self.test_folder.people().only('display_name').filter(display_name='john').order_by('display_name')
)),
0
)
def test_get_persona(self):
xml = b'''\
<?xml version="1.0" encoding="utf-8"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/">
<s:Body>
<m:GetPersonaResponseMessage ResponseClass="Success"
xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"
xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types">
<m:ResponseCode>NoError</m:ResponseCode>
<m:Persona>
<t:PersonaId Id="AAQkADEzAQAKtOtR="/>
<t:PersonaType>Person</t:PersonaType>
<t:CreationTime>2012-06-01T17:00:34Z</t:CreationTime>
<t:DisplayName>Brian Johnson</t:DisplayName>
<t:RelevanceScore>4255550110</t:RelevanceScore>
<t:Attributions>
<t:Attribution>
<t:Id>0</t:Id>
<t:SourceId Id="AAMkA =" ChangeKey="EQAAABY+"/>
<t:DisplayName>Outlook</t:DisplayName>
<t:IsWritable>true</t:IsWritable>
<t:IsQuickContact>false</t:IsQuickContact>
<t:IsHidden>false</t:IsHidden>
<t:FolderId Id="AAMkA=" ChangeKey="AQAAAA=="/>
</t:Attribution>
</t:Attributions>
<t:DisplayNames>
<t:StringAttributedValue>
<t:Value>Brian Johnson</t:Value>
<t:Attributions>
<t:Attribution>2</t:Attribution>
<t:Attribution>3</t:Attribution>
</t:Attributions>
</t:StringAttributedValue>
</t:DisplayNames>
<t:MobilePhones>
<t:PhoneNumberAttributedValue>
<t:Value>
<t:Number>(425)555-0110</t:Number>
<t:Type>Mobile</t:Type>
</t:Value>
<t:Attributions>
<t:Attribution>0</t:Attribution>
</t:Attributions>
</t:PhoneNumberAttributedValue>
<t:PhoneNumberAttributedValue>
<t:Value>
<t:Number>(425)555-0111</t:Number>
<t:Type>Mobile</t:Type>
</t:Value>
<t:Attributions>
<t:Attribution>1</t:Attribution>
</t:Attributions>
</t:PhoneNumberAttributedValue>
</t:MobilePhones>
</m:Persona>
</m:GetPersonaResponseMessage>
</s:Body>
</s:Envelope>'''
ws = GetPersona(account=self.account)
persona = ws.parse(xml)
self.assertEqual(persona.id, 'AAQkADEzAQAKtOtR=')
self.assertEqual(persona.persona_type, 'Person')
self.assertEqual(
persona.creation_time, datetime.datetime(2012, 6, 1, 17, 0, 34, tzinfo=zoneinfo.ZoneInfo('UTC'))
)
self.assertEqual(persona.display_name, 'Brian Johnson')
self.assertEqual(persona.relevance_score, '4255550110')
self.assertEqual(persona.attributions[0], Attribution(
ID=None,
_id=SourceId(id='AAMkA =', changekey='EQAAABY+'),
display_name='Outlook',
is_writable=True,
is_quick_contact=False,
is_hidden=False,
folder_id=FolderId(id='AAMkA=', changekey='AQAAAA==')
))
self.assertEqual(persona.display_names, [
StringAttributedValue(value='Brian Johnson', attributions=['2', '3']),
])
self.assertEqual(persona.mobile_phones, [
PhoneNumberAttributedValue(
value=PersonaPhoneNumberTypeValue(number='(425)555-0110', type='Mobile'),
attributions=['0'],
),
PhoneNumberAttributedValue(
value=PersonaPhoneNumberTypeValue(number='(425)555-0111', type='Mobile'),
attributions=['1'],
)
])
def test_get_persona_failure(self):
# The test server may not have any personas. Just test that the service response with something we can parse
persona = Persona(id='AAA=', changekey='xxx')
try:
GetPersona(account=self.account).call(persona=persona)
except ErrorInvalidIdMalformed:
pass
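# Illustrative sketch (not part of the test suite): the indexed properties the
# tests above exercise are attached to a Contact roughly like this, assuming an
# already-configured exchangelib `account`; field values are made-up examples.
#
#   contact = Contact(
#       folder=account.contacts,
#       given_name='Jane',
#       email_addresses=[EmailAddress(email='[email protected]', label='EmailAddress1')],
#       phone_numbers=[PhoneNumber(label='HomePhone', phone_number='123')],
#   )
#   contact.save()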
| [
"[email protected]"
] | |
e1eca6386c795d0c9133574f9c9d774114791f16 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/word-count/5cf7181856df4d0a963c76fedfbdd36a.py | f9930a4d6aa5f919c9e249a5f9a7b5e2abcc8d31 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 908 | py | """Word Counting."""
from collections import Counter
import re
import string
# A regular expression that matches any punctuation character.
PUNCTUATION_REGEX = re.compile("[{}]".format(re.escape(string.punctuation)))
class Phrase(str):
"""A subclass of str that supports word counting."""
def __init__(self, phrase=''):
        # str is immutable; its value is set in __new__, so __init__ takes no args
        super(Phrase, self).__init__()
self._counter = None
def __repr__(self):
return "{!s}({!r})".format(self.__class__.__name__, str(self))
def word_count(self):
"""Return a word frequency dictionary.
A word is delimited by runs of consecutive whitespace or punctuation.
"""
if self._counter is None:
punctuation_erased = re.sub(PUNCTUATION_REGEX, ' ', self)
self._counter = Counter(
word.lower() for word in punctuation_erased.split())
return self._counter
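# Tiny usage demo (not part of the exercise interface): run this module
# directly to see word_count() in action.
if __name__ == "__main__":
    poem = Phrase("The sky is blue. The sea is blue!")
    # Counter({'the': 2, 'is': 2, 'blue': 2, 'sky': 1, 'sea': 1})
    print(poem.word_count())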
| [
"[email protected]"
] | |
88441b7e0974e4fc5de5bd965e9a9ad800acd21e | c7dfacea4969b4fef264429e7c21d6c2d4c932b4 | /src/baxter_examples/src/baxter_examples/recorder.py | 032f365f567755301f351b69f2b1e7a75a832475 | [
"BSD-2-Clause"
] | permissive | DeepBlue14/arm_wkspc | 697944c72be9a8efaf97a84b6c26a84ebc8de3a6 | 04009550321868722d207924eed3609be7f54882 | refs/heads/master | 2020-03-21T10:10:05.644158 | 2018-06-23T23:16:40 | 2018-06-23T23:16:40 | 138,436,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,350 | py | # Copyright (c) 2013-2014, Rethink Robotics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the Rethink Robotics nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import rospy
import baxter_interface
from baxter_interface import CHECK_VERSION
class JointRecorder(object):
def __init__(self, filename, rate):
"""
Records joint data to a file at a specified rate.
"""
self._filename = filename
self._raw_rate = rate
self._rate = rospy.Rate(rate)
self._start_time = rospy.get_time()
self._done = False
self._limb_left = baxter_interface.Limb("left")
self._limb_right = baxter_interface.Limb("right")
self._gripper_left = baxter_interface.Gripper("left", CHECK_VERSION)
self._gripper_right = baxter_interface.Gripper("right", CHECK_VERSION)
self._io_left_lower = baxter_interface.DigitalIO('left_lower_button')
self._io_left_upper = baxter_interface.DigitalIO('left_upper_button')
self._io_right_lower = baxter_interface.DigitalIO('right_lower_button')
self._io_right_upper = baxter_interface.DigitalIO('right_upper_button')
# Verify Grippers Have No Errors and are Calibrated
if self._gripper_left.error():
self._gripper_left.reset()
if self._gripper_right.error():
self._gripper_right.reset()
if (not self._gripper_left.calibrated() and
self._gripper_left.type() != 'custom'):
self._gripper_left.calibrate()
if (not self._gripper_right.calibrated() and
self._gripper_right.type() != 'custom'):
self._gripper_right.calibrate()
def _time_stamp(self):
return rospy.get_time() - self._start_time
def stop(self):
"""
Stop recording.
"""
self._done = True
def done(self):
"""
Return whether or not recording is done.
"""
if rospy.is_shutdown():
self.stop()
return self._done
def record(self):
"""
        Records the current joint positions to a csv file. If a filename was
        provided at construction, this function will record the latest set of
        joint angles in csv format.
This function does not test to see if a file exists and will overwrite
existing files.
"""
if self._filename:
joints_left = self._limb_left.joint_names()
joints_right = self._limb_right.joint_names()
with open(self._filename, 'w') as f:
f.write('time,')
f.write(','.join([j for j in joints_left]) + ',')
f.write('left_gripper,')
f.write(','.join([j for j in joints_right]) + ',')
f.write('right_gripper\n')
while not self.done():
# Look for gripper button presses
if self._io_left_lower.state:
self._gripper_left.open()
elif self._io_left_upper.state:
self._gripper_left.close()
if self._io_right_lower.state:
self._gripper_right.open()
elif self._io_right_upper.state:
self._gripper_right.close()
angles_left = [self._limb_left.joint_angle(j)
for j in joints_left]
angles_right = [self._limb_right.joint_angle(j)
for j in joints_right]
f.write("%f," % (self._time_stamp(),))
f.write(','.join([str(x) for x in angles_left]) + ',')
f.write(str(self._gripper_left.position()) + ',')
f.write(','.join([str(x) for x in angles_right]) + ',')
f.write(str(self._gripper_right.position()) + '\n')
self._rate.sleep()
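# Illustrative usage sketch (not part of this module): a recorder node drives
# this class roughly as follows. The node name, filename and rate are example
# values, and a running ROS master plus a Baxter robot are assumed.
#
#   import rospy
#   from baxter_examples import JointRecorder
#
#   rospy.init_node('joint_recorder_example')
#   recorder = JointRecorder('joint_positions.csv', rate=100)
#   rospy.on_shutdown(recorder.stop)
#   recorder.record()   # loops until stop() is called or ROS shuts down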
| [
"[email protected]"
] | |
1b619b69b89bc115f7076f0884b11de16029f768 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/operations/_graph_ql_api_resolver_policy_operations.py | 38e54b6b1cfaa067458f84d88918cd32ea53c32f | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 38,418 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
from .._vendor import ApiManagementClientMixinABC, _convert_request, _format_url_section
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_by_resolver_request(
resource_group_name: str, service_name: str, api_id: str, resolver_id: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/resolvers/{resolverId}/policies",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"serviceName": _SERIALIZER.url(
"service_name",
service_name,
"str",
max_length=50,
min_length=1,
pattern=r"^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$",
),
"apiId": _SERIALIZER.url("api_id", api_id, "str", max_length=256, min_length=1, pattern=r"^[^*#&+:<>?]+$"),
"resolverId": _SERIALIZER.url("resolver_id", resolver_id, "str", max_length=80, min_length=1),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_get_entity_tag_request(
resource_group_name: str,
service_name: str,
api_id: str,
resolver_id: str,
policy_id: Union[str, _models.PolicyIdName],
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/resolvers/{resolverId}/policies/{policyId}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"serviceName": _SERIALIZER.url(
"service_name",
service_name,
"str",
max_length=50,
min_length=1,
pattern=r"^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$",
),
"apiId": _SERIALIZER.url("api_id", api_id, "str", max_length=256, min_length=1, pattern=r"^[^*#&+:<>?]+$"),
"resolverId": _SERIALIZER.url("resolver_id", resolver_id, "str", max_length=80, min_length=1),
"policyId": _SERIALIZER.url("policy_id", policy_id, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs)
def build_get_request(
resource_group_name: str,
service_name: str,
api_id: str,
resolver_id: str,
policy_id: Union[str, _models.PolicyIdName],
subscription_id: str,
*,
format: Union[str, _models.PolicyExportFormat] = "xml",
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/resolvers/{resolverId}/policies/{policyId}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"serviceName": _SERIALIZER.url(
"service_name",
service_name,
"str",
max_length=50,
min_length=1,
pattern=r"^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$",
),
"apiId": _SERIALIZER.url("api_id", api_id, "str", max_length=256, min_length=1, pattern=r"^[^*#&+:<>?]+$"),
"resolverId": _SERIALIZER.url("resolver_id", resolver_id, "str", max_length=80, min_length=1),
"policyId": _SERIALIZER.url("policy_id", policy_id, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
if format is not None:
_params["format"] = _SERIALIZER.query("format", format, "str")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_create_or_update_request(
resource_group_name: str,
service_name: str,
api_id: str,
resolver_id: str,
policy_id: Union[str, _models.PolicyIdName],
subscription_id: str,
*,
if_match: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/resolvers/{resolverId}/policies/{policyId}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"serviceName": _SERIALIZER.url(
"service_name",
service_name,
"str",
max_length=50,
min_length=1,
pattern=r"^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$",
),
"apiId": _SERIALIZER.url("api_id", api_id, "str", max_length=256, min_length=1, pattern=r"^[^*#&+:<>?]+$"),
"resolverId": _SERIALIZER.url("resolver_id", resolver_id, "str", max_length=80, min_length=1),
"policyId": _SERIALIZER.url("policy_id", policy_id, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if if_match is not None:
_headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_request(
resource_group_name: str,
service_name: str,
api_id: str,
resolver_id: str,
policy_id: Union[str, _models.PolicyIdName],
subscription_id: str,
*,
if_match: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/resolvers/{resolverId}/policies/{policyId}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"serviceName": _SERIALIZER.url(
"service_name",
service_name,
"str",
max_length=50,
min_length=1,
pattern=r"^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$",
),
"apiId": _SERIALIZER.url("api_id", api_id, "str", max_length=256, min_length=1, pattern=r"^[^*#&+:<>?]+$"),
"resolverId": _SERIALIZER.url("resolver_id", resolver_id, "str", max_length=80, min_length=1),
"policyId": _SERIALIZER.url("policy_id", policy_id, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
class GraphQLApiResolverPolicyOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.apimanagement.ApiManagementClient`'s
:attr:`graph_ql_api_resolver_policy` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list_by_resolver(
self, resource_group_name: str, service_name: str, api_id: str, resolver_id: str, **kwargs: Any
) -> Iterable["_models.PolicyContract"]:
"""Get the list of policy configuration at the GraphQL API Resolver level.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param service_name: The name of the API Management service. Required.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number. Required.
:type api_id: str
:param resolver_id: Resolver identifier within a GraphQL API. Must be unique in the current API
Management service instance. Required.
:type resolver_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PolicyContract or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.apimanagement.models.PolicyContract]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PolicyCollection] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resolver_request(
resource_group_name=resource_group_name,
service_name=service_name,
api_id=api_id,
resolver_id=resolver_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_resolver.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PolicyCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_by_resolver.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/resolvers/{resolverId}/policies"
}
@distributed_trace
def get_entity_tag(
self,
resource_group_name: str,
service_name: str,
api_id: str,
resolver_id: str,
policy_id: Union[str, _models.PolicyIdName],
**kwargs: Any
) -> bool:
"""Gets the entity state (Etag) version of the GraphQL API resolver policy specified by its
identifier.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param service_name: The name of the API Management service. Required.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number. Required.
:type api_id: str
:param resolver_id: Resolver identifier within a GraphQL API. Must be unique in the current API
Management service instance. Required.
:type resolver_id: str
:param policy_id: The identifier of the Policy. "policy" Required.
:type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool or the result of cls(response)
:rtype: bool
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_get_entity_tag_request(
resource_group_name=resource_group_name,
service_name=service_name,
api_id=api_id,
resolver_id=resolver_id,
policy_id=policy_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get_entity_tag.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
if cls:
return cls(pipeline_response, None, response_headers)
return 200 <= response.status_code <= 299
get_entity_tag.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/resolvers/{resolverId}/policies/{policyId}"
}
@distributed_trace
def get(
self,
resource_group_name: str,
service_name: str,
api_id: str,
resolver_id: str,
policy_id: Union[str, _models.PolicyIdName],
format: Union[str, _models.PolicyExportFormat] = "xml",
**kwargs: Any
) -> _models.PolicyContract:
"""Get the policy configuration at the GraphQL API Resolver level.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param service_name: The name of the API Management service. Required.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number. Required.
:type api_id: str
:param resolver_id: Resolver identifier within a GraphQL API. Must be unique in the current API
Management service instance. Required.
:type resolver_id: str
:param policy_id: The identifier of the Policy. "policy" Required.
:type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
:param format: Policy Export Format. Known values are: "xml" and "rawxml". Default value is
"xml".
:type format: str or ~azure.mgmt.apimanagement.models.PolicyExportFormat
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyContract or the result of cls(response)
:rtype: ~azure.mgmt.apimanagement.models.PolicyContract
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PolicyContract] = kwargs.pop("cls", None)
request = build_get_request(
resource_group_name=resource_group_name,
service_name=service_name,
api_id=api_id,
resolver_id=resolver_id,
policy_id=policy_id,
subscription_id=self._config.subscription_id,
format=format,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
deserialized = self._deserialize("PolicyContract", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
get.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/resolvers/{resolverId}/policies/{policyId}"
}
@overload
def create_or_update(
self,
resource_group_name: str,
service_name: str,
api_id: str,
resolver_id: str,
policy_id: Union[str, _models.PolicyIdName],
parameters: _models.PolicyContract,
if_match: Optional[str] = None,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyContract:
"""Creates or updates policy configuration for the GraphQL API Resolver level.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param service_name: The name of the API Management service. Required.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number. Required.
:type api_id: str
:param resolver_id: Resolver identifier within a GraphQL API. Must be unique in the current API
Management service instance. Required.
:type resolver_id: str
:param policy_id: The identifier of the Policy. "policy" Required.
:type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
:param parameters: The policy contents to apply. Required.
:type parameters: ~azure.mgmt.apimanagement.models.PolicyContract
:param if_match: ETag of the Entity. Not required when creating an entity, but required when
updating an entity. Default value is None.
:type if_match: str
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyContract or the result of cls(response)
:rtype: ~azure.mgmt.apimanagement.models.PolicyContract
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def create_or_update(
self,
resource_group_name: str,
service_name: str,
api_id: str,
resolver_id: str,
policy_id: Union[str, _models.PolicyIdName],
parameters: IO,
if_match: Optional[str] = None,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PolicyContract:
"""Creates or updates policy configuration for the GraphQL API Resolver level.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param service_name: The name of the API Management service. Required.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number. Required.
:type api_id: str
:param resolver_id: Resolver identifier within a GraphQL API. Must be unique in the current API
Management service instance. Required.
:type resolver_id: str
:param policy_id: The identifier of the Policy. "policy" Required.
:type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
:param parameters: The policy contents to apply. Required.
:type parameters: IO
:param if_match: ETag of the Entity. Not required when creating an entity, but required when
updating an entity. Default value is None.
:type if_match: str
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyContract or the result of cls(response)
:rtype: ~azure.mgmt.apimanagement.models.PolicyContract
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def create_or_update(
self,
resource_group_name: str,
service_name: str,
api_id: str,
resolver_id: str,
policy_id: Union[str, _models.PolicyIdName],
parameters: Union[_models.PolicyContract, IO],
if_match: Optional[str] = None,
**kwargs: Any
) -> _models.PolicyContract:
"""Creates or updates policy configuration for the GraphQL API Resolver level.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param service_name: The name of the API Management service. Required.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number. Required.
:type api_id: str
:param resolver_id: Resolver identifier within a GraphQL API. Must be unique in the current API
Management service instance. Required.
:type resolver_id: str
:param policy_id: The identifier of the Policy. "policy" Required.
:type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
:param parameters: The policy contents to apply. Is either a PolicyContract type or a IO type.
Required.
:type parameters: ~azure.mgmt.apimanagement.models.PolicyContract or IO
:param if_match: ETag of the Entity. Not required when creating an entity, but required when
updating an entity. Default value is None.
:type if_match: str
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyContract or the result of cls(response)
:rtype: ~azure.mgmt.apimanagement.models.PolicyContract
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.PolicyContract] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PolicyContract")
request = build_create_or_update_request(
resource_group_name=resource_group_name,
service_name=service_name,
api_id=api_id,
resolver_id=resolver_id,
policy_id=policy_id,
subscription_id=self._config.subscription_id,
if_match=if_match,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
if response.status_code == 200:
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
deserialized = self._deserialize("PolicyContract", pipeline_response)
if response.status_code == 201:
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
deserialized = self._deserialize("PolicyContract", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized # type: ignore
create_or_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/resolvers/{resolverId}/policies/{policyId}"
}
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
service_name: str,
api_id: str,
resolver_id: str,
policy_id: Union[str, _models.PolicyIdName],
if_match: str,
**kwargs: Any
) -> None:
"""Deletes the policy configuration at the GraphQL Api Resolver.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param service_name: The name of the API Management service. Required.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number. Required.
:type api_id: str
:param resolver_id: Resolver identifier within a GraphQL API. Must be unique in the current API
Management service instance. Required.
:type resolver_id: str
:param policy_id: The identifier of the Policy. "policy" Required.
:type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
:param if_match: ETag of the Entity. ETag should match the current entity state from the header
response of the GET request or it should be * for unconditional update. Required.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_delete_request(
resource_group_name=resource_group_name,
service_name=service_name,
api_id=api_id,
resolver_id=resolver_id,
policy_id=policy_id,
subscription_id=self._config.subscription_id,
if_match=if_match,
api_version=api_version,
template_url=self.delete.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/resolvers/{resolverId}/policies/{policyId}"
}
| [
"[email protected]"
] | |
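# A minimal sketch of calling the generated delete operation above through the
# ApiManagement client. The operations-group attribute name
# (graph_ql_api_resolver_policy) and every resource name below are illustrative
# assumptions, not values taken from the file above; if_match="*" requests an
# unconditional delete as described in the docstring.
from azure.identity import DefaultAzureCredential
from azure.mgmt.apimanagement import ApiManagementClient

client = ApiManagementClient(DefaultAzureCredential(), subscription_id="<subscription-id>")
client.graph_ql_api_resolver_policy.delete(
    resource_group_name="my-rg",
    service_name="my-apim",
    api_id="my-api",
    resolver_id="my-resolver",
    policy_id="policy",
    if_match="*",
)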
94b0a380ce4543b547a5176caef0e9ce5901f3ca | ad71b2aaab2bf1127f40fef008ac6f6d1334c32c | /share/rpcauth/rpcauth.py | 219286929731362f0553450cf62c1d98e5fd84b4 | [
"MIT"
] | permissive | minblock/carpaticoin | 00eb755770f370d54d73ae9b227e4d4bbd60babb | f65cf89970b36a073b49435a3833a2a83a7f2145 | refs/heads/master | 2021-05-22T01:38:29.187393 | 2020-04-04T04:41:16 | 2020-04-04T04:41:16 | 252,909,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,579 | py | #!/usr/bin/env python3
# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from argparse import ArgumentParser
from base64 import urlsafe_b64encode
from binascii import hexlify
from getpass import getpass
from os import urandom
import hmac
def generate_salt(size):
"""Create size byte hex salt"""
return hexlify(urandom(size)).decode()
def generate_password():
"""Create 32 byte b64 password"""
return urlsafe_b64encode(urandom(32)).decode('utf-8')
def password_to_hmac(salt, password):
m = hmac.new(bytearray(salt, 'utf-8'), bytearray(password, 'utf-8'), 'SHA256')
return m.hexdigest()
def main():
parser = ArgumentParser(description='Create login credentials for a JSON-RPC user')
parser.add_argument('username', help='the username for authentication')
parser.add_argument('password', help='leave empty to generate a random password or specify "-" to prompt for password', nargs='?')
args = parser.parse_args()
if not args.password:
args.password = generate_password()
elif args.password == '-':
args.password = getpass()
# Create 16 byte hex salt
salt = generate_salt(16)
password_hmac = password_to_hmac(salt, args.password)
print('String to be appended to carpaticoin.conf:')
print('rpcauth={0}:{1}${2}'.format(args.username, salt, password_hmac))
print('Your password:\n{0}'.format(args.password))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
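# A minimal companion sketch (illustrative, not part of the script above) showing how a
# server could verify the emitted rpcauth line: re-run HMAC-SHA256 over the supplied
# password with the stored salt and compare digests in constant time.
import hmac

def verify_rpcauth(rpcauth_line, password):
    # rpcauth_line looks like: rpcauth=<username>:<salt>$<hmac_hex>
    username, rest = rpcauth_line.split('=', 1)[1].split(':', 1)
    salt, expected = rest.split('$', 1)
    computed = hmac.new(bytearray(salt, 'utf-8'),
                        bytearray(password, 'utf-8'), 'SHA256').hexdigest()
    return username, hmac.compare_digest(computed, expected)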
6ab3e1a39cd93671912027393c5a68e9026211cb | 7941390ad02fca9f8c66ceaf1d71a9fd0815f50e | /simple_NER/annotators/remote/allenai.py | b54499e860da539887f6b0098ac177a14e67b49b | [
"MIT"
] | permissive | msgpo/simple_NER | 08cde36758f1d97560c3db9e36918a7e4abe08dd | 5eaed615d9075d879e4b2af461f2e99acc0f9e68 | refs/heads/master | 2022-04-19T13:07:56.507469 | 2020-04-12T18:43:42 | 2020-04-12T18:43:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,620 | py | from simple_NER.annotators import NERWrapper
from simple_NER import Entity
import requests
def allen_NER(text, host):
url = host + "named-entity-recognition"
data = {"sentence": text}
return requests.post(url, json=data).json()
class AllenNlpNER(NERWrapper):
def __init__(self, host="http://demo.allennlp.org/predict/"):
super().__init__()
self.host = host
self.add_detector(self.annotate)
def annotate(self, text):
res = allen_NER(text, self.host)
tags = res["tags"]
words = res["words"]
for idx, tag in enumerate(tags):
if tag != 'O':
yield Entity(words[idx], tag, source_text=text)
if __name__ == "__main__":
ner = AllenNlpNER()
ents = [r for r in
ner.extract_entities("Lisbon is the capital of Portugal")]
assert ents[0].as_json() == {'confidence': 1,
'data': {},
'entity_type': 'U-LOC',
'rules': [],
'source_text': 'Lisbon is the capital of Portugal',
'spans': [(0, 6)],
'value': 'Lisbon'}
assert ents[1].as_json() == {'confidence': 1,
'data': {},
'entity_type': 'U-LOC',
'rules': [],
'source_text': 'Lisbon is the capital of Portugal',
'spans': [(25, 33)],
'value': 'Portugal'}
| [
"[email protected]"
] | |
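# Illustrative sketch of the response contract assumed by annotate() above: the remote
# endpoint returns JSON shaped like {"words": [...], "tags": [...]} with BILOU-style
# tags, and everything tagged 'O' is skipped. The sample payload is made up.
sample_response = {
    "words": ["Lisbon", "is", "the", "capital", "of", "Portugal"],
    "tags": ["U-LOC", "O", "O", "O", "O", "U-LOC"],
}
entities = [(word, tag)
            for word, tag in zip(sample_response["words"], sample_response["tags"])
            if tag != "O"]
# entities == [("Lisbon", "U-LOC"), ("Portugal", "U-LOC")]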
3dd267f794d2b0b929fd7ea3529b59d9507ba38a | a3cc7286d4a319cb76f3a44a593c4a18e5ddc104 | /lib/surface/app/__init__.py | 9826abe1d2a81963769b6af01ae5ebf38641f8f6 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | jordanistan/Google-Cloud-SDK | f2c6bb7abc2f33b9dfaec5de792aa1be91154099 | 42b9d7914c36a30d1e4b84ae2925df7edeca9962 | refs/heads/master | 2023-09-01T01:24:53.495537 | 2023-08-22T01:12:23 | 2023-08-22T01:12:23 | 127,072,491 | 0 | 1 | NOASSERTION | 2023-08-22T01:12:24 | 2018-03-28T02:31:19 | Python | UTF-8 | Python | false | false | 2,758 | py | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The gcloud app group."""
from googlecloudsdk.calliope import base
DETAILED_HELP = {
'brief': 'Manage your App Engine deployments.',
'DESCRIPTION': """
The gcloud app command group lets you deploy and manage your Google App
Engine apps. These commands replace their equivalents in the appcfg
tool.
App Engine is a platform for building scalable web applications
and mobile backends. App Engine provides you with built-in services and
APIs such as NoSQL datastores, memcache, and a user authentication API,
common to most applications.
More information on App Engine can be found here:
https://cloud.google.com/appengine and detailed documentation can be
found here: https://cloud.google.com/appengine/docs/
""",
'EXAMPLES': """\
To run your app locally in the development application server
to simulate your application running in production App Engine with
sandbox restrictions and services provided by App Engine SDK libraries,
use the `dev_appserver.py` command and your app's `app.yaml`
configuration file to run:
$ dev_appserver.py ~/my_app/app.yaml
For an in-depth look into using the local development server, follow
          this guide: https://cloud.google.com/appengine/docs/standard/python/tools/using-local-server.
To deploy the code and configuration of your app to the App Engine
server, run:
$ {command} deploy ~/my_app/app.yaml
To list all versions of all services of your existing deployments, run:
$ {command} versions list
To generate all relevant config files for `~/my_app` (or emit an error
message if the directory contents are not recognized), run:
$ {command} gen-config ~/my_app
"""
}
@base.ReleaseTracks(base.ReleaseTrack.ALPHA,
base.ReleaseTrack.BETA,
base.ReleaseTrack.GA)
class AppengineGA(base.Group):
def Filter(self, context, args):
del context, args
base.DisableUserProjectQuota()
AppengineGA.detailed_help = DETAILED_HELP
| [
"[email protected]"
] | |
541e28ec93c85cc1adc61eecd87bdde2a641136b | c91d029b59f4e6090a523bf571b3094e09852258 | /src/utils/middlewares.py | 64ca375d393cdaff4d003588764a00f79181c0a1 | [
"MIT"
] | permissive | anselmobd/fo2 | d51b63ebae2541b00af79448ede76b02638c41f0 | 8e7f8f3d9a296c7da39d0faf38a266e9c6c162ab | refs/heads/master | 2023-08-31T19:59:33.964813 | 2023-08-31T19:50:53 | 2023-08-31T19:50:53 | 92,856,677 | 1 | 0 | MIT | 2023-04-21T21:50:46 | 2017-05-30T17:04:27 | Python | UTF-8 | Python | false | false | 2,927 | py | import re
import threading
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import redirect
from geral.functions import is_alternativa
from utils.functions import get_client_ip
request_cfg = threading.local()
try:
from django.utils.deprecation import MiddlewareMixin
except ImportError:
MiddlewareMixin = object
from .classes import LoggedInUser, AcessoInterno
class LoggedInUserMiddleware(MiddlewareMixin):
'''
Insert this middleware after
django.contrib.auth.middleware.AuthenticationMiddleware
'''
def process_request(self, request):
'''
        Return None to let the request continue.
'''
logged_in_user = LoggedInUser()
logged_in_user.set_user(request)
return None
class NeedToLoginOrLocalMiddleware(object):
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
user_ip = get_client_ip(request)
authenticated_by_ip = False
for ip in settings.N2LOL_ALLOWED_IP_BLOCKS:
if re.compile(ip).match(user_ip) is not None:
authenticated_by_ip = True
break
acesso_interno = AcessoInterno()
acesso_interno.set_interno(authenticated_by_ip)
acesso_interno.set_ip(user_ip)
if request.user.is_authenticated:
return self.get_response(request)
if authenticated_by_ip:
return self.get_response(request)
user_url = request.META['PATH_INFO']
for url in settings.N2LOL_ALLOWED_URLS:
if re.compile(url).match(user_url) is not None:
return self.get_response(request)
return redirect(settings.N2LOL_REDIRECT)
class AlterRouterMiddleware:
"""
Based on
https://gist.github.com/gijzelaerr/7a3130c494215a0dd9b2/
    The alternative DB router middleware.
    Before the view runs, it stores some context from the request in thread-local
    storage. After the view returns, it deletes it.
In between, any database operation will call the router, which checks for
the thread local storage and returns an appropriate database alias.
Add this to your middleware, for example:
MIDDLEWARE += ['utils.middlewares.AlterRouterMiddleware']
"""
def __init__(self, get_response):
self.get_response = get_response
# One-time configuration and initialization.
def __call__(self, request):
# Code to be executed for each request before
# the view (and later middleware) are called.
request_cfg.alter_db = is_alternativa(request)
request.alter_db = request_cfg.alter_db
response = self.get_response(request)
# Code to be executed for each request/response after
# the view is called.
if hasattr(request_cfg, 'alter_db'):
del request_cfg.alter_db
return response
| [
"[email protected]"
] | |
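# A minimal sketch of the database router that AlterRouterMiddleware above is meant to
# pair with (modelled on the gist referenced in its docstring). The alias names
# 'default' and 'alternativa' are illustrative assumptions.
from utils.middlewares import request_cfg

class AlterRouter:
    def _alias(self):
        return 'alternativa' if getattr(request_cfg, 'alter_db', False) else 'default'

    def db_for_read(self, model, **hints):
        return self._alias()

    def db_for_write(self, model, **hints):
        return self._alias()

# settings.py would then list it in DATABASE_ROUTERS, e.g. ['utils.routers.AlterRouter'].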
f3e46e8de53108b8175863fac2003556b51fdbdc | 5dc393ffb3d65094d2c4f6bc8b9980e2fc167670 | /pandas/stats/tests/test_fama_macbeth.py | f48dde20f138ac2a2f78bf479e668b579e96ac1f | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | JWCornV/pandas | 913db2a34cb9f9820f986412e9c3cf868ecef24d | 6078fba9410918baa486ca008cc9e3ba066c03ec | refs/heads/master | 2020-12-25T10:14:13.384789 | 2012-06-27T17:10:54 | 2012-06-27T17:10:54 | 4,813,052 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,022 | py | from pandas import DataFrame, Panel
from pandas.stats.api import fama_macbeth
from common import assert_almost_equal, BaseTest
import numpy as np
class TestFamaMacBeth(BaseTest):
def testFamaMacBethRolling(self):
# self.checkFamaMacBethExtended('rolling', self.panel_x, self.panel_y,
# nw_lags_beta=2)
# df = DataFrame(np.random.randn(50, 10))
x = dict((k, DataFrame(np.random.randn(50, 10))) for k in 'abcdefg')
x = Panel.from_dict(x)
y = (DataFrame(np.random.randn(50, 10)) +
DataFrame(0.01 * np.random.randn(50, 10)))
self.checkFamaMacBethExtended('rolling', x, y, nw_lags_beta=2)
self.checkFamaMacBethExtended('expanding', x, y, nw_lags_beta=2)
def checkFamaMacBethExtended(self, window_type, x, y, **kwds):
window = 25
result = fama_macbeth(y=y, x=x, window_type=window_type, window=window,
**kwds)
self._check_stuff_works(result)
index = result._index
time = len(index)
for i in xrange(time - window + 1):
if window_type == 'rolling':
start = index[i]
else:
start = index[0]
end = index[i + window - 1]
x2 = {}
for k, v in x.iteritems():
x2[k] = v.truncate(start, end)
y2 = y.truncate(start, end)
reference = fama_macbeth(y=y2, x=x2, **kwds)
assert_almost_equal(reference._stats, result._stats[:, i])
static = fama_macbeth(y=y2, x=x2, **kwds)
self._check_stuff_works(static)
def _check_stuff_works(self, result):
# does it work?
attrs = ['mean_beta', 'std_beta', 't_stat']
for attr in attrs:
getattr(result, attr)
# does it work?
result.summary
if __name__ == '__main__':
import nose
nose.runmodule(argv=[__file__,'-vvs','-x','--pdb', '--pdb-failure'],
exit=False)
| [
"[email protected]"
] | |
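# For context, a compact sketch of the estimator this test exercises: Fama-MacBeth runs a
# cross-sectional OLS per time period, then reports the time-series mean, standard
# deviation and t-statistic of the period-by-period betas. Shapes and names below are
# illustrative, not the pandas internals.
import numpy as np

def fama_macbeth_sketch(y, x):
    # y: (T, N) asset returns; x: (T, N, K) factor exposures
    T = y.shape[0]
    betas = np.empty((T, x.shape[2]))
    for t in range(T):
        betas[t] = np.linalg.lstsq(x[t], y[t], rcond=None)[0]
    mean_beta = betas.mean(axis=0)
    std_beta = betas.std(axis=0, ddof=1)
    t_stat = mean_beta / (std_beta / np.sqrt(T))
    return mean_beta, std_beta, t_stat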
7dbb8959b233092833590760e034f6ebe7360014 | 1065a2782e4947b5bf14ec4536e4ad7addc7aec3 | /strategy/cryptoalpha/casubc.py | f4bb09a5cb12cc696d58f6af909445370b6e56b8 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | Johannesduvenage/siis | 0bf6875d4a5f3638cadb01ed5541aab29ba1d77a | 57e537cf9b6a71c8ad0b3bb0759772d126496a17 | refs/heads/master | 2020-09-10T21:51:56.814014 | 2019-11-13T23:57:34 | 2019-11-13T23:57:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,900 | py | # @date 2018-08-24
# @author Frederic SCHERMA
# @license Copyright (c) 2018 Dream Overflow
# Crypto Alpha strategy, sub-strategy C.
from strategy.indicator import utils
from strategy.strategysignal import StrategySignal
from monitor.streamable import StreamMemberFloatSerie, StreamMemberSerie, StreamMemberFloatBarSerie, StreamMemberOhlcSerie
from .casub import CryptoAlphaStrategySub
import logging
logger = logging.getLogger('siis.strategy.cryptoalpha')
class CryptoAlphaStrategySubC(CryptoAlphaStrategySub):
"""
Crypto Alpha strategy, sub-strategy C.
"""
def __init__(self, strategy_trader, params):
super().__init__(strategy_trader, params)
self.rsi_low = params['constants']['rsi_low']
self.rsi_high = params['constants']['rsi_high']
def process(self, timestamp):
candles = self.get_candles()
if len(candles) < self.depth:
            # not enough samples
return
last_timestamp = candles[-1].timestamp
prices = self.price.compute(last_timestamp, candles)
volumes = self.volume.compute(last_timestamp, candles)
signal = self.process1(timestamp, last_timestamp, candles, prices, volumes)
        # avoid duplicate signals
if signal and self.need_signal:
# self.last_signal = signal
if (self.last_signal and (signal.signal == self.last_signal.signal) and
(signal.dir == self.last_signal.dir) and
(signal.base_time() == self.last_signal.base_time())): # or (signal.ts - self.last_signal.ts) < (self.tf * 0.5):
# same base time avoid multiple entries on the same candle
signal = None
else:
                # retain the signal only when it is valid
self.last_signal = signal
self.complete(candles)
return signal
def process1(self, timestamp, last_timestamp, candles, prices, volumes):
signal = None
# volume sma, increase signal strength when volume increase over its SMA
# volume_sma = utils.MM_n(self.depth-1, self.volume.volumes)
rsi_30_70 = 0 # 1 <30, -1 >70
rsi_40_60 = 0 # 1 if RSI in 40-60
stochrsi_20_80 = 0 # 1 <20, -1 >80
stochrsi_40_60 = 0 # 1 if stochRSI in 40-60
volume_signal = 0
ema_sma_cross = 0
ema_sma_height = 0
if self.rsi:
self.rsi.compute(last_timestamp, prices)
rsi = self.rsi.last
if self.rsi.last < self.rsi_low:
rsi_30_70 = 1.0
elif self.rsi.last > self.rsi_high:
rsi_30_70 = -1.0
if self.rsi.last > 0.4 and self.rsi.last < 0.6:
rsi_40_60 = 1
if self.stochrsi:
self.stochrsi.compute(last_timestamp, prices)
if self.stochrsi.last_k < 0.2:
stochrsi_20_80 = 1.0
elif self.stochrsi.last_k > 0.8:
stochrsi_20_80 = -1.0
if self.stochrsi.last_k > 0.4 and self.stochrsi.last_k < 0.6:
stochrsi_40_60 = 1
# if self.volume.last > volume_sma[-1]:
# volume_signal = 1
# elif self.volume.last < volume_sma[-1]:
# volume_signal = -1
if self.sma and self.ema:
self.sma.compute(last_timestamp, prices)
self.ema.compute(last_timestamp, prices)
# ema over sma crossing
ema_sma_cross = utils.cross((self.ema.prev, self.sma.prev), (self.ema.last, self.sma.last))
if self.ema.last > self.sma.last:
ema_sma_height = 1
elif self.ema.last < self.sma.last:
ema_sma_height = -1
if self.atr:
if self.last_closed:
self.atr.compute(last_timestamp, self.price.high, self.price.low, self.price.close)
if self.pivotpoint:
if self.pivotpoint.compute_at_close and self.last_closed:
self.pivotpoint.compute(last_timestamp, self.price.open, self.price.high, self.price.low, self.price.close)
return signal
def setup_streamer(self, streamer):
streamer.add_member(StreamMemberSerie('begin'))
streamer.add_member(StreamMemberOhlcSerie('ohlc'))
streamer.add_member(StreamMemberFloatSerie('price', 0))
streamer.add_member(StreamMemberFloatBarSerie('volume', 1))
streamer.add_member(StreamMemberFloatSerie('rsi-low', 2))
streamer.add_member(StreamMemberFloatSerie('rsi-high', 2))
streamer.add_member(StreamMemberFloatSerie('rsi', 2))
streamer.add_member(StreamMemberFloatSerie('stochrsi-low', 3))
streamer.add_member(StreamMemberFloatSerie('stochrsi-high', 3))
streamer.add_member(StreamMemberFloatSerie('stochrsi-k', 3))
streamer.add_member(StreamMemberFloatSerie('stochrsi-d', 3))
streamer.add_member(StreamMemberFloatSerie('sma', 0))
streamer.add_member(StreamMemberFloatSerie('ema', 0))
streamer.add_member(StreamMemberFloatSerie('hma', 0))
streamer.add_member(StreamMemberFloatSerie('vwma', 0))
streamer.add_member(StreamMemberFloatSerie('perf', 3))
streamer.add_member(StreamMemberSerie('end'))
streamer.next_timestamp = self.next_timestamp
def stream(self, streamer):
delta = min(int((self.next_timestamp - streamer.next_timestamp) / self.tf) + 1, len(self.price.prices))
for i in range(-delta, 0, 1):
ts = self.price.timestamp[i]
streamer.member('begin').update(ts)
streamer.member('ohlc').update((self.price.open[i], self.price.high[i], self.price.low[i], self.price.close[i]), ts)
streamer.member('price').update(self.price.prices[i], ts)
streamer.member('volume').update(self.volume.volumes[i], ts)
streamer.member('rsi-low').update(self.rsi_low, ts)
streamer.member('rsi-high').update(self.rsi_high, ts)
streamer.member('rsi').update(self.rsi.rsis[i], ts)
# streamer.member('stochrsi-low').update(20, ts)
# streamer.member('stochrsi-high').update(80, ts)
# streamer.member('stochrsi-k').update(self.stochrsi.stochrsis[i], ts)
# streamer.member('stochrsi-d').update(self.stochrsi.stochrsis[i], ts)
streamer.member('sma').update(self.sma.smas[i], ts)
streamer.member('ema').update(self.ema.emas[i], ts)
# streamer.member('hma').update(self.hma.hmas[i], ts)
# streamer.member('vwma').update(self.vwma.vwmas[i], ts)
streamer.member('perf').update(self.strategy_trader._stats['perf']*100, ts)
streamer.member('end').update(ts)
# push per frame
streamer.push()
streamer.next_timestamp = self.next_timestamp
| [
"[email protected]"
] | |
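# A standalone sketch of the RSI thresholding used in process1() above, to make the
# scoring convention explicit: +1 below the low threshold (oversold), -1 above the high
# threshold (overbought), 0 otherwise. The default thresholds are placeholders; the real
# values come from params['constants'] and are expressed on a 0..1 scale.
def rsi_score(rsi_value, rsi_low=0.3, rsi_high=0.7):
    if rsi_value < rsi_low:
        return 1.0
    if rsi_value > rsi_high:
        return -1.0
    return 0.0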
901ed40757453f72482e04347a567f48a4452388 | b29acb2e230b3cf2f8be070850c34ed5d62dc80c | /Python/YPS/10/Sample1.py | ff99a7a4d11b7f13bd9fd07db5ef310bd2a798dc | [] | no_license | MasatakaShibataSS/lesson | be6e3557c52c6157b303be268822cad613a7e0f7 | 4f3f81ba0161b820410e2a481b63a999d0d4338c | refs/heads/master | 2020-06-17T13:42:08.383167 | 2019-11-11T07:23:14 | 2019-11-11T07:23:14 | 195,940,605 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | f = open("Sample.txt", "w")
f.write("こんにちは\n")
f.write("さようなら\n")
f.close()
| [
"[email protected]"
] | |
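# The matching read step for the lesson file above; the explicit UTF-8 encoding is an
# assumption, chosen because the text written is Japanese.
f = open("Sample.txt", "r", encoding="utf-8")
print(f.read())
f.close()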
3fc5667310c4f116f9078ae8b07b622e7b91b154 | bbb5447fa5c39c050f23e8b62a91df052818570d | /modules/ibm_app_domain_shared_info.py | e47b4b30b53100306ee9d568fa4aca3a21e41555 | [] | no_license | Trickster4/ansible-collection-ibm | b28b79174b1f1b3f0dcba09e4888396e16b945bd | 14455103b7cb4845e6cfa3803cce7b2a0546af03 | refs/heads/master | 2021-01-03T22:33:46.082228 | 2020-02-09T19:49:09 | 2020-02-11T04:55:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,417 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Mozilla Public License, version 2.0
1. Definitions
1.1. “Contributor”
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. “Contributor Version”
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor’s Contribution.
1.3. “Contribution”
means Covered Software of a particular Contributor.
1.4. “Covered Software”
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
1.5. “Incompatible With Secondary Licenses”
means
a. that the initial Contributor has attached the notice described in
Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of version
1.1 or earlier of the License, but not also under the terms of a
Secondary License.
1.6. “Executable Form”
means any form of the work other than Source Code Form.
1.7. “Larger Work”
means a work that combines Covered Software with other material, in a separate
file or files, that is not Covered Software.
1.8. “License”
means this document.
1.9. “Licensable”
means having the right to grant, to the maximum extent possible, whether at the
time of the initial grant or subsequently, any and all of the rights conveyed by
this License.
1.10. “Modifications”
means any of the following:
a. any file in Source Code Form that results from an addition to, deletion
from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. “Patent Claims” of a Contributor
means any patent claim(s), including without limitation, method, process,
and apparatus claims, in any patent Licensable by such Contributor that
would be infringed, but for the grant of the License, by the making,
using, selling, offering for sale, having made, import, or transfer of
either its Contributions or its Contributor Version.
1.12. “Secondary License”
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
1.13. “Source Code Form”
means the form of the work preferred for making modifications.
1.14. “You” (or “Your”)
means an individual or a legal entity exercising rights under this
License. For legal entities, “You” includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
definition, “control” means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or as
part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell, offer for
sale, have made, import, and otherwise transfer either its Contributions
or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution become
effective for each Contribution on the date the Contributor first distributes
such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under this
License. No additional rights or licenses will be implied from the distribution
or licensing of Covered Software under this License. Notwithstanding Section
2.1(b) above, no patent license is granted by a Contributor:
a. for any code that a Contributor has removed from Covered Software; or
b. for infringements caused by: (i) Your and any other third party’s
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
c. under Patent Claims infringed by Covered Software in the absence of its
Contributions.
This License does not grant any rights in the trademarks, service marks, or
logos of any Contributor (except as may be necessary to comply with the
notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this License
(see Section 10.2) or under the terms of a Secondary License (if permitted
under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its Contributions
are its original creation(s) or it has sufficient rights to grant the
rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under applicable
copyright doctrines of fair use, fair dealing, or other equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under the
terms of this License. You must inform recipients that the Source Code Form
of the Covered Software is governed by the terms of this License, and how
they can obtain a copy of this License. You may not attempt to alter or
restrict the recipients’ rights in the Source Code Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the
Executable Form how they can obtain a copy of such Source Code Form by
reasonable means in a timely manner, at a charge no more than the cost
of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this License,
or sublicense it under different terms, provided that the license for
the Executable Form does not attempt to limit or alter the recipients’
rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for the
Covered Software. If the Larger Work is a combination of Covered Software
with a work governed by one or more Secondary Licenses, and the Covered
Software is not Incompatible With Secondary Licenses, this License permits
You to additionally distribute such Covered Software under the terms of
such Secondary License(s), so that the recipient of the Larger Work may, at
their option, further distribute the Covered Software under the terms of
either this License or such Secondary License(s).
3.4. Notices
You may not remove or alter the substance of any license notices (including
copyright notices, patent notices, disclaimers of warranty, or limitations
of liability) contained within the Source Code Form of the Covered
Software, except that You may alter any license notices to the extent
required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on behalf
of any Contributor. You must make it absolutely clear that any such
warranty, support, indemnity, or liability obligation is offered by You
alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute, judicial
order, or regulation then You must: (a) comply with the terms of this License
to the maximum extent possible; and (b) describe the limitations and the code
they affect. Such description must be placed in a text file included with all
distributions of the Covered Software under this License. Except to the
extent prohibited by statute or regulation, such description must be
sufficiently detailed for a recipient of ordinary skill to be able to
understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if You
fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor
are reinstated (a) provisionally, unless and until such Contributor
explicitly and finally terminates Your grants, and (b) on an ongoing basis,
if such Contributor fails to notify You of the non-compliance by some
reasonable means prior to 60 days after You have come back into compliance.
Moreover, Your grants from a particular Contributor are reinstated on an
ongoing basis if such Contributor notifies You of the non-compliance by
some reasonable means, this is the first time You have received notice of
non-compliance with this License from such Contributor, and You become
compliant prior to 30 days after Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions, counter-claims,
and cross-claims) alleging that a Contributor Version directly or
indirectly infringes any patent, then the rights granted to You by any and
all Contributors for the Covered Software under Section 2.1 of this License
shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
license agreements (excluding distributors and resellers) which have been
validly granted by You or Your distributors under this License prior to
termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an “as is” basis, without
warranty of any kind, either expressed, implied, or statutory, including,
without limitation, warranties that the Covered Software is free of defects,
merchantable, fit for a particular purpose or non-infringing. The entire
risk as to the quality and performance of the Covered Software is with You.
Should any Covered Software prove defective in any respect, You (not any
Contributor) assume the cost of any necessary servicing, repair, or
correction. This disclaimer of warranty constitutes an essential part of this
License. No use of any Covered Software is authorized under this License
except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any
character including, without limitation, damages for lost profits, loss of
goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from such
party’s negligence to the extent applicable law prohibits such limitation.
Some jurisdictions do not allow the exclusion or limitation of incidental or
consequential damages, so this exclusion and limitation may not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts of
a jurisdiction where the defendant maintains its principal place of business
and such litigation shall be governed by laws of that jurisdiction, without
reference to its conflict-of-law provisions. Nothing in this Section shall
prevent a party’s ability to bring cross-claims or counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject matter
hereof. If any provision of this License is held to be unenforceable, such
provision shall be reformed only to the extent necessary to make it
enforceable. Any law or regulation which provides that the language of a
contract shall be construed against the drafter shall not be used to construe
this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version of
the License under which You originally received the Covered Software, or
under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a modified
version of this License if you rename the license and remove any
references to the name of the license steward (except to note that such
modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the
terms of the Mozilla Public License, v.
2.0. If a copy of the MPL was not
distributed with this file, You can
obtain one at
http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file, then
You may include the notice in a location (such as a LICENSE file in a relevant
directory) where a recipient would be likely to look for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - “Incompatible With Secondary Licenses” Notice
This Source Code Form is “Incompatible
With Secondary Licenses”, as defined by
the Mozilla Public License, v. 2.0.
"""
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ibm_app_domain_shared_info
short_description: Retrieve IBM Cloud 'ibm_app_domain_shared' resource
version_added: "2.8"
description:
- Retrieve an IBM Cloud 'ibm_app_domain_shared' resource
requirements:
- IBM-Cloud terraform-provider-ibm v1.2.0
- Terraform v0.12.20
options:
name:
description:
- The name of the shared domain
required: True
type: str
ibmcloud_api_key:
description:
- The API Key used for authentification. This can also be provided
via the environment variable 'IC_API_KEY'.
required: True
ibmcloud_region:
description:
- Denotes which IBM Cloud region to connect to
default: us-south
required: False
author:
- Jay Carman (@jaywcarman)
'''
# Top level parameter keys required by Terraform module
TL_REQUIRED_PARAMETERS = [
('name', 'str'),
]
# All top level parameter keys supported by Terraform module
TL_ALL_PARAMETERS = [
'name',
]
# define available arguments/parameters a user can pass to the module
from ansible.module_utils.basic import env_fallback
module_args = dict(
name=dict(
required=True,
type='str'),
ibmcloud_api_key=dict(
type='str',
no_log=True,
fallback=(env_fallback, ['IC_API_KEY']),
required=True),
ibmcloud_region=dict(
type='str',
fallback=(env_fallback, ['IC_REGION']),
default='us-south')
)
def run_module():
from ansible.module_utils.basic import AnsibleModule
import ansible.module_utils.ibmcloud as ibmcloud
module = AnsibleModule(
argument_spec=module_args,
supports_check_mode=False
)
result = ibmcloud.ibmcloud_terraform(
resource_type='ibm_app_domain_shared',
tf_type='data',
parameters=module.params,
ibm_provider_version='1.2.0',
tl_required_params=TL_REQUIRED_PARAMETERS,
tl_all_params=TL_ALL_PARAMETERS)
if result['rc'] > 0:
module.fail_json(
msg=ibmcloud.Terraform.parse_stderr(result['stderr']), **result)
module.exit_json(**result)
def main():
run_module()
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
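# A hedged sketch of exercising a module like the one above outside a playbook, using
# Ansible's local-testing convention of passing an arguments file on the command line.
# The file name, domain name and API key below are illustrative assumptions.
import json
import subprocess

args = {"ANSIBLE_MODULE_ARGS": {"name": "example-shared-domain", "ibmcloud_api_key": "<api-key>"}}
with open("/tmp/args.json", "w") as fh:
    json.dump(args, fh)
subprocess.run(["python", "ibm_app_domain_shared_info.py", "/tmp/args.json"], check=False)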
ba6aeae64431208cbabea1456729c92c602f9921 | 386d5d4f8f102e701d02b326cd066f520e3dff9f | /ProjectApplication/project_core/migrations/0163_add_account_number.py | b5f637bb6f9f02103a1375f7fd3e3ed9b338b0fa | [
"MIT"
] | permissive | Swiss-Polar-Institute/project-application | ae2561c3ae2c1d5412d165d959ce2e5886135e0a | 7dc4a9f7e0f8d28c89977b85f99bc5e35ea77d43 | refs/heads/master | 2023-08-31T04:01:23.492272 | 2023-08-25T14:33:02 | 2023-08-25T14:33:02 | 206,330,401 | 7 | 5 | MIT | 2023-09-13T08:03:53 | 2019-09-04T13:49:39 | Python | UTF-8 | Python | false | false | 2,043 | py | # Generated by Django 3.2 on 2021-04-23 10:25
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('project_core', '0162_callpartfile_proposal_keywords_null'),
]
operations = [
migrations.AddField(
model_name='financialkey',
name='account_number',
field=models.IntegerField(help_text='Code use by the accounting department', null=True, unique=True),
),
migrations.AlterField(
model_name='call',
name='scientific_clusters_question',
field=models.BooleanField(default=False, help_text='True if the Research Cluster question is enabled'),
),
migrations.AlterField(
model_name='historicalcall',
name='scientific_clusters_question',
field=models.BooleanField(default=False, help_text='True if the Research Cluster question is enabled'),
),
migrations.AlterField(
model_name='proposalscientificcluster',
name='keywords',
field=models.ManyToManyField(help_text='Keywords that describe the research cluster', to='project_core.Keyword'),
),
migrations.AlterField(
model_name='proposalscientificcluster',
name='proposal',
field=models.ForeignKey(help_text='Proposal that this Research Cluster refers to', on_delete=django.db.models.deletion.PROTECT, to='project_core.proposal'),
),
migrations.AlterField(
model_name='proposalscientificcluster',
name='sub_pi',
field=models.ForeignKey(help_text='Main person of this research cluster', on_delete=django.db.models.deletion.PROTECT, to='project_core.personposition'),
),
migrations.AlterField(
model_name='proposalscientificcluster',
name='title',
field=models.CharField(help_text='Title of the research cluster', max_length=500),
),
]
| [
"[email protected]"
] | |
01f2b39e906fa6896ddad81b11c800af607781d7 | 652121d51e6ff25aa5b1ad6df2be7eb341683c35 | /examples/mouse_and_key_modifiers.py | 6d28b7351cebc4f69acb343c35eb8233fa6877a3 | [] | no_license | jgalaz84/eman2 | be93624f1c261048170b85416e517e5813992501 | 6d3a1249ed590bbc92e25fb0fc319e3ce17deb65 | refs/heads/master | 2020-04-25T18:15:55.870663 | 2015-06-05T20:21:44 | 2015-06-05T20:21:44 | 36,952,784 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,615 | py | from PyQt4 import QtCore, QtGui
class MouseAndKeyModifiers(QtGui.QWidget):
def __init__(self, parent = None):
QtGui.QWidget.__init__(self, parent)
layout = QtGui.QVBoxLayout()
label = QtGui.QLabel("Click here to test mouse buttons: Left, Right, Middle\nand keyboard modifiers: Ctrl, Alt, Shift, and Command (a Mac key)")
self.text_browser = QtGui.QTextBrowser()
layout.addWidget(label)
layout.addWidget(self.text_browser)
self.setLayout(layout)
def mousePressEvent(self, event):
self.text_browser.clear()
self.text_browser.append("Mouse press info...")
if event.buttons()&QtCore.Qt.LeftButton:
self.text_browser.append("Left Button")
if event.buttons()&QtCore.Qt.MidButton:
self.text_browser.append("Middle Button")
if event.buttons()&QtCore.Qt.RightButton:
self.text_browser.append("Right Button")
if event.modifiers()&QtCore.Qt.ShiftModifier:
self.text_browser.append("Shift Modifier")
if event.modifiers()&QtCore.Qt.ControlModifier:
#Apple/Command key on a Mac... NOT CONTROL KEY ON A MAC!
self.text_browser.append("Control Modifier")
if event.modifiers()&QtCore.Qt.AltModifier:
#Alt/Option key on a Mac. An EMAN convention is that Alt+Left click works like a middle click.
self.text_browser.append("Alt Modifier")
if event.modifiers()&QtCore.Qt.MetaModifier:
#Control Key on a Mac. A Mac convention is that Ctrl+Left Click works like a right click.
self.text_browser.append("Meta Modifier")
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
window = MouseAndKeyModifiers()
window.show()
sys.exit(app.exec_()) | [
"[email protected]"
] | |
b8e8df4165e5c68d730eab8b8fe69f808f78ca32 | bc08302533982d4a154f5615a2c8366f695234e5 | /work/tools/free_ip.py | 721bcc9f76e13c7c646d22000c17c96dd1e0352c | [] | no_license | rvfedorin/PythonDevelopment | bfd3cfb4ad22d72e2002d7430fa8821ea35717f6 | d6748189874b53b8357d5b3ff7d520ff0a93e15a | refs/heads/master | 2022-12-13T11:53:16.041737 | 2019-02-15T08:50:15 | 2019-02-15T08:50:15 | 122,050,190 | 0 | 0 | null | 2022-12-08T01:18:43 | 2018-02-19T10:57:37 | Python | UTF-8 | Python | false | false | 3,528 | py | def mark_used_ip(list_used_ip, list_all_ip):
for ip in list_all_ip:
if ip in list_used_ip:
position_ip = list_all_ip.index(ip)
list_all_ip[position_ip] = 'x'
def get_free_lan(list_ip_with_used):
free_lan = []
for lan in range(25, 33):
count_subnet = 2**(lan - 24)
count_ip_in_subnet = 2**(32 - lan)
start_ip = 0
end_ip = count_ip_in_subnet
for subnet in range(count_subnet):
if len(list_ip_with_used) >= end_ip and 'x' not in list_ip_with_used[start_ip:end_ip]:
free_lan.append(f'{list_ip_with_used[start_ip]}/{lan}')
all_ip_temp = [_ for _ in list_ip_with_used if _ not in list_ip_with_used[start_ip:end_ip]]
list_ip_with_used = all_ip_temp[:]
else:
start_ip += count_ip_in_subnet
end_ip += count_ip_in_subnet
if len(list_ip_with_used) == 0:
break
if len(list_ip_with_used) == 0:
break
return free_lan
def get_only_fourth_octet(list_ip):
list_octets = []
for i in list_ip:
octet = i.split('.')
list_octets.append(int(octet[3]))
lan = f'{octet[0]}.{octet[1]}.{octet[2]}.'
return list_octets, lan
def get_all_ip_in_lan(list_lan):
ip_of_all_lan = []
for lan in list_lan:
mask_lan = lan.split('/')
lan_ip = mask_lan[0].split('.')
for i in range(2**(32-int(mask_lan[1]))):
four_octet = int(lan_ip[3])+i
ip_of_all_lan.append(f'{lan_ip[0]}.{lan_ip[1]}.{lan_ip[2]}.{four_octet}')
return ip_of_all_lan
if __name__ == '__main__':
all_ip = []
for i in range(256):
all_ip.append(i)
x = (get_all_ip_in_lan(['172.30.86.164/30', '172.30.86.216/30', '172.30.86.152/30', '172.30.86.156/30',
'172.30.86.160/30', '172.30.86.144/30', '172.30.86.140/30', '172.30.86.136/30',
'172.30.86.120/30', '172.30.86.116/30', '172.30.86.88/30', '172.30.86.92/30',
'172.30.86.96/30', '172.30.86.80/30', '172.30.86.20/30', '172.30.86.184/30',
'172.30.86.196/30', '172.30.86.212/30', '172.30.86.220/30', '172.30.86.224/30',
'172.30.86.232/30', '172.30.86.236/30', '172.30.86.240/30', '172.30.86.248/30',
'172.30.86.252/30', '172.30.86.132/30', '172.30.86.44/30', '172.30.86.148/30',
'172.30.86.76/30', '172.30.86.48/30', '172.30.86.40/30', '172.30.86.84/30',
'172.30.86.36/30', '172.30.86.72/30', '172.30.86.104/30', '172.30.86.108/30',
'172.30.86.24/30', '172.30.86.228/30', '172.30.86.204/30', '172.30.86.0/30',
'172.30.86.4/30', '172.30.86.8/30', '172.30.86.12/30', '172.30.86.244/30',
'172.30.86.192/30', '172.30.86.124/30', '172.30.86.112/30', '172.30.86.60/30',
'172.30.86.208/30', '172.30.86.176/30', '172.30.86.68/30', '172.30.86.28/30',
'172.30.86.32/30', '172.30.86.56/30', '172.30.86.100/30', '172.30.86.168/29',
'172.30.86.200/30', '172.30.86.188/30', '172.30.86.180/30']))
list_used_ip = x
list_used_ip_octet, lan24 = get_only_fourth_octet(list_used_ip)
mark_used_ip(list_used_ip_octet, all_ip)
free = get_free_lan(all_ip)
for i in free:
print(f'{lan24}{i}')
| [
"[email protected]"
] | |
1c95e6322f01f2981b14f4584444c325432b8207 | 490ffe1023a601760ae7288e86723f0c6e366bba | /kolla-docker/patching/zun_compute_api/providerregion.py | 4dc91fc51a266902d9ef25f4c4c4b88fc506ef8c | [] | no_license | bopopescu/Cloud-User-Management | 89696a5ea5d2f95191327fbeab6c3e400bbfb2b8 | 390988bf4915a276c7bf8d96b62c3051c17d9e6e | refs/heads/master | 2022-11-19T10:09:36.662906 | 2018-11-07T20:28:31 | 2018-11-07T20:28:31 | 281,786,345 | 0 | 0 | null | 2020-07-22T21:26:07 | 2020-07-22T21:26:06 | null | UTF-8 | Python | false | false | 1,658 | py | def providerregion_update(self, context, container, *args):
if direct_action:
return self.manager.providerregion_update(context, container, *args)
else:
return self.rpcapi.providerregion_update(context, container, *args)
def providerregion_show(self, context, container, *args):
if direct_action:
return self.manager.providerregion_show(context, container)
else:
return self.rpcapi.providerregion_show(context, container)
def providerregion_create(self, context, new_providerregion, extra_spec,
requested_networks):
host_state = None
try:
host_state = {} # self._schedule_container(context, new_providerregion, extra_spec)
except Exception as exc:
# new_providerregion.status = consts.ERROR
# new_providerregion.status_reason = str(exc)
# new_providerregion.save(context)
return
if direct_action:
self.manager.providerregion_create(context, "", requested_networks, new_providerregion)
else:
self.rpcapi.providerregion_create(context, "", new_providerregion, "", requested_networks)
# self.rpcapi.providerregion_create(context, host_state['host'],
# new_providerregion, host_state['limits'],
# requested_networks)
def providerregion_delete(self, context, container, *args):
return self.manager.providerregion_delete(context, container, True)
# return self.rpcapi.providerregion_delete(context, container, *args)
| [
"[email protected]"
] | |
ea468b999d209aa5949f47fbf2a33213a78b306b | 4369c5a214f8c4fb1f8a286f72d57cfa9c3f02c7 | /geotrek/maintenance/migrations/0010_auto_20200228_1755.py | dfa45158e5acc1cee91f1e527738011a5ef40379 | [
"BSD-2-Clause"
] | permissive | GeotrekCE/Geotrek-admin | c13d251066e92359c26f22d185b8bd2e26e622ef | a91b75261a876be51ad2a693618629900bea6003 | refs/heads/master | 2023-08-21T12:45:25.586551 | 2023-08-09T12:28:33 | 2023-08-09T12:28:33 | 9,886,107 | 71 | 56 | BSD-2-Clause | 2023-09-13T09:40:33 | 2013-05-06T12:17:21 | Python | UTF-8 | Python | false | false | 1,121 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2020-02-28 16:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('maintenance', '0009_auto_20200211_1011'),
]
operations = [
migrations.AlterField(
model_name='intervention',
name='date_insert',
field=models.DateTimeField(auto_now_add=True, verbose_name='Insertion date'),
),
migrations.AlterField(
model_name='intervention',
name='date_update',
field=models.DateTimeField(auto_now=True, db_index=True, verbose_name='Update date'),
),
migrations.AlterField(
model_name='project',
name='date_insert',
field=models.DateTimeField(auto_now_add=True, verbose_name='Insertion date'),
),
migrations.AlterField(
model_name='project',
name='date_update',
field=models.DateTimeField(auto_now=True, db_index=True, verbose_name='Update date'),
),
]
| [
"[email protected]"
] | |
a67fd9139f1b1a1a5f55555ccfc4ea911006371a | f1e98def25f88d17c328c07d4052cd6c34c27707 | /app/accounts/migrations/0002_alter_account_is_active.py | c7dd55e323cc76df4802b6c243d6f44f3d2b3d94 | [
"MIT"
] | permissive | iyanuashiri/exchange-api | c7f1fd2257c3369de9c0b7bea6806c602f25662e | 86f7a4e9fb17f71888e6854510618876d1010c19 | refs/heads/main | 2023-05-30T22:57:46.901693 | 2021-06-13T21:59:22 | 2021-06-13T21:59:22 | 376,542,066 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 520 | py | # Generated by Django 3.2.4 on 2021-06-13 17:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='account',
name='is_active',
field=models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active'),
),
]
| [
"[email protected]"
] | |
34b10e72f10087aaa703323b4c4bb1e1fefdc4d2 | 5b7d5723b84f5011965aba18ebf0c080253b2fcb | /posts/views/feed.py | 607dc5b67c34d133f749e9e5c0c63d56c801c80a | [
"MIT"
] | permissive | ReDetection/vas3k.club | 5980e6849ccf2e67a380d80d23036ec09ed94453 | ad97fefca5ec52047b2daad77ddf2eb2aecb03b3 | refs/heads/master | 2023-06-11T19:48:56.730541 | 2020-08-12T17:44:26 | 2020-08-12T17:44:26 | 287,545,618 | 0 | 0 | MIT | 2020-08-14T13:58:31 | 2020-08-14T13:58:30 | null | UTF-8 | Python | false | false | 2,905 | py | from datetime import datetime, timedelta
from django.db.models import Q
from django.http import Http404
from django.shortcuts import get_object_or_404, render
from auth.helpers import auth_required
from common.pagination import paginate
from posts.models import Post, Topic
POST_TYPE_ALL = "all"
ORDERING_ACTIVITY = "activity"
ORDERING_NEW = "new"
ORDERING_TOP = "top"
ORDERING_TOP_WEEK = "top_week"
ORDERING_TOP_MONTH = "top_month"
@auth_required
def feed(request, post_type=POST_TYPE_ALL, topic_slug=None, ordering=ORDERING_ACTIVITY):
post_type = post_type or Post
if request.me:
request.me.update_last_activity()
posts = Post.objects_for_user(request.me)
else:
posts = Post.visible_objects()
# filter posts by type
if post_type != POST_TYPE_ALL:
posts = posts.filter(type=post_type)
# filter by topic
topic = None
if topic_slug:
topic = get_object_or_404(Topic, slug=topic_slug)
posts = posts.filter(topic=topic)
# hide non-public posts and intros from unauthorized users
if not request.me:
posts = posts.exclude(is_public=False).exclude(type=Post.TYPE_INTRO)
# exclude shadow banned posts, but show them in "new" tab
if ordering != ORDERING_NEW:
if request.me:
posts = posts.exclude(Q(is_shadow_banned=True) & ~Q(author_id=request.me.id))
else:
posts = posts.exclude(is_shadow_banned=True)
# no type and topic? probably it's the main page, let's apply some more filters
if not topic and post_type == POST_TYPE_ALL:
posts = posts.filter(is_visible_on_main_page=True)
# order posts by some metric
if ordering:
if ordering == ORDERING_ACTIVITY:
posts = posts.order_by("-last_activity_at")
elif ordering == ORDERING_NEW:
posts = posts.order_by("-published_at", "-created_at")
elif ordering == ORDERING_TOP:
posts = posts.order_by("-upvotes")
elif ordering == ORDERING_TOP_WEEK:
posts = posts.filter(
published_at__gte=datetime.utcnow() - timedelta(days=7)
).order_by("-upvotes")
elif ordering == ORDERING_TOP_MONTH:
posts = posts.filter(
published_at__gte=datetime.utcnow() - timedelta(days=31)
).order_by("-upvotes")
else:
raise Http404()
# split results into pinned and unpinned posts on main page
pinned_posts = []
if ordering == ORDERING_ACTIVITY:
pinned_posts = posts.filter(is_pinned_until__gte=datetime.utcnow())
posts = posts.exclude(id__in=[p.id for p in pinned_posts])
return render(request, "posts/feed.html", {
"post_type": post_type or POST_TYPE_ALL,
"ordering": ordering,
"topic": topic,
"posts": paginate(request, posts),
"pinned_posts": pinned_posts,
})
| [
"[email protected]"
] | |
5c3b9ce4daac002df7317fca1d9da2026544660a | 82205ef1622ef3bb3bd4982f6ddc52509686af8c | /numba2/pipeline.py | 6a3813352da9a461afdad7bd801f5f684e75bdd7 | [] | no_license | cooperliu101/numba-lang | 22f1567e17cd7cf831f254bf64bc7e3192c973c3 | 37abfcbb516175153e73474dababb2d89cba7a8b | refs/heads/master | 2021-07-21T14:35:23.943243 | 2013-11-15T12:07:53 | 2013-11-15T12:07:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,460 | py | # -*- coding: utf-8 -*-
"""
Pipeline that determines phase ordering and execution.
"""
from __future__ import print_function, division, absolute_import
import dis
import types
import pykit.ir
#===------------------------------------------------------------------===
# Pipeline
#===------------------------------------------------------------------===
def run_pipeline(func, env, passes):
"""
Run a sequence of transforms (given as functions or modules) on the
AIR function.
"""
env['numba.state.crnt_func'] = func
for transform in passes:
func, env = apply_transform(transform, func, env)
env['numba.state.crnt_func'] = func
return func, env
def apply_transform(transform, func, env):
if isinstance(transform, types.ModuleType):
result = transform.run(func, env)
else:
result = transform(func, env)
result = _check_transform_result(transform, func, env, result)
return result or (func, env)
def _check_transform_result(transform, func, env, result):
if result is not None and not isinstance(result, tuple):
if isinstance(result, pykit.ir.Function):
return result, env
if isinstance(transform, types.ModuleType):
transform = transform.run
transform = transform.__module__ + '.' + transform.__name__
raise ValueError(
"Expected (func, env) result in %r, got %s" % (transform, result))
return result | [
"[email protected]"
] | |
f3e7ef114ef2471fbf5671381769253a62f14fce | f693c9c487d31a677f009afcdf922b4e7f7d1af0 | /biomixer-venv/bin/rst2latex.py | f2bf88ab2545f14b13e630bbf6232fa0aac977a0 | [
"MIT"
] | permissive | Shellowb/BioMixer | 9048b6c07fa30b83c87402284f0cebd11a58e772 | 1939261589fe8d6584a942a99f0308e898a28c1c | refs/heads/master | 2022-10-05T08:16:11.236866 | 2021-06-29T17:20:45 | 2021-06-29T17:20:45 | 164,722,008 | 1 | 3 | MIT | 2022-09-30T20:23:34 | 2019-01-08T19:52:12 | Python | UTF-8 | Python | false | false | 831 | py | #!/home/shello/Documents/BioMixer/biomixer-venv/bin/python
# $Id: rst2latex.py 5905 2009-04-16 12:04:49Z milde $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing LaTeX.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline
description = ('Generates LaTeX documents from standalone reStructuredText '
'sources. '
'Reads from <source> (default is stdin) and writes to '
'<destination> (default is stdout). See '
'<http://docutils.sourceforge.net/docs/user/latex.html> for '
'the full reference.')
publish_cmdline(writer_name='latex', description=description)
| [
"[email protected]"
] | |
5114e9fb9e89b7c121fe36ba7adffd2c63ca57fb | 1f5299e547125f7ba2c3b72984e82ba3b1f984de | /basejumper/security.py | ad0e4a6b627153a5f150932198651845b40573cd | [] | no_license | ESGF/basejump | 72b805818188ae09b1e3329035718b0816927a13 | 593d5e57e63848c30219ca9b49e25f49b59dcf82 | refs/heads/master | 2021-07-23T04:19:57.489628 | 2017-11-02T16:51:52 | 2017-11-02T16:51:52 | 47,994,570 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 908 | py | import hmac
import hashlib
import json
import collections
def constant_time_compare(val1, val2):
# We'll allow them to know that the lengths of the strings don't match
if len(val1) != len(val2):
return False
result = 0
for x, y in zip(val1, val2):
result |= ord(x) ^ ord(y)
return result == 0
def hmac_compare(key, msg, known):
h = hmac.new(key, msg, hashlib.sha256)
return constant_time_compare(h.hexdigest(), known)
def get_dict_signature(dictionary, key):
h = hmac.new(key, digestmod=hashlib.sha256)
for k in sorted(dictionary.keys()):
h.update(k)
h.update(str(dictionary[k]))
return h.hexdigest()
def check_json_sig(dictionary, key, signature):
return constant_time_compare(get_dict_signature(dictionary, key), signature)
def sign_path(path, key):
h = hmac.new(key, path, hashlib.sha256)
return h.hexdigest()
| [
"[email protected]"
] | |
c433ae0599808eb1ea13010018c8a7e094198719 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/63/usersdata/189/29190/submittedfiles/swamee.py | 3b19ef457ff49af07dc44e820878bdc4c334e957 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 414 | py | # -*- coding: utf-8 -*-
import math
f= float(input('digite f:'))
l= float(input('digite l:'))
q= float(input('digite q:'))
delta= float(input('digite delta:'))
v= float(input('digite v:'))
# D = (8*f*L*Q**2 / (pi**2 * g * deltaH)) ** (1/5), per the Swamee sizing formula
d=((8*f*l*(q*q))/(3.14159**2*9.81*delta))**(1/5)
rey=((4*q)/(3.14159*d*v))
# relative roughness term grouped as 0.000002/(3.7*d)
k=0.25/(math.log10(0.000002/(3.7*d)+5.74/rey**0.9))**2
print('O valor de D é %.4f' %d)
print('O valor de Rey é %.4f' %rey)
print('O valor de K é %.4f' %k) | [
"[email protected]"
] | |
8bafb18de3c09b4e845ad8d2df44676d5617bfad | 3e3743928f43aaef5cfb72e257b6f091fc2a39cb | /src/whirlwind/tornado/carbon/persist.py | 6a4360ba75d08129ad746ac60e9c350f75c12a51 | [
"Apache-2.0"
] | permissive | bearstech/whirlwind-tornado | 3871862944f584816a1d90891cec815f64209e14 | 85b9e6b4b3413694cb6e5040ce5c72b6e5e436ac | refs/heads/master | 2021-01-18T14:10:05.666763 | 2020-10-13T09:47:05 | 2020-10-13T09:47:05 | 10,527,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,992 | py | import time
import struct
import os.path
import os
import whisper
from redis import StrictRedis as Redis
from whirlwind import target_to_path
METRICS = 'metrics'
PERIOD = 30
METRIC_WRITE = 'carbon.write'
METRIC_POINTS = 'carbon.points'
class Persist(object):
""" Sequential writer for Carbon server.
The story is simple, fetch data from redis, write them, wait, loop.
This code is supervised by Carbon daemon.
"""
def __init__(self, path="/tmp/"):
self.redis = Redis()
self.path = path
self.dirs = set()
self.redis.sadd(METRICS, METRIC_POINTS, METRIC_WRITE)
def metric(self, name, value):
"Add some metrics : make your own dogfood, just before lunch."
timestamp = time.time()
serialized = struct.pack('!ff', timestamp, value)
pipe = self.redis.pipeline()
pipe.zadd(name, timestamp, serialized)
pipe.publish(name, serialized)
pipe.execute()
def run(self):
while True:
before = time.time()
self.handle()
after = time.time()
self.metric(METRIC_WRITE, (after - before) * 1000)
time.sleep(PERIOD - int(before) + int(after))
def handle(self):
points = 0
for metric in self.redis.smembers(METRICS):
values = self.redis.zrange(metric, 0, -1)
points += len(values)
f = target_to_path(self.path, metric)
d = os.path.dirname(f)
if d not in self.dirs:
if not os.path.isdir(d):
os.makedirs(d)
self.dirs.add(d)
if not os.path.exists(f):
whisper.create(f, [(10, 1000)]) # [FIXME] hardcoded values
whisper.update_many(f, [struct.unpack('!ff', a) for a in values])
if len(values):
self.redis.zrem(metric, *values)
self.metric(METRIC_POINTS, points)
if __name__ == "__main__":
p = Persist()
p.run()
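# Producer-side sketch (illustrative, metric name made up): a writer elsewhere in
# whirlwind is expected to register the metric name in the METRICS set and queue
# (timestamp, value) pairs packed as '!ff' into a sorted set keyed by the metric:
#
#   r = Redis()
#   now = time.time()
#   r.sadd(METRICS, 'app.requests')
#   r.zadd('app.requests', now, struct.pack('!ff', now, 42.0))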
| [
"[email protected]"
] | |
dc49aee3b646e4e2864be55fd34519a351e9c3ad | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p4VQE/R1/benchmark/startQiskit_noisy83.py | 997aaa1938fcb497d01eab2eff9be00509126ac8 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,391 | py | # qubit number=3
# total number=9
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
prog = QuantumCircuit(input_qubit)
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
for edge in E:
k = edge[0]
l = edge[1]
prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
prog.p(gamma, k)
prog.p(gamma, l)
prog.rx(2 * beta, range(len(V)))
prog.swap(input_qubit[1],input_qubit[0]) # number=5
prog.swap(input_qubit[1],input_qubit[0]) # number=6
prog.y(input_qubit[2]) # number=7
prog.y(input_qubit[2]) # number=8
# circuit end
return prog
if __name__ == '__main__':
n = 4
V = np.arange(0, n, 1)
E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
G = nx.Graph()
G.add_nodes_from(V)
G.add_weighted_edges_from(E)
step_size = 0.1
a_gamma = np.arange(0, np.pi, step_size)
a_beta = np.arange(0, np.pi, step_size)
a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
1 + np.cos(4 * a_gamma) ** 2)
result = np.where(F1 == np.amax(F1))
a = list(zip(result[0], result[1]))[0]
gamma = a[0] * step_size
beta = a[1] * step_size
prog = make_circuit(4)
sample_shot =5200
writefile = open("../data/startQiskit_noisy83.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = FakeYorktown()
circuit1 = transpile(prog, FakeYorktown())
circuit1.measure_all()
prog = circuit1
info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| [
"[email protected]"
] | |
e680ac98db693298f1bdeb9376fa11577feeb89a | c237e2f29eac2b92bd2b77d055e33cf760960284 | /todobackend/todo/serializers.py | dc073eafa9c9e17d9bdc60cc128b53a776392e7b | [] | no_license | razyesh/Django-TODO-React | 7f3293c858099f8656a287c8706f739b1d513077 | b8529138334710d582324f286c10a39197aca25d | refs/heads/master | 2022-12-11T11:20:33.622468 | 2019-06-28T10:28:03 | 2019-06-28T10:28:03 | 194,255,236 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 272 | py | from rest_framework import serializers
from .models import Todo
class TodoSerializer(serializers.ModelSerializer):
class Meta:
model = Todo
fields = (
'id',
'title',
'description',
'completed',
) | [
"[email protected]"
] | |
a9844ec6bc76cbabb561aa8a324b90f85c935090 | f09dc121f213f2881df3572288b7ee5b39246d73 | /aliyun-python-sdk-hbr/aliyunsdkhbr/request/v20170908/DeleteSqlServerInstanceRequest.py | 8647a804d9271b31c253673d44db33fd213dcbae | [
"Apache-2.0"
] | permissive | hetw/aliyun-openapi-python-sdk | 2f31378ad6be0896fb8090423f607e9c7d3ae774 | 7443eacee9fbbaa93c7975c6dbec92d3c364c577 | refs/heads/master | 2023-01-19T22:42:36.214770 | 2020-12-04T10:55:14 | 2020-12-04T10:55:14 | 318,689,093 | 1 | 0 | NOASSERTION | 2020-12-05T03:03:03 | 2020-12-05T03:03:03 | null | UTF-8 | Python | false | false | 1,604 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkhbr.endpoint import endpoint_data
class DeleteSqlServerInstanceRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'hbr', '2017-09-08', 'DeleteSqlServerInstance','hbr')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_VaultId(self):
return self.get_query_params().get('VaultId')
def set_VaultId(self,VaultId):
self.add_query_param('VaultId',VaultId)
def get_ClusterId(self):
return self.get_query_params().get('ClusterId')
def set_ClusterId(self,ClusterId):
self.add_query_param('ClusterId',ClusterId) | [
"[email protected]"
] | |
25c3b42a397977592ebd6616aeefe441954c721c | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/built-in/cv/classification/3D_ResNet_ID0421_for_PyTorch/pth2onnx.py | 344f0acb80d30eeb817bac80202a259a4e4fbed2 | [
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 2,739 | py | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
import json
import random
import os
import numpy as np
import torch
from opts import parse_opts
from model import (generate_model, load_pretrained_model, make_data_parallel,
get_fine_tuning_parameters)
def json_serial(obj):
if isinstance(obj, Path):
return str(obj)
def get_opt():
opt = parse_opts()
if opt.root_path is not None:
opt.video_path = opt.root_path / opt.video_path
opt.annotation_path = opt.root_path / opt.annotation_path
opt.result_path = opt.root_path / opt.result_path
if opt.resume_path is not None:
opt.resume_path = opt.root_path / opt.resume_path
opt.arch = '{}-{}'.format(opt.model, opt.model_depth)
opt.begin_epoch = 1
opt.n_input_channels = 3
print(opt)
with (opt.result_path / 'opts.json').open('w') as opt_file:
json.dump(vars(opt), opt_file, default=json_serial)
return opt
def resume_model(resume_path, arch, model):
print('loading checkpoint {} model'.format(resume_path))
checkpoint = torch.load(resume_path, map_location='cpu')
assert arch == checkpoint['arch']
if hasattr(model, 'module'):
model.module.load_state_dict(checkpoint['state_dict'])
else:
model.load_state_dict(checkpoint['state_dict'])
return model
def main_worker(index, opt):
random.seed(opt.manual_seed)
np.random.seed(opt.manual_seed)
torch.manual_seed(opt.manual_seed)
model = generate_model(opt)
if opt.resume_path is not None:
model = resume_model(opt.resume_path, opt.arch, model)
model = make_data_parallel(model, opt.distributed, opt.device)
dummy_input = torch.ones(10, 3, 16, 112, 112)
torch.onnx.export(
model,
dummy_input,
'3D-ResNets.onnx',
input_names=['input'],
output_names=['output'],
export_params=True,
do_constant_folding=True,
verbose=True,
opset_version=11)
print('3D-ResNets.onnx export success')
if __name__ == '__main__':
opt = get_opt()
opt.device = torch.device('cpu')
main_worker(-1, opt) | [
"[email protected]"
] | |
fe0b784d9423f6752a2e04ea2db13d45f4526bf0 | 7249edf3365731c92a9c9c05db3186894306cc17 | /python/src/vmaf/svmutil.py | ca49e991943642c3fce7b7aca7f07566c7af3622 | [
"LGPL-3.0-or-later",
"Apache-2.0"
] | permissive | sunery/vmaf | 22e2f782549e1c71aa6f5160f26350e0aca06189 | 03eb8a4980b1bf2b3edd66767e67927109dbd9de | refs/heads/master | 2020-04-01T15:12:57.469291 | 2018-10-15T00:31:21 | 2018-10-15T00:31:21 | 153,327,009 | 1 | 0 | Apache-2.0 | 2018-10-16T17:32:53 | 2018-10-16T17:32:53 | null | UTF-8 | Python | false | false | 735 | py | # TODO: dependency on libsvm/svmutil needs to be properly done, this is a temporary workaround wrapper
from __future__ import absolute_import
import sys
from vmaf.config import VmafConfig
# This will work only when running with a checked out vmaf source, but not via pip install
libsvm_path = VmafConfig.root_path('libsvm', 'python')
if libsvm_path not in sys.path:
# Inject {project}/libsvm/python to PYTHONPATH dynamically
sys.path.append(libsvm_path)
try:
# This import will work only if above injection was meaningful (ie: user has the files in the right place)
from svmutil import * # noqa
except ImportError as e:
print "Can't import svmutil from %s: %s" % (libsvm_path, e)
sys.exit(1)
| [
"[email protected]"
] | |
a85f58e88b5664a708051c99c0c4ada535118d4e | 70121257e52e0fd2f0895414fcee3c991737443a | /python_recipes/danfo_csv.py | 96c22906850257b9b40aa400587b7180e3fa23bd | [] | no_license | OlgaBelitskaya/cookbooks | 2e54208bb5e5157814deea6ff71cd7ce5b1e4972 | 216dde3e5617203371ed4c4bb7d9e8391640c588 | refs/heads/master | 2021-07-11T15:56:44.923442 | 2021-03-25T08:38:46 | 2021-03-25T08:38:46 | 99,447,645 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,696 | py | from IPython.core.display import display,HTML
def danfo_table_csv(url,columns,header_font_size):
html_str="""<html><head><meta charset='UTF-8'>"""+\
"""<meta name='viewport' """+\
"""content='width=device-width,initial-scale=1.0'>"""+\
"""<script src='https://cdn.jsdelivr.net/npm/"""+\
"""[email protected]/dist/index.min.js'></script></head>"""+\
"""<div><p> CSV =>>> Danfo DataFrames</p>"""+\
"""<div id='div015_1'></div><script>"""+\
"""var url='"""+url+"""'; """+\
"""dfd.read_csv(url)"""+\
""" .then(df=>{df.loc({columns:"""+str(columns)+\
"""}).plot('div015_1').table({header_style:"""+\
"""{font:{size:"""+str(header_font_size)+"""}}})})"""+\
""" .catch(err=>{console.log(err);})"""+\
"""</script></div></html>"""
display(HTML(html_str))
def danfo_chart_csv(url,columns,line_width,title):
html_str="""<html><head><meta charset='UTF-8'>"""+\
"""<meta name='viewport' """+\
"""content='width=device-width,initial-scale=1.0'>"""+\
"""<script src='https://cdn.jsdelivr.net/npm/"""+\
"""[email protected]/dist/index.min.js'> </script></head>"""+\
"""<body><p> CSV =>>> Danfo DataFrames</p>"""+\
"""<div id='div015_2'></div><script>"""+\
"""var url='"""+url+"""'; """+\
"""dfd.read_csv(url).then(df=>{var layout={"""+\
""" title:'"""+title+\
"""',xaxis:{title:'columns'},"""+\
""" yaxis:{title:'value'}}; """+\
""" df.plot('div015_2').line({"""+\
"""line:{width:"""+str(line_width)+"""},"""+\
"""columns:"""+str(columns)+""",layout:layout})})"""+\
""" .catch(err=>{console.log(err);})"""+\
"""</script></body></html>"""
display(HTML(html_str)) | [
"[email protected]"
] | |
55dbe8317f1c57f0eda91ec6f4ea5d6a3355faf5 | 44064ed79f173ddca96174913910c1610992b7cb | /Second_Processing_app/temboo/Library/Flickr/Places/FindByKeyword.py | c0620abf4ec0d8e85c197a86e1e3a6d28555d771 | [] | no_license | dattasaurabh82/Final_thesis | 440fb5e29ebc28dd64fe59ecd87f01494ed6d4e5 | 8edaea62f5987db026adfffb6b52b59b119f6375 | refs/heads/master | 2021-01-20T22:25:48.999100 | 2014-10-14T18:58:00 | 2014-10-14T18:58:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,859 | py | # -*- coding: utf-8 -*-
###############################################################################
#
# FindByKeyword
# Returns a list of place IDs for a query string.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class FindByKeyword(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the FindByKeyword Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
Choreography.__init__(self, temboo_session, '/Library/Flickr/Places/FindByKeyword')
def new_input_set(self):
return FindByKeywordInputSet()
def _make_result_set(self, result, path):
return FindByKeywordResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return FindByKeywordChoreographyExecution(session, exec_id, path)
class FindByKeywordInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the FindByKeyword
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by Flickr (AKA the OAuth Consumer Key).)
"""
InputSet._set_input(self, 'APIKey', value)
def set_Query(self, value):
"""
Set the value of the Query input for this Choreo. ((required, string) The query string to use for place ID lookups.)
"""
InputSet._set_input(self, 'Query', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: xml and json. Defaults to json.)
"""
InputSet._set_input(self, 'ResponseFormat', value)
class FindByKeywordResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the FindByKeyword Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Flickr.)
"""
return self._output.get('Response', None)
class FindByKeywordChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return FindByKeywordResultSet(response, path)
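# Usage sketch. TembooSession and execute_with_results() are assumed to come from the
# Temboo core SDK (temboo.core.session / the Choreography base class); the account,
# application-key, and Flickr API key strings below are placeholders.
#
#   from temboo.core.session import TembooSession
#   session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#   choreo = FindByKeyword(session)
#   inputs = choreo.new_input_set()
#   inputs.set_APIKey('FLICKR_API_KEY')
#   inputs.set_Query('Paris')
#   results = choreo.execute_with_results(inputs)
#   print results.get_Response()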
| [
"[email protected]"
] | |
96b7ae9b557800edf96fa234ccdc6c5e23c59dea | 1125345341e496920b661e612cd67cdb96a1d170 | /createCampaign/parameter_tests/CREATIVE_NAME/test02_valid_p.py | 4718d851e9cdc50ac51047f4f44e5f3ae48e305b | [] | no_license | Stephen-Williams/swarm-qa | 0bac526f0ee44b8c3677fb35959e6f7d0e258be2 | 90e36b5eab475788d9ab54051ad9c2736f3633ec | refs/heads/master | 2021-01-01T20:11:51.033059 | 2015-07-08T16:07:06 | 2015-07-08T16:07:06 | 38,764,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,678 | py | { 'all':
{ '0':
{ 'ADOMAIN': 'abc.com',
'ADVERTISER_CATEGORY': 'IAB8-5',
'APP_FILTER': 'sites',
'CREATIVE_ATTR': '0',
'CREATIVE_BASE_64': 'iVBORw0KGgoAAAANSUhEUgAAAUAAAAAyCAIAAACib5WDAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAAD2EAAA9hAag/p2kAAAAYdEVYdFNvZnR3YXJlAHBhaW50Lm5ldCA0LjAuNWWFMmUAAAYHSURBVHhe7ZtNaB5VFIYHUkREoYuCglIUKmRRsBQRhSwKCpGgYCCLQEWCKCgWEbGgUMii0IJFFAxkEWgoIhayCFRokBYqZBGwFJEKLbjowkUXLlx04cKFPsO9nJw589Nu2nK/eV8eJOeeO983wnfu+Zlp9d9NIUSpRFsIURDRFkIURLSFEAURbSFEQURbCFEQ0RZCFES0hRAFEW0hREFEWwhRENEWQhREtIUQBRFtIURBRFsIURDRFgNcOVe99Wp15KVqYbb68+fofSjsnK+OHa2W5qvP3q1ub0evmHiiLQYgek1XN6L3ofDs0/l+0OZK9IqJJ9pigGeezKGyZ6r657foffDcuZbvJ+mvnbhBTDzRFn0QHqZD0w3X1lourRfnHmhpTRVgOrA/esUYiLbo48fVHCrovYWG67VX8jr646eG676yfip/KaINDl4xBqIt+jj5cQ4V9O2JhmvvE3mdP/z6/eaTd/L3otXl6BVjINoTxr+/V9cv1NPjnfN36VrJnGyjKO3rJPsmWFxoIhXbeh/cEpdvrnQX2yxyG/c4T359Jn8v+nUzesUYiPbEQJDMHK4efST/vtHjj9XPWu5ca2y7cbEOgz1TeU8SLS4Fs98G7QnW6U/zSlt879+/5AttUEzQ8nUvHswmPXPakCCr79ubXYirtr+vTwduO6kdok/tyy72BJcYCdGeDD5/P8akib7Rtp34sHcb2lrb3dk5wZp+Lq+0xceSadnj8/Payd1oRAuz+XM4U3wXbWIze0x2IiQwTfeS+cVEEu0JoJ0YfWaz1HrmeF5B9K5L83XYH3w+ryByoH1m5wRrIPipt9OejW/yCjqwP/+R9PUXeY+P0j61h8zkZ9PyR9ErRkK0S4eftY8rcmxKXHSVi3N1GZyqXx+Q1LTWjuK1uhRZ0uucYHGVT7CcArcuZ6xQp2gP4jb4hGNH84fzt4k7X12u1+nYfcuNLF0b/kJfLIhREe3SobE0tfOSzbEog5OIGfpS20DgWbomkm3dhxPRZev0pabOOtbXxnzXd182vJTZPi3b0ZBcfkZFvWCuBEeAyQ4aMTaiXTS+qqQATl1oG/KV6Y0j9Qo7SZtEl02YEBFil9gEC/kxGG2tiWRr64Y9YUIfLEavLwSo3sMN+9L60tmGC+yo4sLgEuMh2kVDE2uieA5ewz8+7RPBY+HkJ1ghWnwapN31LvAFtp9LG4S0yaffhH/PuX2tHQ3hrRIxKqJdNC+/kH/TaKAtHJgeIzLb+qlGMvR58u03d9dh5nBeR4Srd4GfYNlYy+Pv5PqFhssX52zzLqClN1EFBK8YD9EuGj9tHngn2UQSo08GYuCHr+o3KDqv8hMsGx0n7LEQH+XXE36C1RlmXsFFBWEKpwZc3ciPuKntKf6DV4yHaBeNV997V74epuMN3k78BIs229bJmaa7TrDar2H4O9kz1XBxjvjmOZwaQhjRLhqvUEITLSm70kyaSGLhxaxO+iZYJG1TZyNqQcgXtSdqvgxGduLwFX7+jPypkeD/gsIe+s4pMRKiXTT+kQzZL72xTOiSwYglKuG0zVfa7ec6l842FgcmWNTeJpI5gQfWyvoJVl+q90q3R6z6SXhSOGVuXNw9GmjCvUuMjWgXzdJ8/lmbfCFqU2JfElO7LszWFwJ5L/W0vh4emGCtLud1L3vD2U+w/BMpj39qHWSHUfspkW+t214xKqJdNBTJ/mVjLyLZ8tity43Abss/0R2YYJEJif8gi9W7TrDAnw5efKn1z+0Jlh99t58ti1ER7dKhgvUPk5JIueEhDabPwyYCm26Wstl22pNe+tj2IOrM8TwNTuJy22OvYbDoX/YKrJ9qnAKHpvM7G/ZGJ5223w+Lc9lFLzDwyWIMRHsCoI+l/9xaq2Pjyrmh1wxvb9fvRVLrspP/th/kJkjsJO32FCrB5/Mhnf/kOF3oVzrhKlrfzZXGDbDItZ0zKu6E/XT4oTcWIyTaQoiCiLYQoiCiLYQoiGgLIQoi2kKIgoi2EKIgoi2EKIhoCyEKItpCiIKIthCiIKIthCiIaAshCiLaQohiuFn9D1yZ9AWuOgemAAAAAElFTkSuQmCC',
'CREATIVE_HEIGHT': 50,
'CREATIVE_NAME': 'Creative Name.png',
#CREATIVE_NAME is valid
'CREATIVE_TYPE': '3',
'CREATIVE_WIDTH': 320,
'DAY_PARTING': '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111',
'DELIVERY_RATE_UNIT': 'impressions',
'ENCODE_A_HREF': 0,
'START': 1433908800, #June 10th 2015, midnight
'END': 1434513599, #June 16th 2015, 23:59:59
'EXCHANGE': 'mpb',
'LANG': 'en',
'LOCATIONS': ['CAN', 'USA'],
'MAX_RATE_IN_DOLLARS': 0.8,
'MPB_TYPE': '',
'NECTAR_ALLOCATION': 9602,
'NECTAR_CRID': 9602,
'QUANTITY': '1000000',
'TARGET_ANDROID': True,
'TARGET_IOS': True,
'SITE_LIST': ['0c3e797b933649ab84619d8e8a1c0ab6',
'07ab13ce6ae511e281c11231392559e4',
'f8289871fe0d48318d36bf3ea197f65d',
'bd80deae924f11e281c11231392559e4'],
'TAG': '<A HREF="http://ad.foobar.net/ddm/jump/N6041.368591.JUICEMOBILE.CA/B8760366.118973391;sz=728x90;ord=[NECTAR_TIME]?">\r\n'
'<IMG SRC="http://ad.foobar.net/ddm/ad/N6041.368591.JUICEMOBILE.CA/B8760366.118973391;sz=728x90;ord=[NECTAR_TIME]?" '
'BORDER=0 WIDTH=728 HEIGHT=90 '
'ALT="Advertisement"></A>'
}
}
}
| [
"[email protected]"
] | |
cf1c7ed666e3e2ffefa00c4742ed5302dc0b15bd | 8f580f80eae1f947ebb3fed099a996ba961dfe95 | /view/resources_rc.py | 7179cf4a2b5a045727fb4f05db70fd80865c4d6c | [] | no_license | ankhbold/sesmim_training | 45efb172b8269708cc4a352fa944a5c2b35936a1 | 162ba8fe798c565fbd46f6b5f06f0d8aa17d2962 | refs/heads/master | 2020-04-28T08:01:02.805373 | 2019-03-19T00:34:19 | 2019-03-19T00:34:19 | 175,111,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,103 | py | # -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt4 (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x06\xa7\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\
\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\x95\x2b\
\x0e\x1b\x00\x00\x06\x3c\x49\x44\x41\x54\x58\x47\xd5\x57\x09\x6c\
\x94\x45\x14\xfe\x76\xb7\xbb\xdb\xed\x6e\xb7\xf7\xdd\xd2\x83\x80\
\x50\xac\x5c\x2d\x50\x48\x53\x11\xac\x20\x68\x50\x23\x04\x30\x96\
\x43\xb4\x82\x07\xd1\x28\xda\x20\x42\x62\x8a\xa6\x68\xc1\x40\x01\
\xc5\x03\x42\x21\x02\xc5\x88\xd8\x16\x0a\x85\x26\xdc\x5d\xa0\xa4\
\xb4\x50\x28\x3d\xe8\xb5\xdb\x8b\xde\xdd\x7b\x7d\x33\xdd\xca\x6e\
\x8f\x85\x48\x90\xf8\x6d\x9a\x7f\xfe\xf9\xdf\xcc\xfb\xe6\xcd\xf7\
\xde\x4c\x05\xc9\xf9\x33\x2c\x78\x82\x10\x5a\x9f\x4f\x0c\xff\x1f\
\x02\x26\x8b\x11\x3a\x63\x37\xba\x0d\xed\xe8\x32\xb4\xf1\x27\x7b\
\x67\xfd\x8f\x82\x07\x6a\xc0\x64\x36\xa2\x5d\xdf\x0c\x6f\x59\x30\
\xa2\xfd\x67\x23\xc4\x75\x34\x64\x62\x05\x11\xe8\x40\x75\xc7\x4d\
\xa8\xd4\xd9\x68\xee\xa9\x81\x52\xe2\x05\x91\xd0\xc9\x3a\xca\x1e\
\x16\x8b\x19\x06\xb3\x9e\xb5\x20\x11\xc9\x7a\x3b\xad\x70\x48\x80\
\x39\x61\x13\xaf\x9c\x90\x0e\x3f\x79\x18\xef\x33\xd3\x64\x6d\xba\
\x06\xb8\x49\x7d\x21\x14\xf4\x06\xb0\xa1\xab\x0a\xdb\xae\xae\x44\
\xbb\xae\x11\x2e\x62\x25\x8f\x8c\xd1\x62\xe0\xe4\x75\xa6\x2e\x28\
\xc4\x9e\x98\xe0\x97\x00\x77\x67\x7f\xe4\x57\x67\xd8\x91\x18\x92\
\x40\xbb\xae\x09\x53\x03\x5f\xc5\xc2\x31\xeb\xf8\xfb\xfe\x92\xaf\
\x70\xb6\x36\x93\x26\xd5\xc1\x49\x28\x81\xde\xac\xa5\x05\x09\x30\
\x35\xe8\x15\x24\x46\xa5\x70\x9b\xdf\x6e\xa4\x20\xb7\xf2\x67\x2c\
\x8a\xdc\x00\x1f\x97\x60\x1e\xb5\x00\xc5\x70\xfe\x8d\x11\xcf\xab\
\xda\x83\xec\xf2\x9d\x70\x76\x92\xf3\x3e\x86\x41\x35\xc0\xf6\x37\
\x36\xa8\xd7\xf9\xcd\xe6\x8b\x78\x27\x67\x14\x2e\xab\xb3\xe0\x2a\
\xf1\x80\x5c\xe2\xce\xf7\xdd\x4d\xe2\x47\x36\xf3\x20\x17\xbb\x61\
\xe1\x11\x6f\x14\x37\x9e\xc1\x82\xd1\xc9\x98\x3e\xec\x0d\x14\x6a\
\x72\x11\xe5\x13\xcf\x9d\x5f\xac\x3b\x8a\xf3\xb5\x47\x78\xb4\x4a\
\x9a\xce\x12\x79\xb1\xd5\x4b\x2f\x06\x44\x80\x85\x4d\x2a\x72\xc1\
\x86\xb8\x2c\x72\x7e\x01\x9b\x2e\x2d\x86\xaf\x4b\x18\x39\x35\xa0\
\x43\xd7\x8c\x84\xf0\x15\x88\x0f\x59\x08\x0f\x99\x9f\x75\x04\x70\
\x55\x7d\x12\x69\xaa\x44\x7c\x3a\x79\x3f\x9e\xf6\x89\xc3\x27\xa7\
\xa6\x91\xcd\x22\xdc\x6a\xb9\x44\xa4\xd6\x22\xc8\x75\x04\xb7\x4b\
\x3a\x16\x09\x4f\xe7\x00\x08\x04\x02\xfe\xce\x30\x20\x02\x4c\x70\
\xab\x26\xee\xe0\xed\xef\x0a\x12\xb9\x73\xbd\xb9\x87\x56\xef\x8d\
\xf4\x17\x8a\x31\x6f\xe4\x6a\x94\xb6\x5c\xe0\xdf\x19\xb4\xb4\xdf\
\x62\x27\x09\xf6\xcc\xad\x41\xca\xf9\xd7\x78\xdf\x9a\x29\x07\xb0\
\xaf\x64\x03\x3e\x9a\xb4\xfb\x1f\xe7\xb7\x5b\x54\xb4\x30\x99\x9d\
\x73\x06\x3b\x02\x6c\xf5\x3e\xb2\x10\x72\x3a\x0c\x19\xc5\xeb\xb9\
\x00\x7b\xc3\xed\x8b\xb5\x53\x0f\x73\x9b\xfa\xce\x72\x3b\xb5\xb7\
\xea\x34\xb8\x46\x11\x48\xce\x9f\x09\x7f\x79\x04\xf6\x5e\x5f\x47\
\x7b\x1f\xc4\xdb\x15\xad\x45\x56\x2b\x20\xef\xee\x5e\x1e\xd9\xfe\
\xb0\x23\x60\x20\x81\xc5\x04\xcc\xe1\x6d\x26\x38\xa6\xd6\x56\x52\
\x7c\x72\xec\x21\xde\xc7\xe0\x45\x93\xd7\x75\xdc\xe6\xed\xcc\x9b\
\xa9\xf8\x5e\xb5\x02\x5e\x2e\x41\x48\x89\x3f\x81\xa4\xf1\x5b\x91\
\x53\xf1\x03\xff\x16\x17\x3c\x1f\xa7\xef\x66\xf0\xb6\xc9\x6c\x22\
\x0d\x65\x43\x2c\x92\xf2\x77\x5b\xd8\x11\x30\xd3\x6a\x43\x94\x91\
\x94\x46\xa4\x70\xca\x59\x96\x4a\x63\x7d\x9e\xb3\x5b\xf1\xdd\xf6\
\x62\x1c\x28\xfd\x1a\x1b\xcf\xcf\x87\xa7\x2c\x90\xd4\x3e\x0c\xb3\
\x22\x56\xf0\x6f\x23\x3d\xa3\x49\x94\x9e\xe8\x31\x74\x21\xd4\x6d\
\x0c\x15\xac\x56\xde\xbf\xb3\xf0\x43\xb8\x53\xda\x0e\x86\x7e\x04\
\x2c\x3c\xec\x6d\x14\x56\xa6\x56\x23\x15\x8f\x71\xbe\x33\xf9\xb7\
\x93\x95\x7b\xb0\xe6\x74\x3c\x8a\x1a\xf3\x91\xf1\x92\x1a\x9f\xc7\
\x1e\xc0\xf4\xd0\xc5\x78\x7b\x6c\x1a\x36\x17\x2c\xc7\x35\x4d\x1e\
\xb7\x0b\x24\xe5\xb7\xe9\x1b\xa0\x94\x7a\xd3\x78\x03\x45\xab\x0c\
\x45\x4d\xa7\x06\x5d\x3d\x83\x1d\x01\x26\x90\x4e\xfd\x3d\x1a\xec\
\x43\x83\x8d\x10\x09\x9c\xd0\x44\x55\x6e\x59\x56\x38\xb4\xa6\x6e\
\xe8\x29\x32\x13\xfd\x67\x59\xad\x7b\x21\x97\xb8\x61\x75\xcc\x4f\
\xf0\xa5\x42\x55\x50\x9f\x85\x16\x6d\x3d\x5c\xc5\x5e\xbc\x88\x69\
\xa9\x08\x7d\x5b\xf0\x26\x3c\xa4\xfe\x56\xeb\x81\xb0\x23\xc0\x1c\
\xd6\x50\x79\x75\x76\x72\xe1\xe5\xd3\x42\xbf\xb3\xb5\x87\xb0\x6b\
\xf6\x1d\xcc\x19\x9e\x84\xb4\x99\x17\xb1\xbb\xe8\x33\x9c\xa8\xfc\
\xd5\x3a\xe2\x3e\x02\x14\x11\xa4\x9f\x17\x51\xd7\x79\x8b\x48\x29\
\x49\xac\x65\x50\x77\xdd\x19\x54\xf9\xb6\xb0\x23\x20\xa6\x0a\x77\
\x59\x73\x8c\xb7\x63\x02\xe6\xf2\x8c\xf8\xe6\xd9\x7c\xb4\x6a\x1b\
\xa0\xaa\xcf\xe6\xfd\x5f\x4c\xfb\x03\x23\x3c\xa2\xf1\x7b\xe9\x66\
\x94\xdd\xbb\xc2\xfb\xfa\x90\x53\xbe\x0b\x33\x42\x13\x79\x5b\xa5\
\xce\x81\x33\xa9\xde\x91\x73\x86\x01\x85\xa8\xa5\xa7\x0e\xa9\xd3\
\xcf\x91\x98\x94\x48\xfc\x2b\x84\x32\x20\x13\x5b\x54\xcb\xb9\x26\
\xe4\x62\x77\x72\xb0\x84\xb6\x21\x81\x0b\xb0\x0f\xea\xce\x0a\x9c\
\xab\x3d\x8c\x83\xa5\x1b\x49\x1f\x1a\xf4\x18\x3b\xf1\x71\x5e\x2c\
\x65\xcc\x7d\x9b\xa1\x30\x80\x00\x13\x0e\x3b\x68\x92\x63\x0f\xf2\
\x55\x6f\xb9\xbc\x8c\x4e\xc0\x48\xbe\x12\xb3\xc5\x04\xbd\xa9\x87\
\xfe\x74\x7c\x7f\x85\xf4\x33\xd3\x4f\x26\x92\x53\x01\x6b\xc2\xbb\
\xe3\xb7\x63\x72\xe0\x5c\x2a\x48\xaf\xf3\x03\xab\x7f\xd9\x1d\x0c\
\x76\x5b\xc0\xc0\x06\x69\xba\xca\xf1\x67\xd9\x56\x44\x07\xcc\xc6\
\xfb\x13\x7f\x44\x43\x77\x15\x39\xd5\x42\x40\xe6\xce\x4e\x0a\x12\
\xa9\x17\x2f\x56\xde\x74\xe0\x78\x48\xfd\xd0\x6d\xec\x40\xd2\xb8\
\x6d\xdc\x39\xdb\x86\x36\x9d\x9a\xd4\x63\xb6\xce\xe8\x18\x03\x08\
\x30\x28\xe8\xd0\xc9\x2e\xdf\x81\xa3\x65\xe9\x98\x44\x5a\xd8\x91\
\x70\x83\x72\x7c\x32\x65\x44\x2d\x4d\xde\x88\x0e\x7d\x0b\x7f\x36\
\x76\x57\x93\x1e\x62\x90\x9e\x50\x8c\x29\x41\x2f\xf3\x54\x9d\x15\
\xf1\x16\x02\xe5\x4f\xc1\x57\x16\x4e\x76\xcd\xd6\x19\x87\x86\xc3\
\xfb\x00\x4b\xc9\x40\xc5\x08\x7c\x10\xbd\x0b\x32\x5a\x39\xc3\x3d\
\xad\x86\x52\xac\x8d\xde\x95\xa4\x83\xde\xf4\x62\xe9\x99\xa6\x5a\
\x42\xf6\x2d\x94\x8e\xe1\x14\xb5\x9d\xbc\xff\x78\xc5\x2f\x38\x7c\
\x2b\xd5\xa1\x16\x1c\x12\x60\x60\x9a\x60\xfb\x19\xa6\x7c\x86\x6f\
\x49\xa8\x32\x8a\x22\xe4\x4e\xd5\xae\x03\x55\xed\xd7\xb9\xda\xcb\
\x5a\x55\xbc\xd2\x31\x8d\x84\xb9\x8d\xc5\x2a\xba\xc0\xf4\x21\xb7\
\x92\x48\x94\xa6\xda\x89\xd6\x16\x0f\x24\xd0\x07\x46\xc4\x48\x67\
\x05\x3b\x9c\xd8\xe5\x82\x09\x50\x48\x25\x5a\x2c\x94\xda\x89\x8d\
\x9d\x1d\xcb\xa2\x36\xf1\x4c\xe9\x43\x2e\x45\x22\x73\x88\x48\x88\
\xe2\x96\x46\xac\xb7\xb6\x1d\x42\x28\x10\x91\x23\x09\x1d\x50\xce\
\xbc\xb8\xb0\x27\xab\x1b\xac\xdf\x16\xec\xb6\x93\x5f\xbd\x0f\x41\
\x8a\x51\xbc\x2c\x33\x0c\xf7\x18\x0f\x29\x15\xb7\x42\xcd\x49\xbb\
\xdb\x10\xc3\xa0\x22\x7c\x54\xf8\xba\x84\xd2\x01\xf4\x1e\xae\xa8\
\x8f\x5b\x7b\x80\xe7\xc3\x96\x22\x2e\x64\x01\x4f\x63\x5b\x3c\x16\
\x02\x0c\xec\x94\xdc\x5e\xb8\x8a\x8e\xe1\xfb\x24\x58\xc6\xb0\x3b\
\x87\x2d\x1e\x7a\x0b\xfe\x0d\x58\xe5\x3c\x53\x73\x90\xce\x18\x31\
\xdd\x8e\x3b\x91\x51\xf2\x25\xbf\x63\xd8\x96\xe7\x87\x16\xe1\xa3\
\x40\x6b\xec\xe2\x19\xc2\x8a\x58\xdf\x55\xbe\x0f\x8f\x6d\x0b\x6c\
\xc1\x84\xc7\xfe\x5f\xe8\xef\x9c\xe1\x3f\x21\xe0\x08\x4f\x98\x00\
\xf0\x37\xdb\xdc\x8b\x3f\xd8\x8c\xa4\x06\x00\x00\x00\x00\x49\x45\
\x4e\x44\xae\x42\x60\x82\
\x00\x00\x04\x96\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x04\x5d\x49\x44\x41\x54\x78\x5e\xed\x97\x5f\x88\x55\x55\
\x14\xc6\x7f\x6b\xef\x7d\x47\x9d\xc1\xcc\xd1\xb9\x33\x93\x9a\xe3\
\x9f\x34\xd1\xd1\x19\x15\x8d\xb0\x12\x91\x0c\xc2\x4a\x2b\x28\x11\
\xa2\x1e\x22\xc2\xa7\x1e\x8c\xe8\xb1\xb7\x88\x20\x21\x31\xc9\x7a\
\x8b\xa0\x88\xd4\x1e\xb2\xe9\x41\x24\x82\x5e\x52\x47\xa3\x8c\x8c\
\x89\xc4\x74\x66\x9a\xa1\x87\x2c\xbd\xf7\xec\xd5\x71\xb5\x71\x33\
\xdd\x49\x88\x31\x7d\xf1\x83\x7d\xd6\x3d\xeb\xae\xb3\xbe\x6f\xef\
\xb5\xf6\xf9\x23\xaa\xca\x8d\x84\xe3\x06\xe3\xa6\x00\x01\xc2\x37\
\xef\x3c\xf1\x85\xa3\xb6\x16\x8d\xf0\x7f\xf7\x84\x08\x88\x23\x52\
\xf9\x6a\xe9\x33\xef\xaf\x0b\x40\xf3\x65\xf2\xae\x6d\xef\x42\x2c\
\x30\x68\x44\x55\x91\xa4\x71\x62\x50\x14\x90\x44\x6c\x70\x9e\x81\
\xf7\x9e\x5e\x0b\x34\x07\x60\x12\x36\xf3\x82\xef\x3e\xd9\x69\x41\
\x33\x97\xdd\x0d\x51\x93\x5a\x99\x20\xbf\xda\xc0\x09\xc3\x27\xbf\
\x04\x8d\xdc\xb9\xf9\x55\xb3\xc0\xa4\x00\x08\x16\x04\xe2\x02\x4e\
\x3c\xde\x4d\x06\x07\x43\x27\x8e\x20\xfc\x47\x01\xd2\x28\x60\x66\
\xf7\xbd\x00\x78\x57\x21\x6a\x01\x9a\x84\x81\x84\x1c\x18\x69\xeb\
\x5e\x6f\x02\x10\x61\xe8\xf8\x61\xaa\x2b\x36\x80\x02\x92\x33\x83\
\x72\x35\x38\x51\x9c\xa4\x70\x85\x88\x30\x78\xec\x30\x33\x97\xaf\
\xb7\xfc\x26\x40\x23\x09\x24\x01\x8a\x6a\x44\x10\x10\xcf\x70\xbf\
\x91\x9b\x0f\x11\xc4\x55\x72\x29\xd4\x62\x01\x45\xc4\x99\x1f\x05\
\xa3\xd2\x82\xe1\x51\xa5\xff\x34\xd4\x0a\xa8\x04\x58\xbe\x00\xda\
\x7b\x36\x30\x58\x4e\x68\x46\x29\x40\x34\x92\xae\x6f\xd8\x86\x46\
\xf4\xeb\xc9\x23\x54\x7b\x36\x82\x0a\xe0\xcc\x77\x7a\xef\x4e\x7e\
\x7c\xfb\x25\x46\x3f\xdf\xcb\x48\xdf\x5b\x8c\x7e\xb6\x07\x17\x26\
\x97\xfe\x17\x19\x2d\xcf\x47\xfa\xf6\x30\x72\x68\x37\xde\x07\x8e\
\x9f\x56\x3a\x67\xc0\xba\x6e\x47\x67\x2b\xf4\xff\x10\x71\x22\xb4\
\xad\xd8\x68\xb9\x6d\x32\x19\x04\x32\x10\x09\x08\x1e\xf0\xa8\xa4\
\xe6\x51\x61\xe1\xf3\x6f\xe0\xc2\x24\x54\x04\x62\x5a\x01\x55\xee\
\xd8\xb1\x1b\x44\xd0\x18\x21\xd6\x40\x6b\xd4\xea\xca\x9c\xce\x26\
\x5a\xa7\x0a\xea\x94\x9f\xce\x5f\x04\xb1\x9c\x08\xde\x38\x1a\x04\
\x68\x3a\x88\x0f\x88\x73\x88\xf3\x08\x49\x80\x08\x02\x7c\xbf\xeb\
\x39\xc4\x7b\xa6\xcf\xed\x42\x55\xc9\xcd\xa1\x68\x51\xd0\xf6\xe0\
\x0b\x76\x6d\xc5\xc3\x99\xf3\x05\x4e\x2a\x9c\x39\x57\xb7\x73\x11\
\x8f\x0d\xe7\x8c\xc3\x52\x37\xae\x80\x5a\x50\x1e\x89\x04\x20\xc2\
\xa2\x1d\x6f\x5a\xcd\x35\xa9\xcd\xfc\x80\x46\x88\x75\x14\x47\xef\
\xe2\x29\x1c\x3d\xf5\x07\x03\x67\x6b\x54\x82\xb0\x66\x59\xb3\xf9\
\x85\x9c\x1b\x74\xdc\x12\xa4\x00\xd7\x20\xc0\x89\xe0\xf2\x5d\x12\
\x4d\x56\x10\x20\xf9\xd2\xaa\x75\xb6\x05\x6e\xab\x4e\xb1\x18\x55\
\x88\x51\x89\x2a\x88\xcb\xb9\xaf\xde\x03\x22\x63\x04\x48\x12\xf0\
\x61\xdf\x39\x16\xce\x69\x4e\x13\x56\x56\x2f\xbd\x95\x0f\xfa\x7e\
\x61\xc1\x6c\xf3\x65\xa4\xff\x57\x2e\x99\x46\x4c\x22\x72\x4e\x19\
\xb7\x07\x32\x4c\x9d\x33\x4b\x12\x60\x47\xe0\xb1\xfb\x67\xe5\xf9\
\x2a\x44\x85\xc7\x4b\x5f\x82\x11\x69\xfe\x8d\x91\x03\x08\xa8\x11\
\xe7\xdc\xe3\x0a\x50\xd4\xd4\x35\x96\x20\x1d\x63\x26\x10\x01\xe7\
\xb0\x06\xf3\xce\xfc\xd4\xea\x50\xa4\x18\x49\x31\x2a\x24\x87\x90\
\xcb\x1b\xd0\x7f\xed\x01\xef\x1b\x77\xc1\x58\x20\x80\xf7\x50\xaf\
\x2b\xbb\x0e\x2a\x27\x06\x60\x6a\x33\xbc\xb2\x5d\x98\x71\x8b\x10\
\x23\x68\x5e\x11\x3b\x8f\x2a\x70\x65\x17\x5c\xb5\x07\x7c\x43\x0f\
\xfc\x13\x4e\xa0\x29\xc0\xce\x7d\x35\x36\xad\xf2\xdc\xd7\x0d\xfb\
\x0e\x15\xbc\xf6\x11\x74\x55\xe1\xb7\xdf\x23\xd5\xe9\x8e\x98\xf4\
\xb7\x95\xa2\x36\xad\xf6\x68\xce\x3d\xbe\x00\x8b\xc6\x03\x02\x2e\
\x20\x80\xc6\x48\x03\x1c\x9c\x3a\x1b\x69\x6f\x75\x6c\xb9\xa7\x89\
\x07\x5e\xbe\xc0\x92\xdb\x1d\xf3\x3a\x84\x9e\x79\x81\x6f\x7f\x2e\
\xe8\x9d\xef\x89\x8a\x8d\xf9\xa5\x1f\x09\x20\x29\x37\x1e\xf4\x9f\
\x25\xd0\xbc\xbc\x6d\xbd\x8f\x30\x78\x6c\x3f\xd5\x9e\x87\x50\x29\
\xd2\x96\xcb\x10\x81\x8e\xe9\x9e\xa7\x36\x06\x2e\x5e\x12\x5e\x7f\
\xb6\x85\xc5\xb3\x1c\xf5\x88\xc5\xae\x5e\x54\x49\x25\x48\x03\x41\
\xf1\x0c\x1d\x3b\x40\xb5\xcc\xad\x5a\x90\x39\x21\x40\xee\x1a\xe7\
\x9b\x00\x68\x5f\xf9\x28\x83\x47\x3f\x2e\x2f\x78\x78\xdc\x3e\x68\
\x69\x2e\x47\x0b\xd4\x14\xe6\x76\xc0\x9f\x45\x22\x23\x03\x49\x03\
\x61\xe8\xe8\x01\xcb\x69\x72\xf0\xc6\x85\x64\x01\x58\xac\x73\x0c\
\xf7\x7f\x8a\x3d\x96\x7b\x36\x97\x22\xb6\x32\xf8\xf5\x7e\xab\xdb\
\x44\xa0\xaa\x25\xf9\x16\x80\x72\x15\x0e\x82\x38\x3a\xee\xda\x8e\
\xe4\x12\x64\xb9\xce\x05\x54\x23\x22\x66\x69\x5f\xb5\x15\x98\xc0\
\x5b\x91\x2a\xa0\x26\xc2\xb6\xa0\x0b\x66\x13\x5f\x16\x80\xf3\x88\
\x6f\xa2\xba\xe6\x49\x50\xb5\x3a\x89\x2a\xd7\x0a\x62\x07\xa1\x7d\
\xcd\x36\xb3\x25\x97\x71\x26\x01\xe8\xc5\xc2\x9d\x29\x5f\x12\x67\
\x13\x0b\x4c\x31\x66\xae\x2d\x52\xd9\x41\x8c\xfc\x32\x27\xa0\x02\
\x4c\x03\xba\x80\xd6\xeb\xf8\x9d\x10\x81\x11\x60\x20\x00\x17\x80\
\x01\xe0\x2c\x20\x5c\x1f\x28\x70\xc9\xb8\x6f\x34\x6e\x7e\x1d\xff\
\x05\x99\x91\xdf\x52\x52\xa7\x93\x17\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
"
qt_resource_name = "\
\x00\x07\
\x07\x3b\xe0\xb3\
\x00\x70\
\x00\x6c\x00\x75\x00\x67\x00\x69\x00\x6e\x00\x73\
\x00\x06\
\x07\x9c\xa3\xfd\
\x00\x73\
\x00\x65\x00\x73\x00\x6d\x00\x69\x00\x6d\
\x00\x09\
\x06\x76\x82\x67\
\x00\x63\
\x00\x72\x00\x6f\x00\x70\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x09\xa9\x84\xc7\
\x00\x63\
\x00\x65\x00\x72\x00\x74\x00\x66\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = "\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x02\x00\x00\x00\x03\
\x00\x00\x00\x26\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x06\xab\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| [
"[email protected]"
] | |
a3a07052f03a7dc28d6f2c41f80889cbc46bc905 | 32c6590127686114bfacff11fa7cd646025d3819 | /test_project/test_app/models.py | e45ce8711f5854e739457ed3382e0356433d6b1c | [
"BSD-2-Clause"
] | permissive | revsys/django-test-plus | 42cc6cddde30f561bec91294d2e85c21cbc62887 | 9cfb0c865b1dcad1ca6c9c4717d67ea8d476269c | refs/heads/main | 2023-08-29T03:52:59.089300 | 2023-07-11T11:37:47 | 2023-07-11T11:37:47 | 36,131,033 | 618 | 75 | BSD-3-Clause | 2023-07-11T11:35:24 | 2015-05-23T16:08:52 | Python | UTF-8 | Python | false | false | 154 | py | from django.db import models
class Data(models.Model):
""" Simple model to test our query assertions """
name = models.CharField(max_length=50)
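# Usage sketch: this model exists so the test suite can count queries against it,
# e.g. with django-test-plus's assertNumQueriesLessThan helper. The test case
# below is illustrative, not part of the shipped test project.
#
#   from test_plus.test import TestCase
#
#   class DataQueryTest(TestCase):
#       def test_listing_is_cheap(self):
#           Data.objects.bulk_create([Data(name='row-%d' % i) for i in range(10)])
#           with self.assertNumQueriesLessThan(3):
#               list(Data.objects.all())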
| [
"[email protected]"
] | |
053f64e6385d70d8e49c045ff44d38e56873a99a | d4a569dcf616b7f05e53a44803e38196b436b8b9 | /[email protected]/Lib/site-packages/mypy/typeshed/third_party/2and3/dateutil/utils.pyi | 3eefd2e48ba5311cbdb709991a6815cdd94459e1 | [
"MIT"
] | permissive | nverbois/TFE21-232 | ac3178d24939c872c02a671c0f1d8cc471af516b | 7113837b5263b5c508bfc6903cb6982b48aa7ee4 | refs/heads/main | 2023-06-05T18:50:59.207392 | 2021-06-25T19:54:40 | 2021-06-25T19:54:40 | 337,691,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 281 | pyi | from typing import Optional
from datetime import datetime, tzinfo, timedelta
def default_tzinfo(dt: datetime, tzinfo: tzinfo) -> datetime: ...
def today(tzinfo: Optional[tzinfo] = ...) -> datetime: ...
def within_delta(dt1: datetime, dt2: datetime, delta: timedelta) -> bool: ...
| [
"[email protected]"
] |